gcc/config/i386/i386.c
(override_options): Don't thread the prologue if profiling.
1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include <stdio.h>
22 #include <setjmp.h>
23 #include <ctype.h>
24 #include "config.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "tree.h"
35 #include "flags.h"
36 #include "except.h"
37 #include "function.h"
38
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
46 #endif
47
48 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
49 {
50 reg_p,
51 mem_p,
52 imm_p
53 };
54
55 /* Processor costs (relative to an add) */
56 struct processor_costs i386_cost = { /* 386 specific costs */
57 1, /* cost of an add instruction (2 cycles) */
58 1, /* cost of a lea instruction */
59 3, /* variable shift costs */
60 2, /* constant shift costs */
61 6, /* cost of starting a multiply */
62 1, /* cost of multiply per each bit set */
63 23 /* cost of a divide/mod */
64 };
65
66 struct processor_costs i486_cost = { /* 486 specific costs */
67 1, /* cost of an add instruction */
68 1, /* cost of a lea instruction */
69 3, /* variable shift costs */
70 2, /* constant shift costs */
71 12, /* cost of starting a multiply */
72 1, /* cost of multiply per each bit set */
73 40 /* cost of a divide/mod */
74 };
75
76 struct processor_costs pentium_cost = {
77 1, /* cost of an add instruction */
78 1, /* cost of a lea instruction */
79 3, /* variable shift costs */
80 1, /* constant shift costs */
81 12, /* cost of starting a multiply */
82 1, /* cost of multiply per each bit set */
83 25 /* cost of a divide/mod */
84 };
85
86 struct processor_costs *ix86_cost = &pentium_cost;
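/* Illustrative arithmetic, not from the original source: combining the
   "start" and "per bit set" entries above, a multiply by a constant with
   three bits set costs roughly 6 + 3 = 9 add-equivalents on a 386 versus
   12 + 3 = 15 on a 486 or Pentium, while a divide/mod costs 23, 40 and 25
   add-equivalents respectively.  ix86_cost selects which table is
   consulted; it defaults to the Pentium table above. */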
87
88 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
89
90 extern FILE *asm_out_file;
91 extern char *strcat ();
92
93 char *singlemove_string ();
94 char *output_move_const_single ();
95 char *output_fp_cc0_set ();
96
97 char *hi_reg_name[] = HI_REGISTER_NAMES;
98 char *qi_reg_name[] = QI_REGISTER_NAMES;
99 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
100
101 /* Array of the smallest class containing reg number REGNO, indexed by
102 REGNO. Used by REGNO_REG_CLASS in i386.h. */
103
104 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
105 {
106 /* ax, dx, cx, bx */
107 AREG, DREG, CREG, BREG,
108 /* si, di, bp, sp */
109 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
110 /* FP registers */
111 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
112 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
113 /* arg pointer */
114 INDEX_REGS
115 };
116
117 /* Test and compare insns in i386.md store the information needed to
118 generate branch and scc insns here. */
119
120 struct rtx_def *i386_compare_op0 = NULL_RTX;
121 struct rtx_def *i386_compare_op1 = NULL_RTX;
122 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
123
124 /* which cpu are we scheduling for */
125 enum processor_type ix86_cpu;
126
127 /* which instruction set architecture to use. */
128 int ix86_isa;
129
130 /* Strings to hold which cpu and instruction set architecture to use. */
131 char *ix86_cpu_string; /* for -mcpu=<xxx> */
132 char *ix86_isa_string; /* for -misa=<xxx> */
133
134 /* Register allocation order */
135 char *i386_reg_alloc_order;
136 static char regs_allocated[FIRST_PSEUDO_REGISTER];
137
138 /* # of registers to use to pass arguments. */
139 char *i386_regparm_string; /* # registers to use to pass args */
140 int i386_regparm; /* i386_regparm_string as a number */
141
142 /* Alignment to use for loops and jumps */
143 char *i386_align_loops_string; /* power of two alignment for loops */
144 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
145 char *i386_align_funcs_string; /* power of two alignment for functions */
146 char *i386_branch_cost_string; /* values 1-5: see jump.c */
147
148 int i386_align_loops; /* power of two alignment for loops */
149 int i386_align_jumps; /* power of two alignment for non-loop jumps */
150 int i386_align_funcs; /* power of two alignment for functions */
151 int i386_branch_cost; /* values 1-5: see jump.c */
152
153 /* Sometimes certain combinations of command options do not make
154 sense on a particular target machine. You can define a macro
155 `OVERRIDE_OPTIONS' to take account of this. This macro, if
156 defined, is executed once just after all the command options have
157 been parsed.
158
159 Don't use this macro to turn on various extra optimizations for
160 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
161
162 void
163 override_options ()
164 {
165 int ch, i, j, regno;
166 char *p;
167 int def_align;
168
169 static struct ptt
170 {
171 char *name; /* Canonical processor name. */
172 enum processor_type processor; /* Processor type enum value. */
173 struct processor_costs *cost; /* Processor costs */
174 int target_enable; /* Target flags to enable. */
175 int target_disable; /* Target flags to disable. */
176 } processor_target_table[]
177 = {{PROCESSOR_COMMON_STRING, PROCESSOR_COMMON, &i486_cost, 0, 0},
178 {PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
179 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
180 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
181 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
182 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
183 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};
184
185 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
186
187 #ifdef SUBTARGET_OVERRIDE_OPTIONS
188 SUBTARGET_OVERRIDE_OPTIONS;
189 #endif
190
191 /* Validate registers in register allocation order */
192 if (i386_reg_alloc_order)
193 {
194 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
195 {
196 switch (ch)
197 {
198 case 'a': regno = 0; break;
199 case 'd': regno = 1; break;
200 case 'c': regno = 2; break;
201 case 'b': regno = 3; break;
202 case 'S': regno = 4; break;
203 case 'D': regno = 5; break;
204 case 'B': regno = 6; break;
205
206 default: fatal ("Register '%c' is unknown", ch);
207 }
208
209 if (regs_allocated[regno])
210 fatal ("Register '%c' was already specified in the allocation order", ch);
211
212 regs_allocated[regno] = 1;
213 }
214 }
215
216 /* Get the architectural level. */
217 if (ix86_isa_string == (char *)0)
218 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
219
220 for (i = 0; i < ptt_size; i++)
221 if (! strcmp (ix86_isa_string, processor_target_table[i].name))
222 {
223 ix86_isa = processor_target_table[i].processor;
224 if (ix86_cpu_string == (char *)0)
225 ix86_cpu_string = processor_target_table[i].name;
226 break;
227 }
228
229 if (i == ptt_size)
230 {
231 error ("bad value (%s) for -misa= switch", ix86_isa_string);
232 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
233 ix86_isa = PROCESSOR_DEFAULT;
234 }
235
236 for (j = 0; j < ptt_size; j++)
237 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
238 {
239 ix86_cpu = processor_target_table[j].processor;
240 if (i > j && (int)ix86_isa >= (int)PROCESSOR_PENTIUMPRO)
241 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_isa_string);
242
243 target_flags |= processor_target_table[j].target_enable;
244 target_flags &= ~processor_target_table[j].target_disable;
245 break;
246 }
247
248 if (j == ptt_size)
249 {
250 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
251 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
252 ix86_cpu = PROCESSOR_DEFAULT;
253 }
254
255 /* Validate -mregparm= value */
256 if (i386_regparm_string)
257 {
258 i386_regparm = atoi (i386_regparm_string);
259 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
260 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
261 }
262
263 def_align = (TARGET_386) ? 2 : 4;
264
265 /* Validate -malign-loops= value, or provide default */
266 if (i386_align_loops_string)
267 {
268 i386_align_loops = atoi (i386_align_loops_string);
269 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
270 fatal ("-malign-loops=%d is not between 0 and %d",
271 i386_align_loops, MAX_CODE_ALIGN);
272 }
273 else
274 i386_align_loops = 2;
275
276 /* Validate -malign-jumps= value, or provide default */
277 if (i386_align_jumps_string)
278 {
279 i386_align_jumps = atoi (i386_align_jumps_string);
280 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
281 fatal ("-malign-jumps=%d is not between 0 and %d",
282 i386_align_jumps, MAX_CODE_ALIGN);
283 }
284 else
285 i386_align_jumps = def_align;
286
287 /* Validate -malign-functions= value, or provide default */
288 if (i386_align_funcs_string)
289 {
290 i386_align_funcs = atoi (i386_align_funcs_string);
291 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
292 fatal ("-malign-functions=%d is not between 0 and %d",
293 i386_align_funcs, MAX_CODE_ALIGN);
294 }
295 else
296 i386_align_funcs = def_align;
297
298 /* Validate -mbranch-cost= value, or provide default */
299 if (i386_branch_cost_string)
300 {
301 i386_branch_cost = atoi (i386_branch_cost_string);
302 if (i386_branch_cost < 0 || i386_branch_cost > 5)
303 fatal ("-mbranch-cost=%d is not between 0 and 5",
304 i386_branch_cost);
305 }
306 else
307 i386_branch_cost = TARGET_PENTIUMPRO ? 4 : 1;
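  /* Worked example of the option handling above (illustrative): the
     alignment values are powers of two, so -malign-loops=4 requests
     2^4 = 16-byte alignment for loop targets.  With no -malign-* switch,
     loops default to 2 (4 bytes), while jumps and functions default to
     def_align: 2 on a 386, 4 (16 bytes) otherwise.  Branch cost defaults
     to 4 on the PentiumPro and 1 elsewhere. */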
308
309 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
310 flag_omit_frame_pointer = 1;
311
312 /* pic references don't explicitly mention pic_offset_table_rtx */
313 /* code threaded into the prologue may conflict with profiling */
314 if (flag_pic || profile_flag || profile_block_flag)
315 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
316 }
317 \f
318 /* A C statement (sans semicolon) to choose the order in which to
319 allocate hard registers for pseudo-registers local to a basic
320 block.
321
322 Store the desired register order in the array `reg_alloc_order'.
323 Element 0 should be the register to allocate first; element 1, the
324 next register; and so on.
325
326 The macro body should not assume anything about the contents of
327 `reg_alloc_order' before execution of the macro.
328
329 On most machines, it is not necessary to define this macro. */
330
331 void
332 order_regs_for_local_alloc ()
333 {
334 int i, ch, order, regno;
335
336 /* User specified the register allocation order */
337 if (i386_reg_alloc_order)
338 {
339 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
340 {
341 switch (ch)
342 {
343 case 'a': regno = 0; break;
344 case 'd': regno = 1; break;
345 case 'c': regno = 2; break;
346 case 'b': regno = 3; break;
347 case 'S': regno = 4; break;
348 case 'D': regno = 5; break;
349 case 'B': regno = 6; break;
350 }
351
352 reg_alloc_order[order++] = regno;
353 }
354
355 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
356 {
357 if (!regs_allocated[i])
358 reg_alloc_order[order++] = i;
359 }
360 }
361
 362   /* If the user did not specify a register allocation order, use the natural order */
363 else
364 {
365 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
366 reg_alloc_order[i] = i;
367 }
368 }
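/* Illustrative example, not from the original source: given the letter
   mapping above (a=0/%eax, d=1/%edx, c=2/%ecx, b=3/%ebx, S=4/%esi,
   D=5/%edi, B=6/%ebp), an i386_reg_alloc_order string of "dS" makes local
   allocation try %edx first, then %esi, and then the remaining hard
   registers in natural order via the regs_allocated fallback loop. */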
369
370 \f
371 void
372 optimization_options (level)
373 int level;
374 {
375 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
376 make the problem with not enough registers even worse */
377 #ifdef INSN_SCHEDULING
378 if (level > 1)
379 flag_schedule_insns = 0;
380 #endif
381 }
382 \f
383 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
384 attribute for DECL. The attributes in ATTRIBUTES have previously been
385 assigned to DECL. */
386
387 int
388 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
389 tree decl;
390 tree attributes;
391 tree identifier;
392 tree args;
393 {
394 return 0;
395 }
396
397 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
398 attribute for TYPE. The attributes in ATTRIBUTES have previously been
399 assigned to TYPE. */
400
401 int
402 i386_valid_type_attribute_p (type, attributes, identifier, args)
403 tree type;
404 tree attributes;
405 tree identifier;
406 tree args;
407 {
408 if (TREE_CODE (type) != FUNCTION_TYPE
409 && TREE_CODE (type) != FIELD_DECL
410 && TREE_CODE (type) != TYPE_DECL)
411 return 0;
412
413 /* Stdcall attribute says callee is responsible for popping arguments
414 if they are not variable. */
415 if (is_attribute_p ("stdcall", identifier))
416 return (args == NULL_TREE);
417
418 /* Cdecl attribute says the callee is a normal C declaration */
419 if (is_attribute_p ("cdecl", identifier))
420 return (args == NULL_TREE);
421
422 /* Regparm attribute specifies how many integer arguments are to be
423 passed in registers */
424 if (is_attribute_p ("regparm", identifier))
425 {
426 tree cst;
427
428 if (!args || TREE_CODE (args) != TREE_LIST
429 || TREE_CHAIN (args) != NULL_TREE
430 || TREE_VALUE (args) == NULL_TREE)
431 return 0;
432
433 cst = TREE_VALUE (args);
434 if (TREE_CODE (cst) != INTEGER_CST)
435 return 0;
436
437 if (TREE_INT_CST_HIGH (cst) != 0
438 || TREE_INT_CST_LOW (cst) < 0
439 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
440 return 0;
441
442 return 1;
443 }
444
445 return 0;
446 }
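/* Usage sketch, not from the original source, assuming REGPARM_MAX (from
   i386.h) is 3:

     int f (int a, int b) __attribute__ ((regparm (2)));    accepted
     int g (int a) __attribute__ ((regparm (9)));            rejected: > REGPARM_MAX
     int h (int a) __attribute__ ((regparm ("two")));        rejected: not an INTEGER_CST

   "stdcall" and "cdecl" are accepted only when given no arguments. */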
447
448 /* Return 0 if the attributes for two types are incompatible, 1 if they
449 are compatible, and 2 if they are nearly compatible (which causes a
450 warning to be generated). */
451
452 int
453 i386_comp_type_attributes (type1, type2)
454 tree type1;
455 tree type2;
456 {
457 return 1;
458 }
459
460 \f
461 /* Value is the number of bytes of arguments automatically
462 popped when returning from a subroutine call.
463 FUNDECL is the declaration node of the function (as a tree),
464 FUNTYPE is the data type of the function (as a tree),
465 or for a library call it is an identifier node for the subroutine name.
466 SIZE is the number of bytes of arguments passed on the stack.
467
468 On the 80386, the RTD insn may be used to pop them if the number
469 of args is fixed, but if the number is variable then the caller
470 must pop them all. RTD can't be used for library calls now
471 because the library is compiled with the Unix compiler.
472 Use of RTD is a selectable option, since it is incompatible with
473 standard Unix calling sequences. If the option is not selected,
474 the caller must always pop the args.
475
476 The attribute stdcall is equivalent to RTD on a per module basis. */
477
478 int
479 i386_return_pops_args (fundecl, funtype, size)
480 tree fundecl;
481 tree funtype;
482 int size;
483 {
484 int rtd = TARGET_RTD;
485
486 if (TREE_CODE (funtype) == IDENTIFIER_NODE)
487 return 0;
488
489 /* Cdecl functions override -mrtd, and never pop the stack */
490 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
491
492 /* Stdcall functions will pop the stack if not variable args */
493 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
494 rtd = 1;
495
496 if (rtd
497 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
498 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
499 return size;
500 }
501
502 /* Lose any fake structure return argument */
503 if (aggregate_value_p (TREE_TYPE (funtype)))
504 return GET_MODE_SIZE (Pmode);
505
506 return 0;
507 }
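/* Illustrative examples, not from the original source: for
     void f (int a, int b) __attribute__ ((stdcall));
   the argument list is fixed, so this returns SIZE (8 bytes here) and the
   callee pops its own arguments (the epilogue can return with a ret that
   pops 8 bytes).  Plain fixed-argument functions behave the same way
   under -mrtd.  A cdecl or varargs function returns 0 and the caller
   pops, except that a function returning an aggregate in memory still
   pops the 4-byte hidden return-pointer argument. */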
508
509 \f
510 /* Argument support functions. */
511
512 /* Initialize a variable CUM of type CUMULATIVE_ARGS
513 for a call to a function whose data type is FNTYPE.
514 For a library call, FNTYPE is 0. */
515
516 void
517 init_cumulative_args (cum, fntype, libname)
518 CUMULATIVE_ARGS *cum; /* argument info to initialize */
519 tree fntype; /* tree ptr for function decl */
520 rtx libname; /* SYMBOL_REF of library name or 0 */
521 {
522 static CUMULATIVE_ARGS zero_cum;
523 tree param, next_param;
524
525 if (TARGET_DEBUG_ARG)
526 {
527 fprintf (stderr, "\ninit_cumulative_args (");
528 if (fntype)
529 {
530 tree ret_type = TREE_TYPE (fntype);
531 fprintf (stderr, "fntype code = %s, ret code = %s",
532 tree_code_name[ (int)TREE_CODE (fntype) ],
533 tree_code_name[ (int)TREE_CODE (ret_type) ]);
534 }
535 else
536 fprintf (stderr, "no fntype");
537
538 if (libname)
539 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
540 }
541
542 *cum = zero_cum;
543
544 /* Set up the number of registers to use for passing arguments. */
545 cum->nregs = i386_regparm;
546 if (fntype)
547 {
548 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
549 if (attr)
550 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
551 }
552
553 /* Determine if this function has variable arguments. This is
 554      indicated by the last argument being 'void_type_node' if there
555 are no variable arguments. If there are variable arguments, then
556 we won't pass anything in registers */
557
558 if (cum->nregs)
559 {
560 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
561 param != (tree)0;
562 param = next_param)
563 {
564 next_param = TREE_CHAIN (param);
565 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
566 cum->nregs = 0;
567 }
568 }
569
570 if (TARGET_DEBUG_ARG)
571 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
572
573 return;
574 }
575
576 /* Update the data in CUM to advance over an argument
577 of mode MODE and data type TYPE.
578 (TYPE is null for libcalls where that information may not be available.) */
579
580 void
581 function_arg_advance (cum, mode, type, named)
582 CUMULATIVE_ARGS *cum; /* current arg information */
583 enum machine_mode mode; /* current arg mode */
584 tree type; /* type of the argument or 0 if lib support */
585 int named; /* whether or not the argument was named */
586 {
587 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
588 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
589
590 if (TARGET_DEBUG_ARG)
591 fprintf (stderr,
592 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
593 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
594
595 cum->words += words;
596 cum->nregs -= words;
597 cum->regno += words;
598
599 if (cum->nregs <= 0)
600 {
601 cum->nregs = 0;
602 cum->regno = 0;
603 }
604
605 return;
606 }
607
608 /* Define where to put the arguments to a function.
609 Value is zero to push the argument on the stack,
610 or a hard register in which to store the argument.
611
612 MODE is the argument's machine mode.
613 TYPE is the data type of the argument (as a tree).
614 This is null for libcalls where that information may
615 not be available.
616 CUM is a variable of type CUMULATIVE_ARGS which gives info about
617 the preceding args and about the function being called.
618 NAMED is nonzero if this argument is a named parameter
619 (otherwise it is an extra parameter matching an ellipsis). */
620
621 struct rtx_def *
622 function_arg (cum, mode, type, named)
623 CUMULATIVE_ARGS *cum; /* current arg information */
624 enum machine_mode mode; /* current arg mode */
625 tree type; /* type of the argument or 0 if lib support */
626 int named; /* != 0 for normal args, == 0 for ... args */
627 {
628 rtx ret = NULL_RTX;
629 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
630 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
631
632 switch (mode)
633 {
634 default: /* for now, pass fp/complex values on the stack */
635 break;
636
637 case BLKmode:
638 case DImode:
639 case SImode:
640 case HImode:
641 case QImode:
642 if (words <= cum->nregs)
643 ret = gen_rtx (REG, mode, cum->regno);
644 break;
645 }
646
647 if (TARGET_DEBUG_ARG)
648 {
649 fprintf (stderr,
650 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
651 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
652
653 if (ret)
654 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
655 else
656 fprintf (stderr, ", stack");
657
658 fprintf (stderr, " )\n");
659 }
660
661 return ret;
662 }
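/* Illustrative example, not from the original source: with cum->nregs
   initialized to 2 (e.g. regparm(2)), the first SImode argument comes
   back as (reg:SI 0), i.e. %eax; function_arg_advance then bumps
   cum->regno to 1, so the second argument lands in %edx; a third finds
   nregs exhausted and is passed on the stack (NULL return).  Floating
   point and complex modes hit the default case and always go on the
   stack. */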
663
664 /* For an arg passed partly in registers and partly in memory,
665 this is the number of registers used.
666 For args passed entirely in registers or entirely in memory, zero. */
667
668 int
669 function_arg_partial_nregs (cum, mode, type, named)
670 CUMULATIVE_ARGS *cum; /* current arg information */
671 enum machine_mode mode; /* current arg mode */
672 tree type; /* type of the argument or 0 if lib support */
673 int named; /* != 0 for normal args, == 0 for ... args */
674 {
675 return 0;
676 }
677
678 \f
679 /* Output an insn whose source is a 386 integer register. SRC is the
680 rtx for the register, and TEMPLATE is the op-code template. SRC may
681 be either SImode or DImode.
682
683 The template will be output with operands[0] as SRC, and operands[1]
684 as a pointer to the top of the 386 stack. So a call from floatsidf2
685 would look like this:
686
687 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
688
689 where %z0 corresponds to the caller's operands[1], and is used to
690 emit the proper size suffix.
691
692 ??? Extend this to handle HImode - a 387 can load and store HImode
693 values directly. */
694
695 void
696 output_op_from_reg (src, template)
697 rtx src;
698 char *template;
699 {
700 rtx xops[4];
701 int size = GET_MODE_SIZE (GET_MODE (src));
702
703 xops[0] = src;
704 xops[1] = AT_SP (Pmode);
705 xops[2] = GEN_INT (size);
706 xops[3] = stack_pointer_rtx;
707
708 if (size > UNITS_PER_WORD)
709 {
710 rtx high;
711 if (size > 2 * UNITS_PER_WORD)
712 {
713 high = gen_rtx (REG, SImode, REGNO (src) + 2);
714 output_asm_insn (AS1 (push%L0,%0), &high);
715 }
716 high = gen_rtx (REG, SImode, REGNO (src) + 1);
717 output_asm_insn (AS1 (push%L0,%0), &high);
718 }
719 output_asm_insn (AS1 (push%L0,%0), &src);
720
721 output_asm_insn (template, xops);
722
723 output_asm_insn (AS2 (add%L3,%2,%3), xops);
724 }
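/* Illustrative output, not from the original source: for an SImode SRC
   in %eax and the template AS1 (fild%z0,%1) used by floatsidf2, the
   emitted sequence is roughly

       pushl %eax
       fildl (%esp)
       addl $4,%esp

   For a DImode SRC the high word (REGNO + 1) is pushed first and the
   final add restores 8 bytes. */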
725 \f
 726 /* Output an insn to pop a value from the 387 top-of-stack to 386
727 register DEST. The 387 register stack is popped if DIES is true. If
728 the mode of DEST is an integer mode, a `fist' integer store is done,
729 otherwise a `fst' float store is done. */
730
731 void
732 output_to_reg (dest, dies)
733 rtx dest;
734 int dies;
735 {
736 rtx xops[4];
737 int size = GET_MODE_SIZE (GET_MODE (dest));
738
739 xops[0] = AT_SP (Pmode);
740 xops[1] = stack_pointer_rtx;
741 xops[2] = GEN_INT (size);
742 xops[3] = dest;
743
744 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
745
746 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
747 {
748 if (dies)
749 output_asm_insn (AS1 (fistp%z3,%y0), xops);
750 else
751 output_asm_insn (AS1 (fist%z3,%y0), xops);
752 }
753 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
754 {
755 if (dies)
756 output_asm_insn (AS1 (fstp%z3,%y0), xops);
757 else
758 {
759 if (GET_MODE (dest) == XFmode)
760 {
761 output_asm_insn (AS1 (fstp%z3,%y0), xops);
762 output_asm_insn (AS1 (fld%z3,%y0), xops);
763 }
764 else
765 output_asm_insn (AS1 (fst%z3,%y0), xops);
766 }
767 }
768 else
769 abort ();
770
771 output_asm_insn (AS1 (pop%L0,%0), &dest);
772
773 if (size > UNITS_PER_WORD)
774 {
775 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
776 output_asm_insn (AS1 (pop%L0,%0), &dest);
777 if (size > 2 * UNITS_PER_WORD)
778 {
779 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
780 output_asm_insn (AS1 (pop%L0,%0), &dest);
781 }
782 }
783 }
784 \f
785 char *
786 singlemove_string (operands)
787 rtx *operands;
788 {
789 rtx x;
790 if (GET_CODE (operands[0]) == MEM
791 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
792 {
793 if (XEXP (x, 0) != stack_pointer_rtx)
794 abort ();
795 return "push%L1 %1";
796 }
797 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
798 {
799 return output_move_const_single (operands);
800 }
801 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
802 return AS2 (mov%L0,%1,%0);
803 else if (CONSTANT_P (operands[1]))
804 return AS2 (mov%L0,%1,%0);
805 else
806 {
807 output_asm_insn ("push%L1 %1", operands);
808 return "pop%L0 %0";
809 }
810 }
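/* Illustrative outputs, not from the original source: a register or
   constant source yields a single "movl", a store into (pre_dec sp)
   becomes a push, and a memory-to-memory SImode move (which has no
   single-instruction form) falls through to a push/pop pair, e.g.
   "pushl 8(%ebp)" followed by "popl (%edi)". */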
811 \f
812 /* Return a REG that occurs in ADDR with coefficient 1.
813 ADDR can be effectively incremented by incrementing REG. */
814
815 static rtx
816 find_addr_reg (addr)
817 rtx addr;
818 {
819 while (GET_CODE (addr) == PLUS)
820 {
821 if (GET_CODE (XEXP (addr, 0)) == REG)
822 addr = XEXP (addr, 0);
823 else if (GET_CODE (XEXP (addr, 1)) == REG)
824 addr = XEXP (addr, 1);
825 else if (CONSTANT_P (XEXP (addr, 0)))
826 addr = XEXP (addr, 1);
827 else if (CONSTANT_P (XEXP (addr, 1)))
828 addr = XEXP (addr, 0);
829 else
830 abort ();
831 }
832 if (GET_CODE (addr) == REG)
833 return addr;
834 abort ();
835 }
836
837 \f
838 /* Output an insn to add the constant N to the register X. */
839
840 static void
841 asm_add (n, x)
842 int n;
843 rtx x;
844 {
845 rtx xops[2];
846 xops[0] = x;
847
848 if (n == -1)
849 output_asm_insn (AS1 (dec%L0,%0), xops);
850 else if (n == 1)
851 output_asm_insn (AS1 (inc%L0,%0), xops);
852 else if (n < 0)
853 {
854 xops[1] = GEN_INT (-n);
855 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
856 }
857 else if (n > 0)
858 {
859 xops[1] = GEN_INT (n);
860 output_asm_insn (AS2 (add%L0,%1,%0), xops);
861 }
862 }
863
864 \f
865 /* Output assembler code to perform a doubleword move insn
866 with operands OPERANDS. */
867
868 char *
869 output_move_double (operands)
870 rtx *operands;
871 {
872 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
873 rtx latehalf[2];
874 rtx middlehalf[2];
875 rtx xops[2];
876 rtx addreg0 = 0, addreg1 = 0;
877 int dest_overlapped_low = 0;
878 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
879
880 middlehalf[0] = 0;
881 middlehalf[1] = 0;
882
883 /* First classify both operands. */
884
885 if (REG_P (operands[0]))
886 optype0 = REGOP;
887 else if (offsettable_memref_p (operands[0]))
888 optype0 = OFFSOP;
889 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
890 optype0 = POPOP;
891 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
892 optype0 = PUSHOP;
893 else if (GET_CODE (operands[0]) == MEM)
894 optype0 = MEMOP;
895 else
896 optype0 = RNDOP;
897
898 if (REG_P (operands[1]))
899 optype1 = REGOP;
900 else if (CONSTANT_P (operands[1]))
901 optype1 = CNSTOP;
902 else if (offsettable_memref_p (operands[1]))
903 optype1 = OFFSOP;
904 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
905 optype1 = POPOP;
906 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
907 optype1 = PUSHOP;
908 else if (GET_CODE (operands[1]) == MEM)
909 optype1 = MEMOP;
910 else
911 optype1 = RNDOP;
912
913 /* Check for the cases that the operand constraints are not
914 supposed to allow to happen. Abort if we get one,
915 because generating code for these cases is painful. */
916
917 if (optype0 == RNDOP || optype1 == RNDOP)
918 abort ();
919
920 /* If one operand is decrementing and one is incrementing
921 decrement the former register explicitly
922 and change that operand into ordinary indexing. */
923
924 if (optype0 == PUSHOP && optype1 == POPOP)
925 {
926 /* ??? Can this ever happen on i386? */
927 operands[0] = XEXP (XEXP (operands[0], 0), 0);
928 asm_add (-size, operands[0]);
929 if (GET_MODE (operands[1]) == XFmode)
930 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
931 else if (GET_MODE (operands[0]) == DFmode)
932 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
933 else
934 operands[0] = gen_rtx (MEM, DImode, operands[0]);
935 optype0 = OFFSOP;
936 }
937
938 if (optype0 == POPOP && optype1 == PUSHOP)
939 {
940 /* ??? Can this ever happen on i386? */
941 operands[1] = XEXP (XEXP (operands[1], 0), 0);
942 asm_add (-size, operands[1]);
943 if (GET_MODE (operands[1]) == XFmode)
944 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
945 else if (GET_MODE (operands[1]) == DFmode)
946 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
947 else
948 operands[1] = gen_rtx (MEM, DImode, operands[1]);
949 optype1 = OFFSOP;
950 }
951
952 /* If an operand is an unoffsettable memory ref, find a register
953 we can increment temporarily to make it refer to the second word. */
954
955 if (optype0 == MEMOP)
956 addreg0 = find_addr_reg (XEXP (operands[0], 0));
957
958 if (optype1 == MEMOP)
959 addreg1 = find_addr_reg (XEXP (operands[1], 0));
960
961 /* Ok, we can do one word at a time.
962 Normally we do the low-numbered word first,
963 but if either operand is autodecrementing then we
964 do the high-numbered word first.
965
966 In either case, set up in LATEHALF the operands to use
967 for the high-numbered word and in some cases alter the
968 operands in OPERANDS to be suitable for the low-numbered word. */
969
970 if (size == 12)
971 {
972 if (optype0 == REGOP)
973 {
974 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
975 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
976 }
977 else if (optype0 == OFFSOP)
978 {
979 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
980 latehalf[0] = adj_offsettable_operand (operands[0], 8);
981 }
982 else
983 {
984 middlehalf[0] = operands[0];
985 latehalf[0] = operands[0];
986 }
987
988 if (optype1 == REGOP)
989 {
990 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
991 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
992 }
993 else if (optype1 == OFFSOP)
994 {
995 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
996 latehalf[1] = adj_offsettable_operand (operands[1], 8);
997 }
998 else if (optype1 == CNSTOP)
999 {
1000 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1001 {
1002 REAL_VALUE_TYPE r; long l[3];
1003
1004 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1005 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1006 operands[1] = GEN_INT (l[0]);
1007 middlehalf[1] = GEN_INT (l[1]);
1008 latehalf[1] = GEN_INT (l[2]);
1009 }
1010 else if (CONSTANT_P (operands[1]))
1011 /* No non-CONST_DOUBLE constant should ever appear here. */
1012 abort ();
1013 }
1014 else
1015 {
1016 middlehalf[1] = operands[1];
1017 latehalf[1] = operands[1];
1018 }
1019 }
1020 else /* size is not 12: */
1021 {
1022 if (optype0 == REGOP)
1023 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1024 else if (optype0 == OFFSOP)
1025 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1026 else
1027 latehalf[0] = operands[0];
1028
1029 if (optype1 == REGOP)
1030 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1031 else if (optype1 == OFFSOP)
1032 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1033 else if (optype1 == CNSTOP)
1034 split_double (operands[1], &operands[1], &latehalf[1]);
1035 else
1036 latehalf[1] = operands[1];
1037 }
1038
1039 /* If insn is effectively movd N (sp),-(sp) then we will do the
1040 high word first. We should use the adjusted operand 1
1041 (which is N+4 (sp) or N+8 (sp))
1042 for the low word and middle word as well,
1043 to compensate for the first decrement of sp. */
1044 if (optype0 == PUSHOP
1045 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1046 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1047 middlehalf[1] = operands[1] = latehalf[1];
1048
1049 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1050 if the upper part of reg N does not appear in the MEM, arrange to
1051 emit the move late-half first. Otherwise, compute the MEM address
1052 into the upper part of N and use that as a pointer to the memory
1053 operand. */
1054 if (optype0 == REGOP
1055 && (optype1 == OFFSOP || optype1 == MEMOP))
1056 {
1057 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1058 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1059 {
1060 /* If both halves of dest are used in the src memory address,
1061 compute the address into latehalf of dest. */
1062 compadr:
1063 xops[0] = latehalf[0];
1064 xops[1] = XEXP (operands[1], 0);
1065 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1066 if( GET_MODE (operands[1]) == XFmode )
1067 {
1068 /* abort (); */
1069 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1070 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1071 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1072 }
1073 else
1074 {
1075 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1076 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1077 }
1078 }
1079 else if (size == 12
1080 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1081 {
1082 /* Check for two regs used by both source and dest. */
1083 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1084 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1085 goto compadr;
1086
1087 /* JRV says this can't happen: */
1088 if (addreg0 || addreg1)
1089 abort();
1090
1091 /* Only the middle reg conflicts; simply put it last. */
1092 output_asm_insn (singlemove_string (operands), operands);
1093 output_asm_insn (singlemove_string (latehalf), latehalf);
1094 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1095 return "";
1096 }
1097 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1098 /* If the low half of dest is mentioned in the source memory
1099      address, then arrange to emit the late half of the move first.  */
1100 dest_overlapped_low = 1;
1101 }
1102
1103   /* If one or both operands are autodecrementing,
1104 do the two words, high-numbered first. */
1105
1106   /* Likewise, if the first move would clobber the source of the second one,
1107 do them in the other order. This happens only for registers;
1108 such overlap can't happen in memory unless the user explicitly
1109 sets it up, and that is an undefined circumstance. */
1110
1111 /*
1112 if (optype0 == PUSHOP || optype1 == PUSHOP
1113 || (optype0 == REGOP && optype1 == REGOP
1114 && REGNO (operands[0]) == REGNO (latehalf[1]))
1115 || dest_overlapped_low)
1116 */
1117 if (optype0 == PUSHOP || optype1 == PUSHOP
1118 || (optype0 == REGOP && optype1 == REGOP
1119 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1120 || REGNO (operands[0]) == REGNO (latehalf[1])))
1121 || dest_overlapped_low)
1122 {
1123 /* Make any unoffsettable addresses point at high-numbered word. */
1124 if (addreg0)
1125 asm_add (size-4, addreg0);
1126 if (addreg1)
1127 asm_add (size-4, addreg1);
1128
1129 /* Do that word. */
1130 output_asm_insn (singlemove_string (latehalf), latehalf);
1131
1132 /* Undo the adds we just did. */
1133 if (addreg0)
1134 asm_add (-4, addreg0);
1135 if (addreg1)
1136 asm_add (-4, addreg1);
1137
1138 if (size == 12)
1139 {
1140 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1141 if (addreg0)
1142 asm_add (-4, addreg0);
1143 if (addreg1)
1144 asm_add (-4, addreg1);
1145 }
1146
1147 /* Do low-numbered word. */
1148 return singlemove_string (operands);
1149 }
1150
1151 /* Normal case: do the two words, low-numbered first. */
1152
1153 output_asm_insn (singlemove_string (operands), operands);
1154
1155 /* Do the middle one of the three words for long double */
1156 if (size == 12)
1157 {
1158 if (addreg0)
1159 asm_add (4, addreg0);
1160 if (addreg1)
1161 asm_add (4, addreg1);
1162
1163 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1164 }
1165
1166 /* Make any unoffsettable addresses point at high-numbered word. */
1167 if (addreg0)
1168 asm_add (4, addreg0);
1169 if (addreg1)
1170 asm_add (4, addreg1);
1171
1172 /* Do that word. */
1173 output_asm_insn (singlemove_string (latehalf), latehalf);
1174
1175 /* Undo the adds we just did. */
1176 if (addreg0)
1177 asm_add (4-size, addreg0);
1178 if (addreg1)
1179 asm_add (4-size, addreg1);
1180
1181 return "";
1182 }
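/* Illustrative example, not from the original source: a DImode load from
   an offsettable address into %eax/%edx normally emits the low word first
   and then the late half at offset +4, roughly

       movl 8(%ebp),%eax
       movl 12(%ebp),%edx

   The order is reversed when a push is involved or when the destination
   registers appear in the source address; for XFmode (size 12) a middle
   word at offset +4 is moved as well, with the late half at +8. */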
1183
1184 \f
1185 #define MAX_TMPS 2 /* max temporary registers used */
1186
1187 /* Output the appropriate code to push a block of memory onto the stack */
1188
1189 char *
1190 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1191 rtx operands[];
1192 rtx insn;
1193 int length;
1194 int tmp_start;
1195 int n_operands;
1196 {
1197
1198 struct {
1199 char *load;
1200 char *push;
1201 rtx xops[2];
1202 } tmp_info[MAX_TMPS];
1203
1204 rtx src = operands[1];
1205 int max_tmps = 0;
1206 int offset = 0;
1207 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1208 int stack_offset = 0;
1209 int i, num_tmps;
1210 rtx xops[1];
1211
1212 if (!offsettable_memref_p (src))
1213 fatal_insn ("Source is not offsettable", insn);
1214
1215 if ((length & 3) != 0)
1216 fatal_insn ("Pushing non-word aligned size", insn);
1217
1218 /* Figure out which temporary registers we have available */
1219 for (i = tmp_start; i < n_operands; i++)
1220 {
1221 if (GET_CODE (operands[i]) == REG)
1222 {
1223 if (reg_overlap_mentioned_p (operands[i], src))
1224 continue;
1225
1226 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1227 if (max_tmps == MAX_TMPS)
1228 break;
1229 }
1230 }
1231
1232 if (max_tmps == 0)
1233 for (offset = length - 4; offset >= 0; offset -= 4)
1234 {
1235 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1236 output_asm_insn (AS1(push%L0,%0), xops);
1237 if (stack_p)
1238 stack_offset += 4;
1239 }
1240
1241 else
1242 for (offset = length - 4; offset >= 0; )
1243 {
1244 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1245 {
1246 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1247 tmp_info[num_tmps].push = AS1(push%L0,%1);
1248 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1249 offset -= 4;
1250 }
1251
1252 for (i = 0; i < num_tmps; i++)
1253 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1254
1255 for (i = 0; i < num_tmps; i++)
1256 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
1257
1258 if (stack_p)
1259 stack_offset += 4*num_tmps;
1260 }
1261
1262 return "";
1263 }
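/* Illustrative output, not from the original source: pushing a 12-byte
   offsettable source when no scratch register survives the overlap check
   emits three pushes from the highest word down, roughly

       pushl 8(%esi)
       pushl 4(%esi)
       pushl (%esi)

   with stack_offset compensating for each push when the source address is
   based on %esp.  When scratch registers are available, up to MAX_TMPS
   words are loaded and then pushed per round. */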
1264
1265 \f
1266
1267 /* Output the appropriate code to move data between two memory locations */
1268
1269 char *
1270 output_move_memory (operands, insn, length, tmp_start, n_operands)
1271 rtx operands[];
1272 rtx insn;
1273 int length;
1274 int tmp_start;
1275 int n_operands;
1276 {
1277 struct {
1278 char *load;
1279 char *store;
1280 rtx xops[3];
1281 } tmp_info[MAX_TMPS];
1282
1283 rtx dest = operands[0];
1284 rtx src = operands[1];
1285 rtx qi_tmp = NULL_RTX;
1286 int max_tmps = 0;
1287 int offset = 0;
1288 int i, num_tmps;
1289 rtx xops[3];
1290
1291 if (GET_CODE (dest) == MEM
1292 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1293 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1294 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1295
1296 if (!offsettable_memref_p (src))
1297 fatal_insn ("Source is not offsettable", insn);
1298
1299 if (!offsettable_memref_p (dest))
1300 fatal_insn ("Destination is not offsettable", insn);
1301
1302 /* Figure out which temporary registers we have available */
1303 for (i = tmp_start; i < n_operands; i++)
1304 {
1305 if (GET_CODE (operands[i]) == REG)
1306 {
1307 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1308 qi_tmp = operands[i];
1309
1310 if (reg_overlap_mentioned_p (operands[i], dest))
1311 fatal_insn ("Temporary register overlaps the destination", insn);
1312
1313 if (reg_overlap_mentioned_p (operands[i], src))
1314 fatal_insn ("Temporary register overlaps the source", insn);
1315
1316 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1317 if (max_tmps == MAX_TMPS)
1318 break;
1319 }
1320 }
1321
1322 if (max_tmps == 0)
1323 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1324
1325 if ((length & 1) != 0)
1326 {
1327 if (!qi_tmp)
1328 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
1329 }
1330
1331 while (length > 1)
1332 {
1333 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1334 {
1335 if (length >= 4)
1336 {
1337 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1338 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1339 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1340 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1341 offset += 4;
1342 length -= 4;
1343 }
1344 else if (length >= 2)
1345 {
1346 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1347 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1348 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1349 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1350 offset += 2;
1351 length -= 2;
1352 }
1353 else
1354 break;
1355 }
1356
1357 for (i = 0; i < num_tmps; i++)
1358 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1359
1360 for (i = 0; i < num_tmps; i++)
1361 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
1362 }
1363
1364 if (length == 1)
1365 {
1366 xops[0] = adj_offsettable_operand (dest, offset);
1367 xops[1] = adj_offsettable_operand (src, offset);
1368 xops[2] = qi_tmp;
1369 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1370 output_asm_insn (AS2(mov%B0,%2,%0), xops);
1371 }
1372
1373 return "";
1374 }
1375
1376 \f
1377 int
1378 standard_80387_constant_p (x)
1379 rtx x;
1380 {
1381 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1382 REAL_VALUE_TYPE d;
1383 jmp_buf handler;
1384 int is0, is1;
1385
1386 if (setjmp (handler))
1387 return 0;
1388
1389 set_float_handler (handler);
1390 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1391 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1392 is1 = REAL_VALUES_EQUAL (d, dconst1);
1393 set_float_handler (NULL_PTR);
1394
1395 if (is0)
1396 return 1;
1397
1398 if (is1)
1399 return 2;
1400
1401 /* Note that on the 80387, other constants, such as pi,
1402 are much slower to load as standard constants
1403 than to load from doubles in memory! */
1404 #endif
1405
1406 return 0;
1407 }
1408
1409 char *
1410 output_move_const_single (operands)
1411 rtx *operands;
1412 {
1413 if (FP_REG_P (operands[0]))
1414 {
1415 int conval = standard_80387_constant_p (operands[1]);
1416
1417 if (conval == 1)
1418 return "fldz";
1419
1420 if (conval == 2)
1421 return "fld1";
1422 }
1423 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1424 {
1425 REAL_VALUE_TYPE r; long l;
1426
1427 if (GET_MODE (operands[1]) == XFmode)
1428 abort ();
1429
1430 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1431 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1432 operands[1] = GEN_INT (l);
1433 }
1434 return singlemove_string (operands);
1435 }
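/* Illustrative examples, not from the original source: loading the
   constants 0.0 or 1.0 into a 387 register uses the dedicated "fldz" and
   "fld1" instructions (standard_80387_constant_p returning 1 or 2).  Any
   other SFmode constant headed for an integer register or memory is first
   converted to its IEEE single-precision bit pattern, so 0.5 becomes the
   immediate 0x3f000000 and is moved by singlemove_string as an ordinary
   integer. */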
1436 \f
1437 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1438 reference and a constant. */
1439
1440 int
1441 symbolic_operand (op, mode)
1442 register rtx op;
1443 enum machine_mode mode;
1444 {
1445 switch (GET_CODE (op))
1446 {
1447 case SYMBOL_REF:
1448 case LABEL_REF:
1449 return 1;
1450 case CONST:
1451 op = XEXP (op, 0);
1452 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1453 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1454 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1455 default:
1456 return 0;
1457 }
1458 }
1459
1460 /* Test for a valid operand for a call instruction.
1461 Don't allow the arg pointer register or virtual regs
1462 since they may change into reg + const, which the patterns
1463 can't handle yet. */
1464
1465 int
1466 call_insn_operand (op, mode)
1467 rtx op;
1468 enum machine_mode mode;
1469 {
1470 if (GET_CODE (op) == MEM
1471 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1472 /* This makes a difference for PIC. */
1473 && general_operand (XEXP (op, 0), Pmode))
1474 || (GET_CODE (XEXP (op, 0)) == REG
1475 && XEXP (op, 0) != arg_pointer_rtx
1476 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1477 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1478 return 1;
1479 return 0;
1480 }
1481
1482 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1483 even if pic. */
1484
1485 int
1486 expander_call_insn_operand (op, mode)
1487 rtx op;
1488 enum machine_mode mode;
1489 {
1490 if (GET_CODE (op) == MEM
1491 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1492 || (GET_CODE (XEXP (op, 0)) == REG
1493 && XEXP (op, 0) != arg_pointer_rtx
1494 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1495 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1496 return 1;
1497 return 0;
1498 }
1499
1500 /* Return 1 if OP is a comparison operator that can use the condition code
1501 generated by an arithmetic operation. */
1502
1503 int
1504 arithmetic_comparison_operator (op, mode)
1505 register rtx op;
1506 enum machine_mode mode;
1507 {
1508 enum rtx_code code;
1509
1510 if (mode != VOIDmode && mode != GET_MODE (op))
1511 return 0;
1512 code = GET_CODE (op);
1513 if (GET_RTX_CLASS (code) != '<')
1514 return 0;
1515
1516 return (code != GT && code != LE);
1517 }
1518 \f
1519 /* Returns 1 if OP contains a symbol reference */
1520
1521 int
1522 symbolic_reference_mentioned_p (op)
1523 rtx op;
1524 {
1525 register char *fmt;
1526 register int i;
1527
1528 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1529 return 1;
1530
1531 fmt = GET_RTX_FORMAT (GET_CODE (op));
1532 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1533 {
1534 if (fmt[i] == 'E')
1535 {
1536 register int j;
1537
1538 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1539 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1540 return 1;
1541 }
1542 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1543 return 1;
1544 }
1545
1546 return 0;
1547 }
1548 \f
1549 /* Attempt to expand a binary operator. Make the expansion closer to the
1550 actual machine, then just general_operand, which will allow 3 separate
1551 memory references (one output, two input) in a single insn. Return
1552 whether the insn fails, or succeeds. */
1553
1554 int
1555 ix86_expand_binary_operator (code, mode, operands)
1556 enum rtx_code code;
1557 enum machine_mode mode;
1558 rtx operands[];
1559 {
1560 rtx insn;
1561 int i;
1562 int modified;
1563
1564 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1565 if (GET_RTX_CLASS (code) == 'c'
1566 && (rtx_equal_p (operands[0], operands[2])
1567 || immediate_operand (operands[1], mode)))
1568 {
1569 rtx temp = operands[1];
1570 operands[1] = operands[2];
1571 operands[2] = temp;
1572 }
1573
1574 /* If optimizing, copy to regs to improve CSE */
1575 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1576 {
1577 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1578 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1579
1580 if (GET_CODE (operands[2]) == MEM)
1581 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1582
1583 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1584 {
1585 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1586 emit_move_insn (temp, operands[1]);
1587 operands[1] = temp;
1588 return TRUE;
1589 }
1590 }
1591
1592 if (!ix86_binary_operator_ok (code, mode, operands))
1593 {
1594       /* If not optimizing, try to make a valid insn (the optimizing path
1595          above already did this to improve the chances of CSE) */
1596
1597 if ((!TARGET_PSEUDO || !optimize)
1598 && ((reload_in_progress | reload_completed) == 0)
1599 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1600 {
1601 modified = FALSE;
1602 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1603 {
1604 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1605 modified = TRUE;
1606 }
1607
1608 if (GET_CODE (operands[2]) == MEM)
1609 {
1610 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1611 modified = TRUE;
1612 }
1613
1614 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1615 {
1616 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1617 emit_move_insn (temp, operands[1]);
1618 operands[1] = temp;
1619 return TRUE;
1620 }
1621
1622 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1623 return FALSE;
1624 }
1625 else
1626 return FALSE;
1627 }
1628
1629 return TRUE;
1630 }
1631 \f
1632 /* Return TRUE or FALSE depending on whether the binary operator meets the
1633 appropriate constraints. */
1634
1635 int
1636 ix86_binary_operator_ok (code, mode, operands)
1637 enum rtx_code code;
1638 enum machine_mode mode;
1639 rtx operands[3];
1640 {
1641 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1642 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1643 }
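/* Illustrative examples, not from the original source: the predicate
   above rejects two memory sources, e.g. (plus:SI (mem:SI A) (mem:SI B)),
   so ix86_expand_binary_operator forces one of them into a register
   first.  It also rejects a CONST_INT as operand 1 unless the code is
   commutative (where the operands can simply be swapped); for MINUS the
   expander instead copies the constant into a fresh register. */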
1644 \f
1645 /* Attempt to expand a unary operator.  Make the expansion closer to the
1646    actual machine than just general_operand, which would allow 2 separate
1647    memory references (one output, one input) in a single insn.  Return
1648    TRUE if the expansion succeeds, FALSE if it fails. */
1649
1650 int
1651 ix86_expand_unary_operator (code, mode, operands)
1652 enum rtx_code code;
1653 enum machine_mode mode;
1654 rtx operands[];
1655 {
1656 rtx insn;
1657
1658 /* If optimizing, copy to regs to improve CSE */
1659 if (TARGET_PSEUDO
1660 && optimize
1661 && ((reload_in_progress | reload_completed) == 0)
1662 && GET_CODE (operands[1]) == MEM)
1663 {
1664 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1665 }
1666
1667 if (!ix86_unary_operator_ok (code, mode, operands))
1668 {
1669 if ((!TARGET_PSEUDO || !optimize)
1670 && ((reload_in_progress | reload_completed) == 0)
1671 && GET_CODE (operands[1]) == MEM)
1672 {
1673 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1674 if (!ix86_unary_operator_ok (code, mode, operands))
1675 return FALSE;
1676 }
1677 else
1678 return FALSE;
1679 }
1680
1681 return TRUE;
1682 }
1683 \f
1684 /* Return TRUE or FALSE depending on whether the unary operator meets the
1685 appropriate constraints. */
1686
1687 int
1688 ix86_unary_operator_ok (code, mode, operands)
1689 enum rtx_code code;
1690 enum machine_mode mode;
1691 rtx operands[2];
1692 {
1693 return TRUE;
1694 }
1695
1696 \f
1697
1698 static rtx pic_label_rtx;
1699
1700 /* This function generates code for -fpic that loads %ebx
1701    with the return address of the caller and then returns.  */
1702 void
1703 asm_output_function_prefix (file, name)
1704 FILE * file;
1705 char * name;
1706 {
1707 rtx xops[2];
1708 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1709 || current_function_uses_const_pool);
1710 xops[0] = pic_offset_table_rtx;
1711 xops[1] = stack_pointer_rtx;
1712
1713 /* deep branch prediction favors having a return for every call */
1714 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1715 {
1716 if (pic_label_rtx == 0)
1717 pic_label_rtx = (rtx) gen_label_rtx ();
1718 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (pic_label_rtx));
1719 output_asm_insn ("movl (%1),%0", xops);
1720 output_asm_insn ("ret", xops);
1721 }
1722 }
1723
1724 /* Set up the stack and frame (if desired) for the function. */
1725
1726 void
1727 function_prologue (file, size)
1728 FILE *file;
1729 int size;
1730 {
1731 register int regno;
1732 int limit;
1733 rtx xops[4];
1734 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1735 || current_function_uses_const_pool);
1736 long tsize = get_frame_size ();
1737
1738 /* pic references don't explicitly mention pic_offset_table_rtx */
1739 if (TARGET_SCHEDULE_PROLOGUE)
1740 return;
1741
1742 xops[0] = stack_pointer_rtx;
1743 xops[1] = frame_pointer_rtx;
1744 xops[2] = GEN_INT (tsize);
1745 if (frame_pointer_needed)
1746 {
1747 output_asm_insn ("push%L1 %1", xops);
1748 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
1749 }
1750
1751 if (tsize)
1752 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
1753
1754   /* Note: if "enter" were used, its args are NOT reversed;
1755      this one is not reversed from the Intel order!!
1756      I think "enter" is slower.  Also sdb doesn't like it.
1757      But if you want it, the code is:
1758      {
1759        xops[3] = const0_rtx;
1760        output_asm_insn ("enter %2,%3", xops);
1761      }
1762   */
1763 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1764 for (regno = limit - 1; regno >= 0; regno--)
1765 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1766 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1767 {
1768 xops[0] = gen_rtx (REG, SImode, regno);
1769 output_asm_insn ("push%L0 %0", xops);
1770 }
1771
1772 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1773 {
1774 xops[0] = pic_offset_table_rtx;
1775 if (pic_label_rtx == 0)
1776 pic_label_rtx = (rtx) gen_label_rtx ();
1777 xops[1] = pic_label_rtx;
1778
1779 output_asm_insn (AS1 (call,%P1), xops);
1780 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
1781 }
1782 else if (pic_reg_used)
1783 {
1784 xops[0] = pic_offset_table_rtx;
1785 xops[1] = (rtx) gen_label_rtx ();
1786
1787 output_asm_insn (AS1 (call,%P1), xops);
1788 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1789 output_asm_insn (AS1 (pop%L0,%0), xops);
1790 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1791 }
1792 }
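/* Illustrative output, not from the original source: with -fpic and
   TARGET_DEEP_BRANCH_PREDICTION, asm_output_function_prefix above emits a
   tiny thunk

   Ln:   movl (%esp),%ebx
         ret

   and the prologue loads the GOT pointer (%ebx) by calling it, so every
   call is paired with a return:

         call Ln
         addl $_GLOBAL_OFFSET_TABLE_,%ebx

   Without deep branch prediction the prologue uses the call/pop form,
   "call Lm; Lm: popl %ebx; addl $_GLOBAL_OFFSET_TABLE_+[.-Lm],%ebx".
   Ln and Lm stand in for the internal labels actually generated. */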
1793
1794 /* This function generates the assembly code for function entry.
1795 FILE is an stdio stream to output the code to.
1796 SIZE is an int: how many units of temporary storage to allocate. */
1797
1798 void
1799 ix86_expand_prologue ()
1800 {
1801 register int regno;
1802 int limit;
1803 rtx xops[4];
1804 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1805 || current_function_uses_const_pool);
1806 long tsize = get_frame_size ();
1807
1808 if (!TARGET_SCHEDULE_PROLOGUE)
1809 return;
1810
1811 xops[0] = stack_pointer_rtx;
1812 xops[1] = frame_pointer_rtx;
1813 xops[2] = GEN_INT (tsize);
1814 if (frame_pointer_needed)
1815 {
1816 emit_insn (gen_rtx (SET, 0,
1817 gen_rtx (MEM, SImode,
1818 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1819 frame_pointer_rtx));
1820 emit_move_insn (xops[1], xops[0]);
1821 }
1822
1823 if (tsize)
1824 emit_insn (gen_rtx (SET, SImode,
1825 xops[0],
1826 gen_rtx (MINUS, SImode,
1827 xops[0],
1828 xops[2])));
1829
1830   /* Note: if "enter" were used, its args are NOT reversed;
1831      this one is not reversed from the Intel order!!
1832      I think "enter" is slower.  Also sdb doesn't like it.
1833      But if you want it, the code is:
1834      {
1835        xops[3] = const0_rtx;
1836        output_asm_insn ("enter %2,%3", xops);
1837      }
1838   */
1839 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1840 for (regno = limit - 1; regno >= 0; regno--)
1841 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1842 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1843 {
1844 xops[0] = gen_rtx (REG, SImode, regno);
1845 emit_insn (gen_rtx (SET, 0,
1846 gen_rtx (MEM, SImode,
1847 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1848 xops[0]));
1849 }
1850
1851 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1852 {
1853 xops[0] = pic_offset_table_rtx;
1854 if (pic_label_rtx == 0)
1855 pic_label_rtx = (rtx) gen_label_rtx ();
1856 xops[1] = pic_label_rtx;
1857
1858 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1859 emit_insn (gen_prologue_set_got (xops[0],
1860 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1861 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1862 }
1863 else if (pic_reg_used)
1864 {
1865 xops[0] = pic_offset_table_rtx;
1866 xops[1] = (rtx) gen_label_rtx ();
1867
1868 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1869 emit_insn (gen_pop (xops[0]));
1870 emit_insn (gen_prologue_set_got (xops[0],
1871 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1872 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
1873 }
1874 }
1875
1876 /* Restore function stack, frame, and registers. */
1877
1878 void
1879 function_epilogue (file, size)
1880 FILE *file;
1881 int size;
1882 {
1883 }
1884
1885 /* Return 1 if it is appropriate to emit `ret' instructions in the
1886 body of a function. Do this only if the epilogue is simple, needing a
1887 couple of insns. Prior to reloading, we can't tell how many registers
1888 must be saved, so return 0 then. Return 0 if there is no frame
1889 marker to de-allocate.
1890
1891 If NON_SAVING_SETJMP is defined and true, then it is not possible
1892 for the epilogue to be simple, so return 0. This is a special case
1893 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1894 until final, but jump_optimize may need to know sooner if a
1895 `return' is OK. */
1896
1897 int
1898 ix86_can_use_return_insn_p ()
1899 {
1900 int regno;
1901 int nregs = 0;
1902 int reglimit = (frame_pointer_needed
1903 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1904 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1905 || current_function_uses_const_pool);
1906
1907 #ifdef NON_SAVING_SETJMP
1908 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
1909 return 0;
1910 #endif
1911
1912 if (! reload_completed)
1913 return 0;
1914
1915 for (regno = reglimit - 1; regno >= 0; regno--)
1916 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1917 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1918 nregs++;
1919
1920 return nregs == 0 || ! frame_pointer_needed;
1921 }
1922
1923 \f
1924 /* Generate the RTL for function exit: restore the call-saved registers,
1925    release the frame, and emit the appropriate return insn, popping the
1926    caller's arguments when this function is responsible for doing so. */
1927
1928 void
1929 ix86_expand_epilogue ()
1930 {
1931 register int regno;
1932 register int nregs, limit;
1933 int offset;
1934 rtx xops[3];
1935 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1936 || current_function_uses_const_pool);
1937 long tsize = get_frame_size ();
1938
1939 /* Compute the number of registers to pop */
1940
1941 limit = (frame_pointer_needed
1942 ? FRAME_POINTER_REGNUM
1943 : STACK_POINTER_REGNUM);
1944
1945 nregs = 0;
1946
1947 for (regno = limit - 1; regno >= 0; regno--)
1948 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1949 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1950 nregs++;
1951
1952   /* The stack pointer is often unreliable at this point, so we must
1953      address the saved registers off the frame pointer.  */
1954
1955 /* In reality, we may not care if sp is unreliable, because we can
1956 restore the register relative to the frame pointer. In theory,
1957 since each move is the same speed as a pop, and we don't need the
1958 leal, this is faster. For now restore multiple registers the old
1959 way. */
1960
1961 offset = -tsize - (nregs * UNITS_PER_WORD);
1962
1963 xops[2] = stack_pointer_rtx;
1964
1965 if (nregs > 1 || ! frame_pointer_needed)
1966 {
1967 if (frame_pointer_needed)
1968 {
1969 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
1970 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
1971 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
1972 }
1973
1974 for (regno = 0; regno < limit; regno++)
1975 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1976 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1977 {
1978 xops[0] = gen_rtx (REG, SImode, regno);
1979 emit_insn (gen_pop (xops[0]));
1980 /* output_asm_insn ("pop%L0 %0", xops);*/
1981 }
1982 }
1983 else
1984 for (regno = 0; regno < limit; regno++)
1985 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1986 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1987 {
1988 xops[0] = gen_rtx (REG, SImode, regno);
1989 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
1990 emit_move_insn (xops[0], xops[1]);
1991 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
1992 offset += 4;
1993 }
1994
1995 if (frame_pointer_needed)
1996 {
1997       /* On anything but an i386, a mov & pop sequence is faster than "leave". */
1998
1999 if (TARGET_USE_LEAVE)
2000 emit_insn (gen_leave());
2001 /* output_asm_insn ("leave", xops);*/
2002 else
2003 {
2004 xops[0] = frame_pointer_rtx;
2005 xops[1] = stack_pointer_rtx;
2006 emit_insn (gen_epilogue_set_stack_ptr());
2007 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2008 emit_insn (gen_pop (xops[0]));
2009 /* output_asm_insn ("pop%L0 %0", xops);*/
2010 }
2011 }
2012 else if (tsize)
2013 {
2014 /* If there is no frame pointer, we must still release the frame. */
2015
2016 xops[0] = GEN_INT (tsize);
2017 emit_insn (gen_rtx (SET, SImode,
2018 xops[2],
2019 gen_rtx (PLUS, SImode,
2020 xops[2],
2021 xops[0])));
2022 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2023 }
2024
2025 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2026 if (profile_block_flag == 2)
2027 {
2028 FUNCTION_BLOCK_PROFILER_EXIT(file);
2029 }
2030 #endif
2031
2032 if (current_function_pops_args && current_function_args_size)
2033 {
2034 xops[1] = GEN_INT (current_function_pops_args);
2035
2036       /* `ret' takes an unsigned 16-bit count (at most 64K-1 bytes), but we
2037 	 conservatively use it only for counts below 32K.  If asked to pop more,
2038 	 pop the return address, do an explicit add, and jump indirectly to the caller. */
2039
2040 if (current_function_pops_args >= 32768)
2041 {
2042 /* ??? Which register to use here? */
2043 xops[0] = gen_rtx (REG, SImode, 2);
2044 emit_insn (gen_pop (xops[0]));
2045 /* output_asm_insn ("pop%L0 %0", xops);*/
2046 emit_insn (gen_rtx (SET, SImode,
2047 xops[2],
2048 gen_rtx (PLUS, SImode,
2049 xops[1],
2050 xops[2])));
2051 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2052 emit_jump_insn (xops[0]);
2053 /* output_asm_insn ("jmp %*%0", xops);*/
2054 }
2055 else
2056 emit_jump_insn (gen_return_pop_internal (xops[1]));
2057 /* output_asm_insn ("ret %1", xops);*/
2058 }
2059 else
2060 /* output_asm_insn ("ret", xops);*/
2061 emit_jump_insn (gen_return_internal ());
2062 }
2063
2064 \f
2065 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2066 that is a valid memory address for an instruction.
2067 The MODE argument is the machine mode for the MEM expression
2068 that wants to use this address.
2069
2070 On x86, legitimate addresses are:
2071 base movl (base),reg
2072 displacement movl disp,reg
2073 base + displacement movl disp(base),reg
2074 index + base movl (base,index),reg
2075 (index + base) + displacement movl disp(base,index),reg
2076 index*scale movl (,index,scale),reg
2077 index*scale + disp movl disp(,index,scale),reg
2078 index*scale + base movl (base,index,scale),reg
2079 (index*scale + base) + disp movl disp(base,index,scale),reg
2080
2081 In each case, scale can be 1, 2, 4, 8. */
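/* For example, the AT&T operand 8(%ebx,%esi,4) -- disp(base,index,scale) --
   corresponds to the canonical RTL

	(plus (plus (mult (reg) (const_int 4)) (reg)) (const_int 8))

   which legitimate_address_p below decomposes into its index, scale, base
   and displacement parts before validating each one.  */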
2082
2083 /* This is exactly the same as print_operand_address, except that
2084    it recognizes addresses instead of printing them.
2085 
2086    It only recognizes addresses in canonical form.  LEGITIMIZE_ADDRESS should
2087    convert common non-canonical forms to canonical form so that they will
2088    be recognized. */
2089
2090 #define ADDR_INVALID(msg,insn) \
2091 do { \
2092 if (TARGET_DEBUG_ADDR) \
2093 { \
2094 fprintf (stderr, msg); \
2095 debug_rtx (insn); \
2096 } \
2097 } while (0)
2098
2099 int
2100 legitimate_address_p (mode, addr, strict)
2101 enum machine_mode mode;
2102 register rtx addr;
2103 int strict;
2104 {
2105 rtx base = NULL_RTX;
2106 rtx indx = NULL_RTX;
2107 rtx scale = NULL_RTX;
2108 rtx disp = NULL_RTX;
2109
2110 if (TARGET_DEBUG_ADDR)
2111 {
2112 fprintf (stderr,
2113 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2114 GET_MODE_NAME (mode), strict);
2115
2116 debug_rtx (addr);
2117 }
2118
2119 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2120 base = addr; /* base reg */
2121
2122 else if (GET_CODE (addr) == PLUS)
2123 {
2124 rtx op0 = XEXP (addr, 0);
2125 rtx op1 = XEXP (addr, 1);
2126 enum rtx_code code0 = GET_CODE (op0);
2127 enum rtx_code code1 = GET_CODE (op1);
2128
2129 if (code0 == REG || code0 == SUBREG)
2130 {
2131 if (code1 == REG || code1 == SUBREG)
2132 {
2133 indx = op0; /* index + base */
2134 base = op1;
2135 }
2136
2137 else
2138 {
2139 base = op0; /* base + displacement */
2140 disp = op1;
2141 }
2142 }
2143
2144 else if (code0 == MULT)
2145 {
2146 indx = XEXP (op0, 0);
2147 scale = XEXP (op0, 1);
2148
2149 if (code1 == REG || code1 == SUBREG)
2150 base = op1; /* index*scale + base */
2151
2152 else
2153 disp = op1; /* index*scale + disp */
2154 }
2155
2156 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2157 {
2158 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2159 scale = XEXP (XEXP (op0, 0), 1);
2160 base = XEXP (op0, 1);
2161 disp = op1;
2162 }
2163
2164 else if (code0 == PLUS)
2165 {
2166 indx = XEXP (op0, 0); /* index + base + disp */
2167 base = XEXP (op0, 1);
2168 disp = op1;
2169 }
2170
2171 else
2172 {
2173 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2174 return FALSE;
2175 }
2176 }
2177
2178 else if (GET_CODE (addr) == MULT)
2179 {
2180 indx = XEXP (addr, 0); /* index*scale */
2181 scale = XEXP (addr, 1);
2182 }
2183
2184 else
2185 disp = addr; /* displacement */
2186
2187   /* Allow arg pointer and stack pointer as index if there is no scaling.  */
2188 if (base && indx && !scale
2189 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2190 {
2191 rtx tmp = base;
2192 base = indx;
2193 indx = tmp;
2194 }
2195
2196 /* Validate base register */
2197   /* Don't allow SUBREGs here; they can lead to spill failures when the base
2198      is one word out of a two-word structure, which is represented internally
2199      as a DImode int. */
2200 if (base)
2201 {
2202 if (GET_CODE (base) != REG)
2203 {
2204 ADDR_INVALID ("Base is not a register.\n", base);
2205 return FALSE;
2206 }
2207
2208 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2209 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2210 {
2211 ADDR_INVALID ("Base is not valid.\n", base);
2212 return FALSE;
2213 }
2214 }
2215
2216 /* Validate index register */
2217   /* Don't allow SUBREGs here; they can lead to spill failures when the index
2218      is one word out of a two-word structure, which is represented internally
2219      as a DImode int. */
2220 if (indx)
2221 {
2222 if (GET_CODE (indx) != REG)
2223 {
2224 ADDR_INVALID ("Index is not a register.\n", indx);
2225 return FALSE;
2226 }
2227
2228 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2229 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2230 {
2231 ADDR_INVALID ("Index is not valid.\n", indx);
2232 return FALSE;
2233 }
2234 }
2235 else if (scale)
2236 abort (); /* scale w/o index invalid */
2237
2238 /* Validate scale factor */
2239 if (scale)
2240 {
2241 HOST_WIDE_INT value;
2242
2243 if (GET_CODE (scale) != CONST_INT)
2244 {
2245 ADDR_INVALID ("Scale is not valid.\n", scale);
2246 return FALSE;
2247 }
2248
2249 value = INTVAL (scale);
2250 if (value != 1 && value != 2 && value != 4 && value != 8)
2251 {
2252 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2253 return FALSE;
2254 }
2255 }
2256
2257   /* Validate displacement.
2258      Constant pool addresses must be handled specially.  They are
2259      considered legitimate addresses, but only if not used with regs.
2260      When printed, the output routines know to print the reference with the
2261      PIC reg, even though the PIC reg doesn't appear in the RTL. */
2262 if (disp)
2263 {
2264 if (GET_CODE (disp) == SYMBOL_REF
2265 && CONSTANT_POOL_ADDRESS_P (disp)
2266 && !base
2267 && !indx)
2268 ;
2269
2270 else if (!CONSTANT_ADDRESS_P (disp))
2271 {
2272 ADDR_INVALID ("Displacement is not valid.\n", disp);
2273 return FALSE;
2274 }
2275
2276 else if (GET_CODE (disp) == CONST_DOUBLE)
2277 {
2278 ADDR_INVALID ("Displacement is a const_double.\n", disp);
2279 return FALSE;
2280 }
2281
2282 else if (flag_pic && SYMBOLIC_CONST (disp)
2283 && base != pic_offset_table_rtx
2284 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2285 {
2286 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2287 return FALSE;
2288 }
2289
2290 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2291 && (base != NULL_RTX || indx != NULL_RTX))
2292 {
2293 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2294 return FALSE;
2295 }
2296 }
2297
2298 if (TARGET_DEBUG_ADDR)
2299 fprintf (stderr, "Address is valid.\n");
2300
2301 /* Everything looks valid, return true */
2302 return TRUE;
2303 }
2304
2305 \f
2306 /* Return a legitimate reference for ORIG (an address) using the
2307 register REG. If REG is 0, a new pseudo is generated.
2308
2309 There are three types of references that must be handled:
2310
2311 1. Global data references must load the address from the GOT, via
2312 the PIC reg. An insn is emitted to do this load, and the reg is
2313 returned.
2314
2315 2. Static data references must compute the address as an offset
2316 from the GOT, whose base is in the PIC reg. An insn is emitted to
2317 compute the address into a reg, and the reg is returned. Static
2318 data objects have SYMBOL_REF_FLAG set to differentiate them from
2319 global data objects.
2320
2321    3. Constant pool addresses must be handled specially.  They are
2322 considered legitimate addresses, but only if not used with regs.
2323 When printed, the output routines know to print the reference with the
2324 PIC reg, even though the PIC reg doesn't appear in the RTL.
2325
2326 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2327 reg also appears in the address (except for constant pool references,
2328 noted above).
2329
2330 "switch" statements also require special handling when generating
2331 PIC code. See comments by the `casesi' insn in i386.md for details. */
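/* Concretely, for a global symbol the code below loads the address from the
   GOT, roughly (set (reg) (mem (plus pic_reg (symbol_ref "sym")))), whereas a
   static symbol or a label is computed GOT-relative as
   (set (reg) (plus pic_reg (symbol_ref "sym"))).  */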
2332
2333 rtx
2334 legitimize_pic_address (orig, reg)
2335 rtx orig;
2336 rtx reg;
2337 {
2338 rtx addr = orig;
2339 rtx new = orig;
2340
2341 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
2342 {
2343 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2344 reg = new = orig;
2345 else
2346 {
2347 if (reg == 0)
2348 reg = gen_reg_rtx (Pmode);
2349
2350 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2351 || GET_CODE (addr) == LABEL_REF)
2352 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2353 else
2354 new = gen_rtx (MEM, Pmode,
2355 gen_rtx (PLUS, Pmode,
2356 pic_offset_table_rtx, orig));
2357
2358 emit_move_insn (reg, new);
2359 }
2360 current_function_uses_pic_offset_table = 1;
2361 return reg;
2362 }
2363 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2364 {
2365 rtx base;
2366
2367 if (GET_CODE (addr) == CONST)
2368 {
2369 addr = XEXP (addr, 0);
2370 if (GET_CODE (addr) != PLUS)
2371 abort ();
2372 }
2373
2374 if (XEXP (addr, 0) == pic_offset_table_rtx)
2375 return orig;
2376
2377 if (reg == 0)
2378 reg = gen_reg_rtx (Pmode);
2379
2380 base = legitimize_pic_address (XEXP (addr, 0), reg);
2381 addr = legitimize_pic_address (XEXP (addr, 1),
2382 base == reg ? NULL_RTX : reg);
2383
2384 if (GET_CODE (addr) == CONST_INT)
2385 return plus_constant (base, INTVAL (addr));
2386
2387 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2388 {
2389 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2390 addr = XEXP (addr, 1);
2391 }
2392 return gen_rtx (PLUS, Pmode, base, addr);
2393 }
2394 return new;
2395 }
2396 \f
2397
2398 /* Emit insns to move operands[1] into operands[0]. */
2399
2400 void
2401 emit_pic_move (operands, mode)
2402 rtx *operands;
2403 enum machine_mode mode;
2404 {
2405 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2406
2407 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2408 operands[1] = (rtx) force_reg (SImode, operands[1]);
2409 else
2410 operands[1] = legitimize_pic_address (operands[1], temp);
2411 }
2412
2413 \f
2414 /* Try machine-dependent ways of modifying an illegitimate address
2415 to be legitimate. If we find one, return the new, valid address.
2416 This macro is used in only one place: `memory_address' in explow.c.
2417
2418 OLDX is the address as it was before break_out_memory_refs was called.
2419 In some cases it is useful to look at this to decide what needs to be done.
2420
2421 MODE and WIN are passed so that this macro can use
2422 GO_IF_LEGITIMATE_ADDRESS.
2423
2424 It is always safe for this macro to do nothing. It exists to recognize
2425 opportunities to optimize the output.
2426
2427 For the 80386, we handle X+REG by loading X into a register R and
2428 using R+REG. R will go in a general reg and indexing will be used.
2429 However, if REG is a broken-out memory address or multiplication,
2430 nothing needs to be done because REG can certainly go in a general reg.
2431
2432 When -fpic is used, special handling is needed for symbolic references.
2433 See comments by legitimize_pic_address in i386.c for details. */
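/* For instance, an address such as (plus (reg) (ashift (reg) (const_int 2)))
   is canonicalized below into (plus (mult (reg) (const_int 4)) (reg)), which
   is one of the scaled-index forms accepted by legitimate_address_p.  */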
2434
2435 rtx
2436 legitimize_address (x, oldx, mode)
2437 register rtx x;
2438 register rtx oldx;
2439 enum machine_mode mode;
2440 {
2441 int changed = 0;
2442 unsigned log;
2443
2444 if (TARGET_DEBUG_ADDR)
2445 {
2446 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
2447 debug_rtx (x);
2448 }
2449
2450 if (flag_pic && SYMBOLIC_CONST (x))
2451 return legitimize_pic_address (x, 0);
2452
2453 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2454 if (GET_CODE (x) == ASHIFT
2455 && GET_CODE (XEXP (x, 1)) == CONST_INT
2456 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2457 {
2458 changed = 1;
2459 x = gen_rtx (MULT, Pmode,
2460 force_reg (Pmode, XEXP (x, 0)),
2461 GEN_INT (1 << log));
2462 }
2463
2464 if (GET_CODE (x) == PLUS)
2465 {
2466 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2467 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2468 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2469 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2470 {
2471 changed = 1;
2472 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2473 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2474 GEN_INT (1 << log));
2475 }
2476
2477 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2478 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2479 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2480 {
2481 changed = 1;
2482 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2483 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2484 GEN_INT (1 << log));
2485 }
2486
2487 /* Put multiply first if it isn't already */
2488 if (GET_CODE (XEXP (x, 1)) == MULT)
2489 {
2490 rtx tmp = XEXP (x, 0);
2491 XEXP (x, 0) = XEXP (x, 1);
2492 XEXP (x, 1) = tmp;
2493 changed = 1;
2494 }
2495
2496 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2497 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2498 created by virtual register instantiation, register elimination, and
2499 similar optimizations. */
2500 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2501 {
2502 changed = 1;
2503 x = gen_rtx (PLUS, Pmode,
2504 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2505 XEXP (XEXP (x, 1), 1));
2506 }
2507
2508 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2509 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2510 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2511 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2512 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2513 && CONSTANT_P (XEXP (x, 1)))
2514 {
2515 rtx constant, other;
2516
2517 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2518 {
2519 constant = XEXP (x, 1);
2520 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2521 }
2522 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2523 {
2524 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2525 other = XEXP (x, 1);
2526 }
2527 else
2528 constant = 0;
2529
2530 if (constant)
2531 {
2532 changed = 1;
2533 x = gen_rtx (PLUS, Pmode,
2534 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2535 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2536 plus_constant (other, INTVAL (constant)));
2537 }
2538 }
2539
2540 if (changed && legitimate_address_p (mode, x, FALSE))
2541 return x;
2542
2543 if (GET_CODE (XEXP (x, 0)) == MULT)
2544 {
2545 changed = 1;
2546 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2547 }
2548
2549 if (GET_CODE (XEXP (x, 1)) == MULT)
2550 {
2551 changed = 1;
2552 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2553 }
2554
2555 if (changed
2556 && GET_CODE (XEXP (x, 1)) == REG
2557 && GET_CODE (XEXP (x, 0)) == REG)
2558 return x;
2559
2560 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2561 {
2562 changed = 1;
2563 x = legitimize_pic_address (x, 0);
2564 }
2565
2566 if (changed && legitimate_address_p (mode, x, FALSE))
2567 return x;
2568
2569 if (GET_CODE (XEXP (x, 0)) == REG)
2570 {
2571 register rtx temp = gen_reg_rtx (Pmode);
2572 register rtx val = force_operand (XEXP (x, 1), temp);
2573 if (val != temp)
2574 emit_move_insn (temp, val);
2575
2576 XEXP (x, 1) = temp;
2577 return x;
2578 }
2579
2580 else if (GET_CODE (XEXP (x, 1)) == REG)
2581 {
2582 register rtx temp = gen_reg_rtx (Pmode);
2583 register rtx val = force_operand (XEXP (x, 0), temp);
2584 if (val != temp)
2585 emit_move_insn (temp, val);
2586
2587 XEXP (x, 0) = temp;
2588 return x;
2589 }
2590 }
2591
2592 return x;
2593 }
2594
2595 \f
2596 /* Print an integer constant expression in assembler syntax. Addition
2597 and subtraction are the only arithmetic that may appear in these
2598 expressions. FILE is the stdio stream to write to, X is the rtx, and
2599 CODE is the operand print code from the output string. */
2600
2601 static void
2602 output_pic_addr_const (file, x, code)
2603 FILE *file;
2604 rtx x;
2605 int code;
2606 {
2607 char buf[256];
2608
2609 switch (GET_CODE (x))
2610 {
2611 case PC:
2612 if (flag_pic)
2613 putc ('.', file);
2614 else
2615 abort ();
2616 break;
2617
2618 case SYMBOL_REF:
2619 case LABEL_REF:
2620 if (GET_CODE (x) == SYMBOL_REF)
2621 assemble_name (file, XSTR (x, 0));
2622 else
2623 {
2624 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2625 CODE_LABEL_NUMBER (XEXP (x, 0)));
2626 assemble_name (asm_out_file, buf);
2627 }
2628
2629 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2630 fprintf (file, "@GOTOFF(%%ebx)");
2631 else if (code == 'P')
2632 fprintf (file, "@PLT");
2633 else if (GET_CODE (x) == LABEL_REF)
2634 fprintf (file, "@GOTOFF");
2635 else if (! SYMBOL_REF_FLAG (x))
2636 fprintf (file, "@GOT");
2637 else
2638 fprintf (file, "@GOTOFF");
2639
2640 break;
2641
2642 case CODE_LABEL:
2643 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2644 assemble_name (asm_out_file, buf);
2645 break;
2646
2647 case CONST_INT:
2648 fprintf (file, "%d", INTVAL (x));
2649 break;
2650
2651 case CONST:
2652 /* This used to output parentheses around the expression,
2653 but that does not work on the 386 (either ATT or BSD assembler). */
2654 output_pic_addr_const (file, XEXP (x, 0), code);
2655 break;
2656
2657 case CONST_DOUBLE:
2658 if (GET_MODE (x) == VOIDmode)
2659 {
2660 /* We can use %d if the number is <32 bits and positive. */
2661 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2662 fprintf (file, "0x%x%08x",
2663 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2664 else
2665 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2666 }
2667 else
2668 /* We can't handle floating point constants;
2669 PRINT_OPERAND must handle them. */
2670 output_operand_lossage ("floating constant misused");
2671 break;
2672
2673 case PLUS:
2674       /* Some assemblers need integer constants to appear last (e.g. MASM).  */
2675 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2676 {
2677 output_pic_addr_const (file, XEXP (x, 1), code);
2678 if (INTVAL (XEXP (x, 0)) >= 0)
2679 fprintf (file, "+");
2680 output_pic_addr_const (file, XEXP (x, 0), code);
2681 }
2682 else
2683 {
2684 output_pic_addr_const (file, XEXP (x, 0), code);
2685 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT || INTVAL (XEXP (x, 1)) >= 0)
2686 	    fprintf (file, "+");
2687 output_pic_addr_const (file, XEXP (x, 1), code);
2688 }
2689 break;
2690
2691 case MINUS:
2692 output_pic_addr_const (file, XEXP (x, 0), code);
2693 fprintf (file, "-");
2694 output_pic_addr_const (file, XEXP (x, 1), code);
2695 break;
2696
2697 default:
2698 output_operand_lossage ("invalid expression as operand");
2699 }
2700 }
2701 \f
2702 /* Output to FILE the condition suffix corresponding to CODE, for MODE_INT or MODE_FLOAT comparisons.  */
2703
2704 static void
2705 put_condition_code (code, mode, file)
2706 enum rtx_code code;
2707 enum mode_class mode;
2708 FILE * file;
2709 {
2710 if (mode == MODE_INT)
2711 switch (code)
2712 {
2713 case NE:
2714 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2715 fputs ("b", file);
2716 else
2717 fputs ("ne", file);
2718 return;
2719 case EQ:
2720 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2721 fputs ("ae", file);
2722 else
2723 fputs ("e", file);
2724 return;
2725 case GE:
2726 fputs ("ge", file); return;
2727 case GT:
2728 fputs ("g", file); return;
2729 case LE:
2730 fputs ("le", file); return;
2731 case LT:
2732 fputs ("l", file); return;
2733 case GEU:
2734 fputs ("ae", file); return;
2735 case GTU:
2736 fputs ("a", file); return;
2737 case LEU:
2738 fputs ("be", file); return;
2739 case LTU:
2740 fputs ("b", file); return;
2741 default: output_operand_lossage ("Invalid %%C operand");
2742 }
2743 else if (mode == MODE_FLOAT)
2744 switch (code)
2745 {
2746 case NE:
2747 fputs ("ne", file); return;
2748 case EQ:
2749 fputs ("e", file); return;
2750 case GE:
2751 fputs ("nb", file); return;
2752 case GT:
2753 fputs ("nbe", file); return;
2754 case LE:
2755 fputs ("be", file); return;
2756 case LT:
2757 fputs ("b", file); return;
2758 case GEU:
2759 fputs ("nb", file); return;
2760 case GTU:
2761 fputs ("nbe", file); return;
2762 case LEU:
2763 fputs ("be", file); return;
2764 case LTU:
2765 fputs ("b", file); return;
2766 default: output_operand_lossage ("Invalid %%C operand");
2767 }
2768 }
2769
2770 /* Meaning of CODE:
2771    L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2772    C -- print opcode suffix for set/cmov insn.
2773    c -- like C, but print reversed condition
2774    F -- print opcode suffix for fcmov insn.
2775    f -- like F, but print reversed condition
2776    R -- print the prefix for register names.
2777    z -- print the opcode suffix for the size of the current operand.
2778    * -- print a star (in certain assembler syntax)
2779    w -- print the operand as if it's a "word" (HImode) even if it isn't.
2781    J -- print the appropriate jump operand.
2782    s -- print a shift double count, followed by the assembler's argument
2783         delimiter.
2784    b -- print the QImode name of the register for the indicated operand.
2785         %b0 would print %al if operands[0] is reg 0.
2786    w -- likewise, print the HImode name of the register.
2787    k -- likewise, print the SImode name of the register.
2788    h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2789    y -- print "st(0)" instead of "st" as a register.
2790    P -- print as a PIC constant
2791 */
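/* For example, with an SImode register operand, `%z0' emits the `l' opcode
   suffix (as in `movl'), while `%b0', `%w0' and `%k0' print the byte, word
   and long names of the same hard register (in AT&T syntax, %al, %ax and
   %eax for reg 0).  */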
2792
2793 void
2794 print_operand (file, x, code)
2795 FILE *file;
2796 rtx x;
2797 int code;
2798 {
2799 if (code)
2800 {
2801 switch (code)
2802 {
2803 case '*':
2804 if (USE_STAR)
2805 putc ('*', file);
2806 return;
2807
2808 case 'L':
2809 PUT_OP_SIZE (code, 'l', file);
2810 return;
2811
2812 case 'W':
2813 PUT_OP_SIZE (code, 'w', file);
2814 return;
2815
2816 case 'B':
2817 PUT_OP_SIZE (code, 'b', file);
2818 return;
2819
2820 case 'Q':
2821 PUT_OP_SIZE (code, 'l', file);
2822 return;
2823
2824 case 'S':
2825 PUT_OP_SIZE (code, 's', file);
2826 return;
2827
2828 case 'T':
2829 PUT_OP_SIZE (code, 't', file);
2830 return;
2831
2832 case 'z':
2833 /* 387 opcodes don't get size suffixes if the operands are
2834 registers. */
2835
2836 if (STACK_REG_P (x))
2837 return;
2838
2839 	  /* Derive the opcode suffix from the size of the operand.  */
2840 switch (GET_MODE_SIZE (GET_MODE (x)))
2841 {
2842 case 1:
2843 PUT_OP_SIZE ('B', 'b', file);
2844 return;
2845
2846 case 2:
2847 PUT_OP_SIZE ('W', 'w', file);
2848 return;
2849
2850 case 4:
2851 if (GET_MODE (x) == SFmode)
2852 {
2853 PUT_OP_SIZE ('S', 's', file);
2854 return;
2855 }
2856 else
2857 PUT_OP_SIZE ('L', 'l', file);
2858 return;
2859
2860 case 12:
2861 PUT_OP_SIZE ('T', 't', file);
2862 return;
2863
2864 case 8:
2865 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
2866 {
2867 #ifdef GAS_MNEMONICS
2868 PUT_OP_SIZE ('Q', 'q', file);
2869 return;
2870 #else
2871 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
2872 #endif
2873 }
2874
2875 PUT_OP_SIZE ('Q', 'l', file);
2876 return;
2877 }
2878
2879 case 'b':
2880 case 'w':
2881 case 'k':
2882 case 'h':
2883 case 'y':
2884 case 'P':
2885 break;
2886
2887 case 'J':
2888 switch (GET_CODE (x))
2889 {
2890 /* These conditions are appropriate for testing the result
2891 of an arithmetic operation, not for a compare operation.
2892 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2893 CC_Z_IN_NOT_C false and not floating point. */
2894 case NE: fputs ("jne", file); return;
2895 case EQ: fputs ("je", file); return;
2896 case GE: fputs ("jns", file); return;
2897 case LT: fputs ("js", file); return;
2898 case GEU: fputs ("jmp", file); return;
2899 case GTU: fputs ("jne", file); return;
2900 case LEU: fputs ("je", file); return;
2901 case LTU: fputs ("#branch never", file); return;
2902
2903 	      /* There are no matching branches for GT or LE.  */
2904 }
2905 abort ();
2906
2907 case 's':
2908 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
2909 {
2910 PRINT_OPERAND (file, x, 0);
2911 fputs (AS2C (,) + 1, file);
2912 }
2913 return;
2914
2915 /* This is used by the conditional move instructions. */
2916 case 'C':
2917 put_condition_code (GET_CODE (x), MODE_INT, file);
2918 return;
2919
2920 /* like above, but reverse condition */
2921 case 'c':
2922 put_condition_code (reverse_condition (GET_CODE (x)), MODE_INT, file);
2923 return;
2924
2925 case 'F':
2926 put_condition_code (GET_CODE (x), MODE_FLOAT, file);
2927 return;
2928
2929 /* like above, but reverse condition */
2930 case 'f':
2931 put_condition_code (reverse_condition (GET_CODE (x)),
2932 MODE_FLOAT, file);
2933 return;
2934
2935 default:
2936 {
2937 char str[50];
2938
2939 sprintf (str, "invalid operand code `%c'", code);
2940 output_operand_lossage (str);
2941 }
2942 }
2943 }
2944 if (GET_CODE (x) == REG)
2945 {
2946 PRINT_REG (x, code, file);
2947 }
2948 else if (GET_CODE (x) == MEM)
2949 {
2950 PRINT_PTR (x, file);
2951 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2952 {
2953 if (flag_pic)
2954 output_pic_addr_const (file, XEXP (x, 0), code);
2955 else
2956 output_addr_const (file, XEXP (x, 0));
2957 }
2958 else
2959 output_address (XEXP (x, 0));
2960 }
2961 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
2962 {
2963 REAL_VALUE_TYPE r; long l;
2964 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2965 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2966 PRINT_IMMED_PREFIX (file);
2967 fprintf (file, "0x%x", l);
2968 }
2969 /* These float cases don't actually occur as immediate operands. */
2970 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
2971 {
2972 REAL_VALUE_TYPE r; char dstr[30];
2973 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2974 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2975 fprintf (file, "%s", dstr);
2976 }
2977 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
2978 {
2979 REAL_VALUE_TYPE r; char dstr[30];
2980 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2981 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2982 fprintf (file, "%s", dstr);
2983 }
2984 else
2985 {
2986 if (code != 'P')
2987 {
2988 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2989 PRINT_IMMED_PREFIX (file);
2990 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
2991 || GET_CODE (x) == LABEL_REF)
2992 PRINT_OFFSET_PREFIX (file);
2993 }
2994 if (flag_pic)
2995 output_pic_addr_const (file, x, code);
2996 else
2997 output_addr_const (file, x);
2998 }
2999 }
3000 \f
3001 /* Print a memory operand whose address is ADDR. */
3002
3003 void
3004 print_operand_address (file, addr)
3005 FILE *file;
3006 register rtx addr;
3007 {
3008 register rtx reg1, reg2, breg, ireg;
3009 rtx offset;
3010
3011 switch (GET_CODE (addr))
3012 {
3013 case REG:
3014 ADDR_BEG (file);
3015 fprintf (file, "%se", RP);
3016 fputs (hi_reg_name[REGNO (addr)], file);
3017 ADDR_END (file);
3018 break;
3019
3020 case PLUS:
3021 reg1 = 0;
3022 reg2 = 0;
3023 ireg = 0;
3024 breg = 0;
3025 offset = 0;
3026 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3027 {
3028 offset = XEXP (addr, 0);
3029 addr = XEXP (addr, 1);
3030 }
3031 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3032 {
3033 offset = XEXP (addr, 1);
3034 addr = XEXP (addr, 0);
3035 }
3036 if (GET_CODE (addr) != PLUS) ;
3037 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3038 {
3039 reg1 = XEXP (addr, 0);
3040 addr = XEXP (addr, 1);
3041 }
3042 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3043 {
3044 reg1 = XEXP (addr, 1);
3045 addr = XEXP (addr, 0);
3046 }
3047 else if (GET_CODE (XEXP (addr, 0)) == REG)
3048 {
3049 reg1 = XEXP (addr, 0);
3050 addr = XEXP (addr, 1);
3051 }
3052 else if (GET_CODE (XEXP (addr, 1)) == REG)
3053 {
3054 reg1 = XEXP (addr, 1);
3055 addr = XEXP (addr, 0);
3056 }
3057 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3058 {
3059 if (reg1 == 0) reg1 = addr;
3060 else reg2 = addr;
3061 addr = 0;
3062 }
3063 if (offset != 0)
3064 {
3065 if (addr != 0) abort ();
3066 addr = offset;
3067 }
3068 if ((reg1 && GET_CODE (reg1) == MULT)
3069 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3070 {
3071 breg = reg2;
3072 ireg = reg1;
3073 }
3074 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3075 {
3076 breg = reg1;
3077 ireg = reg2;
3078 }
3079
3080 if (ireg != 0 || breg != 0)
3081 {
3082 int scale = 1;
3083
3084 if (addr != 0)
3085 {
3086 if (flag_pic)
3087 output_pic_addr_const (file, addr, 0);
3088
3089 else if (GET_CODE (addr) == LABEL_REF)
3090 output_asm_label (addr);
3091
3092 else
3093 output_addr_const (file, addr);
3094 }
3095
3096 if (ireg != 0 && GET_CODE (ireg) == MULT)
3097 {
3098 scale = INTVAL (XEXP (ireg, 1));
3099 ireg = XEXP (ireg, 0);
3100 }
3101
3102 /* The stack pointer can only appear as a base register,
3103 never an index register, so exchange the regs if it is wrong. */
3104
3105 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3106 {
3107 rtx tmp;
3108
3109 tmp = breg;
3110 breg = ireg;
3111 ireg = tmp;
3112 }
3113
3114 /* output breg+ireg*scale */
3115 PRINT_B_I_S (breg, ireg, scale, file);
3116 break;
3117 }
3118
3119 case MULT:
3120 {
3121 int scale;
3122 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3123 {
3124 scale = INTVAL (XEXP (addr, 0));
3125 ireg = XEXP (addr, 1);
3126 }
3127 else
3128 {
3129 scale = INTVAL (XEXP (addr, 1));
3130 ireg = XEXP (addr, 0);
3131 }
3132 output_addr_const (file, const0_rtx);
3133 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
3134 }
3135 break;
3136
3137 default:
3138 if (GET_CODE (addr) == CONST_INT
3139 && INTVAL (addr) < 0x8000
3140 && INTVAL (addr) >= -0x8000)
3141 fprintf (file, "%d", INTVAL (addr));
3142 else
3143 {
3144 if (flag_pic)
3145 output_pic_addr_const (file, addr, 0);
3146 else
3147 output_addr_const (file, addr);
3148 }
3149 }
3150 }
3151 \f
3152 /* Set the cc_status for the results of an insn whose pattern is EXP.
3153 On the 80386, we assume that only test and compare insns, as well
3154 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3155 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3156 Also, we assume that jumps, moves and sCOND don't affect the condition
3157 codes. All else clobbers the condition codes, by assumption.
3158
3159    We assume that ALL integer add, minus, etc. instructions affect the
3160    condition codes.  This MUST be consistent with i386.md.
3161
3162 We don't record any float test or compare - the redundant test &
3163 compare check in final.c does not handle stack-like regs correctly. */
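/* For example, (set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1))) is handled by
   the PLUS case below: cc_status.flags is set to CC_NO_OVERFLOW and
   value1/value2 record the source and destination of the set, so final.c can
   often delete a following redundant compare against zero.  */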
3164
3165 void
3166 notice_update_cc (exp)
3167 rtx exp;
3168 {
3169 if (GET_CODE (exp) == SET)
3170 {
3171 /* Jumps do not alter the cc's. */
3172 if (SET_DEST (exp) == pc_rtx)
3173 return;
3174 #ifdef IS_STACK_MODE
3175       /* A move into stack-mode memory may have been moved in between the
3176          use and the set of cc0 by loop_spl (), so the old value of
3177          cc_status must be retained.  */
3178       if (GET_CODE (SET_DEST (exp)) == MEM
3179 	  && IS_STACK_MODE (GET_MODE (SET_DEST (exp))))
3180 {
3181 return;
3182 }
3183 #endif
3184 /* Moving register or memory into a register:
3185 it doesn't alter the cc's, but it might invalidate
3186 the RTX's which we remember the cc's came from.
3187 (Note that moving a constant 0 or 1 MAY set the cc's). */
3188 if (REG_P (SET_DEST (exp))
3189 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3190 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3191 {
3192 if (cc_status.value1
3193 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3194 cc_status.value1 = 0;
3195 if (cc_status.value2
3196 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3197 cc_status.value2 = 0;
3198 return;
3199 }
3200 /* Moving register into memory doesn't alter the cc's.
3201 It may invalidate the RTX's which we remember the cc's came from. */
3202 if (GET_CODE (SET_DEST (exp)) == MEM
3203 && (REG_P (SET_SRC (exp))
3204 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3205 {
3206 	  if (cc_status.value1 && (GET_CODE (cc_status.value1) == MEM
3207 	      || reg_mentioned_p (SET_DEST (exp), cc_status.value1)))
3208 	    cc_status.value1 = 0;
3209 	  if (cc_status.value2 && (GET_CODE (cc_status.value2) == MEM
3210 	      || reg_mentioned_p (SET_DEST (exp), cc_status.value2)))
3211 	    cc_status.value2 = 0;
3212 return;
3213 }
3214 /* Function calls clobber the cc's. */
3215 else if (GET_CODE (SET_SRC (exp)) == CALL)
3216 {
3217 CC_STATUS_INIT;
3218 return;
3219 }
3220 /* Tests and compares set the cc's in predictable ways. */
3221 else if (SET_DEST (exp) == cc0_rtx)
3222 {
3223 CC_STATUS_INIT;
3224 cc_status.value1 = SET_SRC (exp);
3225 return;
3226 }
3227       /* Certain instructions affect the condition codes. */
3228 else if (GET_MODE (SET_SRC (exp)) == SImode
3229 || GET_MODE (SET_SRC (exp)) == HImode
3230 || GET_MODE (SET_SRC (exp)) == QImode)
3231 switch (GET_CODE (SET_SRC (exp)))
3232 {
3233 case ASHIFTRT: case LSHIFTRT:
3234 case ASHIFT:
3235 /* Shifts on the 386 don't set the condition codes if the
3236 shift count is zero. */
3237 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3238 {
3239 CC_STATUS_INIT;
3240 break;
3241 }
3242 	    /* We assume that the CONST_INT is non-zero (this rtx would
3243 	       have been deleted if it were zero).  */
3244
3245 case PLUS: case MINUS: case NEG:
3246 case AND: case IOR: case XOR:
3247 cc_status.flags = CC_NO_OVERFLOW;
3248 cc_status.value1 = SET_SRC (exp);
3249 cc_status.value2 = SET_DEST (exp);
3250 break;
3251
3252 default:
3253 CC_STATUS_INIT;
3254 }
3255 else
3256 {
3257 CC_STATUS_INIT;
3258 }
3259 }
3260 else if (GET_CODE (exp) == PARALLEL
3261 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3262 {
3263 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3264 return;
3265 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3266 {
3267 CC_STATUS_INIT;
3268 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3269 cc_status.flags |= CC_IN_80387;
3270 else
3271 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3272 return;
3273 }
3274 CC_STATUS_INIT;
3275 }
3276 else
3277 {
3278 CC_STATUS_INIT;
3279 }
3280 }
3281 \f
3282 /* Split one or more DImode RTL references into pairs of SImode
3283 references. The RTL can be REG, offsettable MEM, integer constant, or
3284 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3285 split and "num" is its length. lo_half and hi_half are output arrays
3286 that parallel "operands". */
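/* For example, a DImode hard register N is split into the SImode registers
   N (low word) and N+1 (high word), and an offsettable DImode MEM is split
   into the original MEM and the same address plus 4 bytes.  */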
3287
3288 void
3289 split_di (operands, num, lo_half, hi_half)
3290 rtx operands[];
3291 int num;
3292 rtx lo_half[], hi_half[];
3293 {
3294 while (num--)
3295 {
3296 if (GET_CODE (operands[num]) == REG)
3297 {
3298 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3299 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3300 }
3301 else if (CONSTANT_P (operands[num]))
3302 {
3303 split_double (operands[num], &lo_half[num], &hi_half[num]);
3304 }
3305 else if (offsettable_memref_p (operands[num]))
3306 {
3307 lo_half[num] = operands[num];
3308 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3309 }
3310 else
3311 abort();
3312 }
3313 }
3314 \f
3315 /* Return 1 if this is a valid binary operation on a 387.
3316 OP is the expression matched, and MODE is its mode. */
3317
3318 int
3319 binary_387_op (op, mode)
3320 register rtx op;
3321 enum machine_mode mode;
3322 {
3323 if (mode != VOIDmode && mode != GET_MODE (op))
3324 return 0;
3325
3326 switch (GET_CODE (op))
3327 {
3328 case PLUS:
3329 case MINUS:
3330 case MULT:
3331 case DIV:
3332 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3333
3334 default:
3335 return 0;
3336 }
3337 }
3338
3339 \f
3340 /* Return 1 if this is a valid shift or rotate operation on a 386.
3341 OP is the expression matched, and MODE is its mode. */
3342
3343 int
3344 shift_op (op, mode)
3345 register rtx op;
3346 enum machine_mode mode;
3347 {
3348 rtx operand = XEXP (op, 0);
3349
3350 if (mode != VOIDmode && mode != GET_MODE (op))
3351 return 0;
3352
3353 if (GET_MODE (operand) != GET_MODE (op)
3354 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3355 return 0;
3356
3357 return (GET_CODE (op) == ASHIFT
3358 || GET_CODE (op) == ASHIFTRT
3359 || GET_CODE (op) == LSHIFTRT
3360 || GET_CODE (op) == ROTATE
3361 || GET_CODE (op) == ROTATERT);
3362 }
3363
3364 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3365 MODE is not used. */
3366
3367 int
3368 VOIDmode_compare_op (op, mode)
3369 register rtx op;
3370 enum machine_mode mode;
3371 {
3372 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3373 }
3374 \f
3375 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3376 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3377 is the expression of the binary operation. The output may either be
3378 emitted here, or returned to the caller, like all output_* functions.
3379
3380 There is no guarantee that the operands are the same mode, as they
3381 might be within FLOAT or FLOAT_EXTEND expressions. */
3382
3383 char *
3384 output_387_binary_op (insn, operands)
3385 rtx insn;
3386 rtx *operands;
3387 {
3388 rtx temp;
3389 char *base_op;
3390 static char buf[100];
3391
3392 switch (GET_CODE (operands[3]))
3393 {
3394 case PLUS:
3395 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3396 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3397 base_op = "fiadd";
3398 else
3399 base_op = "fadd";
3400 break;
3401
3402 case MINUS:
3403 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3404 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3405 base_op = "fisub";
3406 else
3407 base_op = "fsub";
3408 break;
3409
3410 case MULT:
3411 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3412 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3413 base_op = "fimul";
3414 else
3415 base_op = "fmul";
3416 break;
3417
3418 case DIV:
3419 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3420 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3421 base_op = "fidiv";
3422 else
3423 base_op = "fdiv";
3424 break;
3425
3426 default:
3427 abort ();
3428 }
3429
3430 strcpy (buf, base_op);
3431
3432 switch (GET_CODE (operands[3]))
3433 {
3434 case MULT:
3435 case PLUS:
3436 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3437 {
3438 temp = operands[2];
3439 operands[2] = operands[1];
3440 operands[1] = temp;
3441 }
3442
3443 if (GET_CODE (operands[2]) == MEM)
3444 return strcat (buf, AS1 (%z2,%2));
3445
3446 if (NON_STACK_REG_P (operands[1]))
3447 {
3448 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3449 RET;
3450 }
3451 else if (NON_STACK_REG_P (operands[2]))
3452 {
3453 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3454 RET;
3455 }
3456
3457 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3458 return strcat (buf, AS2 (p,%2,%0));
3459
3460 if (STACK_TOP_P (operands[0]))
3461 return strcat (buf, AS2C (%y2,%0));
3462 else
3463 return strcat (buf, AS2C (%2,%0));
3464
3465 case MINUS:
3466 case DIV:
3467 if (GET_CODE (operands[1]) == MEM)
3468 return strcat (buf, AS1 (r%z1,%1));
3469
3470 if (GET_CODE (operands[2]) == MEM)
3471 return strcat (buf, AS1 (%z2,%2));
3472
3473 if (NON_STACK_REG_P (operands[1]))
3474 {
3475 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3476 RET;
3477 }
3478 else if (NON_STACK_REG_P (operands[2]))
3479 {
3480 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3481 RET;
3482 }
3483
3484 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3485 abort ();
3486
3487 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3488 return strcat (buf, AS2 (rp,%2,%0));
3489
3490 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3491 return strcat (buf, AS2 (p,%1,%0));
3492
3493 if (STACK_TOP_P (operands[0]))
3494 {
3495 if (STACK_TOP_P (operands[1]))
3496 return strcat (buf, AS2C (%y2,%0));
3497 else
3498 return strcat (buf, AS2 (r,%y1,%0));
3499 }
3500 else if (STACK_TOP_P (operands[1]))
3501 return strcat (buf, AS2C (%1,%0));
3502 else
3503 return strcat (buf, AS2 (r,%2,%0));
3504
3505 default:
3506 abort ();
3507 }
3508 }
3509 \f
3510 /* Output code for INSN to convert a float to a signed int.  OPERANDS
3511    are the insn operands.  The input may be SFmode or DFmode and the
3512    output operand may be SImode or DImode.  As a special case, make sure
3513    that the 387 stack top dies if the output mode is DImode, because the
3514    hardware requires this.  */
3515
3516 char *
3517 output_fix_trunc (insn, operands)
3518 rtx insn;
3519 rtx *operands;
3520 {
3521 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3522 rtx xops[2];
3523
3524 if (! STACK_TOP_P (operands[1]) ||
3525 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
3526 abort ();
3527
3528 xops[0] = GEN_INT (12);
3529 xops[1] = operands[4];
3530
3531 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3532 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3533 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3534 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3535 output_asm_insn (AS1 (fldc%W3,%3), operands);
3536
3537 if (NON_STACK_REG_P (operands[0]))
3538 output_to_reg (operands[0], stack_top_dies);
3539 else if (GET_CODE (operands[0]) == MEM)
3540 {
3541 if (stack_top_dies)
3542 output_asm_insn (AS1 (fistp%z0,%0), operands);
3543 else
3544 output_asm_insn (AS1 (fist%z0,%0), operands);
3545 }
3546 else
3547 abort ();
3548
3549 return AS1 (fldc%W2,%2);
3550 }
3551 \f
3552 /* Output code for INSN to compare OPERANDS. The two operands might
3553 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3554 expression. If the compare is in mode CCFPEQmode, use an opcode that
3555 will not fault if a qNaN is present. */
3556
3557 char *
3558 output_float_compare (insn, operands)
3559 rtx insn;
3560 rtx *operands;
3561 {
3562 int stack_top_dies;
3563 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3564 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
3565 int target_fcomi = TARGET_CMOVE && STACK_REG_P (operands[1]);
3566
3567 rtx tmp;
3568 if (! STACK_TOP_P (operands[0]))
3569 {
3570 tmp = operands[0];
3571 operands[0] = operands[1];
3572 operands[1] = tmp;
3573 cc_status.flags |= CC_REVERSED;
3574 }
3575
3576 if (! STACK_TOP_P (operands[0]))
3577 abort ();
3578
3579 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3580
3581 if (STACK_REG_P (operands[1])
3582 && stack_top_dies
3583 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3584 && REGNO (operands[1]) != FIRST_STACK_REG)
3585 {
3586       /* If both the top of the 387 stack and the other operand (also a
3587 	 stack register) die, then this must be a `fcompp' float
3588 	 compare.  */
3589
3590 if (unordered_compare)
3591 output_asm_insn ("fucompp", operands);
3592 else
3593 output_asm_insn ("fcompp", operands);
3594 }
3595 else
3596 {
3597 static char buf[100];
3598
3599 /* Decide if this is the integer or float compare opcode, or the
3600 unordered float compare. */
3601
3602 if (unordered_compare)
3603 strcpy (buf, target_fcomi ? "fucomi" : "fucom");
3604 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3605 strcpy (buf, target_fcomi ? "fcomi" : "fcom");
3606 else
3607 strcpy (buf, "ficom");
3608
3609 /* Modify the opcode if the 387 stack is to be popped. */
3610
3611 if (stack_top_dies)
3612 strcat (buf, "p");
3613
3614 if (NON_STACK_REG_P (operands[1]))
3615 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3616 else if (target_fcomi)
3617 {
3618 rtx xops[3];
3619
3620 xops[0] = operands[0];
3621 xops[1] = operands[1];
3622 xops[2] = operands[0];
3623
3624 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%2)), xops);
3625 RET;
3626 }
3627 else
3628 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3629 }
3630
3631 /* Now retrieve the condition code. */
3632
3633 return output_fp_cc0_set (insn);
3634 }
3635 \f
3636 /* Output opcodes to transfer the results of FP compare or test INSN
3637 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3638 result of the compare or test is unordered, no comparison operator
3639 succeeds except NE. Return an output template, if any. */
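/* After the `fnstsw' emitted below, the FPU condition bits sit in %ah as
   C0 = 0x01, C2 = 0x04 and C3 = 0x40.  The masks 0x45, 0x44 and 0x05 and the
   comparisons against 0x40 and 0x01 used below test combinations of these
   bits so that an unordered result fails every comparison except NE.  */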
3640
3641 char *
3642 output_fp_cc0_set (insn)
3643 rtx insn;
3644 {
3645 rtx xops[3];
3646 rtx unordered_label;
3647 rtx next;
3648 enum rtx_code code;
3649
3650 xops[0] = gen_rtx (REG, HImode, 0);
3651 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3652
3653 if (! TARGET_IEEE_FP)
3654 {
3655 if (!(cc_status.flags & CC_REVERSED))
3656 {
3657 next = next_cc0_user (insn);
3658
3659 if (GET_CODE (next) == JUMP_INSN
3660 && GET_CODE (PATTERN (next)) == SET
3661 && SET_DEST (PATTERN (next)) == pc_rtx
3662 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3663 {
3664 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3665 }
3666 else if (GET_CODE (PATTERN (next)) == SET)
3667 {
3668 code = GET_CODE (SET_SRC (PATTERN (next)));
3669 }
3670 else
3671 {
3672 return "sahf";
3673 }
3674 if (code == GT || code == LT || code == EQ || code == NE
3675 || code == LE || code == GE)
3676 { /* We will test eax directly */
3677 cc_status.flags |= CC_TEST_AX;
3678 RET;
3679 }
3680 }
3681 return "sahf";
3682 }
3683
3684 next = next_cc0_user (insn);
3685 if (next == NULL_RTX)
3686 abort ();
3687
3688 if (GET_CODE (next) == JUMP_INSN
3689 && GET_CODE (PATTERN (next)) == SET
3690 && SET_DEST (PATTERN (next)) == pc_rtx
3691 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3692 {
3693 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3694 }
3695 else if (GET_CODE (PATTERN (next)) == SET)
3696 {
3697 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3698 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3699 else code = GET_CODE (SET_SRC (PATTERN (next)));
3700 }
3701 else
3702 abort ();
3703
3704 xops[0] = gen_rtx (REG, QImode, 0);
3705
3706 switch (code)
3707 {
3708 case GT:
3709 xops[1] = GEN_INT (0x45);
3710 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3711 /* je label */
3712 break;
3713
3714 case LT:
3715 xops[1] = GEN_INT (0x45);
3716 xops[2] = GEN_INT (0x01);
3717 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3718 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3719 /* je label */
3720 break;
3721
3722 case GE:
3723 xops[1] = GEN_INT (0x05);
3724 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3725 /* je label */
3726 break;
3727
3728 case LE:
3729 xops[1] = GEN_INT (0x45);
3730 xops[2] = GEN_INT (0x40);
3731 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3732 output_asm_insn (AS1 (dec%B0,%h0), xops);
3733 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3734 /* jb label */
3735 break;
3736
3737 case EQ:
3738 xops[1] = GEN_INT (0x45);
3739 xops[2] = GEN_INT (0x40);
3740 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3741 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3742 /* je label */
3743 break;
3744
3745 case NE:
3746 xops[1] = GEN_INT (0x44);
3747 xops[2] = GEN_INT (0x40);
3748 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3749 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
3750 /* jne label */
3751 break;
3752
3753 case GTU:
3754 case LTU:
3755 case GEU:
3756 case LEU:
3757 default:
3758 abort ();
3759 }
3760 RET;
3761 }
3762 \f
3763 #define MAX_386_STACK_LOCALS 2
3764
3765 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3766
3767 /* Define the structure for the machine field in struct function. */
3768 struct machine_function
3769 {
3770 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3771 };
3772
3773 /* Functions to save and restore i386_stack_locals.
3774 These will be called, via pointer variables,
3775 from push_function_context and pop_function_context. */
3776
3777 void
3778 save_386_machine_status (p)
3779 struct function *p;
3780 {
3781 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3782 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3783 sizeof i386_stack_locals);
3784 }
3785
3786 void
3787 restore_386_machine_status (p)
3788 struct function *p;
3789 {
3790 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3791 sizeof i386_stack_locals);
3792 free (p->machine);
3793 }
3794
3795 /* Clear stack slot assignments remembered from previous functions.
3796 This is called from INIT_EXPANDERS once before RTL is emitted for each
3797 function. */
3798
3799 void
3800 clear_386_stack_locals ()
3801 {
3802 enum machine_mode mode;
3803 int n;
3804
3805 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3806 mode = (enum machine_mode) ((int) mode + 1))
3807 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3808 i386_stack_locals[(int) mode][n] = NULL_RTX;
3809
3810 /* Arrange to save and restore i386_stack_locals around nested functions. */
3811 save_machine_status = save_386_machine_status;
3812 restore_machine_status = restore_386_machine_status;
3813 }
3814
3815 /* Return a MEM corresponding to a stack slot with mode MODE.
3816 Allocate a new slot if necessary.
3817
3818 The RTL for a function can have several slots available: N is
3819 which slot to use. */
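/* Note that repeated calls with the same MODE and N return the same slot
   within a single function; clear_386_stack_locals resets the table before
   each new function is compiled.  */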
3820
3821 rtx
3822 assign_386_stack_local (mode, n)
3823 enum machine_mode mode;
3824 int n;
3825 {
3826 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3827 abort ();
3828
3829 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3830 i386_stack_locals[(int) mode][n]
3831 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3832
3833 return i386_stack_locals[(int) mode][n];
3834 }
3835
3836 \f
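/* Return 1 if OP is a MULT rtx.  MODE is not used.  */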
3837 int is_mul (op, mode)
3838 register rtx op;
3839 enum machine_mode mode;
3840 {
3841 return (GET_CODE (op) == MULT);
3842 }
3843
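/* Return 1 if OP is a DIV rtx.  MODE is not used.  */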
3844 int is_div (op, mode)
3845 register rtx op;
3846 enum machine_mode mode;
3847 {
3848 return (GET_CODE (op) == DIV);
3849 }
3850
3851 \f
3852 #ifdef NOTYET
3853 /* Create a new copy of an rtx.
3854 Recursively copies the operands of the rtx,
3855 except for those few rtx codes that are sharable.
3856 Doesn't share CONST */
3857
3858 rtx
3859 copy_all_rtx (orig)
3860 register rtx orig;
3861 {
3862 register rtx copy;
3863 register int i, j;
3864 register RTX_CODE code;
3865 register char *format_ptr;
3866
3867 code = GET_CODE (orig);
3868
3869 switch (code)
3870 {
3871 case REG:
3872 case QUEUED:
3873 case CONST_INT:
3874 case CONST_DOUBLE:
3875 case SYMBOL_REF:
3876 case CODE_LABEL:
3877 case PC:
3878 case CC0:
3879 case SCRATCH:
3880 /* SCRATCH must be shared because they represent distinct values. */
3881 return orig;
3882
3883 #if 0
3884 case CONST:
3885 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3886 a LABEL_REF, it isn't sharable. */
3887 if (GET_CODE (XEXP (orig, 0)) == PLUS
3888 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3889 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3890 return orig;
3891 break;
3892 #endif
3893 /* A MEM with a constant address is not sharable. The problem is that
3894 the constant address may need to be reloaded. If the mem is shared,
3895 then reloading one copy of this mem will cause all copies to appear
3896 to have been reloaded. */
3897 }
3898
3899 copy = rtx_alloc (code);
3900 PUT_MODE (copy, GET_MODE (orig));
3901 copy->in_struct = orig->in_struct;
3902 copy->volatil = orig->volatil;
3903 copy->unchanging = orig->unchanging;
3904 copy->integrated = orig->integrated;
3905 /* intel1 */
3906 copy->is_spill_rtx = orig->is_spill_rtx;
3907
3908 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3909
3910 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3911 {
3912 switch (*format_ptr++)
3913 {
3914 case 'e':
3915 XEXP (copy, i) = XEXP (orig, i);
3916 if (XEXP (orig, i) != NULL)
3917 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
3918 break;
3919
3920 case '0':
3921 case 'u':
3922 XEXP (copy, i) = XEXP (orig, i);
3923 break;
3924
3925 case 'E':
3926 case 'V':
3927 XVEC (copy, i) = XVEC (orig, i);
3928 if (XVEC (orig, i) != NULL)
3929 {
3930 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3931 for (j = 0; j < XVECLEN (copy, i); j++)
3932 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
3933 }
3934 break;
3935
3936 case 'w':
3937 XWINT (copy, i) = XWINT (orig, i);
3938 break;
3939
3940 case 'i':
3941 XINT (copy, i) = XINT (orig, i);
3942 break;
3943
3944 case 's':
3945 case 'S':
3946 XSTR (copy, i) = XSTR (orig, i);
3947 break;
3948
3949 default:
3950 abort ();
3951 }
3952 }
3953 return copy;
3954 }
3955
3956 \f
3957 /* Try to rewrite a memory address to make it valid.  */
3958 void
3959 rewrite_address (mem_rtx)
3960 rtx mem_rtx;
3961 {
3962 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
3963 int scale = 1;
3964 int offset_adjust = 0;
3965 int was_only_offset = 0;
3966 rtx mem_addr = XEXP (mem_rtx, 0);
3967 char *storage = (char *) oballoc (0);
3968 int in_struct = 0;
3969 int is_spill_rtx = 0;
3970
3971 in_struct = MEM_IN_STRUCT_P (mem_rtx);
3972 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
3973
3974 if (GET_CODE (mem_addr) == PLUS &&
3975 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
3976 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
3977 { /* this part is utilized by the combiner */
3978 ret_rtx =
3979 gen_rtx (PLUS, GET_MODE (mem_addr),
3980 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
3981 XEXP (mem_addr, 0),
3982 XEXP (XEXP (mem_addr, 1), 0)),
3983 XEXP (XEXP (mem_addr, 1), 1));
3984 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
3985 {
3986 XEXP (mem_rtx, 0) = ret_rtx;
3987 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
3988 return;
3989 }
3990 obfree (storage);
3991 }
3992
3993   /* This part is utilized by loop.c.  */
3994   /* If the address contains PLUS (reg, const) and that pattern is not
3995      a valid address here, try to rewrite the address to make it
3996      valid.  intel1 */
3997 storage = (char *) oballoc (0);
3998 index_rtx = base_rtx = offset_rtx = NULL;
3999   /* find the base, index and offset elements of the memory address */
4000 if (GET_CODE (mem_addr) == PLUS)
4001 {
4002 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4003 {
4004 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4005 {
4006 base_rtx = XEXP (mem_addr, 1);
4007 index_rtx = XEXP (mem_addr, 0);
4008 }
4009 else
4010 {
4011 base_rtx = XEXP (mem_addr, 0);
4012 offset_rtx = XEXP (mem_addr, 1);
4013 }
4014 }
4015 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4016 {
4017 index_rtx = XEXP (mem_addr, 0);
4018 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4019 {
4020 base_rtx = XEXP (mem_addr, 1);
4021 }
4022 else
4023 {
4024 offset_rtx = XEXP (mem_addr, 1);
4025 }
4026 }
4027 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
4028 {
4029 /* intel1 */
4030 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
4031 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
4032 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
4033 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
4034 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
4035 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
4036 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4037 {
4038 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4039 offset_rtx = XEXP (mem_addr, 1);
4040 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4041 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4042 }
4043 else
4044 {
4045 offset_rtx = XEXP (mem_addr, 1);
4046 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4047 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4048 }
4049 }
4050 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4051 {
4052 was_only_offset = 1;
4053 index_rtx = NULL;
4054 base_rtx = NULL;
4055 offset_rtx = XEXP (mem_addr, 1);
4056 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4057 if (offset_adjust == 0)
4058 {
4059 XEXP (mem_rtx, 0) = offset_rtx;
4060 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4061 return;
4062 }
4063 }
4064 else
4065 {
4066 obfree (storage);
4067 return;
4068 }
4069 }
4070 else if (GET_CODE (mem_addr) == MULT)
4071 {
4072 index_rtx = mem_addr;
4073 }
4074 else
4075 {
4076 obfree (storage);
4077 return;
4078 }
4079 if (index_rtx && GET_CODE (index_rtx) == MULT)
4080 {
4081 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4082 {
4083 obfree (storage);
4084 return;
4085 }
4086 scale_rtx = XEXP (index_rtx, 1);
4087 scale = INTVAL (scale_rtx);
4088 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4089 }
4090 /* now find which of the elements are invalid and try to fix them */
4091 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4092 {
4093 offset_adjust = INTVAL (index_rtx) * scale;
4094 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4095 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4096 {
4097 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4098 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4099 {
4100 offset_rtx = copy_all_rtx (offset_rtx);
4101 XEXP (XEXP (offset_rtx, 0), 1) =
4102 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4103 if (!CONSTANT_P (offset_rtx))
4104 {
4105 obfree (storage);
4106 return;
4107 }
4108 }
4109 }
4110 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4111 {
4112 offset_rtx =
4113 gen_rtx (CONST, GET_MODE (offset_rtx),
4114 gen_rtx (PLUS, GET_MODE (offset_rtx),
4115 offset_rtx,
4116 gen_rtx (CONST_INT, 0, offset_adjust)));
4117 if (!CONSTANT_P (offset_rtx))
4118 {
4119 obfree (storage);
4120 return;
4121 }
4122 }
4123 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4124 {
4125 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4126 }
4127 else if (!offset_rtx)
4128 {
4129 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4130 }
4131 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4132 XEXP (mem_rtx, 0) = offset_rtx;
4133 return;
4134 }
4135 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4136 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4137 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4138 {
4139 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4140 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4141 }
4142 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4143 {
4144 offset_adjust += INTVAL (base_rtx);
4145 base_rtx = NULL;
4146 }
4147 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4148 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4149 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4150 {
4151 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4152 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4153 }
4154 if (index_rtx)
4155 {
4156 if (!LEGITIMATE_INDEX_P (index_rtx)
4157 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4158 {
4159 obfree (storage);
4160 return;
4161 }
4162 }
4163 if (base_rtx)
4164 {
4165 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4166 {
4167 obfree (storage);
4168 return;
4169 }
4170 }
4171 if (offset_adjust != 0)
4172 {
4173 if (offset_rtx)
4174 {
4175 if (GET_CODE (offset_rtx) == CONST &&
4176 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4177 {
4178 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4179 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4180 {
4181 offset_rtx = copy_all_rtx (offset_rtx);
4182 XEXP (XEXP (offset_rtx, 0), 1) =
4183 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4184 if (!CONSTANT_P (offset_rtx))
4185 {
4186 obfree (storage);
4187 return;
4188 }
4189 }
4190 }
4191 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4192 {
4193 offset_rtx =
4194 gen_rtx (CONST, GET_MODE (offset_rtx),
4195 gen_rtx (PLUS, GET_MODE (offset_rtx),
4196 offset_rtx,
4197 gen_rtx (CONST_INT, 0, offset_adjust)));
4198 if (!CONSTANT_P (offset_rtx))
4199 {
4200 obfree (storage);
4201 return;
4202 }
4203 }
4204 else if (GET_CODE (offset_rtx) == CONST_INT)
4205 {
4206 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4207 }
4208 else
4209 {
4210 obfree (storage);
4211 return;
4212 }
4213 }
4214 else
4215 {
4216 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
4217 }
4218 if (index_rtx)
4219 {
4220 if (base_rtx)
4221 {
4222 if (scale != 1)
4223 {
4224 if (GET_CODE (offset_rtx) == CONST_INT &&
4225 INTVAL (offset_rtx) == 0)
4226 {
4227 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4228 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4229 scale_rtx),
4230 base_rtx);
4231 }
4232 else
4233 {
4234 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4235 gen_rtx (PLUS, GET_MODE (base_rtx),
4236 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4237 scale_rtx),
4238 base_rtx),
4239 offset_rtx);
4240 }
4241 }
4242 else
4243 {
4244 if (GET_CODE (offset_rtx) == CONST_INT &&
4245 INTVAL (offset_rtx) == 0)
4246 {
4247 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4248 }
4249 else
4250 {
4251 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4252 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
4253 base_rtx),
4254 offset_rtx);
4255 }
4256 }
4257 }
4258 else
4259 {
4260 if (scale != 1)
4261 {
4262 if (GET_CODE (offset_rtx) == CONST_INT &&
4263 INTVAL (offset_rtx) == 0)
4264 {
4265 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4266 }
4267 else
4268 {
4269 ret_rtx =
4270 gen_rtx (PLUS, GET_MODE (offset_rtx),
4271 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4272 scale_rtx),
4273 offset_rtx);
4274 }
4275 }
4276 else
4277 {
4278 if (GET_CODE (offset_rtx) == CONST_INT &&
4279 INTVAL (offset_rtx) == 0)
4280 {
4281 ret_rtx = index_rtx;
4282 }
4283 else
4284 {
4285 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
4286 }
4287 }
4288 }
4289 }
4290 else
4291 {
4292 if (base_rtx)
4293 {
4294 if (GET_CODE (offset_rtx) == CONST_INT &&
4295 INTVAL (offset_rtx) == 0)
4296 {
4297 ret_rtx = base_rtx;
4298 }
4299 else
4300 {
4301 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4302 }
4303 }
4304 else if (was_only_offset)
4305 {
4306 ret_rtx = offset_rtx;
4307 }
4308 else
4309 {
4310 obfree (storage);
4311 return;
4312 }
4313 }
4314 XEXP (mem_rtx, 0) = ret_rtx;
4315 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4316 return;
4317 }
4318 else
4319 {
4320 obfree (storage);
4321 return;
4322 }
4323 }
4324 #endif /* NOTYET */
4325
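/* A small model of the address shape rewrite_address above aims for.  An
   i386 memory operand is at most  base + index * scale + displacement,
   with scale in {1, 2, 4, 8}; the routine folds stray constants found in
   the base or index terms into the displacement.  The struct and helpers
   below are hypothetical and kept under #if 0; they only mirror that
   folding in plain C.  */
#if 0
struct ix86_addr_parts
{
  long base;                   /* value of the base register */
  long index;                  /* value of the index register */
  int scale;                   /* 1, 2, 4 or 8 */
  long disp;                   /* constant displacement */
};

/* Fold a constant found inside the index term, (index + adjust) * scale,
   into the displacement so the index becomes a bare register again.  */
static void
fold_index_adjust (parts, adjust)
     struct ix86_addr_parts *parts;
     long adjust;
{
  parts->disp += adjust * parts->scale;
}

static long
effective_address (parts)
     struct ix86_addr_parts *parts;
{
  return parts->base + parts->index * parts->scale + parts->disp;
}
#endif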
4326 \f
4327 /* Return 1 if the first insn to set cc before INSN also sets the register
4328    REG_RTX; otherwise return 0.  */
4329 int
4330 last_to_set_cc (reg_rtx, insn)
4331 rtx reg_rtx, insn;
4332 {
4333 rtx prev_insn = PREV_INSN (insn);
4334
4335 while (prev_insn)
4336 {
4337 if (GET_CODE (prev_insn) == NOTE)
4338 ;
4339
4340 else if (GET_CODE (prev_insn) == INSN)
4341 {
4342 if (GET_CODE (PATTERN (prev_insn)) != SET)
4343 return (0);
4344
4345 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4346 {
4347 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
4348 return (1);
4349
4350 return (0);
4351 }
4352
4353 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4354 return (0);
4355 }
4356
4357 else
4358 return (0);
4359
4360 prev_insn = PREV_INSN (prev_insn);
4361 }
4362
4363 return (0);
4364 }
4365
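/* A sketch of the question last_to_set_cc answers.  On the i386 the
   arithmetic instructions set the condition codes as a side effect, so a
   following "test reg,reg" that only re-derives the flags can sometimes be
   dropped.  The helper below is hypothetical, kept under #if 0, and simply
   shows how a peephole-style check might phrase the query.  */
#if 0
static int
compare_with_zero_is_redundant (reg, test_insn)
     rtx reg, test_insn;
{
  /* Redundant when the most recent writer of REG before TEST_INSN was an
     insn whose source already sets the condition codes.  */
  return last_to_set_cc (reg, test_insn);
}
#endif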
4366 \f
4367 int
4368 doesnt_set_condition_code (pat)
4369 rtx pat;
4370 {
4371 switch (GET_CODE (pat))
4372 {
4373 case MEM:
4374 case REG:
4375 return (1);
4376
4377 default:
4378 return (0);
4379
4380 }
4381 }
4382
4383 \f
4384 int
4385 sets_condition_code (pat)
4386 rtx pat;
4387 {
4388 switch (GET_CODE (pat))
4389 {
4390 case PLUS:
4391 case MINUS:
4392 case AND:
4393 case IOR:
4394 case XOR:
4395 case NOT:
4396 case NEG:
4397 case MULT:
4398 case DIV:
4399 case MOD:
4400 case UDIV:
4401 case UMOD:
4402 return (1);
4403
4404 default:
4405 return (0);
4406
4407 }
4408 }
4409
4410 \f
4411 int
4412 str_immediate_operand (op, mode)
4413 register rtx op;
4414 enum machine_mode mode;
4415 {
4416 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
4417 {
4418 return (1);
4419 }
4420 return (0);
4421 }
4422
4423 \f
4424 int
4425 is_fp_insn (insn)
4426 rtx insn;
4427 {
4428 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4429 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4430 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4431 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4432 {
4433 return (1);
4434 }
4435
4436 return (0);
4437 }
4438
4439 /*
4440 Return 1 if the mode of the SET_DEST of insn is floating point
4441 and it is not an fld or a move from memory to memory.
4442 Otherwise return 0 */
4443 int
4444 is_fp_dest (insn)
4445 rtx insn;
4446 {
4447 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4448 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4449 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4450 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4451 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4452 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4453       && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
4454 {
4455 return (1);
4456 }
4457
4458 return (0);
4459 }
4460
4461 /*
4462    Return 1 if the mode of the SET_DEST is floating point, the SET_DEST
4463    is a MEM, and the source is a register.
4464 */
4465 int
4466 is_fp_store (insn)
4467 rtx insn;
4468 {
4469 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4470 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4471 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4472 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4473 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4474 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4475 {
4476 return (1);
4477 }
4478
4479 return (0);
4480 }
4481
4482 \f
4483 /*
4484 Return 1 if dep_insn sets a register which insn uses as a base
4485 or index to reference memory.
4486    Otherwise return 0.  */
4487
4488 int
4489 agi_dependent (insn, dep_insn)
4490 rtx insn, dep_insn;
4491 {
4492 if (GET_CODE (dep_insn) == INSN
4493 && GET_CODE (PATTERN (dep_insn)) == SET
4494 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4495 {
4496 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
4497 }
4498
4499 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4500 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4501 && push_operand (SET_DEST (PATTERN (dep_insn)),
4502 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4503 {
4504 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4505 }
4506
4507 return (0);
4508 }
4509
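/* A sketch of how this predicate is typically consumed: on the Pentium an
   address-generation interlock costs roughly an extra cycle when one insn
   computes a register that the next insn uses inside a memory address, so
   a scheduling-cost hook can make such dependences look more expensive.
   The function name and the flat "+ 1" below are assumptions for
   illustration only, kept under #if 0.  */
#if 0
static int
example_agi_adjust_cost (insn, dep_insn, cost)
     rtx insn, dep_insn;
     int cost;
{
  if (agi_dependent (insn, dep_insn))
    return cost + 1;          /* model the address-generation stall */
  return cost;
}
#endif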
4510 \f
4511 /*
4512    Return 1 if reg is used in rtl as a base or index for a memory ref;
4513    otherwise return 0.  */
4514
4515 int
4516 reg_mentioned_in_mem (reg, rtl)
4517 rtx reg, rtl;
4518 {
4519 register char *fmt;
4520 register int i;
4521 register enum rtx_code code;
4522
4523 if (rtl == NULL)
4524 return (0);
4525
4526 code = GET_CODE (rtl);
4527
4528 switch (code)
4529 {
4530 case HIGH:
4531 case CONST_INT:
4532 case CONST:
4533 case CONST_DOUBLE:
4534 case SYMBOL_REF:
4535 case LABEL_REF:
4536 case PC:
4537 case CC0:
4538 case SUBREG:
4539 return (0);
4540
4541
4542 }
4543
4544 if (code == MEM && reg_mentioned_p (reg, rtl))
4545 return (1);
4546
4547 fmt = GET_RTX_FORMAT (code);
4548 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4549 {
4550 if (fmt[i] == 'E')
4551 {
4552 register int j;
4553 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4554 {
4555 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4556 return 1;
4557 }
4558 }
4559
4560 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4561 return 1;
4562 }
4563
4564 return (0);
4565 }
4566 \f
4567 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4568
4569    operands[0] = result, initialized with the start address
4570    operands[1] = alignment of the address.
4571    operands[2] = scratch register, initialized with the start address when
4572                  not aligned, otherwise undefined
4573
4574    This is just the body. It needs the initialisations mentioned above and
4575    some address computing at the end.  These things are done in i386.md.  */
4576
4577 char *
4578 output_strlen_unroll (operands)
4579 rtx operands[];
4580 {
4581 rtx xops[18];
4582
4583 xops[0] = operands[0]; /* Result */
4584 /* operands[1]; * Alignment */
4585 xops[1] = operands[2]; /* Scratch */
4586 xops[2] = GEN_INT (0);
4587 xops[3] = GEN_INT (2);
4588 xops[4] = GEN_INT (3);
4589 xops[5] = GEN_INT (4);
4590 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4591 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4592 xops[8] = gen_label_rtx (); /* label of main loop */
4593   if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4594 xops[9] = gen_label_rtx (); /* pentium optimisation */
4595 xops[10] = gen_label_rtx (); /* end label 2 */
4596 xops[11] = gen_label_rtx (); /* end label 1 */
4597 xops[12] = gen_label_rtx (); /* end label */
4598 /* xops[13] * Temporary used */
4599 xops[14] = GEN_INT (0xff);
4600 xops[15] = GEN_INT (0xff00);
4601 xops[16] = GEN_INT (0xff0000);
4602 xops[17] = GEN_INT (0xff000000);
4603
4604 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4605
4606   /* is there a known alignment and is it less than 4 */
4607 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4608 {
4609 /* is there a known alignment and is it not 2 */
4610 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4611 {
4612 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4613 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4614
4615           /* leave just the two lower bits of the address */
4616           /* if this is a q-register, then the high part is used later */
4617           /* therefore use andl rather than andb */
4618 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4619           /* is aligned to 4-byte address when zero */
4620 output_asm_insn (AS1 (je,%l8), xops);
4621           /* side effect: the andl sets even parity when the result is 3 */
4622 output_asm_insn (AS1 (jp,%6), xops);
4623
4624 /* is it aligned to 2 bytes ? */
4625 if (QI_REG_P (xops[1]))
4626 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4627 else
4628 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4629 output_asm_insn (AS1 (je,%7), xops);
4630 }
4631 else
4632 {
4633 /* since the alignment is 2, we have to check 2 or 0 bytes */
4634
4635           /* check if it is aligned to 4 bytes */
4636 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4637           /* is aligned to 4-byte address when zero */
4638 output_asm_insn (AS1 (je,%l8), xops);
4639 }
4640
4641 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4642 /* now, compare the bytes */
4643       /* comparing against the high part of a q-reg gives shorter code */
4644 if (QI_REG_P (xops[1]))
4645 {
4646           /* compare the first n unaligned bytes on a byte-by-byte basis */
4647 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4648 /* when zero we reached the end */
4649 output_asm_insn (AS1 (je,%l12), xops);
4650 /* increment the address */
4651 output_asm_insn (AS1 (inc%L0,%0), xops);
4652
4653 /* not needed with an alignment of 2 */
4654 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4655 {
4656 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4657 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4658 output_asm_insn (AS1 (je,%l12), xops);
4659 output_asm_insn (AS1 (inc%L0,%0), xops);
4660
4661 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4662 }
4663 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4664 }
4665 else
4666 {
4667 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4668 output_asm_insn (AS1 (je,%l12), xops);
4669 output_asm_insn (AS1 (inc%L0,%0), xops);
4670
4671 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4672 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4673 output_asm_insn (AS1 (je,%l12), xops);
4674 output_asm_insn (AS1 (inc%L0,%0), xops);
4675
4676 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4677 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4678 }
4679 output_asm_insn (AS1 (je,%l12), xops);
4680 output_asm_insn (AS1 (inc%L0,%0), xops);
4681 }
4682
4683 /* Generate loop to check 4 bytes at a time */
4684   /* IMHO it is not a good idea to align this loop.  It only makes the */
4685   /* program larger and does not help to speed it up. */
4686 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4687 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
4688
4689 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4690 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4691
4692 if (QI_REG_P (xops[1]))
4693 {
4694       /* On the i586 it is faster to combine the hi and lo parts as
4695          a kind of lookahead.  If anding both yields zero, then one
4696          of them *could* be zero; otherwise neither of them is zero.
4697          This saves one instruction; on the i486 it is slower
4698          (tested with P-90, i486DX2-66, AMD486DX2-66). */
4699       if (TARGET_PENTIUM)
4700 {
4701 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4702 output_asm_insn (AS1 (jne,%l9), xops);
4703 }
4704
4705 /* check first byte */
4706 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4707 output_asm_insn (AS1 (je,%l12), xops);
4708
4709 /* check second byte */
4710 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4711 output_asm_insn (AS1 (je,%l11), xops);
4712
4713       if (TARGET_PENTIUM)
4714 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
4715 }
4716 else
4717 {
4718 /* check first byte */
4719 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4720 output_asm_insn (AS1 (je,%l12), xops);
4721
4722 /* check second byte */
4723 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4724 output_asm_insn (AS1 (je,%l11), xops);
4725 }
4726
4727 /* check third byte */
4728 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4729 output_asm_insn (AS1 (je,%l10), xops);
4730
4731 /* check fourth byte and increment address */
4732 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4733 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4734 output_asm_insn (AS1 (jne,%l8), xops);
4735
4736 /* now generate fixups when the compare stops within a 4-byte word */
4737 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4738
4739 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4740 output_asm_insn (AS1 (inc%L0,%0), xops);
4741
4742 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4743 output_asm_insn (AS1 (inc%L0,%0), xops);
4744
4745 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
4746
4747 RET;
4748 }
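/* A plain-C model of the scan the assembly above performs, assuming a
   little-endian 32-bit word and ignoring register-allocation details:
   advance byte by byte until the pointer is 4-byte aligned, then load a
   word per iteration and test each byte lane with the 0xff, 0xff00,
   0xff0000 and 0xff000000 masks.  The q-register "lookahead" corresponds
   to testing (lo & hi) before the individual lanes.  Kept under #if 0;
   it only models the generated code, it is not a replacement for it.  */
#if 0
#include <stddef.h>
#include <string.h>

static size_t
strlen_unrolled_model (s)
     const char *s;
{
  const char *p = s;
  unsigned int w;

  /* Check at most three leading bytes until P is 4-byte aligned.  */
  while (((size_t) p & 3) != 0)
    {
      if (*p == 0)
        return p - s;
      p++;
    }

  /* Check four bytes per iteration, one lane at a time.  */
  for (;;)
    {
      memcpy (&w, p, sizeof w);        /* the movl in the loop above */
      if ((w & 0xff) == 0)
        return p - s;
      if ((w & 0xff00) == 0)
        return p - s + 1;
      if ((w & 0xff0000) == 0)
        return p - s + 2;
      if ((w & 0xff000000) == 0)
        return p - s + 3;
      p += 4;
    }
}
#endif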