/* Subroutines for insn-output.cc for VAX.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "attribs.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "calls.h"
#include "varasm.h"
#include "conditions.h"
#include "output.h"
#include "expr.h"
#include "reload.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static void vax_option_override (void);
static bool vax_legitimate_address_p (machine_mode, rtx, bool,
				      code_helper = ERROR_MARK);
static void vax_file_start (void);
static void vax_init_libfuncs (void);
static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				 HOST_WIDE_INT, tree);
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, machine_mode, addr_space_t, bool);
static bool vax_rtx_costs (rtx, machine_mode, int, int, int *, bool);
static machine_mode vax_cc_modes_compatible (machine_mode, machine_mode);
static rtx_insn *vax_md_asm_adjust (vec<rtx> &, vec<rtx> &,
				    vec<machine_mode> &, vec<const char *> &,
				    vec<rtx> &, HARD_REG_SET &, location_t);
static rtx vax_function_arg (cumulative_args_t, const function_arg_info &);
static void vax_function_arg_advance (cumulative_args_t,
				      const function_arg_info &);
static rtx vax_struct_value_rtx (tree, int);
static bool vax_lra_p (void);
static void vax_asm_trampoline_template (FILE *);
static void vax_trampoline_init (rtx, tree, rtx);
static poly_int64 vax_return_pops_args (tree, tree, poly_int64);
static bool vax_mode_dependent_address_p (const_rtx, addr_space_t);
static HOST_WIDE_INT vax_starting_frame_offset (void);
\f
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START vax_file_start
#undef TARGET_ASM_FILE_START_APP_OFF
#define TARGET_ASM_FILE_START_APP_OFF true

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS vax_init_libfuncs

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Enable compare elimination pass.  */
#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM VAX_PSL_REGNUM

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS vax_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST vax_address_cost

/* Return the narrowest CC mode that spans both modes offered.  */
#undef TARGET_CC_MODES_COMPATIBLE
#define TARGET_CC_MODES_COMPATIBLE vax_cc_modes_compatible

/* Mark PSL as clobbered for compatibility with the CC0 representation.  */
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST vax_md_asm_adjust

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG vax_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx

#undef TARGET_LRA_P
#define TARGET_LRA_P vax_lra_p

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
#undef TARGET_MODE_DEPENDENT_ADDRESS_P
#define TARGET_MODE_DEPENDENT_ADDRESS_P vax_mode_dependent_address_p

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT vax_trampoline_init
#undef TARGET_RETURN_POPS_ARGS
#define TARGET_RETURN_POPS_ARGS vax_return_pops_args

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE vax_option_override

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET vax_starting_frame_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;
\f
/* Set global variables as needed for the options enabled.  */

static void
vax_option_override (void)
{
  /* We're VAX floating point, not IEEE floating point.  */
  if (TARGET_G_FLOAT)
    REAL_MODE_FORMAT (DFmode) = &vax_g_format;

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
}

static void
vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
{
  rtx x;

  x = plus_constant (Pmode, frame_pointer_rtx, offset);
  x = gen_rtx_MEM (SImode, x);
  x = gen_rtx_SET (x, src);
  add_reg_note (insn, REG_CFA_OFFSET, x);
}

/* Generate the assembly code for function entry.  FILE is a stdio
   stream to output the code to.  SIZE is an int: how many units of
   temporary storage to allocate.

   Refer to the array `regs_ever_live' to determine which registers to
   save; `regs_ever_live[I]' is nonzero if register number I is ever
   used in the function.  This function is responsible for knowing
   which registers should not be saved even if used.  */

void
vax_expand_prologue (void)
{
  int regno, offset;
  int mask = 0;
  HOST_WIDE_INT size;
  rtx insn;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (df_regs_ever_live_p (regno) && !call_used_or_fixed_reg_p (regno))
      mask |= 1 << regno;

  insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* The layout of the CALLG/S stack frame is as follows:

		<- CFA, AP
	r11
	r10
	...	Registers saved as specified by MASK
	r3
	r2
	return-addr
	old fp
	old ap
	old psw
	zero
		<- FP, SP

     The rest of the prologue will adjust the SP for the local frame.  */

  vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
  vax_add_reg_cfa_offset (insn, 12, pc_rtx);

  offset = 16;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (mask & (1 << regno))
      {
	vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
	offset += 4;
      }

  /* Because add_reg_note pushes the notes, adding this last means that
     it will be processed first.  This is required to allow the other
     notes to be interpreted properly.  */
  add_reg_note (insn, REG_CFA_DEF_CFA,
		plus_constant (Pmode, frame_pointer_rtx, offset));

  /* Allocate the local stack frame.  */
  size = get_frame_size ();
  size -= vax_starting_frame_offset ();
  emit_insn (gen_addsi3 (stack_pointer_rtx,
			 stack_pointer_rtx, GEN_INT (-size)));

  /* Do not allow instructions referencing local stack memory to be
     scheduled before the frame is allocated.  This is more pedantic
     than anything else, given that VAX does not currently have a
     scheduling description.  */
  emit_insn (gen_blockage ());
}

/* When debugging with stabs, we want to output an extra dummy label
   so that gas can distinguish between D_float and G_float prior to
   processing the .stabs directive identifying type double.  */
static void
vax_file_start (void)
{
  default_file_start ();
}

/* We can use the BSD C library routines for the libgcc calls that are
   still generated, since that's what they boil down to anyway.  For
   ELF, avoid the user's namespace.  */

static void
vax_init_libfuncs (void)
{
  if (TARGET_BSD_DIVMOD)
    {
      set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
      set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
    }
}

/* Split the N quadword (DImode) OPERANDS of INSN, a CODE operation, into
   longword (SImode) halves: the low halves are returned in LOW and
   OPERANDS is overwritten with the high halves, with pre-decrement and
   post-increment memory addresses handled appropriately.  */
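/* As a hand-worked illustration (not from the original source): for a
   DImode value held in the register pair r0/r1, operand_subword yields
   r0 -- the least significant longword, the VAX being little-endian --
   for the LOW half, and r1 for the high half left in OPERANDS.  */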

static void
split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
			 rtx * low, int n)
{
  int i;

  for (i = 0; i < n; i++)
    low[i] = 0;

  for (i = 0; i < n; i++)
    {
      if (MEM_P (operands[i])
	  && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
	{
	  rtx addr = XEXP (operands[i], 0);
	  operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
	}
      else if (optimize_size && MEM_P (operands[i])
	       && REG_P (XEXP (operands[i], 0))
	       && (code != MINUS || operands[1] != const0_rtx)
	       && find_regno_note (insn, REG_DEAD,
				   REGNO (XEXP (operands[i], 0))))
	{
	  low[i] = gen_rtx_MEM (SImode,
				gen_rtx_POST_INC (Pmode,
						  XEXP (operands[i], 0)));
	  operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
	}
      else
	{
	  low[i] = operand_subword (operands[i], 0, 0, DImode);
	  operands[i] = operand_subword (operands[i], 1, 0, DImode);
	}
    }
}
\f
void
print_operand_address (FILE * file, rtx addr)
{
  rtx orig = addr;
  rtx reg1, breg, ireg;
  rtx offset;

 retry:
  switch (GET_CODE (addr))
    {
    case MEM:
      fprintf (file, "*");
      addr = XEXP (addr, 0);
      goto retry;

    case REG:
      fprintf (file, "(%s)", reg_names[REGNO (addr)]);
      break;

    case PRE_DEC:
      fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case POST_INC:
      fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
      break;

    case PLUS:
      /* There can be either two or three things added here.  One must be a
	 REG.  One can be either a REG or a MULT/ASHIFT of a REG and an
	 appropriate constant, and the third can only be a constant or a MEM.

	 We get these two or three things and put the constant or MEM in
	 OFFSET, the MULT/ASHIFT or REG in IREG, and the REG in BREG.  If we
	 have a register and can't tell yet if it is a base or index register,
	 put it into REG1.  */

      reg1 = 0; ireg = 0; breg = 0; offset = 0;

      if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	  || MEM_P (XEXP (addr, 0)))
	{
	  offset = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	       || MEM_P (XEXP (addr, 1)))
	{
	  offset = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 1)) == MULT
	       || GET_CODE (XEXP (addr, 1)) == ASHIFT)
	{
	  ireg = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (GET_CODE (XEXP (addr, 0)) == MULT
	       || GET_CODE (XEXP (addr, 0)) == ASHIFT)
	{
	  ireg = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else if (REG_P (XEXP (addr, 1)))
	{
	  reg1 = XEXP (addr, 1);
	  addr = XEXP (addr, 0);
	}
      else if (REG_P (XEXP (addr, 0)))
	{
	  reg1 = XEXP (addr, 0);
	  addr = XEXP (addr, 1);
	}
      else
	gcc_unreachable ();

      if (REG_P (addr))
	{
	  if (reg1)
	    ireg = addr;
	  else
	    reg1 = addr;
	}
      else if (GET_CODE (addr) == MULT || GET_CODE (addr) == ASHIFT)
	ireg = addr;
      else
	{
	  gcc_assert (GET_CODE (addr) == PLUS);
	  if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
	      || MEM_P (XEXP (addr, 0)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 0),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 0)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 0)));
		    }
		}
	      offset = XEXP (addr, 0);
	    }
	  else if (REG_P (XEXP (addr, 0)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
	      else
		reg1 = XEXP (addr, 0);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT
			  || GET_CODE (XEXP (addr, 0)) == ASHIFT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 0);
	    }

	  if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
	      || MEM_P (XEXP (addr, 1)))
	    {
	      if (offset)
		{
		  if (CONST_INT_P (offset))
		    offset = plus_constant (Pmode, XEXP (addr, 1),
					    INTVAL (offset));
		  else
		    {
		      gcc_assert (CONST_INT_P (XEXP (addr, 1)));
		      offset = plus_constant (Pmode, offset,
					      INTVAL (XEXP (addr, 1)));
		    }
		}
	      offset = XEXP (addr, 1);
	    }
	  else if (REG_P (XEXP (addr, 1)))
	    {
	      if (reg1)
		ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
	      else
		reg1 = XEXP (addr, 1);
	    }
	  else
	    {
	      gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT
			  || GET_CODE (XEXP (addr, 1)) == ASHIFT);
	      gcc_assert (!ireg);
	      ireg = XEXP (addr, 1);
	    }
	}

      /* If REG1 is nonzero, figure out if it is a base or index register.  */
      if (reg1)
	{
	  if (breg
	      || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
	      || (offset
		  && (MEM_P (offset)
		      || (flag_pic && symbolic_operand (offset, SImode)))))
	    {
	      gcc_assert (!ireg);
	      ireg = reg1;
	    }
	  else
	    breg = reg1;
	}

      if (offset != 0)
	{
	  if (flag_pic && symbolic_operand (offset, SImode))
	    {
	      if (breg && ireg)
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol used with both base and indexed registers");
		}

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	      if (flag_pic > 1 && GET_CODE (offset) == CONST
		  && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
		  && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
		{
		  debug_rtx (orig);
		  output_operand_lossage ("symbol with offset used in PIC mode");
		}
#endif

	      /* symbol(reg) isn't PIC, but symbol[reg] is.  */
	      if (breg)
		{
		  ireg = breg;
		  breg = 0;
		}

	    }

	  output_address (VOIDmode, offset);
	}

      if (breg != 0)
	fprintf (file, "(%s)", reg_names[REGNO (breg)]);

      if (ireg != 0)
	{
	  if (GET_CODE (ireg) == MULT || GET_CODE (ireg) == ASHIFT)
	    ireg = XEXP (ireg, 0);
	  gcc_assert (REG_P (ireg));
	  fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
	}
      break;

    default:
      output_addr_const (file, addr);
    }
}

void
print_operand (FILE *file, rtx x, int code)
{
  if (code == '#')
    fputc (ASM_DOUBLE_CHAR, file);
  else if (code == '|')
    fputs (REGISTER_PREFIX, file);
  else if (code == 'k')
    fputs (cond_name (x), file);
  else if (code == 'K')
    fputs (rev_cond_name (x), file);
  else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
    fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
  else if (code == 'P' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
  else if (code == 'N' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
  /* rotl instruction cannot deal with negative arguments.  */
  else if (code == 'R' && CONST_INT_P (x))
    fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
  else if (code == 'H' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
  else if (code == 'h' && CONST_INT_P (x))
    fprintf (file, "$%d", (short) - INTVAL (x));
  else if (code == 'B' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
  else if (code == 'b' && CONST_INT_P (x))
    fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
  else if (code == 'M' && CONST_INT_P (x))
    fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
  else if (code == 'x' && CONST_INT_P (x))
    fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
  else if (REG_P (x))
    fprintf (file, "%s", reg_names[REGNO (x)]);
  else if (MEM_P (x))
    output_address (GET_MODE (x), XEXP (x, 0));
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0f%s", dstr);
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      char dstr[30];
      real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
		       sizeof (dstr), 0, 1);
      fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
    }
  else
    {
      if (flag_pic > 1 && symbolic_operand (x, SImode))
	{
	  debug_rtx (x);
	  output_operand_lossage ("symbol used as immediate operand");
	}
      putc ('$', file);
      output_addr_const (file, x);
    }
}
\f
const char *
cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case NE:
      return "neq";
    case EQ:
      return "eql";
    case GE:
      return "geq";
    case GT:
      return "gtr";
    case LE:
      return "leq";
    case LT:
      return "lss";
    case GEU:
      return "gequ";
    case GTU:
      return "gtru";
    case LEU:
      return "lequ";
    case LTU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

const char *
rev_cond_name (rtx op)
{
  switch (GET_CODE (op))
    {
    case EQ:
      return "neq";
    case NE:
      return "eql";
    case LT:
      return "geq";
    case LE:
      return "gtr";
    case GT:
      return "leq";
    case GE:
      return "lss";
    case LTU:
      return "gequ";
    case LEU:
      return "gtru";
    case GTU:
      return "lequ";
    case GEU:
      return "lssu";

    default:
      gcc_unreachable ();
    }
}

static bool
vax_float_literal (rtx c)
{
  machine_mode mode;
  const REAL_VALUE_TYPE *r;
  REAL_VALUE_TYPE s;
  int i;

  if (GET_CODE (c) != CONST_DOUBLE)
    return false;

  mode = GET_MODE (c);

  if (c == const_tiny_rtx[(int) mode][0]
      || c == const_tiny_rtx[(int) mode][1]
      || c == const_tiny_rtx[(int) mode][2])
    return true;

  r = CONST_DOUBLE_REAL_VALUE (c);

  for (i = 0; i < 7; i++)
    {
      int x = 1 << i;
      bool ok;
      real_from_integer (&s, mode, x, SIGNED);

      if (real_equal (r, &s))
	return true;
      ok = exact_real_inverse (mode, &s);
      gcc_assert (ok);
      if (real_equal (r, &s))
	return true;
    }
  return false;
}


/* Return the cost in cycles of a memory address, relative to register
   indirect.

   Each of the following adds the indicated number of cycles:

	1 - symbolic address
	1 - pre-decrement
	1 - indexing and/or offset(register)
	2 - indirect  */
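/* A few hand-worked examples under this model (illustrative, not from
   the original source): "4(r1)" costs 1 (one register; a byte
   displacement is free); "*4(r1)" costs 3 (the indirection adds 2);
   "sym[r2]" costs 2 (symbolic offset plus indexing); and
   "4096(r1)[r2]" costs 2, because the special case at the end lets
   indexing and register+offset combine at no extra cost.  */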


static int
vax_address_cost_1 (rtx addr)
{
  int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
  rtx plus_op0 = 0, plus_op1 = 0;
 restart:
  switch (GET_CODE (addr))
    {
    case PRE_DEC:
      predec = 1;
      /* FALLTHRU */
    case REG:
    case SUBREG:
    case POST_INC:
      reg = 1;
      break;
    case MULT:
    case ASHIFT:
      indexed = 1;	/* 2 on VAX 2 */
      break;
    case CONST_INT:
      /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
      if (offset == 0)
	offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
      break;
    case CONST:
    case SYMBOL_REF:
      offset = 1;	/* 2 on VAX 2 */
      break;
    case LABEL_REF:	/* this is probably a byte offset from the pc */
      if (offset == 0)
	offset = 1;
      break;
    case PLUS:
      if (plus_op0)
	plus_op1 = XEXP (addr, 0);
      else
	plus_op0 = XEXP (addr, 0);
      addr = XEXP (addr, 1);
      goto restart;
    case MEM:
      indir = 2;	/* 3 on VAX 2 */
      addr = XEXP (addr, 0);
      goto restart;
    default:
      break;
    }

  /* Up to 3 things can be added in an address.  They are stored in
     plus_op0, plus_op1, and addr.  */

  if (plus_op0)
    {
      addr = plus_op0;
      plus_op0 = 0;
      goto restart;
    }
  if (plus_op1)
    {
      addr = plus_op1;
      plus_op1 = 0;
      goto restart;
    }
  /* Indexing and register+offset can both be used (except on a VAX 2)
     without increasing execution time over either one alone.  */
  if (reg && indexed && offset)
    return reg + indir + offset + predec;
  return reg + indexed + indir + offset + predec;
}

static int
vax_address_cost (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
		  addr_space_t as ATTRIBUTE_UNUSED,
		  bool speed ATTRIBUTE_UNUSED)
{
  return COSTS_N_INSNS (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
}

/* Cost of an expression on a VAX.  This version has costs tuned for the
   CVAX chip (found in the VAX 3 series) with comments for variations on
   other models.

   FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
   and FLOAT_TRUNCATE.  We need a -mcpu option to allow provision of
   costs on a per cpu basis.  */

static bool
vax_rtx_costs (rtx x, machine_mode mode, int outer_code,
	       int opno ATTRIBUTE_UNUSED,
	       int *total, bool speed ATTRIBUTE_UNUSED)
{
  enum rtx_code code = GET_CODE (x);
  int i = 0;				   /* may be modified in switch */
  const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */

  switch (code)
    {
      /* On a VAX, constants from 0..63 are cheap because they can use the
	 1 byte literal constant format.  Compare to -1 should be made cheap
	 so that decrement-and-branch insns can be formed more easily (if
	 the value -1 is copied to a register some decrement-and-branch
	 patterns will not match).  */
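      /* For instance (an illustrative note, not from the original source):
	 (const_int 63) fits the short literal format and gets the single
	 cheap-insn cost below, while (const_int 64) falls through to the
	 3-insn cost shared with symbolic constants.  */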
    case CONST_INT:
      if (INTVAL (x) == 0)
	{
	  *total = COSTS_N_INSNS (1) / 2;
	  return true;
	}
      if (outer_code == AND)
	{
	  *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077
		    ? COSTS_N_INSNS (1) : COSTS_N_INSNS (2));
	  return true;
	}
      if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
	  || (outer_code == COMPARE
	      && INTVAL (x) == -1)
	  || ((outer_code == PLUS || outer_code == MINUS)
	      && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (3);
      return true;

    case CONST_DOUBLE:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	*total = vax_float_literal (x) ? COSTS_N_INSNS (5) : COSTS_N_INSNS (8);
      else
	*total = ((CONST_DOUBLE_HIGH (x) == 0
		   && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
		  || (outer_code == PLUS
		      && CONST_DOUBLE_HIGH (x) == -1
		      && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64)
		  ? COSTS_N_INSNS (2) : COSTS_N_INSNS (5));
      return true;

    case POST_INC:
      *total = COSTS_N_INSNS (2);
      return true;		/* Implies register operand.  */

    case PRE_DEC:
      *total = COSTS_N_INSNS (3);
      return true;		/* Implies register operand.  */

    case MULT:
      switch (mode)
	{
	case E_DFmode:
	  *total = COSTS_N_INSNS (16);	/* 4 on VAX 9000 */
	  break;
	case E_SFmode:
	  *total = COSTS_N_INSNS (9);	/* 4 on VAX 9000, 12 on VAX 2 */
	  break;
	case E_DImode:
	  *total = COSTS_N_INSNS (16);	/* 6 on VAX 9000, 28 on VAX 2 */
	  break;
	case E_SImode:
	case E_HImode:
	case E_QImode:
	  *total = COSTS_N_INSNS (10);	/* 3-4 on VAX 9000, 20-28 on VAX 2 */
	  break;
	default:
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      break;

    case UDIV:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = COSTS_N_INSNS (17);
      break;

    case DIV:
      if (mode == DImode)
	*total = COSTS_N_INSNS (30);	/* Highly variable.  */
      else if (mode == DFmode)
	/* divide takes 28 cycles if the result is not zero, 13 otherwise */
	*total = COSTS_N_INSNS (24);
      else
	*total = COSTS_N_INSNS (11);	/* 25 on VAX 2 */
      break;

    case MOD:
      *total = COSTS_N_INSNS (23);
      break;

    case UMOD:
      if (mode != SImode)
	{
	  *total = MAX_COST;	/* Mode is not supported.  */
	  return true;
	}
      *total = COSTS_N_INSNS (29);
      break;

    case FLOAT:
      *total = COSTS_N_INSNS (6		/* 4 on VAX 9000 */
			      + (mode == DFmode)
			      + (GET_MODE (XEXP (x, 0)) != SImode));
      break;

    case FIX:
      *total = COSTS_N_INSNS (7);	/* 17 on VAX 2 */
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
      if (mode == DImode)
	*total = COSTS_N_INSNS (12);
      else
	*total = COSTS_N_INSNS (10);	/* 6 on VAX 9000 */
      break;

    case ROTATE:
    case ROTATERT:
      *total = COSTS_N_INSNS (6);	/* 5 on VAX 2, 4 on VAX 9000 */
      if (CONST_INT_P (XEXP (x, 1)))
	fmt = "e";	/* all constant rotate counts are short */
      break;

    case PLUS:
    case MINUS:
      *total = (mode == DFmode		/* 6/8 on VAX 9000, 16/15 on VAX 2 */
		? COSTS_N_INSNS (13) : COSTS_N_INSNS (8));
      /* Small integer operands can use subl2 and addl2.  */
      if ((CONST_INT_P (XEXP (x, 1)))
	  && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
	fmt = "e";
      break;

    case IOR:
    case XOR:
      *total = COSTS_N_INSNS (3);
      break;

    case AND:
      /* AND is special because the first operand is complemented.  */
      *total = COSTS_N_INSNS (3);
      if (CONST_INT_P (XEXP (x, 0)))
	{
	  if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
	    *total = COSTS_N_INSNS (4);
	  fmt = "e";
	  i = 1;
	}
      break;

    case NEG:
      if (mode == DFmode)
	*total = COSTS_N_INSNS (9);
      else if (mode == SFmode)
	*total = COSTS_N_INSNS (6);
      else if (mode == DImode)
	*total = COSTS_N_INSNS (4);
      else
	*total = COSTS_N_INSNS (2);
      break;

    case NOT:
      *total = COSTS_N_INSNS (2);
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      *total = COSTS_N_INSNS (15);
      break;

    case MEM:
      if (mode == DImode || mode == DFmode)
	*total = COSTS_N_INSNS (5);	/* 7 on VAX 2 */
      else
	*total = COSTS_N_INSNS (3);	/* 4 on VAX 2 */
      x = XEXP (x, 0);
      if (!REG_P (x) && GET_CODE (x) != POST_INC)
	*total += COSTS_N_INSNS (vax_address_cost_1 (x));
      return true;

    case FLOAT_EXTEND:
    case FLOAT_TRUNCATE:
    case TRUNCATE:
      *total = COSTS_N_INSNS (3);	/* FIXME: Costs need to be checked  */
      break;

    default:
      return false;
    }

  /* Now look inside the expression.  Operands which are not registers or
     short constants add to the cost.

     FMT and I may have been adjusted in the switch above for instructions
     which require special handling.  */

  while (*fmt++ == 'e')
    {
      rtx op = XEXP (x, i);

      i += 1;
      code = GET_CODE (op);

      /* A NOT is likely to be found as the first operand of an AND
	 (in which case the relevant cost is of the operand inside
	 the not) and not likely to be found anywhere else.  */
      if (code == NOT)
	op = XEXP (op, 0), code = GET_CODE (op);

      switch (code)
	{
	case CONST_INT:
	  if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
	      && mode != QImode)
	    *total += COSTS_N_INSNS (1);	/* 2 on VAX 2 */
	  break;
	case CONST:
	case LABEL_REF:
	case SYMBOL_REF:
	  *total += COSTS_N_INSNS (1);		/* 2 on VAX 2 */
	  break;
	case CONST_DOUBLE:
	  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
	    {
	      /* Registers are faster than floating point constants -- even
		 those constants which can be encoded in a single byte.  */
	      if (vax_float_literal (op))
		*total += COSTS_N_INSNS (1);
	      else
		*total += (GET_MODE (x) == DFmode
			   ? COSTS_N_INSNS (3) : COSTS_N_INSNS (2));
	    }
	  else
	    {
	      if (CONST_DOUBLE_HIGH (op) != 0
		  || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
		*total += COSTS_N_INSNS (2);
	    }
	  break;
	case MEM:
	  *total += COSTS_N_INSNS (1);		/* 2 on VAX 2 */
	  if (!REG_P (XEXP (op, 0)))
	    *total += COSTS_N_INSNS (vax_address_cost_1 (XEXP (op, 0)));
	  break;
	case REG:
	case SUBREG:
	  break;
	default:
	  *total += COSTS_N_INSNS (1);
	  break;
	}
    }
  return true;
}
\f
/* With ELF we do not support GOT entries for external `symbol+offset'
   references, so do not accept external symbol references if an offset
   is to be added.  Do not accept external symbol references at all if
   LOCAL_P is set.  This is for cases where making a reference indirect
   would make it invalid.  Do not accept any kind of symbols if SYMBOL_P
   is clear.  This is for situations where a reference is used as an
   immediate value for operations other than address loads (MOVA/PUSHA),
   as those operations do not support PC-relative immediates.  */

bool
vax_acceptable_pic_operand_p (rtx x ATTRIBUTE_UNUSED,
			      bool local_p ATTRIBUTE_UNUSED,
			      bool symbol_p ATTRIBUTE_UNUSED)
{
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    {
      x = XEXP (XEXP (x, 0), 0);
      local_p = true;
    }
  switch (GET_CODE (x))
    {
    case SYMBOL_REF:
      return symbol_p && !(local_p && !SYMBOL_REF_LOCAL_P (x));
    case LABEL_REF:
      return symbol_p && !(local_p && LABEL_REF_NONLOCAL_P (x));
    default:
      break;
    }
#endif
  return true;
}
\f
/* Given a comparison code (NE, EQ, etc.) and the operands of a COMPARE,
   return the mode to be used for the comparison.  As we have the same
   interpretation of condition codes across all the instructions we just
   return the narrowest mode suitable for the comparison code requested.  */

extern machine_mode
vax_select_cc_mode (enum rtx_code op,
		    rtx x ATTRIBUTE_UNUSED, rtx y ATTRIBUTE_UNUSED)
{
  switch (op)
    {
    default:
      gcc_unreachable ();
    case NE:
    case EQ:
      return CCZmode;
    case GE:
    case LT:
      return CCNmode;
    case GT:
    case LE:
      return CCNZmode;
    case GEU:
    case GTU:
    case LEU:
    case LTU:
      return CCmode;
    }
}

/* Return the narrowest CC mode that spans both modes offered.  If they
   intersect, this will be the wider of the two, and if they do not then
   find one that is a superset of both (i.e. CCNZmode for a pair
   consisting of CCNmode and CCZmode).  A wider CC writer will satisfy
   a narrower CC reader, e.g. a comparison operator that uses CCZmode
   can use a CCNZmode output of a previous instruction.  */

static machine_mode
vax_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  switch (m1)
    {
    default:
      gcc_unreachable ();
    case E_CCmode:
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	case E_CCNZmode:
	case E_CCNmode:
	case E_CCZmode:
	  return m1;
	}
    case E_CCNZmode:
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	  return m2;
	case E_CCNmode:
	case E_CCNZmode:
	case E_CCZmode:
	  return m1;
	}
    case E_CCNmode:
    case E_CCZmode:
      switch (m2)
	{
	default:
	  gcc_unreachable ();
	case E_CCmode:
	case E_CCNZmode:
	  return m2;
	case E_CCNmode:
	case E_CCZmode:
	  return m1 == m2 ? m1 : E_CCNZmode;
	}
    }
}
\f
/* Mark PSL as clobbered for compatibility with the CC0 representation.  */

static rtx_insn *
vax_md_asm_adjust (vec<rtx> &outputs ATTRIBUTE_UNUSED,
		   vec<rtx> &inputs ATTRIBUTE_UNUSED,
		   vec<machine_mode> &input_modes ATTRIBUTE_UNUSED,
		   vec<const char *> &constraints ATTRIBUTE_UNUSED,
		   vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs,
		   location_t /*loc*/)
{
  clobbers.safe_push (gen_rtx_REG (CCmode, VAX_PSL_REGNUM));
  SET_HARD_REG_BIT (clobbered_regs, VAX_PSL_REGNUM);
  return NULL;
}
\f
/* Output code to add DELTA to the first argument, and then jump to FUNCTION.
   Used for C++ multiple inheritance.
	.mask	^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11>  #conservative entry mask
	addl2	$DELTA, 4(ap)	#adjust first argument
	jmp	FUNCTION+2	#jump beyond FUNCTION's entry mask
 */

static void
vax_output_mi_thunk (FILE * file,
		     tree thunk ATTRIBUTE_UNUSED,
		     HOST_WIDE_INT delta,
		     HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
		     tree function)
{
  const char *fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk));

  assemble_start_function (thunk, fnname);
  fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
  asm_fprintf (file, ",4(%Rap)\n");
  fprintf (file, "\tjmp ");
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  fprintf (file, "+2\n");
  assemble_end_function (thunk, fnname);
}
\f
static rtx
vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		      int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
}

/* Return true if we use LRA instead of reload pass.  */

static bool
vax_lra_p (void)
{
  return TARGET_LRA;
}

/* Output integer move instructions.  */

bool
vax_maybe_split_dimode_move (rtx *operands)
{
  return (TARGET_QMATH
	  && (!MEM_P (operands[0])
	      || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
	      || GET_CODE (XEXP (operands[0], 0)) == POST_INC
	      || !illegal_addsub_di_memory_operand (operands[0], DImode))
	  && ((CONST_INT_P (operands[1])
	       && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	      || GET_CODE (operands[1]) == CONST_DOUBLE));
}

const char *
vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
		     machine_mode mode)
{
  rtx hi[3], lo[3];
  const char *pattern_hi, *pattern_lo;
  bool push_p;

  switch (mode)
    {
    case E_DImode:
      if (operands[1] == const0_rtx)
	return "clrq %0";
      if (TARGET_QMATH && optimize_size
	  && (CONST_INT_P (operands[1])
	      || GET_CODE (operands[1]) == CONST_DOUBLE))
	{
	  unsigned HOST_WIDE_INT hval, lval;
	  int n;

	  if (GET_CODE (operands[1]) == CONST_DOUBLE)
	    {
	      gcc_assert (HOST_BITS_PER_WIDE_INT != 64);

	      /* Make sure only the low 32 bits are valid.  */
	      lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
	      hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
	    }
	  else
	    {
	      lval = INTVAL (operands[1]);
	      hval = 0;
	    }

	  /* Here we check whether the 64-bit value is really a 6-bit value
	     shifted left by some arbitrary amount.  If so, we can use ashq
	     to shift it to the correct value, saving 7 bytes (1 addr-mode
	     byte + 8 immediate bytes - 1 shift byte - 1 short literal
	     byte).  */
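	  /* A hand-worked illustration (not from the original source): the
	     constant 0x140000000 is 5 << 30, so LVAL reduces to 5 with
	     N = 30, and the template below emits "ashq $30,$5,dst" in
	     place of a full 8-byte immediate.  */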
	  if (lval != 0
	      && (n = exact_log2 (lval & (- lval))) != -1
	      && (lval >> n) < 64)
	    {
	      lval >>= n;

	      /* On 32-bit platforms, if the 6 bits didn't overflow into the
		 upper 32-bit value, that value had better be 0.  If we have
		 overflowed, make sure it wasn't by too much.  */
	      if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
		{
		  if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
		    n = 0;	/* failure */
		  else
		    lval |= hval << (32 - n);
		}
	      /* If n is 0, then ashq is not the best way to emit this.  */
	      if (n > 0)
		{
		  operands[1] = GEN_INT (lval);
		  operands[2] = GEN_INT (n);
		  return "ashq %2,%D1,%0";
		}
#if HOST_BITS_PER_WIDE_INT == 32
	    }
	  /* On 32-bit platforms, if the low 32-bit value is 0, check the
	     upper 32-bit value.  */
	  else if (hval != 0
		   && (n = exact_log2 (hval & (- hval)) - 1) != -1
		   && (hval >> n) < 64)
	    {
	      operands[1] = GEN_INT (hval >> n);
	      operands[2] = GEN_INT (n + 32);
	      return "ashq %2,%D1,%0";
#endif
	    }
	}

      if (vax_maybe_split_dimode_move (operands))
	{
	  hi[0] = operands[0];
	  hi[1] = operands[1];

	  split_quadword_operands (insn, SET, hi, lo, 2);

	  pattern_lo = vax_output_int_move (NULL, lo, SImode);
	  pattern_hi = vax_output_int_move (NULL, hi, SImode);

	  /* If the patterns are just movl/movl or pushl/pushl, then a movq
	     will be shorter (1 opcode byte + 1 addr-mode byte + 8 immediate
	     value bytes vs. 2 opcode bytes + 2 addr-mode bytes + 8 immediate
	     value bytes).  */
	  if ((startswith (pattern_lo, "movl")
	       && startswith (pattern_hi, "movl"))
	      || (startswith (pattern_lo, "pushl")
		  && startswith (pattern_hi, "pushl")))
	    return "movq %1,%0";

	  if (MEM_P (operands[0])
	      && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
	    {
	      output_asm_insn (pattern_hi, hi);
	      operands[0] = lo[0];
	      operands[1] = lo[1];
	      operands[2] = lo[2];
	      return pattern_lo;
	    }
	  else
	    {
	      output_asm_insn (pattern_lo, lo);
	      operands[0] = hi[0];
	      operands[1] = hi[1];
	      operands[2] = hi[2];
	      return pattern_hi;
	    }
	}
      return "movq %1,%0";

    case E_SImode:
      push_p = push_operand (operands[0], SImode);

      if (symbolic_operand (operands[1], SImode))
	return push_p ? "pushab %a1" : "movab %a1,%0";

      if (operands[1] == const0_rtx)
	return push_p ? "pushl %1" : "clrl %0";

      if (CONST_INT_P (operands[1])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  int n;
	  if ((unsigned HOST_WIDE_INT)(~i) < 64)
	    return "mcoml %N1,%0";
	  if ((unsigned HOST_WIDE_INT)i < 0x100)
	    return "movzbl %1,%0";
	  if (i >= -0x80 && i < 0)
	    return "cvtbl %1,%0";
	  if (optimize_size
	      && (n = exact_log2 (i & (-i))) != -1
	      && ((unsigned HOST_WIDE_INT)i >> n) < 64)
	    {
	      operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
	      operands[2] = GEN_INT (n);
	      return "ashl %2,%1,%0";
	    }
	  if ((unsigned HOST_WIDE_INT)i < 0x10000)
	    return "movzwl %1,%0";
	  if (i >= -0x8000 && i < 0)
	    return "cvtwl %1,%0";
	}
      return push_p ? "pushl %1" : "movl %1,%0";

    case E_HImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrw %0";
	  else if ((unsigned HOST_WIDE_INT)i < 64)
	    return "movw %1,%0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomw %H1,%0";
	  else if ((unsigned HOST_WIDE_INT)i < 256)
	    return "movzbw %1,%0";
	  else if (i >= -0x80 && i < 0)
	    return "cvtbw %1,%0";
	}
      return "movw %1,%0";

    case E_QImode:
      if (CONST_INT_P (operands[1]))
	{
	  HOST_WIDE_INT i = INTVAL (operands[1]);
	  if (i == 0)
	    return "clrb %0";
	  else if ((unsigned HOST_WIDE_INT)~i < 64)
	    return "mcomb %B1,%0";
	}
      return "movb %1,%0";

    default:
      gcc_unreachable ();
    }
}

/* Output integer add instructions.

   The space-time-opcode tradeoffs for addition vary by model of VAX.

   On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
   but it is not faster on other models.

   "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
   faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
   a register is used in an address too soon after it is set.
   Compromise by using movab only when it is shorter than the add,
   or when the base register in the address is one of sp, ap, or fp,
   which are not modified very often.  */

const char *
vax_output_int_add (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;
	bool sub;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, PLUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
	    gcc_assert (!flag_pic
			|| !non_pic_external_memory_operand (low[2], SImode));
	    gcc_assert (!flag_pic
			|| !non_pic_external_memory_operand (low[0], SImode));
#endif

	    /* No reason to add a 0 to the low part and thus no carry, so just
	       emit the appropriate add/sub instruction.  */
	    if (low[2] == const0_rtx)
	      return vax_output_int_add (NULL, operands, SImode);

	    /* Are we doing addition or subtraction?  */
	    sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;

	    /* We can't use vax_output_int_add since some of the patterns
	       don't modify the carry bit.  */
	    if (sub)
	      {
		if (low[2] == constm1_rtx)
		  pattern = "decl %0";
		else
		  pattern = "subl2 $%n2,%0";
	      }
	    else
	      {
		if (low[2] == const1_rtx)
		  pattern = "incl %0";
		else
		  pattern = "addl2 %2,%0";
	      }
	    output_asm_insn (pattern, low);

	    /* In 2's complement, -n = ~n + 1.  Since we are dealing with
	       two 32-bit parts, we complement each and then add one to the
	       low part.  We know that the low part can't overflow, since
	       its value can never be 0.  */
	    if (sub)
	      return "sbwc %N2,%0";
	    return "adwc %2,%0";
	  }

	/* Add low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      /* Should examine operand, punt if not POST_INC.  */
	      pattern = "tstl %0", carry = 0;
	    else if (low[2] == const1_rtx)
	      pattern = "incl %0";
	    else
	      pattern = "addl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == const0_rtx)
	      pattern = "movl %1,%0", carry = 0;
	    else
	      pattern = "addl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (!carry)
	  /* If CARRY is 0, we don't have any carry value to worry about.  */
	  return get_insn_template (CODE_FOR_addsi3, insn);
	/* %0 = C + %1 + %2 */
	if (!rtx_equal_p (operands[0], operands[1]))
	  output_asm_insn ((operands[1] == const0_rtx
			    ? "clrl %0"
			    : "movl %1,%0"), operands);
	return "adwc %2,%0";
      }

    case E_SImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incl %0";
	  if (operands[2] == constm1_rtx)
	    return "decl %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subl2 $%n2,%0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	      && REG_P (operands[1])
	      && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
		  || REGNO (operands[1]) > 11))
	    return "movab %c2(%1),%0";
	  if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
	    return "movab %a2[%0],%0";
	  return "addl2 %2,%0";
	}

      if (rtx_equal_p (operands[0], operands[2]))
	{
	  if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
	    return "movab %a1[%0],%0";
	  return "addl2 %1,%0";
	}

      if (CONST_INT_P (operands[2])
	  && INTVAL (operands[2]) < 32767
	  && INTVAL (operands[2]) > -32768
	  && REG_P (operands[1])
	  && push_operand (operands[0], SImode))
	return "pushab %c2(%1)";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subl3 $%n2,%1,%0";

      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
	  && REG_P (operands[1])
	  && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
	      || REGNO (operands[1]) > 11))
	return "movab %c2(%1),%0";

      /* Add this if using gcc on a VAX 3xxx:
      if (REG_P (operands[1]) && REG_P (operands[2]))
	return "movab (%1)[%2],%0";
      */

      if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a2[%1]";
	  return "movab %a2[%1],%0";
	}

      if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
	{
	  if (push_operand (operands[0], SImode))
	    return "pushab %a1[%2]";
	  return "movab %a1[%2],%0";
	}

      if (flag_pic && REG_P (operands[0])
	  && symbolic_operand (operands[2], SImode))
	return "movab %a2,%0;addl2 %1,%0";

      if (flag_pic
	  && (symbolic_operand (operands[1], SImode)
	      || symbolic_operand (operands[2], SImode)))
	debug_rtx (insn);

      return "addl3 %1,%2,%0";

    case E_HImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incw %0";
	  if (operands[2] == constm1_rtx)
	    return "decw %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subw2 $%n2,%0";
	  return "addw2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addw2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subw3 $%n2,%1,%0";
      return "addw3 %1,%2,%0";

    case E_QImode:
      if (rtx_equal_p (operands[0], operands[1]))
	{
	  if (operands[2] == const1_rtx)
	    return "incb %0";
	  if (operands[2] == constm1_rtx)
	    return "decb %0";
	  if (CONST_INT_P (operands[2])
	      && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	    return "subb2 $%n2,%0";
	  return "addb2 %2,%0";
	}
      if (rtx_equal_p (operands[0], operands[2]))
	return "addb2 %1,%0";
      if (CONST_INT_P (operands[2])
	  && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
	return "subb3 $%n2,%1,%0";
      return "addb3 %1,%2,%0";

    default:
      gcc_unreachable ();
    }
}

const char *
vax_output_int_subtract (rtx_insn *insn, rtx *operands, machine_mode mode)
{
  switch (mode)
    {
    case E_DImode:
      {
	rtx low[3];
	const char *pattern;
	int carry = 1;

	if (TARGET_QMATH && 0)
	  debug_rtx (insn);

	split_quadword_operands (insn, MINUS, operands, low, 3);

	if (TARGET_QMATH)
	  {
	    if (operands[1] == const0_rtx && low[1] == const0_rtx)
	      {
		/* Negation is tricky.  It's basically complement and
		   increment.  Negate hi, then lo, and subtract the carry
		   back.  */
		if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
		    || (MEM_P (operands[0])
			&& GET_CODE (XEXP (operands[0], 0)) == POST_INC))
		  fatal_insn ("illegal operand detected", insn);
		output_asm_insn ("mnegl %2,%0", operands);
		output_asm_insn ("mnegl %2,%0", low);
		return "sbwc $0,%0";
	      }
	    gcc_assert (rtx_equal_p (operands[0], operands[1]));
	    gcc_assert (rtx_equal_p (low[0], low[1]));
	    if (low[2] == const1_rtx)
	      output_asm_insn ("decl %0", low);
	    else
	      output_asm_insn ("subl2 %2,%0", low);
	    return "sbwc %2,%0";
	  }

	/* Subtract low parts.  */
	if (rtx_equal_p (operands[0], operands[1]))
	  {
	    if (low[2] == const0_rtx)
	      pattern = 0, carry = 0;
	    else if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else
	      pattern = "subl2 %2,%0";
	  }
	else
	  {
	    if (low[2] == constm1_rtx)
	      pattern = "decl %0";
	    else if (low[2] == const0_rtx)
	      pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
	    else
	      pattern = "subl3 %2,%1,%0";
	  }
	if (pattern)
	  output_asm_insn (pattern, low);
	if (carry)
	  {
	    if (!rtx_equal_p (operands[0], operands[1]))
	      return "movl %1,%0;sbwc %2,%0";
	    return "sbwc %2,%0";
	    /* %0 = %2 - %1 - C */
	  }
	return get_insn_template (CODE_FOR_subsi3, insn);
      }

    default:
      gcc_unreachable ();
    }
}

/* True if X is an rtx for a constant that is a valid address.  */

bool
legitimate_constant_address_p (rtx x)
{
  if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
      || CONST_INT_P (x) || GET_CODE (x) == HIGH)
    return true;
  if (GET_CODE (x) != CONST)
    return false;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
  if (flag_pic
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
    return false;
#endif
  return true;
}

/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */
#define INDEX_REGISTER_P(X, STRICT) \
  (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */
#define BASE_REGISTER_P(X, STRICT) \
  (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))

#ifdef NO_EXTERNAL_INDIRECT_ADDRESS

/* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
   are no SYMBOL_REFs for external symbols present.  */

static bool
indirectable_constant_address_p (rtx x, bool indirect)
{
  if (GET_CODE (x) == SYMBOL_REF)
    return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;

  if (GET_CODE (x) == CONST)
    return !flag_pic
	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));

  return CONSTANT_ADDRESS_P (x);
}

#else /* not NO_EXTERNAL_INDIRECT_ADDRESS */

static bool
indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
{
  return CONSTANT_ADDRESS_P (x);
}

#endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */

/* True if X is an address which can be indirected.  External symbols
   could be in a sharable image library, so we disallow those.  */

static bool
indirectable_address_p (rtx x, bool strict, bool indirect)
{
  if (indirectable_constant_address_p (x, indirect)
      || BASE_REGISTER_P (x, strict))
    return true;
  if (GET_CODE (x) != PLUS
      || !BASE_REGISTER_P (XEXP (x, 0), strict)
      || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
    return false;
  return indirectable_constant_address_p (XEXP (x, 1), indirect);
}

/* Return true if x is a valid address not using indexing.
   (This much is the easy part.)  */
static bool
nonindexed_address_p (rtx x, bool strict)
{
  rtx xfoo0;
  if (REG_P (x))
    {
      if (! reload_in_progress
	  || reg_equiv_mem (REGNO (x)) == 0
	  || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
	return true;
    }
  if (indirectable_constant_address_p (x, false))
    return true;
  if (indirectable_address_p (x, strict, false))
    return true;
  xfoo0 = XEXP (x, 0);
  if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
    return true;
  if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
      && BASE_REGISTER_P (xfoo0, strict))
    return true;
  return false;
}

/* True if PROD is either a reg times the size of mode MODE, where MODE is
   less than or equal to 8 bytes, or just a reg if MODE is one byte.  For a
   MULT RTX we accept its operands in either order; however, ASHIFT is not
   commutative, so in that case the reg has to be the left operand.  */
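/* E.g. (an illustrative note, not from the original source): for SImode,
   both (mult (reg) (const_int 4)) and (ashift (reg) (const_int 2)) are
   accepted as index terms, mirroring how the hardware scales the index
   register by the operand size.  */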

static bool
index_term_p (rtx prod, machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;
  bool log_p;

  if (GET_MODE_SIZE (mode) == 1)
    return BASE_REGISTER_P (prod, strict);

  if ((GET_CODE (prod) != MULT && GET_CODE (prod) != ASHIFT)
      || GET_MODE_SIZE (mode) > 8)
    return false;

  log_p = GET_CODE (prod) == ASHIFT;
  xfoo0 = XEXP (prod, 0);
  xfoo1 = XEXP (prod, 1);

  if (!log_p
      && CONST_INT_P (xfoo0)
      && GET_MODE_SIZE (mode) == INTVAL (xfoo0)
      && INDEX_REGISTER_P (xfoo1, strict))
    return true;

  if (CONST_INT_P (xfoo1)
      && GET_MODE_SIZE (mode) == (log_p ? 1 << INTVAL (xfoo1) : INTVAL (xfoo1))
      && INDEX_REGISTER_P (xfoo0, strict))
    return true;

  return false;
}

/* Return true if X is the sum of a register
   and a valid index term for mode MODE.  */
static bool
reg_plus_index_p (rtx x, machine_mode mode, bool strict)
{
  rtx xfoo0, xfoo1;

  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
    return true;

  if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
    return true;

  return false;
}

/* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
static bool
indexable_address_p (rtx xfoo0, rtx xfoo1, machine_mode mode, bool strict)
{
  if (!CONSTANT_ADDRESS_P (xfoo0))
    return false;
  if (BASE_REGISTER_P (xfoo1, strict))
    return !flag_pic || mode == QImode;
  if (flag_pic && symbolic_operand (xfoo0, SImode))
    return false;
  return reg_plus_index_p (xfoo1, mode, strict);
}

/* legitimate_address_p returns true if it recognizes an RTL expression "x"
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.  */
bool
vax_legitimate_address_p (machine_mode mode, rtx x, bool strict, code_helper)
{
  rtx xfoo0, xfoo1;

  if (nonindexed_address_p (x, strict))
    return true;

  if (GET_CODE (x) != PLUS)
    return false;

  /* Handle <address>[index] represented with index-sum outermost */

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (index_term_p (xfoo0, mode, strict)
      && nonindexed_address_p (xfoo1, strict))
    return true;

  if (index_term_p (xfoo1, mode, strict)
      && nonindexed_address_p (xfoo0, strict))
    return true;

  /* Handle offset(reg)[index] with offset added outermost */

  if (indexable_address_p (xfoo0, xfoo1, mode, strict)
      || indexable_address_p (xfoo1, xfoo0, mode, strict))
    return true;

  return false;
}

/* Return true if x (a legitimate address expression) has an effect that
   depends on the machine mode it is used for.  On the VAX, predecrement
   and postincrement addresses depend thus (the amount of decrement or
   increment being the length of the operand) and all indexed addresses
   depend thus (because the index scale factor is the length of the
   operand).  */
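/* For instance (an illustrative note, not from the original source):
   "(r1)[r2]" denotes r1 + 4*r2 when used as an SImode address but
   r1 + r2 when used as a QImode address, so the same RTL does not name
   a mode-independent location.  */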

static bool
vax_mode_dependent_address_p (const_rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
  rtx xfoo0, xfoo1;

  /* Auto-increment cases are now dealt with generically in recog.cc.  */
  if (GET_CODE (x) != PLUS)
    return false;

  xfoo0 = XEXP (x, 0);
  xfoo1 = XEXP (x, 1);

  if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
    return false;
  if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
    return false;

  return true;
}

static rtx
fixup_mathdi_operand (rtx x, machine_mode mode)
{
  if (illegal_addsub_di_memory_operand (x, mode))
    {
      rtx addr = XEXP (x, 0);
      rtx temp = gen_reg_rtx (Pmode);
      rtx offset = 0;
#ifdef NO_EXTERNAL_INDIRECT_ADDRESS
      if (GET_CODE (addr) == CONST && flag_pic)
	{
	  offset = XEXP (XEXP (addr, 0), 1);
	  addr = XEXP (XEXP (addr, 0), 0);
	}
#endif
      emit_move_insn (temp, addr);
      if (offset)
	temp = gen_rtx_PLUS (Pmode, temp, offset);
      x = gen_rtx_MEM (DImode, temp);
    }
  return x;
}

void
vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
{
  int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
  rtx temp;

  rtx (*gen_old_insn)(rtx, rtx, rtx);
  rtx (*gen_si_insn)(rtx, rtx, rtx);
  rtx (*gen_insn)(rtx, rtx, rtx);

  if (code == PLUS)
    {
      gen_old_insn = gen_adddi3_old;
      gen_si_insn = gen_addsi3;
      gen_insn = gen_adcdi3;
    }
  else if (code == MINUS)
    {
      gen_old_insn = gen_subdi3_old;
      gen_si_insn = gen_subsi3;
      gen_insn = gen_sbcdi3;
    }
  else
    gcc_unreachable ();

2019 /* If this is addition (thus operands are commutative) and if there is one
2020 addend that duplicates the desination, we want that addend to be the
2021 first addend. */
2022 if (code == PLUS
2023 && rtx_equal_p (operands[0], operands[2])
2024 && !rtx_equal_p (operands[1], operands[2]))
2025 {
2026 temp = operands[2];
2027 operands[2] = operands[1];
2028 operands[1] = temp;
2029 }
2030
2031 if (!TARGET_QMATH)
2032 {
2033 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
2034 }
2035 else if (hi_only)
2036 {
2037 if (!rtx_equal_p (operands[0], operands[1])
2038 && (REG_P (operands[0]) && MEM_P (operands[1])))
2039 {
2040 emit_move_insn (operands[0], operands[1]);
2041 operands[1] = operands[0];
2042 }
2043
2044 operands[0] = fixup_mathdi_operand (operands[0], DImode);
2045 operands[1] = fixup_mathdi_operand (operands[1], DImode);
2046 operands[2] = fixup_mathdi_operand (operands[2], DImode);
2047
2048 if (!rtx_equal_p (operands[0], operands[1]))
2049 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
2050 operand_subword (operands[1], 0, 0, DImode));
2051
2052 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
2053 operand_subword (operands[1], 1, 0, DImode),
2054 operand_subword (operands[2], 1, 0, DImode)));
2055 }
2056 else
2057 {
2058 /* If we are adding a value to itself, that's really a multiply by 2,
2059 and that's just a left shift by 1. If subtracting, it's just 0. */
2060 if (rtx_equal_p (operands[1], operands[2]))
2061 {
2062 if (code == PLUS)
2063 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
2064 else
2065 emit_move_insn (operands[0], const0_rtx);
2066 return;
2067 }
2068
2069 operands[0] = fixup_mathdi_operand (operands[0], DImode);
2070
2071 /* If an operand is the same as operand[0], use the operand[0] rtx
2072 because fixup will an equivalent rtx but not an equal one. */
2073
2074 if (rtx_equal_p (operands[0], operands[1]))
2075 operands[1] = operands[0];
2076 else
2077 operands[1] = fixup_mathdi_operand (operands[1], DImode);
2078
2079 if (rtx_equal_p (operands[0], operands[2]))
2080 operands[2] = operands[0];
2081 else
2082 operands[2] = fixup_mathdi_operand (operands[2], DImode);
2083
2084 /* If we are adding or subtracting 0, then this is a move. */
2085 if (code == PLUS && operands[1] == const0_rtx)
2086 {
2087 temp = operands[2];
2088 operands[2] = operands[1];
2089 operands[1] = temp;
2090 }
2091 if (operands[2] == const0_rtx)
2092 {
2093 emit_move_insn (operands[0], operands[1]);
2094 return;
2095 }
2096
2097 /* If we are subtracting not from ourselves [d = a - b], and because the
2098 carry ops are two operand only, we would need to do a move prior to
2099 the subtract. And if d == b, we would need a temp otherwise
2100 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
2101 into d = -b, d += a. Since -b can never overflow, even if b == d,
2102 no temp is needed.
2103
2104 If we are doing addition, since the carry ops are two operand, if
2105 we aren't adding to ourselves, move the first addend to the
2106 destination first. */
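
      /* Illustrative sketch (editorial addition): for d = a - b with a
	 constant a, the branch below emits
	     sbcdi3 (d, 0, b)   -- d = -b, safe even when d == b
	     adcdi3 (d, d, a)   -- d += a
	 instead of a move followed by a two-operand subtract.  */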

      gcc_assert (operands[1] != const0_rtx || code == MINUS);
      if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
	{
	  if (code == MINUS && CONSTANT_P (operands[1]))
	    {
	      emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
	      code = PLUS;
	      gen_insn = gen_adcdi3;
	      operands[2] = operands[1];
	      operands[1] = operands[0];
	    }
	  else
	    emit_move_insn (operands[0], operands[1]);
	}

      /* Subtracting a constant will have been rewritten to an addition of the
	 negative of that constant before we get here.  */
      gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
      emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
    }
}

/* Output assembler code for a block containing the constant parts
   of a trampoline, leaving space for the variable parts.  */

/* On the VAX, the trampoline contains an entry mask and two instructions:
	.word NN
	movl $STATIC,r0 (store the function's static chain)
	jmp *$FUNCTION (jump to function code at address FUNCTION)  */
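
/* Byte-level note (editorial addition): the integers below are emitted
   in little-endian byte order, so GEN_INT (0x8fd0) lays down the MOVL
   opcode (0xd0) followed by the immediate addressing-mode byte (0x8f),
   and GEN_INT (0x9f17) lays down JMP (0x17) with an absolute @# operand
   specifier (0x9f); 0x50 + STATIC_CHAIN_REGNUM is the register-mode
   operand specifier naming the static chain register.  */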

static void
vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
{
  assemble_aligned_integer (2, const0_rtx);	/* Entry mask; filled in later.  */
  assemble_aligned_integer (2, GEN_INT (0x8fd0));	/* movl $...  */
  assemble_aligned_integer (4, const0_rtx);	/* Static chain value.  */
  assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM)); /* ...,Rn  */
  assemble_aligned_integer (2, GEN_INT (0x9f17));	/* jmp @#...  */
  assemble_aligned_integer (4, const0_rtx);	/* Function address.  */
}

/* We copy the register-mask from the function's pure code
   to the start of the trampoline.  */
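
/* Editorial note on the offsets used below: offset 0 is the entry-mask
   word of the template, offsets 4 and 11 are the two zeroed longwords
   it leaves for the static chain value and the jump target, and the +2
   added to FNADDR makes the final jump skip the callee's own
   entry-mask word.  */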

static void
vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
{
  rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
  rtx mem;

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, HImode, 0);
  emit_move_insn (mem, gen_const_mem (HImode, fnaddr));

  mem = adjust_address (m_tramp, SImode, 4);
  emit_move_insn (mem, cxt);
  mem = adjust_address (m_tramp, SImode, 11);
  emit_move_insn (mem, plus_constant (Pmode, fnaddr, 2));
  emit_insn (gen_sync_istream ());
}

/* Value is the number of bytes of arguments automatically
   popped when returning from a subroutine call.
   FUNDECL is the declaration node of the function (as a tree),
   FUNTYPE is the data type of the function (as a tree),
   or for a library call it is an identifier node for the subroutine name.
   SIZE is the number of bytes of arguments passed on the stack.

   On the VAX, the RET insn pops a maximum of 255 args for any function.  */

static poly_int64
vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
		      tree funtype ATTRIBUTE_UNUSED, poly_int64 size)
{
  return size > 255 * 4 ? 0 : (HOST_WIDE_INT) size;
}
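
/* Editorial note: 255 longword args is 255 * 4 == 1020 bytes; when more
   than that has been pushed, returning 0 leaves the arguments for the
   caller to pop itself.  */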

/* Implement TARGET_FUNCTION_ARG.  On the VAX all args are pushed.  */

static rtx
vax_function_arg (cumulative_args_t, const function_arg_info &)
{
  return NULL_RTX;
}

/* Update the data in CUM to advance over argument ARG.  */

static void
vax_function_arg_advance (cumulative_args_t cum_v,
			  const function_arg_info &arg)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* Arguments always occupy whole longwords on the stack, so round the
     size up to a multiple of 4; e.g. a QImode argument still advances
     CUM by 4.  */
  *cum += (arg.promoted_size_in_bytes () + 3) & ~3;
}

static HOST_WIDE_INT
vax_starting_frame_offset (void)
{
  /* On ELF targets, reserve the top of the stack for exception handler
     stackadj value.  */
  return TARGET_ELF ? -4 : 0;
}
