]> gcc.gnu.org Git - gcc.git/blame - gcc/config/alpha/alpha.c
alpha.c (some_small_symbolic_operand, [...]): Rename from *symbolic_mem_op*.
[gcc.git] / gcc / config / alpha / alpha.c
CommitLineData
a6f12d7c 1/* Subroutines used for code generation on the DEC Alpha.
9ddd9abd 2 Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
09e98324 3 2000, 2001 Free Software Foundation, Inc.
d60a05a1 4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
a6f12d7c
RK
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING. If not, write to
38ead7f3
RK
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA. */
a6f12d7c
RK
22
23
a6f12d7c 24#include "config.h"
3c303f52 25#include "system.h"
a6f12d7c 26#include "rtl.h"
e78d8e51 27#include "tree.h"
a6f12d7c
RK
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
a6f12d7c
RK
33#include "output.h"
34#include "insn-attr.h"
35#include "flags.h"
36#include "recog.h"
a6f12d7c 37#include "expr.h"
e78d8e51
ZW
38#include "optabs.h"
39#include "reload.h"
a6f12d7c 40#include "obstack.h"
9ecc37f0
RH
41#include "except.h"
42#include "function.h"
3c303f52 43#include "toplev.h"
01439aee 44#include "ggc.h"
b91055dd 45#include "integrate.h"
aead1ca3 46#include "tm_p.h"
672a6f42
NB
47#include "target.h"
48#include "target-def.h"
14691f8d 49#include "debug.h"
9ecc37f0
RH
50
51/* External data. */
9ecc37f0 52extern int rtx_equal_function_value_matters;
a6f12d7c 53
285a5742 54/* Specify which cpu to schedule for. */
9ecc37f0 55
9b009d45 56enum processor_type alpha_cpu;
df45c7ea 57static const char * const alpha_cpu_name[] =
bcbbac26
RH
58{
59 "ev4", "ev5", "ev6"
60};
da792a68 61
6245e3df
RK
62/* Specify how accurate floating-point traps need to be. */
63
64enum alpha_trap_precision alpha_tp;
65
66/* Specify the floating-point rounding mode. */
67
68enum alpha_fp_rounding_mode alpha_fprm;
69
70/* Specify which things cause traps. */
71
72enum alpha_fp_trap_mode alpha_fptm;
73
74/* Strings decoded into the above options. */
9ecc37f0 75
df45c7ea 76const char *alpha_cpu_string; /* -mcpu= */
a3b815cb 77const char *alpha_tune_string; /* -mtune= */
df45c7ea
KG
78const char *alpha_tp_string; /* -mtrap-precision=[p|s|i] */
79const char *alpha_fprm_string; /* -mfp-rounding-mode=[n|m|c|d] */
80const char *alpha_fptm_string; /* -mfp-trap-mode=[n|u|su|sui] */
81const char *alpha_mlat_string; /* -mmemory-latency= */
6245e3df 82
a6f12d7c
RK
83/* Save information from a "cmpxx" operation until the branch or scc is
84 emitted. */
85
6db21c7f 86struct alpha_compare alpha_compare;
a6f12d7c 87
48f6bfac
RK
88/* Non-zero if inside of a function, because the Alpha asm can't
89 handle .files inside of functions. */
90
91static int inside_function = FALSE;
92
bcbbac26
RH
93/* The number of cycles of latency we should assume on memory reads. */
94
95int alpha_memory_latency = 3;
96
9c0e94a5
RH
97/* Whether the function needs the GP. */
98
99static int alpha_function_needs_gp;
100
3873d24b
RH
101/* The alias set for prologue/epilogue register save/restore. */
102
103static int alpha_sr_alias_set;
104
941cc05a
RK
105/* The assembler name of the current function. */
106
107static const char *alpha_fnname;
108
1eb356b9
RH
109/* The next explicit relocation sequence number. */
110int alpha_next_sequence_number = 1;
111
112/* The literal and gpdisp sequence numbers for this insn, as printed
113 by %# and %* respectively. */
114int alpha_this_literal_sequence_number;
115int alpha_this_gpdisp_sequence_number;
116
d60a05a1 117/* Declarations of static functions. */
62918bd3
RH
118static bool decl_in_text_section
119 PARAMS ((tree));
a615ca3e 120static int some_small_symbolic_operand_1
1e7e480e 121 PARAMS ((rtx *, void *));
a615ca3e 122static int split_small_symbolic_operand_1
1e7e480e 123 PARAMS ((rtx *, void *));
e2c9fb9b
RH
124static bool local_symbol_p
125 PARAMS ((rtx));
9c0e94a5 126static void alpha_set_memflags_1
f6da8bc3 127 PARAMS ((rtx, int, int, int));
9c0e94a5 128static rtx alpha_emit_set_const_1
f6da8bc3 129 PARAMS ((rtx, enum machine_mode, HOST_WIDE_INT, int));
9c0e94a5 130static void alpha_expand_unaligned_load_words
f6da8bc3 131 PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
9c0e94a5 132static void alpha_expand_unaligned_store_words
f6da8bc3 133 PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
9c0e94a5 134static void alpha_sa_mask
f6da8bc3 135 PARAMS ((unsigned long *imaskP, unsigned long *fmaskP));
1eb356b9
RH
136static int find_lo_sum
137 PARAMS ((rtx *, void *));
9c0e94a5 138static int alpha_does_function_need_gp
f6da8bc3 139 PARAMS ((void));
5495cc55
RH
140static int alpha_ra_ever_killed
141 PARAMS ((void));
be7560ea
RH
142static const char *get_trap_mode_suffix
143 PARAMS ((void));
144static const char *get_round_mode_suffix
145 PARAMS ((void));
5495cc55
RH
146static rtx set_frame_related_p
147 PARAMS ((void));
148static const char *alpha_lookup_xfloating_lib_func
149 PARAMS ((enum rtx_code));
150static int alpha_compute_xfloating_mode_arg
151 PARAMS ((enum rtx_code, enum alpha_fp_rounding_mode));
152static void alpha_emit_xfloating_libcall
153 PARAMS ((const char *, rtx, rtx[], int, rtx));
154static rtx alpha_emit_xfloating_compare
155 PARAMS ((enum rtx_code, rtx, rtx));
b4c25db2
NB
156static void alpha_output_function_end_prologue
157 PARAMS ((FILE *));
c237e94a
ZW
158static int alpha_adjust_cost
159 PARAMS ((rtx, rtx, rtx, int));
160static int alpha_issue_rate
161 PARAMS ((void));
162static int alpha_variable_issue
163 PARAMS ((FILE *, int, rtx, int));
89cfc2c6 164
30102605
RH
165#if TARGET_ABI_UNICOSMK
166static void alpha_init_machine_status
167 PARAMS ((struct function *p));
168static void alpha_mark_machine_status
169 PARAMS ((struct function *p));
170static void alpha_free_machine_status
171 PARAMS ((struct function *p));
172#endif
173
174static void unicosmk_output_deferred_case_vectors PARAMS ((FILE *));
175static void unicosmk_gen_dsib PARAMS ((unsigned long *imaskP));
176static void unicosmk_output_ssib PARAMS ((FILE *, const char *));
177static int unicosmk_need_dex PARAMS ((rtx));
178
e9a25f70 179/* Get the number of args of a function in one of two ways. */
30102605 180#if TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK
e9a25f70
JL
181#define NUM_ARGS current_function_args_info.num_args
182#else
183#define NUM_ARGS current_function_args_info
184#endif
26250081 185
26250081
RH
186#define REG_PV 27
187#define REG_RA 26
a6f12d7c 188\f
672a6f42 189/* Initialize the GCC target structure. */
be7b80f4 190#if TARGET_ABI_OPEN_VMS
91d231cb 191const struct attribute_spec vms_attribute_table[];
7c262518 192static unsigned int vms_section_type_flags PARAMS ((tree, const char *, int));
715bdd29 193static void vms_asm_named_section PARAMS ((const char *, unsigned int));
2cc07db4
RH
194static void vms_asm_out_constructor PARAMS ((rtx, int));
195static void vms_asm_out_destructor PARAMS ((rtx, int));
91d231cb
JM
196# undef TARGET_ATTRIBUTE_TABLE
197# define TARGET_ATTRIBUTE_TABLE vms_attribute_table
7c262518
RH
198# undef TARGET_SECTION_TYPE_FLAGS
199# define TARGET_SECTION_TYPE_FLAGS vms_section_type_flags
8289c43b 200#endif
672a6f42 201
30102605
RH
202#if TARGET_ABI_UNICOSMK
203static void unicosmk_asm_named_section PARAMS ((const char *, unsigned int));
204static void unicosmk_insert_attributes PARAMS ((tree, tree *));
205static unsigned int unicosmk_section_type_flags PARAMS ((tree, const char *,
206 int));
207# undef TARGET_INSERT_ATTRIBUTES
208# define TARGET_INSERT_ATTRIBUTES unicosmk_insert_attributes
209# undef TARGET_SECTION_TYPE_FLAGS
210# define TARGET_SECTION_TYPE_FLAGS unicosmk_section_type_flags
211#endif
212
301d03af
RS
213#undef TARGET_ASM_ALIGNED_HI_OP
214#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
215#undef TARGET_ASM_ALIGNED_DI_OP
216#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
217
218/* Default unaligned ops are provided for ELF systems. To get unaligned
219 data for non-ELF systems, we have to turn off auto alignment. */
220#ifndef OBJECT_FORMAT_ELF
221#undef TARGET_ASM_UNALIGNED_HI_OP
222#define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t"
223#undef TARGET_ASM_UNALIGNED_SI_OP
224#define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t"
225#undef TARGET_ASM_UNALIGNED_DI_OP
226#define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t"
227#endif
228
b4c25db2
NB
229#undef TARGET_ASM_FUNCTION_END_PROLOGUE
230#define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue
231
c237e94a
ZW
232#undef TARGET_SCHED_ADJUST_COST
233#define TARGET_SCHED_ADJUST_COST alpha_adjust_cost
234#undef TARGET_SCHED_ISSUE_RATE
235#define TARGET_SCHED_ISSUE_RATE alpha_issue_rate
236#undef TARGET_SCHED_VARIABLE_ISSUE
237#define TARGET_SCHED_VARIABLE_ISSUE alpha_variable_issue
238
f6897b10 239struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 240\f
285a5742 241/* Parse target option strings. */
6245e3df
RK
242
243void
244override_options ()
245{
a3b815cb 246 int i;
8b60264b
KG
247 static const struct cpu_table {
248 const char *const name;
249 const enum processor_type processor;
250 const int flags;
a3b815cb
JJ
251 } cpu_table[] = {
252#define EV5_MASK (MASK_CPU_EV5)
253#define EV6_MASK (MASK_CPU_EV6|MASK_BWX|MASK_MAX|MASK_FIX)
254 { "ev4", PROCESSOR_EV4, 0 },
255 { "ev45", PROCESSOR_EV4, 0 },
256 { "21064", PROCESSOR_EV4, 0 },
257 { "ev5", PROCESSOR_EV5, EV5_MASK },
258 { "21164", PROCESSOR_EV5, EV5_MASK },
259 { "ev56", PROCESSOR_EV5, EV5_MASK|MASK_BWX },
260 { "21164a", PROCESSOR_EV5, EV5_MASK|MASK_BWX },
261 { "pca56", PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
262 { "21164PC",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
263 { "21164pc",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
264 { "ev6", PROCESSOR_EV6, EV6_MASK },
265 { "21264", PROCESSOR_EV6, EV6_MASK },
266 { "ev67", PROCESSOR_EV6, EV6_MASK|MASK_CIX },
267 { "21264a", PROCESSOR_EV6, EV6_MASK|MASK_CIX },
268 { 0, 0, 0 }
269 };
270
30102605
RH
271 /* Unicos/Mk doesn't have shared libraries. */
272 if (TARGET_ABI_UNICOSMK && flag_pic)
273 {
274 warning ("-f%s ignored for Unicos/Mk (not supported)",
275 (flag_pic > 1) ? "PIC" : "pic");
276 flag_pic = 0;
277 }
278
279 /* On Unicos/Mk, the native compiler consistenly generates /d suffices for
280 floating-point instructions. Make that the default for this target. */
281 if (TARGET_ABI_UNICOSMK)
282 alpha_fprm = ALPHA_FPRM_DYN;
283 else
284 alpha_fprm = ALPHA_FPRM_NORM;
285
6245e3df 286 alpha_tp = ALPHA_TP_PROG;
6245e3df
RK
287 alpha_fptm = ALPHA_FPTM_N;
288
30102605
RH
289 /* We cannot use su and sui qualifiers for conversion instructions on
290 Unicos/Mk. I'm not sure if this is due to assembler or hardware
291 limitations. Right now, we issue a warning if -mieee is specified
292 and then ignore it; eventually, we should either get it right or
293 disable the option altogether. */
294
6245e3df
RK
295 if (TARGET_IEEE)
296 {
30102605
RH
297 if (TARGET_ABI_UNICOSMK)
298 warning ("-mieee not supported on Unicos/Mk");
299 else
300 {
301 alpha_tp = ALPHA_TP_INSN;
302 alpha_fptm = ALPHA_FPTM_SU;
303 }
6245e3df
RK
304 }
305
306 if (TARGET_IEEE_WITH_INEXACT)
307 {
30102605
RH
308 if (TARGET_ABI_UNICOSMK)
309 warning ("-mieee-with-inexact not supported on Unicos/Mk");
310 else
311 {
312 alpha_tp = ALPHA_TP_INSN;
313 alpha_fptm = ALPHA_FPTM_SUI;
314 }
6245e3df
RK
315 }
316
317 if (alpha_tp_string)
10d5c73f
RK
318 {
319 if (! strcmp (alpha_tp_string, "p"))
6245e3df 320 alpha_tp = ALPHA_TP_PROG;
10d5c73f 321 else if (! strcmp (alpha_tp_string, "f"))
6245e3df 322 alpha_tp = ALPHA_TP_FUNC;
10d5c73f 323 else if (! strcmp (alpha_tp_string, "i"))
6245e3df 324 alpha_tp = ALPHA_TP_INSN;
10d5c73f
RK
325 else
326 error ("bad value `%s' for -mtrap-precision switch", alpha_tp_string);
327 }
6245e3df
RK
328
329 if (alpha_fprm_string)
10d5c73f
RK
330 {
331 if (! strcmp (alpha_fprm_string, "n"))
6245e3df 332 alpha_fprm = ALPHA_FPRM_NORM;
10d5c73f 333 else if (! strcmp (alpha_fprm_string, "m"))
6245e3df 334 alpha_fprm = ALPHA_FPRM_MINF;
10d5c73f 335 else if (! strcmp (alpha_fprm_string, "c"))
6245e3df 336 alpha_fprm = ALPHA_FPRM_CHOP;
10d5c73f 337 else if (! strcmp (alpha_fprm_string,"d"))
6245e3df 338 alpha_fprm = ALPHA_FPRM_DYN;
10d5c73f
RK
339 else
340 error ("bad value `%s' for -mfp-rounding-mode switch",
6245e3df 341 alpha_fprm_string);
10d5c73f 342 }
6245e3df
RK
343
344 if (alpha_fptm_string)
10d5c73f
RK
345 {
346 if (strcmp (alpha_fptm_string, "n") == 0)
347 alpha_fptm = ALPHA_FPTM_N;
348 else if (strcmp (alpha_fptm_string, "u") == 0)
349 alpha_fptm = ALPHA_FPTM_U;
350 else if (strcmp (alpha_fptm_string, "su") == 0)
351 alpha_fptm = ALPHA_FPTM_SU;
352 else if (strcmp (alpha_fptm_string, "sui") == 0)
353 alpha_fptm = ALPHA_FPTM_SUI;
354 else
355 error ("bad value `%s' for -mfp-trap-mode switch", alpha_fptm_string);
356 }
6245e3df 357
de4abb91
RH
358 alpha_cpu
359 = TARGET_CPU_DEFAULT & MASK_CPU_EV6 ? PROCESSOR_EV6
360 : (TARGET_CPU_DEFAULT & MASK_CPU_EV5 ? PROCESSOR_EV5 : PROCESSOR_EV4);
361
362 if (alpha_cpu_string)
363 {
a3b815cb
JJ
364 for (i = 0; cpu_table [i].name; i++)
365 if (! strcmp (alpha_cpu_string, cpu_table [i].name))
366 {
367 alpha_cpu = cpu_table [i].processor;
368 target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX
369 | MASK_CPU_EV5 | MASK_CPU_EV6);
370 target_flags |= cpu_table [i].flags;
371 break;
372 }
373 if (! cpu_table [i].name)
de4abb91
RH
374 error ("bad value `%s' for -mcpu switch", alpha_cpu_string);
375 }
376
a3b815cb
JJ
377 if (alpha_tune_string)
378 {
379 for (i = 0; cpu_table [i].name; i++)
380 if (! strcmp (alpha_tune_string, cpu_table [i].name))
381 {
382 alpha_cpu = cpu_table [i].processor;
383 break;
384 }
385 if (! cpu_table [i].name)
386 error ("bad value `%s' for -mcpu switch", alpha_tune_string);
387 }
388
285a5742 389 /* Do some sanity checks on the above options. */
6245e3df 390
30102605
RH
391 if (TARGET_ABI_UNICOSMK && alpha_fptm != ALPHA_FPTM_N)
392 {
393 warning ("trap mode not supported on Unicos/Mk");
394 alpha_fptm = ALPHA_FPTM_N;
395 }
396
10d5c73f 397 if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI)
a3b815cb 398 && alpha_tp != ALPHA_TP_INSN && ! TARGET_CPU_EV6)
6245e3df 399 {
10d5c73f 400 warning ("fp software completion requires -mtrap-precision=i");
6245e3df
RK
401 alpha_tp = ALPHA_TP_INSN;
402 }
89cfc2c6 403
a3b815cb 404 if (TARGET_CPU_EV6)
981a828e
RH
405 {
406 /* Except for EV6 pass 1 (not released), we always have precise
407 arithmetic traps. Which means we can do software completion
408 without minding trap shadows. */
409 alpha_tp = ALPHA_TP_PROG;
410 }
411
89cfc2c6
RK
412 if (TARGET_FLOAT_VAX)
413 {
414 if (alpha_fprm == ALPHA_FPRM_MINF || alpha_fprm == ALPHA_FPRM_DYN)
415 {
416 warning ("rounding mode not supported for VAX floats");
417 alpha_fprm = ALPHA_FPRM_NORM;
418 }
419 if (alpha_fptm == ALPHA_FPTM_SUI)
420 {
421 warning ("trap mode not supported for VAX floats");
422 alpha_fptm = ALPHA_FPTM_SU;
423 }
424 }
bcbbac26
RH
425
426 {
427 char *end;
428 int lat;
429
430 if (!alpha_mlat_string)
431 alpha_mlat_string = "L1";
432
d1e6b55b 433 if (ISDIGIT ((unsigned char)alpha_mlat_string[0])
bcbbac26
RH
434 && (lat = strtol (alpha_mlat_string, &end, 10), *end == '\0'))
435 ;
436 else if ((alpha_mlat_string[0] == 'L' || alpha_mlat_string[0] == 'l')
d1e6b55b 437 && ISDIGIT ((unsigned char)alpha_mlat_string[1])
bcbbac26
RH
438 && alpha_mlat_string[2] == '\0')
439 {
440 static int const cache_latency[][4] =
441 {
442 { 3, 30, -1 }, /* ev4 -- Bcache is a guess */
443 { 2, 12, 38 }, /* ev5 -- Bcache from PC164 LMbench numbers */
285a5742 444 { 3, 12, 30 }, /* ev6 -- Bcache from DS20 LMbench. */
bcbbac26
RH
445 };
446
447 lat = alpha_mlat_string[1] - '0';
54f5c4b0 448 if (lat <= 0 || lat > 3 || cache_latency[alpha_cpu][lat-1] == -1)
bcbbac26
RH
449 {
450 warning ("L%d cache latency unknown for %s",
451 lat, alpha_cpu_name[alpha_cpu]);
452 lat = 3;
453 }
454 else
455 lat = cache_latency[alpha_cpu][lat-1];
456 }
457 else if (! strcmp (alpha_mlat_string, "main"))
458 {
459 /* Most current memories have about 370ns latency. This is
460 a reasonable guess for a fast cpu. */
461 lat = 150;
462 }
463 else
464 {
465 warning ("bad value `%s' for -mmemory-latency", alpha_mlat_string);
466 lat = 3;
467 }
468
469 alpha_memory_latency = lat;
470 }
bb8ebb7f
RH
471
472 /* Default the definition of "small data" to 8 bytes. */
473 if (!g_switch_set)
474 g_switch_value = 8;
3873d24b 475
133d3133
RH
476 /* Infer TARGET_SMALL_DATA from -fpic/-fPIC. */
477 if (flag_pic == 1)
478 target_flags |= MASK_SMALL_DATA;
479 else if (flag_pic == 2)
480 target_flags &= ~MASK_SMALL_DATA;
481
c176c051
RH
482 /* Align labels and loops for optimal branching. */
483 /* ??? Kludge these by not doing anything if we don't optimize and also if
285a5742 484 we are writing ECOFF symbols to work around a bug in DEC's assembler. */
c176c051
RH
485 if (optimize > 0 && write_symbols != SDB_DEBUG)
486 {
487 if (align_loops <= 0)
488 align_loops = 16;
489 if (align_jumps <= 0)
490 align_jumps = 16;
491 }
492 if (align_functions <= 0)
493 align_functions = 16;
494
3873d24b
RH
495 /* Acquire a unique set number for our register saves and restores. */
496 alpha_sr_alias_set = new_alias_set ();
30102605
RH
497
498 /* Register variables and functions with the garbage collector. */
499
500#if TARGET_ABI_UNICOSMK
501 /* Set up function hooks. */
502 init_machine_status = alpha_init_machine_status;
503 mark_machine_status = alpha_mark_machine_status;
504 free_machine_status = alpha_free_machine_status;
505#endif
6245e3df
RK
506}
507\f
a6f12d7c
RK
508/* Returns 1 if VALUE is a mask that contains full bytes of zero or ones. */
509
510int
511zap_mask (value)
512 HOST_WIDE_INT value;
513{
514 int i;
515
516 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
517 i++, value >>= 8)
518 if ((value & 0xff) != 0 && (value & 0xff) != 0xff)
519 return 0;
520
521 return 1;
522}
523
524/* Returns 1 if OP is either the constant zero or a register. If a
525 register, it must be in the proper mode unless MODE is VOIDmode. */
526
527int
528reg_or_0_operand (op, mode)
529 register rtx op;
530 enum machine_mode mode;
531{
532 return op == const0_rtx || register_operand (op, mode);
533}
534
f4014bfd
RK
535/* Return 1 if OP is a constant in the range of 0-63 (for a shift) or
536 any register. */
537
538int
539reg_or_6bit_operand (op, mode)
540 register rtx op;
541 enum machine_mode mode;
542{
543 return ((GET_CODE (op) == CONST_INT
544 && (unsigned HOST_WIDE_INT) INTVAL (op) < 64)
545 || register_operand (op, mode));
546}
547
548
a6f12d7c
RK
549/* Return 1 if OP is an 8-bit constant or any register. */
550
551int
552reg_or_8bit_operand (op, mode)
553 register rtx op;
554 enum machine_mode mode;
555{
556 return ((GET_CODE (op) == CONST_INT
557 && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100)
558 || register_operand (op, mode));
559}
560
14edc0e4
TG
561/* Return 1 if OP is an 8-bit constant. */
562
563int
564cint8_operand (op, mode)
565 register rtx op;
3c303f52 566 enum machine_mode mode ATTRIBUTE_UNUSED;
14edc0e4 567{
e3208d53 568 return ((GET_CODE (op) == CONST_INT
eb8da868 569 && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100));
14edc0e4
TG
570}
571
a6f12d7c
RK
572/* Return 1 if the operand is a valid second operand to an add insn. */
573
574int
575add_operand (op, mode)
576 register rtx op;
577 enum machine_mode mode;
578{
579 if (GET_CODE (op) == CONST_INT)
80df65c9 580 /* Constraints I, J, O and P are covered by K. */
e6118f89 581 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')
80df65c9 582 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
a6f12d7c
RK
583
584 return register_operand (op, mode);
585}
586
587/* Return 1 if the operand is a valid second operand to a sign-extending
588 add insn. */
589
590int
591sext_add_operand (op, mode)
592 register rtx op;
593 enum machine_mode mode;
594{
595 if (GET_CODE (op) == CONST_INT)
80df65c9
RH
596 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
597 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'));
a6f12d7c 598
c5c76735 599 return reg_not_elim_operand (op, mode);
a6f12d7c
RK
600}
601
602/* Return 1 if OP is the constant 4 or 8. */
603
604int
605const48_operand (op, mode)
606 register rtx op;
3c303f52 607 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
608{
609 return (GET_CODE (op) == CONST_INT
610 && (INTVAL (op) == 4 || INTVAL (op) == 8));
611}
612
613/* Return 1 if OP is a valid first operand to an AND insn. */
614
615int
616and_operand (op, mode)
617 register rtx op;
618 enum machine_mode mode;
619{
620 if (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
621 return (zap_mask (CONST_DOUBLE_LOW (op))
622 && zap_mask (CONST_DOUBLE_HIGH (op)));
623
624 if (GET_CODE (op) == CONST_INT)
625 return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
626 || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100
627 || zap_mask (INTVAL (op)));
628
629 return register_operand (op, mode);
630}
631
c7def335 632/* Return 1 if OP is a valid first operand to an IOR or XOR insn. */
8088469d
RK
633
634int
c7def335 635or_operand (op, mode)
8088469d
RK
636 register rtx op;
637 enum machine_mode mode;
638{
639 if (GET_CODE (op) == CONST_INT)
640 return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
641 || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100);
642
643 return register_operand (op, mode);
644}
645
a6f12d7c
RK
646/* Return 1 if OP is a constant that is the width, in bits, of an integral
647 mode smaller than DImode. */
648
649int
650mode_width_operand (op, mode)
651 register rtx op;
3c303f52 652 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
653{
654 return (GET_CODE (op) == CONST_INT
6c174fc0
RH
655 && (INTVAL (op) == 8 || INTVAL (op) == 16
656 || INTVAL (op) == 32 || INTVAL (op) == 64));
a6f12d7c
RK
657}
658
659/* Return 1 if OP is a constant that is the width of an integral machine mode
660 smaller than an integer. */
661
662int
663mode_mask_operand (op, mode)
664 register rtx op;
3c303f52 665 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
666{
667#if HOST_BITS_PER_WIDE_INT == 32
668 if (GET_CODE (op) == CONST_DOUBLE)
6c174fc0
RH
669 return (CONST_DOUBLE_LOW (op) == -1
670 && (CONST_DOUBLE_HIGH (op) == -1
671 || CONST_DOUBLE_HIGH (op) == 0));
672#else
673 if (GET_CODE (op) == CONST_DOUBLE)
674 return (CONST_DOUBLE_LOW (op) == -1 && CONST_DOUBLE_HIGH (op) == 0);
a6f12d7c
RK
675#endif
676
16b02ae0
RK
677 return (GET_CODE (op) == CONST_INT
678 && (INTVAL (op) == 0xff
679 || INTVAL (op) == 0xffff
3873d24b 680 || INTVAL (op) == (HOST_WIDE_INT)0xffffffff
11ea364a 681#if HOST_BITS_PER_WIDE_INT == 64
3873d24b 682 || INTVAL (op) == -1
a6f12d7c 683#endif
16b02ae0 684 ));
a6f12d7c
RK
685}
686
687/* Return 1 if OP is a multiple of 8 less than 64. */
688
689int
690mul8_operand (op, mode)
691 register rtx op;
3c303f52 692 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
693{
694 return (GET_CODE (op) == CONST_INT
695 && (unsigned HOST_WIDE_INT) INTVAL (op) < 64
696 && (INTVAL (op) & 7) == 0);
697}
698
699/* Return 1 if OP is the constant zero in floating-point. */
700
701int
702fp0_operand (op, mode)
703 register rtx op;
704 enum machine_mode mode;
705{
706 return (GET_MODE (op) == mode
707 && GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode));
708}
709
710/* Return 1 if OP is the floating-point constant zero or a register. */
711
712int
713reg_or_fp0_operand (op, mode)
714 register rtx op;
715 enum machine_mode mode;
716{
717 return fp0_operand (op, mode) || register_operand (op, mode);
718}
719
4ed43ff8
RH
720/* Return 1 if OP is a hard floating-point register. */
721
722int
723hard_fp_register_operand (op, mode)
724 register rtx op;
725 enum machine_mode mode;
726{
d2c6a1b6
RH
727 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
728 return 0;
729
730 if (GET_CODE (op) == SUBREG)
731 op = SUBREG_REG (op);
732 return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == FLOAT_REGS;
733}
734
735/* Return 1 if OP is a hard general register. */
736
737int
738hard_int_register_operand (op, mode)
739 register rtx op;
740 enum machine_mode mode;
741{
742 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
743 return 0;
744
745 if (GET_CODE (op) == SUBREG)
746 op = SUBREG_REG (op);
747 return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS;
4ed43ff8
RH
748}
749
a6f12d7c
RK
750/* Return 1 if OP is a register or a constant integer. */
751
752
753int
754reg_or_cint_operand (op, mode)
755 register rtx op;
756 enum machine_mode mode;
757{
e3208d53 758 return (GET_CODE (op) == CONST_INT
e3208d53 759 || register_operand (op, mode));
a6f12d7c
RK
760}
761
8d36d33b
RK
762/* Return 1 if OP is something that can be reloaded into a register;
763 if it is a MEM, it need not be valid. */
764
765int
766some_operand (op, mode)
767 register rtx op;
768 enum machine_mode mode;
769{
770 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
771 return 0;
772
773 switch (GET_CODE (op))
774 {
e3208d53 775 case REG: case MEM: case CONST_DOUBLE: case CONST_INT: case LABEL_REF:
551cc6fd 776 case SYMBOL_REF: case CONST: case HIGH:
8d36d33b
RK
777 return 1;
778
779 case SUBREG:
780 return some_operand (SUBREG_REG (op), VOIDmode);
1d300e19
KG
781
782 default:
783 break;
8d36d33b
RK
784 }
785
786 return 0;
787}
788
f711a22b
RH
789/* Likewise, but don't accept constants. */
790
791int
792some_ni_operand (op, mode)
793 register rtx op;
794 enum machine_mode mode;
795{
796 if (GET_MODE (op) != mode && mode != VOIDmode)
797 return 0;
798
799 if (GET_CODE (op) == SUBREG)
800 op = SUBREG_REG (op);
801
802 return (GET_CODE (op) == REG || GET_CODE (op) == MEM);
803}
804
a6f12d7c
RK
805/* Return 1 if OP is a valid operand for the source of a move insn. */
806
807int
808input_operand (op, mode)
809 register rtx op;
810 enum machine_mode mode;
811{
812 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
813 return 0;
814
815 if (GET_MODE_CLASS (mode) == MODE_FLOAT && GET_MODE (op) != mode)
816 return 0;
817
818 switch (GET_CODE (op))
819 {
820 case LABEL_REF:
821 case SYMBOL_REF:
822 case CONST:
e2c9fb9b 823 if (TARGET_EXPLICIT_RELOCS)
551cc6fd
RH
824 {
825 /* We don't split symbolic operands into something unintelligable
826 until after reload, but we do not wish non-small, non-global
827 symbolic operands to be reconstructed from their high/lo_sum
828 form. */
829 return (small_symbolic_operand (op, mode)
830 || global_symbolic_operand (op, mode));
831 }
e2c9fb9b 832
e3208d53 833 /* This handles both the Windows/NT and OSF cases. */
7daa56f5 834 return mode == ptr_mode || mode == DImode;
a6f12d7c 835
551cc6fd
RH
836 case HIGH:
837 return (TARGET_EXPLICIT_RELOCS
838 && local_symbolic_operand (XEXP (op, 0), mode));
839
a6f12d7c 840 case REG:
14a774a9 841 case ADDRESSOF:
a6f12d7c
RK
842 return 1;
843
844 case SUBREG:
845 if (register_operand (op, mode))
846 return 1;
285a5742 847 /* ... fall through ... */
a6f12d7c 848 case MEM:
e9a25f70 849 return ((TARGET_BWX || (mode != HImode && mode != QImode))
a2574dbe 850 && general_operand (op, mode));
a6f12d7c
RK
851
852 case CONST_DOUBLE:
853 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
854
855 case CONST_INT:
856 return mode == QImode || mode == HImode || add_operand (op, mode);
1d300e19 857
ee5332b8
RH
858 case CONSTANT_P_RTX:
859 return 1;
860
1d300e19
KG
861 default:
862 break;
a6f12d7c
RK
863 }
864
865 return 0;
866}
867
0f33506c 868/* Return 1 if OP is a SYMBOL_REF for a function known to be in this
62918bd3 869 file, and in the same section as the current function. */
a6f12d7c
RK
870
871int
0f33506c 872current_file_function_operand (op, mode)
a6f12d7c 873 rtx op;
3c303f52 874 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c 875{
1afec8ad
RH
876 if (GET_CODE (op) != SYMBOL_REF)
877 return 0;
878
62918bd3
RH
879 /* Easy test for recursion. */
880 if (op == XEXP (DECL_RTL (current_function_decl), 0))
881 return 1;
1afec8ad 882
62918bd3
RH
883 /* Otherwise, we need the DECL for the SYMBOL_REF, which we can't get.
884 So SYMBOL_REF_FLAG has been declared to imply that the function is
885 in the default text section. So we must also check that the current
886 function is also in the text section. */
887 if (SYMBOL_REF_FLAG (op) && decl_in_text_section (current_function_decl))
888 return 1;
889
890 return 0;
1afec8ad
RH
891}
892
893/* Return 1 if OP is a SYMBOL_REF for which we can make a call via bsr. */
894
895int
896direct_call_operand (op, mode)
897 rtx op;
898 enum machine_mode mode;
899{
900 /* Must be defined in this file. */
901 if (! current_file_function_operand (op, mode))
902 return 0;
903
904 /* If profiling is implemented via linker tricks, we can't jump
905 to the nogp alternate entry point. */
906 /* ??? TARGET_PROFILING_NEEDS_GP isn't really the right test,
907 but is approximately correct for the OSF ABIs. Don't know
908 what to do for VMS, NT, or UMK. */
909 if (! TARGET_PROFILING_NEEDS_GP
70f4f91c 910 && ! current_function_profile)
1afec8ad 911 return 0;
4d8f669f
RH
912
913 return 1;
a6f12d7c
RK
914}
915
e2c9fb9b
RH
916/* Return true if OP is a LABEL_REF, or SYMBOL_REF or CONST referencing
917 a variable known to be defined in this file. */
918
919static bool
920local_symbol_p (op)
921 rtx op;
922{
923 const char *str = XSTR (op, 0);
924
925 /* ??? SYMBOL_REF_FLAG is set for local function symbols, but we
926 run into problems with the rtl inliner in that the symbol was
927 once external, but is local after inlining, which results in
928 unrecognizable insns. */
929
930 return (CONSTANT_POOL_ADDRESS_P (op)
931 /* If @, then ENCODE_SECTION_INFO sez it's local. */
932 || str[0] == '@'
933 /* If *$, then ASM_GENERATE_INTERNAL_LABEL sez it's local. */
934 || (str[0] == '*' && str[1] == '$'));
935}
1eb356b9
RH
936
937int
938local_symbolic_operand (op, mode)
939 rtx op;
30102605 940 enum machine_mode mode;
1eb356b9 941{
30102605
RH
942 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
943 return 0;
944
1eb356b9
RH
945 if (GET_CODE (op) == LABEL_REF)
946 return 1;
947
948 if (GET_CODE (op) == CONST
949 && GET_CODE (XEXP (op, 0)) == PLUS
950 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
951 op = XEXP (XEXP (op, 0), 0);
952
953 if (GET_CODE (op) != SYMBOL_REF)
954 return 0;
955
e2c9fb9b 956 return local_symbol_p (op);
1eb356b9
RH
957}
958
133d3133
RH
959/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
960 known to be defined in this file in the small data area. */
961
962int
963small_symbolic_operand (op, mode)
964 rtx op;
965 enum machine_mode mode ATTRIBUTE_UNUSED;
966{
967 const char *str;
968
969 if (! TARGET_SMALL_DATA)
970 return 0;
971
e2c9fb9b
RH
972 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
973 return 0;
974
133d3133
RH
975 if (GET_CODE (op) == CONST
976 && GET_CODE (XEXP (op, 0)) == PLUS
977 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
978 op = XEXP (XEXP (op, 0), 0);
979
980 if (GET_CODE (op) != SYMBOL_REF)
981 return 0;
982
983 if (CONSTANT_POOL_ADDRESS_P (op))
e2c9fb9b 984 return GET_MODE_SIZE (get_pool_mode (op)) <= (unsigned) g_switch_value;
133d3133
RH
985 else
986 {
987 str = XSTR (op, 0);
988 return str[0] == '@' && str[1] == 's';
989 }
990}
991
e2c9fb9b
RH
/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
   not known (or known not) to be defined in this file.  */

int
global_symbolic_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Reject operands whose mode disagrees with the requested mode.  */
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  /* Strip a (const (plus SYM const_int)) wrapper.  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  return ! local_symbol_p (op);
}
1013
6bcf5f0a
RK
/* Return 1 if OP is a valid operand for the MEM of a CALL insn.  */

int
call_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Call targets are always Pmode.  */
  if (mode != Pmode)
    return 0;

  if (GET_CODE (op) == REG)
    {
      if (TARGET_ABI_OSF)
	{
	  /* Disallow virtual registers to cope with pathological test cases
	     such as compile/930117-1.c in which the virtual reg decomposes
	     to the frame pointer.  Which is a hard reg that is not $27.  */
	  return (REGNO (op) == 27 || REGNO (op) > LAST_VIRTUAL_REGISTER);
	}
      else
	return 1;
    }
  /* Unicos/Mk does not allow direct symbolic calls here.  */
  if (TARGET_ABI_UNICOSMK)
    return 0;
  if (GET_CODE (op) == SYMBOL_REF)
    return 1;

  return 0;
}
1043
30102605
RH
1044/* Returns 1 if OP is a symbolic operand, i.e. a symbol_ref or a label_ref,
1045 possibly with an offset. */
1046
1047int
1048symbolic_operand (op, mode)
1049 register rtx op;
1050 enum machine_mode mode;
1051{
1052 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1053 return 0;
1054 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1055 return 1;
1056 if (GET_CODE (op) == CONST
1057 && GET_CODE (XEXP (op,0)) == PLUS
1058 && GET_CODE (XEXP (XEXP (op,0), 0)) == SYMBOL_REF
1059 && GET_CODE (XEXP (XEXP (op,0), 1)) == CONST_INT)
1060 return 1;
1061 return 0;
1062}
1063
a6f12d7c
RK
1064/* Return 1 if OP is a valid Alpha comparison operator. Here we know which
1065 comparisons are valid in which insn. */
1066
1067int
1068alpha_comparison_operator (op, mode)
1069 register rtx op;
1070 enum machine_mode mode;
1071{
1072 enum rtx_code code = GET_CODE (op);
1073
1eb8759b 1074 if (mode != GET_MODE (op) && mode != VOIDmode)
a6f12d7c
RK
1075 return 0;
1076
1077 return (code == EQ || code == LE || code == LT
a0e5a544 1078 || code == LEU || code == LTU);
a6f12d7c
RK
1079}
1080
8f4773ea
RH
1081/* Return 1 if OP is a valid Alpha comparison operator against zero.
1082 Here we know which comparisons are valid in which insn. */
1083
1084int
1085alpha_zero_comparison_operator (op, mode)
1086 register rtx op;
1087 enum machine_mode mode;
1088{
1089 enum rtx_code code = GET_CODE (op);
1090
1091 if (mode != GET_MODE (op) && mode != VOIDmode)
1092 return 0;
1093
1094 return (code == EQ || code == NE || code == LE || code == LT
1095 || code == LEU || code == LTU);
1096}
1097
5bf6c48a
RK
1098/* Return 1 if OP is a valid Alpha swapped comparison operator. */
1099
1100int
1101alpha_swapped_comparison_operator (op, mode)
1102 register rtx op;
1103 enum machine_mode mode;
1104{
1105 enum rtx_code code = GET_CODE (op);
1106
1eb8759b
RH
1107 if ((mode != GET_MODE (op) && mode != VOIDmode)
1108 || GET_RTX_CLASS (code) != '<')
5bf6c48a
RK
1109 return 0;
1110
1111 code = swap_condition (code);
1112 return (code == EQ || code == LE || code == LT
a0e5a544 1113 || code == LEU || code == LTU);
5bf6c48a
RK
1114}
1115
a6f12d7c
RK
1116/* Return 1 if OP is a signed comparison operation. */
1117
1118int
1119signed_comparison_operator (op, mode)
1120 register rtx op;
3c303f52 1121 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c 1122{
1eb8759b 1123 enum rtx_code code = GET_CODE (op);
1d300e19 1124
1eb8759b
RH
1125 if (mode != GET_MODE (op) && mode != VOIDmode)
1126 return 0;
a6f12d7c 1127
1eb8759b
RH
1128 return (code == EQ || code == NE
1129 || code == LE || code == LT
1130 || code == GE || code == GT);
1131}
1132
1133/* Return 1 if OP is a valid Alpha floating point comparison operator.
1134 Here we know which comparisons are valid in which insn. */
1135
1136int
1137alpha_fp_comparison_operator (op, mode)
1138 register rtx op;
1139 enum machine_mode mode;
1140{
1141 enum rtx_code code = GET_CODE (op);
1142
1143 if (mode != GET_MODE (op) && mode != VOIDmode)
1144 return 0;
1145
1146 return (code == EQ || code == LE || code == LT || code == UNORDERED);
a6f12d7c
RK
1147}
1148
1149/* Return 1 if this is a divide or modulus operator. */
1150
1151int
1152divmod_operator (op, mode)
1153 register rtx op;
3c303f52 1154 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
1155{
1156 switch (GET_CODE (op))
1157 {
1158 case DIV: case MOD: case UDIV: case UMOD:
1159 return 1;
1d300e19
KG
1160
1161 default:
1162 break;
a6f12d7c
RK
1163 }
1164
1165 return 0;
1166}
1167
/* Return 1 if this memory address is a known aligned register plus
   a constant.  It must be a valid address.  This means that we can do
   this as an aligned reference plus some offset.

   Take into account what reload will do.  */

int
aligned_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx base;

  if (reload_in_progress)
    {
      /* During reload a pseudo may stand for its assigned stack slot;
	 substitute the equivalent memory location if there is one.  */
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];
	  if (op == 0)
	    return 0;
	}
    }

  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);

  /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
     sorts of constructs.  Dig for the real base register.  */
  if (reload_in_progress
      && GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == PLUS)
    base = XEXP (XEXP (op, 0), 0);
  else
    {
      if (! memory_address_p (mode, op))
	return 0;
      base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
    }

  /* The base register must be known to be at least 32-bit aligned.  */
  return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) >= 32);
}
1215
/* Similar, but return 1 if OP is a MEM which is not alignable.  */

int
unaligned_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx base;

  if (reload_in_progress)
    {
      /* During reload a pseudo may stand for its assigned stack slot;
	 substitute the equivalent memory location if there is one.  */
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];
	  if (op == 0)
	    return 0;
	}
    }

  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);

  /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
     sorts of constructs.  Dig for the real base register.  */
  if (reload_in_progress
      && GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == PLUS)
    base = XEXP (XEXP (op, 0), 0);
  else
    {
      if (! memory_address_p (mode, op))
	return 0;
      base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
    }

  /* The base register's known alignment is below 32 bits, so the
     reference cannot be done as an aligned access.  */
  return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) < 32);
}
1259
1260/* Return 1 if OP is either a register or an unaligned memory location. */
1261
1262int
1263reg_or_unaligned_mem_operand (op, mode)
1264 rtx op;
1265 enum machine_mode mode;
1266{
1267 return register_operand (op, mode) || unaligned_memory_operand (op, mode);
a6f12d7c
RK
1268}
1269
1270/* Return 1 if OP is any memory location. During reload a pseudo matches. */
1271
1272int
1273any_memory_operand (op, mode)
1274 register rtx op;
3c303f52 1275 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
1276{
1277 return (GET_CODE (op) == MEM
1278 || (GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == REG)
1279 || (reload_in_progress && GET_CODE (op) == REG
1280 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1281 || (reload_in_progress && GET_CODE (op) == SUBREG
1282 && GET_CODE (SUBREG_REG (op)) == REG
1283 && REGNO (SUBREG_REG (op)) >= FIRST_PSEUDO_REGISTER));
1284}
1285
40b80dad
RH
1286/* Returns 1 if OP is not an eliminable register.
1287
1288 This exists to cure a pathological abort in the s8addq (et al) patterns,
1289
1290 long foo () { long t; bar(); return (long) &t * 26107; }
1291
1292 which run afoul of a hack in reload to cure a (presumably) similar
1293 problem with lea-type instructions on other targets. But there is
1294 one of us and many of them, so work around the problem by selectively
1295 preventing combine from making the optimization. */
1296
1297int
1298reg_not_elim_operand (op, mode)
1299 register rtx op;
1300 enum machine_mode mode;
1301{
1302 rtx inner = op;
1303 if (GET_CODE (op) == SUBREG)
1304 inner = SUBREG_REG (op);
1305 if (inner == frame_pointer_rtx || inner == arg_pointer_rtx)
1306 return 0;
1307
1308 return register_operand (op, mode);
1309}
9c0e94a5 1310
/* Return 1 if OP is a memory location that is not a reference (using
   an AND) to an unaligned location.  Take into account what reload
   will do.  */

int
normal_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (reload_in_progress)
    {
      /* During reload a pseudo may stand for its assigned stack slot.  */
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];

	  /* This may not have been assigned an equivalent address if it will
	     be eliminated.  In that case, it doesn't matter what we do.  */
	  if (op == 0)
	    return 1;
	}
    }

  return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) != AND;
}
67070f5c
RH
1339
1340/* Accept a register, but not a subreg of any kind. This allows us to
1341 avoid pathological cases in reload wrt data movement common in
1342 int->fp conversion. */
1343
1344int
1345reg_no_subreg_operand (op, mode)
1346 register rtx op;
1347 enum machine_mode mode;
1348{
f6598df3 1349 if (GET_CODE (op) != REG)
67070f5c
RH
1350 return 0;
1351 return register_operand (op, mode);
1352}
3611aef0 1353
5519a4f9 1354/* Recognize an addition operation that includes a constant. Used to
3611aef0
RH
1355 convince reload to canonize (plus (plus reg c1) c2) during register
1356 elimination. */
1357
1358int
1359addition_operation (op, mode)
1360 register rtx op;
1361 enum machine_mode mode;
1362{
1363 if (GET_MODE (op) != mode && mode != VOIDmode)
1364 return 0;
1365 if (GET_CODE (op) == PLUS
1366 && register_operand (XEXP (op, 0), mode)
1367 && GET_CODE (XEXP (op, 1)) == CONST_INT
1368 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op, 1)), 'K'))
1369 return 1;
1370 return 0;
1371}
1372
551cc6fd
RH
1373/* Implements CONST_OK_FOR_LETTER_P. Return true if the value matches
1374 the range defined for C in [I-P]. */
1375
1376bool
1377alpha_const_ok_for_letter_p (value, c)
1378 HOST_WIDE_INT value;
1379 int c;
1380{
1381 switch (c)
1382 {
1383 case 'I':
1384 /* An unsigned 8 bit constant. */
1385 return (unsigned HOST_WIDE_INT) value < 0x100;
1386 case 'J':
1387 /* The constant zero. */
1388 return value == 0;
1389 case 'K':
1390 /* A signed 16 bit constant. */
1391 return (unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000;
1392 case 'L':
1393 /* A shifted signed 16 bit constant appropriate for LDAH. */
1394 return ((value & 0xffff) == 0
1395 && ((value) >> 31 == -1 || value >> 31 == 0));
1396 case 'M':
1397 /* A constant that can be AND'ed with using a ZAP insn. */
1398 return zap_mask (value);
1399 case 'N':
1400 /* A complemented unsigned 8 bit constant. */
1401 return (unsigned HOST_WIDE_INT) (~ value) < 0x100;
1402 case 'O':
1403 /* A negated unsigned 8 bit constant. */
1404 return (unsigned HOST_WIDE_INT) (- value) < 0x100;
1405 case 'P':
1406 /* The constant 1, 2 or 3. */
1407 return value == 1 || value == 2 || value == 3;
1408
1409 default:
1410 return false;
1411 }
1412}
1413
/* Implements CONST_DOUBLE_OK_FOR_LETTER_P.  Return true if VALUE
   matches for C in [GH].  */

bool
alpha_const_double_ok_for_letter_p (value, c)
     rtx value;
     int c;
{
  switch (c)
    {
    case 'G':
      /* The floating point zero constant.  */
      return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
	      && value == CONST0_RTX (GET_MODE (value)));

    case 'H':
      /* A valid operand of a ZAP insn.  Both halves of the
	 VOIDmode CONST_DOUBLE must be zap masks.  */
      return (GET_MODE (value) == VOIDmode
	      && zap_mask (CONST_DOUBLE_LOW (value))
	      && zap_mask (CONST_DOUBLE_HIGH (value)));

    default:
      return false;
    }
}
1439
/* Implements EXTRA_CONSTRAINT.  Return true if VALUE matches for C
   in [Q-U].  (The previous comment named CONST_DOUBLE_OK_FOR_LETTER_P,
   which this function does not implement.)  */

bool
alpha_extra_constraint (value, c)
     rtx value;
     int c;
{
  switch (c)
    {
    case 'Q':
      /* A memory reference that is not an unaligned (AND) form.  */
      return normal_memory_operand (value, VOIDmode);
    case 'R':
      /* A direct call target.  */
      return direct_call_operand (value, Pmode);
    case 'S':
      /* An unsigned constant less than 64.  */
      return (GET_CODE (value) == CONST_INT
	      && (unsigned HOST_WIDE_INT) INTVAL (value) < 64);
    case 'T':
      /* A HIGH expression.  */
      return GET_CODE (value) == HIGH;
    case 'U':
      /* A symbolic operand, accepted only on Unicos/Mk.  */
      return TARGET_ABI_UNICOSMK && symbolic_operand (value, VOIDmode);

    default:
      return false;
    }
}
1466
39157bcc
RH
/* Return 1 if this function can directly return via $26: not on VMS or
   Unicos/Mk, only after reload, and only when no registers need saving
   and the frame, outgoing-args and pretend-args areas are all empty.  */

int
direct_return ()
{
  return (! TARGET_ABI_OPEN_VMS && ! TARGET_ABI_UNICOSMK
	  && reload_completed
	  && alpha_sa_size () == 0
	  && get_frame_size () == 0
	  && current_function_outgoing_args_size == 0
	  && current_function_pretend_args_size == 0);
}
25e21aed
RH
1479
1480/* Return the ADDR_VEC associated with a tablejump insn. */
1481
1482rtx
1483alpha_tablejump_addr_vec (insn)
1484 rtx insn;
1485{
1486 rtx tmp;
1487
1488 tmp = JUMP_LABEL (insn);
1489 if (!tmp)
1490 return NULL_RTX;
1491 tmp = NEXT_INSN (tmp);
1492 if (!tmp)
1493 return NULL_RTX;
1494 if (GET_CODE (tmp) == JUMP_INSN
1495 && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
1496 return PATTERN (tmp);
1497 return NULL_RTX;
1498}
1499
/* Return the label of the predicted edge, or CONST0_RTX if we don't know.  */

rtx
alpha_tablejump_best_label (insn)
     rtx insn;
{
  rtx jump_table = alpha_tablejump_addr_vec (insn);
  rtx best_label = NULL_RTX;

  /* ??? Once the CFG doesn't keep getting completely rebuilt, look
     there for edge frequency counts from profile data.  */

  if (jump_table)
    {
      int n_labels = XVECLEN (jump_table, 1);
      int best_count = -1;
      int i, j;

      /* Guess the most likely target: the label that appears most
	 often in the table.  Counting is a simple O(n^2) scan, which
	 is acceptable for typical small jump tables.  */
      for (i = 0; i < n_labels; i++)
	{
	  int count = 1;

	  for (j = i + 1; j < n_labels; j++)
	    if (XEXP (XVECEXP (jump_table, 1, i), 0)
		== XEXP (XVECEXP (jump_table, 1, j), 0))
	      count++;

	  if (count > best_count)
	    best_count = count, best_label = XVECEXP (jump_table, 1, i);
	}
    }

  return best_label ? best_label : const0_rtx;
}
3611aef0 1534\f
62918bd3
RH
1535/* Return true if the function DECL will be placed in the default text
1536 section. */
1537/* ??? Ideally we'd be able to always move from a SYMBOL_REF back to the
1538 decl, as that would allow us to determine if two functions are in the
1539 same section, which is what we really want to know. */
1540
1541static bool
1542decl_in_text_section (decl)
1543 tree decl;
1544{
1545 return (DECL_SECTION_NAME (decl) == NULL_TREE
1546 && ! (flag_function_sections
1547 || (targetm.have_named_sections
1548 && DECL_ONE_ONLY (decl))));
1549}
1550
1eb356b9
RH
/* If we are referencing a function that is static, make the SYMBOL_REF
   special.  We use this to indicate that we can branch to this function
   without setting PV or restoring GP.

   If this is a variable that is known to be defined locally, add "@v"
   to the name.  If in addition the variable is to go in .sdata/.sbss,
   then add "@s" instead.  */

void
alpha_encode_section_info (decl)
     tree decl;
{
  const char *symbol_str;
  bool is_local, is_small;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* We mark public functions once they are emitted; otherwise we
	 don't know that they exist in this unit of translation.  */
      if (TREE_PUBLIC (decl))
	return;
      /* Do not mark functions that are not in .text; otherwise we
	 don't know that they are near enough for a direct branch.  */
      if (! decl_in_text_section (decl))
	return;

      SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
      return;
    }

  /* Early out if we're not going to do anything with this data.  */
  if (! TARGET_EXPLICIT_RELOCS)
    return;

  /* Careful not to prod global register variables.  */
  if (TREE_CODE (decl) != VAR_DECL
      || GET_CODE (DECL_RTL (decl)) != MEM
      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != SYMBOL_REF)
    return;

  symbol_str = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  /* A variable is considered "local" if it is defined in this module.  */

  if (DECL_EXTERNAL (decl))
    is_local = false;
  /* Linkonce and weak data is never local.  */
  else if (DECL_ONE_ONLY (decl) || DECL_WEAK (decl))
    is_local = false;
  else if (! TREE_PUBLIC (decl))
    is_local = true;
  /* If PIC, then assume that any global name can be overridden by
     symbols resolved from other modules.  */
  else if (flag_pic)
    is_local = false;
  /* Uninitialized COMMON variable may be unified with symbols
     resolved from other modules.  */
  else if (DECL_COMMON (decl)
	   && (DECL_INITIAL (decl) == NULL
	       || DECL_INITIAL (decl) == error_mark_node))
    is_local = false;
  /* Otherwise we're left with initialized (or non-common) global data
     which is of necessity defined locally.  */
  else
    is_local = true;

  /* Determine if DECL will wind up in .sdata/.sbss.  */

  is_small = false;
  if (DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sbss") == 0)
	is_small = true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      /* If the variable has already been defined in the output file, then it
	 is too late to put it in sdata if it wasn't put there in the first
	 place.  The test is here rather than above, because if it is already
	 in sdata, then it can stay there.  */

      if (TREE_ASM_WRITTEN (decl))
	;

      /* If this is an incomplete type with size 0, then we can't put it in
	 sdata because it might be too big when completed.  */
      else if (size > 0 && size <= g_switch_value)
	is_small = true;
    }

  /* Finally, encode this into the symbol string.  */
  if (is_local)
    {
      const char *string;
      char *newstr;
      size_t len;

      if (symbol_str[0] == '@')
	{
	  /* Already encoded; nothing to do if the encoding matches.
	     Otherwise strip the old "@?" prefix and re-encode.  */
	  if (symbol_str[1] == (is_small ? 's' : 'v'))
	    return;
	  symbol_str += 2;
	}

      len = strlen (symbol_str) + 1;
      newstr = alloca (len + 2);

      newstr[0] = '@';
      newstr[1] = (is_small ? 's' : 'v');
      memcpy (newstr + 2, symbol_str, len);

      string = ggc_alloc_string (newstr, len + 2 - 1);
      XSTR (XEXP (DECL_RTL (decl), 0), 0) = string;
    }
  else if (symbol_str[0] == '@')
    abort ();
}
1672
a39bdefc
RH
/* legitimate_address_p recognizes an RTL expression that is a valid
   memory address for an instruction.  The MODE argument is the
   machine mode for the MEM expression that wants to use this address.

   For Alpha, we have either a constant address or the sum of a
   register and a constant address, or just a register.  For DImode,
   any of those forms can be surrounded with an AND that clear the
   low-order three bits; this is an "unaligned" access.  */

bool
alpha_legitimate_address_p (mode, x, strict)
     enum machine_mode mode;
     rtx x;
     int strict;
{
  /* If this is an ldq_u type address, discard the outer AND.  */
  if (mode == DImode
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -8)
    x = XEXP (x, 0);

  /* Discard non-paradoxical subregs.  */
  if (GET_CODE (x) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (x))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
    x = SUBREG_REG (x);

  /* Unadorned general registers are valid.  */
  if (REG_P (x)
      && (strict
	  ? STRICT_REG_OK_FOR_BASE_P (x)
	  : NONSTRICT_REG_OK_FOR_BASE_P (x)))
    return true;

  /* Constant addresses (i.e. +/- 32k) are valid.  */
  if (CONSTANT_ADDRESS_P (x))
    return true;

  /* Register plus a small constant offset is valid.  Note that the
     explicit-relocs branch below is an ELSE of this test: a PLUS that
     fails here is not reconsidered there.  */
  if (GET_CODE (x) == PLUS)
    {
      rtx ofs = XEXP (x, 1);
      x = XEXP (x, 0);

      /* Discard non-paradoxical subregs.  */
      if (GET_CODE (x) == SUBREG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	x = SUBREG_REG (x);

      if (REG_P (x))
	{
	  if (! strict
	      && NONSTRICT_REG_OK_FP_BASE_P (x)
	      && GET_CODE (ofs) == CONST_INT)
	    return true;
	  if ((strict
	       ? STRICT_REG_OK_FOR_BASE_P (x)
	       : NONSTRICT_REG_OK_FOR_BASE_P (x))
	      && CONSTANT_ADDRESS_P (ofs))
	    return true;
	}
      else if (GET_CODE (x) == ADDRESSOF
	       && GET_CODE (ofs) == CONST_INT)
	return true;
    }

  /* If we're managing explicit relocations, LO_SUM is valid, as
     are small data symbols.  */
  else if (TARGET_EXPLICIT_RELOCS)
    {
      if (small_symbolic_operand (x, Pmode))
	return true;

      if (GET_CODE (x) == LO_SUM)
	{
	  rtx ofs = XEXP (x, 1);
	  x = XEXP (x, 0);

	  /* Discard non-paradoxical subregs.  */
	  if (GET_CODE (x) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (x))
		  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	    x = SUBREG_REG (x);

	  /* Must have a valid base register.  */
	  if (! (REG_P (x)
		 && (strict
		     ? STRICT_REG_OK_FOR_BASE_P (x)
		     : NONSTRICT_REG_OK_FOR_BASE_P (x))))
	    return false;

	  /* The symbol must be local.  */
	  if (local_symbolic_operand (ofs, Pmode))
	    return true;
	}
    }

  return false;
}
1774
aead1ca3
RH
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   SCRATCH is a register we may use; when new pseudos are not allowed
   it is also used as the reload temporary.  */

rtx
alpha_legitimize_address (x, scratch, mode)
     rtx x;
     rtx scratch;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT addend;

  /* If the address is (plus reg const_int) and the CONST_INT is not a
     valid offset, compute the high part of the constant and add it to
     the register.  Then our address is (plus temp low-part-const).  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
    {
      addend = INTVAL (XEXP (x, 1));
      x = XEXP (x, 0);
      goto split_addend;
    }

  /* If the address is (const (plus FOO const_int)), find the low-order
     part of the CONST_INT.  Then load FOO plus any high-order part of the
     CONST_INT into a register.  Our address is (plus reg low-part-const).
     This is done to reduce the number of GOT entries.  */
  if (!no_new_pseudos
      && GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (x, 0), 1));
      x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
      goto split_addend;
    }

  /* If we have a (plus reg const), emit the load as in (2), then add
     the two registers, and finally generate (plus reg low-part-const) as
     our address.  */
  if (!no_new_pseudos
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
      x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
			       XEXP (XEXP (XEXP (x, 1), 0), 0),
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
      goto split_addend;
    }

  /* If this is a local symbol, split the address into HIGH/LO_SUM parts.  */
  if (TARGET_EXPLICIT_RELOCS && symbolic_operand (x, Pmode))
    {
      if (local_symbolic_operand (x, Pmode))
	{
	  /* Small data symbols are already valid addresses as-is.  */
	  if (small_symbolic_operand (x, Pmode))
	    return x;
	  else
	    {
	      if (!no_new_pseudos)
		scratch = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, scratch,
				      gen_rtx_HIGH (Pmode, x)));
	      return gen_rtx_LO_SUM (Pmode, scratch, x);
	    }
	}
    }

  return NULL;

 split_addend:
  {
    HOST_WIDE_INT low, high;

    /* The xor/subtract pairs sign-extend the low 16 bits and then the
       next 16 bits out of ADDEND; whatever remains is added last.  */
    low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
    addend -= low;
    high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
    addend -= high;

    if (addend)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);
    if (high)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);

    return plus_constant (x, low);
  }
}
1871
551cc6fd
RH
/* For TARGET_EXPLICIT_RELOCS, we don't obfuscate a SYMBOL_REF to a
   small symbolic operand until after reload.  At which point we need
   to replace (mem (symbol_ref)) with (mem (lo_sum $29 symbol_ref))
   so that sched2 has the proper dependency information.  */

int
some_small_symbolic_operand (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Walk all of X; the callback returns nonzero for the first small
     symbol found that is not already inside a LO_SUM.  */
  return for_each_rtx (&x, some_small_symbolic_operand_1, NULL);
}
1884
1885static int
a615ca3e 1886some_small_symbolic_operand_1 (px, data)
1e7e480e
RH
1887 rtx *px;
1888 void *data ATTRIBUTE_UNUSED;
1889{
1890 rtx x = *px;
551cc6fd 1891
a615ca3e
RH
1892 /* Don't re-split. */
1893 if (GET_CODE (x) == LO_SUM)
1894 return -1;
1e7e480e 1895
a615ca3e 1896 return small_symbolic_operand (x, Pmode) != 0;
551cc6fd
RH
1897}
1898
/* Return a copy of X in which every small symbolic operand has been
   replaced by (lo_sum $29 symbol).  The original insn is not modified.  */

rtx
split_small_symbolic_operand (x)
     rtx x;
{
  x = copy_insn (x);
  for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
  return x;
}
551cc6fd 1907
1e7e480e 1908static int
a615ca3e 1909split_small_symbolic_operand_1 (px, data)
1e7e480e
RH
1910 rtx *px;
1911 void *data ATTRIBUTE_UNUSED;
1912{
1913 rtx x = *px;
51c561e3 1914
a615ca3e
RH
1915 /* Don't re-split. */
1916 if (GET_CODE (x) == LO_SUM)
1917 return -1;
551cc6fd 1918
1e7e480e
RH
1919 if (small_symbolic_operand (x, Pmode))
1920 {
1921 x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
1922 *px = x;
a615ca3e 1923 return -1;
1e7e480e
RH
1924 }
1925
a615ca3e 1926 return 0;
551cc6fd
RH
1927}
1928
aead1ca3
RH
/* Try a machine-dependent way of reloading an illegitimate address
   operand.  If we find one, push the reload and return the new rtx.  */

rtx
alpha_legitimize_reload_address (x, mode, opnum, type, ind_levels)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  /* We wish to handle large displacements off a base register by
     splitting the addend across an ldah and the mem insn.  This
     cuts number of extra insns needed from 3 to 1.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* LOW is VAL sign-extended to 16 bits; HIGH is the remainder
	 sign-extended to 32 bits.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	return NULL_RTX;

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */
      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  return NULL_RTX;
}
1986\f
a6f12d7c
RK
/* REF is an alignable memory location.  Place an aligned SImode
   reference into *PALIGNED_MEM and the number of bits to shift into
   *PBITNUM.  */

void
get_aligned_mem (ref, paligned_mem, pbitnum)
     rtx ref;
     rtx *paligned_mem, *pbitnum;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      /* During reload, substitute any pending address replacement.  */
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
	abort ();
    }
  else
    {
      base = XEXP (ref, 0);
    }

  /* Fold a (plus base const) address into BASE plus OFFSET.  */
  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  /* Widen to the enclosing aligned SImode word.  */
  *paligned_mem
    = widen_memory_access (ref, SImode, (offset & ~3) - offset);

  /* The byte position within the word gives the shift amount.  */
  if (WORDS_BIG_ENDIAN)
    *pbitnum = GEN_INT (32 - (GET_MODE_BITSIZE (GET_MODE (ref))
			      + (offset & 3) * 8));
  else
    *pbitnum = GEN_INT ((offset & 3) * 8);
}
2028
adb18b68
RK
/* Similar, but just get the address.  Handle the two reload cases.
   Add EXTRA_OFFSET to the address we return.  */

rtx
get_unaligned_address (ref, extra_offset)
     rtx ref;
     int extra_offset;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      /* During reload, substitute any pending address replacement.  */
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
	abort ();
    }
  else
    {
      base = XEXP (ref, 0);
    }

  /* Fold a (plus base const) address into BASE plus OFFSET.  */
  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  return plus_constant (base, offset + extra_offset);
}
3611aef0 2061
551cc6fd
RH
2062/* On the Alpha, all (non-symbolic) constants except zero go into
2063 a floating-point register via memory. Note that we cannot
2064 return anything that is not a subset of CLASS, and that some
2065 symbolic constants cannot be dropped to memory. */
2066
2067enum reg_class
2068alpha_preferred_reload_class(x, class)
2069 rtx x;
2070 enum reg_class class;
2071{
2072 /* Zero is present in any register class. */
2073 if (x == CONST0_RTX (GET_MODE (x)))
2074 return class;
2075
2076 /* These sorts of constants we can easily drop to memory. */
2077 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2078 {
2079 if (class == FLOAT_REGS)
2080 return NO_REGS;
2081 if (class == ALL_REGS)
2082 return GENERAL_REGS;
2083 return class;
2084 }
2085
2086 /* All other kinds of constants should not (and in the case of HIGH
2087 cannot) be dropped to memory -- instead we use a GENERAL_REGS
2088 secondary reload. */
2089 if (CONSTANT_P (x))
2090 return (class == ALL_REGS ? GENERAL_REGS : class);
2091
2092 return class;
2093}
2094
/* Loading and storing HImode or QImode values to and from memory
   usually requires a scratch register.  The exceptions are loading
   QImode and HImode from an aligned address to a general register
   unless byte instructions are permitted.

   We also cannot load an unaligned address or a paradoxical SUBREG
   into an FP register.

   We also cannot do integral arithmetic into FP regs, as might result
   from register elimination into a DImode fp register.  */

enum reg_class
secondary_reload_class (class, mode, x, in)
     enum reg_class class;
     enum machine_mode mode;
     rtx x;
     int in;
{
  /* Without BWX, subword memory accesses need insertion/extraction
     sequences, which require a GENERAL_REGS scratch.  */
  if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
    {
      /* X counts as a memory reference if it is a MEM, a pseudo that
	 may yet be assigned to memory, or a SUBREG of either.  */
      if (GET_CODE (x) == MEM
	  || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
	  || (GET_CODE (x) == SUBREG
	      && (GET_CODE (SUBREG_REG (x)) == MEM
		  || (GET_CODE (SUBREG_REG (x)) == REG
		      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
	{
	  /* Aligned loads are the one case handled without a scratch;
	     everything else (stores, unaligned) needs GENERAL_REGS.  */
	  if (!in || !aligned_memory_operand(x, mode))
	    return GENERAL_REGS;
	}
    }

  if (class == FLOAT_REGS)
    {
      /* (mem (and ...)) is how unaligned addresses are represented;
	 FP registers cannot handle those.  */
      if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
	return GENERAL_REGS;

      /* A paradoxical SUBREG (outer mode wider than inner) cannot go
	 into an FP register either.  */
      if (GET_CODE (x) == SUBREG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	return GENERAL_REGS;

      /* Integral values other than memory loads and zero must go
	 through a general register on the way in.  */
      if (in && INTEGRAL_MODE_P (mode)
	  && ! (memory_operand (x, mode) || x == const0_rtx))
	return GENERAL_REGS;
    }

  return NO_REGS;
}
a6f12d7c
RK
2144\f
/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  Recurses through SEQUENCE/PARALLEL vectors,
   INSN patterns, and both sides of a SET; other rtx codes are left
   untouched.  */

static void
alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p)
     rtx x;
     int in_struct_p, volatile_p, unchanging_p;
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      /* Recurse into every element of the vector.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	alpha_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p,
			      unchanging_p);
      break;

    case INSN:
      alpha_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p,
			    unchanging_p);
      break;

    case SET:
      /* Both the destination and the source may contain MEMs.  */
      alpha_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p,
			    unchanging_p);
      alpha_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p,
			    unchanging_p);
      break;

    case MEM:
      /* Found one: copy all three flags onto it.  */
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      RTX_UNCHANGING_P (x) = unchanging_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
	 produced by the AND interferes.  Given that two-byte quantities
	 are the only thing we would be able to differentiate anyway,
	 there does not seem to be any point in convoluting the early
	 out of the alias check.  */
      break;

    default:
      break;
    }
}
2191
2192/* Given INSN, which is either an INSN or a SEQUENCE generated to
2193 perform a memory operation, look for any MEMs in either a SET_DEST or
2194 a SET_SRC and copy the in-struct, unchanging, and volatile flags from
2195 REF into each of the MEMs found. If REF is not a MEM, don't do
2196 anything. */
2197
2198void
2199alpha_set_memflags (insn, ref)
2200 rtx insn;
2201 rtx ref;
2202{
80db34d8 2203 int in_struct_p, volatile_p, unchanging_p;
3873d24b
RH
2204
2205 if (GET_CODE (ref) != MEM)
a6f12d7c
RK
2206 return;
2207
3873d24b
RH
2208 in_struct_p = MEM_IN_STRUCT_P (ref);
2209 volatile_p = MEM_VOLATILE_P (ref);
2210 unchanging_p = RTX_UNCHANGING_P (ref);
3873d24b
RH
2211
2212 /* This is only called from alpha.md, after having had something
2213 generated from one of the insn patterns. So if everything is
2214 zero, the pattern is already up-to-date. */
80db34d8 2215 if (! in_struct_p && ! volatile_p && ! unchanging_p)
3873d24b
RH
2216 return;
2217
80db34d8 2218 alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p);
a6f12d7c
RK
2219}
2220\f
2221/* Try to output insns to set TARGET equal to the constant C if it can be
fd94addf
RK
2222 done in less than N insns. Do all computations in MODE. Returns the place
2223 where the output has been placed if it can be done and the insns have been
2224 emitted. If it would take more than N insns, zero is returned and no
2225 insns and emitted. */
a6f12d7c 2226
fd94addf
RK
2227rtx
2228alpha_emit_set_const (target, mode, c, n)
a6f12d7c 2229 rtx target;
fd94addf 2230 enum machine_mode mode;
a6f12d7c
RK
2231 HOST_WIDE_INT c;
2232 int n;
9102cd1f
RK
2233{
2234 rtx pat;
2235 int i;
2236
285a5742 2237 /* Try 1 insn, then 2, then up to N. */
9102cd1f
RK
2238 for (i = 1; i <= n; i++)
2239 if ((pat = alpha_emit_set_const_1 (target, mode, c, i)) != 0)
2240 return pat;
2241
2242 return 0;
2243}
2244
/* Internal routine for the above to check for N or below insns.
   Returns the register holding the constant, or zero if it cannot be
   synthesized within the budget.  Emits insns as a side effect when
   successful.  */

static rtx
alpha_emit_set_const_1 (target, mode, c, n)
     rtx target;
     enum machine_mode mode;
     HOST_WIDE_INT c;
     int n;
{
  HOST_WIDE_INT new;
  int i, bits;
  /* Use a pseudo if highly optimizing and still generating RTL.  */
  rtx subtarget
    = (flag_expensive_optimizations && rtx_equal_function_value_matters
       ? 0 : target);
  rtx temp;

#if HOST_BITS_PER_WIDE_INT == 64
  /* We are only called for SImode and DImode.  If this is SImode, ensure that
     we are sign extended to a full word.  This does not make any sense when
     cross-compiling on a narrow machine.  */

  if (mode == SImode)
    c = ((c & 0xffffffff) ^ 0x80000000) - 0x80000000;
#endif

  /* If this is a sign-extended 32-bit constant, we can do this in at most
     three insns, so do it if we have enough insns left.  We always have
     a sign-extended 32-bit constant when compiling on a narrow machine.  */

  if (HOST_BITS_PER_WIDE_INT != 64
      || c >> 31 == -1 || c >> 31 == 0)
    {
      /* LOW is the sign-extended low 16 bits; HIGH the next 16.
	 C == (high << 16) + low after this decomposition.  */
      HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT tmp1 = c - low;
      HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT extra = 0;

      /* If HIGH will be interpreted as negative but the constant is
	 positive, we must adjust it to do two ldha insns.  */

      if ((high & 0x8000) != 0 && c >= 0)
	{
	  extra = 0x4000;
	  tmp1 -= 0x40000000;
	  high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
	}

      if (c == low || (low == 0 && extra == 0))
	{
	  /* We used to use copy_to_suggested_reg (GEN_INT (c), target, mode)
	     but that meant that we can't handle INT_MIN on 32-bit machines
	     (like NT/Alpha), because we recurse indefinitely through
	     emit_move_insn to gen_movdi.  So instead, since we know exactly
	     what we want, create it explicitly.  */

	  if (target == NULL)
	    target = gen_reg_rtx (mode);
	  emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
	  return target;
	}
      else if (n >= 2 + (extra != 0))
	{
	  /* ldah, optional second ldah for the 0x8000 adjustment,
	     then lda to add in the low bits.  */
	  temp = copy_to_suggested_reg (GEN_INT (high << 16), subtarget, mode);

	  if (extra != 0)
	    temp = expand_binop (mode, add_optab, temp, GEN_INT (extra << 16),
				 subtarget, 0, OPTAB_WIDEN);

	  return expand_binop (mode, add_optab, temp, GEN_INT (low),
			       target, 0, OPTAB_WIDEN);
	}
    }

  /* If we couldn't do it that way, try some other methods.  But if we have
     no instructions left, don't bother.  Likewise, if this is SImode and
     we can't make pseudos, we can't do anything since the expand_binop
     and expand_unop calls will widen and try to make pseudos.  */

  if (n == 1
      || (mode == SImode && ! rtx_equal_function_value_matters))
    return 0;

  /* Next, see if we can load a related constant and then shift and possibly
     negate it to get the constant we want.  Try this once each increasing
     numbers of insns.  */

  for (i = 1; i < n; i++)
    {
      /* First, see if minus some low bits, we've an easy load of
	 high bits.  */

      new = ((c & 0xffff) ^ 0x8000) - 0x8000;
      if (new != 0
	  && (temp = alpha_emit_set_const (subtarget, mode, c - new, i)) != 0)
	return expand_binop (mode, add_optab, temp, GEN_INT (new),
			     target, 0, OPTAB_WIDEN);

      /* Next try complementing.  */
      if ((temp = alpha_emit_set_const (subtarget, mode, ~ c, i)) != 0)
	return expand_unop (mode, one_cmpl_optab, temp, target, 0);

      /* Next try to form a constant and do a left shift.  We can do this
	 if some low-order bits are zero; the exact_log2 call below tells
	 us that information.  The bits we are shifting out could be any
	 value, but here we'll just try the 0- and sign-extended forms of
	 the constant.  To try to increase the chance of having the same
	 constant in more than one insn, start at the highest number of
	 bits to shift, but try all possibilities in case a ZAPNOT will
	 be useful.  */

      if ((bits = exact_log2 (c & - c)) > 0)
	for (; bits > 0; bits--)
	  if ((temp = (alpha_emit_set_const
		       (subtarget, mode, c >> bits, i))) != 0
	      || ((temp = (alpha_emit_set_const
			   (subtarget, mode,
			    ((unsigned HOST_WIDE_INT) c) >> bits, i)))
		  != 0))
	    return expand_binop (mode, ashl_optab, temp, GEN_INT (bits),
				 target, 0, OPTAB_WIDEN);

      /* Now try high-order zero bits.  Here we try the shifted-in bits as
	 all zero and all ones.  Be careful to avoid shifting outside the
	 mode and to avoid shifting outside the host wide int size.  */
      /* On narrow hosts, don't shift a 1 into the high bit, since we'll
	 confuse the recursive call and set all of the high 32 bits.  */

      if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
		   - floor_log2 (c) - 1 - (HOST_BITS_PER_WIDE_INT < 64))) > 0)
	for (; bits > 0; bits--)
	  if ((temp = alpha_emit_set_const (subtarget, mode,
					    c << bits, i)) != 0
	      || ((temp = (alpha_emit_set_const
			   (subtarget, mode,
			    ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
			    i)))
		  != 0))
	    return expand_binop (mode, lshr_optab, temp, GEN_INT (bits),
				 target, 1, OPTAB_WIDEN);

      /* Now try high-order 1 bits.  We get that with a sign-extension.
	 But one bit isn't enough here.  Be careful to avoid shifting outside
	 the mode and to avoid shifting outside the host wide int size.  */

      if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
		   - floor_log2 (~ c) - 2)) > 0)
	for (; bits > 0; bits--)
	  if ((temp = alpha_emit_set_const (subtarget, mode,
					    c << bits, i)) != 0
	      || ((temp = (alpha_emit_set_const
			   (subtarget, mode,
			    ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
			    i)))
		  != 0))
	    return expand_binop (mode, ashr_optab, temp, GEN_INT (bits),
				 target, 0, OPTAB_WIDEN);
    }

#if HOST_BITS_PER_WIDE_INT == 64
  /* Finally, see if can load a value into the target that is the same as the
     constant except that all bytes that are 0 are changed to be 0xff.  If we
     can, then we can do a ZAPNOT to obtain the desired constant.  */

  new = c;
  for (i = 0; i < 64; i += 8)
    if ((new & ((HOST_WIDE_INT) 0xff << i)) == 0)
      new |= (HOST_WIDE_INT) 0xff << i;

  /* We are only called for SImode and DImode.  If this is SImode, ensure that
     we are sign extended to a full word.  */

  if (mode == SImode)
    new = ((new & 0xffffffff) ^ 0x80000000) - 0x80000000;

  if (new != c && new != -1
      && (temp = alpha_emit_set_const (subtarget, mode, new, n - 1)) != 0)
    return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new),
			 target, 0, OPTAB_WIDEN);
#endif

  return 0;
}
758d2c0c 2428
97aea203
RK
/* Having failed to find a 3 insn sequence in alpha_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with alpha_emit_set_const.

   The 64-bit value is (C2 << 32) | C1 on narrow hosts, or just C1
   (with C2 its sign bit replication) on 64-bit hosts.  It is split
   into four sign-extended 16-bit pieces D1..D4 such that the value
   equals ((D4 + D3) << 32) + D2 + D1.  */

rtx
alpha_emit_set_long_const (target, c1, c2)
     rtx target;
     HOST_WIDE_INT c1, c2;
{
  HOST_WIDE_INT d1, d2, d3, d4;

  /* Decompose the entire word */
#if HOST_BITS_PER_WIDE_INT >= 64
  /* C2 must just be the sign extension of C1 here.  */
  if (c2 != -(c1 < 0))
    abort ();
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  c1 = (c1 - d2) >> 32;
  d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d3;
  d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c1 != d4)
    abort ();
#else
  /* Narrow host: low half from C1, high half from C2 (adjusted for
     the borrow implied by a negative D2).  */
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c1 != d2)
    abort ();
  c2 += (d2 < 0);
  d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
  c2 -= d3;
  d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c2 != d4)
    abort ();
#endif

  /* Construct the high word */
  if (d4)
    {
      emit_move_insn (target, GEN_INT (d4));
      if (d3)
	emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d3)));
    }
  else
    emit_move_insn (target, GEN_INT (d3));

  /* Shift it into place */
  emit_move_insn (target, gen_rtx_ASHIFT (DImode, target, GEN_INT (32)));

  /* Add in the low bits.  */
  if (d2)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d2)));
  if (d1)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));

  return target;
}
97aea203 2489
/* Expand a move instruction; return true if all work is done.
   We don't handle non-bwx subword loads here.  */

bool
alpha_expand_mov (mode, operands)
     enum machine_mode mode;
     rtx *operands;
{
  /* If the output is not a register, the input must be.  */
  if (GET_CODE (operands[0]) == MEM
      && ! reg_or_0_operand (operands[1], mode))
    operands[1] = force_reg (mode, operands[1]);

  /* Allow legitimize_address to perform some simplifications.  */
  if (mode == Pmode && symbolic_operand (operands[1], mode))
    {
      rtx tmp = alpha_legitimize_address (operands[1], operands[0], mode);
      if (tmp)
	{
	  operands[1] = tmp;
	  return false;
	}
    }

  /* Early out for non-constants and valid constants.  */
  if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  if (GET_CODE (operands[1]) == CONST_INT
      || GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      /* I0/I1 are the low/high halves of the 64-bit value.  */
      HOST_WIDE_INT i0, i1;
      rtx temp = NULL_RTX;

      if (GET_CODE (operands[1]) == CONST_INT)
	{
	  i0 = INTVAL (operands[1]);
	  i1 = -(i0 < 0);
	}
      else if (HOST_BITS_PER_WIDE_INT >= 64)
	{
	  i0 = CONST_DOUBLE_LOW (operands[1]);
	  i1 = -(i0 < 0);
	}
      else
	{
	  i0 = CONST_DOUBLE_LOW (operands[1]);
	  i1 = CONST_DOUBLE_HIGH (operands[1]);
	}

      /* First try a short synthesized sequence (up to 3 insns); this
	 only applies when the value fits a host wide int.  */
      if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
	temp = alpha_emit_set_const (operands[0], mode, i0, 3);

      /* Otherwise fall back to the straightforward decomposition,
	 if the target allows building constants inline.  */
      if (!temp && TARGET_BUILD_CONSTANTS)
	temp = alpha_emit_set_long_const (operands[0], i0, i1);

      if (temp)
	{
	  if (rtx_equal_p (operands[0], temp))
	    return true;
	  operands[1] = temp;
	  return false;
	}
    }

  /* Otherwise we've nothing left but to drop the thing to memory.  */
  operands[1] = force_const_mem (DImode, operands[1]);
  if (reload_in_progress)
    {
      /* During reload we cannot make new pseudos, so load the address
	 of the constant pool entry into the destination register and
	 then use that register as the address of the MEM.  */
      emit_move_insn (operands[0], XEXP (operands[1], 0));
      operands[1] = copy_rtx (operands[1]);
      XEXP (operands[1], 0) = operands[0];
    }
  else
    operands[1] = validize_mem (operands[1]);
  return false;
}
2568
2569/* Expand a non-bwx QImode or HImode move instruction;
2570 return true if all work is done. */
2571
2572bool
2573alpha_expand_mov_nobwx (mode, operands)
2574 enum machine_mode mode;
2575 rtx *operands;
2576{
2577 /* If the output is not a register, the input must be. */
2578 if (GET_CODE (operands[0]) == MEM)
2579 operands[1] = force_reg (mode, operands[1]);
2580
2581 /* Handle four memory cases, unaligned and aligned for either the input
2582 or the output. The only case where we can be called during reload is
2583 for aligned loads; all other cases require temporaries. */
2584
2585 if (GET_CODE (operands[1]) == MEM
2586 || (GET_CODE (operands[1]) == SUBREG
2587 && GET_CODE (SUBREG_REG (operands[1])) == MEM)
2588 || (reload_in_progress && GET_CODE (operands[1]) == REG
2589 && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
2590 || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
2591 && GET_CODE (SUBREG_REG (operands[1])) == REG
2592 && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
2593 {
2594 if (aligned_memory_operand (operands[1], mode))
2595 {
2596 if (reload_in_progress)
2597 {
2598 emit_insn ((mode == QImode
2599 ? gen_reload_inqi_help
2600 : gen_reload_inhi_help)
2601 (operands[0], operands[1],
2602 gen_rtx_REG (SImode, REGNO (operands[0]))));
2603 }
2604 else
2605 {
2606 rtx aligned_mem, bitnum;
2607 rtx scratch = gen_reg_rtx (SImode);
2608
2609 get_aligned_mem (operands[1], &aligned_mem, &bitnum);
2610
2611 emit_insn ((mode == QImode
2612 ? gen_aligned_loadqi
2613 : gen_aligned_loadhi)
2614 (operands[0], aligned_mem, bitnum, scratch));
2615 }
2616 }
2617 else
2618 {
2619 /* Don't pass these as parameters since that makes the generated
2620 code depend on parameter evaluation order which will cause
2621 bootstrap failures. */
2622
2623 rtx temp1 = gen_reg_rtx (DImode);
2624 rtx temp2 = gen_reg_rtx (DImode);
2625 rtx seq = ((mode == QImode
2626 ? gen_unaligned_loadqi
2627 : gen_unaligned_loadhi)
2628 (operands[0], get_unaligned_address (operands[1], 0),
2629 temp1, temp2));
2630
2631 alpha_set_memflags (seq, operands[1]);
2632 emit_insn (seq);
2633 }
2634 return true;
2635 }
2636
2637 if (GET_CODE (operands[0]) == MEM
2638 || (GET_CODE (operands[0]) == SUBREG
2639 && GET_CODE (SUBREG_REG (operands[0])) == MEM)
2640 || (reload_in_progress && GET_CODE (operands[0]) == REG
2641 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
2642 || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
2643 && GET_CODE (SUBREG_REG (operands[0])) == REG
2644 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER))
2645 {
2646 if (aligned_memory_operand (operands[0], mode))
2647 {
2648 rtx aligned_mem, bitnum;
2649 rtx temp1 = gen_reg_rtx (SImode);
2650 rtx temp2 = gen_reg_rtx (SImode);
2651
2652 get_aligned_mem (operands[0], &aligned_mem, &bitnum);
2653
2654 emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
2655 temp1, temp2));
2656 }
2657 else
2658 {
2659 rtx temp1 = gen_reg_rtx (DImode);
2660 rtx temp2 = gen_reg_rtx (DImode);
2661 rtx temp3 = gen_reg_rtx (DImode);
2662 rtx seq = ((mode == QImode
2663 ? gen_unaligned_storeqi
2664 : gen_unaligned_storehi)
2665 (get_unaligned_address (operands[0], 0),
2666 operands[1], temp1, temp2, temp3));
2667
2668 alpha_set_memflags (seq, operands[0]);
2669 emit_insn (seq);
2670 }
2671 return true;
2672 }
2673
2674 return false;
2675}
2676
/* Generate an unsigned DImode to FP conversion.  This is the same code
   optabs would emit if we didn't have TFmode patterns.

   For SFmode, this is the only construction I've found that can pass
   gcc.c-torture/execute/ieee/rbug.c.  No scenario that uses DFmode
   intermediates will work, because you'll get intermediate rounding
   that ruins the end result.  Some of this could be fixed by turning
   on round-to-positive-infinity, but that requires diddling the fpsr,
   which kills performance.  I tried turning this around and converting
   to a negative number, so that I could turn on /m, but either I did
   it wrong or there's something else cause I wound up with the exact
   same single-bit error.  There is a branch-less form of this same code:

	srl     $16,1,$1
	and     $16,1,$2
	cmplt   $16,0,$3
	or      $1,$2,$2
	cmovge  $16,$16,$2
	itoft	$3,$f10
	itoft	$2,$f11
	cvtqs   $f11,$f11
	adds    $f11,$f11,$f0
	fcmoveq $f10,$f11,$f0

   I'm not using it because it's the same number of instructions as
   this branch-full form, and it has more serialized long latency
   instructions on the critical path.

   For DFmode, we can avoid rounding errors by breaking up the word
   into two pieces, converting them separately, and adding them back:

	LC0: .long 0,0x5f800000

	itoft	$16,$f11
	lda	$2,LC0
	cmplt	$16,0,$1
	cpyse	$f11,$f31,$f10
	cpyse	$f31,$f11,$f11
	s4addq	$1,$2,$1
	lds	$f12,0($1)
	cvtqt	$f10,$f10
	cvtqt	$f11,$f11
	addt	$f12,$f10,$f0
	addt	$f0,$f11,$f0

   This doesn't seem to be a clear-cut win over the optabs form.
   It probably all depends on the distribution of numbers being
   converted -- in the optabs form, all but high-bit-set has a
   much lower minimum execution time.  */

void
alpha_emit_floatuns (operands)
     rtx operands[2];
{
  rtx neglab, donelab, i0, i1, f0, in, out;
  enum machine_mode mode;

  out = operands[0];
  in = force_reg (DImode, operands[1]);
  mode = GET_MODE (out);
  neglab = gen_label_rtx ();
  donelab = gen_label_rtx ();
  i0 = gen_reg_rtx (DImode);
  i1 = gen_reg_rtx (DImode);
  f0 = gen_reg_rtx (mode);

  /* If IN is non-negative, a plain signed conversion is exact.  */
  emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);

  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
  emit_jump_insn (gen_jump (donelab));
  emit_barrier ();

  emit_label (neglab);

  /* Negative (i.e. high-bit-set) case: halve with the low bit ORed
     back in (so rounding is unaffected), convert, then double.  */
  emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
  emit_insn (gen_anddi3 (i1, in, const1_rtx));
  emit_insn (gen_iordi3 (i0, i0, i1));
  emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));

  emit_label (donelab);
}
2759
/* Generate the comparison for a conditional branch.  Operands are
   taken from the global alpha_compare, which is cleared on exit.
   Returns the comparison rtx to be used in the branch pattern.  */

rtx
alpha_emit_conditional_branch (code)
     enum rtx_code code;
{
  enum rtx_code cmp_code, branch_code;
  enum machine_mode cmp_mode, branch_mode = VOIDmode;
  rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
  rtx tem;

  if (alpha_compare.fp_p && GET_MODE (op0) == TFmode)
    {
      if (! TARGET_HAS_XFLOATING_LIBS)
	abort ();

      /* X_floating library comparison functions return
	   -1 unordered
	    0 false
	    1 true
	 Convert the compare against the raw return value.  */

      if (code == UNORDERED || code == ORDERED)
	cmp_code = EQ;
      else
	cmp_code = code;

      op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
      op1 = const0_rtx;
      alpha_compare.fp_p = 0;

      if (code == UNORDERED)
	code = LT;
      else if (code == ORDERED)
	code = GE;
      else
	code = GT;
    }

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares: */
      cmp_code = code, branch_code = NE;
      break;

    case NE:
    case ORDERED:
      /* These must be reversed.  */
      cmp_code = reverse_condition (code), branch_code = EQ;
      break;

    case GE:  case GT:  case GEU:  case GTU:
      /* For FP, we swap them, for INT, we reverse them.  */
      if (alpha_compare.fp_p)
	{
	  cmp_code = swap_condition (code);
	  branch_code = NE;
	  tem = op0, op0 = op1, op1 = tem;
	}
      else
	{
	  cmp_code = reverse_condition (code);
	  branch_code = EQ;
	}
      break;

    default:
      abort ();
    }

  if (alpha_compare.fp_p)
    {
      cmp_mode = DFmode;
      if (flag_unsafe_math_optimizations)
	{
	  /* When we are not as concerned about non-finite values, and we
	     are comparing against zero, we can branch directly.  */
	  if (op1 == CONST0_RTX (DFmode))
	    cmp_code = NIL, branch_code = code;
	  else if (op0 == CONST0_RTX (DFmode))
	    {
	      /* Undo the swap we probably did just above.  */
	      tem = op0, op0 = op1, op1 = tem;
	      branch_code = swap_condition (cmp_code);
	      cmp_code = NIL;
	    }
	}
      else
	{
	  /* ??? We mark the branch mode to be CCmode to prevent the
	     compare and branch from being combined, since the compare
	     insn follows IEEE rules that the branch does not.  */
	  branch_mode = CCmode;
	}
    }
  else
    {
      cmp_mode = DImode;

      /* The following optimizations are only for signed compares.  */
      if (code != LEU && code != LTU && code != GEU && code != GTU)
	{
	  /* Whee.  Compare and branch against 0 directly.  */
	  if (op1 == const0_rtx)
	    cmp_code = NIL, branch_code = code;

	  /* We want to use cmpcc/bcc when we can, since there is a zero delay
	     bypass between logicals and br/cmov on EV5.  But we don't want to
	     force valid immediate constants into registers needlessly.  */
	  else if (GET_CODE (op1) == CONST_INT)
	    {
	      HOST_WIDE_INT v = INTVAL (op1), n = -v;

	      if (! CONST_OK_FOR_LETTER_P (v, 'I')
		  && (CONST_OK_FOR_LETTER_P (n, 'K')
		      || CONST_OK_FOR_LETTER_P (n, 'L')))
		{
		  cmp_code = PLUS, branch_code = code;
		  op1 = GEN_INT (n);
		}
	    }
	}

      if (!reg_or_0_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (cmp_code != PLUS && !reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  tem = op0;
  if (cmp_code != NIL)
    {
      tem = gen_reg_rtx (cmp_mode);
      emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
    }

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, branch_mode, tem, CONST0_RTX (cmp_mode));
}
2907
/* Certain simplifications can be done to make invalid setcc operations
   valid.  Return the final comparison, or NULL if we can't work.
   Operands come from the global alpha_compare, which is cleared
   immediately on entry.  */

rtx
alpha_emit_setcc (code)
     enum rtx_code code;
{
  enum rtx_code cmp_code;
  rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
  int fp_p = alpha_compare.fp_p;
  rtx tmp;

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  if (fp_p && GET_MODE (op0) == TFmode)
    {
      if (! TARGET_HAS_XFLOATING_LIBS)
	abort ();

      /* X_floating library comparison functions return
	   -1 unordered
	    0 false
	    1 true
	 Convert the compare against the raw return value.  */

      if (code == UNORDERED || code == ORDERED)
	cmp_code = EQ;
      else
	cmp_code = code;

      op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
      op1 = const0_rtx;
      fp_p = 0;

      if (code == UNORDERED)
	code = LT;
      else if (code == ORDERED)
	code = GE;
      else
	code = GT;
    }

  /* FP setcc requires the fp<->int move insns of TARGET_FIX.  */
  if (fp_p && !TARGET_FIX)
    return NULL_RTX;

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */

  cmp_code = NIL;
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares.  */
      if (fp_p)
	cmp_code = code, code = NE;
      break;

    case NE:
      if (!fp_p && op1 == const0_rtx)
	break;
      /* FALLTHRU */

    case ORDERED:
      cmp_code = reverse_condition (code);
      code = EQ;
      break;

    case GE:  case GT: case GEU:  case GTU:
      /* These normally need swapping, but for integer zero we have
	 special patterns that recognize swapped operands.  */
      if (!fp_p && op1 == const0_rtx)
	break;
      code = swap_condition (code);
      if (fp_p)
	cmp_code = code, code = NE;
      tmp = op0, op0 = op1, op1 = tmp;
      break;

    default:
      abort ();
    }

  if (!fp_p)
    {
      if (!register_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (!reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  if (cmp_code != NIL)
    {
      enum machine_mode mode = fp_p ? DFmode : DImode;

      tmp = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, tmp,
			      gen_rtx_fmt_ee (cmp_code, mode, op0, op1)));

      /* The FP compare result lives in an FP register; view it as an
	 integer for the final setcc against zero.  */
      op0 = fp_p ? gen_lowpart (DImode, tmp) : tmp;
      op1 = const0_rtx;
    }

  /* Return the setcc comparison.  */
  return gen_rtx_fmt_ee (code, DImode, op0, op1);
}
3016
f283421d 3017
758d2c0c
RK
3018/* Rewrite a comparison against zero CMP of the form
3019 (CODE (cc0) (const_int 0)) so it can be written validly in
3020 a conditional move (if_then_else CMP ...).
3021 If both of the operands that set cc0 are non-zero we must emit
3022 an insn to perform the compare (it can't be done within
285a5742 3023 the conditional move). */
758d2c0c
RK
3024rtx
3025alpha_emit_conditional_move (cmp, mode)
3026 rtx cmp;
3027 enum machine_mode mode;
3028{
1ad2a62d 3029 enum rtx_code code = GET_CODE (cmp);
89b7c471 3030 enum rtx_code cmov_code = NE;
6db21c7f
RH
3031 rtx op0 = alpha_compare.op0;
3032 rtx op1 = alpha_compare.op1;
3033 int fp_p = alpha_compare.fp_p;
1ad2a62d
RK
3034 enum machine_mode cmp_mode
3035 = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
6db21c7f 3036 enum machine_mode cmp_op_mode = fp_p ? DFmode : DImode;
f283421d 3037 enum machine_mode cmov_mode = VOIDmode;
de6c5979 3038 int local_fast_math = flag_unsafe_math_optimizations;
1ad2a62d 3039 rtx tem;
758d2c0c 3040
6db21c7f
RH
3041 /* Zero the operands. */
3042 memset (&alpha_compare, 0, sizeof (alpha_compare));
3043
3044 if (fp_p != FLOAT_MODE_P (mode))
09fe1c49
RH
3045 {
3046 enum rtx_code cmp_code;
3047
3048 if (! TARGET_FIX)
3049 return 0;
3050
3051 /* If we have fp<->int register move instructions, do a cmov by
3052 performing the comparison in fp registers, and move the
3053 zero/non-zero value to integer registers, where we can then
3054 use a normal cmov, or vice-versa. */
3055
3056 switch (code)
3057 {
3058 case EQ: case LE: case LT: case LEU: case LTU:
3059 /* We have these compares. */
3060 cmp_code = code, code = NE;
3061 break;
3062
3063 case NE:
3064 /* This must be reversed. */
3065 cmp_code = EQ, code = EQ;
3066 break;
3067
3068 case GE: case GT: case GEU: case GTU:
56f19d92
RH
3069 /* These normally need swapping, but for integer zero we have
3070 special patterns that recognize swapped operands. */
3071 if (!fp_p && op1 == const0_rtx)
c53f9f5b
RH
3072 cmp_code = code, code = NE;
3073 else
3074 {
3075 cmp_code = swap_condition (code);
3076 code = NE;
3077 tem = op0, op0 = op1, op1 = tem;
3078 }
09fe1c49
RH
3079 break;
3080
3081 default:
3082 abort ();
3083 }
3084
3085 tem = gen_reg_rtx (cmp_op_mode);
3086 emit_insn (gen_rtx_SET (VOIDmode, tem,
3087 gen_rtx_fmt_ee (cmp_code, cmp_op_mode,
3088 op0, op1)));
3089
3090 cmp_mode = cmp_op_mode = fp_p ? DImode : DFmode;
3091 op0 = gen_lowpart (cmp_op_mode, tem);
3092 op1 = CONST0_RTX (cmp_op_mode);
3093 fp_p = !fp_p;
3094 local_fast_math = 1;
3095 }
758d2c0c
RK
3096
3097 /* We may be able to use a conditional move directly.
285a5742 3098 This avoids emitting spurious compares. */
01b9e84e 3099 if (signed_comparison_operator (cmp, VOIDmode)
09fe1c49 3100 && (!fp_p || local_fast_math)
1ad2a62d 3101 && (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
38a448ca 3102 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
758d2c0c 3103
000ca373 3104 /* We can't put the comparison inside the conditional move;
758d2c0c 3105 emit a compare instruction and put that inside the
1ad2a62d
RK
3106 conditional move. Make sure we emit only comparisons we have;
3107 swap or reverse as necessary. */
758d2c0c 3108
000ca373
RH
3109 if (no_new_pseudos)
3110 return NULL_RTX;
3111
758d2c0c
RK
3112 switch (code)
3113 {
1ad2a62d
RK
3114 case EQ: case LE: case LT: case LEU: case LTU:
3115 /* We have these compares: */
758d2c0c 3116 break;
1ad2a62d 3117
758d2c0c 3118 case NE:
285a5742 3119 /* This must be reversed. */
1ad2a62d 3120 code = reverse_condition (code);
89b7c471 3121 cmov_code = EQ;
758d2c0c 3122 break;
1ad2a62d
RK
3123
3124 case GE: case GT: case GEU: case GTU:
9e495700 3125 /* These must be swapped. */
c53f9f5b
RH
3126 if (op1 != CONST0_RTX (cmp_mode))
3127 {
3128 code = swap_condition (code);
3129 tem = op0, op0 = op1, op1 = tem;
3130 }
758d2c0c 3131 break;
1ad2a62d 3132
758d2c0c 3133 default:
1ad2a62d 3134 abort ();
758d2c0c
RK
3135 }
3136
9e495700
RH
3137 if (!fp_p)
3138 {
3139 if (!reg_or_0_operand (op0, DImode))
3140 op0 = force_reg (DImode, op0);
3141 if (!reg_or_8bit_operand (op1, DImode))
3142 op1 = force_reg (DImode, op1);
3143 }
3144
68aed21b 3145 /* ??? We mark the branch mode to be CCmode to prevent the compare
f283421d
RH
3146 and cmov from being combined, since the compare insn follows IEEE
3147 rules that the cmov does not. */
09fe1c49 3148 if (fp_p && !local_fast_math)
f283421d
RH
3149 cmov_mode = CCmode;
3150
1ad2a62d 3151 tem = gen_reg_rtx (cmp_op_mode);
38a448ca 3152 emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1));
f283421d 3153 return gen_rtx_fmt_ee (cmov_code, cmov_mode, tem, CONST0_RTX (cmp_op_mode));
758d2c0c 3154}
8f4773ea
RH
3155
3156/* Simplify a conditional move of two constants into a setcc with
3157 arithmetic. This is done with a splitter since combine would
3158 just undo the work if done during code generation. It also catches
3159 cases we wouldn't have before cse. */
3160
3161int
3162alpha_split_conditional_move (code, dest, cond, t_rtx, f_rtx)
3163 enum rtx_code code;
3164 rtx dest, cond, t_rtx, f_rtx;
3165{
3166 HOST_WIDE_INT t, f, diff;
3167 enum machine_mode mode;
3168 rtx target, subtarget, tmp;
3169
3170 mode = GET_MODE (dest);
3171 t = INTVAL (t_rtx);
3172 f = INTVAL (f_rtx);
3173 diff = t - f;
3174
3175 if (((code == NE || code == EQ) && diff < 0)
3176 || (code == GE || code == GT))
3177 {
3178 code = reverse_condition (code);
3179 diff = t, t = f, f = diff;
3180 diff = t - f;
3181 }
3182
3183 subtarget = target = dest;
3184 if (mode != DImode)
3185 {
3186 target = gen_lowpart (DImode, dest);
3187 if (! no_new_pseudos)
3188 subtarget = gen_reg_rtx (DImode);
3189 else
3190 subtarget = target;
3191 }
a5376276
RH
3192 /* Below, we must be careful to use copy_rtx on target and subtarget
3193 in intermediate insns, as they may be a subreg rtx, which may not
3194 be shared. */
8f4773ea
RH
3195
3196 if (f == 0 && exact_log2 (diff) > 0
3197 /* On EV6, we've got enough shifters to make non-arithmatic shifts
3198 viable over a longer latency cmove. On EV5, the E0 slot is a
285a5742 3199 scarce resource, and on EV4 shift has the same latency as a cmove. */
8f4773ea
RH
3200 && (diff <= 8 || alpha_cpu == PROCESSOR_EV6))
3201 {
3202 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
a5376276 3203 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
8f4773ea 3204
a5376276
RH
3205 tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget),
3206 GEN_INT (exact_log2 (t)));
8f4773ea
RH
3207 emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
3208 }
3209 else if (f == 0 && t == -1)
3210 {
3211 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
a5376276 3212 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
8f4773ea 3213
a5376276 3214 emit_insn (gen_negdi2 (target, copy_rtx (subtarget)));
8f4773ea
RH
3215 }
3216 else if (diff == 1 || diff == 4 || diff == 8)
3217 {
3218 rtx add_op;
3219
3220 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
a5376276 3221 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
8f4773ea
RH
3222
3223 if (diff == 1)
a5376276 3224 emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f)));
8f4773ea
RH
3225 else
3226 {
3227 add_op = GEN_INT (f);
3228 if (sext_add_operand (add_op, mode))
3229 {
a5376276
RH
3230 tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget),
3231 GEN_INT (diff));
8f4773ea
RH
3232 tmp = gen_rtx_PLUS (DImode, tmp, add_op);
3233 emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
3234 }
3235 else
3236 return 0;
3237 }
3238 }
3239 else
3240 return 0;
3241
3242 return 1;
3243}
6c174fc0 3244\f
5495cc55
RH
3245/* Look up the function X_floating library function name for the
3246 given operation. */
3247
3248static const char *
3249alpha_lookup_xfloating_lib_func (code)
3250 enum rtx_code code;
3251{
3252 struct xfloating_op
3253 {
8b60264b
KG
3254 const enum rtx_code code;
3255 const char *const func;
5495cc55
RH
3256 };
3257
3258 static const struct xfloating_op vms_xfloating_ops[] =
3259 {
3260 { PLUS, "OTS$ADD_X" },
3261 { MINUS, "OTS$SUB_X" },
3262 { MULT, "OTS$MUL_X" },
3263 { DIV, "OTS$DIV_X" },
3264 { EQ, "OTS$EQL_X" },
3265 { NE, "OTS$NEQ_X" },
3266 { LT, "OTS$LSS_X" },
3267 { LE, "OTS$LEQ_X" },
3268 { GT, "OTS$GTR_X" },
3269 { GE, "OTS$GEQ_X" },
3270 { FIX, "OTS$CVTXQ" },
3271 { FLOAT, "OTS$CVTQX" },
3272 { UNSIGNED_FLOAT, "OTS$CVTQUX" },
3273 { FLOAT_EXTEND, "OTS$CVT_FLOAT_T_X" },
3274 { FLOAT_TRUNCATE, "OTS$CVT_FLOAT_X_T" },
3275 };
3276
3277 static const struct xfloating_op osf_xfloating_ops[] =
3278 {
3279 { PLUS, "_OtsAddX" },
3280 { MINUS, "_OtsSubX" },
3281 { MULT, "_OtsMulX" },
3282 { DIV, "_OtsDivX" },
3283 { EQ, "_OtsEqlX" },
3284 { NE, "_OtsNeqX" },
3285 { LT, "_OtsLssX" },
3286 { LE, "_OtsLeqX" },
3287 { GT, "_OtsGtrX" },
3288 { GE, "_OtsGeqX" },
3289 { FIX, "_OtsCvtXQ" },
3290 { FLOAT, "_OtsCvtQX" },
3291 { UNSIGNED_FLOAT, "_OtsCvtQUX" },
3292 { FLOAT_EXTEND, "_OtsConvertFloatTX" },
3293 { FLOAT_TRUNCATE, "_OtsConvertFloatXT" },
3294 };
3295
3296 const struct xfloating_op *ops;
b6a1cbae 3297 const long n = ARRAY_SIZE (osf_xfloating_ops);
5495cc55
RH
3298 long i;
3299
3300 /* How irritating. Nothing to key off for the table. Hardcode
3301 knowledge of the G_floating routines. */
3302 if (TARGET_FLOAT_VAX)
3303 {
be7b80f4 3304 if (TARGET_ABI_OPEN_VMS)
5495cc55
RH
3305 {
3306 if (code == FLOAT_EXTEND)
3307 return "OTS$CVT_FLOAT_G_X";
3308 if (code == FLOAT_TRUNCATE)
3309 return "OTS$CVT_FLOAT_X_G";
3310 }
3311 else
3312 {
3313 if (code == FLOAT_EXTEND)
3314 return "_OtsConvertFloatGX";
3315 if (code == FLOAT_TRUNCATE)
3316 return "_OtsConvertFloatXG";
3317 }
3318 }
3319
be7b80f4 3320 if (TARGET_ABI_OPEN_VMS)
5495cc55
RH
3321 ops = vms_xfloating_ops;
3322 else
3323 ops = osf_xfloating_ops;
3324
3325 for (i = 0; i < n; ++i)
3326 if (ops[i].code == code)
3327 return ops[i].func;
3328
3329 abort();
3330}
3331
3332/* Most X_floating operations take the rounding mode as an argument.
3333 Compute that here. */
3334
3335static int
3336alpha_compute_xfloating_mode_arg (code, round)
3337 enum rtx_code code;
3338 enum alpha_fp_rounding_mode round;
3339{
3340 int mode;
3341
3342 switch (round)
3343 {
3344 case ALPHA_FPRM_NORM:
3345 mode = 2;
3346 break;
3347 case ALPHA_FPRM_MINF:
3348 mode = 1;
3349 break;
3350 case ALPHA_FPRM_CHOP:
3351 mode = 0;
3352 break;
3353 case ALPHA_FPRM_DYN:
3354 mode = 4;
3355 break;
3356 default:
3357 abort ();
3358
3359 /* XXX For reference, round to +inf is mode = 3. */
3360 }
3361
3362 if (code == FLOAT_TRUNCATE && alpha_fptm == ALPHA_FPTM_N)
3363 mode |= 0x10000;
3364
3365 return mode;
3366}
3367
3368/* Emit an X_floating library function call.
3369
3370 Note that these functions do not follow normal calling conventions:
3371 TFmode arguments are passed in two integer registers (as opposed to
3372 indirect); TFmode return values appear in R16+R17.
3373
3374 FUNC is the function name to call.
3375 TARGET is where the output belongs.
3376 OPERANDS are the inputs.
3377 NOPERANDS is the count of inputs.
3378 EQUIV is the expression equivalent for the function.
3379*/
3380
3381static void
3382alpha_emit_xfloating_libcall (func, target, operands, noperands, equiv)
3383 const char *func;
3384 rtx target;
3385 rtx operands[];
3386 int noperands;
3387 rtx equiv;
3388{
3389 rtx usage = NULL_RTX, tmp, reg;
3390 int regno = 16, i;
3391
3392 start_sequence ();
3393
3394 for (i = 0; i < noperands; ++i)
3395 {
3396 switch (GET_MODE (operands[i]))
3397 {
3398 case TFmode:
3399 reg = gen_rtx_REG (TFmode, regno);
3400 regno += 2;
3401 break;
3402
3403 case DFmode:
3404 reg = gen_rtx_REG (DFmode, regno + 32);
3405 regno += 1;
3406 break;
3407
3408 case VOIDmode:
3409 if (GET_CODE (operands[i]) != CONST_INT)
3410 abort ();
3411 /* FALLTHRU */
3412 case DImode:
3413 reg = gen_rtx_REG (DImode, regno);
3414 regno += 1;
3415 break;
3416
3417 default:
3418 abort ();
3419 }
3420
3421 emit_move_insn (reg, operands[i]);
3422 usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage);
3423 }
3424
3425 switch (GET_MODE (target))
3426 {
3427 case TFmode:
3428 reg = gen_rtx_REG (TFmode, 16);
3429 break;
3430 case DFmode:
3431 reg = gen_rtx_REG (DFmode, 32);
3432 break;
3433 case DImode:
3434 reg = gen_rtx_REG (DImode, 0);
3435 break;
3436 default:
3437 abort ();
3438 }
3439
c8d8ed65 3440 tmp = gen_rtx_MEM (QImode, gen_rtx_SYMBOL_REF (Pmode, (char *) func));
0499c2e4 3441 tmp = emit_call_insn (GEN_CALL_VALUE (reg, tmp, const0_rtx,
5495cc55
RH
3442 const0_rtx, const0_rtx));
3443 CALL_INSN_FUNCTION_USAGE (tmp) = usage;
3444
3445 tmp = get_insns ();
3446 end_sequence ();
3447
3448 emit_libcall_block (tmp, target, reg, equiv);
3449}
3450
3451/* Emit an X_floating library function call for arithmetic (+,-,*,/). */
3452
3453void
3454alpha_emit_xfloating_arith (code, operands)
3455 enum rtx_code code;
3456 rtx operands[];
3457{
3458 const char *func;
3459 int mode;
c77f46c6 3460 rtx out_operands[3];
5495cc55
RH
3461
3462 func = alpha_lookup_xfloating_lib_func (code);
3463 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
3464
c77f46c6
AO
3465 out_operands[0] = operands[1];
3466 out_operands[1] = operands[2];
3467 out_operands[2] = GEN_INT (mode);
3468 alpha_emit_xfloating_libcall (func, operands[0], out_operands, 3,
5495cc55
RH
3469 gen_rtx_fmt_ee (code, TFmode, operands[1],
3470 operands[2]));
3471}
3472
3473/* Emit an X_floating library function call for a comparison. */
3474
3475static rtx
3476alpha_emit_xfloating_compare (code, op0, op1)
3477 enum rtx_code code;
3478 rtx op0, op1;
3479{
3480 const char *func;
3481 rtx out, operands[2];
3482
3483 func = alpha_lookup_xfloating_lib_func (code);
3484
3485 operands[0] = op0;
3486 operands[1] = op1;
3487 out = gen_reg_rtx (DImode);
3488
b762a0ef
RH
3489 /* ??? Strange mode for equiv because what's actually returned
3490 is -1,0,1, not a proper boolean value. */
3491 alpha_emit_xfloating_libcall (func, out, operands, 2,
3492 gen_rtx_fmt_ee (code, CCmode, op0, op1));
5495cc55
RH
3493
3494 return out;
3495}
3496
3497/* Emit an X_floating library function call for a conversion. */
3498
3499void
3500alpha_emit_xfloating_cvt (code, operands)
3501 enum rtx_code code;
3502 rtx operands[];
3503{
3504 int noperands = 1, mode;
c77f46c6 3505 rtx out_operands[2];
5495cc55
RH
3506 const char *func;
3507
3508 func = alpha_lookup_xfloating_lib_func (code);
3509
c77f46c6
AO
3510 out_operands[0] = operands[1];
3511
5495cc55
RH
3512 switch (code)
3513 {
3514 case FIX:
3515 mode = alpha_compute_xfloating_mode_arg (code, ALPHA_FPRM_CHOP);
c77f46c6 3516 out_operands[1] = GEN_INT (mode);
d6cde845 3517 noperands = 2;
5495cc55
RH
3518 break;
3519 case FLOAT_TRUNCATE:
3520 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
c77f46c6 3521 out_operands[1] = GEN_INT (mode);
d6cde845 3522 noperands = 2;
5495cc55
RH
3523 break;
3524 default:
3525 break;
3526 }
3527
c77f46c6 3528 alpha_emit_xfloating_libcall (func, operands[0], out_operands, noperands,
5495cc55
RH
3529 gen_rtx_fmt_e (code, GET_MODE (operands[0]),
3530 operands[1]));
3531}
628d74de 3532
f940c352
RH
3533/* Split a TFmode OP[1] into DImode OP[2,3] and likewise for
3534 OP[0] into OP[0,1]. Naturally, output operand ordering is
3535 little-endian. */
3536
628d74de
RH
3537void
3538alpha_split_tfmode_pair (operands)
3539 rtx operands[4];
3540{
3541 if (GET_CODE (operands[1]) == REG)
3542 {
3543 operands[3] = gen_rtx_REG (DImode, REGNO (operands[1]) + 1);
3544 operands[2] = gen_rtx_REG (DImode, REGNO (operands[1]));
3545 }
3546 else if (GET_CODE (operands[1]) == MEM)
3547 {
f4ef873c
RK
3548 operands[3] = adjust_address (operands[1], DImode, 8);
3549 operands[2] = adjust_address (operands[1], DImode, 0);
628d74de
RH
3550 }
3551 else if (operands[1] == CONST0_RTX (TFmode))
3552 operands[2] = operands[3] = const0_rtx;
3553 else
3554 abort ();
3555
3556 if (GET_CODE (operands[0]) == REG)
3557 {
3558 operands[1] = gen_rtx_REG (DImode, REGNO (operands[0]) + 1);
3559 operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
3560 }
3561 else if (GET_CODE (operands[0]) == MEM)
3562 {
f4ef873c
RK
3563 operands[1] = adjust_address (operands[0], DImode, 8);
3564 operands[0] = adjust_address (operands[0], DImode, 0);
628d74de
RH
3565 }
3566 else
3567 abort ();
3568}
f940c352
RH
3569
3570/* Implement negtf2 or abstf2. Op0 is destination, op1 is source,
3571 op2 is a register containing the sign bit, operation is the
3572 logical operation to be performed. */
3573
3574void
3575alpha_split_tfmode_frobsign (operands, operation)
3576 rtx operands[3];
3577 rtx (*operation) PARAMS ((rtx, rtx, rtx));
3578{
3579 rtx high_bit = operands[2];
3580 rtx scratch;
3581 int move;
3582
3583 alpha_split_tfmode_pair (operands);
3584
3585 /* Detect three flavours of operand overlap. */
3586 move = 1;
3587 if (rtx_equal_p (operands[0], operands[2]))
3588 move = 0;
3589 else if (rtx_equal_p (operands[1], operands[2]))
3590 {
3591 if (rtx_equal_p (operands[0], high_bit))
3592 move = 2;
3593 else
3594 move = -1;
3595 }
3596
3597 if (move < 0)
3598 emit_move_insn (operands[0], operands[2]);
3599
3600 /* ??? If the destination overlaps both source tf and high_bit, then
3601 assume source tf is dead in its entirety and use the other half
3602 for a scratch register. Otherwise "scratch" is just the proper
3603 destination register. */
3604 scratch = operands[move < 2 ? 1 : 3];
3605
3606 emit_insn ((*operation) (scratch, high_bit, operands[3]));
3607
3608 if (move > 0)
3609 {
3610 emit_move_insn (operands[0], operands[2]);
3611 if (move > 1)
3612 emit_move_insn (operands[1], scratch);
3613 }
3614}
5495cc55 3615\f
6c174fc0
RH
3616/* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
3617 unaligned data:
3618
3619 unsigned: signed:
3620 word: ldq_u r1,X(r11) ldq_u r1,X(r11)
3621 ldq_u r2,X+1(r11) ldq_u r2,X+1(r11)
3622 lda r3,X(r11) lda r3,X+2(r11)
3623 extwl r1,r3,r1 extql r1,r3,r1
3624 extwh r2,r3,r2 extqh r2,r3,r2
3625 or r1.r2.r1 or r1,r2,r1
3626 sra r1,48,r1
3627
3628 long: ldq_u r1,X(r11) ldq_u r1,X(r11)
3629 ldq_u r2,X+3(r11) ldq_u r2,X+3(r11)
3630 lda r3,X(r11) lda r3,X(r11)
3631 extll r1,r3,r1 extll r1,r3,r1
3632 extlh r2,r3,r2 extlh r2,r3,r2
3633 or r1.r2.r1 addl r1,r2,r1
3634
3635 quad: ldq_u r1,X(r11)
3636 ldq_u r2,X+7(r11)
3637 lda r3,X(r11)
3638 extql r1,r3,r1
3639 extqh r2,r3,r2
3640 or r1.r2.r1
3641*/
3642
3643void
3644alpha_expand_unaligned_load (tgt, mem, size, ofs, sign)
3645 rtx tgt, mem;
3646 HOST_WIDE_INT size, ofs;
3647 int sign;
3648{
1eb356b9 3649 rtx meml, memh, addr, extl, exth, tmp, mema;
4208b40f 3650 enum machine_mode mode;
6c174fc0
RH
3651
3652 meml = gen_reg_rtx (DImode);
3653 memh = gen_reg_rtx (DImode);
3654 addr = gen_reg_rtx (DImode);
3655 extl = gen_reg_rtx (DImode);
3656 exth = gen_reg_rtx (DImode);
3657
1eb356b9
RH
3658 mema = XEXP (mem, 0);
3659 if (GET_CODE (mema) == LO_SUM)
3660 mema = force_reg (Pmode, mema);
3661
e01acbb1
RH
3662 /* AND addresses cannot be in any alias set, since they may implicitly
3663 alias surrounding code. Ideally we'd have some alias set that
3664 covered all types except those with alignment 8 or higher. */
3665
3666 tmp = change_address (mem, DImode,
3667 gen_rtx_AND (DImode,
1eb356b9 3668 plus_constant (mema, ofs),
e01acbb1 3669 GEN_INT (-8)));
ba4828e0 3670 set_mem_alias_set (tmp, 0);
e01acbb1
RH
3671 emit_move_insn (meml, tmp);
3672
3673 tmp = change_address (mem, DImode,
3674 gen_rtx_AND (DImode,
1eb356b9 3675 plus_constant (mema, ofs + size - 1),
e01acbb1 3676 GEN_INT (-8)));
ba4828e0 3677 set_mem_alias_set (tmp, 0);
e01acbb1 3678 emit_move_insn (memh, tmp);
6c174fc0 3679
30102605
RH
3680 if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4))
3681 {
3682 emit_move_insn (addr, plus_constant (mema, -1));
3683
3684 emit_insn (gen_extqh_be (extl, meml, addr));
3685 emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr));
3686
3687 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
3688 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
3689 addr, 1, OPTAB_WIDEN);
3690 }
3691 else if (sign && size == 2)
6c174fc0 3692 {
1eb356b9 3693 emit_move_insn (addr, plus_constant (mema, ofs+2));
6c174fc0 3694
30102605
RH
3695 emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
3696 emit_insn (gen_extqh_le (exth, memh, addr));
6c174fc0 3697
1a7cb241
JW
3698 /* We must use tgt here for the target. Alpha-vms port fails if we use
3699 addr for the target, because addr is marked as a pointer and combine
3700 knows that pointers are always sign-extended 32 bit values. */
3701 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
4208b40f
RH
3702 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
3703 addr, 1, OPTAB_WIDEN);
6c174fc0 3704 }
4208b40f 3705 else
6c174fc0 3706 {
30102605 3707 if (WORDS_BIG_ENDIAN)
4208b40f 3708 {
30102605
RH
3709 emit_move_insn (addr, plus_constant (mema, ofs+size-1));
3710 switch ((int) size)
3711 {
3712 case 2:
3713 emit_insn (gen_extwh_be (extl, meml, addr));
3714 mode = HImode;
3715 break;
6c174fc0 3716
30102605
RH
3717 case 4:
3718 emit_insn (gen_extlh_be (extl, meml, addr));
3719 mode = SImode;
3720 break;
6c174fc0 3721
30102605
RH
3722 case 8:
3723 emit_insn (gen_extqh_be (extl, meml, addr));
3724 mode = DImode;
3725 break;
5495cc55 3726
30102605
RH
3727 default:
3728 abort ();
3729 }
3730 emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
3731 }
3732 else
3733 {
3734 emit_move_insn (addr, plus_constant (mema, ofs));
3735 emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
3736 switch ((int) size)
3737 {
3738 case 2:
3739 emit_insn (gen_extwh_le (exth, memh, addr));
3740 mode = HImode;
3741 break;
3742
3743 case 4:
3744 emit_insn (gen_extlh_le (exth, memh, addr));
3745 mode = SImode;
3746 break;
3747
3748 case 8:
3749 emit_insn (gen_extqh_le (exth, memh, addr));
3750 mode = DImode;
3751 break;
3752
3753 default:
3754 abort();
3755 }
4208b40f
RH
3756 }
3757
3758 addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
3759 gen_lowpart (mode, exth), gen_lowpart (mode, tgt),
3760 sign, OPTAB_WIDEN);
6c174fc0
RH
3761 }
3762
4208b40f
RH
3763 if (addr != tgt)
3764 emit_move_insn (tgt, gen_lowpart(GET_MODE (tgt), addr));
6c174fc0
RH
3765}
3766
3767/* Similarly, use ins and msk instructions to perform unaligned stores. */
3768
3769void
3770alpha_expand_unaligned_store (dst, src, size, ofs)
3771 rtx dst, src;
3772 HOST_WIDE_INT size, ofs;
3773{
1eb356b9 3774 rtx dstl, dsth, addr, insl, insh, meml, memh, dsta;
6c174fc0
RH
3775
3776 dstl = gen_reg_rtx (DImode);
3777 dsth = gen_reg_rtx (DImode);
3778 insl = gen_reg_rtx (DImode);
3779 insh = gen_reg_rtx (DImode);
3780
1eb356b9
RH
3781 dsta = XEXP (dst, 0);
3782 if (GET_CODE (dsta) == LO_SUM)
3783 dsta = force_reg (Pmode, dsta);
3784
e01acbb1
RH
3785 /* AND addresses cannot be in any alias set, since they may implicitly
3786 alias surrounding code. Ideally we'd have some alias set that
3787 covered all types except those with alignment 8 or higher. */
3788
6c174fc0 3789 meml = change_address (dst, DImode,
38a448ca 3790 gen_rtx_AND (DImode,
1eb356b9 3791 plus_constant (dsta, ofs),
38a448ca 3792 GEN_INT (-8)));
ba4828e0 3793 set_mem_alias_set (meml, 0);
e01acbb1 3794
6c174fc0 3795 memh = change_address (dst, DImode,
38a448ca 3796 gen_rtx_AND (DImode,
1eb356b9 3797 plus_constant (dsta, ofs + size - 1),
38a448ca 3798 GEN_INT (-8)));
ba4828e0 3799 set_mem_alias_set (memh, 0);
6c174fc0
RH
3800
3801 emit_move_insn (dsth, memh);
3802 emit_move_insn (dstl, meml);
30102605 3803 if (WORDS_BIG_ENDIAN)
6c174fc0 3804 {
30102605
RH
3805 addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1));
3806
3807 if (src != const0_rtx)
3808 {
3809 switch ((int) size)
3810 {
3811 case 2:
3812 emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
3813 break;
3814 case 4:
3815 emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
3816 break;
3817 case 8:
3818 emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
3819 break;
3820 }
3821 emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
3822 GEN_INT (size*8), addr));
3823 }
6c174fc0 3824
c8d8ed65 3825 switch ((int) size)
6c174fc0
RH
3826 {
3827 case 2:
30102605 3828 emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr));
6c174fc0
RH
3829 break;
3830 case 4:
30102605 3831 emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffffffff), addr));
6c174fc0
RH
3832 break;
3833 case 8:
30102605
RH
3834 {
3835#if HOST_BITS_PER_WIDE_INT == 32
3836 rtx msk = immed_double_const (0xffffffff, 0xffffffff, DImode);
3837#else
595b6314 3838 rtx msk = constm1_rtx;
30102605
RH
3839#endif
3840 emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
3841 }
6c174fc0
RH
3842 break;
3843 }
30102605
RH
3844
3845 emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr));
6c174fc0 3846 }
30102605
RH
3847 else
3848 {
3849 addr = copy_addr_to_reg (plus_constant (dsta, ofs));
6c174fc0 3850
30102605
RH
3851 if (src != const0_rtx)
3852 {
3853 emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
3854 GEN_INT (size*8), addr));
6c174fc0 3855
30102605
RH
3856 switch ((int) size)
3857 {
3858 case 2:
3859 emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr));
3860 break;
3861 case 4:
3862 emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr));
3863 break;
3864 case 8:
3865 emit_insn (gen_insql_le (insl, src, addr));
3866 break;
3867 }
3868 }
3869
3870 emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
3871
3872 switch ((int) size)
3873 {
3874 case 2:
3875 emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
3876 break;
3877 case 4:
3878 emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffffffff), addr));
3879 break;
3880 case 8:
3881 {
6c174fc0 3882#if HOST_BITS_PER_WIDE_INT == 32
30102605 3883 rtx msk = immed_double_const (0xffffffff, 0xffffffff, DImode);
6c174fc0 3884#else
595b6314 3885 rtx msk = constm1_rtx;
6c174fc0 3886#endif
30102605
RH
3887 emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
3888 }
3889 break;
3890 }
6c174fc0
RH
3891 }
3892
3893 if (src != const0_rtx)
3894 {
4208b40f
RH
3895 dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN);
3896 dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
6c174fc0 3897 }
30102605
RH
3898
3899 if (WORDS_BIG_ENDIAN)
3900 {
3901 emit_move_insn (meml, dstl);
3902 emit_move_insn (memh, dsth);
3903 }
3904 else
3905 {
3906 /* Must store high before low for degenerate case of aligned. */
3907 emit_move_insn (memh, dsth);
3908 emit_move_insn (meml, dstl);
3909 }
6c174fc0
RH
3910}
3911
4208b40f
RH
3912/* The block move code tries to maximize speed by separating loads and
3913 stores at the expense of register pressure: we load all of the data
3914 before we store it back out. There are two secondary effects worth
3915 mentioning, that this speeds copying to/from aligned and unaligned
3916 buffers, and that it makes the code significantly easier to write. */
6c174fc0 3917
4208b40f
RH
3918#define MAX_MOVE_WORDS 8
3919
3920/* Load an integral number of consecutive unaligned quadwords. */
6c174fc0
RH
3921
3922static void
4208b40f
RH
3923alpha_expand_unaligned_load_words (out_regs, smem, words, ofs)
3924 rtx *out_regs;
3925 rtx smem;
3926 HOST_WIDE_INT words, ofs;
6c174fc0
RH
3927{
3928 rtx const im8 = GEN_INT (-8);
3929 rtx const i64 = GEN_INT (64);
4208b40f 3930 rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
1eb356b9 3931 rtx sreg, areg, tmp, smema;
6c174fc0
RH
3932 HOST_WIDE_INT i;
3933
1eb356b9
RH
3934 smema = XEXP (smem, 0);
3935 if (GET_CODE (smema) == LO_SUM)
3936 smema = force_reg (Pmode, smema);
3937
6c174fc0
RH
3938 /* Generate all the tmp registers we need. */
3939 for (i = 0; i < words; ++i)
4208b40f
RH
3940 {
3941 data_regs[i] = out_regs[i];
3942 ext_tmps[i] = gen_reg_rtx (DImode);
3943 }
3944 data_regs[words] = gen_reg_rtx (DImode);
3945
3946 if (ofs != 0)
f4ef873c 3947 smem = adjust_address (smem, GET_MODE (smem), ofs);
6c174fc0
RH
3948
3949 /* Load up all of the source data. */
3950 for (i = 0; i < words; ++i)
3951 {
e01acbb1
RH
3952 tmp = change_address (smem, DImode,
3953 gen_rtx_AND (DImode,
1eb356b9 3954 plus_constant (smema, 8*i),
e01acbb1 3955 im8));
ba4828e0 3956 set_mem_alias_set (tmp, 0);
e01acbb1 3957 emit_move_insn (data_regs[i], tmp);
6c174fc0 3958 }
e01acbb1
RH
3959
3960 tmp = change_address (smem, DImode,
3961 gen_rtx_AND (DImode,
1eb356b9 3962 plus_constant (smema, 8*words - 1),
e01acbb1 3963 im8));
ba4828e0 3964 set_mem_alias_set (tmp, 0);
e01acbb1 3965 emit_move_insn (data_regs[words], tmp);
6c174fc0
RH
3966
3967 /* Extract the half-word fragments. Unfortunately DEC decided to make
3968 extxh with offset zero a noop instead of zeroing the register, so
3969 we must take care of that edge condition ourselves with cmov. */
3970
1eb356b9 3971 sreg = copy_addr_to_reg (smema);
4208b40f
RH
3972 areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
3973 1, OPTAB_WIDEN);
30102605
RH
3974 if (WORDS_BIG_ENDIAN)
3975 emit_move_insn (sreg, plus_constant (sreg, 7));
6c174fc0
RH
3976 for (i = 0; i < words; ++i)
3977 {
30102605
RH
3978 if (WORDS_BIG_ENDIAN)
3979 {
3980 emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg));
3981 emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
3982 }
3983 else
3984 {
3985 emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
3986 emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
3987 }
38a448ca
RH
3988 emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
3989 gen_rtx_IF_THEN_ELSE (DImode,
4208b40f
RH
3990 gen_rtx_EQ (DImode, areg,
3991 const0_rtx),
38a448ca 3992 const0_rtx, ext_tmps[i])));
6c174fc0
RH
3993 }
3994
3995 /* Merge the half-words into whole words. */
3996 for (i = 0; i < words; ++i)
3997 {
4208b40f
RH
3998 out_regs[i] = expand_binop (DImode, ior_optab, data_regs[i],
3999 ext_tmps[i], data_regs[i], 1, OPTAB_WIDEN);
6c174fc0
RH
4000 }
4001}
4002
4003/* Store an integral number of consecutive unaligned quadwords. DATA_REGS
4004 may be NULL to store zeros. */
4005
4006static void
4208b40f 4007alpha_expand_unaligned_store_words (data_regs, dmem, words, ofs)
6c174fc0 4008 rtx *data_regs;
4208b40f
RH
4009 rtx dmem;
4010 HOST_WIDE_INT words, ofs;
6c174fc0
RH
4011{
4012 rtx const im8 = GEN_INT (-8);
4013 rtx const i64 = GEN_INT (64);
4014#if HOST_BITS_PER_WIDE_INT == 32
4015 rtx const im1 = immed_double_const (0xffffffff, 0xffffffff, DImode);
4016#else
595b6314 4017 rtx const im1 = constm1_rtx;
6c174fc0
RH
4018#endif
4019 rtx ins_tmps[MAX_MOVE_WORDS];
4208b40f 4020 rtx st_tmp_1, st_tmp_2, dreg;
1eb356b9 4021 rtx st_addr_1, st_addr_2, dmema;
6c174fc0
RH
4022 HOST_WIDE_INT i;
4023
1eb356b9
RH
4024 dmema = XEXP (dmem, 0);
4025 if (GET_CODE (dmema) == LO_SUM)
4026 dmema = force_reg (Pmode, dmema);
4027
6c174fc0
RH
4028 /* Generate all the tmp registers we need. */
4029 if (data_regs != NULL)
4030 for (i = 0; i < words; ++i)
4031 ins_tmps[i] = gen_reg_rtx(DImode);
4032 st_tmp_1 = gen_reg_rtx(DImode);
4033 st_tmp_2 = gen_reg_rtx(DImode);
4034
4208b40f 4035 if (ofs != 0)
f4ef873c 4036 dmem = adjust_address (dmem, GET_MODE (dmem), ofs);
4208b40f
RH
4037
4038 st_addr_2 = change_address (dmem, DImode,
38a448ca 4039 gen_rtx_AND (DImode,
1eb356b9 4040 plus_constant (dmema, words*8 - 1),
6c174fc0 4041 im8));
ba4828e0 4042 set_mem_alias_set (st_addr_2, 0);
e01acbb1 4043
4208b40f 4044 st_addr_1 = change_address (dmem, DImode,
1eb356b9 4045 gen_rtx_AND (DImode, dmema, im8));
ba4828e0 4046 set_mem_alias_set (st_addr_1, 0);
6c174fc0
RH
4047
4048 /* Load up the destination end bits. */
4049 emit_move_insn (st_tmp_2, st_addr_2);
4050 emit_move_insn (st_tmp_1, st_addr_1);
4051
4052 /* Shift the input data into place. */
1eb356b9 4053 dreg = copy_addr_to_reg (dmema);
30102605
RH
4054 if (WORDS_BIG_ENDIAN)
4055 emit_move_insn (dreg, plus_constant (dreg, 7));
6c174fc0
RH
4056 if (data_regs != NULL)
4057 {
4058 for (i = words-1; i >= 0; --i)
4059 {
30102605
RH
4060 if (WORDS_BIG_ENDIAN)
4061 {
4062 emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
4063 emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
4064 }
4065 else
4066 {
4067 emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
4068 emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
4069 }
6c174fc0 4070 }
6c174fc0
RH
4071 for (i = words-1; i > 0; --i)
4072 {
4208b40f
RH
4073 ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i],
4074 ins_tmps[i-1], ins_tmps[i-1], 1,
4075 OPTAB_WIDEN);
6c174fc0
RH
4076 }
4077 }
4078
4079 /* Split and merge the ends with the destination data. */
30102605
RH
4080 if (WORDS_BIG_ENDIAN)
4081 {
4082 emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, im1, dreg));
4083 emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
4084 }
4085 else
4086 {
4087 emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
4088 emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, im1, dreg));
4089 }
6c174fc0
RH
4090
4091 if (data_regs != NULL)
4092 {
4208b40f
RH
4093 st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1],
4094 st_tmp_2, 1, OPTAB_WIDEN);
4095 st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0],
4096 st_tmp_1, 1, OPTAB_WIDEN);
6c174fc0
RH
4097 }
4098
4099 /* Store it all. */
30102605
RH
4100 if (WORDS_BIG_ENDIAN)
4101 emit_move_insn (st_addr_1, st_tmp_1);
4102 else
4103 emit_move_insn (st_addr_2, st_tmp_2);
6c174fc0
RH
4104 for (i = words-1; i > 0; --i)
4105 {
e01acbb1
RH
4106 rtx tmp = change_address (dmem, DImode,
4107 gen_rtx_AND (DImode,
30102605
RH
4108 plus_constant(dmema,
4109 WORDS_BIG_ENDIAN ? i*8-1 : i*8),
e01acbb1 4110 im8));
ba4828e0 4111 set_mem_alias_set (tmp, 0);
e01acbb1 4112 emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
6c174fc0 4113 }
30102605
RH
4114 if (WORDS_BIG_ENDIAN)
4115 emit_move_insn (st_addr_2, st_tmp_2);
4116 else
4117 emit_move_insn (st_addr_1, st_tmp_1);
6c174fc0
RH
4118}
4119
4120
4121/* Expand string/block move operations.
4122
4123 operands[0] is the pointer to the destination.
4124 operands[1] is the pointer to the source.
4125 operands[2] is the number of bytes to move.
4126 operands[3] is the alignment. */
4127
4128int
4129alpha_expand_block_move (operands)
4130 rtx operands[];
4131{
4132 rtx bytes_rtx = operands[2];
4133 rtx align_rtx = operands[3];
f35cba21 4134 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
c17f08e1
RH
4135 HOST_WIDE_INT bytes = orig_bytes;
4136 HOST_WIDE_INT src_align = INTVAL (align_rtx) * BITS_PER_UNIT;
4137 HOST_WIDE_INT dst_align = src_align;
bdb429a5
RK
4138 rtx orig_src = operands[1];
4139 rtx orig_dst = operands[0];
4140 rtx data_regs[2 * MAX_MOVE_WORDS + 16];
4208b40f 4141 rtx tmp;
1eb356b9 4142 unsigned int i, words, ofs, nregs = 0;
6c174fc0 4143
bdb429a5 4144 if (orig_bytes <= 0)
6c174fc0 4145 return 1;
c17f08e1 4146 else if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
6c174fc0
RH
4147 return 0;
4148
4208b40f
RH
4149 /* Look for additional alignment information from recorded register info. */
4150
4151 tmp = XEXP (orig_src, 0);
4152 if (GET_CODE (tmp) == REG)
bdb429a5 4153 src_align = MAX (src_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f
RH
4154 else if (GET_CODE (tmp) == PLUS
4155 && GET_CODE (XEXP (tmp, 0)) == REG
4156 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4157 {
bdb429a5
RK
4158 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4159 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
4160
4161 if (a > src_align)
4162 {
bdb429a5
RK
4163 if (a >= 64 && c % 8 == 0)
4164 src_align = 64;
4165 else if (a >= 32 && c % 4 == 0)
4166 src_align = 32;
4167 else if (a >= 16 && c % 2 == 0)
4168 src_align = 16;
4208b40f
RH
4169 }
4170 }
4171
4172 tmp = XEXP (orig_dst, 0);
4173 if (GET_CODE (tmp) == REG)
bdb429a5 4174 dst_align = MAX (dst_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f
RH
4175 else if (GET_CODE (tmp) == PLUS
4176 && GET_CODE (XEXP (tmp, 0)) == REG
4177 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4178 {
bdb429a5
RK
4179 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4180 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
4181
4182 if (a > dst_align)
4183 {
bdb429a5
RK
4184 if (a >= 64 && c % 8 == 0)
4185 dst_align = 64;
4186 else if (a >= 32 && c % 4 == 0)
4187 dst_align = 32;
4188 else if (a >= 16 && c % 2 == 0)
4189 dst_align = 16;
4208b40f
RH
4190 }
4191 }
4192
bdb429a5 4193 /* Load the entire block into registers. */
15389075 4194 if (GET_CODE (XEXP (orig_src, 0)) == ADDRESSOF)
4208b40f
RH
4195 {
4196 enum machine_mode mode;
bdb429a5 4197
4208b40f 4198 tmp = XEXP (XEXP (orig_src, 0), 0);
15389075 4199
c576fce7
RH
4200 /* Don't use the existing register if we're reading more than
4201 is held in the register. Nor if there is not a mode that
4202 handles the exact size. */
f35cba21 4203 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4208b40f 4204 if (mode != BLKmode
c576fce7 4205 && GET_MODE_SIZE (GET_MODE (tmp)) >= bytes)
4208b40f 4206 {
c576fce7
RH
4207 if (mode == TImode)
4208 {
4209 data_regs[nregs] = gen_lowpart (DImode, tmp);
5197bd50 4210 data_regs[nregs + 1] = gen_highpart (DImode, tmp);
c576fce7
RH
4211 nregs += 2;
4212 }
4213 else
4214 data_regs[nregs++] = gen_lowpart (mode, tmp);
bdb429a5 4215
4208b40f
RH
4216 goto src_done;
4217 }
6c174fc0 4218
4208b40f 4219 /* No appropriate mode; fall back on memory. */
792760b9
RK
4220 orig_src = replace_equiv_address (orig_src,
4221 copy_addr_to_reg (XEXP (orig_src, 0)));
d0285326 4222 src_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4208b40f
RH
4223 }
4224
4225 ofs = 0;
bdb429a5 4226 if (src_align >= 64 && bytes >= 8)
6c174fc0
RH
4227 {
4228 words = bytes / 8;
4229
6c174fc0 4230 for (i = 0; i < words; ++i)
5197bd50 4231 data_regs[nregs + i] = gen_reg_rtx (DImode);
6c174fc0 4232
6c174fc0 4233 for (i = 0; i < words; ++i)
bdb429a5 4234 emit_move_insn (data_regs[nregs + i],
f4ef873c 4235 adjust_address (orig_src, DImode, ofs + i * 8));
6c174fc0 4236
4208b40f 4237 nregs += words;
6c174fc0 4238 bytes -= words * 8;
cd36edbd 4239 ofs += words * 8;
6c174fc0 4240 }
bdb429a5
RK
4241
4242 if (src_align >= 32 && bytes >= 4)
6c174fc0
RH
4243 {
4244 words = bytes / 4;
4245
6c174fc0 4246 for (i = 0; i < words; ++i)
5197bd50 4247 data_regs[nregs + i] = gen_reg_rtx (SImode);
6c174fc0 4248
6c174fc0 4249 for (i = 0; i < words; ++i)
bdb429a5 4250 emit_move_insn (data_regs[nregs + i],
792760b9 4251 adjust_address (orig_src, SImode, ofs + i * 4));
6c174fc0 4252
4208b40f 4253 nregs += words;
6c174fc0 4254 bytes -= words * 4;
cd36edbd 4255 ofs += words * 4;
6c174fc0 4256 }
bdb429a5 4257
c17f08e1 4258 if (bytes >= 8)
6c174fc0
RH
4259 {
4260 words = bytes / 8;
4261
6c174fc0 4262 for (i = 0; i < words+1; ++i)
5197bd50 4263 data_regs[nregs + i] = gen_reg_rtx (DImode);
6c174fc0 4264
c576fce7
RH
4265 alpha_expand_unaligned_load_words (data_regs + nregs, orig_src,
4266 words, ofs);
6c174fc0 4267
4208b40f 4268 nregs += words;
6c174fc0 4269 bytes -= words * 8;
cd36edbd 4270 ofs += words * 8;
6c174fc0 4271 }
bdb429a5 4272
bdb429a5 4273 if (! TARGET_BWX && bytes >= 4)
6c174fc0 4274 {
4208b40f 4275 data_regs[nregs++] = tmp = gen_reg_rtx (SImode);
6c174fc0 4276 alpha_expand_unaligned_load (tmp, orig_src, 4, ofs, 0);
6c174fc0
RH
4277 bytes -= 4;
4278 ofs += 4;
4279 }
bdb429a5 4280
6c174fc0
RH
4281 if (bytes >= 2)
4282 {
bdb429a5 4283 if (src_align >= 16)
6c174fc0
RH
4284 {
4285 do {
4208b40f 4286 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
f4ef873c 4287 emit_move_insn (tmp, adjust_address (orig_src, HImode, ofs));
6c174fc0
RH
4288 bytes -= 2;
4289 ofs += 2;
4290 } while (bytes >= 2);
4291 }
bdb429a5 4292 else if (! TARGET_BWX)
6c174fc0 4293 {
4208b40f 4294 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
6c174fc0 4295 alpha_expand_unaligned_load (tmp, orig_src, 2, ofs, 0);
6c174fc0
RH
4296 bytes -= 2;
4297 ofs += 2;
4298 }
4299 }
bdb429a5 4300
6c174fc0
RH
4301 while (bytes > 0)
4302 {
4208b40f 4303 data_regs[nregs++] = tmp = gen_reg_rtx (QImode);
f4ef873c 4304 emit_move_insn (tmp, adjust_address (orig_src, QImode, ofs));
6c174fc0
RH
4305 bytes -= 1;
4306 ofs += 1;
4307 }
bdb429a5 4308
4208b40f
RH
4309 src_done:
4310
9a56f4f6 4311 if (nregs > ARRAY_SIZE (data_regs))
bdb429a5 4312 abort ();
4208b40f 4313
bdb429a5 4314 /* Now save it back out again. */
4208b40f
RH
4315
4316 i = 0, ofs = 0;
4317
4318 if (GET_CODE (XEXP (orig_dst, 0)) == ADDRESSOF)
4319 {
4320 enum machine_mode mode;
4321 tmp = XEXP (XEXP (orig_dst, 0), 0);
4322
f35cba21 4323 mode = mode_for_size (orig_bytes * BITS_PER_UNIT, MODE_INT, 1);
c576fce7 4324 if (GET_MODE (tmp) == mode)
4208b40f 4325 {
c576fce7
RH
4326 if (nregs == 1)
4327 {
4328 emit_move_insn (tmp, data_regs[0]);
4329 i = 1;
4330 goto dst_done;
4331 }
bdb429a5 4332
c576fce7
RH
4333 else if (nregs == 2 && mode == TImode)
4334 {
4335 /* Undo the subregging done above when copying between
4336 two TImode registers. */
4337 if (GET_CODE (data_regs[0]) == SUBREG
4338 && GET_MODE (SUBREG_REG (data_regs[0])) == TImode)
bdb429a5 4339 emit_move_insn (tmp, SUBREG_REG (data_regs[0]));
c576fce7
RH
4340 else
4341 {
4342 rtx seq;
4343
4344 start_sequence ();
4345 emit_move_insn (gen_lowpart (DImode, tmp), data_regs[0]);
4346 emit_move_insn (gen_highpart (DImode, tmp), data_regs[1]);
34cea4e9 4347 seq = get_insns ();
c576fce7
RH
4348 end_sequence ();
4349
4350 emit_no_conflict_block (seq, tmp, data_regs[0],
4351 data_regs[1], NULL_RTX);
4352 }
4353
4354 i = 2;
4355 goto dst_done;
4356 }
4208b40f
RH
4357 }
4358
4359 /* ??? If nregs > 1, consider reconstructing the word in regs. */
4360 /* ??? Optimize mode < dst_mode with strict_low_part. */
f35cba21
RH
4361
4362 /* No appropriate mode; fall back on memory. We can speed things
4363 up by recognizing extra alignment information. */
792760b9
RK
4364 orig_dst = replace_equiv_address (orig_dst,
4365 copy_addr_to_reg (XEXP (orig_dst, 0)));
d0285326 4366 dst_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4208b40f
RH
4367 }
4368
4369 /* Write out the data in whatever chunks reading the source allowed. */
bdb429a5 4370 if (dst_align >= 64)
4208b40f
RH
4371 {
4372 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4373 {
f4ef873c 4374 emit_move_insn (adjust_address (orig_dst, DImode, ofs),
4208b40f
RH
4375 data_regs[i]);
4376 ofs += 8;
4377 i++;
4378 }
4379 }
bdb429a5
RK
4380
4381 if (dst_align >= 32)
4208b40f
RH
4382 {
4383 /* If the source has remaining DImode regs, write them out in
4384 two pieces. */
4385 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4386 {
4387 tmp = expand_binop (DImode, lshr_optab, data_regs[i], GEN_INT (32),
4388 NULL_RTX, 1, OPTAB_WIDEN);
4389
f4ef873c 4390 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4208b40f 4391 gen_lowpart (SImode, data_regs[i]));
f4ef873c 4392 emit_move_insn (adjust_address (orig_dst, SImode, ofs + 4),
4208b40f
RH
4393 gen_lowpart (SImode, tmp));
4394 ofs += 8;
4395 i++;
4396 }
4397
4398 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4399 {
f4ef873c 4400 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4208b40f
RH
4401 data_regs[i]);
4402 ofs += 4;
4403 i++;
4404 }
4405 }
bdb429a5 4406
4208b40f
RH
4407 if (i < nregs && GET_MODE (data_regs[i]) == DImode)
4408 {
4409 /* Write out a remaining block of words using unaligned methods. */
4410
bdb429a5
RK
4411 for (words = 1; i + words < nregs; words++)
4412 if (GET_MODE (data_regs[i + words]) != DImode)
4208b40f
RH
4413 break;
4414
4415 if (words == 1)
4416 alpha_expand_unaligned_store (orig_dst, data_regs[i], 8, ofs);
4417 else
bdb429a5
RK
4418 alpha_expand_unaligned_store_words (data_regs + i, orig_dst,
4419 words, ofs);
4208b40f
RH
4420
4421 i += words;
4422 ofs += words * 8;
4423 }
4424
4425 /* Due to the above, this won't be aligned. */
4426 /* ??? If we have more than one of these, consider constructing full
4427 words in registers and using alpha_expand_unaligned_store_words. */
4428 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4429 {
4430 alpha_expand_unaligned_store (orig_dst, data_regs[i], 4, ofs);
4431 ofs += 4;
4432 i++;
4433 }
4434
bdb429a5 4435 if (dst_align >= 16)
4208b40f
RH
4436 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4437 {
f4ef873c 4438 emit_move_insn (adjust_address (orig_dst, HImode, ofs), data_regs[i]);
4208b40f
RH
4439 i++;
4440 ofs += 2;
4441 }
4442 else
4443 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4444 {
4445 alpha_expand_unaligned_store (orig_dst, data_regs[i], 2, ofs);
4446 i++;
4447 ofs += 2;
4448 }
bdb429a5 4449
4208b40f
RH
4450 while (i < nregs && GET_MODE (data_regs[i]) == QImode)
4451 {
f4ef873c 4452 emit_move_insn (adjust_address (orig_dst, QImode, ofs), data_regs[i]);
4208b40f
RH
4453 i++;
4454 ofs += 1;
4455 }
bdb429a5 4456
4208b40f
RH
4457 dst_done:
4458
4459 if (i != nregs)
bdb429a5 4460 abort ();
6c174fc0
RH
4461
4462 return 1;
4463}
4464
4465int
4466alpha_expand_block_clear (operands)
4467 rtx operands[];
4468{
4469 rtx bytes_rtx = operands[1];
4470 rtx align_rtx = operands[2];
bdb429a5 4471 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
c17f08e1
RH
4472 HOST_WIDE_INT bytes = orig_bytes;
4473 HOST_WIDE_INT align = INTVAL (align_rtx) * BITS_PER_UNIT;
4474 HOST_WIDE_INT alignofs = 0;
bdb429a5 4475 rtx orig_dst = operands[0];
4208b40f 4476 rtx tmp;
c17f08e1 4477 int i, words, ofs = 0;
6c174fc0 4478
bdb429a5 4479 if (orig_bytes <= 0)
6c174fc0 4480 return 1;
c17f08e1 4481 if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
6c174fc0
RH
4482 return 0;
4483
4208b40f 4484 /* Look for stricter alignment. */
4208b40f
RH
4485 tmp = XEXP (orig_dst, 0);
4486 if (GET_CODE (tmp) == REG)
bdb429a5 4487 align = MAX (align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f
RH
4488 else if (GET_CODE (tmp) == PLUS
4489 && GET_CODE (XEXP (tmp, 0)) == REG
4490 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4491 {
c17f08e1
RH
4492 HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4493 int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
4494
4495 if (a > align)
4496 {
c17f08e1
RH
4497 if (a >= 64)
4498 align = a, alignofs = 8 - c % 8;
4499 else if (a >= 32)
4500 align = a, alignofs = 4 - c % 4;
4501 else if (a >= 16)
4502 align = a, alignofs = 2 - c % 2;
4208b40f
RH
4503 }
4504 }
c576fce7
RH
4505 else if (GET_CODE (tmp) == ADDRESSOF)
4506 {
4507 enum machine_mode mode;
4508
4509 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4510 if (GET_MODE (XEXP (tmp, 0)) == mode)
4511 {
4512 emit_move_insn (XEXP (tmp, 0), const0_rtx);
4513 return 1;
4514 }
4515
4516 /* No appropriate mode; fall back on memory. */
792760b9 4517 orig_dst = replace_equiv_address (orig_dst, copy_addr_to_reg (tmp));
c17f08e1 4518 align = GET_MODE_BITSIZE (GET_MODE (XEXP (tmp, 0)));
c576fce7 4519 }
4208b40f 4520
c17f08e1
RH
4521 /* Handle an unaligned prefix first. */
4522
4523 if (alignofs > 0)
4524 {
4525#if HOST_BITS_PER_WIDE_INT >= 64
4526 /* Given that alignofs is bounded by align, the only time BWX could
4527 generate three stores is for a 7 byte fill. Prefer two individual
4528 stores over a load/mask/store sequence. */
4529 if ((!TARGET_BWX || alignofs == 7)
4530 && align >= 32
4531 && !(alignofs == 4 && bytes >= 4))
4532 {
4533 enum machine_mode mode = (align >= 64 ? DImode : SImode);
4534 int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
4535 rtx mem, tmp;
4536 HOST_WIDE_INT mask;
4537
f4ef873c 4538 mem = adjust_address (orig_dst, mode, ofs - inv_alignofs);
ba4828e0 4539 set_mem_alias_set (mem, 0);
c17f08e1
RH
4540
4541 mask = ~(~(HOST_WIDE_INT)0 << (inv_alignofs * 8));
4542 if (bytes < alignofs)
4543 {
4544 mask |= ~(HOST_WIDE_INT)0 << ((inv_alignofs + bytes) * 8);
4545 ofs += bytes;
4546 bytes = 0;
4547 }
4548 else
4549 {
4550 bytes -= alignofs;
4551 ofs += alignofs;
4552 }
4553 alignofs = 0;
4554
4555 tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask),
4556 NULL_RTX, 1, OPTAB_WIDEN);
4557
4558 emit_move_insn (mem, tmp);
4559 }
4560#endif
4561
4562 if (TARGET_BWX && (alignofs & 1) && bytes >= 1)
4563 {
f4ef873c 4564 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
c17f08e1
RH
4565 bytes -= 1;
4566 ofs += 1;
4567 alignofs -= 1;
4568 }
4569 if (TARGET_BWX && align >= 16 && (alignofs & 3) == 2 && bytes >= 2)
4570 {
f4ef873c 4571 emit_move_insn (adjust_address (orig_dst, HImode, ofs), const0_rtx);
c17f08e1
RH
4572 bytes -= 2;
4573 ofs += 2;
4574 alignofs -= 2;
4575 }
4576 if (alignofs == 4 && bytes >= 4)
4577 {
f4ef873c 4578 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
c17f08e1
RH
4579 bytes -= 4;
4580 ofs += 4;
4581 alignofs = 0;
4582 }
4583
4584 /* If we've not used the extra lead alignment information by now,
4585 we won't be able to. Downgrade align to match what's left over. */
4586 if (alignofs > 0)
4587 {
4588 alignofs = alignofs & -alignofs;
4589 align = MIN (align, alignofs * BITS_PER_UNIT);
4590 }
4591 }
4592
4593 /* Handle a block of contiguous long-words. */
6c174fc0 4594
bdb429a5 4595 if (align >= 64 && bytes >= 8)
6c174fc0
RH
4596 {
4597 words = bytes / 8;
4598
4599 for (i = 0; i < words; ++i)
1eb356b9 4600 emit_move_insn (adjust_address (orig_dst, DImode, ofs + i * 8),
f4ef873c 4601 const0_rtx);
6c174fc0
RH
4602
4603 bytes -= words * 8;
cd36edbd 4604 ofs += words * 8;
6c174fc0 4605 }
bdb429a5 4606
c17f08e1
RH
4607 /* If the block is large and appropriately aligned, emit a single
4608 store followed by a sequence of stq_u insns. */
4609
4610 if (align >= 32 && bytes > 16)
4611 {
1eb356b9
RH
4612 rtx orig_dsta;
4613
f4ef873c 4614 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
c17f08e1
RH
4615 bytes -= 4;
4616 ofs += 4;
4617
1eb356b9
RH
4618 orig_dsta = XEXP (orig_dst, 0);
4619 if (GET_CODE (orig_dsta) == LO_SUM)
4620 orig_dsta = force_reg (Pmode, orig_dsta);
4621
c17f08e1
RH
4622 words = bytes / 8;
4623 for (i = 0; i < words; ++i)
4624 {
ba4828e0
RK
4625 rtx mem
4626 = change_address (orig_dst, DImode,
4627 gen_rtx_AND (DImode,
1eb356b9 4628 plus_constant (orig_dsta, ofs + i*8),
ba4828e0
RK
4629 GEN_INT (-8)));
4630 set_mem_alias_set (mem, 0);
c17f08e1
RH
4631 emit_move_insn (mem, const0_rtx);
4632 }
4633
4634 /* Depending on the alignment, the first stq_u may have overlapped
4635 with the initial stl, which means that the last stq_u didn't
4636 write as much as it would appear. Leave those questionable bytes
4637 unaccounted for. */
4638 bytes -= words * 8 - 4;
4639 ofs += words * 8 - 4;
4640 }
4641
4642 /* Handle a smaller block of aligned words. */
4643
4644 if ((align >= 64 && bytes == 4)
4645 || (align == 32 && bytes >= 4))
6c174fc0
RH
4646 {
4647 words = bytes / 4;
4648
4649 for (i = 0; i < words; ++i)
f4ef873c 4650 emit_move_insn (adjust_address (orig_dst, SImode, ofs + i * 4),
bdb429a5 4651 const0_rtx);
6c174fc0
RH
4652
4653 bytes -= words * 4;
cd36edbd 4654 ofs += words * 4;
6c174fc0 4655 }
bdb429a5 4656
c17f08e1
RH
4657 /* An unaligned block uses stq_u stores for as many as possible. */
4658
4659 if (bytes >= 8)
6c174fc0
RH
4660 {
4661 words = bytes / 8;
4662
cd36edbd 4663 alpha_expand_unaligned_store_words (NULL, orig_dst, words, ofs);
6c174fc0
RH
4664
4665 bytes -= words * 8;
cd36edbd 4666 ofs += words * 8;
6c174fc0
RH
4667 }
4668
c17f08e1 4669 /* Next clean up any trailing pieces. */
6c174fc0 4670
c17f08e1
RH
4671#if HOST_BITS_PER_WIDE_INT >= 64
4672 /* Count the number of bits in BYTES for which aligned stores could
4673 be emitted. */
4674 words = 0;
4675 for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
4676 if (bytes & i)
4677 words += 1;
4678
4679 /* If we have appropriate alignment (and it wouldn't take too many
4680 instructions otherwise), mask out the bytes we need. */
4681 if (TARGET_BWX ? words > 2 : bytes > 0)
4682 {
4683 if (align >= 64)
4684 {
4685 rtx mem, tmp;
4686 HOST_WIDE_INT mask;
4687
f4ef873c 4688 mem = adjust_address (orig_dst, DImode, ofs);
ba4828e0 4689 set_mem_alias_set (mem, 0);
c17f08e1
RH
4690
4691 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
4692
4693 tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
4694 NULL_RTX, 1, OPTAB_WIDEN);
4695
4696 emit_move_insn (mem, tmp);
4697 return 1;
4698 }
4699 else if (align >= 32 && bytes < 4)
4700 {
4701 rtx mem, tmp;
4702 HOST_WIDE_INT mask;
4703
f4ef873c 4704 mem = adjust_address (orig_dst, SImode, ofs);
ba4828e0 4705 set_mem_alias_set (mem, 0);
c17f08e1
RH
4706
4707 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
4708
4709 tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
4710 NULL_RTX, 1, OPTAB_WIDEN);
4711
4712 emit_move_insn (mem, tmp);
4713 return 1;
4714 }
6c174fc0 4715 }
c17f08e1 4716#endif
bdb429a5 4717
6c174fc0
RH
4718 if (!TARGET_BWX && bytes >= 4)
4719 {
4720 alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
4721 bytes -= 4;
4722 ofs += 4;
4723 }
bdb429a5 4724
6c174fc0
RH
4725 if (bytes >= 2)
4726 {
bdb429a5 4727 if (align >= 16)
6c174fc0
RH
4728 {
4729 do {
f4ef873c 4730 emit_move_insn (adjust_address (orig_dst, HImode, ofs),
6c174fc0
RH
4731 const0_rtx);
4732 bytes -= 2;
4733 ofs += 2;
4734 } while (bytes >= 2);
4735 }
bdb429a5 4736 else if (! TARGET_BWX)
6c174fc0
RH
4737 {
4738 alpha_expand_unaligned_store (orig_dst, const0_rtx, 2, ofs);
4739 bytes -= 2;
4740 ofs += 2;
4741 }
4742 }
bdb429a5 4743
6c174fc0
RH
4744 while (bytes > 0)
4745 {
f4ef873c 4746 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
6c174fc0
RH
4747 bytes -= 1;
4748 ofs += 1;
4749 }
4750
4751 return 1;
4752}
a6f12d7c
RK
4753\f
4754/* Adjust the cost of a scheduling dependency. Return the new cost of
4755 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
4756
c237e94a 4757static int
a6f12d7c
RK
4758alpha_adjust_cost (insn, link, dep_insn, cost)
4759 rtx insn;
4760 rtx link;
4761 rtx dep_insn;
4762 int cost;
4763{
74835ed8 4764 rtx set, set_src;
26250081 4765 enum attr_type insn_type, dep_insn_type;
a6f12d7c
RK
4766
4767 /* If the dependence is an anti-dependence, there is no cost. For an
4768 output dependence, there is sometimes a cost, but it doesn't seem
4769 worth handling those few cases. */
4770
4771 if (REG_NOTE_KIND (link) != 0)
4772 return 0;
4773
26250081
RH
4774 /* If we can't recognize the insns, we can't really do anything. */
4775 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
4776 return cost;
4777
4778 insn_type = get_attr_type (insn);
4779 dep_insn_type = get_attr_type (dep_insn);
4780
bcbbac26 4781 /* Bring in the user-defined memory latency. */
71d9b493
RH
4782 if (dep_insn_type == TYPE_ILD
4783 || dep_insn_type == TYPE_FLD
4784 || dep_insn_type == TYPE_LDSYM)
bcbbac26
RH
4785 cost += alpha_memory_latency-1;
4786
71d9b493 4787 switch (alpha_cpu)
74835ed8 4788 {
71d9b493 4789 case PROCESSOR_EV4:
74835ed8
RH
4790 /* On EV4, if INSN is a store insn and DEP_INSN is setting the data
4791 being stored, we can sometimes lower the cost. */
4792
71d9b493 4793 if ((insn_type == TYPE_IST || insn_type == TYPE_FST)
74835ed8
RH
4794 && (set = single_set (dep_insn)) != 0
4795 && GET_CODE (PATTERN (insn)) == SET
4796 && rtx_equal_p (SET_DEST (set), SET_SRC (PATTERN (insn))))
4797 {
26250081 4798 switch (dep_insn_type)
74835ed8 4799 {
71d9b493
RH
4800 case TYPE_ILD:
4801 case TYPE_FLD:
74835ed8
RH
4802 /* No savings here. */
4803 return cost;
4804
71d9b493 4805 case TYPE_IMUL:
74835ed8
RH
4806 /* In these cases, we save one cycle. */
4807 return cost - 1;
4808
4809 default:
4810 /* In all other cases, we save two cycles. */
4811 return MAX (0, cost - 2);
4812 }
4813 }
4814
4815 /* Another case that needs adjustment is an arithmetic or logical
4816 operation. It's cost is usually one cycle, but we default it to
4817 two in the MD file. The only case that it is actually two is
71d9b493 4818 for the address in loads, stores, and jumps. */
74835ed8 4819
26250081 4820 if (dep_insn_type == TYPE_IADD || dep_insn_type == TYPE_ILOG)
74835ed8 4821 {
26250081 4822 switch (insn_type)
74835ed8 4823 {
71d9b493
RH
4824 case TYPE_ILD:
4825 case TYPE_IST:
4826 case TYPE_FLD:
4827 case TYPE_FST:
4828 case TYPE_JSR:
74835ed8
RH
4829 return cost;
4830 default:
4831 return 1;
4832 }
4833 }
4834
4835 /* The final case is when a compare feeds into an integer branch;
4836 the cost is only one cycle in that case. */
4837
26250081 4838 if (dep_insn_type == TYPE_ICMP && insn_type == TYPE_IBR)
da792a68 4839 return 1;
71d9b493
RH
4840 break;
4841
4842 case PROCESSOR_EV5:
4843 /* And the lord DEC saith: "A special bypass provides an effective
4844 latency of 0 cycles for an ICMP or ILOG insn producing the test
4845 operand of an IBR or ICMOV insn." */
4846
4847 if ((dep_insn_type == TYPE_ICMP || dep_insn_type == TYPE_ILOG)
4848 && (set = single_set (dep_insn)) != 0)
4849 {
4850 /* A branch only has one input. This must be it. */
4851 if (insn_type == TYPE_IBR)
4852 return 0;
4853 /* A conditional move has three, make sure it is the test. */
4854 if (insn_type == TYPE_ICMOV
4855 && GET_CODE (set_src = PATTERN (insn)) == SET
4856 && GET_CODE (set_src = SET_SRC (set_src)) == IF_THEN_ELSE
4857 && rtx_equal_p (SET_DEST (set), XEXP (set_src, 0)))
4858 return 0;
4859 }
4860
4861 /* "The multiplier is unable to receive data from IEU bypass paths.
4862 The instruction issues at the expected time, but its latency is
4863 increased by the time it takes for the input data to become
4864 available to the multiplier" -- which happens in pipeline stage
4865 six, when results are comitted to the register file. */
4866
4867 if (insn_type == TYPE_IMUL)
4868 {
4869 switch (dep_insn_type)
4870 {
4871 /* These insns produce their results in pipeline stage five. */
4872 case TYPE_ILD:
4873 case TYPE_ICMOV:
4874 case TYPE_IMUL:
4875 case TYPE_MVI:
4876 return cost + 1;
4877
4878 /* Other integer insns produce results in pipeline stage four. */
4879 default:
4880 return cost + 2;
4881 }
4882 }
4883 break;
4884
4885 case PROCESSOR_EV6:
4886 /* There is additional latency to move the result of (most) FP
4887 operations anywhere but the FP register file. */
4888
4889 if ((insn_type == TYPE_FST || insn_type == TYPE_FTOI)
4890 && (dep_insn_type == TYPE_FADD ||
4891 dep_insn_type == TYPE_FMUL ||
4892 dep_insn_type == TYPE_FCMOV))
4893 return cost + 2;
4894
4895 break;
74835ed8 4896 }
a6f12d7c 4897
285a5742 4898 /* Otherwise, return the default cost. */
a6f12d7c
RK
4899 return cost;
4900}
c237e94a
ZW
4901
4902/* Function to initialize the issue rate used by the scheduler. */
4903static int
4904alpha_issue_rate ()
4905{
4906 return (alpha_cpu == PROCESSOR_EV4 ? 2 : 4);
4907}
4908
4909static int
4910alpha_variable_issue (dump, verbose, insn, cim)
4911 FILE *dump ATTRIBUTE_UNUSED;
4912 int verbose ATTRIBUTE_UNUSED;
4913 rtx insn;
4914 int cim;
4915{
4916 if (recog_memoized (insn) < 0 || get_attr_type (insn) == TYPE_MULTI)
4917 return 0;
4918
4919 return cim - 1;
4920}
4921
9ecc37f0 4922\f
30102605
RH
/* Register global variables and machine-specific functions with the
   garbage collector.  */

#if TARGET_ABI_UNICOSMK
/* Allocate and zero-initialize the per-function machine state.  */
static void
alpha_init_machine_status (p)
     struct function *p;
{
  p->machine =
    (struct machine_function *) xcalloc (1, sizeof (struct machine_function));

  p->machine->first_ciw = NULL_RTX;
  p->machine->last_ciw = NULL_RTX;
  p->machine->ciw_count = 0;
  p->machine->addr_list = NULL_RTX;
}

/* GC mark hook: keep the rtl chains rooted in the machine state live.  */
static void
alpha_mark_machine_status (p)
     struct function *p;
{
  struct machine_function *machine = p->machine;

  if (machine)
    {
      ggc_mark_rtx (machine->first_ciw);
      ggc_mark_rtx (machine->addr_list);
    }
}

/* Release the per-function machine state.  */
static void
alpha_free_machine_status (p)
     struct function *p;
{
  free (p->machine);
  p->machine = NULL;
}
#endif /* TARGET_ABI_UNICOSMK */
4961
9ecc37f0
RH
4962/* Functions to save and restore alpha_return_addr_rtx. */
4963
9ecc37f0
RH
4964/* Start the ball rolling with RETURN_ADDR_RTX. */
4965
4966rtx
4967alpha_return_addr (count, frame)
4968 int count;
3c303f52 4969 rtx frame ATTRIBUTE_UNUSED;
9ecc37f0 4970{
9ecc37f0
RH
4971 if (count != 0)
4972 return const0_rtx;
4973
b91055dd 4974 return get_hard_reg_initial_val (Pmode, REG_RA);
9ecc37f0
RH
4975}
4976
ccb83cbc
RH
4977/* Return or create a pseudo containing the gp value for the current
4978 function. Needed only if TARGET_LD_BUGGY_LDGP. */
4979
4980rtx
4981alpha_gp_save_rtx ()
4982{
b91055dd 4983 return get_hard_reg_initial_val (DImode, 29);
ccb83cbc
RH
4984}
4985
9ecc37f0
RH
4986static int
4987alpha_ra_ever_killed ()
4988{
6abc6f40
RH
4989 rtx top;
4990
b91055dd 4991 if (!has_hard_reg_initial_val (Pmode, REG_RA))
9ecc37f0
RH
4992 return regs_ever_live[REG_RA];
4993
6abc6f40
RH
4994 push_topmost_sequence ();
4995 top = get_insns ();
4996 pop_topmost_sequence ();
4997
4998 return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX);
9ecc37f0
RH
4999}
5000
a6f12d7c 5001\f
be7560ea 5002/* Return the trap mode suffix applicable to the current
285a5742 5003 instruction, or NULL. */
a6f12d7c 5004
be7560ea
RH
5005static const char *
5006get_trap_mode_suffix ()
a6f12d7c 5007{
be7560ea 5008 enum attr_trap_suffix s = get_attr_trap_suffix (current_output_insn);
a6f12d7c 5009
be7560ea 5010 switch (s)
a6f12d7c 5011 {
be7560ea
RH
5012 case TRAP_SUFFIX_NONE:
5013 return NULL;
6245e3df 5014
be7560ea 5015 case TRAP_SUFFIX_SU:
981a828e 5016 if (alpha_fptm >= ALPHA_FPTM_SU)
be7560ea
RH
5017 return "su";
5018 return NULL;
6245e3df 5019
be7560ea
RH
5020 case TRAP_SUFFIX_SUI:
5021 if (alpha_fptm >= ALPHA_FPTM_SUI)
5022 return "sui";
5023 return NULL;
5024
5025 case TRAP_SUFFIX_V_SV:
e83015a9
RH
5026 switch (alpha_fptm)
5027 {
5028 case ALPHA_FPTM_N:
be7560ea 5029 return NULL;
e83015a9 5030 case ALPHA_FPTM_U:
be7560ea 5031 return "v";
e83015a9
RH
5032 case ALPHA_FPTM_SU:
5033 case ALPHA_FPTM_SUI:
be7560ea 5034 return "sv";
e83015a9
RH
5035 }
5036 break;
5037
be7560ea 5038 case TRAP_SUFFIX_V_SV_SVI:
0022a940
DMT
5039 switch (alpha_fptm)
5040 {
5041 case ALPHA_FPTM_N:
be7560ea 5042 return NULL;
0022a940 5043 case ALPHA_FPTM_U:
be7560ea 5044 return "v";
0022a940 5045 case ALPHA_FPTM_SU:
be7560ea 5046 return "sv";
0022a940 5047 case ALPHA_FPTM_SUI:
be7560ea 5048 return "svi";
0022a940
DMT
5049 }
5050 break;
5051
be7560ea 5052 case TRAP_SUFFIX_U_SU_SUI:
6245e3df
RK
5053 switch (alpha_fptm)
5054 {
5055 case ALPHA_FPTM_N:
be7560ea 5056 return NULL;
6245e3df 5057 case ALPHA_FPTM_U:
be7560ea 5058 return "u";
6245e3df 5059 case ALPHA_FPTM_SU:
be7560ea 5060 return "su";
6245e3df 5061 case ALPHA_FPTM_SUI:
be7560ea 5062 return "sui";
6245e3df
RK
5063 }
5064 break;
be7560ea
RH
5065 }
5066 abort ();
5067}
6245e3df 5068
be7560ea 5069/* Return the rounding mode suffix applicable to the current
285a5742 5070 instruction, or NULL. */
be7560ea
RH
5071
5072static const char *
5073get_round_mode_suffix ()
5074{
5075 enum attr_round_suffix s = get_attr_round_suffix (current_output_insn);
5076
5077 switch (s)
5078 {
5079 case ROUND_SUFFIX_NONE:
5080 return NULL;
5081 case ROUND_SUFFIX_NORMAL:
5082 switch (alpha_fprm)
6245e3df 5083 {
be7560ea
RH
5084 case ALPHA_FPRM_NORM:
5085 return NULL;
5086 case ALPHA_FPRM_MINF:
5087 return "m";
5088 case ALPHA_FPRM_CHOP:
5089 return "c";
5090 case ALPHA_FPRM_DYN:
5091 return "d";
6245e3df
RK
5092 }
5093 break;
5094
be7560ea
RH
5095 case ROUND_SUFFIX_C:
5096 return "c";
5097 }
5098 abort ();
5099}
5100
5101/* Print an operand. Recognize special options, documented below. */
5102
5103void
5104print_operand (file, x, code)
5105 FILE *file;
5106 rtx x;
5107 int code;
5108{
5109 int i;
5110
5111 switch (code)
5112 {
5113 case '~':
5114 /* Print the assembler name of the current function. */
5115 assemble_name (file, alpha_fnname);
5116 break;
5117
5118 case '/':
5119 {
5120 const char *trap = get_trap_mode_suffix ();
5121 const char *round = get_round_mode_suffix ();
5122
5123 if (trap || round)
30102605
RH
5124 fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"),
5125 (trap ? trap : ""), (round ? round : ""));
be7560ea
RH
5126 break;
5127 }
5128
89cfc2c6
RK
5129 case ',':
5130 /* Generates single precision instruction suffix. */
be7560ea 5131 fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
89cfc2c6
RK
5132 break;
5133
5134 case '-':
5135 /* Generates double precision instruction suffix. */
be7560ea 5136 fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
89cfc2c6
RK
5137 break;
5138
1eb356b9
RH
5139 case '#':
5140 if (alpha_this_literal_sequence_number == 0)
5141 alpha_this_literal_sequence_number = alpha_next_sequence_number++;
5142 fprintf (file, "%d", alpha_this_literal_sequence_number);
5143 break;
5144
5145 case '*':
5146 if (alpha_this_gpdisp_sequence_number == 0)
5147 alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
5148 fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
5149 break;
5150
5151 case 'H':
5152 if (GET_CODE (x) == HIGH)
133d3133 5153 output_addr_const (file, XEXP (x, 0));
1eb356b9
RH
5154 else
5155 output_operand_lossage ("invalid %%H value");
5156 break;
5157
40571d67
RH
5158 case 'J':
5159 if (GET_CODE (x) == CONST_INT)
5160 {
5161 if (INTVAL (x) != 0)
5162 fprintf (file, "\t\t!lituse_jsr!%d", (int) INTVAL (x));
5163 }
5164 else
5165 output_operand_lossage ("invalid %%J value");
5166 break;
5167
a6f12d7c
RK
5168 case 'r':
5169 /* If this operand is the constant zero, write it as "$31". */
5170 if (GET_CODE (x) == REG)
5171 fprintf (file, "%s", reg_names[REGNO (x)]);
5172 else if (x == CONST0_RTX (GET_MODE (x)))
5173 fprintf (file, "$31");
5174 else
5175 output_operand_lossage ("invalid %%r value");
a6f12d7c
RK
5176 break;
5177
5178 case 'R':
5179 /* Similar, but for floating-point. */
5180 if (GET_CODE (x) == REG)
5181 fprintf (file, "%s", reg_names[REGNO (x)]);
5182 else if (x == CONST0_RTX (GET_MODE (x)))
5183 fprintf (file, "$f31");
5184 else
5185 output_operand_lossage ("invalid %%R value");
a6f12d7c
RK
5186 break;
5187
5188 case 'N':
5189 /* Write the 1's complement of a constant. */
5190 if (GET_CODE (x) != CONST_INT)
5191 output_operand_lossage ("invalid %%N value");
5192
0bc8ae6e 5193 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
a6f12d7c
RK
5194 break;
5195
5196 case 'P':
5197 /* Write 1 << C, for a constant C. */
5198 if (GET_CODE (x) != CONST_INT)
5199 output_operand_lossage ("invalid %%P value");
5200
0bc8ae6e 5201 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x));
a6f12d7c
RK
5202 break;
5203
5204 case 'h':
5205 /* Write the high-order 16 bits of a constant, sign-extended. */
5206 if (GET_CODE (x) != CONST_INT)
5207 output_operand_lossage ("invalid %%h value");
5208
0bc8ae6e 5209 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) >> 16);
a6f12d7c
RK
5210 break;
5211
5212 case 'L':
5213 /* Write the low-order 16 bits of a constant, sign-extended. */
5214 if (GET_CODE (x) != CONST_INT)
5215 output_operand_lossage ("invalid %%L value");
5216
0bc8ae6e
RK
5217 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5218 (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
a6f12d7c
RK
5219 break;
5220
5221 case 'm':
5222 /* Write mask for ZAP insn. */
5223 if (GET_CODE (x) == CONST_DOUBLE)
5224 {
5225 HOST_WIDE_INT mask = 0;
5226 HOST_WIDE_INT value;
5227
5228 value = CONST_DOUBLE_LOW (x);
5229 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5230 i++, value >>= 8)
5231 if (value & 0xff)
5232 mask |= (1 << i);
5233
5234 value = CONST_DOUBLE_HIGH (x);
5235 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5236 i++, value >>= 8)
5237 if (value & 0xff)
5238 mask |= (1 << (i + sizeof (int)));
5239
0bc8ae6e 5240 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
a6f12d7c
RK
5241 }
5242
5243 else if (GET_CODE (x) == CONST_INT)
5244 {
5245 HOST_WIDE_INT mask = 0, value = INTVAL (x);
5246
5247 for (i = 0; i < 8; i++, value >>= 8)
5248 if (value & 0xff)
5249 mask |= (1 << i);
5250
0bc8ae6e 5251 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask);
a6f12d7c
RK
5252 }
5253 else
5254 output_operand_lossage ("invalid %%m value");
5255 break;
5256
5257 case 'M':
6c174fc0 5258 /* 'b', 'w', 'l', or 'q' as the value of the constant. */
a6f12d7c 5259 if (GET_CODE (x) != CONST_INT
6c174fc0
RH
5260 || (INTVAL (x) != 8 && INTVAL (x) != 16
5261 && INTVAL (x) != 32 && INTVAL (x) != 64))
a6f12d7c
RK
5262 output_operand_lossage ("invalid %%M value");
5263
5264 fprintf (file, "%s",
6c174fc0
RH
5265 (INTVAL (x) == 8 ? "b"
5266 : INTVAL (x) == 16 ? "w"
5267 : INTVAL (x) == 32 ? "l"
5268 : "q"));
a6f12d7c
RK
5269 break;
5270
5271 case 'U':
5272 /* Similar, except do it from the mask. */
5273 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0xff)
5274 fprintf (file, "b");
5275 else if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0xffff)
5276 fprintf (file, "w");
11ea364a
JW
5277 else if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0xffffffff)
5278 fprintf (file, "l");
a6f12d7c
RK
5279#if HOST_BITS_PER_WIDE_INT == 32
5280 else if (GET_CODE (x) == CONST_DOUBLE
5281 && CONST_DOUBLE_HIGH (x) == 0
5282 && CONST_DOUBLE_LOW (x) == -1)
5283 fprintf (file, "l");
6c174fc0
RH
5284 else if (GET_CODE (x) == CONST_DOUBLE
5285 && CONST_DOUBLE_HIGH (x) == -1
5286 && CONST_DOUBLE_LOW (x) == -1)
5287 fprintf (file, "q");
a6f12d7c 5288#else
3873d24b 5289 else if (GET_CODE (x) == CONST_INT && INTVAL (x) == -1)
6c174fc0
RH
5290 fprintf (file, "q");
5291 else if (GET_CODE (x) == CONST_DOUBLE
5292 && CONST_DOUBLE_HIGH (x) == 0
5293 && CONST_DOUBLE_LOW (x) == -1)
5294 fprintf (file, "q");
a6f12d7c
RK
5295#endif
5296 else
5297 output_operand_lossage ("invalid %%U value");
5298 break;
5299
5300 case 's':
30102605
RH
5301 /* Write the constant value divided by 8 for little-endian mode or
5302 (56 - value) / 8 for big-endian mode. */
5303
a6f12d7c 5304 if (GET_CODE (x) != CONST_INT
30102605
RH
5305 || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
5306 ? 56
5307 : 64)
5308 || (INTVAL (x) & 7) != 0)
a6f12d7c
RK
5309 output_operand_lossage ("invalid %%s value");
5310
30102605
RH
5311 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5312 WORDS_BIG_ENDIAN
5313 ? (56 - INTVAL (x)) / 8
5314 : INTVAL (x) / 8);
a6f12d7c
RK
5315 break;
5316
5317 case 'S':
5318 /* Same, except compute (64 - c) / 8 */
5319
5320 if (GET_CODE (x) != CONST_INT
5321 && (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
5322 && (INTVAL (x) & 7) != 8)
5323 output_operand_lossage ("invalid %%s value");
5324
0bc8ae6e 5325 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
a6f12d7c
RK
5326 break;
5327
30102605
RH
5328 case 't':
5329 {
5330 /* On Unicos/Mk systems: use a DEX expression if the symbol
5331 clashes with a register name. */
5332 int dex = unicosmk_need_dex (x);
5333 if (dex)
5334 fprintf (file, "DEX(%d)", dex);
5335 else
5336 output_addr_const (file, x);
5337 }
5338 break;
5339
bdd4c95a 5340 case 'C': case 'D': case 'c': case 'd':
a6f12d7c 5341 /* Write out comparison name. */
bdd4c95a
RK
5342 {
5343 enum rtx_code c = GET_CODE (x);
5344
5345 if (GET_RTX_CLASS (c) != '<')
5346 output_operand_lossage ("invalid %%C value");
5347
948068e2 5348 else if (code == 'D')
bdd4c95a
RK
5349 c = reverse_condition (c);
5350 else if (code == 'c')
5351 c = swap_condition (c);
5352 else if (code == 'd')
5353 c = swap_condition (reverse_condition (c));
5354
5355 if (c == LEU)
5356 fprintf (file, "ule");
5357 else if (c == LTU)
5358 fprintf (file, "ult");
1eb8759b
RH
5359 else if (c == UNORDERED)
5360 fprintf (file, "un");
bdd4c95a
RK
5361 else
5362 fprintf (file, "%s", GET_RTX_NAME (c));
5363 }
ab561e66
RK
5364 break;
5365
a6f12d7c
RK
5366 case 'E':
5367 /* Write the divide or modulus operator. */
5368 switch (GET_CODE (x))
5369 {
5370 case DIV:
5371 fprintf (file, "div%s", GET_MODE (x) == SImode ? "l" : "q");
5372 break;
5373 case UDIV:
5374 fprintf (file, "div%su", GET_MODE (x) == SImode ? "l" : "q");
5375 break;
5376 case MOD:
5377 fprintf (file, "rem%s", GET_MODE (x) == SImode ? "l" : "q");
5378 break;
5379 case UMOD:
5380 fprintf (file, "rem%su", GET_MODE (x) == SImode ? "l" : "q");
5381 break;
5382 default:
5383 output_operand_lossage ("invalid %%E value");
5384 break;
5385 }
5386 break;
5387
a6f12d7c
RK
5388 case 'A':
5389 /* Write "_u" for unaligned access. */
5390 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
5391 fprintf (file, "_u");
5392 break;
5393
5394 case 0:
5395 if (GET_CODE (x) == REG)
5396 fprintf (file, "%s", reg_names[REGNO (x)]);
5397 else if (GET_CODE (x) == MEM)
5398 output_address (XEXP (x, 0));
5399 else
5400 output_addr_const (file, x);
5401 break;
5402
5403 default:
5404 output_operand_lossage ("invalid %%xn code");
5405 }
5406}
714b019c
RH
5407
5408void
5409print_operand_address (file, addr)
5410 FILE *file;
5411 rtx addr;
5412{
e03ec28f 5413 int basereg = 31;
714b019c
RH
5414 HOST_WIDE_INT offset = 0;
5415
5416 if (GET_CODE (addr) == AND)
5417 addr = XEXP (addr, 0);
714b019c 5418
e03ec28f
RH
5419 if (GET_CODE (addr) == PLUS
5420 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
714b019c
RH
5421 {
5422 offset = INTVAL (XEXP (addr, 1));
e03ec28f 5423 addr = XEXP (addr, 0);
714b019c 5424 }
1eb356b9
RH
5425
5426 if (GET_CODE (addr) == LO_SUM)
5427 {
5428 output_addr_const (file, XEXP (addr, 1));
5429 if (offset)
5430 {
5431 fputc ('+', file);
5432 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
5433 }
5434
5435 addr = XEXP (addr, 0);
5436 if (GET_CODE (addr) == REG)
5437 basereg = REGNO (addr);
5438 else if (GET_CODE (addr) == SUBREG
5439 && GET_CODE (SUBREG_REG (addr)) == REG)
5440 basereg = subreg_regno (addr);
5441 else
5442 abort ();
133d3133
RH
5443
5444 fprintf (file, "($%d)\t\t!%s", basereg,
5445 (basereg == 29 ? "gprel" : "gprellow"));
1eb356b9
RH
5446 return;
5447 }
5448
e03ec28f
RH
5449 if (GET_CODE (addr) == REG)
5450 basereg = REGNO (addr);
5451 else if (GET_CODE (addr) == SUBREG
5452 && GET_CODE (SUBREG_REG (addr)) == REG)
1eb356b9 5453 basereg = subreg_regno (addr);
e03ec28f
RH
5454 else if (GET_CODE (addr) == CONST_INT)
5455 offset = INTVAL (addr);
714b019c
RH
5456 else
5457 abort ();
5458
5459 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
e03ec28f 5460 fprintf (file, "($%d)", basereg);
714b019c 5461}
a6f12d7c 5462\f
9ec36da5
JL
5463/* Emit RTL insns to initialize the variable parts of a trampoline at
5464 TRAMP. FNADDR is an RTX for the address of the function's pure
5465 code. CXT is an RTX for the static chain value for the function.
c714f03d
RH
5466
5467 The three offset parameters are for the individual template's
5468 layout. A JMPOFS < 0 indicates that the trampoline does not
5469 contain instructions at all.
5470
9ec36da5
JL
5471 We assume here that a function will be called many more times than
5472 its address is taken (e.g., it might be passed to qsort), so we
5473 take the trouble to initialize the "hint" field in the JMP insn.
5474 Note that the hint field is PC (new) + 4 * bits 13:0. */
5475
5476void
c714f03d
RH
5477alpha_initialize_trampoline (tramp, fnaddr, cxt, fnofs, cxtofs, jmpofs)
5478 rtx tramp, fnaddr, cxt;
5479 int fnofs, cxtofs, jmpofs;
9ec36da5
JL
5480{
5481 rtx temp, temp1, addr;
d2692ef8 5482 /* VMS really uses DImode pointers in memory at this point. */
be7b80f4 5483 enum machine_mode mode = TARGET_ABI_OPEN_VMS ? Pmode : ptr_mode;
9ec36da5 5484
d2692ef8
DT
5485#ifdef POINTERS_EXTEND_UNSIGNED
5486 fnaddr = convert_memory_address (mode, fnaddr);
5487 cxt = convert_memory_address (mode, cxt);
5488#endif
5489
9ec36da5 5490 /* Store function address and CXT. */
d420e567 5491 addr = memory_address (mode, plus_constant (tramp, fnofs));
c5c76735 5492 emit_move_insn (gen_rtx_MEM (mode, addr), fnaddr);
d420e567 5493 addr = memory_address (mode, plus_constant (tramp, cxtofs));
c5c76735 5494 emit_move_insn (gen_rtx_MEM (mode, addr), cxt);
c714f03d
RH
5495
5496 /* This has been disabled since the hint only has a 32k range, and in
285a5742 5497 no existing OS is the stack within 32k of the text segment. */
c714f03d
RH
5498 if (0 && jmpofs >= 0)
5499 {
5500 /* Compute hint value. */
5501 temp = force_operand (plus_constant (tramp, jmpofs+4), NULL_RTX);
5502 temp = expand_binop (DImode, sub_optab, fnaddr, temp, temp, 1,
5503 OPTAB_WIDEN);
5504 temp = expand_shift (RSHIFT_EXPR, Pmode, temp,
5505 build_int_2 (2, 0), NULL_RTX, 1);
5506 temp = expand_and (gen_lowpart (SImode, temp), GEN_INT (0x3fff), 0);
5507
5508 /* Merge in the hint. */
5509 addr = memory_address (SImode, plus_constant (tramp, jmpofs));
c5c76735 5510 temp1 = force_reg (SImode, gen_rtx_MEM (SImode, addr));
c714f03d
RH
5511 temp1 = expand_and (temp1, GEN_INT (0xffffc000), NULL_RTX);
5512 temp1 = expand_binop (SImode, ior_optab, temp1, temp, temp1, 1,
5513 OPTAB_WIDEN);
c5c76735 5514 emit_move_insn (gen_rtx_MEM (SImode, addr), temp1);
c714f03d 5515 }
9ec36da5
JL
5516
5517#ifdef TRANSFER_FROM_TRAMPOLINE
c5c76735 5518 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__enable_execute_stack"),
9ec36da5
JL
5519 0, VOIDmode, 1, addr, Pmode);
5520#endif
5521
c714f03d
RH
5522 if (jmpofs >= 0)
5523 emit_insn (gen_imb ());
9ec36da5
JL
5524}
5525\f
5495cc55
RH
5526/* Determine where to put an argument to a function.
5527 Value is zero to push the argument on the stack,
5528 or a hard register in which to store the argument.
5529
5530 MODE is the argument's machine mode.
5531 TYPE is the data type of the argument (as a tree).
5532 This is null for libcalls where that information may
5533 not be available.
5534 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5535 the preceding args and about the function being called.
5536 NAMED is nonzero if this argument is a named parameter
5537 (otherwise it is an extra parameter matching an ellipsis).
5538
5539 On Alpha the first 6 words of args are normally in registers
5540 and the rest are pushed. */
5541
5542rtx
c5e1237f 5543function_arg (cum, mode, type, named)
5495cc55
RH
5544 CUMULATIVE_ARGS cum;
5545 enum machine_mode mode;
5546 tree type;
5547 int named ATTRIBUTE_UNUSED;
5548{
5549 int basereg;
a82c7f05 5550 int num_args;
5495cc55 5551
30102605
RH
5552 /* Set up defaults for FP operands passed in FP registers, and
5553 integral operands passed in integer registers. */
5554 if (TARGET_FPREGS
5555 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
5556 || GET_MODE_CLASS (mode) == MODE_FLOAT))
5557 basereg = 32 + 16;
5558 else
5559 basereg = 16;
5560
5561 /* ??? Irritatingly, the definition of CUMULATIVE_ARGS is different for
5562 the three platforms, so we can't avoid conditional compilation. */
be7b80f4 5563#if TARGET_ABI_OPEN_VMS
30102605
RH
5564 {
5565 if (mode == VOIDmode)
5566 return alpha_arg_info_reg_val (cum);
be7b80f4 5567
30102605
RH
5568 num_args = cum.num_args;
5569 if (num_args >= 6 || MUST_PASS_IN_STACK (mode, type))
5570 return NULL_RTX;
5571 }
be7b80f4 5572#else
30102605
RH
5573#if TARGET_ABI_UNICOSMK
5574 {
5575 int size;
5495cc55 5576
30102605
RH
5577 /* If this is the last argument, generate the call info word (CIW). */
5578 /* ??? We don't include the caller's line number in the CIW because
5579 I don't know how to determine it if debug infos are turned off. */
5580 if (mode == VOIDmode)
5581 {
5582 int i;
5583 HOST_WIDE_INT lo;
5584 HOST_WIDE_INT hi;
5585 rtx ciw;
5586
5587 lo = 0;
5588
5589 for (i = 0; i < cum.num_reg_words && i < 5; i++)
5590 if (cum.reg_args_type[i])
5591 lo |= (1 << (7 - i));
5592
5593 if (cum.num_reg_words == 6 && cum.reg_args_type[5])
5594 lo |= 7;
5595 else
5596 lo |= cum.num_reg_words;
5597
5598#if HOST_BITS_PER_WIDE_INT == 32
5599 hi = (cum.num_args << 20) | cum.num_arg_words;
5600#else
999c746f
KG
5601 lo = lo | ((HOST_WIDE_INT) cum.num_args << 52)
5602 | ((HOST_WIDE_INT) cum.num_arg_words << 32);
30102605
RH
5603 hi = 0;
5604#endif
5605 ciw = immed_double_const (lo, hi, DImode);
5606
5607 return gen_rtx_UNSPEC (DImode, gen_rtvec (1, ciw),
5608 UNSPEC_UMK_LOAD_CIW);
5609 }
5610
5611 size = ALPHA_ARG_SIZE (mode, type, named);
5612 num_args = cum.num_reg_words;
5613 if (MUST_PASS_IN_STACK (mode, type)
5614 || cum.num_reg_words + size > 6 || cum.force_stack)
5615 return NULL_RTX;
5616 else if (type && TYPE_MODE (type) == BLKmode)
5617 {
5618 rtx reg1, reg2;
5619
5620 reg1 = gen_rtx_REG (DImode, num_args + 16);
5621 reg1 = gen_rtx_EXPR_LIST (DImode, reg1, const0_rtx);
5622
5623 /* The argument fits in two registers. Note that we still need to
5624 reserve a register for empty structures. */
5625 if (size == 0)
5626 return NULL_RTX;
5627 else if (size == 1)
5628 return gen_rtx_PARALLEL (mode, gen_rtvec (1, reg1));
5629 else
5630 {
5631 reg2 = gen_rtx_REG (DImode, num_args + 17);
5632 reg2 = gen_rtx_EXPR_LIST (DImode, reg2, GEN_INT (8));
5633 return gen_rtx_PARALLEL (mode, gen_rtvec (2, reg1, reg2));
5634 }
5635 }
5636 }
5637#else
5638 {
5639 if (cum >= 6)
5640 return NULL_RTX;
5641 num_args = cum;
5642
5643 /* VOID is passed as a special flag for "last argument". */
5644 if (type == void_type_node)
5645 basereg = 16;
5646 else if (MUST_PASS_IN_STACK (mode, type))
5647 return NULL_RTX;
5648 else if (FUNCTION_ARG_PASS_BY_REFERENCE (cum, mode, type, named))
5649 basereg = 16;
5650 }
5651#endif /* TARGET_ABI_UNICOSMK */
be7b80f4 5652#endif /* TARGET_ABI_OPEN_VMS */
5495cc55 5653
a82c7f05 5654 return gen_rtx_REG (mode, num_args + basereg);
5495cc55
RH
5655}
5656
63966b3b
RH
5657tree
5658alpha_build_va_list ()
a6f12d7c 5659{
d4b15af9 5660 tree base, ofs, record, type_decl;
a6f12d7c 5661
30102605 5662 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
63966b3b
RH
5663 return ptr_type_node;
5664
d4b15af9
RH
5665 record = make_lang_type (RECORD_TYPE);
5666 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5667 TREE_CHAIN (record) = type_decl;
5668 TYPE_NAME (record) = type_decl;
5669
63966b3b 5670 /* C++? SET_IS_AGGR_TYPE (record, 1); */
a6f12d7c 5671
63966b3b
RH
5672 ofs = build_decl (FIELD_DECL, get_identifier ("__offset"),
5673 integer_type_node);
5674 DECL_FIELD_CONTEXT (ofs) = record;
29587b1c 5675
63966b3b
RH
5676 base = build_decl (FIELD_DECL, get_identifier ("__base"),
5677 ptr_type_node);
5678 DECL_FIELD_CONTEXT (base) = record;
5679 TREE_CHAIN (base) = ofs;
29587b1c 5680
63966b3b
RH
5681 TYPE_FIELDS (record) = base;
5682 layout_type (record);
5683
5684 return record;
5685}
5686
5687void
5688alpha_va_start (stdarg_p, valist, nextarg)
5689 int stdarg_p;
5690 tree valist;
5691 rtx nextarg ATTRIBUTE_UNUSED;
5692{
5693 HOST_WIDE_INT offset;
5694 tree t, offset_field, base_field;
29587b1c 5695
bdb429a5
RK
5696 if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK)
5697 return;
5698
f7130778 5699 if (TARGET_ABI_UNICOSMK)
63966b3b
RH
5700 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
5701
5702 /* For Unix, SETUP_INCOMING_VARARGS moves the starting address base
5703 up by 48, storing fp arg registers in the first 48 bytes, and the
5704 integer arg registers in the next 48 bytes. This is only done,
5705 however, if any integer registers need to be stored.
5706
5707 If no integer registers need be stored, then we must subtract 48
5708 in order to account for the integer arg registers which are counted
5709 in argsize above, but which are not actually stored on the stack. */
5710
5711 if (NUM_ARGS <= 5 + stdarg_p)
f7130778 5712 offset = TARGET_ABI_OPEN_VMS ? UNITS_PER_WORD : 6 * UNITS_PER_WORD;
89cfc2c6 5713 else
63966b3b
RH
5714 offset = -6 * UNITS_PER_WORD;
5715
f7130778
DR
5716 if (TARGET_ABI_OPEN_VMS)
5717 {
5718 nextarg = plus_constant (nextarg, offset);
5719 nextarg = plus_constant (nextarg, NUM_ARGS * UNITS_PER_WORD);
5720 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
5721 make_tree (ptr_type_node, nextarg));
5722 TREE_SIDE_EFFECTS (t) = 1;
63966b3b 5723
f7130778
DR
5724 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5725 }
5726 else
5727 {
5728 base_field = TYPE_FIELDS (TREE_TYPE (valist));
5729 offset_field = TREE_CHAIN (base_field);
5730
5731 base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
5732 valist, base_field);
5733 offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
5734 valist, offset_field);
5735
5736 t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
5737 t = build (PLUS_EXPR, ptr_type_node, t, build_int_2 (offset, 0));
5738 t = build (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
5739 TREE_SIDE_EFFECTS (t) = 1;
5740 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5741
5742 t = build_int_2 (NUM_ARGS * UNITS_PER_WORD, 0);
5743 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field, t);
5744 TREE_SIDE_EFFECTS (t) = 1;
5745 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5746 }
63966b3b
RH
5747}
5748
5749rtx
5750alpha_va_arg (valist, type)
5751 tree valist, type;
5752{
5753 HOST_WIDE_INT tsize;
5754 rtx addr;
5755 tree t;
5756 tree offset_field, base_field, addr_tree, addend;
5757 tree wide_type, wide_ofs;
09e98324 5758 int indirect = 0;
63966b3b 5759
30102605 5760 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
63966b3b 5761 return std_expand_builtin_va_arg (valist, type);
a6f12d7c 5762
63966b3b 5763 tsize = ((TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT + 7) / 8) * 8;
1d783d31 5764
63966b3b
RH
5765 base_field = TYPE_FIELDS (TREE_TYPE (valist));
5766 offset_field = TREE_CHAIN (base_field);
1d783d31 5767
63966b3b
RH
5768 base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
5769 valist, base_field);
5770 offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
5771 valist, offset_field);
5772
5773 wide_type = make_signed_type (64);
5774 wide_ofs = save_expr (build1 (CONVERT_EXPR, wide_type, offset_field));
5775
5776 addend = wide_ofs;
09e98324
RO
5777
5778 if (TYPE_MODE (type) == TFmode || TYPE_MODE (type) == TCmode)
5779 {
5780 indirect = 1;
5781 tsize = UNITS_PER_WORD;
5782 }
5783 else if (FLOAT_TYPE_P (type))
89cfc2c6 5784 {
63966b3b 5785 tree fpaddend, cond;
89cfc2c6 5786
63966b3b
RH
5787 fpaddend = fold (build (PLUS_EXPR, TREE_TYPE (addend),
5788 addend, build_int_2 (-6*8, 0)));
89cfc2c6 5789
63966b3b
RH
5790 cond = fold (build (LT_EXPR, integer_type_node,
5791 wide_ofs, build_int_2 (6*8, 0)));
89cfc2c6 5792
63966b3b
RH
5793 addend = fold (build (COND_EXPR, TREE_TYPE (addend), cond,
5794 fpaddend, addend));
89cfc2c6 5795 }
63966b3b
RH
5796
5797 addr_tree = build (PLUS_EXPR, TREE_TYPE (base_field),
5798 base_field, addend);
5799
5800 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
5801 addr = copy_to_reg (addr);
5802
5803 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
5804 build (PLUS_EXPR, TREE_TYPE (offset_field),
5805 offset_field, build_int_2 (tsize, 0)));
5806 TREE_SIDE_EFFECTS (t) = 1;
5807 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5808
09e98324
RO
5809 if (indirect)
5810 {
5811 addr = force_reg (Pmode, addr);
5812 addr = gen_rtx_MEM (Pmode, addr);
5813 }
5814
63966b3b 5815 return addr;
a6f12d7c
RK
5816}
5817\f
/* This page contains routines that are used to determine what the function
   prologue and epilogue code will do and write them out.  */

/* Compute the size of the save area in the stack.  */

/* These variables are used for communication between the following functions.
   They indicate various things about the current function being compiled
   that are used to tell what kind of prologue, epilogue and procedure
   descriptor to generate.  */

/* Kind of procedure: PT_NULL needs neither frame nor saved registers,
   PT_REGISTER keeps everything in registers, PT_STACK is a normal
   stack-frame procedure.  */
enum alpha_procedure_types {PT_NULL = 0, PT_REGISTER = 1, PT_STACK = 2};
static enum alpha_procedure_types alpha_procedure_type;

/* Register number (either FP or SP) that is used to unwind the frame.  */
static int vms_unwind_regno;

/* Register number used to save FP.  We need not have one for RA since
   we don't modify it for register procedures.  This is only defined
   for register frame procedures.  */
static int vms_save_fp_regno;

/* Register number used to reference objects off our PV.  */
static int vms_base_regno;
acd92049 5843/* Compute register masks for saved registers. */
89cfc2c6
RK
5844
5845static void
5846alpha_sa_mask (imaskP, fmaskP)
5847 unsigned long *imaskP;
5848 unsigned long *fmaskP;
5849{
5850 unsigned long imask = 0;
5851 unsigned long fmask = 0;
1eb356b9 5852 unsigned int i;
89cfc2c6 5853
14691f8d
RH
5854 /* Irritatingly, there are two kinds of thunks -- those created with
5855 ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go through
5856 the regular part of the compiler. In the ASM_OUTPUT_MI_THUNK case
5857 we don't have valid register life info, but assemble_start_function
5858 wants to output .frame and .mask directives. */
5859 if (current_function_is_thunk && rtx_equal_function_value_matters)
acd92049 5860 {
14691f8d
RH
5861 *imaskP = 0;
5862 *fmaskP = 0;
5863 return;
5864 }
89cfc2c6 5865
c2ea1ac6 5866 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
14691f8d 5867 imask |= (1L << HARD_FRAME_POINTER_REGNUM);
89cfc2c6 5868
14691f8d
RH
5869 /* One for every register we have to save. */
5870 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5871 if (! fixed_regs[i] && ! call_used_regs[i]
5872 && regs_ever_live[i] && i != REG_RA
5873 && (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
5874 {
5875 if (i < 32)
5876 imask |= (1L << i);
5877 else
5878 fmask |= (1L << (i - 32));
5879 }
5880
5881 /* We need to restore these for the handler. */
5882 if (current_function_calls_eh_return)
5883 for (i = 0; ; ++i)
5884 {
5885 unsigned regno = EH_RETURN_DATA_REGNO (i);
5886 if (regno == INVALID_REGNUM)
5887 break;
5888 imask |= 1L << regno;
5889 }
30102605 5890
14691f8d
RH
5891 /* If any register spilled, then spill the return address also. */
5892 /* ??? This is required by the Digital stack unwind specification
5893 and isn't needed if we're doing Dwarf2 unwinding. */
5894 if (imask || fmask || alpha_ra_ever_killed ())
5895 imask |= (1L << REG_RA);
9c0e94a5 5896
89cfc2c6
RK
5897 *imaskP = imask;
5898 *fmaskP = fmask;
89cfc2c6
RK
5899}
5900
5901int
5902alpha_sa_size ()
5903{
61334ebe 5904 unsigned long mask[2];
89cfc2c6 5905 int sa_size = 0;
61334ebe 5906 int i, j;
89cfc2c6 5907
61334ebe
RH
5908 alpha_sa_mask (&mask[0], &mask[1]);
5909
5910 if (TARGET_ABI_UNICOSMK)
5911 {
5912 if (mask[0] || mask[1])
5913 sa_size = 14;
5914 }
acd92049 5915 else
acd92049 5916 {
61334ebe
RH
5917 for (j = 0; j < 2; ++j)
5918 for (i = 0; i < 32; ++i)
5919 if ((mask[j] >> i) & 1)
5920 sa_size++;
acd92049 5921 }
89cfc2c6 5922
30102605
RH
5923 if (TARGET_ABI_UNICOSMK)
5924 {
5925 /* We might not need to generate a frame if we don't make any calls
5926 (including calls to __T3E_MISMATCH if this is a vararg function),
5927 don't have any local variables which require stack slots, don't
5928 use alloca and have not determined that we need a frame for other
5929 reasons. */
5930
c2ea1ac6
DR
5931 alpha_procedure_type
5932 = (sa_size || get_frame_size() != 0
5933 || current_function_outgoing_args_size || current_function_varargs
5934 || current_function_stdarg || current_function_calls_alloca
5935 || frame_pointer_needed)
5936 ? PT_STACK : PT_REGISTER;
30102605
RH
5937
5938 /* Always reserve space for saving callee-saved registers if we
5939 need a frame as required by the calling convention. */
c2ea1ac6 5940 if (alpha_procedure_type == PT_STACK)
30102605
RH
5941 sa_size = 14;
5942 }
5943 else if (TARGET_ABI_OPEN_VMS)
9c0e94a5
RH
5944 {
5945 /* Start by assuming we can use a register procedure if we don't
5946 make any calls (REG_RA not used) or need to save any
5947 registers and a stack procedure if we do. */
c2ea1ac6
DR
5948 if ((mask[0] >> REG_RA) & 1)
5949 alpha_procedure_type = PT_STACK;
5950 else if (get_frame_size() != 0)
5951 alpha_procedure_type = PT_REGISTER;
5952 else
5953 alpha_procedure_type = PT_NULL;
61334ebe
RH
5954
5955 /* Don't reserve space for saving RA yet. Do that later after we've
5956 made the final decision on stack procedure vs register procedure. */
c2ea1ac6 5957 if (alpha_procedure_type == PT_STACK)
61334ebe 5958 sa_size--;
9c0e94a5
RH
5959
5960 /* Decide whether to refer to objects off our PV via FP or PV.
5961 If we need FP for something else or if we receive a nonlocal
5962 goto (which expects PV to contain the value), we must use PV.
5963 Otherwise, start by assuming we can use FP. */
c2ea1ac6
DR
5964
5965 vms_base_regno
5966 = (frame_pointer_needed
5967 || current_function_has_nonlocal_label
5968 || alpha_procedure_type == PT_STACK
5969 || current_function_outgoing_args_size)
5970 ? REG_PV : HARD_FRAME_POINTER_REGNUM;
9c0e94a5
RH
5971
5972 /* If we want to copy PV into FP, we need to find some register
5973 in which to save FP. */
5974
5975 vms_save_fp_regno = -1;
5976 if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
5977 for (i = 0; i < 32; i++)
5978 if (! fixed_regs[i] && call_used_regs[i] && ! regs_ever_live[i])
5979 vms_save_fp_regno = i;
5980
c2ea1ac6
DR
5981 if (vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)
5982 vms_base_regno = REG_PV, alpha_procedure_type = PT_STACK;
5983 else if (alpha_procedure_type == PT_NULL)
5984 vms_base_regno = REG_PV;
9c0e94a5
RH
5985
5986 /* Stack unwinding should be done via FP unless we use it for PV. */
5987 vms_unwind_regno = (vms_base_regno == REG_PV
5988 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
5989
5990 /* If this is a stack procedure, allow space for saving FP and RA. */
c2ea1ac6 5991 if (alpha_procedure_type == PT_STACK)
9c0e94a5
RH
5992 sa_size += 2;
5993 }
5994 else
5995 {
9c0e94a5
RH
5996 /* Our size must be even (multiple of 16 bytes). */
5997 if (sa_size & 1)
5998 sa_size++;
5999 }
89cfc2c6
RK
6000
6001 return sa_size * 8;
6002}
6003
6004int
6005alpha_pv_save_size ()
6006{
6007 alpha_sa_size ();
c2ea1ac6 6008 return alpha_procedure_type == PT_STACK ? 8 : 0;
89cfc2c6
RK
6009}
6010
6011int
6012alpha_using_fp ()
6013{
6014 alpha_sa_size ();
9c0e94a5 6015 return vms_unwind_regno == HARD_FRAME_POINTER_REGNUM;
89cfc2c6
RK
6016}
6017
#if TARGET_ABI_OPEN_VMS

/* Machine attributes recognized on VMS.  All three apply to
   declarations only and take no arguments or handler.  */
const struct attribute_spec vms_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "overlaid",   0, 0, true,  false, false, NULL },
  { "global",     0, 0, true,  false, false, NULL },
  { "initialize", 0, 0, true,  false, false, NULL },
  { NULL,         0, 0, false, false, false, NULL }
};

#endif
6030
1eb356b9
RH
6031static int
6032find_lo_sum (px, data)
6033 rtx *px;
6034 void *data ATTRIBUTE_UNUSED;
6035{
6036 return GET_CODE (*px) == LO_SUM;
6037}
6038
9c0e94a5
RH
6039static int
6040alpha_does_function_need_gp ()
6041{
6042 rtx insn;
a6f12d7c 6043
30102605
RH
6044 /* The GP being variable is an OSF abi thing. */
6045 if (! TARGET_ABI_OSF)
9c0e94a5 6046 return 0;
a6f12d7c 6047
70f4f91c 6048 if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
9c0e94a5 6049 return 1;
d60a05a1 6050
acd92049
RH
6051 if (current_function_is_thunk)
6052 return 1;
acd92049 6053
9c0e94a5
RH
6054 /* If we need a GP (we have a LDSYM insn or a CALL_INSN), load it first.
6055 Even if we are a static function, we still need to do this in case
6056 our address is taken and passed to something like qsort. */
a6f12d7c 6057
9c0e94a5
RH
6058 push_topmost_sequence ();
6059 insn = get_insns ();
6060 pop_topmost_sequence ();
89cfc2c6 6061
9c0e94a5 6062 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 6063 if (INSN_P (insn)
9c0e94a5
RH
6064 && GET_CODE (PATTERN (insn)) != USE
6065 && GET_CODE (PATTERN (insn)) != CLOBBER)
6066 {
6067 enum attr_type type = get_attr_type (insn);
6068 if (type == TYPE_LDSYM || type == TYPE_JSR)
6069 return 1;
1eb356b9
RH
6070 if (TARGET_EXPLICIT_RELOCS
6071 && for_each_rtx (&PATTERN (insn), find_lo_sum, NULL) > 0)
6072 return 1;
9c0e94a5 6073 }
a6f12d7c 6074
9c0e94a5 6075 return 0;
a6f12d7c
RK
6076}
6077
0f33506c
RK
6078/* Write a version stamp. Don't write anything if we are running as a
6079 cross-compiler. Otherwise, use the versions in /usr/include/stamp.h. */
6080
9d654bba 6081#ifdef HAVE_STAMP_H
0f33506c
RK
6082#include <stamp.h>
6083#endif
6084
6085void
6086alpha_write_verstamp (file)
4c020733 6087 FILE *file ATTRIBUTE_UNUSED;
0f33506c
RK
6088{
6089#ifdef MS_STAMP
aec4ca5e 6090 fprintf (file, "\t.verstamp %d %d\n", MS_STAMP, LS_STAMP);
0f33506c
RK
6091#endif
6092}
ec6840c1 6093\f
6abc6f40
RH
6094/* Helper function to set RTX_FRAME_RELATED_P on instructions, including
6095 sequences. */
6096
6097static rtx
6098set_frame_related_p ()
6099{
6100 rtx seq = gen_sequence ();
6101 end_sequence ();
6102
6103 if (GET_CODE (seq) == SEQUENCE)
6104 {
6105 int i = XVECLEN (seq, 0);
6106 while (--i >= 0)
6107 RTX_FRAME_RELATED_P (XVECEXP (seq, 0, i)) = 1;
6108 return emit_insn (seq);
6109 }
6110 else
6111 {
6112 seq = emit_insn (seq);
6113 RTX_FRAME_RELATED_P (seq) = 1;
6114 return seq;
6115 }
6116}
6117
6118#define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
6119
/* Write function prologue.  */

/* On vms we have two kinds of functions:

   - stack frame (PROC_STACK)
	these are 'normal' functions with local vars and which are
	calling other functions
   - register frame (PROC_REGISTER)
	keeps all data in registers, needs no stack

   We must pass this to the assembler so it can generate the
   proper pdsc (procedure descriptor)
   This is done with the '.pdesc' command.

   On not-vms, we don't really differentiate between the two, as we can
   simply allocate stack without saving registers.  */

void
alpha_expand_prologue ()
{
  /* Registers to save.  */
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  /* Base register for the save area, and scratch MEM for the stores.  */
  rtx sa_reg, mem;
  int i;

  sa_size = alpha_sa_size ();

  /* Compute the complete frame size.  Each ABI lays it out differently:
     VMS adds 8 bytes (the PV slot) for stack-frame procedures; Unicos/Mk
     adds 48 bytes for the DSIB and rounds the two halves separately;
     OSF rounds the outgoing-args area and the locals separately.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    /* We have to allocate space for the DSIB if we generate a frame.  */
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
				+ current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  /* The register save area sits just past the VMS PV slot, or past the
     outgoing argument area elsewhere.  */
  if (TARGET_ABI_OPEN_VMS)
    reg_offset = 8;
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  /* Bitmasks of the integer and floating registers that must be saved.  */
  alpha_sa_mask (&imask, &fmask);

  /* Emit an insn to reload GP, if needed.  */
  if (TARGET_ABI_OSF)
    {
      alpha_function_needs_gp = alpha_does_function_need_gp ();
      if (alpha_function_needs_gp)
	emit_insn (gen_prologue_ldgp ());
    }

  /* TARGET_PROFILING_NEEDS_GP actually implies that we need to insert
     the call to mcount ourselves, rather than having the linker do it
     magically in response to -pg.  Since _mcount has special linkage,
     don't represent the call as a call.  */
  if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
    emit_insn (gen_prologue_mcount ());

  if (TARGET_ABI_UNICOSMK)
    unicosmk_gen_dsib (&imask);

  /* Adjust the stack by the frame size.  If the frame size is > 4096
     bytes, we need to be sure we probe somewhere in the first and last
     4096 bytes (we can probably get away without the latter test) and
     every 8192 bytes in between.  If the frame size is > 32768, we
     do this in a loop.  Otherwise, we generate the explicit probe
     instructions.

     Note that we are only allowed to adjust sp once in the prologue.  */

  if (frame_size <= 32768)
    {
      if (frame_size > 4096)
	{
	  int probed = 4096;

	  /* NOTE(review): the "+ 64" adjustments below presumably skip
	     over the Unicos/Mk DSIB area -- confirm against
	     unicosmk_gen_dsib.  */
	  do
	    emit_insn (gen_probe_stack (GEN_INT (TARGET_ABI_UNICOSMK
						 ? -probed + 64
						 : -probed)));
	  while ((probed += 8192) < frame_size);

	  /* We only have to do this probe if we aren't saving registers.  */
	  if (sa_size == 0 && probed + 4096 < frame_size)
	    emit_insn (gen_probe_stack (GEN_INT (-frame_size)));
	}

      if (frame_size != 0)
	FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
				    GEN_INT (TARGET_ABI_UNICOSMK
					     ? -frame_size + 64
					     : -frame_size))));
    }
  else
    {
      /* Here we generate code to set R22 to SP + 4096 and set R23 to the
	 number of 8192 byte blocks to probe.  We then probe each block
	 in the loop and then set SP to the proper location.  If the
	 amount remaining is > 4096, we have to do one more probe if we
	 are not saving any registers.  */

      HOST_WIDE_INT blocks = (frame_size + 4096) / 8192;
      HOST_WIDE_INT leftover = frame_size + 4096 - blocks * 8192;
      rtx ptr = gen_rtx_REG (DImode, 22);
      rtx count = gen_rtx_REG (DImode, 23);
      rtx seq;

      emit_move_insn (count, GEN_INT (blocks));
      emit_insn (gen_adddi3 (ptr, stack_pointer_rtx,
			     GEN_INT (TARGET_ABI_UNICOSMK ? 4096 - 64 : 4096)));

      /* Because of the difficulty in emitting a new basic block this
	 late in the compilation, generate the loop as a single insn.  */
      emit_insn (gen_prologue_stack_probe_loop (count, ptr));

      if (leftover > 4096 && sa_size == 0)
	{
	  /* Force the final probe to memory; volatile so it is not
	     deleted as a dead store.  */
	  rtx last = gen_rtx_MEM (DImode, plus_constant (ptr, -leftover));
	  MEM_VOLATILE_P (last) = 1;
	  emit_move_insn (last, const0_rtx);
	}

      if (TARGET_ABI_WINDOWS_NT)
	{
	  /* For NT stack unwind (done by 'reverse execution'), it's
	     not OK to take the result of a loop, even though the value
	     is already in ptr, so we reload it via a single operation
	     and subtract it to sp.

	     Yes, that's correct -- we have to reload the whole constant
	     into a temporary via ldah+lda then subtract from sp.  To
	     ensure we get ldah+lda, we use a special pattern.  */

	  HOST_WIDE_INT lo, hi;
	  /* Sign-extend the low 16 bits of frame_size; hi is the rest.  */
	  lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
	  hi = frame_size - lo;

	  emit_move_insn (ptr, GEN_INT (hi));
	  emit_insn (gen_nt_lda (ptr, GEN_INT (lo)));
	  seq = emit_insn (gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx,
				       ptr));
	}
      else
	{
	  seq = emit_insn (gen_adddi3 (stack_pointer_rtx, ptr,
				       GEN_INT (-leftover)));
	}

      /* This alternative is special, because the DWARF code cannot
	 possibly intuit through the loop above.  So we invent this
	 note it looks at instead.  */
      RTX_FRAME_RELATED_P (seq) = 1;
      REG_NOTES (seq)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					      GEN_INT (TARGET_ABI_UNICOSMK
						       ? -frame_size + 64
						       : -frame_size))),
			     REG_NOTES (seq));
    }

  if (!TARGET_ABI_UNICOSMK)
    {
      /* Cope with very large offsets to the register save area.  */
      sa_reg = stack_pointer_rtx;
      if (reg_offset + sa_size > 0x8000)
	{
	  int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
	  HOST_WIDE_INT bias;

	  /* Fold as much of the offset as possible into the $24 bias so
	     each save below uses a 16-bit displacement.  */
	  if (low + sa_size <= 0x8000)
	    bias = reg_offset - low, reg_offset = low;
	  else
	    bias = reg_offset, reg_offset = 0;

	  sa_reg = gen_rtx_REG (DImode, 24);
	  FRP (emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx,
				      GEN_INT (bias))));
	}

      /* Save regs in stack order.  Beginning with VMS PV.  */
      if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
	{
	  mem = gen_rtx_MEM (DImode, stack_pointer_rtx);
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_PV)));
	}

      /* Save register RA next.  */
      if (imask & (1L << REG_RA))
	{
	  mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
	  imask &= ~(1L << REG_RA);
	  reg_offset += 8;
	}

      /* Now save any other registers required to be saved.  */
      for (i = 0; i < 32; i++)
	if (imask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
	    reg_offset += 8;
	  }

      /* Then the floating registers; FP regs are hard regs 32..63.  */
      for (i = 0; i < 32; i++)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant (sa_reg, reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
	    reg_offset += 8;
	  }
    }
  else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
    {
      /* The standard frame on the T3E includes space for saving registers.
	 We just have to use it. We don't have to save the return address and
	 the old frame pointer here - they are saved in the DSIB.  */

      /* Saves grow downward from -56 off the hard frame pointer.  */
      reg_offset = -56;
      for (i = 9; i < 15; i++)
	if (imask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DImode, plus_constant (hard_frame_pointer_rtx,
						      reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
	    reg_offset -= 8;
	  }
      for (i = 2; i < 10; i++)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant (hard_frame_pointer_rtx,
						      reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
	    reg_offset -= 8;
	  }
    }

  if (TARGET_ABI_OPEN_VMS)
    {
      if (alpha_procedure_type == PT_REGISTER)
	/* Register frame procedures save the fp.
	   ?? Ought to have a dwarf2 save for this.  */
	emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno),
			hard_frame_pointer_rtx);

      if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV)
	emit_insn (gen_force_movdi (gen_rtx_REG (DImode, vms_base_regno),
				    gen_rtx_REG (DImode, REG_PV)));

      if (alpha_procedure_type != PT_NULL
	  && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
	FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));

      /* If we have to allocate space for outgoing args, do it now.  */
      if (current_function_outgoing_args_size != 0)
	FRP (emit_move_insn
	     (stack_pointer_rtx,
	      plus_constant (hard_frame_pointer_rtx,
			     - (ALPHA_ROUND
				(current_function_outgoing_args_size)))));
    }
  else if (!TARGET_ABI_UNICOSMK)
    {
      /* If we need a frame pointer, set it from the stack pointer.  */
      if (frame_pointer_needed)
	{
	  if (TARGET_CAN_FAULT_IN_PROLOGUE)
	    FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
	  else
	    /* This must always be the last instruction in the
	       prologue, thus we emit a special move + clobber.  */
	      FRP (emit_insn (gen_init_fp (hard_frame_pointer_rtx,
					   stack_pointer_rtx, sa_reg)));
	}
    }

  /* The ABIs for VMS and OSF/1 say that while we can schedule insns into
     the prologue, for exception handling reasons, we cannot do this for
     any insn that might fault.  We could prevent this for mems with a
     (clobber:BLK (scratch)), but this doesn't work for fp insns.  So we
     have to prevent all such scheduling with a blockage.

     Linux, on the other hand, never bothered to implement OSF/1's
     exception handling, and so doesn't care about such things.  Anyone
     planning to use dwarf2 frame-unwind info can also omit the blockage.  */

  if (! TARGET_CAN_FAULT_IN_PROLOGUE)
    emit_insn (gen_blockage ());
}
6433
/* Output the textual info surrounding the prologue.  */

/* Emit the assembler text that introduces the function FNNAME into FILE:
   .ent/.frame/.mask/.fmask directives, the entry label, and (on VMS) the
   procedure descriptor bookkeeping.  Also records the frame layout in
   alpha_auto_offset/alpha_arg_offset for debug output.  */

void
alpha_start_function (file, fnname, decl)
     FILE *file;
     const char *fnname;
     tree decl ATTRIBUTE_UNUSED;
{
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  /* +6 leaves room for the "..en" suffix, a globalizing ':', and NUL.  */
  char *entry_label = (char *) alloca (strlen (fnname) + 6);
  int i;

  /* Don't emit an extern directive for functions defined in the same file.  */
  if (TARGET_ABI_UNICOSMK)
    {
      tree name_tree;
      name_tree = get_identifier (fnname);
      TREE_ASM_WRITTEN (name_tree) = 1;
    }

  alpha_fnname = fnname;
  sa_size = alpha_sa_size ();

  /* Recompute the frame size exactly as alpha_expand_prologue does, so
     the directives below describe the frame actually laid out.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
				+ current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  if (TARGET_ABI_OPEN_VMS)
    reg_offset = 8;
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  alpha_sa_mask (&imask, &fmask);

  /* Ecoff can handle multiple .file directives, so put out file and lineno.
     We have to do that before the .ent directive as we cannot switch
     files within procedures with native ecoff because line numbers are
     linked to procedure descriptors.
     Outputting the lineno helps debugging of one line functions as they
     would otherwise get no line number at all.  Please note that we would
     like to put out last_linenum from final.c, but it is not accessible.  */

  if (write_symbols == SDB_DEBUG)
    {
#ifdef ASM_OUTPUT_SOURCE_FILENAME
      ASM_OUTPUT_SOURCE_FILENAME (file,
				  DECL_SOURCE_FILE (current_function_decl));
#endif
#ifdef ASM_OUTPUT_SOURCE_LINE
      if (debug_info_level != DINFO_LEVEL_TERSE)
	ASM_OUTPUT_SOURCE_LINE (file,
				DECL_SOURCE_LINE (current_function_decl));
#endif
    }

  /* Issue function start and label.  */
  if (TARGET_ABI_OPEN_VMS
      || (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive))
    {
      fputs ("\t.ent ", file);
      assemble_name (file, fnname);
      putc ('\n', file);

      /* If the function needs GP, we'll write the "..ng" label there.
	 Otherwise, do it here.  */
      if (TARGET_ABI_OSF
	  && ! alpha_function_needs_gp
	  && ! current_function_is_thunk)
	{
	  putc ('$', file);
	  assemble_name (file, fnname);
	  fputs ("..ng:\n", file);
	}
    }

  strcpy (entry_label, fnname);
  if (TARGET_ABI_OPEN_VMS)
    strcat (entry_label, "..en");

  /* For public functions, the label must be globalized by appending an
     additional colon.  */
  if (TARGET_ABI_UNICOSMK && TREE_PUBLIC (decl))
    strcat (entry_label, ":");

  ASM_OUTPUT_LABEL (file, entry_label);
  inside_function = TRUE;

  if (TARGET_ABI_OPEN_VMS)
    fprintf (file, "\t.base $%d\n", vms_base_regno);

  if (!TARGET_ABI_OPEN_VMS && !TARGET_ABI_UNICOSMK && TARGET_IEEE_CONFORMANT
      && !flag_inhibit_size_directive)
    {
      /* Set flags in procedure descriptor to request IEEE-conformant
	 math-library routines.  The value we set it to is PDSC_EXC_IEEE
	 (/usr/include/pdsc.h).  */
      fputs ("\t.eflag 48\n", file);
    }

  /* Set up offsets to alpha virtual arg/local debugging pointer.  */
  alpha_auto_offset = -frame_size + current_function_pretend_args_size;
  alpha_arg_offset = -frame_size + 48;

  /* Describe our frame.  If the frame size is larger than an integer,
     print it as zero to avoid an assembler error.  We won't be
     properly describing such a frame, but that's the best we can do.  */
  if (TARGET_ABI_UNICOSMK)
    ;
  else if (TARGET_ABI_OPEN_VMS)
    {
      fprintf (file, "\t.frame $%d,", vms_unwind_regno);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
	       frame_size >= ((HOST_WIDE_INT) 1 << 31) ? 0 : frame_size);
      fputs (",$26,", file);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, reg_offset);
      fputs ("\n", file);
    }
  else if (!flag_inhibit_size_directive)
    {
      fprintf (file, "\t.frame $%d,",
	       (frame_pointer_needed
		? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM));
      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
	       frame_size >= (1l << 31) ? 0 : frame_size);
      fprintf (file, ",$26,%d\n", current_function_pretend_args_size);
    }

  /* Describe which registers were spilled.  */
  if (TARGET_ABI_UNICOSMK)
    ;
  else if (TARGET_ABI_OPEN_VMS)
    {
      if (imask)
	/* ??? Does VMS care if mask contains ra?  The old code didn't
	   set it, so I don't here.  */
	fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1L << REG_RA));
      if (fmask)
	fprintf (file, "\t.fmask 0x%lx,0\n", fmask);
      if (alpha_procedure_type == PT_REGISTER)
	fprintf (file, "\t.fp_save $%d\n", vms_save_fp_regno);
    }
  else if (!flag_inhibit_size_directive)
    {
      if (imask)
	{
	  fprintf (file, "\t.mask 0x%lx,", imask);
	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
	  putc ('\n', file);

	  /* Advance reg_offset past the integer saves so the .fmask
	     offset below points at the FP save area.  */
	  for (i = 0; i < 32; ++i)
	    if (imask & (1L << i))
	      reg_offset += 8;
	}

      if (fmask)
	{
	  fprintf (file, "\t.fmask 0x%lx,", fmask);
	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
	  putc ('\n', file);
	}
    }

#if TARGET_ABI_OPEN_VMS
  /* Ifdef'ed cause readonly_section and link_section are only
     available then.  */
  readonly_section ();
  fprintf (file, "\t.align 3\n");
  assemble_name (file, fnname); fputs ("..na:\n", file);
  fputs ("\t.ascii \"", file);
  assemble_name (file, fnname);
  fputs ("\\0\"\n", file);

  link_section ();
  fprintf (file, "\t.align 3\n");
  fputs ("\t.name ", file);
  assemble_name (file, fnname);
  fputs ("..na\n", file);
  ASM_OUTPUT_LABEL (file, fnname);
  fprintf (file, "\t.pdesc ");
  assemble_name (file, fnname);
  fprintf (file, "..en,%s\n",
	   alpha_procedure_type == PT_STACK ? "stack"
	   : alpha_procedure_type == PT_REGISTER ? "reg" : "null");
  alpha_need_linkage (fnname, 1);
  text_section ();
#endif
}
a6f12d7c 6643
9c0e94a5 6644/* Emit the .prologue note at the scheduled end of the prologue. */
0f33506c 6645
b4c25db2
NB
6646static void
6647alpha_output_function_end_prologue (file)
9c0e94a5
RH
6648 FILE *file;
6649{
30102605
RH
6650 if (TARGET_ABI_UNICOSMK)
6651 ;
6652 else if (TARGET_ABI_OPEN_VMS)
9c0e94a5 6653 fputs ("\t.prologue\n", file);
be7b80f4 6654 else if (TARGET_ABI_WINDOWS_NT)
9c0e94a5
RH
6655 fputs ("\t.prologue 0\n", file);
6656 else if (!flag_inhibit_size_directive)
14691f8d
RH
6657 fprintf (file, "\t.prologue %d\n",
6658 alpha_function_needs_gp || current_function_is_thunk);
a6f12d7c
RK
6659}

/* Write function epilogue.  */

/* ??? At some point we will want to support full unwind, and so will
   need to mark the epilogue as well.  At the moment, we just confuse
   dwarf2out.  */
#undef FRP
#define FRP(exp) exp

void
alpha_expand_epilogue ()
{
  /* Registers to save.  */
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  /* Whether the hard FP itself lives in the save area, and where.  */
  int fp_is_frame_pointer, fp_offset;
  rtx sa_reg, sa_reg_exp = NULL;
  /* SP is restored to sp_adj1 + sp_adj2 at the end.  */
  rtx sp_adj1, sp_adj2, mem;
  /* Extra adjustment applied when returning from an EH landing pad.  */
  rtx eh_ofs;
  int i;

  sa_size = alpha_sa_size ();

  /* Recompute the frame size exactly as the prologue did.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
				+ current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  if (TARGET_ABI_OPEN_VMS)
    {
      if (alpha_procedure_type == PT_STACK)
	reg_offset = 8;
      else
	reg_offset = 0;
    }
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  alpha_sa_mask (&imask, &fmask);

  fp_is_frame_pointer
    = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
       || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed));
  fp_offset = 0;
  sa_reg = stack_pointer_rtx;

  if (current_function_calls_eh_return)
    eh_ofs = EH_RETURN_STACKADJ_RTX;
  else
    eh_ofs = NULL_RTX;

  if (!TARGET_ABI_UNICOSMK && sa_size)
    {
      /* If we have a frame pointer, restore SP from it.  */
      if ((TARGET_ABI_OPEN_VMS
	   && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
	  || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed))
	FRP (emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx));

      /* Cope with very large offsets to the register save area.  */
      if (reg_offset + sa_size > 0x8000)
	{
	  int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
	  HOST_WIDE_INT bias;

	  if (low + sa_size <= 0x8000)
	    bias = reg_offset - low, reg_offset = low;
	  else
	    bias = reg_offset, reg_offset = 0;

	  sa_reg = gen_rtx_REG (DImode, 22);
	  sa_reg_exp = plus_constant (stack_pointer_rtx, bias);

	  FRP (emit_move_insn (sa_reg, sa_reg_exp));
	}

      /* Restore registers in order, excepting a true frame pointer.  */

      /* RA first.  When doing an EH return, skip the save-area alias
	 set so the load is not reordered against the EH stores.  */
      mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
      if (! eh_ofs)
	set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));

      reg_offset += 8;
      imask &= ~(1L << REG_RA);

      for (i = 0; i < 32; ++i)
	if (imask & (1L << i))
	  {
	    /* The FP slot is recorded, not restored here; it is reloaded
	       last, after the blockage below.  */
	    if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer)
	      fp_offset = reg_offset;
	    else
	      {
		mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset));
		set_mem_alias_set (mem, alpha_sr_alias_set);
		FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
	      }
	    reg_offset += 8;
	  }

      for (i = 0; i < 32; ++i)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
	    reg_offset += 8;
	  }
    }
  else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
    {
      /* Restore callee-saved general-purpose registers.  */

      reg_offset = -56;

      for (i = 9; i < 15; i++)
	if (imask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
						     reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
	    reg_offset -= 8;
	  }

      for (i = 2; i < 10; i++)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant(hard_frame_pointer_rtx,
						     reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
	    reg_offset -= 8;
	  }

      /* Restore the return address from the DSIB.  */

      mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx, -8));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));
    }

  if (frame_size || eh_ofs)
    {
      sp_adj1 = stack_pointer_rtx;

      if (eh_ofs)
	{
	  sp_adj1 = gen_rtx_REG (DImode, 23);
	  emit_move_insn (sp_adj1,
			  gen_rtx_PLUS (Pmode, stack_pointer_rtx, eh_ofs));
	}

      /* If the stack size is large, begin computation into a temporary
	 register so as not to interfere with a potential fp restore,
	 which must be consecutive with an SP restore.  */
      if (frame_size < 32768
	  && ! (TARGET_ABI_UNICOSMK && current_function_calls_alloca))
	sp_adj2 = GEN_INT (frame_size);
      else if (TARGET_ABI_UNICOSMK)
	{
	  sp_adj1 = gen_rtx_REG (DImode, 23);
	  FRP (emit_move_insn (sp_adj1, hard_frame_pointer_rtx));
	  sp_adj2 = const0_rtx;
	}
      else if (frame_size < 0x40007fffL)
	{
	  int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;

	  sp_adj2 = plus_constant (sp_adj1, frame_size - low);
	  if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2))
	    sp_adj1 = sa_reg;
	  else
	    {
	      sp_adj1 = gen_rtx_REG (DImode, 23);
	      FRP (emit_move_insn (sp_adj1, sp_adj2));
	    }
	  sp_adj2 = GEN_INT (low);
	}
      else
	{
	  rtx tmp = gen_rtx_REG (DImode, 23);
	  FRP (sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3));
	  if (!sp_adj2)
	    {
	      /* We can't drop new things to memory this late, afaik,
		 so build it up by pieces.  */
	      FRP (sp_adj2 = alpha_emit_set_long_const (tmp, frame_size,
							-(frame_size < 0)));
	      if (!sp_adj2)
		abort ();
	    }
	}

      /* From now on, things must be in order.  So emit blockages.  */

      /* Restore the frame pointer.  */
      if (TARGET_ABI_UNICOSMK)
	{
	  emit_insn (gen_blockage ());
	  mem = gen_rtx_MEM (DImode,
			     plus_constant (hard_frame_pointer_rtx, -16));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
	}
      else if (fp_is_frame_pointer)
	{
	  emit_insn (gen_blockage ());
	  mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
	}
      else if (TARGET_ABI_OPEN_VMS)
	{
	  emit_insn (gen_blockage ());
	  FRP (emit_move_insn (hard_frame_pointer_rtx,
			       gen_rtx_REG (DImode, vms_save_fp_regno)));
	}

      /* Restore the stack pointer.  */
      emit_insn (gen_blockage ());
      if (sp_adj2 == const0_rtx)
	FRP (emit_move_insn (stack_pointer_rtx, sp_adj1));
      else
	FRP (emit_move_insn (stack_pointer_rtx,
			     gen_rtx_PLUS (DImode, sp_adj1, sp_adj2)));
    }
  else
    {
      if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_REGISTER)
	{
	  emit_insn (gen_blockage ());
	  FRP (emit_move_insn (hard_frame_pointer_rtx,
			       gen_rtx_REG (DImode, vms_save_fp_regno)));
	}
      else if (TARGET_ABI_UNICOSMK && alpha_procedure_type != PT_STACK)
	{
	  /* Decrement the frame pointer if the function does not have a
	     frame.  */

	  emit_insn (gen_blockage ());
	  FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
				      hard_frame_pointer_rtx, GEN_INT (-1))));
	}
    }
}
6924
6925/* Output the rest of the textual info surrounding the epilogue. */
6926
6927void
acd92049 6928alpha_end_function (file, fnname, decl)
9c0e94a5 6929 FILE *file;
e03c5670 6930 const char *fnname;
3c303f52 6931 tree decl ATTRIBUTE_UNUSED;
9c0e94a5 6932{
a6f12d7c 6933 /* End the function. */
30102605 6934 if (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive)
33d01c33 6935 {
9c0e94a5 6936 fputs ("\t.end ", file);
acd92049 6937 assemble_name (file, fnname);
9c0e94a5 6938 putc ('\n', file);
33d01c33 6939 }
48f6bfac 6940 inside_function = FALSE;
9973f4a2 6941
60593797
RH
6942 /* Show that we know this function if it is called again.
6943
6944 Don't do this for global functions in object files destined for a
6945 shared library because the function may be overridden by the application
62918bd3
RH
6946 or other libraries. Similarly, don't do this for weak functions.
6947
6948 Don't do this for functions not defined in the .text section, as
6949 otherwise it's not unlikely that the destination is out of range
6950 for a direct branch. */
60593797 6951
5e62dee5 6952 if (!DECL_WEAK (current_function_decl)
62918bd3
RH
6953 && (!flag_pic || !TREE_PUBLIC (current_function_decl))
6954 && decl_in_text_section (current_function_decl))
60593797 6955 SYMBOL_REF_FLAG (XEXP (DECL_RTL (current_function_decl), 0)) = 1;
30102605
RH
6956
6957 /* Output jump tables and the static subroutine information block. */
6958 if (TARGET_ABI_UNICOSMK)
6959 {
6960 unicosmk_output_ssib (file, fnname);
6961 unicosmk_output_deferred_case_vectors (file);
6962 }
a6f12d7c 6963}

/* Emit a tail call to FUNCTION after adjusting THIS by DELTA.

   In order to avoid the hordes of differences between generated code
   with and without TARGET_EXPLICIT_RELOCS, and to avoid duplicating
   lots of code loading up large constants, generate rtl and emit it
   instead of going straight to text.

   Not sure why this idea hasn't been explored before...  */

void
alpha_output_mi_thunk_osf (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     tree function;
{
  HOST_WIDE_INT hi, lo;
  rtx this, insn, funexp;

  /* We always require a valid GP.  */
  emit_insn (gen_prologue_ldgp ());
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $16.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 17);
  else
    this = gen_rtx_REG (Pmode, 16);

  /* Add DELTA.  When possible we use ldah+lda.  Otherwise load the
     entire constant for the add.  */
  /* ((x & 0xffff) ^ 0x8000) - 0x8000 sign-extends the low 16 bits;
     hi is the remainder sign-extended to 32 bits.  If hi+lo == delta,
     the constant fits in an ldah+lda pair.  */
  lo = ((delta & 0xffff) ^ 0x8000) - 0x8000;
  hi = (((delta - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (hi + lo == delta)
    {
      if (hi)
	emit_insn (gen_adddi3 (this, this, GEN_INT (hi)));
      if (lo)
	emit_insn (gen_adddi3 (this, this, GEN_INT (lo)));
    }
  else
    {
      /* Build the full 64-bit constant in $0 piecewise.  */
      rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0),
					   delta, -(delta < 0));
      emit_insn (gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (! TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();
}
48f6bfac
RK
7034\f
/* Debugging support.  */

#include "gstab.h"

/* Count the number of sdb-related labels generated (to find block
   start and end boundaries).  */

int sdb_label_count = 0;

/* Next label # for each statement.  */

static int sym_lineno = 0;

/* Count the number of .file directives, so that .loc is up to date.  */

static int num_source_filenames = 0;

/* Name of the file containing the current function.  */

static const char *current_function_file = "";

/* Offsets to alpha virtual arg/local debugging pointers.  */

long alpha_arg_offset;
long alpha_auto_offset;
7060\f
7061/* Emit a new filename to a stream. */
7062
7063void
7064alpha_output_filename (stream, name)
7065 FILE *stream;
aa388f29 7066 const char *name;
48f6bfac
RK
7067{
7068 static int first_time = TRUE;
7069 char ltext_label_name[100];
7070
7071 if (first_time)
7072 {
7073 first_time = FALSE;
7074 ++num_source_filenames;
7075 current_function_file = name;
7076 fprintf (stream, "\t.file\t%d ", num_source_filenames);
7077 output_quoted_string (stream, name);
7078 fprintf (stream, "\n");
7079 if (!TARGET_GAS && write_symbols == DBX_DEBUG)
7080 fprintf (stream, "\t#@stabs\n");
7081 }
7082
6af601b3 7083 else if (write_symbols == DBX_DEBUG)
48f6bfac
RK
7084 {
7085 ASM_GENERATE_INTERNAL_LABEL (ltext_label_name, "Ltext", 0);
8202cda0 7086 fprintf (stream, "%s", ASM_STABS_OP);
48f6bfac
RK
7087 output_quoted_string (stream, name);
7088 fprintf (stream, ",%d,0,0,%s\n", N_SOL, &ltext_label_name[1]);
7089 }
7090
7091 else if (name != current_function_file
5665caa2 7092 && strcmp (name, current_function_file) != 0)
48f6bfac
RK
7093 {
7094 if (inside_function && ! TARGET_GAS)
7095 fprintf (stream, "\t#.file\t%d ", num_source_filenames);
7096 else
7097 {
7098 ++num_source_filenames;
7099 current_function_file = name;
7100 fprintf (stream, "\t.file\t%d ", num_source_filenames);
7101 }
7102
7103 output_quoted_string (stream, name);
7104 fprintf (stream, "\n");
7105 }
7106}
7107\f
7108/* Emit a linenumber to a stream. */
7109
7110void
7111alpha_output_lineno (stream, line)
7112 FILE *stream;
7113 int line;
7114{
6af601b3 7115 if (write_symbols == DBX_DEBUG)
48f6bfac
RK
7116 {
7117 /* mips-tfile doesn't understand .stabd directives. */
7118 ++sym_lineno;
8202cda0 7119 fprintf (stream, "$LM%d:\n%s%d,0,%d,$LM%d\n",
48f6bfac
RK
7120 sym_lineno, ASM_STABN_OP, N_SLINE, line, sym_lineno);
7121 }
7122 else
40828e35 7123 fprintf (stream, "\n\t.loc\t%d %d\n", num_source_filenames, line);
48f6bfac 7124}
6245e3df
RK
7125\f
/* Structure to show the current status of registers and memory.
   USED collects operands read; DEFD collects results written
   (see summarize_insn below).  */

struct shadow_summary
{
  struct {
    unsigned int i     : 31;	/* Mask of int regs */
    unsigned int fp    : 31;	/* Mask of fp regs */
    unsigned int mem   : 1;	/* mem == imem | fpmem */
  } used, defd;
};

static void summarize_insn PARAMS ((rtx, struct shadow_summary *, int));
static void alpha_handle_trap_shadows PARAMS ((rtx));
9c0e94a5 7139
6245e3df
RK
/* Summarize the effects of expression X on the machine.  Update SUM, a
   pointer to the summary structure.  SET is nonzero if the insn is
   setting the object, otherwise zero.  */

static void
summarize_insn (x, sum, set)
     rtx x;
     struct shadow_summary *sum;
     int set;
{
  const char *format_ptr;
  int i, j;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
      /* ??? Note that this case would be incorrect if the Alpha had a
	 ZERO_EXTRACT in SET_DEST.  */
    case SET:
      summarize_insn (SET_SRC (x), sum, 0);
      summarize_insn (SET_DEST (x), sum, 1);
      break;

    case CLOBBER:
      summarize_insn (XEXP (x, 0), sum, 1);
      break;

    case USE:
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    case ASM_OPERANDS:
      /* Only the inputs are visible here; outputs appear via the
	 containing SET/CLOBBER.  */
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	summarize_insn (ASM_OPERANDS_INPUT (x, i), sum, 0);
      break;

    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	summarize_insn (XVECEXP (x, 0, i), sum, 0);
      break;

    case SUBREG:
      summarize_insn (SUBREG_REG (x), sum, 0);
      break;

    case REG:
      {
	int regno = REGNO (x);
	unsigned long mask = ((unsigned long) 1) << (regno % 32);

	/* Registers 31 and 63 ($31 and $f31) are the hardwired zero
	   registers; ignore them.  */
	if (regno == 31 || regno == 63)
	  break;

	if (set)
	  {
	    if (regno < 32)
	      sum->defd.i |= mask;
	    else
	      sum->defd.fp |= mask;
	  }
	else
	  {
	    if (regno < 32)
	      sum->used.i |= mask;
	    else
	      sum->used.fp |= mask;
	  }
      }
      break;

    case MEM:
      if (set)
	sum->defd.mem = 1;
      else
	sum->used.mem = 1;

      /* Find the regs used in memory address computation: */
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    case CONST_INT:   case CONST_DOUBLE:
    case SYMBOL_REF:  case LABEL_REF:     case CONST:
    case SCRATCH:     case ASM_INPUT:
      break;

      /* Handle common unary and binary ops for efficiency.  */
    case COMPARE:  case PLUS:    case MINUS:   case MULT:      case DIV:
    case MOD:      case UDIV:    case UMOD:    case AND:       case IOR:
    case XOR:      case ASHIFT:  case ROTATE:  case ASHIFTRT:  case LSHIFTRT:
    case ROTATERT: case SMIN:    case SMAX:    case UMIN:      case UMAX:
    case NE:       case EQ:      case GE:      case GT:        case LE:
    case LT:       case GEU:     case GTU:     case LEU:       case LTU:
      summarize_insn (XEXP (x, 0), sum, 0);
      summarize_insn (XEXP (x, 1), sum, 0);
      break;

    case NEG:  case NOT:  case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:  case FLOAT_EXTEND:  case FLOAT_TRUNCATE:  case FLOAT:
    case FIX:  case UNSIGNED_FLOAT:  case UNSIGNED_FIX:  case ABS:
    case SQRT:  case FFS:
      summarize_insn (XEXP (x, 0), sum, 0);
      break;

    default:
      /* For anything else, walk the generic rtx format string.  */
      format_ptr = GET_RTX_FORMAT (GET_CODE (x));
      for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
	switch (format_ptr[i])
	  {
	  case 'e':
	    summarize_insn (XEXP (x, i), sum, 0);
	    break;

	  case 'E':
	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      summarize_insn (XVECEXP (x, i, j), sum, 0);
	    break;

	  case 'i':
	    break;

	  default:
	    abort ();
	  }
    }
}
6245e3df 7267
9c0e94a5
RH
/* Ensure a sufficient number of `trapb' insns are in the code when
   the user requests code with a trap precision of functions or
   instructions.

   In naive mode, when the user requests a trap-precision of
   "instruction", a trapb is needed after every instruction that may
   generate a trap.  This ensures that the code is resumption safe but
   it is also slow.

   When optimizations are turned on, we delay issuing a trapb as long
   as possible.  In this context, a trap shadow is the sequence of
   instructions that starts with a (potentially) trap generating
   instruction and extends to the next trapb or call_pal instruction
   (but GCC never generates call_pal by itself).  We can delay (and
   therefore sometimes omit) a trapb subject to the following
   conditions:

   (a) On entry to the trap shadow, if any Alpha register or memory
   location contains a value that is used as an operand value by some
   instruction in the trap shadow (live on entry), then no instruction
   in the trap shadow may modify the register or memory location.

   (b) Within the trap shadow, the computation of the base register
   for a memory load or store instruction may not involve using the
   result of an instruction that might generate an UNPREDICTABLE
   result.

   (c) Within the trap shadow, no register may be used more than once
   as a destination register.  (This is to make life easier for the
   trap-handler.)

   (d) The trap shadow may not include any branch instructions.  */

static void
alpha_handle_trap_shadows (insns)
     rtx insns;
{
  struct shadow_summary shadow;
  int trap_pending, exception_nesting;
  rtx i, n;

  trap_pending = 0;
  exception_nesting = 0;
  shadow.used.i = 0;
  shadow.used.fp = 0;
  shadow.used.mem = 0;
  shadow.defd = shadow.used;

  for (i = insns; i ; i = NEXT_INSN (i))
    {
      if (GET_CODE (i) == NOTE)
	{
	  /* Close a pending shadow at EH region boundaries and at the
	     epilogue so traps are attributed to the proper region.  */
	  switch (NOTE_LINE_NUMBER (i))
	    {
	    case NOTE_INSN_EH_REGION_BEG:
	      exception_nesting++;
	      if (trap_pending)
		goto close_shadow;
	      break;

	    case NOTE_INSN_EH_REGION_END:
	      exception_nesting--;
	      if (trap_pending)
		goto close_shadow;
	      break;

	    case NOTE_INSN_EPILOGUE_BEG:
	      if (trap_pending && alpha_tp >= ALPHA_TP_FUNC)
		goto close_shadow;
	      break;
	    }
	}
      else if (trap_pending)
	{
	  if (alpha_tp == ALPHA_TP_FUNC)
	    {
	      if (GET_CODE (i) == JUMP_INSN
		  && GET_CODE (PATTERN (i)) == RETURN)
		goto close_shadow;
	    }
	  else if (alpha_tp == ALPHA_TP_INSN)
	    {
	      if (optimize > 0)
		{
		  struct shadow_summary sum;

		  sum.used.i = 0;
		  sum.used.fp = 0;
		  sum.used.mem = 0;
		  sum.defd = sum.used;

		  switch (GET_CODE (i))
		    {
		    case INSN:
		      /* Annoyingly, get_attr_trap will abort on these.  */
		      if (GET_CODE (PATTERN (i)) == USE
			  || GET_CODE (PATTERN (i)) == CLOBBER)
			break;

		      summarize_insn (PATTERN (i), &sum, 0);

		      if ((sum.defd.i & shadow.defd.i)
			  || (sum.defd.fp & shadow.defd.fp))
			{
			  /* (c) would be violated */
			  goto close_shadow;
			}

		      /* Combine shadow with summary of current insn: */
		      shadow.used.i |= sum.used.i;
		      shadow.used.fp |= sum.used.fp;
		      shadow.used.mem |= sum.used.mem;
		      shadow.defd.i |= sum.defd.i;
		      shadow.defd.fp |= sum.defd.fp;
		      shadow.defd.mem |= sum.defd.mem;

		      if ((sum.defd.i & shadow.used.i)
			  || (sum.defd.fp & shadow.used.fp)
			  || (sum.defd.mem & shadow.used.mem))
			{
			  /* (a) would be violated (also takes care of (b))  */
			  if (get_attr_trap (i) == TRAP_YES
			      && ((sum.defd.i & sum.used.i)
				  || (sum.defd.fp & sum.used.fp)))
			    abort ();

			  goto close_shadow;
			}
		      break;

		    case JUMP_INSN:
		    case CALL_INSN:
		    case CODE_LABEL:
		      /* (d): any control flow closes the shadow.  */
		      goto close_shadow;

		    default:
		      abort ();
		    }
		}
	      else
		{
		close_shadow:
		  /* Emit the barrier and mark both it and the current
		     insn as group boundaries for the scheduler.  */
		  n = emit_insn_before (gen_trapb (), i);
		  PUT_MODE (n, TImode);
		  PUT_MODE (i, TImode);
		  trap_pending = 0;
		  shadow.used.i = 0;
		  shadow.used.fp = 0;
		  shadow.used.mem = 0;
		  shadow.defd = shadow.used;
		}
	    }
	}

      /* A trapping insn opens (or extends the pending need for) a
	 trap shadow.  */
      if ((exception_nesting > 0 || alpha_tp >= ALPHA_TP_FUNC)
	  && GET_CODE (i) == INSN
	  && GET_CODE (PATTERN (i)) != USE
	  && GET_CODE (PATTERN (i)) != CLOBBER
	  && get_attr_trap (i) == TRAP_YES)
	{
	  if (optimize && !trap_pending)
	    summarize_insn (PATTERN (i), &shadow, 0);
	  trap_pending = 1;
	}
    }
}
68aed21b 7434\f
68aed21b
RH
/* Alpha can only issue instruction groups simultaneously if they are
   suitably aligned.  This is very processor-specific.  */
7437
3873d24b
RH
/* Issue-slot masks for the EV4: two integer boxes, with EV4_IBX
   marking an insn that may occupy either one.  */

enum alphaev4_pipe {
  EV4_STOP = 0,
  EV4_IB0 = 1,
  EV4_IB1 = 2,
  EV4_IBX = 4
};

/* Issue-slot masks for the EV5: two integer pipes (E0/E1, with E01
   meaning either), and a floating add/multiply pair (FA/FM, with FAM
   meaning either).  */

enum alphaev5_pipe {
  EV5_STOP = 0,
  EV5_NONE = 1,
  EV5_E01 = 2,
  EV5_E0 = 4,
  EV5_E1 = 8,
  EV5_FAM = 16,
  EV5_FA = 32,
  EV5_FM = 64
};

static enum alphaev4_pipe alphaev4_insn_pipe PARAMS ((rtx));
static enum alphaev5_pipe alphaev5_insn_pipe PARAMS ((rtx));
static rtx alphaev4_next_group PARAMS ((rtx, int *, int *));
static rtx alphaev5_next_group PARAMS ((rtx, int *, int *));
static rtx alphaev4_next_nop PARAMS ((int *));
static rtx alphaev5_next_nop PARAMS ((int *));

static void alpha_align_insns
  PARAMS ((rtx, unsigned int, rtx (*)(rtx, int *, int *), rtx (*)(int *)));
3873d24b
RH
7465
7466static enum alphaev4_pipe
7467alphaev4_insn_pipe (insn)
7468 rtx insn;
7469{
7470 if (recog_memoized (insn) < 0)
7471 return EV4_STOP;
7472 if (get_attr_length (insn) != 4)
7473 return EV4_STOP;
7474
7475 switch (get_attr_type (insn))
7476 {
7477 case TYPE_ILD:
7478 case TYPE_FLD:
7479 return EV4_IBX;
7480
7481 case TYPE_LDSYM:
7482 case TYPE_IADD:
7483 case TYPE_ILOG:
7484 case TYPE_ICMOV:
7485 case TYPE_ICMP:
7486 case TYPE_IST:
7487 case TYPE_FST:
7488 case TYPE_SHIFT:
7489 case TYPE_IMUL:
7490 case TYPE_FBR:
7491 return EV4_IB0;
7492
7493 case TYPE_MISC:
7494 case TYPE_IBR:
7495 case TYPE_JSR:
7496 case TYPE_FCPYS:
7497 case TYPE_FCMOV:
7498 case TYPE_FADD:
7499 case TYPE_FDIV:
7500 case TYPE_FMUL:
7501 return EV4_IB1;
7502
7503 default:
b81f53a1 7504 abort ();
3873d24b
RH
7505 }
7506}
7507
68aed21b
RH
7508static enum alphaev5_pipe
7509alphaev5_insn_pipe (insn)
7510 rtx insn;
7511{
7512 if (recog_memoized (insn) < 0)
7513 return EV5_STOP;
7514 if (get_attr_length (insn) != 4)
7515 return EV5_STOP;
7516
7517 switch (get_attr_type (insn))
7518 {
7519 case TYPE_ILD:
7520 case TYPE_FLD:
7521 case TYPE_LDSYM:
7522 case TYPE_IADD:
7523 case TYPE_ILOG:
7524 case TYPE_ICMOV:
7525 case TYPE_ICMP:
7526 return EV5_E01;
7527
7528 case TYPE_IST:
7529 case TYPE_FST:
7530 case TYPE_SHIFT:
7531 case TYPE_IMUL:
7532 case TYPE_MISC:
7533 case TYPE_MVI:
7534 return EV5_E0;
7535
7536 case TYPE_IBR:
7537 case TYPE_JSR:
7538 return EV5_E1;
7539
7540 case TYPE_FCPYS:
7541 return EV5_FAM;
7542
7543 case TYPE_FBR:
7544 case TYPE_FCMOV:
7545 case TYPE_FADD:
7546 case TYPE_FDIV:
7547 return EV5_FA;
7548
7549 case TYPE_FMUL:
7550 return EV5_FM;
2c01018f
RH
7551
7552 default:
7553 abort();
68aed21b 7554 }
68aed21b
RH
7555}
7556
3873d24b
RH
/* IN_USE is a mask of the slots currently filled within the insn group.
   The mask bits come from alphaev4_pipe above.  If EV4_IBX is set, then
   the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1.

   LEN is, of course, the length of the group in bytes.  */

static rtx
alphaev4_next_group (insn, pin_use, plen)
     rtx insn;
     int *pin_use, *plen;
{
  int len, in_use;

  len = in_use = 0;

  if (! INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == CLOBBER
      || GET_CODE (PATTERN (insn)) == USE)
    goto next_and_done;

  while (1)
    {
      enum alphaev4_pipe pipe;

      pipe = alphaev4_insn_pipe (insn);
      switch (pipe)
	{
	case EV4_STOP:
	  /* Force complex instructions to start new groups.  */
	  if (in_use)
	    goto done;

	  /* If this is a completely unrecognized insn, it's an asm.
	     We don't know how long it is, so record length as -1 to
	     signal a needed realignment.  */
	  if (recog_memoized (insn) < 0)
	    len = -1;
	  else
	    len = get_attr_length (insn);
	  goto next_and_done;

	case EV4_IBX:
	  if (in_use & EV4_IB0)
	    {
	      if (in_use & EV4_IB1)
		goto done;
	      in_use |= EV4_IB1;
	    }
	  else
	    in_use |= EV4_IB0 | EV4_IBX;
	  break;

	case EV4_IB0:
	  if (in_use & EV4_IB0)
	    {
	      /* Can only proceed if an earlier IBX insn can vacate IB0
		 by moving into the still-free IB1.  */
	      if (!(in_use & EV4_IBX) || (in_use & EV4_IB1))
		goto done;
	      in_use |= EV4_IB1;
	    }
	  in_use |= EV4_IB0;
	  break;

	case EV4_IB1:
	  if (in_use & EV4_IB1)
	    goto done;
	  in_use |= EV4_IB1;
	  break;

	default:
	  abort();
	}
      len += 4;

      /* Haifa doesn't do well scheduling branches.  */
      if (GET_CODE (insn) == JUMP_INSN)
	goto next_and_done;

    next:
      insn = next_nonnote_insn (insn);

      if (!insn || ! INSN_P (insn))
	goto done;

      /* Let Haifa tell us where it thinks insn group boundaries are.  */
      if (GET_MODE (insn) == TImode)
	goto done;

      if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
	goto next;
    }

 next_and_done:
  insn = next_nonnote_insn (insn);

 done:
  *plen = len;
  *pin_use = in_use;
  return insn;
}
7656
/* IN_USE is a mask of the slots currently filled within the insn group.
   The mask bits come from alphaev5_pipe above.  If EV5_E01 is set, then
   the insn in EV5_E0 can be swapped by the hardware into EV5_E1.

   LEN is, of course, the length of the group in bytes.  */

static rtx
alphaev5_next_group (insn, pin_use, plen)
     rtx insn;
     int *pin_use, *plen;
{
  int len, in_use;

  len = in_use = 0;

  if (! INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == CLOBBER
      || GET_CODE (PATTERN (insn)) == USE)
    goto next_and_done;

  while (1)
    {
      enum alphaev5_pipe pipe;

      pipe = alphaev5_insn_pipe (insn);
      switch (pipe)
	{
	case EV5_STOP:
	  /* Force complex instructions to start new groups.  */
	  if (in_use)
	    goto done;

	  /* If this is a completely unrecognized insn, it's an asm.
	     We don't know how long it is, so record length as -1 to
	     signal a needed realignment.  */
	  if (recog_memoized (insn) < 0)
	    len = -1;
	  else
	    len = get_attr_length (insn);
	  goto next_and_done;

	  /* ??? Most of the places below, we would like to abort, as
	     it would indicate an error either in Haifa, or in the
	     scheduling description.  Unfortunately, Haifa never
	     schedules the last instruction of the BB, so we don't
	     have an accurate TI bit to go off.  */
	case EV5_E01:
	  if (in_use & EV5_E0)
	    {
	      if (in_use & EV5_E1)
		goto done;
	      in_use |= EV5_E1;
	    }
	  else
	    in_use |= EV5_E0 | EV5_E01;
	  break;

	case EV5_E0:
	  if (in_use & EV5_E0)
	    {
	      /* Can only proceed if an earlier E01 insn can vacate E0
		 by moving into the still-free E1.  */
	      if (!(in_use & EV5_E01) || (in_use & EV5_E1))
		goto done;
	      in_use |= EV5_E1;
	    }
	  in_use |= EV5_E0;
	  break;

	case EV5_E1:
	  if (in_use & EV5_E1)
	    goto done;
	  in_use |= EV5_E1;
	  break;

	case EV5_FAM:
	  if (in_use & EV5_FA)
	    {
	      if (in_use & EV5_FM)
		goto done;
	      in_use |= EV5_FM;
	    }
	  else
	    in_use |= EV5_FA | EV5_FAM;
	  break;

	case EV5_FA:
	  if (in_use & EV5_FA)
	    goto done;
	  in_use |= EV5_FA;
	  break;

	case EV5_FM:
	  if (in_use & EV5_FM)
	    goto done;
	  in_use |= EV5_FM;
	  break;

	case EV5_NONE:
	  break;

	default:
	  abort();
	}
      len += 4;

      /* Haifa doesn't do well scheduling branches.  */
      /* ??? If this is predicted not-taken, slotting continues, except
	 that no more IBR, FBR, or JSR insns may be slotted.  */
      if (GET_CODE (insn) == JUMP_INSN)
	goto next_and_done;

    next:
      insn = next_nonnote_insn (insn);

      if (!insn || ! INSN_P (insn))
	goto done;

      /* Let Haifa tell us where it thinks insn group boundaries are.  */
      if (GET_MODE (insn) == TImode)
	goto done;

      if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
	goto next;
    }

 next_and_done:
  insn = next_nonnote_insn (insn);

 done:
  *plen = len;
  *pin_use = in_use;
  return insn;
}
7789
3873d24b
RH
7790static rtx
7791alphaev4_next_nop (pin_use)
7792 int *pin_use;
7793{
7794 int in_use = *pin_use;
7795 rtx nop;
7796
7797 if (!(in_use & EV4_IB0))
7798 {
7799 in_use |= EV4_IB0;
7800 nop = gen_nop ();
7801 }
7802 else if ((in_use & (EV4_IBX|EV4_IB1)) == EV4_IBX)
7803 {
7804 in_use |= EV4_IB1;
7805 nop = gen_nop ();
7806 }
7807 else if (TARGET_FP && !(in_use & EV4_IB1))
7808 {
7809 in_use |= EV4_IB1;
7810 nop = gen_fnop ();
7811 }
7812 else
7813 nop = gen_unop ();
7814
7815 *pin_use = in_use;
7816 return nop;
7817}
7818
7819static rtx
7820alphaev5_next_nop (pin_use)
7821 int *pin_use;
7822{
7823 int in_use = *pin_use;
7824 rtx nop;
7825
7826 if (!(in_use & EV5_E1))
7827 {
7828 in_use |= EV5_E1;
7829 nop = gen_nop ();
7830 }
7831 else if (TARGET_FP && !(in_use & EV5_FA))
7832 {
7833 in_use |= EV5_FA;
7834 nop = gen_fnop ();
7835 }
7836 else if (TARGET_FP && !(in_use & EV5_FM))
7837 {
7838 in_use |= EV5_FM;
7839 nop = gen_fnop ();
7840 }
7841 else
7842 nop = gen_unop ();
7843
7844 *pin_use = in_use;
7845 return nop;
7846}
7847
/* The instruction group alignment main loop.  NEXT_GROUP and NEXT_NOP
   are the processor-specific helpers above; MAX_ALIGN is the processor's
   fetch-block size in bytes.  */

static void
alpha_align_insns (insns, max_align, next_group, next_nop)
     rtx insns;
     unsigned int max_align;
     rtx (*next_group) PARAMS ((rtx, int *, int *));
     rtx (*next_nop) PARAMS ((int *));
{
  /* ALIGN is the known alignment for the insn group.  */
  unsigned int align;
  /* OFS is the offset of the current insn in the insn group.  */
  int ofs;
  int prev_in_use, in_use, len;
  rtx i, next;

  /* Let shorten branches care for assigning alignments to code labels.  */
  shorten_branches (insns);

  /* Start from the function's alignment, clamped to [4, MAX_ALIGN].  */
  if (align_functions < 4)
    align = 4;
  else if ((unsigned int) align_functions < max_align)
    align = align_functions;
  else
    align = max_align;

  ofs = prev_in_use = 0;
  i = insns;
  if (GET_CODE (i) == NOTE)
    i = next_nonnote_insn (i);

  while (i)
    {
      next = (*next_group) (i, &in_use, &len);

      /* When we see a label, resync alignment etc.  */
      if (GET_CODE (i) == CODE_LABEL)
	{
	  unsigned int new_align = 1 << label_to_alignment (i);

	  if (new_align >= align)
	    {
	      align = new_align < max_align ? new_align : max_align;
	      ofs = 0;
	    }

	  else if (ofs & (new_align-1))
	    ofs = (ofs | (new_align-1)) + 1;
	  if (len != 0)
	    abort();
	}

      /* Handle complex instructions special.  */
      else if (in_use == 0)
	{
	  /* Asms will have length < 0.  This is a signal that we have
	     lost alignment knowledge.  Assume, however, that the asm
	     will not mis-align instructions.  */
	  if (len < 0)
	    {
	      ofs = 0;
	      align = 4;
	      len = 0;
	    }
	}

      /* If the known alignment is smaller than the recognized insn group,
	 realign the output.  */
      else if ((int) align < len)
	{
	  unsigned int new_log_align = len > 8 ? 4 : 3;
	  rtx prev, where;

	  where = prev = prev_nonnote_insn (i);
	  if (!where || GET_CODE (where) != CODE_LABEL)
	    where = i;

	  /* Can't realign between a call and its gp reload.  */
	  if (! (TARGET_EXPLICIT_RELOCS
		 && prev && GET_CODE (prev) == CALL_INSN))
	    {
	      emit_insn_before (gen_realign (GEN_INT (new_log_align)), where);
	      align = 1 << new_log_align;
	      ofs = 0;
	    }
	}

      /* If the group won't fit in the same INT16 as the previous,
	 we need to add padding to keep the group together.  Rather
	 than simply leaving the insn filling to the assembler, we
	 can make use of the knowledge of what sorts of instructions
	 were issued in the previous group to make sure that all of
	 the added nops are really free.  */
      else if (ofs + len > (int) align)
	{
	  int nop_count = (align - ofs) / 4;
	  rtx where;

	  /* Insert nops before labels, branches, and calls to truly merge
	     the execution of the nops with the previous instruction group.  */
	  where = prev_nonnote_insn (i);
	  if (where)
	    {
	      if (GET_CODE (where) == CODE_LABEL)
		{
		  rtx where2 = prev_nonnote_insn (where);
		  if (where2 && GET_CODE (where2) == JUMP_INSN)
		    where = where2;
		}
	      else if (GET_CODE (where) == INSN)
		where = i;
	    }
	  else
	    where = i;

	  do
	    emit_insn_before ((*next_nop)(&prev_in_use), where);
	  while (--nop_count);
	  ofs = 0;
	}

      ofs = (ofs + len) & (align - 1);
      prev_in_use = in_use;
      i = next;
    }
}
68aed21b 7974\f
f5143c46 7975/* Machine dependent reorg pass. */
2ea844d3
RH
7976
7977void
7978alpha_reorg (insns)
7979 rtx insns;
7980{
68aed21b
RH
7981 if (alpha_tp != ALPHA_TP_PROG || flag_exceptions)
7982 alpha_handle_trap_shadows (insns);
7983
68aed21b
RH
7984 /* Due to the number of extra trapb insns, don't bother fixing up
7985 alignment when trap precision is instruction. Moreover, we can
b81f53a1 7986 only do our job when sched2 is run. */
68aed21b
RH
7987 if (optimize && !optimize_size
7988 && alpha_tp != ALPHA_TP_INSN
7989 && flag_schedule_insns_after_reload)
7990 {
3873d24b 7991 if (alpha_cpu == PROCESSOR_EV4)
b81f53a1 7992 alpha_align_insns (insns, 8, alphaev4_next_group, alphaev4_next_nop);
3873d24b 7993 else if (alpha_cpu == PROCESSOR_EV5)
b81f53a1 7994 alpha_align_insns (insns, 16, alphaev5_next_group, alphaev5_next_nop);
68aed21b 7995 }
2ea844d3 7996}
2ea844d3 7997\f
a874dd18
RK
/* Check a floating-point value for validity for a particular machine mode.  */

static const char * const float_strings[] =
{
  /* These are for FLOAT_VAX.  */
  "1.70141173319264430e+38", /* 2^127 (2^24 - 1) / 2^24 */
  "-1.70141173319264430e+38",
  "2.93873587705571877e-39", /* 2^-128 */
  "-2.93873587705571877e-39",
  /* These are for the default broken IEEE mode, which traps
     on infinity or denormal numbers.  */
  "3.402823466385288598117e+38", /* 2^128 (1 - 2^-24) */
  "-3.402823466385288598117e+38",
  "1.1754943508222875079687e-38", /* 2^-126 */
  "-1.1754943508222875079687e-38",
};

/* Parsed forms of FLOAT_STRINGS, filled in lazily on first use.  */
static REAL_VALUE_TYPE float_values[8];
static int inited_float_values = 0;

/* Clamp *D into the representable SFmode range for the selected float
   format: values beyond the maximum are clamped to it, and values in
   the denormal gap around zero are flushed to zero.  Returns 1 if *D
   was modified, 0 otherwise.  Full IEEE targets accept everything.  */

int
check_float_value (mode, d, overflow)
     enum machine_mode mode;
     REAL_VALUE_TYPE *d;
     int overflow ATTRIBUTE_UNUSED;
{

  if (TARGET_IEEE || TARGET_IEEE_CONFORMANT || TARGET_IEEE_WITH_INEXACT)
    return 0;

  if (inited_float_values == 0)
    {
      int i;
      for (i = 0; i < 8; i++)
	float_values[i] = REAL_VALUE_ATOF (float_strings[i], DFmode);

      inited_float_values = 1;
    }

  if (mode == SFmode)
    {
      REAL_VALUE_TYPE r;
      REAL_VALUE_TYPE *fvptr;

      /* Entries: [0] = max, [1] = min, [2] = smallest positive,
	 [3] = largest negative.  */
      if (TARGET_FLOAT_VAX)
	fvptr = &float_values[0];
      else
	fvptr = &float_values[4];

      memcpy (&r, d, sizeof (REAL_VALUE_TYPE));
      if (REAL_VALUES_LESS (fvptr[0], r))
	{
	  /* Positive overflow: clamp to the largest value.  */
	  memcpy (d, &fvptr[0], sizeof (REAL_VALUE_TYPE));
	  return 1;
	}
      else if (REAL_VALUES_LESS (r, fvptr[1]))
	{
	  /* Negative overflow: clamp to the smallest value.  */
	  memcpy (d, &fvptr[1], sizeof (REAL_VALUE_TYPE));
	  return 1;
	}
      else if (REAL_VALUES_LESS (dconst0, r)
	       && REAL_VALUES_LESS (r, fvptr[2]))
	{
	  /* Positive underflow: flush to zero.  */
	  memcpy (d, &dconst0, sizeof (REAL_VALUE_TYPE));
	  return 1;
	}
      else if (REAL_VALUES_LESS (r, dconst0)
	       && REAL_VALUES_LESS (fvptr[3], r))
	{
	  /* Negative underflow: flush to zero.  */
	  memcpy (d, &dconst0, sizeof (REAL_VALUE_TYPE));
	  return 1;
	}
    }

  return 0;
}
30102605 8074\f
be7b80f4 8075#if TARGET_ABI_OPEN_VMS
89cfc2c6 8076
e9a25f70 8077/* Return the VMS argument type corresponding to MODE. */
89cfc2c6 8078
e9a25f70
JL
8079enum avms_arg_type
8080alpha_arg_type (mode)
8081 enum machine_mode mode;
8082{
8083 switch (mode)
89cfc2c6 8084 {
e9a25f70
JL
8085 case SFmode:
8086 return TARGET_FLOAT_VAX ? FF : FS;
8087 case DFmode:
8088 return TARGET_FLOAT_VAX ? FD : FT;
8089 default:
8090 return I64;
89cfc2c6 8091 }
e9a25f70 8092}
89cfc2c6 8093
e9a25f70
JL
8094/* Return an rtx for an integer representing the VMS Argument Information
8095 register value. */
89cfc2c6 8096
aa388f29 8097rtx
e9a25f70
JL
8098alpha_arg_info_reg_val (cum)
8099 CUMULATIVE_ARGS cum;
8100{
8101 unsigned HOST_WIDE_INT regval = cum.num_args;
8102 int i;
89cfc2c6 8103
e9a25f70
JL
8104 for (i = 0; i < 6; i++)
8105 regval |= ((int) cum.atypes[i]) << (i * 3 + 8);
89cfc2c6 8106
e9a25f70
JL
8107 return GEN_INT (regval);
8108}
8109\f
a82c7f05
RH
#include <splay-tree.h>

/* Structure to collect function names for final output
   in link section.  */

/* KIND_UNUSED: defined locally, never referenced; KIND_LOCAL: defined
   and referenced here; KIND_EXTERN: referenced but assumed external
   (see alpha_need_linkage).  */

enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN};

struct alpha_links
{
  rtx linkage;			/* SYMBOL_REF for the ..lk linkage entry */
  enum links_kind kind;
};

/* Map from function name to its struct alpha_links.  */
static splay_tree alpha_links;

static int mark_alpha_links_node PARAMS ((splay_tree_node, void *));
static void mark_alpha_links PARAMS ((void *));
static int alpha_write_one_linkage PARAMS ((splay_tree_node, void *));
8128
8129/* Protect alpha_links from garbage collection. */
8130
8131static int
8132mark_alpha_links_node (node, data)
8133 splay_tree_node node;
8134 void *data ATTRIBUTE_UNUSED;
8135{
8136 struct alpha_links *links = (struct alpha_links *) node->value;
8137 ggc_mark_rtx (links->linkage);
8138 return 0;
8139}
8140
8141static void
8142mark_alpha_links (ptr)
8143 void *ptr;
8144{
8145 splay_tree tree = *(splay_tree *) ptr;
8146 splay_tree_foreach (tree, mark_alpha_links_node, NULL);
8147}
89cfc2c6
RK
8148
8149/* Make (or fake) .linkage entry for function call.
8150
a82c7f05 8151 IS_LOCAL is 0 if name is used in call, 1 if name is used in definition.
89cfc2c6 8152
a82c7f05
RH
8153 Return an SYMBOL_REF rtx for the linkage. */
8154
8155rtx
89cfc2c6 8156alpha_need_linkage (name, is_local)
e03c5670 8157 const char *name;
89cfc2c6
RK
8158 int is_local;
8159{
a82c7f05
RH
8160 splay_tree_node node;
8161 struct alpha_links *al;
89cfc2c6
RK
8162
8163 if (name[0] == '*')
8164 name++;
8165
a82c7f05
RH
8166 if (alpha_links)
8167 {
8168 /* Is this name already defined? */
89cfc2c6 8169
a82c7f05
RH
8170 node = splay_tree_lookup (alpha_links, (splay_tree_key) name);
8171 if (node)
8172 {
8173 al = (struct alpha_links *) node->value;
8174 if (is_local)
8175 {
8176 /* Defined here but external assumed. */
8177 if (al->kind == KIND_EXTERN)
8178 al->kind = KIND_LOCAL;
8179 }
8180 else
8181 {
8182 /* Used here but unused assumed. */
8183 if (al->kind == KIND_UNUSED)
8184 al->kind = KIND_LOCAL;
8185 }
8186 return al->linkage;
8187 }
8188 }
8189 else
8190 {
8191 alpha_links = splay_tree_new ((splay_tree_compare_fn) strcmp,
8192 (splay_tree_delete_key_fn) free,
8193 (splay_tree_delete_key_fn) free);
8194 ggc_add_root (&alpha_links, 1, 1, mark_alpha_links);
8195 }
89cfc2c6 8196
a82c7f05
RH
8197 al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
8198 name = xstrdup (name);
89cfc2c6
RK
8199
8200 /* Assume external if no definition. */
a82c7f05 8201 al->kind = (is_local ? KIND_UNUSED : KIND_EXTERN);
89cfc2c6 8202
a82c7f05 8203 /* Ensure we have an IDENTIFIER so assemble_name can mark it used. */
9398dc27
RK
8204 get_identifier (name);
8205
a82c7f05
RH
8206 /* Construct a SYMBOL_REF for us to call. */
8207 {
8208 size_t name_len = strlen (name);
520a57c8 8209 char *linksym = alloca (name_len + 6);
a82c7f05
RH
8210 linksym[0] = '$';
8211 memcpy (linksym + 1, name, name_len);
8212 memcpy (linksym + 1 + name_len, "..lk", 5);
520a57c8
ZW
8213 al->linkage = gen_rtx_SYMBOL_REF (Pmode,
8214 ggc_alloc_string (linksym, name_len + 5));
a82c7f05
RH
8215 }
8216
8217 splay_tree_insert (alpha_links, (splay_tree_key) name,
8218 (splay_tree_value) al);
89cfc2c6 8219
a82c7f05 8220 return al->linkage;
89cfc2c6
RK
8221}
8222
a82c7f05
RH
8223static int
8224alpha_write_one_linkage (node, data)
8225 splay_tree_node node;
8226 void *data;
8227{
83182544 8228 const char *const name = (const char *) node->key;
a82c7f05
RH
8229 struct alpha_links *links = (struct alpha_links *) node->value;
8230 FILE *stream = (FILE *) data;
8231
8232 if (links->kind == KIND_UNUSED
8233 || ! TREE_SYMBOL_REFERENCED (get_identifier (name)))
8234 return 0;
8235
8236 fprintf (stream, "$%s..lk:\n", name);
8237 if (links->kind == KIND_LOCAL)
8238 {
8239 /* Local and used, build linkage pair. */
8240 fprintf (stream, "\t.quad %s..en\n", name);
8241 fprintf (stream, "\t.quad %s\n", name);
8242 }
8243 else
8244 {
8245 /* External and used, request linkage pair. */
8246 fprintf (stream, "\t.linkage %s\n", name);
8247 }
8248
8249 return 0;
8250}
89cfc2c6
RK
8251
8252void
8253alpha_write_linkage (stream)
8254 FILE *stream;
8255{
c1bd46a8
DR
8256 if (alpha_links)
8257 {
8258 readonly_section ();
8259 fprintf (stream, "\t.align 3\n");
8260 splay_tree_foreach (alpha_links, alpha_write_one_linkage, stream);
8261 }
89cfc2c6
RK
8262}
8263
7c262518
RH
8264/* Given a decl, a section name, and whether the decl initializer
8265 has relocs, choose attributes for the section. */
8266
8267#define SECTION_VMS_OVERLAY SECTION_FORGET
c1bd46a8
DR
8268#define SECTION_VMS_GLOBAL SECTION_MACH_DEP
8269#define SECTION_VMS_INITIALIZE (SECTION_VMS_GLOBAL << 1)
7c262518
RH
8270
8271static unsigned int
8272vms_section_type_flags (decl, name, reloc)
8273 tree decl;
8274 const char *name;
8275 int reloc;
8276{
8277 unsigned int flags = default_section_type_flags (decl, name, reloc);
8278
91d231cb
JM
8279 if (decl && DECL_ATTRIBUTES (decl)
8280 && lookup_attribute ("overlaid", DECL_ATTRIBUTES (decl)))
7c262518 8281 flags |= SECTION_VMS_OVERLAY;
c1bd46a8
DR
8282 if (decl && DECL_ATTRIBUTES (decl)
8283 && lookup_attribute ("global", DECL_ATTRIBUTES (decl)))
8284 flags |= SECTION_VMS_GLOBAL;
8285 if (decl && DECL_ATTRIBUTES (decl)
8286 && lookup_attribute ("initialize", DECL_ATTRIBUTES (decl)))
8287 flags |= SECTION_VMS_INITIALIZE;
7c262518
RH
8288
8289 return flags;
8290}
8291
8292/* Switch to an arbitrary section NAME with attributes as specified
8293 by FLAGS. ALIGN specifies any known alignment requirements for
8294 the section; 0 if the default should be used. */
8295
8296static void
715bdd29 8297vms_asm_named_section (name, flags)
7c262518
RH
8298 const char *name;
8299 unsigned int flags;
7c262518 8300{
c1bd46a8
DR
8301 fputc ('\n', asm_out_file);
8302 fprintf (asm_out_file, ".section\t%s", name);
7c262518
RH
8303
8304 if (flags & SECTION_VMS_OVERLAY)
c1bd46a8
DR
8305 fprintf (asm_out_file, ",OVR");
8306 if (flags & SECTION_VMS_GLOBAL)
8307 fprintf (asm_out_file, ",GBL");
8308 if (flags & SECTION_VMS_INITIALIZE)
8309 fprintf (asm_out_file, ",NOMOD");
8310 if (flags & SECTION_DEBUG)
8311 fprintf (asm_out_file, ",NOWRT");
8312
8313 fputc ('\n', asm_out_file);
7c262518
RH
8314}
8315
2cc07db4
RH
/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.

   Differs from default_ctors_section_asm_out_constructor in that the
   width of the .ctors entry is always 64 bits, rather than the 32 bits
   used by a normal pointer.  */

static void
vms_asm_out_constructor (symbol, priority)
     rtx symbol;
     int priority ATTRIBUTE_UNUSED;
{
  /* PRIORITY is ignored; each entry is emitted in call order as a
     word-aligned, word-sized (64-bit) address.  */
  ctors_section ();
  assemble_align (BITS_PER_WORD);
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
8333
/* Likewise for global destructors: emit SYMBOL as a word-aligned 64-bit
   entry in the .dtors section.  PRIORITY is ignored.  */

static void
vms_asm_out_destructor (symbol, priority)
     rtx symbol;
     int priority ATTRIBUTE_UNUSED;
{
  dtors_section ();
  assemble_align (BITS_PER_WORD);
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
89cfc2c6
RK
8343#else
8344
/* Stub for non-VMS targets: no .linkage entries are ever needed, so
   callers always get NULL_RTX.  */

rtx
alpha_need_linkage (name, is_local)
     const char *name ATTRIBUTE_UNUSED;
     int is_local ATTRIBUTE_UNUSED;
{
  return NULL_RTX;
}
8352
be7b80f4 8353#endif /* TARGET_ABI_OPEN_VMS */
30102605
RH
8354\f
8355#if TARGET_ABI_UNICOSMK
8356
8357static void unicosmk_output_module_name PARAMS ((FILE *));
8358static void unicosmk_output_default_externs PARAMS ((FILE *));
8359static void unicosmk_output_dex PARAMS ((FILE *));
8360static void unicosmk_output_externs PARAMS ((FILE *));
8361static void unicosmk_output_addr_vec PARAMS ((FILE *, rtx));
8362static const char *unicosmk_ssib_name PARAMS ((void));
950a3816 8363static int unicosmk_special_name PARAMS ((const char *));
30102605
RH
8364
8365/* Define the offset between two registers, one to be eliminated, and the
8366 other its replacement, at the start of a routine. */
8367
8368int
8369unicosmk_initial_elimination_offset (from, to)
8370 int from;
8371 int to;
8372{
8373 int fixed_size;
8374
8375 fixed_size = alpha_sa_size();
8376 if (fixed_size != 0)
8377 fixed_size += 48;
8378
8379 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
8380 return -fixed_size;
8381 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
8382 return 0;
8383 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
8384 return (ALPHA_ROUND (current_function_outgoing_args_size)
8385 + ALPHA_ROUND (get_frame_size()));
8386 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
8387 return (ALPHA_ROUND (fixed_size)
8388 + ALPHA_ROUND (get_frame_size()
8389 + current_function_outgoing_args_size));
8390 else
8391 abort ();
8392}
8393
8394/* Output the module name for .ident and .end directives. We have to strip
8395 directories and add make sure that the module name starts with a letter
8396 or '$'. */
8397
8398static void
8399unicosmk_output_module_name (file)
8400 FILE *file;
8401{
8402 const char *name;
8403
8404 /* Strip directories. */
8405
8406 name = strrchr (main_input_filename, '/');
8407 if (name)
8408 ++name;
8409 else
8410 name = main_input_filename;
8411
8412 /* CAM only accepts module names that start with a letter or '$'. We
8413 prefix the module name with a '$' if necessary. */
8414
8415 if (!ISALPHA (*name))
8416 fprintf (file, "$%s", name);
8417 else
8418 fputs (name, file);
8419}
8420
/* Output text to appear at the beginning of an assembler file.  */

void
unicosmk_asm_file_start (file)
     FILE *file;
{
  int i;

  /* Module identification, derived from the input file name.  */
  fputs ("\t.ident\t", file);
  unicosmk_output_module_name (file);
  fputs ("\n\n", file);

  /* The Unicos/Mk assembler uses different register names.  Instead of trying
     to support them, we simply use micro definitions.  */

  /* CAM has different register names: rN for the integer register N and fN
     for the floating-point register N.  Instead of trying to use these in
     alpha.md, we define the symbols $N and $fN to refer to the appropriate
     register.  */

  for (i = 0; i < 32; ++i)
    fprintf (file, "$%d <- r%d\n", i, i);

  for (i = 0; i < 32; ++i)
    fprintf (file, "$f%d <- f%d\n", i, i);

  putc ('\n', file);

  /* The .align directive fills unused space with zeroes which does not work
     in code sections.  We define the macro 'gcc@code@align' which uses nops
     instead.  Note that it assumes that code sections always have the
     biggest possible alignment since . refers to the current offset from
     the beginning of the section.  */

  fputs ("\t.macro gcc@code@align n\n", file);
  fputs ("gcc@n@bytes = 1 << n\n", file);
  fputs ("gcc@here = . % gcc@n@bytes\n", file);
  fputs ("\t.if ne, gcc@here, 0\n", file);
  fputs ("\t.repeat (gcc@n@bytes - gcc@here) / 4\n", file);
  fputs ("\tbis r31,r31,r31\n", file);
  fputs ("\t.endr\n", file);
  fputs ("\t.endif\n", file);
  fputs ("\t.endm gcc@code@align\n\n", file);

  /* Output extern declarations which should always be visible.  */
  unicosmk_output_default_externs (file);

  /* Open a dummy section.  We always need to be inside a section for the
     section-switching code to work correctly.
     ??? This should be a module id or something like that.  I still have to
     figure out what the rules for those are.  */
  fputs ("\n\t.psect\t$SG00000,data\n", file);
}
8474
/* Output text to appear at the end of an assembler file.  This includes all
   pending extern declarations and DEX expressions.  */

void
unicosmk_asm_file_end (file)
     FILE *file;
{
  /* Close the psect opened by the most recent section switch.  */
  fputs ("\t.endp\n\n", file);

  /* Output all pending externs.  */

  unicosmk_output_externs (file);

  /* Output dex definitions used for functions whose names conflict with
     register names.  */

  unicosmk_output_dex (file);

  /* The .end directive names the module, matching the opening .ident.  */
  fputs ("\t.end\t", file);
  unicosmk_output_module_name (file);
  putc ('\n', file);
}
8497
8498/* Output the definition of a common variable. */
8499
8500void
8501unicosmk_output_common (file, name, size, align)
8502 FILE *file;
8503 const char *name;
8504 int size;
8505 int align;
8506{
8507 tree name_tree;
8508 printf ("T3E__: common %s\n", name);
8509
8510 common_section ();
8511 fputs("\t.endp\n\n\t.psect ", file);
8512 assemble_name(file, name);
8513 fprintf(file, ",%d,common\n", floor_log2 (align / BITS_PER_UNIT));
8514 fprintf(file, "\t.byte\t0:%d\n", size);
8515
8516 /* Mark the symbol as defined in this module. */
8517 name_tree = get_identifier (name);
8518 TREE_ASM_WRITTEN (name_tree) = 1;
8519}
8520
8521#define SECTION_PUBLIC SECTION_MACH_DEP
8522#define SECTION_MAIN (SECTION_PUBLIC << 1)
8523static int current_section_align;
8524
8525static unsigned int
8526unicosmk_section_type_flags (decl, name, reloc)
8527 tree decl;
8528 const char *name;
8529 int reloc ATTRIBUTE_UNUSED;
8530{
8531 unsigned int flags = default_section_type_flags (decl, name, reloc);
8532
8533 if (!decl)
8534 return flags;
8535
8536 if (TREE_CODE (decl) == FUNCTION_DECL)
8537 {
8538 current_section_align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
8539 if (align_functions_log > current_section_align)
8540 current_section_align = align_functions_log;
8541
8542 if (! strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "main"))
8543 flags |= SECTION_MAIN;
8544 }
8545 else
8546 current_section_align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);
8547
8548 if (TREE_PUBLIC (decl))
8549 flags |= SECTION_PUBLIC;
8550
8551 return flags;
8552}
8553
8554/* Generate a section name for decl and associate it with the
8555 declaration. */
8556
8557void
8558unicosmk_unique_section (decl, reloc)
8559 tree decl;
8560 int reloc ATTRIBUTE_UNUSED;
8561{
8562 const char *name;
8563 int len;
8564
8565 if (!decl)
8566 abort ();
8567
8568 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
8569 STRIP_NAME_ENCODING (name, name);
8570 len = strlen (name);
8571
8572 if (TREE_CODE (decl) == FUNCTION_DECL)
8573 {
8574 char *string;
8575
8576 /* It is essential that we prefix the section name here because
8577 otherwise the section names generated for constructors and
8578 destructors confuse collect2. */
8579
8580 string = alloca (len + 6);
8581 sprintf (string, "code@%s", name);
8582 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
8583 }
8584 else if (TREE_PUBLIC (decl))
8585 DECL_SECTION_NAME (decl) = build_string (len, name);
8586 else
8587 {
8588 char *string;
8589
8590 string = alloca (len + 6);
8591 sprintf (string, "data@%s", name);
8592 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
8593 }
8594}
8595
8596/* Switch to an arbitrary section NAME with attributes as specified
8597 by FLAGS. ALIGN specifies any known alignment requirements for
8598 the section; 0 if the default should be used. */
8599
8600static void
8601unicosmk_asm_named_section (name, flags)
8602 const char *name;
8603 unsigned int flags;
8604{
8605 const char *kind;
8606
8607 /* Close the previous section. */
8608
8609 fputs ("\t.endp\n\n", asm_out_file);
8610
8611 /* Find out what kind of section we are opening. */
8612
8613 if (flags & SECTION_MAIN)
8614 fputs ("\t.start\tmain\n", asm_out_file);
8615
8616 if (flags & SECTION_CODE)
8617 kind = "code";
8618 else if (flags & SECTION_PUBLIC)
8619 kind = "common";
8620 else
8621 kind = "data";
8622
8623 if (current_section_align != 0)
8624 fprintf (asm_out_file, "\t.psect\t%s,%d,%s\n", name,
8625 current_section_align, kind);
8626 else
8627 fprintf (asm_out_file, "\t.psect\t%s,%s\n", name, kind);
8628}
8629
8630static void
8631unicosmk_insert_attributes (decl, attr_ptr)
8632 tree decl;
8633 tree *attr_ptr ATTRIBUTE_UNUSED;
8634{
8635 if (DECL_P (decl)
8636 && (TREE_PUBLIC (decl) || TREE_CODE (decl) == FUNCTION_DECL))
8637 UNIQUE_SECTION (decl, 0);
8638}
8639
8640/* Output an alignment directive. We have to use the macro 'gcc@code@align'
8641 in code sections because .align fill unused space with zeroes. */
8642
8643void
8644unicosmk_output_align (file, align)
8645 FILE *file;
8646 int align;
8647{
8648 if (inside_function)
8649 fprintf (file, "\tgcc@code@align\t%d\n", align);
8650 else
8651 fprintf (file, "\t.align\t%d\n", align);
8652}
8653
8654/* Add a case vector to the current function's list of deferred case
8655 vectors. Case vectors have to be put into a separate section because CAM
8656 does not allow data definitions in code sections. */
8657
8658void
8659unicosmk_defer_case_vector (lab, vec)
8660 rtx lab;
8661 rtx vec;
8662{
8663 struct machine_function *machine = cfun->machine;
8664
8665 vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
8666 machine->addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec,
8667 machine->addr_list);
8668}
8669
8670/* Output a case vector. */
8671
8672static void
8673unicosmk_output_addr_vec (file, vec)
8674 FILE *file;
8675 rtx vec;
8676{
8677 rtx lab = XEXP (vec, 0);
8678 rtx body = XEXP (vec, 1);
8679 int vlen = XVECLEN (body, 0);
8680 int idx;
8681
8682 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (lab));
8683
8684 for (idx = 0; idx < vlen; idx++)
8685 {
8686 ASM_OUTPUT_ADDR_VEC_ELT
8687 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
8688 }
8689}
8690
8691/* Output current function's deferred case vectors. */
8692
8693static void
8694unicosmk_output_deferred_case_vectors (file)
8695 FILE *file;
8696{
8697 struct machine_function *machine = cfun->machine;
8698 rtx t;
8699
8700 if (machine->addr_list == NULL_RTX)
8701 return;
8702
8703 data_section ();
8704 for (t = machine->addr_list; t; t = XEXP (t, 1))
8705 unicosmk_output_addr_vec (file, XEXP (t, 0));
8706}
8707
/* Set up the dynamic subprogram information block (DSIB) and update the
   frame pointer register ($15) for subroutines which have a frame.  If the
   subroutine doesn't have a frame, simply increment $15.

   IMASKP points to the register save mask; bits for registers stored in
   the DSIB here are cleared so the normal prologue does not save them
   again.  The gen_blockage calls keep the scheduler from reordering the
   stores around the stack adjustments.  */

static void
unicosmk_gen_dsib (imaskP)
     unsigned long * imaskP;
{
  if (alpha_procedure_type == PT_STACK)
    {
      const char *ssib_name;
      rtx mem;

      /* Allocate 64 bytes for the DSIB.  */

      FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
				  GEN_INT (-64))));
      emit_insn (gen_blockage ());

      /* Save the return address.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 56));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
      (*imaskP) &= ~(1L << REG_RA);

      /* Save the old frame pointer.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 48));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, hard_frame_pointer_rtx));
      (*imaskP) &= ~(1L << HARD_FRAME_POINTER_REGNUM);

      emit_insn (gen_blockage ());

      /* Store the SSIB pointer.  */

      ssib_name = ggc_strdup (unicosmk_ssib_name ());
      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 32));
      set_mem_alias_set (mem, alpha_sr_alias_set);

      /* Register 5 is used as a scratch here; presumably it is free at
	 this point in the prologue.  */
      FRP (emit_move_insn (gen_rtx_REG (DImode, 5),
			   gen_rtx_SYMBOL_REF (Pmode, ssib_name)));
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 5)));

      /* Save the CIW index.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 24));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 25)));

      emit_insn (gen_blockage ());

      /* Set the new frame pointer.  */

      FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
				  stack_pointer_rtx, GEN_INT (64))));

    }
  else
    {
      /* Increment the frame pointer register to indicate that we do not
	 have a frame.  */

      FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
				  hard_frame_pointer_rtx, GEN_INT (1))));
    }
}
8776
8777#define SSIB_PREFIX "__SSIB_"
8778#define SSIB_PREFIX_LEN 7
8779
8780/* Generate the name of the SSIB section for the current function. */
8781
8782static const char *
8783unicosmk_ssib_name ()
8784{
8785 /* This is ok since CAM won't be able to deal with names longer than that
8786 anyway. */
8787
8788 static char name[256];
8789
8790 rtx x;
8791 const char *fnname;
30102605
RH
8792 int len;
8793
8794 x = DECL_RTL (cfun->decl);
8795 if (GET_CODE (x) != MEM)
8796 abort ();
8797 x = XEXP (x, 0);
8798 if (GET_CODE (x) != SYMBOL_REF)
8799 abort ();
8800 fnname = XSTR (x, 0);
8801 STRIP_NAME_ENCODING (fnname, fnname);
8802
8803 len = strlen (fnname);
8804 if (len + SSIB_PREFIX_LEN > 255)
8805 len = 255 - SSIB_PREFIX_LEN;
8806
8807 strcpy (name, SSIB_PREFIX);
8808 strncpy (name + SSIB_PREFIX_LEN, fnname, len);
8809 name[len + SSIB_PREFIX_LEN] = 0;
8810
8811 return name;
8812}
8813
/* Output the static subroutine information block for the current
   function.  FNNAME is the assembler name of the function.  The block
   consists of a header quadword, placeholder quadwords, the function
   address, the function name padded to a quadword boundary, and the
   collected call information words.  */

static void
unicosmk_output_ssib (file, fnname)
     FILE *file;
     const char *fnname;
{
  int len;
  int i;
  rtx x;
  rtx ciw;
  struct machine_function *machine = cfun->machine;

  ssib_section ();
  fprintf (file, "\t.endp\n\n\t.psect\t%s%s,data\n", user_label_prefix,
	   unicosmk_ssib_name ());

  /* Some required stuff and the function name length.  */

  len = strlen (fnname);
  fprintf (file, "\t.quad\t^X20008%2.2X28\n", len);

  /* Saved registers
     ??? We don't do that yet.  */

  fputs ("\t.quad\t0\n", file);

  /* Function address.  */

  fputs ("\t.quad\t", file);
  assemble_name (file, fnname);
  putc ('\n', file);

  fputs ("\t.quad\t0\n", file);
  fputs ("\t.quad\t0\n", file);

  /* Function name.
     ??? We do it the same way Cray CC does it but this could be
     simplified.  */

  for( i = 0; i < len; i++ )
    fprintf (file, "\t.byte\t%d\n", (int)(fnname[i]));
  /* Pad to a quadword boundary; a full zero quadword if the name
     already ends on one.  */
  if( (len % 8) == 0 )
    fputs ("\t.quad\t0\n", file);
  else
    fprintf (file, "\t.bits\t%d : 0\n", (8 - (len % 8))*8);

  /* All call information words used in the function.  */

  for (x = machine->first_ciw; x; x = XEXP (x, 1))
    {
      ciw = XEXP (x, 0);
      fprintf (file, "\t.quad\t");
#if HOST_BITS_PER_WIDE_INT == 32
      fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
	       CONST_DOUBLE_HIGH (ciw), CONST_DOUBLE_LOW (ciw));
#else
      fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (ciw));
#endif
      fprintf (file, "\n");
    }
}
8877
8878/* Add a call information word (CIW) to the list of the current function's
8879 CIWs and return its index.
8880
8881 X is a CONST_INT or CONST_DOUBLE representing the CIW. */
8882
8883rtx
8884unicosmk_add_call_info_word (x)
8885 rtx x;
8886{
8887 rtx node;
8888 struct machine_function *machine = cfun->machine;
8889
8890 node = gen_rtx_EXPR_LIST (VOIDmode, x, NULL_RTX);
8891 if (machine->first_ciw == NULL_RTX)
8892 machine->first_ciw = node;
8893 else
8894 XEXP (machine->last_ciw, 1) = node;
8895
8896 machine->last_ciw = node;
8897 ++machine->ciw_count;
8898
8899 return GEN_INT (machine->ciw_count
8900 + strlen (current_function_name)/8 + 5);
8901}
8902
/* Scratch buffer for the section-switch directive strings below.  */
static char unicosmk_section_buf[100];

/* Return the directive sequence that closes the current psect and opens
   a fresh, uniquely numbered code psect.  */

char *
unicosmk_text_section ()
{
  static int count = 0;

  sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@text___%d,code",
	   count++);
  return unicosmk_section_buf;
}
8913
8914char *
8915unicosmk_data_section ()
8916{
8917 static int count = 1;
8918 sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@data___%d,data",
8919 count++);
8920 return unicosmk_section_buf;
8921}
8922
/* The Cray assembler doesn't accept extern declarations for symbols which
   are defined in the same file.  We have to keep track of all global
   symbols which are referenced and/or defined in a source file and output
   extern declarations for those which are referenced but not defined at
   the end of file.  */

/* List of identifiers for which an extern declaration might have to be
   emitted.  */

struct unicosmk_extern_list
{
  struct unicosmk_extern_list *next;	/* Next entry (newest first).  */
  const char *name;			/* Possibly '*'-prefixed asm name.  */
};

/* Head of the list; entries are pushed by unicosmk_add_extern.  */
static struct unicosmk_extern_list *unicosmk_extern_head = 0;
8939
/* Output extern declarations which are required for every asm file.  */

static void
unicosmk_output_default_externs (file)
     FILE *file;
{
  static const char *const externs[] =
    { "__T3E_MISMATCH" };

  size_t i;

  for (i = 0; i < ARRAY_SIZE (externs); i++)
    fprintf (file, "\t.extern\t%s\n", externs[i]);
}
8957
8958/* Output extern declarations for global symbols which are have been
8959 referenced but not defined. */
8960
8961static void
8962unicosmk_output_externs (file)
8963 FILE *file;
8964{
8965 struct unicosmk_extern_list *p;
8966 const char *real_name;
8967 int len;
8968 tree name_tree;
8969
8970 len = strlen (user_label_prefix);
8971 for (p = unicosmk_extern_head; p != 0; p = p->next)
8972 {
8973 /* We have to strip the encoding and possibly remove user_label_prefix
8974 from the identifier in order to handle -fleading-underscore and
8975 explicit asm names correctly (cf. gcc.dg/asm-names-1.c). */
8976 STRIP_NAME_ENCODING (real_name, p->name);
8977 if (len && p->name[0] == '*'
8978 && !memcmp (real_name, user_label_prefix, len))
8979 real_name += len;
8980
8981 name_tree = get_identifier (real_name);
8982 if (! TREE_ASM_WRITTEN (name_tree))
8983 {
8984 TREE_ASM_WRITTEN (name_tree) = 1;
8985 fputs ("\t.extern\t", file);
8986 assemble_name (file, p->name);
8987 putc ('\n', file);
8988 }
8989 }
8990}
8991
8992/* Record an extern. */
8993
8994void
8995unicosmk_add_extern (name)
8996 const char *name;
8997{
8998 struct unicosmk_extern_list *p;
8999
9000 p = (struct unicosmk_extern_list *)
9001 permalloc (sizeof (struct unicosmk_extern_list));
9002 p->next = unicosmk_extern_head;
9003 p->name = name;
9004 unicosmk_extern_head = p;
9005}
9006
/* The Cray assembler generates incorrect code if identifiers which
   conflict with register names are used as instruction operands.  We have
   to replace such identifiers with DEX expressions.  */

/* Structure to collect identifiers which have been replaced by DEX
   expressions.  */

struct unicosmk_dex {
  struct unicosmk_dex *next;	/* Next entry (newest first).  */
  const char *name;		/* The replaced identifier.  */
};

/* List of identifiers which have been replaced by DEX expressions.  The DEX
   number is determined by the position in the list.  */

static struct unicosmk_dex *unicosmk_dex_list = NULL;

/* The number of elements in the DEX list.  */

static int unicosmk_dex_count = 0;
9027
/* Check if NAME must be replaced by a DEX expression, i.e. whether it
   looks like a CAM register name: 'r' or 'f' (either case, optionally
   preceded by '*' and/or '$') followed by a register number 0-31.  */

static int
unicosmk_special_name (name)
     const char *name;
{
  char c;

  /* Skip the assembler-name and user-label markers.  */
  if (name[0] == '*')
    ++name;
  if (name[0] == '$')
    ++name;

  c = name[0];
  if (c != 'r' && c != 'f' && c != 'R' && c != 'F')
    return 0;

  c = name[1];
  if (c == '1' || c == '2')
    /* 1, 2, or two-digit 10-29.  */
    return (name[2] == '\0' || (ISDIGIT (name[2]) && name[3] == '\0'));

  if (c == '3')
    /* 3, 30, or 31.  */
    return (name[2] == '\0'
	    || ((name[2] == '0' || name[2] == '1') && name[3] == '\0'));

  /* A single remaining digit: 0, 4-9.  */
  return (ISDIGIT (c) && name[2] == '\0');
}
9056
9057/* Return the DEX number if X must be replaced by a DEX expression and 0
9058 otherwise. */
9059
9060static int
9061unicosmk_need_dex (x)
9062 rtx x;
9063{
9064 struct unicosmk_dex *dex;
9065 const char *name;
9066 int i;
9067
9068 if (GET_CODE (x) != SYMBOL_REF)
9069 return 0;
9070
9071 name = XSTR (x,0);
9072 if (! unicosmk_special_name (name))
9073 return 0;
9074
9075 i = unicosmk_dex_count;
9076 for (dex = unicosmk_dex_list; dex; dex = dex->next)
9077 {
9078 if (! strcmp (name, dex->name))
9079 return i;
9080 --i;
9081 }
9082
9083 dex = (struct unicosmk_dex *) permalloc (sizeof (struct unicosmk_dex));
9084 dex->name = name;
9085 dex->next = unicosmk_dex_list;
9086 unicosmk_dex_list = dex;
9087
9088 ++unicosmk_dex_count;
9089 return unicosmk_dex_count;
9090}
9091
9092/* Output the DEX definitions for this file. */
9093
9094static void
9095unicosmk_output_dex (file)
9096 FILE *file;
9097{
9098 struct unicosmk_dex *dex;
9099 int i;
9100
9101 if (unicosmk_dex_list == NULL)
9102 return;
9103
9104 fprintf (file, "\t.dexstart\n");
9105
9106 i = unicosmk_dex_count;
9107 for (dex = unicosmk_dex_list; dex; dex = dex->next)
9108 {
9109 fprintf (file, "\tDEX (%d) = ", i);
9110 assemble_name (file, dex->name);
9111 putc ('\n', file);
9112 --i;
9113 }
9114
9115 fprintf (file, "\t.dexend\n");
9116}
9117
9118#else
9119
/* Stubs for the Unicos/Mk-specific hooks on all other targets: there are
   no deferred case vectors, no DSIB, no SSIB, no call information words,
   and no DEX replacement.  */

static void
unicosmk_output_deferred_case_vectors (file)
     FILE *file ATTRIBUTE_UNUSED;
{}

static void
unicosmk_gen_dsib (imaskP)
     unsigned long * imaskP ATTRIBUTE_UNUSED;
{}

static void
unicosmk_output_ssib (file, fnname)
     FILE * file ATTRIBUTE_UNUSED;
     const char * fnname ATTRIBUTE_UNUSED;
{}

rtx
unicosmk_add_call_info_word (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return NULL_RTX;
}

static int
unicosmk_need_dex (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return 0;
}
9149
9150#endif /* TARGET_ABI_UNICOSMK */
This page took 2.159163 seconds and 5 git commands to generate.