]> gcc.gnu.org Git - gcc.git/blame - gcc/config/alpha/alpha.c
alpha.c (alpha_adjust_cost): Remove everything but memory latency adjustments.
[gcc.git] / gcc / config / alpha / alpha.c
CommitLineData
a6f12d7c 1/* Subroutines used for code generation on the DEC Alpha.
9ddd9abd 2 Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
09e98324 3 2000, 2001 Free Software Foundation, Inc.
d60a05a1 4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
a6f12d7c
RK
5
6This file is part of GNU CC.
7
8GNU CC is free software; you can redistribute it and/or modify
9it under the terms of the GNU General Public License as published by
10the Free Software Foundation; either version 2, or (at your option)
11any later version.
12
13GNU CC is distributed in the hope that it will be useful,
14but WITHOUT ANY WARRANTY; without even the implied warranty of
15MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16GNU General Public License for more details.
17
18You should have received a copy of the GNU General Public License
19along with GNU CC; see the file COPYING. If not, write to
38ead7f3
RK
20the Free Software Foundation, 59 Temple Place - Suite 330,
21Boston, MA 02111-1307, USA. */
a6f12d7c
RK
22
23
a6f12d7c 24#include "config.h"
3c303f52 25#include "system.h"
a6f12d7c 26#include "rtl.h"
e78d8e51 27#include "tree.h"
a6f12d7c
RK
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
a6f12d7c
RK
33#include "output.h"
34#include "insn-attr.h"
35#include "flags.h"
36#include "recog.h"
a6f12d7c 37#include "expr.h"
e78d8e51
ZW
38#include "optabs.h"
39#include "reload.h"
a6f12d7c 40#include "obstack.h"
9ecc37f0
RH
41#include "except.h"
42#include "function.h"
3c303f52 43#include "toplev.h"
01439aee 44#include "ggc.h"
b91055dd 45#include "integrate.h"
aead1ca3 46#include "tm_p.h"
672a6f42
NB
47#include "target.h"
48#include "target-def.h"
14691f8d 49#include "debug.h"
f1e639b1 50#include "langhooks.h"
9ecc37f0 51
285a5742 52/* Specify which cpu to schedule for. */
9ecc37f0 53
9b009d45 54enum processor_type alpha_cpu;
df45c7ea 55static const char * const alpha_cpu_name[] =
bcbbac26
RH
56{
57 "ev4", "ev5", "ev6"
58};
da792a68 59
6245e3df
RK
60/* Specify how accurate floating-point traps need to be. */
61
62enum alpha_trap_precision alpha_tp;
63
64/* Specify the floating-point rounding mode. */
65
66enum alpha_fp_rounding_mode alpha_fprm;
67
68/* Specify which things cause traps. */
69
70enum alpha_fp_trap_mode alpha_fptm;
71
72/* Strings decoded into the above options. */
9ecc37f0 73
df45c7ea 74const char *alpha_cpu_string; /* -mcpu= */
a3b815cb 75const char *alpha_tune_string; /* -mtune= */
df45c7ea
KG
76const char *alpha_tp_string; /* -mtrap-precision=[p|s|i] */
77const char *alpha_fprm_string; /* -mfp-rounding-mode=[n|m|c|d] */
78const char *alpha_fptm_string; /* -mfp-trap-mode=[n|u|su|sui] */
79const char *alpha_mlat_string; /* -mmemory-latency= */
6245e3df 80
a6f12d7c
RK
81/* Save information from a "cmpxx" operation until the branch or scc is
82 emitted. */
83
6db21c7f 84struct alpha_compare alpha_compare;
a6f12d7c 85
48f6bfac
RK
86/* Non-zero if inside of a function, because the Alpha asm can't
87 handle .files inside of functions. */
88
89static int inside_function = FALSE;
90
bcbbac26
RH
91/* The number of cycles of latency we should assume on memory reads. */
92
93int alpha_memory_latency = 3;
94
9c0e94a5
RH
95/* Whether the function needs the GP. */
96
97static int alpha_function_needs_gp;
98
3873d24b
RH
99/* The alias set for prologue/epilogue register save/restore. */
100
101static int alpha_sr_alias_set;
102
941cc05a
RK
103/* The assembler name of the current function. */
104
105static const char *alpha_fnname;
106
1eb356b9
RH
107/* The next explicit relocation sequence number. */
108int alpha_next_sequence_number = 1;
109
110/* The literal and gpdisp sequence numbers for this insn, as printed
111 by %# and %* respectively. */
112int alpha_this_literal_sequence_number;
113int alpha_this_gpdisp_sequence_number;
114
d60a05a1 115/* Declarations of static functions. */
62918bd3
RH
116static bool decl_in_text_section
117 PARAMS ((tree));
a615ca3e 118static int some_small_symbolic_operand_1
1e7e480e 119 PARAMS ((rtx *, void *));
a615ca3e 120static int split_small_symbolic_operand_1
1e7e480e 121 PARAMS ((rtx *, void *));
e2c9fb9b
RH
122static bool local_symbol_p
123 PARAMS ((rtx));
9c0e94a5 124static void alpha_set_memflags_1
f6da8bc3 125 PARAMS ((rtx, int, int, int));
9c0e94a5 126static rtx alpha_emit_set_const_1
f6da8bc3 127 PARAMS ((rtx, enum machine_mode, HOST_WIDE_INT, int));
9c0e94a5 128static void alpha_expand_unaligned_load_words
f6da8bc3 129 PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
9c0e94a5 130static void alpha_expand_unaligned_store_words
f6da8bc3 131 PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
9c0e94a5 132static void alpha_sa_mask
f6da8bc3 133 PARAMS ((unsigned long *imaskP, unsigned long *fmaskP));
1eb356b9
RH
134static int find_lo_sum
135 PARAMS ((rtx *, void *));
9c0e94a5 136static int alpha_does_function_need_gp
f6da8bc3 137 PARAMS ((void));
5495cc55
RH
138static int alpha_ra_ever_killed
139 PARAMS ((void));
be7560ea
RH
140static const char *get_trap_mode_suffix
141 PARAMS ((void));
142static const char *get_round_mode_suffix
143 PARAMS ((void));
5495cc55
RH
144static rtx set_frame_related_p
145 PARAMS ((void));
146static const char *alpha_lookup_xfloating_lib_func
147 PARAMS ((enum rtx_code));
148static int alpha_compute_xfloating_mode_arg
149 PARAMS ((enum rtx_code, enum alpha_fp_rounding_mode));
150static void alpha_emit_xfloating_libcall
151 PARAMS ((const char *, rtx, rtx[], int, rtx));
152static rtx alpha_emit_xfloating_compare
153 PARAMS ((enum rtx_code, rtx, rtx));
b4c25db2
NB
154static void alpha_output_function_end_prologue
155 PARAMS ((FILE *));
c237e94a
ZW
156static int alpha_adjust_cost
157 PARAMS ((rtx, rtx, rtx, int));
158static int alpha_issue_rate
159 PARAMS ((void));
98791e3a
RH
160static int alpha_use_dfa_pipeline_interface
161 PARAMS ((void));
162static int alpha_multipass_dfa_lookahead
163 PARAMS ((void));
89cfc2c6 164
30102605
RH
165#if TARGET_ABI_UNICOSMK
166static void alpha_init_machine_status
167 PARAMS ((struct function *p));
168static void alpha_mark_machine_status
169 PARAMS ((struct function *p));
170static void alpha_free_machine_status
171 PARAMS ((struct function *p));
172#endif
173
174static void unicosmk_output_deferred_case_vectors PARAMS ((FILE *));
175static void unicosmk_gen_dsib PARAMS ((unsigned long *imaskP));
176static void unicosmk_output_ssib PARAMS ((FILE *, const char *));
177static int unicosmk_need_dex PARAMS ((rtx));
178
e9a25f70 179/* Get the number of args of a function in one of two ways. */
30102605 180#if TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK
e9a25f70
JL
181#define NUM_ARGS current_function_args_info.num_args
182#else
183#define NUM_ARGS current_function_args_info
184#endif
26250081 185
26250081
RH
186#define REG_PV 27
187#define REG_RA 26
a6f12d7c 188\f
672a6f42 189/* Initialize the GCC target structure. */
be7b80f4 190#if TARGET_ABI_OPEN_VMS
91d231cb 191const struct attribute_spec vms_attribute_table[];
7c262518 192static unsigned int vms_section_type_flags PARAMS ((tree, const char *, int));
715bdd29 193static void vms_asm_named_section PARAMS ((const char *, unsigned int));
2cc07db4
RH
194static void vms_asm_out_constructor PARAMS ((rtx, int));
195static void vms_asm_out_destructor PARAMS ((rtx, int));
91d231cb
JM
196# undef TARGET_ATTRIBUTE_TABLE
197# define TARGET_ATTRIBUTE_TABLE vms_attribute_table
7c262518
RH
198# undef TARGET_SECTION_TYPE_FLAGS
199# define TARGET_SECTION_TYPE_FLAGS vms_section_type_flags
8289c43b 200#endif
672a6f42 201
30102605
RH
202#if TARGET_ABI_UNICOSMK
203static void unicosmk_asm_named_section PARAMS ((const char *, unsigned int));
204static void unicosmk_insert_attributes PARAMS ((tree, tree *));
205static unsigned int unicosmk_section_type_flags PARAMS ((tree, const char *,
206 int));
207# undef TARGET_INSERT_ATTRIBUTES
208# define TARGET_INSERT_ATTRIBUTES unicosmk_insert_attributes
209# undef TARGET_SECTION_TYPE_FLAGS
210# define TARGET_SECTION_TYPE_FLAGS unicosmk_section_type_flags
211#endif
212
301d03af
RS
213#undef TARGET_ASM_ALIGNED_HI_OP
214#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
215#undef TARGET_ASM_ALIGNED_DI_OP
216#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
217
218/* Default unaligned ops are provided for ELF systems. To get unaligned
219 data for non-ELF systems, we have to turn off auto alignment. */
220#ifndef OBJECT_FORMAT_ELF
221#undef TARGET_ASM_UNALIGNED_HI_OP
222#define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t"
223#undef TARGET_ASM_UNALIGNED_SI_OP
224#define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t"
225#undef TARGET_ASM_UNALIGNED_DI_OP
226#define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t"
227#endif
228
b4c25db2
NB
229#undef TARGET_ASM_FUNCTION_END_PROLOGUE
230#define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue
231
c237e94a
ZW
232#undef TARGET_SCHED_ADJUST_COST
233#define TARGET_SCHED_ADJUST_COST alpha_adjust_cost
234#undef TARGET_SCHED_ISSUE_RATE
235#define TARGET_SCHED_ISSUE_RATE alpha_issue_rate
98791e3a
RH
236#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
237#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE \
238 alpha_use_dfa_pipeline_interface
239#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
240#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
241 alpha_multipass_dfa_lookahead
c237e94a 242
f6897b10 243struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 244\f
/* Parse target option strings.

   Decodes the -mcpu/-mtune/-mtrap-precision/-mfp-rounding-mode/
   -mfp-trap-mode/-mmemory-latency strings into the corresponding
   alpha_* globals, applies per-ABI restrictions (Unicos/Mk in
   particular), and sets up alignment and alias-set defaults.
   Ordering matters: explicit option strings override the IEEE-implied
   defaults, and the sanity checks at the end override both.  */

void
override_options ()
{
  int i;
  /* Table mapping -mcpu/-mtune names to a processor enum plus the
     target_flags ISA bits that CPU implies.  */
  static const struct cpu_table {
    const char *const name;
    const enum processor_type processor;
    const int flags;
  } cpu_table[] = {
#define EV5_MASK (MASK_CPU_EV5)
#define EV6_MASK (MASK_CPU_EV6|MASK_BWX|MASK_MAX|MASK_FIX)
    { "ev4",	PROCESSOR_EV4, 0 },
    { "ev45",	PROCESSOR_EV4, 0 },
    { "21064",	PROCESSOR_EV4, 0 },
    { "ev5",	PROCESSOR_EV5, EV5_MASK },
    { "21164",	PROCESSOR_EV5, EV5_MASK },
    { "ev56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
    { "21164a",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
    { "pca56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
    { "21164PC",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
    { "21164pc",PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
    { "ev6",	PROCESSOR_EV6, EV6_MASK },
    { "21264",	PROCESSOR_EV6, EV6_MASK },
    { "ev67",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
    { "21264a",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
    { 0, 0, 0 }
  };

  /* Unicos/Mk doesn't have shared libraries.  */
  if (TARGET_ABI_UNICOSMK && flag_pic)
    {
      warning ("-f%s ignored for Unicos/Mk (not supported)",
	       (flag_pic > 1) ? "PIC" : "pic");
      flag_pic = 0;
    }

  /* On Unicos/Mk, the native compiler consistently generates /d suffixes
     for floating-point instructions.  Make that the default for this
     target.  */
  if (TARGET_ABI_UNICOSMK)
    alpha_fprm = ALPHA_FPRM_DYN;
  else
    alpha_fprm = ALPHA_FPRM_NORM;

  alpha_tp = ALPHA_TP_PROG;
  alpha_fptm = ALPHA_FPTM_N;

  /* We cannot use su and sui qualifiers for conversion instructions on
     Unicos/Mk.  I'm not sure if this is due to assembler or hardware
     limitations.  Right now, we issue a warning if -mieee is specified
     and then ignore it; eventually, we should either get it right or
     disable the option altogether.  */

  if (TARGET_IEEE)
    {
      if (TARGET_ABI_UNICOSMK)
	warning ("-mieee not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SU;
	}
    }

  if (TARGET_IEEE_WITH_INEXACT)
    {
      if (TARGET_ABI_UNICOSMK)
	warning ("-mieee-with-inexact not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SUI;
	}
    }

  /* Explicit -mtrap-precision overrides the -mieee default.  */
  if (alpha_tp_string)
    {
      if (! strcmp (alpha_tp_string, "p"))
	alpha_tp = ALPHA_TP_PROG;
      else if (! strcmp (alpha_tp_string, "f"))
	alpha_tp = ALPHA_TP_FUNC;
      else if (! strcmp (alpha_tp_string, "i"))
	alpha_tp = ALPHA_TP_INSN;
      else
	error ("bad value `%s' for -mtrap-precision switch", alpha_tp_string);
    }

  /* Explicit -mfp-rounding-mode.  */
  if (alpha_fprm_string)
    {
      if (! strcmp (alpha_fprm_string, "n"))
	alpha_fprm = ALPHA_FPRM_NORM;
      else if (! strcmp (alpha_fprm_string, "m"))
	alpha_fprm = ALPHA_FPRM_MINF;
      else if (! strcmp (alpha_fprm_string, "c"))
	alpha_fprm = ALPHA_FPRM_CHOP;
      else if (! strcmp (alpha_fprm_string,"d"))
	alpha_fprm = ALPHA_FPRM_DYN;
      else
	error ("bad value `%s' for -mfp-rounding-mode switch",
	       alpha_fprm_string);
    }

  /* Explicit -mfp-trap-mode.  */
  if (alpha_fptm_string)
    {
      if (strcmp (alpha_fptm_string, "n") == 0)
	alpha_fptm = ALPHA_FPTM_N;
      else if (strcmp (alpha_fptm_string, "u") == 0)
	alpha_fptm = ALPHA_FPTM_U;
      else if (strcmp (alpha_fptm_string, "su") == 0)
	alpha_fptm = ALPHA_FPTM_SU;
      else if (strcmp (alpha_fptm_string, "sui") == 0)
	alpha_fptm = ALPHA_FPTM_SUI;
      else
	error ("bad value `%s' for -mfp-trap-mode switch", alpha_fptm_string);
    }

  /* Derive the default CPU from the configure-time TARGET_CPU_DEFAULT
     bits before consulting -mcpu/-mtune.  */
  alpha_cpu
    = TARGET_CPU_DEFAULT & MASK_CPU_EV6 ? PROCESSOR_EV6
      : (TARGET_CPU_DEFAULT & MASK_CPU_EV5 ? PROCESSOR_EV5 : PROCESSOR_EV4);

  /* -mcpu selects both the schedule model and the ISA flag bits.  */
  if (alpha_cpu_string)
    {
      for (i = 0; cpu_table [i].name; i++)
	if (! strcmp (alpha_cpu_string, cpu_table [i].name))
	  {
	    alpha_cpu = cpu_table [i].processor;
	    target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX
			       | MASK_CPU_EV5 | MASK_CPU_EV6);
	    target_flags |= cpu_table [i].flags;
	    break;
	  }
      if (! cpu_table [i].name)
	error ("bad value `%s' for -mcpu switch", alpha_cpu_string);
    }

  /* -mtune selects only the schedule model, leaving the ISA flags alone.
     ??? The diagnostic text says -mcpu although this is -mtune.  */
  if (alpha_tune_string)
    {
      for (i = 0; cpu_table [i].name; i++)
	if (! strcmp (alpha_tune_string, cpu_table [i].name))
	  {
	    alpha_cpu = cpu_table [i].processor;
	    break;
	  }
      if (! cpu_table [i].name)
	error ("bad value `%s' for -mcpu switch", alpha_tune_string);
    }

  /* Do some sanity checks on the above options.  */

  if (TARGET_ABI_UNICOSMK && alpha_fptm != ALPHA_FPTM_N)
    {
      warning ("trap mode not supported on Unicos/Mk");
      alpha_fptm = ALPHA_FPTM_N;
    }

  if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI)
      && alpha_tp != ALPHA_TP_INSN && ! TARGET_CPU_EV6)
    {
      warning ("fp software completion requires -mtrap-precision=i");
      alpha_tp = ALPHA_TP_INSN;
    }

  if (TARGET_CPU_EV6)
    {
      /* Except for EV6 pass 1 (not released), we always have precise
	 arithmetic traps.  Which means we can do software completion
	 without minding trap shadows.  */
      alpha_tp = ALPHA_TP_PROG;
    }

  if (TARGET_FLOAT_VAX)
    {
      if (alpha_fprm == ALPHA_FPRM_MINF || alpha_fprm == ALPHA_FPRM_DYN)
	{
	  warning ("rounding mode not supported for VAX floats");
	  alpha_fprm = ALPHA_FPRM_NORM;
	}
      if (alpha_fptm == ALPHA_FPTM_SUI)
	{
	  warning ("trap mode not supported for VAX floats");
	  alpha_fptm = ALPHA_FPTM_SU;
	}
    }

  /* Decode -mmemory-latency.  Accepts a raw cycle count, "Ln" for a
     cache level (looked up per-CPU below), or "main" for main memory.  */
  {
    char *end;
    int lat;

    if (!alpha_mlat_string)
      alpha_mlat_string = "L1";

    if (ISDIGIT ((unsigned char)alpha_mlat_string[0])
	&& (lat = strtol (alpha_mlat_string, &end, 10), *end == '\0'))
      ;
    else if ((alpha_mlat_string[0] == 'L' || alpha_mlat_string[0] == 'l')
	     && ISDIGIT ((unsigned char)alpha_mlat_string[1])
	     && alpha_mlat_string[2] == '\0')
      {
	/* Per-CPU latencies for L1/L2/L3; -1 marks "no such cache".  */
	static int const cache_latency[][4] =
	{
	  { 3, 30, -1 },	/* ev4 -- Bcache is a guess */
	  { 2, 12, 38 },	/* ev5 -- Bcache from PC164 LMbench numbers */
	  { 3, 12, 30 },	/* ev6 -- Bcache from DS20 LMbench.  */
	};

	lat = alpha_mlat_string[1] - '0';
	if (lat <= 0 || lat > 3 || cache_latency[alpha_cpu][lat-1] == -1)
	  {
	    warning ("L%d cache latency unknown for %s",
		     lat, alpha_cpu_name[alpha_cpu]);
	    lat = 3;
	  }
	else
	  lat = cache_latency[alpha_cpu][lat-1];
      }
    else if (! strcmp (alpha_mlat_string, "main"))
      {
	/* Most current memories have about 370ns latency.  This is
	   a reasonable guess for a fast cpu.  */
	lat = 150;
      }
    else
      {
	warning ("bad value `%s' for -mmemory-latency", alpha_mlat_string);
	lat = 3;
      }

    alpha_memory_latency = lat;
  }

  /* Default the definition of "small data" to 8 bytes.  */
  if (!g_switch_set)
    g_switch_value = 8;

  /* Infer TARGET_SMALL_DATA from -fpic/-fPIC.  */
  if (flag_pic == 1)
    target_flags |= MASK_SMALL_DATA;
  else if (flag_pic == 2)
    target_flags &= ~MASK_SMALL_DATA;

  /* Align labels and loops for optimal branching.  */
  /* ??? Kludge these by not doing anything if we don't optimize and also if
     we are writing ECOFF symbols to work around a bug in DEC's assembler.  */
  if (optimize > 0 && write_symbols != SDB_DEBUG)
    {
      if (align_loops <= 0)
	align_loops = 16;
      if (align_jumps <= 0)
	align_jumps = 16;
    }
  if (align_functions <= 0)
    align_functions = 16;

  /* Acquire a unique set number for our register saves and restores.  */
  alpha_sr_alias_set = new_alias_set ();

  /* Register variables and functions with the garbage collector.  */

#if TARGET_ABI_UNICOSMK
  /* Set up function hooks.  */
  init_machine_status = alpha_init_machine_status;
  mark_machine_status = alpha_mark_machine_status;
  free_machine_status = alpha_free_machine_status;
#endif
}
511\f
a6f12d7c
RK
512/* Returns 1 if VALUE is a mask that contains full bytes of zero or ones. */
513
514int
515zap_mask (value)
516 HOST_WIDE_INT value;
517{
518 int i;
519
520 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
521 i++, value >>= 8)
522 if ((value & 0xff) != 0 && (value & 0xff) != 0xff)
523 return 0;
524
525 return 1;
526}
527
528/* Returns 1 if OP is either the constant zero or a register. If a
529 register, it must be in the proper mode unless MODE is VOIDmode. */
530
531int
532reg_or_0_operand (op, mode)
533 register rtx op;
534 enum machine_mode mode;
535{
536 return op == const0_rtx || register_operand (op, mode);
537}
538
f4014bfd
RK
539/* Return 1 if OP is a constant in the range of 0-63 (for a shift) or
540 any register. */
541
542int
543reg_or_6bit_operand (op, mode)
544 register rtx op;
545 enum machine_mode mode;
546{
547 return ((GET_CODE (op) == CONST_INT
548 && (unsigned HOST_WIDE_INT) INTVAL (op) < 64)
549 || register_operand (op, mode));
550}
551
552
a6f12d7c
RK
553/* Return 1 if OP is an 8-bit constant or any register. */
554
555int
556reg_or_8bit_operand (op, mode)
557 register rtx op;
558 enum machine_mode mode;
559{
560 return ((GET_CODE (op) == CONST_INT
561 && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100)
562 || register_operand (op, mode));
563}
564
14edc0e4
TG
565/* Return 1 if OP is an 8-bit constant. */
566
567int
568cint8_operand (op, mode)
569 register rtx op;
3c303f52 570 enum machine_mode mode ATTRIBUTE_UNUSED;
14edc0e4 571{
e3208d53 572 return ((GET_CODE (op) == CONST_INT
eb8da868 573 && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100));
14edc0e4
TG
574}
575
a6f12d7c
RK
576/* Return 1 if the operand is a valid second operand to an add insn. */
577
578int
579add_operand (op, mode)
580 register rtx op;
581 enum machine_mode mode;
582{
583 if (GET_CODE (op) == CONST_INT)
80df65c9 584 /* Constraints I, J, O and P are covered by K. */
e6118f89 585 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')
80df65c9 586 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
a6f12d7c
RK
587
588 return register_operand (op, mode);
589}
590
591/* Return 1 if the operand is a valid second operand to a sign-extending
592 add insn. */
593
594int
595sext_add_operand (op, mode)
596 register rtx op;
597 enum machine_mode mode;
598{
599 if (GET_CODE (op) == CONST_INT)
80df65c9
RH
600 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
601 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'));
a6f12d7c 602
c5c76735 603 return reg_not_elim_operand (op, mode);
a6f12d7c
RK
604}
605
606/* Return 1 if OP is the constant 4 or 8. */
607
608int
609const48_operand (op, mode)
610 register rtx op;
3c303f52 611 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
612{
613 return (GET_CODE (op) == CONST_INT
614 && (INTVAL (op) == 4 || INTVAL (op) == 8));
615}
616
617/* Return 1 if OP is a valid first operand to an AND insn. */
618
619int
620and_operand (op, mode)
621 register rtx op;
622 enum machine_mode mode;
623{
624 if (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
625 return (zap_mask (CONST_DOUBLE_LOW (op))
626 && zap_mask (CONST_DOUBLE_HIGH (op)));
627
628 if (GET_CODE (op) == CONST_INT)
629 return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
630 || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100
631 || zap_mask (INTVAL (op)));
632
633 return register_operand (op, mode);
634}
635
c7def335 636/* Return 1 if OP is a valid first operand to an IOR or XOR insn. */
8088469d
RK
637
638int
c7def335 639or_operand (op, mode)
8088469d
RK
640 register rtx op;
641 enum machine_mode mode;
642{
643 if (GET_CODE (op) == CONST_INT)
644 return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
645 || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100);
646
647 return register_operand (op, mode);
648}
649
a6f12d7c
RK
650/* Return 1 if OP is a constant that is the width, in bits, of an integral
651 mode smaller than DImode. */
652
653int
654mode_width_operand (op, mode)
655 register rtx op;
3c303f52 656 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
657{
658 return (GET_CODE (op) == CONST_INT
6c174fc0
RH
659 && (INTVAL (op) == 8 || INTVAL (op) == 16
660 || INTVAL (op) == 32 || INTVAL (op) == 64));
a6f12d7c
RK
661}
662
/* Return 1 if OP is a constant that is the width of an integral machine mode
   smaller than an integer.  That is, a low-bits mask: 0xff, 0xffff,
   0xffffffff, or (on 64-bit hosts) all-ones.  */

int
mode_mask_operand (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if HOST_BITS_PER_WIDE_INT == 32
  /* On a 32-bit host a 64-bit mask constant arrives as a CONST_DOUBLE;
     accept an all-ones low half with a high half of all-ones (DImode -1)
     or zero (0xffffffff zero-extended).  */
  if (GET_CODE (op) == CONST_DOUBLE)
    return (CONST_DOUBLE_LOW (op) == -1
	    && (CONST_DOUBLE_HIGH (op) == -1
		|| CONST_DOUBLE_HIGH (op) == 0));
#else
  if (GET_CODE (op) == CONST_DOUBLE)
    return (CONST_DOUBLE_LOW (op) == -1 && CONST_DOUBLE_HIGH (op) == 0);
#endif

  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) == 0xff
	      || INTVAL (op) == 0xffff
	      || INTVAL (op) == (HOST_WIDE_INT)0xffffffff
#if HOST_BITS_PER_WIDE_INT == 64
	      /* Only representable as a CONST_INT on 64-bit hosts.  */
	      || INTVAL (op) == -1
#endif
	      ));
}
690
691/* Return 1 if OP is a multiple of 8 less than 64. */
692
693int
694mul8_operand (op, mode)
695 register rtx op;
3c303f52 696 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
697{
698 return (GET_CODE (op) == CONST_INT
699 && (unsigned HOST_WIDE_INT) INTVAL (op) < 64
700 && (INTVAL (op) & 7) == 0);
701}
702
703/* Return 1 if OP is the constant zero in floating-point. */
704
705int
706fp0_operand (op, mode)
707 register rtx op;
708 enum machine_mode mode;
709{
710 return (GET_MODE (op) == mode
711 && GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode));
712}
713
714/* Return 1 if OP is the floating-point constant zero or a register. */
715
716int
717reg_or_fp0_operand (op, mode)
718 register rtx op;
719 enum machine_mode mode;
720{
721 return fp0_operand (op, mode) || register_operand (op, mode);
722}
723
4ed43ff8
RH
724/* Return 1 if OP is a hard floating-point register. */
725
726int
727hard_fp_register_operand (op, mode)
728 register rtx op;
729 enum machine_mode mode;
730{
d2c6a1b6
RH
731 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
732 return 0;
733
734 if (GET_CODE (op) == SUBREG)
735 op = SUBREG_REG (op);
736 return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == FLOAT_REGS;
737}
738
739/* Return 1 if OP is a hard general register. */
740
741int
742hard_int_register_operand (op, mode)
743 register rtx op;
744 enum machine_mode mode;
745{
746 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
747 return 0;
748
749 if (GET_CODE (op) == SUBREG)
750 op = SUBREG_REG (op);
751 return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS;
4ed43ff8
RH
752}
753
a6f12d7c
RK
754/* Return 1 if OP is a register or a constant integer. */
755
756
757int
758reg_or_cint_operand (op, mode)
759 register rtx op;
760 enum machine_mode mode;
761{
e3208d53 762 return (GET_CODE (op) == CONST_INT
e3208d53 763 || register_operand (op, mode));
a6f12d7c
RK
764}
765
8d36d33b
RK
766/* Return 1 if OP is something that can be reloaded into a register;
767 if it is a MEM, it need not be valid. */
768
769int
770some_operand (op, mode)
771 register rtx op;
772 enum machine_mode mode;
773{
774 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
775 return 0;
776
777 switch (GET_CODE (op))
778 {
e3208d53 779 case REG: case MEM: case CONST_DOUBLE: case CONST_INT: case LABEL_REF:
551cc6fd 780 case SYMBOL_REF: case CONST: case HIGH:
8d36d33b
RK
781 return 1;
782
783 case SUBREG:
784 return some_operand (SUBREG_REG (op), VOIDmode);
1d300e19
KG
785
786 default:
787 break;
8d36d33b
RK
788 }
789
790 return 0;
791}
792
f711a22b
RH
793/* Likewise, but don't accept constants. */
794
795int
796some_ni_operand (op, mode)
797 register rtx op;
798 enum machine_mode mode;
799{
800 if (GET_MODE (op) != mode && mode != VOIDmode)
801 return 0;
802
803 if (GET_CODE (op) == SUBREG)
804 op = SUBREG_REG (op);
805
806 return (GET_CODE (op) == REG || GET_CODE (op) == MEM);
807}
808
a6f12d7c
RK
/* Return 1 if OP is a valid operand for the source of a move insn.  */

int
input_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  /* A floating-point source must carry the mode itself; VOIDmode
     constants are not acceptable here.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT && GET_MODE (op) != mode)
    return 0;

  switch (GET_CODE (op))
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
      if (TARGET_EXPLICIT_RELOCS)
	{
	  /* We don't split symbolic operands into something unintelligible
	     until after reload, but we do not wish non-small, non-global
	     symbolic operands to be reconstructed from their high/lo_sum
	     form.  */
	  return (small_symbolic_operand (op, mode)
		  || global_symbolic_operand (op, mode));
	}

      /* This handles both the Windows/NT and OSF cases.  */
      return mode == ptr_mode || mode == DImode;

    case HIGH:
      /* HIGH parts only appear with explicit relocations, and only
	 for symbols known to be local.  */
      return (TARGET_EXPLICIT_RELOCS
	      && local_symbolic_operand (XEXP (op, 0), mode));

    case REG:
    case ADDRESSOF:
      return 1;

    case SUBREG:
      if (register_operand (op, mode))
	return 1;
      /* ... fall through ...  */
    case MEM:
      /* Without BWX, byte and word memory accesses are not available.  */
      return ((TARGET_BWX || (mode != HImode && mode != QImode))
	      && general_operand (op, mode));

    case CONST_DOUBLE:
      /* Only the floating-point zero is a valid immediate FP source.  */
      return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);

    case CONST_INT:
      return mode == QImode || mode == HImode || add_operand (op, mode);

    case CONSTANT_P_RTX:
      return 1;

    default:
      break;
    }

  return 0;
}
871
/* Return 1 if OP is a SYMBOL_REF for a function known to be in this
   file, and in the same section as the current function.  */

int
current_file_function_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  /* Easy test for recursion.  */
  if (op == XEXP (DECL_RTL (current_function_decl), 0))
    return 1;

  /* Otherwise, we need the DECL for the SYMBOL_REF, which we can't get.
     So SYMBOL_REF_FLAG has been declared to imply that the function is
     in the default text section.  So we must also check that the current
     function is also in the text section.  */
  if (SYMBOL_REF_FLAG (op) && decl_in_text_section (current_function_decl))
    return 1;

  return 0;
}
896
/* Return 1 if OP is a SYMBOL_REF for which we can make a call via bsr.  */

int
direct_call_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Must be defined in this file.  */
  if (! current_file_function_operand (op, mode))
    return 0;

  /* If profiling is implemented via linker tricks, we can't jump
     to the nogp alternate entry point.  */
  /* ??? TARGET_PROFILING_NEEDS_GP isn't really the right test,
     but is approximately correct for the OSF ABIs.  Don't know
     what to do for VMS, NT, or UMK.  */
  /* NOTE(review): the polarity of this test (rejecting the call when
     neither condition holds) looks inverted relative to the intent
     stated above -- confirm against the alpha.md call patterns.  */
  if (! TARGET_PROFILING_NEEDS_GP
      && ! current_function_profile)
    return 0;

  return 1;
}
919
e2c9fb9b
RH
920/* Return true if OP is a LABEL_REF, or SYMBOL_REF or CONST referencing
921 a variable known to be defined in this file. */
922
923static bool
924local_symbol_p (op)
925 rtx op;
926{
927 const char *str = XSTR (op, 0);
928
929 /* ??? SYMBOL_REF_FLAG is set for local function symbols, but we
930 run into problems with the rtl inliner in that the symbol was
931 once external, but is local after inlining, which results in
932 unrecognizable insns. */
933
934 return (CONSTANT_POOL_ADDRESS_P (op)
935 /* If @, then ENCODE_SECTION_INFO sez it's local. */
936 || str[0] == '@'
937 /* If *$, then ASM_GENERATE_INTERNAL_LABEL sez it's local. */
938 || (str[0] == '*' && str[1] == '$'));
939}
1eb356b9
RH
940
941int
942local_symbolic_operand (op, mode)
943 rtx op;
30102605 944 enum machine_mode mode;
1eb356b9 945{
30102605
RH
946 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
947 return 0;
948
1eb356b9
RH
949 if (GET_CODE (op) == LABEL_REF)
950 return 1;
951
952 if (GET_CODE (op) == CONST
953 && GET_CODE (XEXP (op, 0)) == PLUS
954 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
955 op = XEXP (XEXP (op, 0), 0);
956
957 if (GET_CODE (op) != SYMBOL_REF)
958 return 0;
959
e2c9fb9b 960 return local_symbol_p (op);
1eb356b9
RH
961}
962
133d3133
RH
/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
   known to be defined in this file in the small data area.  */

int
small_symbolic_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  const char *str;

  if (! TARGET_SMALL_DATA)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  /* Peel off a constant offset: (const (plus SYM N)) refers to SYM.  */
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  /* Constant pool entries count as small data only when they fit
     under the -G size threshold.  */
  if (CONSTANT_POOL_ADDRESS_P (op))
    return GET_MODE_SIZE (get_pool_mode (op)) <= (unsigned) g_switch_value;
  else
    {
      /* "@s" prefix marks small-data symbols -- presumably applied by
	 ENCODE_SECTION_INFO; confirm against that macro.  */
      str = XSTR (op, 0);
      return str[0] == '@' && str[1] == 's';
    }
}
995
e2c9fb9b
RH
996/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
997 not known (or known not) to be defined in this file. */
998
999int
1000global_symbolic_operand (op, mode)
1001 rtx op;
1002 enum machine_mode mode;
1003{
1004 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1005 return 0;
1006
1007 if (GET_CODE (op) == CONST
1008 && GET_CODE (XEXP (op, 0)) == PLUS
1009 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
1010 op = XEXP (XEXP (op, 0), 0);
1011
1012 if (GET_CODE (op) != SYMBOL_REF)
1013 return 0;
1014
1015 return ! local_symbol_p (op);
1016}
1017
6bcf5f0a
RK
1018/* Return 1 if OP is a valid operand for the MEM of a CALL insn. */
1019
1020int
1021call_operand (op, mode)
1022 rtx op;
1023 enum machine_mode mode;
1024{
1025 if (mode != Pmode)
1026 return 0;
1027
be7b80f4
RH
1028 if (GET_CODE (op) == REG)
1029 {
1030 if (TARGET_ABI_OSF)
99407cf2
RH
1031 {
1032 /* Disallow virtual registers to cope with pathalogical test cases
1033 such as compile/930117-1.c in which the virtual reg decomposes
1034 to the frame pointer. Which is a hard reg that is not $27. */
1035 return (REGNO (op) == 27 || REGNO (op) > LAST_VIRTUAL_REGISTER);
1036 }
be7b80f4
RH
1037 else
1038 return 1;
1039 }
e2c9fb9b
RH
1040 if (TARGET_ABI_UNICOSMK)
1041 return 0;
1042 if (GET_CODE (op) == SYMBOL_REF)
1043 return 1;
be7b80f4
RH
1044
1045 return 0;
6bcf5f0a
RK
1046}
1047
30102605
RH
1048/* Returns 1 if OP is a symbolic operand, i.e. a symbol_ref or a label_ref,
1049 possibly with an offset. */
1050
1051int
1052symbolic_operand (op, mode)
1053 register rtx op;
1054 enum machine_mode mode;
1055{
1056 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
1057 return 0;
1058 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1059 return 1;
1060 if (GET_CODE (op) == CONST
1061 && GET_CODE (XEXP (op,0)) == PLUS
1062 && GET_CODE (XEXP (XEXP (op,0), 0)) == SYMBOL_REF
1063 && GET_CODE (XEXP (XEXP (op,0), 1)) == CONST_INT)
1064 return 1;
1065 return 0;
1066}
1067
a6f12d7c
RK
1068/* Return 1 if OP is a valid Alpha comparison operator. Here we know which
1069 comparisons are valid in which insn. */
1070
1071int
1072alpha_comparison_operator (op, mode)
1073 register rtx op;
1074 enum machine_mode mode;
1075{
1076 enum rtx_code code = GET_CODE (op);
1077
1eb8759b 1078 if (mode != GET_MODE (op) && mode != VOIDmode)
a6f12d7c
RK
1079 return 0;
1080
1081 return (code == EQ || code == LE || code == LT
a0e5a544 1082 || code == LEU || code == LTU);
a6f12d7c
RK
1083}
1084
8f4773ea
RH
1085/* Return 1 if OP is a valid Alpha comparison operator against zero.
1086 Here we know which comparisons are valid in which insn. */
1087
1088int
1089alpha_zero_comparison_operator (op, mode)
1090 register rtx op;
1091 enum machine_mode mode;
1092{
1093 enum rtx_code code = GET_CODE (op);
1094
1095 if (mode != GET_MODE (op) && mode != VOIDmode)
1096 return 0;
1097
1098 return (code == EQ || code == NE || code == LE || code == LT
1099 || code == LEU || code == LTU);
1100}
1101
5bf6c48a
RK
1102/* Return 1 if OP is a valid Alpha swapped comparison operator. */
1103
1104int
1105alpha_swapped_comparison_operator (op, mode)
1106 register rtx op;
1107 enum machine_mode mode;
1108{
1109 enum rtx_code code = GET_CODE (op);
1110
1eb8759b
RH
1111 if ((mode != GET_MODE (op) && mode != VOIDmode)
1112 || GET_RTX_CLASS (code) != '<')
5bf6c48a
RK
1113 return 0;
1114
1115 code = swap_condition (code);
1116 return (code == EQ || code == LE || code == LT
a0e5a544 1117 || code == LEU || code == LTU);
5bf6c48a
RK
1118}
1119
a6f12d7c
RK
1120/* Return 1 if OP is a signed comparison operation. */
1121
1122int
1123signed_comparison_operator (op, mode)
1124 register rtx op;
3c303f52 1125 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c 1126{
1eb8759b 1127 enum rtx_code code = GET_CODE (op);
1d300e19 1128
1eb8759b
RH
1129 if (mode != GET_MODE (op) && mode != VOIDmode)
1130 return 0;
a6f12d7c 1131
1eb8759b
RH
1132 return (code == EQ || code == NE
1133 || code == LE || code == LT
1134 || code == GE || code == GT);
1135}
1136
1137/* Return 1 if OP is a valid Alpha floating point comparison operator.
1138 Here we know which comparisons are valid in which insn. */
1139
1140int
1141alpha_fp_comparison_operator (op, mode)
1142 register rtx op;
1143 enum machine_mode mode;
1144{
1145 enum rtx_code code = GET_CODE (op);
1146
1147 if (mode != GET_MODE (op) && mode != VOIDmode)
1148 return 0;
1149
1150 return (code == EQ || code == LE || code == LT || code == UNORDERED);
a6f12d7c
RK
1151}
1152
1153/* Return 1 if this is a divide or modulus operator. */
1154
1155int
1156divmod_operator (op, mode)
1157 register rtx op;
3c303f52 1158 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
1159{
1160 switch (GET_CODE (op))
1161 {
1162 case DIV: case MOD: case UDIV: case UMOD:
1163 return 1;
1d300e19
KG
1164
1165 default:
1166 break;
a6f12d7c
RK
1167 }
1168
1169 return 0;
1170}
1171
/* Return 1 if this memory address is a known aligned register plus
   a constant.  It must be a valid address.  This means that we can do
   this as an aligned reference plus some offset.

   Take into account what reload will do.  */

int
aligned_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx base;

  if (reload_in_progress)
    {
      /* A pseudo that did not get a hard register stands for its stack
	 slot; substitute the memory location it will become.  */
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];
	  if (op == 0)
	    return 0;
	}
    }

  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);

  /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
     sorts of constructs.  Dig for the real base register.  */
  if (reload_in_progress
      && GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == PLUS)
    base = XEXP (XEXP (op, 0), 0);
  else
    {
      if (! memory_address_p (mode, op))
	return 0;
      base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
    }

  /* Aligned means the base register is known to be at least 32-bit
     (4-byte) aligned.  */
  return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) >= 32);
}
1219
/* Similar, but return 1 if OP is a MEM which is not alignable.  */

int
unaligned_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx base;

  if (reload_in_progress)
    {
      /* A pseudo that did not get a hard register stands for its stack
	 slot; substitute the memory location it will become.  */
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];
	  if (op == 0)
	    return 0;
	}
    }

  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);

  /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
     sorts of constructs.  Dig for the real base register.  */
  if (reload_in_progress
      && GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == PLUS)
    base = XEXP (XEXP (op, 0), 0);
  else
    {
      if (! memory_address_p (mode, op))
	return 0;
      base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
    }

  /* Unaligned means the base register's known alignment is below
     32 bits -- the mirror image of aligned_memory_operand.  */
  return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) < 32);
}
1263
1264/* Return 1 if OP is either a register or an unaligned memory location. */
1265
1266int
1267reg_or_unaligned_mem_operand (op, mode)
1268 rtx op;
1269 enum machine_mode mode;
1270{
1271 return register_operand (op, mode) || unaligned_memory_operand (op, mode);
a6f12d7c
RK
1272}
1273
1274/* Return 1 if OP is any memory location. During reload a pseudo matches. */
1275
1276int
1277any_memory_operand (op, mode)
1278 register rtx op;
3c303f52 1279 enum machine_mode mode ATTRIBUTE_UNUSED;
a6f12d7c
RK
1280{
1281 return (GET_CODE (op) == MEM
1282 || (GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == REG)
1283 || (reload_in_progress && GET_CODE (op) == REG
1284 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1285 || (reload_in_progress && GET_CODE (op) == SUBREG
1286 && GET_CODE (SUBREG_REG (op)) == REG
1287 && REGNO (SUBREG_REG (op)) >= FIRST_PSEUDO_REGISTER));
1288}
1289
40b80dad
RH
1290/* Returns 1 if OP is not an eliminable register.
1291
1292 This exists to cure a pathological abort in the s8addq (et al) patterns,
1293
1294 long foo () { long t; bar(); return (long) &t * 26107; }
1295
1296 which run afoul of a hack in reload to cure a (presumably) similar
1297 problem with lea-type instructions on other targets. But there is
1298 one of us and many of them, so work around the problem by selectively
1299 preventing combine from making the optimization. */
1300
1301int
1302reg_not_elim_operand (op, mode)
1303 register rtx op;
1304 enum machine_mode mode;
1305{
1306 rtx inner = op;
1307 if (GET_CODE (op) == SUBREG)
1308 inner = SUBREG_REG (op);
1309 if (inner == frame_pointer_rtx || inner == arg_pointer_rtx)
1310 return 0;
1311
1312 return register_operand (op, mode);
1313}
9c0e94a5 1314
67070f5c 1315/* Return 1 is OP is a memory location that is not a reference (using
ab87f8c8
JL
1316 an AND) to an unaligned location. Take into account what reload
1317 will do. */
1318
1319int
1320normal_memory_operand (op, mode)
1321 register rtx op;
df45c7ea 1322 enum machine_mode mode ATTRIBUTE_UNUSED;
ab87f8c8 1323{
4e46365b 1324 if (reload_in_progress)
ab87f8c8 1325 {
4e46365b
RH
1326 rtx tmp = op;
1327 if (GET_CODE (tmp) == SUBREG)
1328 tmp = SUBREG_REG (tmp);
1329 if (GET_CODE (tmp) == REG
1330 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
1331 {
1332 op = reg_equiv_memory_loc[REGNO (tmp)];
ab87f8c8 1333
4e46365b
RH
1334 /* This may not have been assigned an equivalent address if it will
1335 be eliminated. In that case, it doesn't matter what we do. */
1336 if (op == 0)
1337 return 1;
1338 }
ab87f8c8
JL
1339 }
1340
1341 return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) != AND;
1342}
67070f5c
RH
1343
1344/* Accept a register, but not a subreg of any kind. This allows us to
1345 avoid pathological cases in reload wrt data movement common in
1346 int->fp conversion. */
1347
1348int
1349reg_no_subreg_operand (op, mode)
1350 register rtx op;
1351 enum machine_mode mode;
1352{
f6598df3 1353 if (GET_CODE (op) != REG)
67070f5c
RH
1354 return 0;
1355 return register_operand (op, mode);
1356}
3611aef0 1357
5519a4f9 1358/* Recognize an addition operation that includes a constant. Used to
3611aef0
RH
1359 convince reload to canonize (plus (plus reg c1) c2) during register
1360 elimination. */
1361
1362int
1363addition_operation (op, mode)
1364 register rtx op;
1365 enum machine_mode mode;
1366{
1367 if (GET_MODE (op) != mode && mode != VOIDmode)
1368 return 0;
1369 if (GET_CODE (op) == PLUS
1370 && register_operand (XEXP (op, 0), mode)
1371 && GET_CODE (XEXP (op, 1)) == CONST_INT
1372 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op, 1)), 'K'))
1373 return 1;
1374 return 0;
1375}
1376
551cc6fd
RH
1377/* Implements CONST_OK_FOR_LETTER_P. Return true if the value matches
1378 the range defined for C in [I-P]. */
1379
1380bool
1381alpha_const_ok_for_letter_p (value, c)
1382 HOST_WIDE_INT value;
1383 int c;
1384{
1385 switch (c)
1386 {
1387 case 'I':
1388 /* An unsigned 8 bit constant. */
1389 return (unsigned HOST_WIDE_INT) value < 0x100;
1390 case 'J':
1391 /* The constant zero. */
1392 return value == 0;
1393 case 'K':
1394 /* A signed 16 bit constant. */
1395 return (unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000;
1396 case 'L':
1397 /* A shifted signed 16 bit constant appropriate for LDAH. */
1398 return ((value & 0xffff) == 0
1399 && ((value) >> 31 == -1 || value >> 31 == 0));
1400 case 'M':
1401 /* A constant that can be AND'ed with using a ZAP insn. */
1402 return zap_mask (value);
1403 case 'N':
1404 /* A complemented unsigned 8 bit constant. */
1405 return (unsigned HOST_WIDE_INT) (~ value) < 0x100;
1406 case 'O':
1407 /* A negated unsigned 8 bit constant. */
1408 return (unsigned HOST_WIDE_INT) (- value) < 0x100;
1409 case 'P':
1410 /* The constant 1, 2 or 3. */
1411 return value == 1 || value == 2 || value == 3;
1412
1413 default:
1414 return false;
1415 }
1416}
1417
1418/* Implements CONST_DOUBLE_OK_FOR_LETTER_P. Return true if VALUE
1419 matches for C in [GH]. */
1420
1421bool
1422alpha_const_double_ok_for_letter_p (value, c)
1423 rtx value;
1424 int c;
1425{
1426 switch (c)
1427 {
1428 case 'G':
1429 /* The floating point zero constant. */
1430 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
1431 && value == CONST0_RTX (GET_MODE (value)));
1432
1433 case 'H':
1434 /* A valid operand of a ZAP insn. */
1435 return (GET_MODE (value) == VOIDmode
1436 && zap_mask (CONST_DOUBLE_LOW (value))
1437 && zap_mask (CONST_DOUBLE_HIGH (value)));
1438
1439 default:
1440 return false;
1441 }
1442}
1443
1444/* Implements CONST_DOUBLE_OK_FOR_LETTER_P. Return true if VALUE
1445 matches for C. */
1446
1447bool
1448alpha_extra_constraint (value, c)
1449 rtx value;
1450 int c;
1451{
1452 switch (c)
1453 {
1454 case 'Q':
1455 return normal_memory_operand (value, VOIDmode);
1456 case 'R':
1457 return direct_call_operand (value, Pmode);
1458 case 'S':
1459 return (GET_CODE (value) == CONST_INT
1460 && (unsigned HOST_WIDE_INT) INTVAL (value) < 64);
1461 case 'T':
1462 return GET_CODE (value) == HIGH;
1463 case 'U':
1464 return TARGET_ABI_UNICOSMK && symbolic_operand (value, VOIDmode);
1465
1466 default:
1467 return false;
1468 }
1469}
1470
39157bcc
RH
1471/* Return 1 if this function can directly return via $26. */
1472
1473int
1474direct_return ()
1475{
30102605 1476 return (! TARGET_ABI_OPEN_VMS && ! TARGET_ABI_UNICOSMK
be7b80f4
RH
1477 && reload_completed
1478 && alpha_sa_size () == 0
39157bcc
RH
1479 && get_frame_size () == 0
1480 && current_function_outgoing_args_size == 0
1481 && current_function_pretend_args_size == 0);
1482}
25e21aed
RH
1483
1484/* Return the ADDR_VEC associated with a tablejump insn. */
1485
1486rtx
1487alpha_tablejump_addr_vec (insn)
1488 rtx insn;
1489{
1490 rtx tmp;
1491
1492 tmp = JUMP_LABEL (insn);
1493 if (!tmp)
1494 return NULL_RTX;
1495 tmp = NEXT_INSN (tmp);
1496 if (!tmp)
1497 return NULL_RTX;
1498 if (GET_CODE (tmp) == JUMP_INSN
1499 && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
1500 return PATTERN (tmp);
1501 return NULL_RTX;
1502}
1503
1504/* Return the label of the predicted edge, or CONST0_RTX if we don't know. */
1505
1506rtx
1507alpha_tablejump_best_label (insn)
1508 rtx insn;
1509{
1510 rtx jump_table = alpha_tablejump_addr_vec (insn);
1511 rtx best_label = NULL_RTX;
1512
1513 /* ??? Once the CFG doesn't keep getting completely rebuilt, look
1514 there for edge frequency counts from profile data. */
1515
1516 if (jump_table)
1517 {
1518 int n_labels = XVECLEN (jump_table, 1);
1519 int best_count = -1;
1520 int i, j;
1521
1522 for (i = 0; i < n_labels; i++)
1523 {
1524 int count = 1;
1525
1526 for (j = i + 1; j < n_labels; j++)
1527 if (XEXP (XVECEXP (jump_table, 1, i), 0)
1528 == XEXP (XVECEXP (jump_table, 1, j), 0))
1529 count++;
1530
1531 if (count > best_count)
1532 best_count = count, best_label = XVECEXP (jump_table, 1, i);
1533 }
1534 }
1535
1536 return best_label ? best_label : const0_rtx;
1537}
3611aef0 1538\f
62918bd3
RH
1539/* Return true if the function DECL will be placed in the default text
1540 section. */
1541/* ??? Ideally we'd be able to always move from a SYMBOL_REF back to the
1542 decl, as that would allow us to determine if two functions are in the
1543 same section, which is what we really want to know. */
1544
1545static bool
1546decl_in_text_section (decl)
1547 tree decl;
1548{
1549 return (DECL_SECTION_NAME (decl) == NULL_TREE
1550 && ! (flag_function_sections
1551 || (targetm.have_named_sections
1552 && DECL_ONE_ONLY (decl))));
1553}
1554
1eb356b9
RH
/* If we are referencing a function that is static, make the SYMBOL_REF
   special.  We use this to indicate we can branch to this function
   without setting PV or restoring GP.

   If this is a variable that is known to be defined locally, add "@v"
   to the name.  If in addition the variable is to go in .sdata/.sbss,
   then add "@s" instead.  */

void
alpha_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  const char *symbol_str;
  bool is_local, is_small;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* We mark public functions once they are emitted; otherwise we
	 don't know that they exist in this unit of translation.  */
      if (TREE_PUBLIC (decl))
	return;
      /* Do not mark functions that are not in .text; otherwise we
	 don't know that they are near enough for a direct branch.  */
      if (! decl_in_text_section (decl))
	return;

      SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
      return;
    }

  /* Early out if we're not going to do anything with this data.  */
  if (! TARGET_EXPLICIT_RELOCS)
    return;

  /* Careful not to prod global register variables.  */
  if (TREE_CODE (decl) != VAR_DECL
      || GET_CODE (DECL_RTL (decl)) != MEM
      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != SYMBOL_REF)
    return;

  symbol_str = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  /* A variable is considered "local" if it is defined in this module.  */

  /* Local binding occurs for any non-default visibility.  */
  if (MODULE_LOCAL_P (decl))
    is_local = true;
  /* Otherwise, variables defined outside this object may not be local.  */
  else if (DECL_EXTERNAL (decl))
    is_local = false;
  /* Linkonce and weak data is never local.  */
  else if (DECL_ONE_ONLY (decl) || DECL_WEAK (decl))
    is_local = false;
  /* Static variables are always local.  */
  else if (! TREE_PUBLIC (decl))
    is_local = true;
  /* If PIC, then assume that any global name can be overridden by
     symbols resolved from other modules.  */
  else if (flag_pic)
    is_local = false;
  /* Uninitialized COMMON variable may be unified with symbols
     resolved from other modules.  */
  else if (DECL_COMMON (decl)
	   && (DECL_INITIAL (decl) == NULL
	       || DECL_INITIAL (decl) == error_mark_node))
    is_local = false;
  /* Otherwise we're left with initialized (or non-common) global data
     which is of necessity defined locally.  */
  else
    is_local = true;

  /* Determine if DECL will wind up in .sdata/.sbss.  */

  is_small = false;
  if (DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sbss") == 0)
	is_small = true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      /* If the variable has already been defined in the output file, then it
	 is too late to put it in sdata if it wasn't put there in the first
	 place.  The test is here rather than above, because if it is already
	 in sdata, then it can stay there.  */

      if (TREE_ASM_WRITTEN (decl))
	;

      /* If this is an incomplete type with size 0, then we can't put it in
	 sdata because it might be too big when completed.  */
      else if (size > 0 && size <= g_switch_value)
	is_small = true;
    }

  /* Finally, encode this into the symbol string.  */
  if (is_local)
    {
      const char *string;
      char *newstr;
      size_t len;

      /* Strip any existing "@v"/"@s" prefix so that a change in
	 smallness is re-encoded rather than stacked.  */
      if (symbol_str[0] == '@')
	{
	  if (symbol_str[1] == (is_small ? 's' : 'v'))
	    return;
	  symbol_str += 2;
	}

      /* LEN counts the terminating NUL, which memcpy copies too.  */
      len = strlen (symbol_str) + 1;
      newstr = alloca (len + 2);

      newstr[0] = '@';
      newstr[1] = (is_small ? 's' : 'v');
      memcpy (newstr + 2, symbol_str, len);

      string = ggc_alloc_string (newstr, len + 2 - 1);
      XSTR (XEXP (DECL_RTL (decl), 0), 0) = string;
    }
  else if (symbol_str[0] == '@')
    abort ();
}
1683
a39bdefc
RH
/* legitimate_address_p recognizes an RTL expression that is a valid
   memory address for an instruction.  The MODE argument is the
   machine mode for the MEM expression that wants to use this address.

   For Alpha, we have either a constant address or the sum of a
   register and a constant address, or just a register.  For DImode,
   any of those forms can be surrounded with an AND that clears the
   low-order three bits; this is an "unaligned" access.  */

bool
alpha_legitimate_address_p (mode, x, strict)
     enum machine_mode mode;
     rtx x;
     int strict;
{
  /* If this is an ldq_u type address, discard the outer AND.  */
  if (mode == DImode
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -8)
    x = XEXP (x, 0);

  /* Discard non-paradoxical subregs.  */
  if (GET_CODE (x) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (x))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
    x = SUBREG_REG (x);

  /* Unadorned general registers are valid.  */
  if (REG_P (x)
      && (strict
	  ? STRICT_REG_OK_FOR_BASE_P (x)
	  : NONSTRICT_REG_OK_FOR_BASE_P (x)))
    return true;

  /* Constant addresses (i.e. +/- 32k) are valid.  */
  if (CONSTANT_ADDRESS_P (x))
    return true;

  /* Register plus a small constant offset is valid.  */
  if (GET_CODE (x) == PLUS)
    {
      rtx ofs = XEXP (x, 1);
      x = XEXP (x, 0);

      /* Discard non-paradoxical subregs.  */
      if (GET_CODE (x) == SUBREG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	x = SUBREG_REG (x);

      if (REG_P (x))
	{
	  /* Before strict checking, a pseudo based off the frame
	     pointer may carry any constant offset; elimination will
	     fix it up later.  */
	  if (! strict
	      && NONSTRICT_REG_OK_FP_BASE_P (x)
	      && GET_CODE (ofs) == CONST_INT)
	    return true;
	  if ((strict
	       ? STRICT_REG_OK_FOR_BASE_P (x)
	       : NONSTRICT_REG_OK_FOR_BASE_P (x))
	      && CONSTANT_ADDRESS_P (ofs))
	    return true;
	}
      else if (GET_CODE (x) == ADDRESSOF
	       && GET_CODE (ofs) == CONST_INT)
	return true;
    }

  /* If we're managing explicit relocations, LO_SUM is valid, as
     are small data symbols.  */
  else if (TARGET_EXPLICIT_RELOCS)
    {
      if (small_symbolic_operand (x, Pmode))
	return true;

      if (GET_CODE (x) == LO_SUM)
	{
	  rtx ofs = XEXP (x, 1);
	  x = XEXP (x, 0);

	  /* Discard non-paradoxical subregs.  */
	  if (GET_CODE (x) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (x))
		  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	    x = SUBREG_REG (x);

	  /* Must have a valid base register.  */
	  if (! (REG_P (x)
		 && (strict
		     ? STRICT_REG_OK_FOR_BASE_P (x)
		     : NONSTRICT_REG_OK_FOR_BASE_P (x))))
	    return false;

	  /* The symbol must be local.  */
	  if (local_symbolic_operand (ofs, Pmode))
	    return true;
	}
    }

  return false;
}
1785
aead1ca3
RH
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   SCRATCH may be used as a temporary when new pseudos cannot be
   created (e.g. during reload).  */

rtx
alpha_legitimize_address (x, scratch, mode)
     rtx x;
     rtx scratch;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT addend;

  /* If the address is (plus reg const_int) and the CONST_INT is not a
     valid offset, compute the high part of the constant and add it to
     the register.  Then our address is (plus temp low-part-const).  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
    {
      addend = INTVAL (XEXP (x, 1));
      x = XEXP (x, 0);
      goto split_addend;
    }

  /* If the address is (const (plus FOO const_int)), find the low-order
     part of the CONST_INT.  Then load FOO plus any high-order part of the
     CONST_INT into a register.  Our address is (plus reg low-part-const).
     This is done to reduce the number of GOT entries.  */
  if (!no_new_pseudos
      && GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (x, 0), 1));
      x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
      goto split_addend;
    }

  /* If we have a (plus reg const), emit the load as in (2), then add
     the two registers, and finally generate (plus reg low-part-const) as
     our address.  */
  if (!no_new_pseudos
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
      x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
			       XEXP (XEXP (XEXP (x, 1), 0), 0),
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
      goto split_addend;
    }

  /* If this is a local symbol, split the address into HIGH/LO_SUM parts.  */
  if (TARGET_EXPLICIT_RELOCS && symbolic_operand (x, Pmode))
    {
      if (local_symbolic_operand (x, Pmode))
	{
	  /* Small data symbols are left whole; they are split to
	     LO_SUM form only after reload.  */
	  if (small_symbolic_operand (x, Pmode))
	    return x;
	  else
	    {
	      if (!no_new_pseudos)
		scratch = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, scratch,
				      gen_rtx_HIGH (Pmode, x)));
	      return gen_rtx_LO_SUM (Pmode, scratch, x);
	    }
	}
    }

  return NULL;

 split_addend:
  {
    /* Split ADDEND into a sign-extended low 16 bits (which fits a
       memory displacement), a sign-extended high 32 bits (an ldah
       displacement), and any remainder, adding the pieces from most
       to least significant.  */
    HOST_WIDE_INT low, high;

    low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
    addend -= low;
    high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
    addend -= high;

    if (addend)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);
    if (high)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);

    return plus_constant (x, low);
  }
}
1882
551cc6fd
RH
1883/* For TARGET_EXPLICIT_RELOCS, we don't obfuscate a SYMBOL_REF to a
1884 small symbolic operand until after reload. At which point we need
1885 to replace (mem (symbol_ref)) with (mem (lo_sum $29 symbol_ref))
1886 so that sched2 has the proper dependency information. */
1887
1888int
a615ca3e 1889some_small_symbolic_operand (x, mode)
551cc6fd
RH
1890 rtx x;
1891 enum machine_mode mode ATTRIBUTE_UNUSED;
1892{
a615ca3e 1893 return for_each_rtx (&x, some_small_symbolic_operand_1, NULL);
1e7e480e
RH
1894}
1895
1896static int
a615ca3e 1897some_small_symbolic_operand_1 (px, data)
1e7e480e
RH
1898 rtx *px;
1899 void *data ATTRIBUTE_UNUSED;
1900{
1901 rtx x = *px;
551cc6fd 1902
a615ca3e
RH
1903 /* Don't re-split. */
1904 if (GET_CODE (x) == LO_SUM)
1905 return -1;
1e7e480e 1906
a615ca3e 1907 return small_symbolic_operand (x, Pmode) != 0;
551cc6fd
RH
1908}
1909
1910rtx
a615ca3e 1911split_small_symbolic_operand (x)
551cc6fd
RH
1912 rtx x;
1913{
8b9b74a9 1914 x = copy_insn (x);
a615ca3e 1915 for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
1e7e480e
RH
1916 return x;
1917}
551cc6fd 1918
1e7e480e 1919static int
a615ca3e 1920split_small_symbolic_operand_1 (px, data)
1e7e480e
RH
1921 rtx *px;
1922 void *data ATTRIBUTE_UNUSED;
1923{
1924 rtx x = *px;
51c561e3 1925
a615ca3e
RH
1926 /* Don't re-split. */
1927 if (GET_CODE (x) == LO_SUM)
1928 return -1;
551cc6fd 1929
1e7e480e
RH
1930 if (small_symbolic_operand (x, Pmode))
1931 {
1932 x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
1933 *px = x;
a615ca3e 1934 return -1;
1e7e480e
RH
1935 }
1936
a615ca3e 1937 return 0;
551cc6fd
RH
1938}
1939
aead1ca3
RH
/* Try a machine-dependent way of reloading an illegitimate address
   operand.  If we find one, push the reload and return the new rtx.  */

rtx
alpha_legitimize_reload_address (x, mode, opnum, type, ind_levels)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Reload the inner (plus reg const_hi) into a base register.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  /* We wish to handle large displacements off a base register by
     splitting the addend across an ldah and the mem insn.  This
     cuts number of extra insns needed from 3 to 1.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* LOW is the sign-extended low 16 bits; HIGH is the rest,
	 rounded to a multiple of 0x10000 for use with ldah.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	return NULL_RTX;

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */
      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  return NULL_RTX;
}
1997\f
a6f12d7c
RK
/* REF is an alignable memory location.  Place an aligned SImode
   reference into *PALIGNED_MEM and the number of bits to shift into
   *PBITNUM.  During reload, out-of-range stack-slot addresses are
   resolved via find_replacement.  */

void
get_aligned_mem (ref, paligned_mem, pbitnum)
     rtx ref;
     rtx *paligned_mem, *pbitnum;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      /* Substitute the reload replacement for the address and insist
	 that the result be a valid address.  */
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
	abort ();
    }
  else
    {
      base = XEXP (ref, 0);
    }

  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  /* Widen to SImode at the containing 4-byte-aligned position.  */
  *paligned_mem
    = widen_memory_access (ref, SImode, (offset & ~3) - offset);

  if (WORDS_BIG_ENDIAN)
    *pbitnum = GEN_INT (32 - (GET_MODE_BITSIZE (GET_MODE (ref))
			      + (offset & 3) * 8));
  else
    *pbitnum = GEN_INT ((offset & 3) * 8);
}
2039
adb18b68
RK
2040/* Similar, but just get the address. Handle the two reload cases.
2041 Add EXTRA_OFFSET to the address we return. */
a6f12d7c
RK
2042
2043rtx
adb18b68 2044get_unaligned_address (ref, extra_offset)
a6f12d7c 2045 rtx ref;
adb18b68 2046 int extra_offset;
a6f12d7c
RK
2047{
2048 rtx base;
2049 HOST_WIDE_INT offset = 0;
2050
4e46365b
RH
2051 if (GET_CODE (ref) != MEM)
2052 abort ();
a6f12d7c 2053
4e46365b
RH
2054 if (reload_in_progress
2055 && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
96043e7e 2056 {
96043e7e 2057 base = find_replacement (&XEXP (ref, 0));
4e46365b
RH
2058
2059 if (! memory_address_p (GET_MODE (ref), base))
2060 abort ();
96043e7e 2061 }
a6f12d7c 2062 else
96043e7e 2063 {
96043e7e
RH
2064 base = XEXP (ref, 0);
2065 }
a6f12d7c
RK
2066
2067 if (GET_CODE (base) == PLUS)
2068 offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
2069
adb18b68 2070 return plus_constant (base, offset + extra_offset);
a6f12d7c 2071}
3611aef0 2072
551cc6fd
RH
2073/* On the Alpha, all (non-symbolic) constants except zero go into
2074 a floating-point register via memory. Note that we cannot
2075 return anything that is not a subset of CLASS, and that some
2076 symbolic constants cannot be dropped to memory. */
2077
2078enum reg_class
2079alpha_preferred_reload_class(x, class)
2080 rtx x;
2081 enum reg_class class;
2082{
2083 /* Zero is present in any register class. */
2084 if (x == CONST0_RTX (GET_MODE (x)))
2085 return class;
2086
2087 /* These sorts of constants we can easily drop to memory. */
2088 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2089 {
2090 if (class == FLOAT_REGS)
2091 return NO_REGS;
2092 if (class == ALL_REGS)
2093 return GENERAL_REGS;
2094 return class;
2095 }
2096
2097 /* All other kinds of constants should not (and in the case of HIGH
2098 cannot) be dropped to memory -- instead we use a GENERAL_REGS
2099 secondary reload. */
2100 if (CONSTANT_P (x))
2101 return (class == ALL_REGS ? GENERAL_REGS : class);
2102
2103 return class;
2104}
2105
3611aef0
RH
2106/* Loading and storing HImode or QImode values to and from memory
2107 usually requires a scratch register. The exceptions are loading
2108 QImode and HImode from an aligned address to a general register
2109 unless byte instructions are permitted.
2110
2111 We also cannot load an unaligned address or a paradoxical SUBREG
2112 into an FP register.
2113
2114 We also cannot do integral arithmetic into FP regs, as might result
2115 from register elimination into a DImode fp register. */
2116
2117enum reg_class
2118secondary_reload_class (class, mode, x, in)
2119 enum reg_class class;
2120 enum machine_mode mode;
2121 rtx x;
2122 int in;
2123{
41bd3d41 2124 if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
35a414df 2125 {
41bd3d41
RH
2126 if (GET_CODE (x) == MEM
2127 || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
2128 || (GET_CODE (x) == SUBREG
2129 && (GET_CODE (SUBREG_REG (x)) == MEM
2130 || (GET_CODE (SUBREG_REG (x)) == REG
2131 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
35a414df
RH
2132 {
2133 if (!in || !aligned_memory_operand(x, mode))
2134 return GENERAL_REGS;
2135 }
2136 }
3611aef0
RH
2137
2138 if (class == FLOAT_REGS)
2139 {
2140 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
2141 return GENERAL_REGS;
2142
2143 if (GET_CODE (x) == SUBREG
2144 && (GET_MODE_SIZE (GET_MODE (x))
2145 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2146 return GENERAL_REGS;
2147
1eb356b9
RH
2148 if (in && INTEGRAL_MODE_P (mode)
2149 && ! (memory_operand (x, mode) || x == const0_rtx))
3611aef0
RH
2150 return GENERAL_REGS;
2151 }
2152
2153 return NO_REGS;
2154}
a6f12d7c
RK
2155\f
2156/* Subfunction of the following function. Update the flags of any MEM
2157 found in part of X. */
2158
2159static void
80db34d8 2160alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p)
a6f12d7c 2161 rtx x;
80db34d8 2162 int in_struct_p, volatile_p, unchanging_p;
a6f12d7c
RK
2163{
2164 int i;
2165
2166 switch (GET_CODE (x))
2167 {
2168 case SEQUENCE:
2169 case PARALLEL:
2170 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2171 alpha_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p,
80db34d8 2172 unchanging_p);
a6f12d7c
RK
2173 break;
2174
2175 case INSN:
2176 alpha_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p,
80db34d8 2177 unchanging_p);
a6f12d7c
RK
2178 break;
2179
2180 case SET:
2181 alpha_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p,
80db34d8 2182 unchanging_p);
a6f12d7c 2183 alpha_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p,
80db34d8 2184 unchanging_p);
a6f12d7c
RK
2185 break;
2186
2187 case MEM:
2188 MEM_IN_STRUCT_P (x) = in_struct_p;
2189 MEM_VOLATILE_P (x) = volatile_p;
2190 RTX_UNCHANGING_P (x) = unchanging_p;
80db34d8
RH
2191 /* Sadly, we cannot use alias sets because the extra aliasing
2192 produced by the AND interferes. Given that two-byte quantities
2193 are the only thing we would be able to differentiate anyway,
2194 there does not seem to be any point in convoluting the early
2195 out of the alias check. */
a6f12d7c 2196 break;
1d300e19
KG
2197
2198 default:
2199 break;
a6f12d7c
RK
2200 }
2201}
2202
2203/* Given INSN, which is either an INSN or a SEQUENCE generated to
2204 perform a memory operation, look for any MEMs in either a SET_DEST or
2205 a SET_SRC and copy the in-struct, unchanging, and volatile flags from
2206 REF into each of the MEMs found. If REF is not a MEM, don't do
2207 anything. */
2208
2209void
2210alpha_set_memflags (insn, ref)
2211 rtx insn;
2212 rtx ref;
2213{
80db34d8 2214 int in_struct_p, volatile_p, unchanging_p;
3873d24b
RH
2215
2216 if (GET_CODE (ref) != MEM)
a6f12d7c
RK
2217 return;
2218
3873d24b
RH
2219 in_struct_p = MEM_IN_STRUCT_P (ref);
2220 volatile_p = MEM_VOLATILE_P (ref);
2221 unchanging_p = RTX_UNCHANGING_P (ref);
3873d24b
RH
2222
2223 /* This is only called from alpha.md, after having had something
2224 generated from one of the insn patterns. So if everything is
2225 zero, the pattern is already up-to-date. */
80db34d8 2226 if (! in_struct_p && ! volatile_p && ! unchanging_p)
3873d24b
RH
2227 return;
2228
80db34d8 2229 alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p);
a6f12d7c
RK
2230}
2231\f
2232/* Try to output insns to set TARGET equal to the constant C if it can be
fd94addf
RK
2233 done in less than N insns. Do all computations in MODE. Returns the place
2234 where the output has been placed if it can be done and the insns have been
2235 emitted. If it would take more than N insns, zero is returned and no
2236 insns and emitted. */
a6f12d7c 2237
fd94addf
RK
2238rtx
2239alpha_emit_set_const (target, mode, c, n)
a6f12d7c 2240 rtx target;
fd94addf 2241 enum machine_mode mode;
a6f12d7c
RK
2242 HOST_WIDE_INT c;
2243 int n;
9102cd1f 2244{
b76b08ef
RK
2245 rtx result = 0;
2246 rtx orig_target = target;
9102cd1f
RK
2247 int i;
2248
b76b08ef
RK
2249 /* If we can't make any pseudos, TARGET is an SImode hard register, we
2250 can't load this constant in one insn, do this in DImode. */
2251 if (no_new_pseudos && mode == SImode
2252 && GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER
2253 && (result = alpha_emit_set_const_1 (target, mode, c, 1)) == 0)
2254 {
2255 target = gen_lowpart (DImode, target);
2256 mode = DImode;
2257 }
2258
285a5742 2259 /* Try 1 insn, then 2, then up to N. */
1ef9531b
RH
2260 for (i = 1; i <= n; i++)
2261 {
2262 result = alpha_emit_set_const_1 (target, mode, c, i);
2263 if (result)
2264 {
2265 rtx insn = get_last_insn ();
2266 rtx set = single_set (insn);
2267 if (! CONSTANT_P (SET_SRC (set)))
2268 set_unique_reg_note (get_last_insn (), REG_EQUAL, GEN_INT (c));
2269 break;
2270 }
2271 }
9102cd1f 2272
b76b08ef
RK
2273 /* Allow for the case where we changed the mode of TARGET. */
2274 if (result == target)
2275 result = orig_target;
2276
2277 return result;
9102cd1f
RK
2278}
2279
/* Internal routine for the above to check for N or below insns.
   Returns the register holding the synthesized constant on success,
   or 0 if C cannot be built in at most N instructions.  */

static rtx
alpha_emit_set_const_1 (target, mode, c, n)
     rtx target;
     enum machine_mode mode;
     HOST_WIDE_INT c;
     int n;
{
  HOST_WIDE_INT new;
  int i, bits;
  /* Use a pseudo if highly optimizing and still generating RTL.  */
  rtx subtarget
    = (flag_expensive_optimizations && !no_new_pseudos ? 0 : target);
  rtx temp, insn;

  /* If this is a sign-extended 32-bit constant, we can do this in at most
     three insns, so do it if we have enough insns left.  We always have
     a sign-extended 32-bit constant when compiling on a narrow machine.  */

  if (HOST_BITS_PER_WIDE_INT != 64
      || c >> 31 == -1 || c >> 31 == 0)
    {
      /* LOW is the sign-extended low 16 bits (the lda displacement);
	 HIGH is the sign-extended next 16 bits (the ldah displacement).  */
      HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT tmp1 = c - low;
      HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT extra = 0;

      /* If HIGH will be interpreted as negative but the constant is
	 positive, we must adjust it to do two ldha insns.  */

      if ((high & 0x8000) != 0 && c >= 0)
	{
	  extra = 0x4000;
	  tmp1 -= 0x40000000;
	  high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
	}

      /* C fits in a single lda (c == low) or a single ldah.  */
      if (c == low || (low == 0 && extra == 0))
	{
	  /* We used to use copy_to_suggested_reg (GEN_INT (c), target, mode)
	     but that meant that we can't handle INT_MIN on 32-bit machines
	     (like NT/Alpha), because we recurse indefinitely through
	     emit_move_insn to gen_movdi.  So instead, since we know exactly
	     what we want, create it explicitly.  */

	  if (target == NULL)
	    target = gen_reg_rtx (mode);
	  emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
	  return target;
	}
      else if (n >= 2 + (extra != 0))
	{
	  /* ldah; [ldah;] lda sequence: 2 or 3 insns.  */
	  temp = copy_to_suggested_reg (GEN_INT (high << 16), subtarget, mode);

	  /* As of 2002-02-23, addsi3 is only available when not optimizing.
	     This means that if we go through expand_binop, we'll try to
	     generate extensions, etc, which will require new pseudos, which
	     will fail during some split phases.  The SImode add patterns
	     still exist, but are not named.  So build the insns by hand.  */

	  if (extra != 0)
	    {
	      if (! subtarget)
		subtarget = gen_reg_rtx (mode);
	      insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16));
	      insn = gen_rtx_SET (VOIDmode, subtarget, insn);
	      emit_insn (insn);
	      temp = subtarget;
	    }

	  if (target == NULL)
	    target = gen_reg_rtx (mode);
	  insn = gen_rtx_PLUS (mode, temp, GEN_INT (low));
	  insn = gen_rtx_SET (VOIDmode, target, insn);
	  emit_insn (insn);
	  return target;
	}
    }

  /* If we couldn't do it that way, try some other methods.  But if we have
     no instructions left, don't bother.  Likewise, if this is SImode and
     we can't make pseudos, we can't do anything since the expand_binop
     and expand_unop calls will widen and try to make pseudos.  */

  if (n == 1 || (mode == SImode && no_new_pseudos))
    return 0;

  /* Next, see if we can load a related constant and then shift and possibly
     negate it to get the constant we want.  Try this once each increasing
     numbers of insns.  */

  for (i = 1; i < n; i++)
    {
      /* First, see if minus some low bits, we've an easy load of
	 high bits.  */

      new = ((c & 0xffff) ^ 0x8000) - 0x8000;
      if (new != 0
	  && (temp = alpha_emit_set_const (subtarget, mode, c - new, i)) != 0)
	return expand_binop (mode, add_optab, temp, GEN_INT (new),
			     target, 0, OPTAB_WIDEN);

      /* Next try complementing.  */
      if ((temp = alpha_emit_set_const (subtarget, mode, ~ c, i)) != 0)
	return expand_unop (mode, one_cmpl_optab, temp, target, 0);

      /* Next try to form a constant and do a left shift.  We can do this
	 if some low-order bits are zero; the exact_log2 call below tells
	 us that information.  The bits we are shifting out could be any
	 value, but here we'll just try the 0- and sign-extended forms of
	 the constant.  To try to increase the chance of having the same
	 constant in more than one insn, start at the highest number of
	 bits to shift, but try all possibilities in case a ZAPNOT will
	 be useful.  */

      if ((bits = exact_log2 (c & - c)) > 0)
	for (; bits > 0; bits--)
	  if ((temp = (alpha_emit_set_const
		       (subtarget, mode, c >> bits, i))) != 0
	      || ((temp = (alpha_emit_set_const
			   (subtarget, mode,
			    ((unsigned HOST_WIDE_INT) c) >> bits, i)))
		  != 0))
	    return expand_binop (mode, ashl_optab, temp, GEN_INT (bits),
				 target, 0, OPTAB_WIDEN);

      /* Now try high-order zero bits.  Here we try the shifted-in bits as
	 all zero and all ones.  Be careful to avoid shifting outside the
	 mode and to avoid shifting outside the host wide int size.  */
      /* On narrow hosts, don't shift a 1 into the high bit, since we'll
	 confuse the recursive call and set all of the high 32 bits.  */

      if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
		   - floor_log2 (c) - 1 - (HOST_BITS_PER_WIDE_INT < 64))) > 0)
	for (; bits > 0; bits--)
	  if ((temp = alpha_emit_set_const (subtarget, mode,
					    c << bits, i)) != 0
	      || ((temp = (alpha_emit_set_const
			   (subtarget, mode,
			    ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
			    i)))
		  != 0))
	    return expand_binop (mode, lshr_optab, temp, GEN_INT (bits),
				 target, 1, OPTAB_WIDEN);

      /* Now try high-order 1 bits.  We get that with a sign-extension.
	 But one bit isn't enough here.  Be careful to avoid shifting outside
	 the mode and to avoid shifting outside the host wide int size.  */

      if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
		   - floor_log2 (~ c) - 2)) > 0)
	for (; bits > 0; bits--)
	  if ((temp = alpha_emit_set_const (subtarget, mode,
					    c << bits, i)) != 0
	      || ((temp = (alpha_emit_set_const
			   (subtarget, mode,
			    ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
			    i)))
		  != 0))
	    return expand_binop (mode, ashr_optab, temp, GEN_INT (bits),
				 target, 0, OPTAB_WIDEN);
    }

#if HOST_BITS_PER_WIDE_INT == 64
  /* Finally, see if can load a value into the target that is the same as the
     constant except that all bytes that are 0 are changed to be 0xff.  If we
     can, then we can do a ZAPNOT to obtain the desired constant.  */

  new = c;
  for (i = 0; i < 64; i += 8)
    if ((new & ((HOST_WIDE_INT) 0xff << i)) == 0)
      new |= (HOST_WIDE_INT) 0xff << i;

  /* We are only called for SImode and DImode.  If this is SImode, ensure that
     we are sign extended to a full word.  */

  if (mode == SImode)
    new = ((new & 0xffffffff) ^ 0x80000000) - 0x80000000;

  if (new != c && new != -1
      && (temp = alpha_emit_set_const (subtarget, mode, new, n - 1)) != 0)
    return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new),
			 target, 0, OPTAB_WIDEN);
#endif

  return 0;
}
758d2c0c 2468
97aea203
RK
/* Having failed to find a 3 insn sequence in alpha_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with alpha_emit_set_const.  */

rtx
alpha_emit_set_long_const (target, c1, c2)
     rtx target;
     HOST_WIDE_INT c1, c2;
{
  /* D1 and D3 are the sign-extended low 16-bit pieces (lda displacements);
     D2 and D4 are the sign-extended upper halves (ldah displacements) of
     the low and high 32-bit words, respectively.  */
  HOST_WIDE_INT d1, d2, d3, d4;

  /* Decompose the entire word */
#if HOST_BITS_PER_WIDE_INT >= 64
  /* On a wide host the whole value is in C1; C2 must be its sign.  */
  if (c2 != -(c1 < 0))
    abort ();
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  c1 = (c1 - d2) >> 32;
  d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d3;
  d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c1 != d4)
    abort ();
#else
  /* Narrow host: C1 holds the low word, C2 the high word.  */
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c1 != d2)
    abort ();
  /* Carry the sign of the low word into the high word.  */
  c2 += (d2 < 0);
  d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
  c2 -= d3;
  d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c2 != d4)
    abort ();
#endif

  /* Construct the high word */
  if (d4)
    {
      emit_move_insn (target, GEN_INT (d4));
      if (d3)
	emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d3)));
    }
  else
    emit_move_insn (target, GEN_INT (d3));

  /* Shift it into place */
  emit_move_insn (target, gen_rtx_ASHIFT (DImode, target, GEN_INT (32)));

  /* Add in the low bits.  */
  if (d2)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d2)));
  if (d1)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));

  return target;
}
97aea203 2529
23296a36
RH
/* Expand a move instruction; return true if all work is done.
   We don't handle non-bwx subword loads here.  */

bool
alpha_expand_mov (mode, operands)
     enum machine_mode mode;
     rtx *operands;
{
  /* If the output is not a register, the input must be.  */
  if (GET_CODE (operands[0]) == MEM
      && ! reg_or_0_operand (operands[1], mode))
    operands[1] = force_reg (mode, operands[1]);

  /* Allow legitimize_address to perform some simplifications.  */
  if (mode == Pmode && symbolic_operand (operands[1], mode))
    {
      rtx tmp;

      /* With RTL inlining, at -O3, rtl is generated, stored, then actually
	 compiled at the end of compilation.  In the meantime, someone can
	 re-encode-section-info on some symbol changing it e.g. from global
	 to local-not-small.  If this happens, we'd have emitted a plain
	 load rather than a high+losum load and not recognize the insn.

	 So if rtl inlining is in effect, we delay the global/not-global
	 decision until rest_of_compilation by wrapping it in an
	 UNSPEC_SYMBOL.  */
      if (TARGET_EXPLICIT_RELOCS && flag_inline_functions
	  && rtx_equal_function_value_matters
	  && global_symbolic_operand (operands[1], mode))
	{
	  emit_insn (gen_movdi_er_maybe_g (operands[0], operands[1]));
	  return true;
	}

      tmp = alpha_legitimize_address (operands[1], operands[0], mode);
      if (tmp)
	{
	  /* Keep going with the simplified source operand.  */
	  operands[1] = tmp;
	  return false;
	}
    }

  /* Early out for non-constants and valid constants.  */
  if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  if (GET_CODE (operands[1]) == CONST_INT
      || GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      /* I0 is the low word of the value, I1 the high word (or the
	 sign extension of I0 when the host word is wide enough).  */
      HOST_WIDE_INT i0, i1;
      rtx temp = NULL_RTX;

      if (GET_CODE (operands[1]) == CONST_INT)
	{
	  i0 = INTVAL (operands[1]);
	  i1 = -(i0 < 0);
	}
      else if (HOST_BITS_PER_WIDE_INT >= 64)
	{
	  i0 = CONST_DOUBLE_LOW (operands[1]);
	  i1 = -(i0 < 0);
	}
      else
	{
	  /* Narrow host: the value arrives split in two halves.  */
	  i0 = CONST_DOUBLE_LOW (operands[1]);
	  i1 = CONST_DOUBLE_HIGH (operands[1]);
	}

      /* Only try the short-sequence synthesizer when the value fits in
	 a sign-extended HOST_WIDE_INT.  */
      if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
	temp = alpha_emit_set_const (operands[0], mode, i0, 3);

      if (!temp && TARGET_BUILD_CONSTANTS)
	temp = alpha_emit_set_long_const (operands[0], i0, i1);

      if (temp)
	{
	  if (rtx_equal_p (operands[0], temp))
	    return true;
	  operands[1] = temp;
	  return false;
	}
    }

  /* Otherwise we've nothing left but to drop the thing to memory.  */
  operands[1] = force_const_mem (DImode, operands[1]);
  if (reload_in_progress)
    {
      /* During reload we cannot create pseudos, so load the constant's
	 address into the destination register and rewrite the MEM to
	 address through it.  */
      emit_move_insn (operands[0], XEXP (operands[1], 0));
      operands[1] = copy_rtx (operands[1]);
      XEXP (operands[1], 0) = operands[0];
    }
  else
    operands[1] = validize_mem (operands[1]);
  return false;
}
2627
2628/* Expand a non-bwx QImode or HImode move instruction;
2629 return true if all work is done. */
2630
2631bool
2632alpha_expand_mov_nobwx (mode, operands)
2633 enum machine_mode mode;
2634 rtx *operands;
2635{
2636 /* If the output is not a register, the input must be. */
2637 if (GET_CODE (operands[0]) == MEM)
2638 operands[1] = force_reg (mode, operands[1]);
2639
2640 /* Handle four memory cases, unaligned and aligned for either the input
2641 or the output. The only case where we can be called during reload is
2642 for aligned loads; all other cases require temporaries. */
2643
2644 if (GET_CODE (operands[1]) == MEM
2645 || (GET_CODE (operands[1]) == SUBREG
2646 && GET_CODE (SUBREG_REG (operands[1])) == MEM)
2647 || (reload_in_progress && GET_CODE (operands[1]) == REG
2648 && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
2649 || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
2650 && GET_CODE (SUBREG_REG (operands[1])) == REG
2651 && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
2652 {
2653 if (aligned_memory_operand (operands[1], mode))
2654 {
2655 if (reload_in_progress)
2656 {
2657 emit_insn ((mode == QImode
2658 ? gen_reload_inqi_help
2659 : gen_reload_inhi_help)
2660 (operands[0], operands[1],
2661 gen_rtx_REG (SImode, REGNO (operands[0]))));
2662 }
2663 else
2664 {
2665 rtx aligned_mem, bitnum;
2666 rtx scratch = gen_reg_rtx (SImode);
2667
2668 get_aligned_mem (operands[1], &aligned_mem, &bitnum);
2669
2670 emit_insn ((mode == QImode
2671 ? gen_aligned_loadqi
2672 : gen_aligned_loadhi)
2673 (operands[0], aligned_mem, bitnum, scratch));
2674 }
2675 }
2676 else
2677 {
2678 /* Don't pass these as parameters since that makes the generated
2679 code depend on parameter evaluation order which will cause
2680 bootstrap failures. */
2681
2682 rtx temp1 = gen_reg_rtx (DImode);
2683 rtx temp2 = gen_reg_rtx (DImode);
2684 rtx seq = ((mode == QImode
2685 ? gen_unaligned_loadqi
2686 : gen_unaligned_loadhi)
2687 (operands[0], get_unaligned_address (operands[1], 0),
2688 temp1, temp2));
2689
2690 alpha_set_memflags (seq, operands[1]);
2691 emit_insn (seq);
2692 }
2693 return true;
2694 }
2695
2696 if (GET_CODE (operands[0]) == MEM
2697 || (GET_CODE (operands[0]) == SUBREG
2698 && GET_CODE (SUBREG_REG (operands[0])) == MEM)
2699 || (reload_in_progress && GET_CODE (operands[0]) == REG
2700 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
2701 || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
2702 && GET_CODE (SUBREG_REG (operands[0])) == REG
2703 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER))
2704 {
2705 if (aligned_memory_operand (operands[0], mode))
2706 {
2707 rtx aligned_mem, bitnum;
2708 rtx temp1 = gen_reg_rtx (SImode);
2709 rtx temp2 = gen_reg_rtx (SImode);
2710
2711 get_aligned_mem (operands[0], &aligned_mem, &bitnum);
2712
2713 emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
2714 temp1, temp2));
2715 }
2716 else
2717 {
2718 rtx temp1 = gen_reg_rtx (DImode);
2719 rtx temp2 = gen_reg_rtx (DImode);
2720 rtx temp3 = gen_reg_rtx (DImode);
2721 rtx seq = ((mode == QImode
2722 ? gen_unaligned_storeqi
2723 : gen_unaligned_storehi)
2724 (get_unaligned_address (operands[0], 0),
2725 operands[1], temp1, temp2, temp3));
2726
2727 alpha_set_memflags (seq, operands[0]);
2728 emit_insn (seq);
2729 }
2730 return true;
2731 }
2732
2733 return false;
2734}
2735
01b9e84e
RH
/* Generate an unsigned DImode to FP conversion.  This is the same code
   optabs would emit if we didn't have TFmode patterns.

   For SFmode, this is the only construction I've found that can pass
   gcc.c-torture/execute/ieee/rbug.c.  No scenario that uses DFmode
   intermediates will work, because you'll get intermediate rounding
   that ruins the end result.  Some of this could be fixed by turning
   on round-to-positive-infinity, but that requires diddling the fpsr,
   which kills performance.  I tried turning this around and converting
   to a negative number, so that I could turn on /m, but either I did
   it wrong or there's something else cause I wound up with the exact
   same single-bit error.  There is a branch-less form of this same code:

	srl     $16,1,$1
	and     $16,1,$2
	cmplt   $16,0,$3
	or      $1,$2,$2
	cmovge  $16,$16,$2
	itoft	$3,$f10
	itoft	$2,$f11
	cvtqs   $f11,$f11
	adds    $f11,$f11,$f0
	fcmoveq $f10,$f11,$f0

   I'm not using it because it's the same number of instructions as
   this branch-full form, and it has more serialized long latency
   instructions on the critical path.

   For DFmode, we can avoid rounding errors by breaking up the word
   into two pieces, converting them separately, and adding them back:

   LC0: .long 0,0x5f800000

	itoft	$16,$f11
	lda	$2,LC0
	cmplt	$16,0,$1
	cpyse	$f11,$f31,$f10
	cpyse	$f31,$f11,$f11
	s4addq	$1,$2,$1
	lds	$f12,0($1)
	cvtqt	$f10,$f10
	cvtqt	$f11,$f11
	addt	$f12,$f10,$f0
	addt	$f0,$f11,$f0

   This doesn't seem to be a clear-cut win over the optabs form.
   It probably all depends on the distribution of numbers being
   converted -- in the optabs form, all but high-bit-set has a
   much lower minimum execution time.  */

void
alpha_emit_floatuns (operands)
     rtx operands[2];
{
  rtx neglab, donelab, i0, i1, f0, in, out;
  enum machine_mode mode;

  out = operands[0];
  in = force_reg (DImode, operands[1]);
  mode = GET_MODE (out);
  neglab = gen_label_rtx ();
  donelab = gen_label_rtx ();
  i0 = gen_reg_rtx (DImode);
  i1 = gen_reg_rtx (DImode);
  f0 = gen_reg_rtx (mode);

  emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);

  /* High bit clear: the value is in signed range, so a plain signed
     FLOAT conversion is exact.  */
  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
  emit_jump_insn (gen_jump (donelab));
  emit_barrier ();

  emit_label (neglab);

  /* High bit set: halve the value, folding the low bit back in so the
     final rounding is correct, convert, then double the result.  */
  emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
  emit_insn (gen_anddi3 (i1, in, const1_rtx));
  emit_insn (gen_iordi3 (i0, i0, i1));
  emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
  emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));

  emit_label (donelab);
}
2818
f283421d
RH
/* Generate the comparison for a conditional branch.  CODE is the
   comparison requested; the operands come from the global
   alpha_compare, which is cleared on exit.  Returns the comparison
   rtx to use in the branch pattern.  */

rtx
alpha_emit_conditional_branch (code)
     enum rtx_code code;
{
  enum rtx_code cmp_code, branch_code;
  enum machine_mode cmp_mode, branch_mode = VOIDmode;
  rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
  rtx tem;

  if (alpha_compare.fp_p && GET_MODE (op0) == TFmode)
    {
      if (! TARGET_HAS_XFLOATING_LIBS)
	abort ();

      /* X_floating library comparison functions return
	   -1  unordered
	    0  false
	    1  true
	 Convert the compare against the raw return value.  */

      switch (code)
	{
	case UNORDERED:
	  cmp_code = EQ;
	  code = LT;
	  break;
	case ORDERED:
	  cmp_code = EQ;
	  code = GE;
	  break;
	case NE:
	  cmp_code = NE;
	  code = NE;
	  break;
	default:
	  cmp_code = code;
	  code = GT;
	  break;
	}

      op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
      op1 = const0_rtx;
      alpha_compare.fp_p = 0;
    }

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares: */
      cmp_code = code, branch_code = NE;
      break;

    case NE:
    case ORDERED:
      /* These must be reversed.  */
      cmp_code = reverse_condition (code), branch_code = EQ;
      break;

    case GE:  case GT: case GEU:  case GTU:
      /* For FP, we swap them, for INT, we reverse them.  */
      if (alpha_compare.fp_p)
	{
	  cmp_code = swap_condition (code);
	  branch_code = NE;
	  tem = op0, op0 = op1, op1 = tem;
	}
      else
	{
	  cmp_code = reverse_condition (code);
	  branch_code = EQ;
	}
      break;

    default:
      abort ();
    }

  if (alpha_compare.fp_p)
    {
      cmp_mode = DFmode;
      if (flag_unsafe_math_optimizations)
	{
	  /* When we are not as concerned about non-finite values, and we
	     are comparing against zero, we can branch directly.
	     cmp_code == NIL below means no separate compare insn.  */
	  if (op1 == CONST0_RTX (DFmode))
	    cmp_code = NIL, branch_code = code;
	  else if (op0 == CONST0_RTX (DFmode))
	    {
	      /* Undo the swap we probably did just above.  */
	      tem = op0, op0 = op1, op1 = tem;
	      branch_code = swap_condition (cmp_code);
	      cmp_code = NIL;
	    }
	}
      else
	{
	  /* ??? We mark the branch mode to be CCmode to prevent the
	     compare and branch from being combined, since the compare
	     insn follows IEEE rules that the branch does not.  */
	  branch_mode = CCmode;
	}
    }
  else
    {
      cmp_mode = DImode;

      /* The following optimizations are only for signed compares.  */
      if (code != LEU && code != LTU && code != GEU && code != GTU)
	{
	  /* Whee.  Compare and branch against 0 directly.  */
	  if (op1 == const0_rtx)
	    cmp_code = NIL, branch_code = code;

	  /* We want to use cmpcc/bcc when we can, since there is a zero delay
	     bypass between logicals and br/cmov on EV5.  But we don't want to
	     force valid immediate constants into registers needlessly.  */
	  else if (GET_CODE (op1) == CONST_INT)
	    {
	      HOST_WIDE_INT v = INTVAL (op1), n = -v;

	      if (! CONST_OK_FOR_LETTER_P (v, 'I')
		  && (CONST_OK_FOR_LETTER_P (n, 'K')
		      || CONST_OK_FOR_LETTER_P (n, 'L')))
		{
		  /* Compare (op0 + (-op1)) against zero instead.  */
		  cmp_code = PLUS, branch_code = code;
		  op1 = GEN_INT (n);
		}
	    }
	}

      if (!reg_or_0_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (cmp_code != PLUS && !reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  tem = op0;
  if (cmp_code != NIL)
    {
      tem = gen_reg_rtx (cmp_mode);
      emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
    }

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, branch_mode, tem, CONST0_RTX (cmp_mode));
}
2974
9e495700
RH
/* Certain simplifications can be done to make invalid setcc operations
   valid.  Return the final comparison, or NULL if we can't work.

   CODE is the desired comparison; the operands come from the global
   alpha_compare, which is cleared on entry.  The value returned is a
   (code reg const0) rtx suitable for the setcc patterns, possibly after
   emitting a preliminary compare instruction.  */

rtx
alpha_emit_setcc (code)
     enum rtx_code code;
{
  enum rtx_code cmp_code;
  rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
  int fp_p = alpha_compare.fp_p;
  rtx tmp;

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  if (fp_p && GET_MODE (op0) == TFmode)
    {
      /* TFmode compares go through the X_floating soft-float library.  */
      if (! TARGET_HAS_XFLOATING_LIBS)
	abort ();

      /* X_floating library comparison functions return
	   -1 unordered
	    0 false
	    1 true
	 Convert the compare against the raw return value.  */

      if (code == UNORDERED || code == ORDERED)
	cmp_code = EQ;
      else
	cmp_code = code;

      op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
      op1 = const0_rtx;
      fp_p = 0;

      if (code == UNORDERED)
	code = LT;
      else if (code == ORDERED)
	code = GE;
      else
	code = GT;
    }

  /* Without FIX (fp<->int moves) we cannot get the fp compare result
     into an integer register for the setcc.  */
  if (fp_p && !TARGET_FIX)
    return NULL_RTX;

  /* The general case: fold the comparison code to the types of compares
     that we have, choosing the branch as necessary.  NIL in cmp_code
     means no preliminary compare insn is required.  */

  cmp_code = NIL;
  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
    case UNORDERED:
      /* We have these compares.  */
      if (fp_p)
	cmp_code = code, code = NE;
      break;

    case NE:
      if (!fp_p && op1 == const0_rtx)
	break;
      /* FALLTHRU */

    case ORDERED:
      cmp_code = reverse_condition (code);
      code = EQ;
      break;

    case GE:  case GT: case GEU:  case GTU:
      /* These normally need swapping, but for integer zero we have
	 special patterns that recognize swapped operands.  */
      if (!fp_p && op1 == const0_rtx)
	break;
      code = swap_condition (code);
      if (fp_p)
	cmp_code = code, code = NE;
      tmp = op0, op0 = op1, op1 = tmp;
      break;

    default:
      abort ();
    }

  if (!fp_p)
    {
      if (!register_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (!reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* Emit an initial compare instruction, if necessary.  */
  if (cmp_code != NIL)
    {
      enum machine_mode mode = fp_p ? DFmode : DImode;

      tmp = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, tmp,
			      gen_rtx_fmt_ee (cmp_code, mode, op0, op1)));

      /* For fp, move the boolean compare result over to the integer
	 side via a lowpart so the final setcc is integral.  */
      op0 = fp_p ? gen_lowpart (DImode, tmp) : tmp;
      op1 = const0_rtx;
    }

  /* Return the setcc comparison.  */
  return gen_rtx_fmt_ee (code, DImode, op0, op1);
}
3083
f283421d 3084
758d2c0c
RK
/* Rewrite a comparison against zero CMP of the form
   (CODE (cc0) (const_int 0)) so it can be written validly in
   a conditional move (if_then_else CMP ...).
   If both of the operands that set cc0 are non-zero we must emit
   an insn to perform the compare (it can't be done within
   the conditional move).

   CMP is the original comparison rtx; MODE is the mode of the data
   being moved.  Returns the rewritten comparison, or 0/NULL_RTX when
   the transformation is not possible.  Operands come from the global
   alpha_compare, which is cleared on entry.  */
rtx
alpha_emit_conditional_move (cmp, mode)
     rtx cmp;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (cmp);
  enum rtx_code cmov_code = NE;
  rtx op0 = alpha_compare.op0;
  rtx op1 = alpha_compare.op1;
  int fp_p = alpha_compare.fp_p;
  enum machine_mode cmp_mode
    = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
  enum machine_mode cmp_op_mode = fp_p ? DFmode : DImode;
  enum machine_mode cmov_mode = VOIDmode;
  int local_fast_math = flag_unsafe_math_optimizations;
  rtx tem;

  /* Zero the operands.  */
  memset (&alpha_compare, 0, sizeof (alpha_compare));

  if (fp_p != FLOAT_MODE_P (mode))
    {
      enum rtx_code cmp_code;

      /* Crossing fp/int requires the FIX extension's register moves.  */
      if (! TARGET_FIX)
	return 0;

      /* If we have fp<->int register move instructions, do a cmov by
	 performing the comparison in fp registers, and move the
	 zero/non-zero value to integer registers, where we can then
	 use a normal cmov, or vice-versa.  */

      switch (code)
	{
	case EQ: case LE: case LT: case LEU: case LTU:
	  /* We have these compares.  */
	  cmp_code = code, code = NE;
	  break;

	case NE:
	  /* This must be reversed.  */
	  cmp_code = EQ, code = EQ;
	  break;

	case GE: case GT: case GEU: case GTU:
	  /* These normally need swapping, but for integer zero we have
	     special patterns that recognize swapped operands.  */
	  if (!fp_p && op1 == const0_rtx)
	    cmp_code = code, code = NE;
	  else
	    {
	      cmp_code = swap_condition (code);
	      code = NE;
	      tem = op0, op0 = op1, op1 = tem;
	    }
	  break;

	default:
	  abort ();
	}

      tem = gen_reg_rtx (cmp_op_mode);
      emit_insn (gen_rtx_SET (VOIDmode, tem,
			      gen_rtx_fmt_ee (cmp_code, cmp_op_mode,
					      op0, op1)));

      /* Flip sides: the compare result now lives on the other register
	 bank, viewed through a lowpart.  */
      cmp_mode = cmp_op_mode = fp_p ? DImode : DFmode;
      op0 = gen_lowpart (cmp_op_mode, tem);
      op1 = CONST0_RTX (cmp_op_mode);
      fp_p = !fp_p;
      local_fast_math = 1;
    }

  /* We may be able to use a conditional move directly.
     This avoids emitting spurious compares.  */
  if (signed_comparison_operator (cmp, VOIDmode)
      && (!fp_p || local_fast_math)
      && (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
    return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);

  /* We can't put the comparison inside the conditional move;
     emit a compare instruction and put that inside the
     conditional move.  Make sure we emit only comparisons we have;
     swap or reverse as necessary.  */

  if (no_new_pseudos)
    return NULL_RTX;

  switch (code)
    {
    case EQ:  case LE:  case LT:  case LEU:  case LTU:
      /* We have these compares: */
      break;

    case NE:
      /* This must be reversed.  */
      code = reverse_condition (code);
      cmov_code = EQ;
      break;

    case GE:  case GT:  case GEU:  case GTU:
      /* These must be swapped.  */
      if (op1 != CONST0_RTX (cmp_mode))
	{
	  code = swap_condition (code);
	  tem = op0, op0 = op1, op1 = tem;
	}
      break;

    default:
      abort ();
    }

  if (!fp_p)
    {
      if (!reg_or_0_operand (op0, DImode))
	op0 = force_reg (DImode, op0);
      if (!reg_or_8bit_operand (op1, DImode))
	op1 = force_reg (DImode, op1);
    }

  /* ??? We mark the branch mode to be CCmode to prevent the compare
     and cmov from being combined, since the compare insn follows IEEE
     rules that the cmov does not.  */
  if (fp_p && !local_fast_math)
    cmov_mode = CCmode;

  tem = gen_reg_rtx (cmp_op_mode);
  emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1));
  return gen_rtx_fmt_ee (cmov_code, cmov_mode, tem, CONST0_RTX (cmp_op_mode));
}
8f4773ea
RH
3222
/* Simplify a conditional move of two constants into a setcc with
   arithmetic.  This is done with a splitter since combine would
   just undo the work if done during code generation.  It also catches
   cases we wouldn't have before cse.

   CODE compares COND against zero; DEST receives T_RTX (a CONST_INT)
   when the comparison holds and F_RTX otherwise.  Returns 1 on
   success, 0 if no profitable split applies.  */

int
alpha_split_conditional_move (code, dest, cond, t_rtx, f_rtx)
     enum rtx_code code;
     rtx dest, cond, t_rtx, f_rtx;
{
  HOST_WIDE_INT t, f, diff;
  enum machine_mode mode;
  rtx target, subtarget, tmp;

  mode = GET_MODE (dest);
  t = INTVAL (t_rtx);
  f = INTVAL (f_rtx);
  diff = t - f;

  /* Normalize so that the cases below see a non-negative diff where
     possible, by reversing the condition and exchanging t/f.  */
  if (((code == NE || code == EQ) && diff < 0)
      || (code == GE || code == GT))
    {
      code = reverse_condition (code);
      diff = t, t = f, f = diff;
      diff = t - f;
    }

  subtarget = target = dest;
  if (mode != DImode)
    {
      target = gen_lowpart (DImode, dest);
      if (! no_new_pseudos)
	subtarget = gen_reg_rtx (DImode);
      else
	subtarget = target;
    }
  /* Below, we must be careful to use copy_rtx on target and subtarget
     in intermediate insns, as they may be a subreg rtx, which may not
     be shared.  */

  if (f == 0 && exact_log2 (diff) > 0
      /* On EV6, we've got enough shifters to make non-arithmatic shifts
	 viable over a longer latency cmove.  On EV5, the E0 slot is a
	 scarce resource, and on EV4 shift has the same latency as a cmove.  */
      && (diff <= 8 || alpha_cpu == PROCESSOR_EV6))
    {
      /* dest = (cond CODE 0) << log2(t); valid since f == 0 so t == diff.  */
      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget),
			    GEN_INT (exact_log2 (t)));
      emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
    }
  else if (f == 0 && t == -1)
    {
      /* dest = - (cond CODE 0).  */
      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      emit_insn (gen_negdi2 (target, copy_rtx (subtarget)));
    }
  else if (diff == 1 || diff == 4 || diff == 8)
    {
      /* dest = (cond CODE 0) * diff + f, using addq or s4addq/s8addq.  */
      rtx add_op;

      tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));

      if (diff == 1)
	emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f)));
      else
	{
	  add_op = GEN_INT (f);
	  if (sext_add_operand (add_op, mode))
	    {
	      tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget),
				  GEN_INT (diff));
	      tmp = gen_rtx_PLUS (DImode, tmp, add_op);
	      emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
	    }
	  else
	    return 0;
	}
    }
  else
    return 0;

  return 1;
}
6c174fc0 3311\f
5495cc55
RH
/* Look up the function X_floating library function name for the
   given operation.

   CODE identifies the arithmetic, comparison, or conversion operation.
   Returns the OTS routine name for the current ABI (VMS vs OSF);
   aborts if CODE has no library equivalent.  */

static const char *
alpha_lookup_xfloating_lib_func (code)
     enum rtx_code code;
{
  struct xfloating_op
  {
    const enum rtx_code code;
    const char *const func;
  };

  static const struct xfloating_op vms_xfloating_ops[] =
  {
    { PLUS,		"OTS$ADD_X" },
    { MINUS,		"OTS$SUB_X" },
    { MULT,		"OTS$MUL_X" },
    { DIV,		"OTS$DIV_X" },
    { EQ,		"OTS$EQL_X" },
    { NE,		"OTS$NEQ_X" },
    { LT,		"OTS$LSS_X" },
    { LE,		"OTS$LEQ_X" },
    { GT,		"OTS$GTR_X" },
    { GE,		"OTS$GEQ_X" },
    { FIX,		"OTS$CVTXQ" },
    { FLOAT,		"OTS$CVTQX" },
    { UNSIGNED_FLOAT,	"OTS$CVTQUX" },
    { FLOAT_EXTEND,	"OTS$CVT_FLOAT_T_X" },
    { FLOAT_TRUNCATE,	"OTS$CVT_FLOAT_X_T" },
  };

  static const struct xfloating_op osf_xfloating_ops[] =
  {
    { PLUS,		"_OtsAddX" },
    { MINUS,		"_OtsSubX" },
    { MULT,		"_OtsMulX" },
    { DIV,		"_OtsDivX" },
    { EQ,		"_OtsEqlX" },
    { NE,		"_OtsNeqX" },
    { LT,		"_OtsLssX" },
    { LE,		"_OtsLeqX" },
    { GT,		"_OtsGtrX" },
    { GE,		"_OtsGeqX" },
    { FIX,		"_OtsCvtXQ" },
    { FLOAT,		"_OtsCvtQX" },
    { UNSIGNED_FLOAT,	"_OtsCvtQUX" },
    { FLOAT_EXTEND,	"_OtsConvertFloatTX" },
    { FLOAT_TRUNCATE,	"_OtsConvertFloatXT" },
  };

  const struct xfloating_op *ops;
  /* Both tables have the same number of entries.  */
  const long n = ARRAY_SIZE (osf_xfloating_ops);
  long i;

  /* How irritating.  Nothing to key off for the table.  Hardcode
     knowledge of the G_floating routines.  */
  if (TARGET_FLOAT_VAX)
    {
      if (TARGET_ABI_OPEN_VMS)
	{
	  if (code == FLOAT_EXTEND)
	    return "OTS$CVT_FLOAT_G_X";
	  if (code == FLOAT_TRUNCATE)
	    return "OTS$CVT_FLOAT_X_G";
	}
      else
	{
	  if (code == FLOAT_EXTEND)
	    return "_OtsConvertFloatGX";
	  if (code == FLOAT_TRUNCATE)
	    return "_OtsConvertFloatXG";
	}
    }

  if (TARGET_ABI_OPEN_VMS)
    ops = vms_xfloating_ops;
  else
    ops = osf_xfloating_ops;

  for (i = 0; i < n; ++i)
    if (ops[i].code == code)
      return ops[i].func;

  abort();
}
3398
3399/* Most X_floating operations take the rounding mode as an argument.
3400 Compute that here. */
3401
3402static int
3403alpha_compute_xfloating_mode_arg (code, round)
3404 enum rtx_code code;
3405 enum alpha_fp_rounding_mode round;
3406{
3407 int mode;
3408
3409 switch (round)
3410 {
3411 case ALPHA_FPRM_NORM:
3412 mode = 2;
3413 break;
3414 case ALPHA_FPRM_MINF:
3415 mode = 1;
3416 break;
3417 case ALPHA_FPRM_CHOP:
3418 mode = 0;
3419 break;
3420 case ALPHA_FPRM_DYN:
3421 mode = 4;
3422 break;
3423 default:
3424 abort ();
3425
3426 /* XXX For reference, round to +inf is mode = 3. */
3427 }
3428
3429 if (code == FLOAT_TRUNCATE && alpha_fptm == ALPHA_FPTM_N)
3430 mode |= 0x10000;
3431
3432 return mode;
3433}
3434
/* Emit an X_floating library function call.

   Note that these functions do not follow normal calling conventions:
   TFmode arguments are passed in two integer registers (as opposed to
   indirect); TFmode return values appear in R16+R17.

   FUNC is the function name to call.
   TARGET is where the output belongs.
   OPERANDS are the inputs.
   NOPERANDS is the count of inputs.
   EQUIV is the expression equivalent for the function.
*/

static void
alpha_emit_xfloating_libcall (func, target, operands, noperands, equiv)
     const char *func;
     rtx target;
     rtx operands[];
     int noperands;
     rtx equiv;
{
  rtx usage = NULL_RTX, tmp, reg;
  /* Arguments start in $16; FP registers are numbered 32 up.  */
  int regno = 16, i;

  start_sequence ();

  /* Move each operand into its argument register(s) and record the
     USE so the call is known to read them.  */
  for (i = 0; i < noperands; ++i)
    {
      switch (GET_MODE (operands[i]))
	{
	case TFmode:
	  /* TFmode consumes a pair of integer registers.  */
	  reg = gen_rtx_REG (TFmode, regno);
	  regno += 2;
	  break;

	case DFmode:
	  reg = gen_rtx_REG (DFmode, regno + 32);
	  regno += 1;
	  break;

	case VOIDmode:
	  if (GET_CODE (operands[i]) != CONST_INT)
	    abort ();
	  /* FALLTHRU */
	case DImode:
	  reg = gen_rtx_REG (DImode, regno);
	  regno += 1;
	  break;

	default:
	  abort ();
	}

      emit_move_insn (reg, operands[i]);
      usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage);
    }

  /* Pick the hard register the result comes back in.  */
  switch (GET_MODE (target))
    {
    case TFmode:
      reg = gen_rtx_REG (TFmode, 16);
      break;
    case DFmode:
      reg = gen_rtx_REG (DFmode, 32);
      break;
    case DImode:
      reg = gen_rtx_REG (DImode, 0);
      break;
    default:
      abort ();
    }

  tmp = gen_rtx_MEM (QImode, gen_rtx_SYMBOL_REF (Pmode, (char *) func));
  tmp = emit_call_insn (GEN_CALL_VALUE (reg, tmp, const0_rtx,
					const0_rtx, const0_rtx));
  CALL_INSN_FUNCTION_USAGE (tmp) = usage;

  tmp = get_insns ();
  end_sequence ();

  /* Wrap the whole sequence as a libcall block with EQUIV as the
     REG_EQUAL note, so CSE can eliminate duplicate calls.  */
  emit_libcall_block (tmp, target, reg, equiv);
}
3517
3518/* Emit an X_floating library function call for arithmetic (+,-,*,/). */
3519
3520void
3521alpha_emit_xfloating_arith (code, operands)
3522 enum rtx_code code;
3523 rtx operands[];
3524{
3525 const char *func;
3526 int mode;
c77f46c6 3527 rtx out_operands[3];
5495cc55
RH
3528
3529 func = alpha_lookup_xfloating_lib_func (code);
3530 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
3531
c77f46c6
AO
3532 out_operands[0] = operands[1];
3533 out_operands[1] = operands[2];
3534 out_operands[2] = GEN_INT (mode);
3535 alpha_emit_xfloating_libcall (func, operands[0], out_operands, 3,
5495cc55
RH
3536 gen_rtx_fmt_ee (code, TFmode, operands[1],
3537 operands[2]));
3538}
3539
3540/* Emit an X_floating library function call for a comparison. */
3541
3542static rtx
3543alpha_emit_xfloating_compare (code, op0, op1)
3544 enum rtx_code code;
3545 rtx op0, op1;
3546{
3547 const char *func;
3548 rtx out, operands[2];
3549
3550 func = alpha_lookup_xfloating_lib_func (code);
3551
3552 operands[0] = op0;
3553 operands[1] = op1;
3554 out = gen_reg_rtx (DImode);
3555
b762a0ef
RH
3556 /* ??? Strange mode for equiv because what's actually returned
3557 is -1,0,1, not a proper boolean value. */
3558 alpha_emit_xfloating_libcall (func, out, operands, 2,
3559 gen_rtx_fmt_ee (code, CCmode, op0, op1));
5495cc55
RH
3560
3561 return out;
3562}
3563
/* Emit an X_floating library function call for a conversion.

   CODE is the conversion (FIX, FLOAT, FLOAT_TRUNCATE, ...);
   OPERANDS[0] is the destination and OPERANDS[1] the source.
   FIX and FLOAT_TRUNCATE additionally pass a rounding-mode argument.  */

void
alpha_emit_xfloating_cvt (code, operands)
     enum rtx_code code;
     rtx operands[];
{
  int noperands = 1, mode;
  rtx out_operands[2];
  const char *func;

  func = alpha_lookup_xfloating_lib_func (code);

  out_operands[0] = operands[1];

  switch (code)
    {
    case FIX:
      /* Conversion to integer always truncates.  */
      mode = alpha_compute_xfloating_mode_arg (code, ALPHA_FPRM_CHOP);
      out_operands[1] = GEN_INT (mode);
      noperands = 2;
      break;
    case FLOAT_TRUNCATE:
      mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
      out_operands[1] = GEN_INT (mode);
      noperands = 2;
      break;
    default:
      break;
    }

  alpha_emit_xfloating_libcall (func, operands[0], out_operands, noperands,
				gen_rtx_fmt_e (code, GET_MODE (operands[0]),
					       operands[1]));
}
628d74de 3599
f940c352
RH
/* Split a TFmode OP[1] into DImode OP[2,3] and likewise for
   OP[0] into OP[0,1].  Naturally, output operand ordering is
   little-endian.

   Each operand must be a REG, a MEM, or (for the source only) the
   TFmode zero constant.  Note the source is split first since the
   destination split overwrites operands[1].  */

void
alpha_split_tfmode_pair (operands)
     rtx operands[4];
{
  /* Split the source into operands[2] (low) and operands[3] (high).  */
  if (GET_CODE (operands[1]) == REG)
    {
      operands[3] = gen_rtx_REG (DImode, REGNO (operands[1]) + 1);
      operands[2] = gen_rtx_REG (DImode, REGNO (operands[1]));
    }
  else if (GET_CODE (operands[1]) == MEM)
    {
      operands[3] = adjust_address (operands[1], DImode, 8);
      operands[2] = adjust_address (operands[1], DImode, 0);
    }
  else if (operands[1] == CONST0_RTX (TFmode))
    operands[2] = operands[3] = const0_rtx;
  else
    abort ();

  /* Split the destination into operands[0] (low) and operands[1] (high).  */
  if (GET_CODE (operands[0]) == REG)
    {
      operands[1] = gen_rtx_REG (DImode, REGNO (operands[0]) + 1);
      operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
    }
  else if (GET_CODE (operands[0]) == MEM)
    {
      operands[1] = adjust_address (operands[0], DImode, 8);
      operands[0] = adjust_address (operands[0], DImode, 0);
    }
  else
    abort ();
}
f940c352
RH
3636
/* Implement negtf2 or abstf2.  Op0 is destination, op1 is source,
   op2 is a register containing the sign bit, operation is the
   logical operation to be performed.

   After alpha_split_tfmode_pair the high half (which carries the sign
   bit) is operands[1]/operands[3]; only that half is frobbed, the low
   half is copied.  */

void
alpha_split_tfmode_frobsign (operands, operation)
     rtx operands[3];
     rtx (*operation) PARAMS ((rtx, rtx, rtx));
{
  rtx high_bit = operands[2];
  rtx scratch;
  int move;

  alpha_split_tfmode_pair (operands);

  /* Detect three flavours of operand overlap.  */
  move = 1;
  if (rtx_equal_p (operands[0], operands[2]))
    move = 0;
  else if (rtx_equal_p (operands[1], operands[2]))
    {
      if (rtx_equal_p (operands[0], high_bit))
	move = 2;
      else
	move = -1;
    }

  /* move < 0: destination low overlaps the source high half; copy the
     low word before it is clobbered.  */
  if (move < 0)
    emit_move_insn (operands[0], operands[2]);

  /* ??? If the destination overlaps both source tf and high_bit, then
     assume source tf is dead in its entirety and use the other half
     for a scratch register.  Otherwise "scratch" is just the proper
     destination register.  */
  scratch = operands[move < 2 ? 1 : 3];

  emit_insn ((*operation) (scratch, high_bit, operands[3]));

  /* move > 0: the low-word copy was deferred; do it now, plus the
     scratch copy-back in the doubly-overlapping case.  */
  if (move > 0)
    {
      emit_move_insn (operands[0], operands[2]);
      if (move > 1)
	emit_move_insn (operands[1], scratch);
    }
}
5495cc55 3682\f
6c174fc0
RH
/* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
   unaligned data:

   unsigned:                       signed:
   word:   ldq_u  r1,X(r11)        ldq_u  r1,X(r11)
	   ldq_u  r2,X+1(r11)      ldq_u  r2,X+1(r11)
	   lda    r3,X(r11)        lda    r3,X+2(r11)
	   extwl  r1,r3,r1         extql  r1,r3,r1
	   extwh  r2,r3,r2         extqh  r2,r3,r2
	   or     r1.r2.r1         or     r1,r2,r1
				   sra    r1,48,r1

   long:   ldq_u  r1,X(r11)        ldq_u  r1,X(r11)
	   ldq_u  r2,X+3(r11)      ldq_u  r2,X+3(r11)
	   lda    r3,X(r11)        lda    r3,X(r11)
	   extll  r1,r3,r1         extll  r1,r3,r1
	   extlh  r2,r3,r2         extlh  r2,r3,r2
	   or     r1.r2.r1         addl   r1,r2,r1

   quad:   ldq_u  r1,X(r11)
	   ldq_u  r2,X+7(r11)
	   lda    r3,X(r11)
	   extql  r1,r3,r1
	   extqh  r2,r3,r2
	   or     r1.r2.r1
*/

void
alpha_expand_unaligned_load (tgt, mem, size, ofs, sign)
     rtx tgt, mem;
     HOST_WIDE_INT size, ofs;
     int sign;
{
  rtx meml, memh, addr, extl, exth, tmp, mema;
  enum machine_mode mode;

  meml = gen_reg_rtx (DImode);
  memh = gen_reg_rtx (DImode);
  addr = gen_reg_rtx (DImode);
  extl = gen_reg_rtx (DImode);
  exth = gen_reg_rtx (DImode);

  mema = XEXP (mem, 0);
  if (GET_CODE (mema) == LO_SUM)
    mema = force_reg (Pmode, mema);

  /* AND addresses cannot be in any alias set, since they may implicitly
     alias surrounding code.  Ideally we'd have some alias set that
     covered all types except those with alignment 8 or higher.  */

  /* Load the two aligned quadwords spanning the unaligned datum.  */
  tmp = change_address (mem, DImode,
			gen_rtx_AND (DImode,
				     plus_constant (mema, ofs),
				     GEN_INT (-8)));
  set_mem_alias_set (tmp, 0);
  emit_move_insn (meml, tmp);

  tmp = change_address (mem, DImode,
			gen_rtx_AND (DImode,
				     plus_constant (mema, ofs + size - 1),
				     GEN_INT (-8)));
  set_mem_alias_set (tmp, 0);
  emit_move_insn (memh, tmp);

  if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4))
    {
      /* Big-endian signed sub-quad: extract into the top of the word
	 and arithmetic-shift right to sign extend.  */
      emit_move_insn (addr, plus_constant (mema, -1));

      emit_insn (gen_extqh_be (extl, meml, addr));
      emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr));

      addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
      addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
			   addr, 1, OPTAB_WIDEN);
    }
  else if (sign && size == 2)
    {
      /* Little-endian signed word: extract at X+2 so the value lands in
	 bits 63..48, then shift right 48 to sign extend.  */
      emit_move_insn (addr, plus_constant (mema, ofs+2));

      emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
      emit_insn (gen_extqh_le (exth, memh, addr));

      /* We must use tgt here for the target.  Alpha-vms port fails if we use
	 addr for the target, because addr is marked as a pointer and combine
	 knows that pointers are always sign-extended 32 bit values.  */
      addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
      addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
			   addr, 1, OPTAB_WIDEN);
    }
  else
    {
      if (WORDS_BIG_ENDIAN)
	{
	  emit_move_insn (addr, plus_constant (mema, ofs+size-1));
	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_extwh_be (extl, meml, addr));
	      mode = HImode;
	      break;

	    case 4:
	      emit_insn (gen_extlh_be (extl, meml, addr));
	      mode = SImode;
	      break;

	    case 8:
	      emit_insn (gen_extqh_be (extl, meml, addr));
	      mode = DImode;
	      break;

	    default:
	      abort ();
	    }
	  emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
	}
      else
	{
	  emit_move_insn (addr, plus_constant (mema, ofs));
	  emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_extwh_le (exth, memh, addr));
	      mode = HImode;
	      break;

	    case 4:
	      emit_insn (gen_extlh_le (exth, memh, addr));
	      mode = SImode;
	      break;

	    case 8:
	      emit_insn (gen_extqh_le (exth, memh, addr));
	      mode = DImode;
	      break;

	    default:
	      abort();
	    }
	}

      /* Combine low and high fragments; SIGN selects a sign-extending
	 widening when the target is wider than MODE.  */
      addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
			   gen_lowpart (mode, exth), gen_lowpart (mode, tgt),
			   sign, OPTAB_WIDEN);
    }

  if (addr != tgt)
    emit_move_insn (tgt, gen_lowpart(GET_MODE (tgt), addr));
}
3833
/* Similarly, use ins and msk instructions to perform unaligned stores.

   Stores SRC (SIZE bytes, possibly const0_rtx for zeroing) into DST
   at byte offset OFS without assuming alignment: read-modify-write of
   the two covering aligned quadwords.  */

void
alpha_expand_unaligned_store (dst, src, size, ofs)
     rtx dst, src;
     HOST_WIDE_INT size, ofs;
{
  rtx dstl, dsth, addr, insl, insh, meml, memh, dsta;

  dstl = gen_reg_rtx (DImode);
  dsth = gen_reg_rtx (DImode);
  insl = gen_reg_rtx (DImode);
  insh = gen_reg_rtx (DImode);

  dsta = XEXP (dst, 0);
  if (GET_CODE (dsta) == LO_SUM)
    dsta = force_reg (Pmode, dsta);

  /* AND addresses cannot be in any alias set, since they may implicitly
     alias surrounding code.  Ideally we'd have some alias set that
     covered all types except those with alignment 8 or higher.  */

  meml = change_address (dst, DImode,
			 gen_rtx_AND (DImode,
				      plus_constant (dsta, ofs),
				      GEN_INT (-8)));
  set_mem_alias_set (meml, 0);

  memh = change_address (dst, DImode,
			 gen_rtx_AND (DImode,
				      plus_constant (dsta, ofs + size - 1),
				      GEN_INT (-8)));
  set_mem_alias_set (memh, 0);

  /* Read the two quadwords we are about to merge into.  */
  emit_move_insn (dsth, memh);
  emit_move_insn (dstl, meml);
  if (WORDS_BIG_ENDIAN)
    {
      addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1));

      /* Position the source bytes for each half (skipped entirely when
	 storing zero; masking alone suffices then).  */
      if (src != const0_rtx)
	{
	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
	      break;
	    case 4:
	      emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
	      break;
	    case 8:
	      emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
	      break;
	    }
	  emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
				GEN_INT (size*8), addr));
	}

      /* Clear the destination bytes that the insert will replace.  */
      switch ((int) size)
	{
	case 2:
	  emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr));
	  break;
	case 4:
	  emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffffffff), addr));
	  break;
	case 8:
	  {
#if HOST_BITS_PER_WIDE_INT == 32
	    rtx msk = immed_double_const (0xffffffff, 0xffffffff, DImode);
#else
	    rtx msk = constm1_rtx;
#endif
	    emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
	  }
	  break;
	}

      emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr));
    }
  else
    {
      addr = copy_addr_to_reg (plus_constant (dsta, ofs));

      if (src != const0_rtx)
	{
	  emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
				GEN_INT (size*8), addr));

	  switch ((int) size)
	    {
	    case 2:
	      emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr));
	      break;
	    case 4:
	      emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr));
	      break;
	    case 8:
	      emit_insn (gen_insql_le (insl, src, addr));
	      break;
	    }
	}

      emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));

      switch ((int) size)
	{
	case 2:
	  emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
	  break;
	case 4:
	  emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffffffff), addr));
	  break;
	case 8:
	  {
#if HOST_BITS_PER_WIDE_INT == 32
	    rtx msk = immed_double_const (0xffffffff, 0xffffffff, DImode);
#else
	    rtx msk = constm1_rtx;
#endif
	    emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
	  }
	  break;
	}
    }

  /* OR the positioned source into the masked destination words.  */
  if (src != const0_rtx)
    {
      dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN);
      dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
    }

  if (WORDS_BIG_ENDIAN)
    {
      emit_move_insn (meml, dstl);
      emit_move_insn (memh, dsth);
    }
  else
    {
      /* Must store high before low for degenerate case of aligned.  */
      emit_move_insn (memh, dsth);
      emit_move_insn (meml, dstl);
    }
}
3978
4208b40f
RH
3979/* The block move code tries to maximize speed by separating loads and
3980 stores at the expense of register pressure: we load all of the data
3981 before we store it back out. There are two secondary effects worth
3982 mentioning, that this speeds copying to/from aligned and unaligned
3983 buffers, and that it makes the code significantly easier to write. */
6c174fc0 3984
4208b40f
RH
3985#define MAX_MOVE_WORDS 8
3986
/* Load an integral number of consecutive unaligned quadwords.

   OUT_REGS receives WORDS DImode values read from SMEM starting at
   byte offset OFS.  Loads WORDS+1 aligned quadwords and shifts the
   fragments together; part of the block-move expander, which batches
   all loads before any stores.  */

static void
alpha_expand_unaligned_load_words (out_regs, smem, words, ofs)
     rtx *out_regs;
     rtx smem;
     HOST_WIDE_INT words, ofs;
{
  rtx const im8 = GEN_INT (-8);
  rtx const i64 = GEN_INT (64);
  rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
  rtx sreg, areg, tmp, smema;
  HOST_WIDE_INT i;

  smema = XEXP (smem, 0);
  if (GET_CODE (smema) == LO_SUM)
    smema = force_reg (Pmode, smema);

  /* Generate all the tmp registers we need.  */
  for (i = 0; i < words; ++i)
    {
      data_regs[i] = out_regs[i];
      ext_tmps[i] = gen_reg_rtx (DImode);
    }
  data_regs[words] = gen_reg_rtx (DImode);

  if (ofs != 0)
    smem = adjust_address (smem, GET_MODE (smem), ofs);

  /* Load up all of the source data.  */
  for (i = 0; i < words; ++i)
    {
      /* AND'd addresses carry no alias set; see unaligned_load.  */
      tmp = change_address (smem, DImode,
			    gen_rtx_AND (DImode,
					 plus_constant (smema, 8*i),
					 im8));
      set_mem_alias_set (tmp, 0);
      emit_move_insn (data_regs[i], tmp);
    }

  /* One extra quadword covers the tail when the source is unaligned.  */
  tmp = change_address (smem, DImode,
			gen_rtx_AND (DImode,
				     plus_constant (smema, 8*words - 1),
				     im8));
  set_mem_alias_set (tmp, 0);
  emit_move_insn (data_regs[words], tmp);

  /* Extract the half-word fragments.  Unfortunately DEC decided to make
     extxh with offset zero a noop instead of zeroing the register, so
     we must take care of that edge condition ourselves with cmov.  */

  sreg = copy_addr_to_reg (smema);
  /* areg = low 3 bits of the address: zero exactly when aligned.  */
  areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
		       1, OPTAB_WIDEN);
  if (WORDS_BIG_ENDIAN)
    emit_move_insn (sreg, plus_constant (sreg, 7));
  for (i = 0; i < words; ++i)
    {
      if (WORDS_BIG_ENDIAN)
	{
	  emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg));
	  emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
	}
      else
	{
	  emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
	  emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
	}
      /* Conditionally zero the high fragment when the address was
	 aligned, compensating for extqh's offset-zero noop.  */
      emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
			      gen_rtx_IF_THEN_ELSE (DImode,
						    gen_rtx_EQ (DImode, areg,
								const0_rtx),
						    const0_rtx, ext_tmps[i])));
    }

  /* Merge the half-words into whole words.  */
  for (i = 0; i < words; ++i)
    {
      out_regs[i] = expand_binop (DImode, ior_optab, data_regs[i],
				  ext_tmps[i], data_regs[i], 1, OPTAB_WIDEN);
    }
}
4069
4070/* Store an integral number of consecutive unaligned quadwords. DATA_REGS
4071 may be NULL to store zeros. */
4072
4073static void
4208b40f 4074alpha_expand_unaligned_store_words (data_regs, dmem, words, ofs)
6c174fc0 4075 rtx *data_regs;
4208b40f
RH
4076 rtx dmem;
4077 HOST_WIDE_INT words, ofs;
6c174fc0
RH
4078{
4079 rtx const im8 = GEN_INT (-8);
4080 rtx const i64 = GEN_INT (64);
4081#if HOST_BITS_PER_WIDE_INT == 32
4082 rtx const im1 = immed_double_const (0xffffffff, 0xffffffff, DImode);
4083#else
595b6314 4084 rtx const im1 = constm1_rtx;
6c174fc0
RH
4085#endif
4086 rtx ins_tmps[MAX_MOVE_WORDS];
4208b40f 4087 rtx st_tmp_1, st_tmp_2, dreg;
1eb356b9 4088 rtx st_addr_1, st_addr_2, dmema;
6c174fc0
RH
4089 HOST_WIDE_INT i;
4090
1eb356b9
RH
4091 dmema = XEXP (dmem, 0);
4092 if (GET_CODE (dmema) == LO_SUM)
4093 dmema = force_reg (Pmode, dmema);
4094
6c174fc0
RH
4095 /* Generate all the tmp registers we need. */
4096 if (data_regs != NULL)
4097 for (i = 0; i < words; ++i)
4098 ins_tmps[i] = gen_reg_rtx(DImode);
4099 st_tmp_1 = gen_reg_rtx(DImode);
4100 st_tmp_2 = gen_reg_rtx(DImode);
4101
4208b40f 4102 if (ofs != 0)
f4ef873c 4103 dmem = adjust_address (dmem, GET_MODE (dmem), ofs);
4208b40f
RH
4104
4105 st_addr_2 = change_address (dmem, DImode,
38a448ca 4106 gen_rtx_AND (DImode,
1eb356b9 4107 plus_constant (dmema, words*8 - 1),
6c174fc0 4108 im8));
ba4828e0 4109 set_mem_alias_set (st_addr_2, 0);
e01acbb1 4110
4208b40f 4111 st_addr_1 = change_address (dmem, DImode,
1eb356b9 4112 gen_rtx_AND (DImode, dmema, im8));
ba4828e0 4113 set_mem_alias_set (st_addr_1, 0);
6c174fc0
RH
4114
4115 /* Load up the destination end bits. */
4116 emit_move_insn (st_tmp_2, st_addr_2);
4117 emit_move_insn (st_tmp_1, st_addr_1);
4118
4119 /* Shift the input data into place. */
1eb356b9 4120 dreg = copy_addr_to_reg (dmema);
30102605
RH
4121 if (WORDS_BIG_ENDIAN)
4122 emit_move_insn (dreg, plus_constant (dreg, 7));
6c174fc0
RH
4123 if (data_regs != NULL)
4124 {
4125 for (i = words-1; i >= 0; --i)
4126 {
30102605
RH
4127 if (WORDS_BIG_ENDIAN)
4128 {
4129 emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
4130 emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
4131 }
4132 else
4133 {
4134 emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
4135 emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
4136 }
6c174fc0 4137 }
6c174fc0
RH
4138 for (i = words-1; i > 0; --i)
4139 {
4208b40f
RH
4140 ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i],
4141 ins_tmps[i-1], ins_tmps[i-1], 1,
4142 OPTAB_WIDEN);
6c174fc0
RH
4143 }
4144 }
4145
4146 /* Split and merge the ends with the destination data. */
30102605
RH
4147 if (WORDS_BIG_ENDIAN)
4148 {
4149 emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, im1, dreg));
4150 emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
4151 }
4152 else
4153 {
4154 emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
4155 emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, im1, dreg));
4156 }
6c174fc0
RH
4157
4158 if (data_regs != NULL)
4159 {
4208b40f
RH
4160 st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1],
4161 st_tmp_2, 1, OPTAB_WIDEN);
4162 st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0],
4163 st_tmp_1, 1, OPTAB_WIDEN);
6c174fc0
RH
4164 }
4165
4166 /* Store it all. */
30102605
RH
4167 if (WORDS_BIG_ENDIAN)
4168 emit_move_insn (st_addr_1, st_tmp_1);
4169 else
4170 emit_move_insn (st_addr_2, st_tmp_2);
6c174fc0
RH
4171 for (i = words-1; i > 0; --i)
4172 {
e01acbb1
RH
4173 rtx tmp = change_address (dmem, DImode,
4174 gen_rtx_AND (DImode,
30102605
RH
4175 plus_constant(dmema,
4176 WORDS_BIG_ENDIAN ? i*8-1 : i*8),
e01acbb1 4177 im8));
ba4828e0 4178 set_mem_alias_set (tmp, 0);
e01acbb1 4179 emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
6c174fc0 4180 }
30102605
RH
4181 if (WORDS_BIG_ENDIAN)
4182 emit_move_insn (st_addr_2, st_tmp_2);
4183 else
4184 emit_move_insn (st_addr_1, st_tmp_1);
6c174fc0
RH
4185}
4186
4187
4188/* Expand string/block move operations.
4189
4190 operands[0] is the pointer to the destination.
4191 operands[1] is the pointer to the source.
4192 operands[2] is the number of bytes to move.
4193 operands[3] is the alignment. */
4194
4195int
4196alpha_expand_block_move (operands)
4197 rtx operands[];
4198{
4199 rtx bytes_rtx = operands[2];
4200 rtx align_rtx = operands[3];
f35cba21 4201 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
c17f08e1
RH
4202 HOST_WIDE_INT bytes = orig_bytes;
4203 HOST_WIDE_INT src_align = INTVAL (align_rtx) * BITS_PER_UNIT;
4204 HOST_WIDE_INT dst_align = src_align;
bdb429a5
RK
4205 rtx orig_src = operands[1];
4206 rtx orig_dst = operands[0];
4207 rtx data_regs[2 * MAX_MOVE_WORDS + 16];
4208b40f 4208 rtx tmp;
1eb356b9 4209 unsigned int i, words, ofs, nregs = 0;
6c174fc0 4210
bdb429a5 4211 if (orig_bytes <= 0)
6c174fc0 4212 return 1;
c17f08e1 4213 else if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
6c174fc0
RH
4214 return 0;
4215
4208b40f
RH
4216 /* Look for additional alignment information from recorded register info. */
4217
4218 tmp = XEXP (orig_src, 0);
4219 if (GET_CODE (tmp) == REG)
bdb429a5 4220 src_align = MAX (src_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f
RH
4221 else if (GET_CODE (tmp) == PLUS
4222 && GET_CODE (XEXP (tmp, 0)) == REG
4223 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4224 {
bdb429a5
RK
4225 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4226 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
4227
4228 if (a > src_align)
4229 {
bdb429a5
RK
4230 if (a >= 64 && c % 8 == 0)
4231 src_align = 64;
4232 else if (a >= 32 && c % 4 == 0)
4233 src_align = 32;
4234 else if (a >= 16 && c % 2 == 0)
4235 src_align = 16;
4208b40f
RH
4236 }
4237 }
4238
4239 tmp = XEXP (orig_dst, 0);
4240 if (GET_CODE (tmp) == REG)
bdb429a5 4241 dst_align = MAX (dst_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f
RH
4242 else if (GET_CODE (tmp) == PLUS
4243 && GET_CODE (XEXP (tmp, 0)) == REG
4244 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4245 {
bdb429a5
RK
4246 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4247 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
4248
4249 if (a > dst_align)
4250 {
bdb429a5
RK
4251 if (a >= 64 && c % 8 == 0)
4252 dst_align = 64;
4253 else if (a >= 32 && c % 4 == 0)
4254 dst_align = 32;
4255 else if (a >= 16 && c % 2 == 0)
4256 dst_align = 16;
4208b40f
RH
4257 }
4258 }
4259
bdb429a5 4260 /* Load the entire block into registers. */
15389075 4261 if (GET_CODE (XEXP (orig_src, 0)) == ADDRESSOF)
4208b40f
RH
4262 {
4263 enum machine_mode mode;
bdb429a5 4264
4208b40f 4265 tmp = XEXP (XEXP (orig_src, 0), 0);
15389075 4266
c576fce7
RH
4267 /* Don't use the existing register if we're reading more than
4268 is held in the register. Nor if there is not a mode that
4269 handles the exact size. */
f35cba21 4270 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4208b40f 4271 if (mode != BLKmode
c576fce7 4272 && GET_MODE_SIZE (GET_MODE (tmp)) >= bytes)
4208b40f 4273 {
c576fce7
RH
4274 if (mode == TImode)
4275 {
4276 data_regs[nregs] = gen_lowpart (DImode, tmp);
5197bd50 4277 data_regs[nregs + 1] = gen_highpart (DImode, tmp);
c576fce7
RH
4278 nregs += 2;
4279 }
4280 else
4281 data_regs[nregs++] = gen_lowpart (mode, tmp);
bdb429a5 4282
4208b40f
RH
4283 goto src_done;
4284 }
6c174fc0 4285
4208b40f 4286 /* No appropriate mode; fall back on memory. */
792760b9
RK
4287 orig_src = replace_equiv_address (orig_src,
4288 copy_addr_to_reg (XEXP (orig_src, 0)));
d0285326 4289 src_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4208b40f
RH
4290 }
4291
4292 ofs = 0;
bdb429a5 4293 if (src_align >= 64 && bytes >= 8)
6c174fc0
RH
4294 {
4295 words = bytes / 8;
4296
6c174fc0 4297 for (i = 0; i < words; ++i)
5197bd50 4298 data_regs[nregs + i] = gen_reg_rtx (DImode);
6c174fc0 4299
6c174fc0 4300 for (i = 0; i < words; ++i)
bdb429a5 4301 emit_move_insn (data_regs[nregs + i],
f4ef873c 4302 adjust_address (orig_src, DImode, ofs + i * 8));
6c174fc0 4303
4208b40f 4304 nregs += words;
6c174fc0 4305 bytes -= words * 8;
cd36edbd 4306 ofs += words * 8;
6c174fc0 4307 }
bdb429a5
RK
4308
4309 if (src_align >= 32 && bytes >= 4)
6c174fc0
RH
4310 {
4311 words = bytes / 4;
4312
6c174fc0 4313 for (i = 0; i < words; ++i)
5197bd50 4314 data_regs[nregs + i] = gen_reg_rtx (SImode);
6c174fc0 4315
6c174fc0 4316 for (i = 0; i < words; ++i)
bdb429a5 4317 emit_move_insn (data_regs[nregs + i],
792760b9 4318 adjust_address (orig_src, SImode, ofs + i * 4));
6c174fc0 4319
4208b40f 4320 nregs += words;
6c174fc0 4321 bytes -= words * 4;
cd36edbd 4322 ofs += words * 4;
6c174fc0 4323 }
bdb429a5 4324
c17f08e1 4325 if (bytes >= 8)
6c174fc0
RH
4326 {
4327 words = bytes / 8;
4328
6c174fc0 4329 for (i = 0; i < words+1; ++i)
5197bd50 4330 data_regs[nregs + i] = gen_reg_rtx (DImode);
6c174fc0 4331
c576fce7
RH
4332 alpha_expand_unaligned_load_words (data_regs + nregs, orig_src,
4333 words, ofs);
6c174fc0 4334
4208b40f 4335 nregs += words;
6c174fc0 4336 bytes -= words * 8;
cd36edbd 4337 ofs += words * 8;
6c174fc0 4338 }
bdb429a5 4339
bdb429a5 4340 if (! TARGET_BWX && bytes >= 4)
6c174fc0 4341 {
4208b40f 4342 data_regs[nregs++] = tmp = gen_reg_rtx (SImode);
6c174fc0 4343 alpha_expand_unaligned_load (tmp, orig_src, 4, ofs, 0);
6c174fc0
RH
4344 bytes -= 4;
4345 ofs += 4;
4346 }
bdb429a5 4347
6c174fc0
RH
4348 if (bytes >= 2)
4349 {
bdb429a5 4350 if (src_align >= 16)
6c174fc0
RH
4351 {
4352 do {
4208b40f 4353 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
f4ef873c 4354 emit_move_insn (tmp, adjust_address (orig_src, HImode, ofs));
6c174fc0
RH
4355 bytes -= 2;
4356 ofs += 2;
4357 } while (bytes >= 2);
4358 }
bdb429a5 4359 else if (! TARGET_BWX)
6c174fc0 4360 {
4208b40f 4361 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
6c174fc0 4362 alpha_expand_unaligned_load (tmp, orig_src, 2, ofs, 0);
6c174fc0
RH
4363 bytes -= 2;
4364 ofs += 2;
4365 }
4366 }
bdb429a5 4367
6c174fc0
RH
4368 while (bytes > 0)
4369 {
4208b40f 4370 data_regs[nregs++] = tmp = gen_reg_rtx (QImode);
f4ef873c 4371 emit_move_insn (tmp, adjust_address (orig_src, QImode, ofs));
6c174fc0
RH
4372 bytes -= 1;
4373 ofs += 1;
4374 }
bdb429a5 4375
4208b40f
RH
4376 src_done:
4377
9a56f4f6 4378 if (nregs > ARRAY_SIZE (data_regs))
bdb429a5 4379 abort ();
4208b40f 4380
bdb429a5 4381 /* Now save it back out again. */
4208b40f
RH
4382
4383 i = 0, ofs = 0;
4384
4385 if (GET_CODE (XEXP (orig_dst, 0)) == ADDRESSOF)
4386 {
4387 enum machine_mode mode;
4388 tmp = XEXP (XEXP (orig_dst, 0), 0);
4389
f35cba21 4390 mode = mode_for_size (orig_bytes * BITS_PER_UNIT, MODE_INT, 1);
c576fce7 4391 if (GET_MODE (tmp) == mode)
4208b40f 4392 {
c576fce7
RH
4393 if (nregs == 1)
4394 {
4395 emit_move_insn (tmp, data_regs[0]);
4396 i = 1;
4397 goto dst_done;
4398 }
bdb429a5 4399
c576fce7
RH
4400 else if (nregs == 2 && mode == TImode)
4401 {
4402 /* Undo the subregging done above when copying between
4403 two TImode registers. */
4404 if (GET_CODE (data_regs[0]) == SUBREG
4405 && GET_MODE (SUBREG_REG (data_regs[0])) == TImode)
bdb429a5 4406 emit_move_insn (tmp, SUBREG_REG (data_regs[0]));
c576fce7
RH
4407 else
4408 {
4409 rtx seq;
4410
4411 start_sequence ();
4412 emit_move_insn (gen_lowpart (DImode, tmp), data_regs[0]);
4413 emit_move_insn (gen_highpart (DImode, tmp), data_regs[1]);
34cea4e9 4414 seq = get_insns ();
c576fce7
RH
4415 end_sequence ();
4416
4417 emit_no_conflict_block (seq, tmp, data_regs[0],
4418 data_regs[1], NULL_RTX);
4419 }
4420
4421 i = 2;
4422 goto dst_done;
4423 }
4208b40f
RH
4424 }
4425
4426 /* ??? If nregs > 1, consider reconstructing the word in regs. */
4427 /* ??? Optimize mode < dst_mode with strict_low_part. */
f35cba21
RH
4428
4429 /* No appropriate mode; fall back on memory. We can speed things
4430 up by recognizing extra alignment information. */
792760b9
RK
4431 orig_dst = replace_equiv_address (orig_dst,
4432 copy_addr_to_reg (XEXP (orig_dst, 0)));
d0285326 4433 dst_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4208b40f
RH
4434 }
4435
4436 /* Write out the data in whatever chunks reading the source allowed. */
bdb429a5 4437 if (dst_align >= 64)
4208b40f
RH
4438 {
4439 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4440 {
f4ef873c 4441 emit_move_insn (adjust_address (orig_dst, DImode, ofs),
4208b40f
RH
4442 data_regs[i]);
4443 ofs += 8;
4444 i++;
4445 }
4446 }
bdb429a5
RK
4447
4448 if (dst_align >= 32)
4208b40f
RH
4449 {
4450 /* If the source has remaining DImode regs, write them out in
4451 two pieces. */
4452 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4453 {
4454 tmp = expand_binop (DImode, lshr_optab, data_regs[i], GEN_INT (32),
4455 NULL_RTX, 1, OPTAB_WIDEN);
4456
f4ef873c 4457 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4208b40f 4458 gen_lowpart (SImode, data_regs[i]));
f4ef873c 4459 emit_move_insn (adjust_address (orig_dst, SImode, ofs + 4),
4208b40f
RH
4460 gen_lowpart (SImode, tmp));
4461 ofs += 8;
4462 i++;
4463 }
4464
4465 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4466 {
f4ef873c 4467 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4208b40f
RH
4468 data_regs[i]);
4469 ofs += 4;
4470 i++;
4471 }
4472 }
bdb429a5 4473
4208b40f
RH
4474 if (i < nregs && GET_MODE (data_regs[i]) == DImode)
4475 {
4476 /* Write out a remaining block of words using unaligned methods. */
4477
bdb429a5
RK
4478 for (words = 1; i + words < nregs; words++)
4479 if (GET_MODE (data_regs[i + words]) != DImode)
4208b40f
RH
4480 break;
4481
4482 if (words == 1)
4483 alpha_expand_unaligned_store (orig_dst, data_regs[i], 8, ofs);
4484 else
bdb429a5
RK
4485 alpha_expand_unaligned_store_words (data_regs + i, orig_dst,
4486 words, ofs);
4208b40f
RH
4487
4488 i += words;
4489 ofs += words * 8;
4490 }
4491
4492 /* Due to the above, this won't be aligned. */
4493 /* ??? If we have more than one of these, consider constructing full
4494 words in registers and using alpha_expand_unaligned_store_words. */
4495 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4496 {
4497 alpha_expand_unaligned_store (orig_dst, data_regs[i], 4, ofs);
4498 ofs += 4;
4499 i++;
4500 }
4501
bdb429a5 4502 if (dst_align >= 16)
4208b40f
RH
4503 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4504 {
f4ef873c 4505 emit_move_insn (adjust_address (orig_dst, HImode, ofs), data_regs[i]);
4208b40f
RH
4506 i++;
4507 ofs += 2;
4508 }
4509 else
4510 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4511 {
4512 alpha_expand_unaligned_store (orig_dst, data_regs[i], 2, ofs);
4513 i++;
4514 ofs += 2;
4515 }
bdb429a5 4516
4208b40f
RH
4517 while (i < nregs && GET_MODE (data_regs[i]) == QImode)
4518 {
f4ef873c 4519 emit_move_insn (adjust_address (orig_dst, QImode, ofs), data_regs[i]);
4208b40f
RH
4520 i++;
4521 ofs += 1;
4522 }
bdb429a5 4523
4208b40f
RH
4524 dst_done:
4525
4526 if (i != nregs)
bdb429a5 4527 abort ();
6c174fc0
RH
4528
4529 return 1;
4530}
4531
4532int
4533alpha_expand_block_clear (operands)
4534 rtx operands[];
4535{
4536 rtx bytes_rtx = operands[1];
4537 rtx align_rtx = operands[2];
bdb429a5 4538 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
c17f08e1
RH
4539 HOST_WIDE_INT bytes = orig_bytes;
4540 HOST_WIDE_INT align = INTVAL (align_rtx) * BITS_PER_UNIT;
4541 HOST_WIDE_INT alignofs = 0;
bdb429a5 4542 rtx orig_dst = operands[0];
4208b40f 4543 rtx tmp;
c17f08e1 4544 int i, words, ofs = 0;
6c174fc0 4545
bdb429a5 4546 if (orig_bytes <= 0)
6c174fc0 4547 return 1;
c17f08e1 4548 if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
6c174fc0
RH
4549 return 0;
4550
4208b40f 4551 /* Look for stricter alignment. */
4208b40f
RH
4552 tmp = XEXP (orig_dst, 0);
4553 if (GET_CODE (tmp) == REG)
bdb429a5 4554 align = MAX (align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4208b40f
RH
4555 else if (GET_CODE (tmp) == PLUS
4556 && GET_CODE (XEXP (tmp, 0)) == REG
4557 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4558 {
c17f08e1
RH
4559 HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4560 int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4208b40f
RH
4561
4562 if (a > align)
4563 {
c17f08e1
RH
4564 if (a >= 64)
4565 align = a, alignofs = 8 - c % 8;
4566 else if (a >= 32)
4567 align = a, alignofs = 4 - c % 4;
4568 else if (a >= 16)
4569 align = a, alignofs = 2 - c % 2;
4208b40f
RH
4570 }
4571 }
c576fce7
RH
4572 else if (GET_CODE (tmp) == ADDRESSOF)
4573 {
4574 enum machine_mode mode;
4575
4576 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4577 if (GET_MODE (XEXP (tmp, 0)) == mode)
4578 {
4579 emit_move_insn (XEXP (tmp, 0), const0_rtx);
4580 return 1;
4581 }
4582
4583 /* No appropriate mode; fall back on memory. */
792760b9 4584 orig_dst = replace_equiv_address (orig_dst, copy_addr_to_reg (tmp));
c17f08e1 4585 align = GET_MODE_BITSIZE (GET_MODE (XEXP (tmp, 0)));
c576fce7 4586 }
4208b40f 4587
c17f08e1
RH
4588 /* Handle an unaligned prefix first. */
4589
4590 if (alignofs > 0)
4591 {
4592#if HOST_BITS_PER_WIDE_INT >= 64
4593 /* Given that alignofs is bounded by align, the only time BWX could
4594 generate three stores is for a 7 byte fill. Prefer two individual
4595 stores over a load/mask/store sequence. */
4596 if ((!TARGET_BWX || alignofs == 7)
4597 && align >= 32
4598 && !(alignofs == 4 && bytes >= 4))
4599 {
4600 enum machine_mode mode = (align >= 64 ? DImode : SImode);
4601 int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
4602 rtx mem, tmp;
4603 HOST_WIDE_INT mask;
4604
f4ef873c 4605 mem = adjust_address (orig_dst, mode, ofs - inv_alignofs);
ba4828e0 4606 set_mem_alias_set (mem, 0);
c17f08e1
RH
4607
4608 mask = ~(~(HOST_WIDE_INT)0 << (inv_alignofs * 8));
4609 if (bytes < alignofs)
4610 {
4611 mask |= ~(HOST_WIDE_INT)0 << ((inv_alignofs + bytes) * 8);
4612 ofs += bytes;
4613 bytes = 0;
4614 }
4615 else
4616 {
4617 bytes -= alignofs;
4618 ofs += alignofs;
4619 }
4620 alignofs = 0;
4621
4622 tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask),
4623 NULL_RTX, 1, OPTAB_WIDEN);
4624
4625 emit_move_insn (mem, tmp);
4626 }
4627#endif
4628
4629 if (TARGET_BWX && (alignofs & 1) && bytes >= 1)
4630 {
f4ef873c 4631 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
c17f08e1
RH
4632 bytes -= 1;
4633 ofs += 1;
4634 alignofs -= 1;
4635 }
4636 if (TARGET_BWX && align >= 16 && (alignofs & 3) == 2 && bytes >= 2)
4637 {
f4ef873c 4638 emit_move_insn (adjust_address (orig_dst, HImode, ofs), const0_rtx);
c17f08e1
RH
4639 bytes -= 2;
4640 ofs += 2;
4641 alignofs -= 2;
4642 }
4643 if (alignofs == 4 && bytes >= 4)
4644 {
f4ef873c 4645 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
c17f08e1
RH
4646 bytes -= 4;
4647 ofs += 4;
4648 alignofs = 0;
4649 }
4650
4651 /* If we've not used the extra lead alignment information by now,
4652 we won't be able to. Downgrade align to match what's left over. */
4653 if (alignofs > 0)
4654 {
4655 alignofs = alignofs & -alignofs;
4656 align = MIN (align, alignofs * BITS_PER_UNIT);
4657 }
4658 }
4659
4660 /* Handle a block of contiguous long-words. */
6c174fc0 4661
bdb429a5 4662 if (align >= 64 && bytes >= 8)
6c174fc0
RH
4663 {
4664 words = bytes / 8;
4665
4666 for (i = 0; i < words; ++i)
1eb356b9 4667 emit_move_insn (adjust_address (orig_dst, DImode, ofs + i * 8),
f4ef873c 4668 const0_rtx);
6c174fc0
RH
4669
4670 bytes -= words * 8;
cd36edbd 4671 ofs += words * 8;
6c174fc0 4672 }
bdb429a5 4673
c17f08e1
RH
4674 /* If the block is large and appropriately aligned, emit a single
4675 store followed by a sequence of stq_u insns. */
4676
4677 if (align >= 32 && bytes > 16)
4678 {
1eb356b9
RH
4679 rtx orig_dsta;
4680
f4ef873c 4681 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
c17f08e1
RH
4682 bytes -= 4;
4683 ofs += 4;
4684
1eb356b9
RH
4685 orig_dsta = XEXP (orig_dst, 0);
4686 if (GET_CODE (orig_dsta) == LO_SUM)
4687 orig_dsta = force_reg (Pmode, orig_dsta);
4688
c17f08e1
RH
4689 words = bytes / 8;
4690 for (i = 0; i < words; ++i)
4691 {
ba4828e0
RK
4692 rtx mem
4693 = change_address (orig_dst, DImode,
4694 gen_rtx_AND (DImode,
1eb356b9 4695 plus_constant (orig_dsta, ofs + i*8),
ba4828e0
RK
4696 GEN_INT (-8)));
4697 set_mem_alias_set (mem, 0);
c17f08e1
RH
4698 emit_move_insn (mem, const0_rtx);
4699 }
4700
4701 /* Depending on the alignment, the first stq_u may have overlapped
4702 with the initial stl, which means that the last stq_u didn't
4703 write as much as it would appear. Leave those questionable bytes
4704 unaccounted for. */
4705 bytes -= words * 8 - 4;
4706 ofs += words * 8 - 4;
4707 }
4708
4709 /* Handle a smaller block of aligned words. */
4710
4711 if ((align >= 64 && bytes == 4)
4712 || (align == 32 && bytes >= 4))
6c174fc0
RH
4713 {
4714 words = bytes / 4;
4715
4716 for (i = 0; i < words; ++i)
f4ef873c 4717 emit_move_insn (adjust_address (orig_dst, SImode, ofs + i * 4),
bdb429a5 4718 const0_rtx);
6c174fc0
RH
4719
4720 bytes -= words * 4;
cd36edbd 4721 ofs += words * 4;
6c174fc0 4722 }
bdb429a5 4723
c17f08e1
RH
4724 /* An unaligned block uses stq_u stores for as many as possible. */
4725
4726 if (bytes >= 8)
6c174fc0
RH
4727 {
4728 words = bytes / 8;
4729
cd36edbd 4730 alpha_expand_unaligned_store_words (NULL, orig_dst, words, ofs);
6c174fc0
RH
4731
4732 bytes -= words * 8;
cd36edbd 4733 ofs += words * 8;
6c174fc0
RH
4734 }
4735
c17f08e1 4736 /* Next clean up any trailing pieces. */
6c174fc0 4737
c17f08e1
RH
4738#if HOST_BITS_PER_WIDE_INT >= 64
4739 /* Count the number of bits in BYTES for which aligned stores could
4740 be emitted. */
4741 words = 0;
4742 for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
4743 if (bytes & i)
4744 words += 1;
4745
4746 /* If we have appropriate alignment (and it wouldn't take too many
4747 instructions otherwise), mask out the bytes we need. */
4748 if (TARGET_BWX ? words > 2 : bytes > 0)
4749 {
4750 if (align >= 64)
4751 {
4752 rtx mem, tmp;
4753 HOST_WIDE_INT mask;
4754
f4ef873c 4755 mem = adjust_address (orig_dst, DImode, ofs);
ba4828e0 4756 set_mem_alias_set (mem, 0);
c17f08e1
RH
4757
4758 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
4759
4760 tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
4761 NULL_RTX, 1, OPTAB_WIDEN);
4762
4763 emit_move_insn (mem, tmp);
4764 return 1;
4765 }
4766 else if (align >= 32 && bytes < 4)
4767 {
4768 rtx mem, tmp;
4769 HOST_WIDE_INT mask;
4770
f4ef873c 4771 mem = adjust_address (orig_dst, SImode, ofs);
ba4828e0 4772 set_mem_alias_set (mem, 0);
c17f08e1
RH
4773
4774 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
4775
4776 tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
4777 NULL_RTX, 1, OPTAB_WIDEN);
4778
4779 emit_move_insn (mem, tmp);
4780 return 1;
4781 }
6c174fc0 4782 }
c17f08e1 4783#endif
bdb429a5 4784
6c174fc0
RH
4785 if (!TARGET_BWX && bytes >= 4)
4786 {
4787 alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
4788 bytes -= 4;
4789 ofs += 4;
4790 }
bdb429a5 4791
6c174fc0
RH
4792 if (bytes >= 2)
4793 {
bdb429a5 4794 if (align >= 16)
6c174fc0
RH
4795 {
4796 do {
f4ef873c 4797 emit_move_insn (adjust_address (orig_dst, HImode, ofs),
6c174fc0
RH
4798 const0_rtx);
4799 bytes -= 2;
4800 ofs += 2;
4801 } while (bytes >= 2);
4802 }
bdb429a5 4803 else if (! TARGET_BWX)
6c174fc0
RH
4804 {
4805 alpha_expand_unaligned_store (orig_dst, const0_rtx, 2, ofs);
4806 bytes -= 2;
4807 ofs += 2;
4808 }
4809 }
bdb429a5 4810
6c174fc0
RH
4811 while (bytes > 0)
4812 {
f4ef873c 4813 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
6c174fc0
RH
4814 bytes -= 1;
4815 ofs += 1;
4816 }
4817
4818 return 1;
4819}
a6f12d7c
RK
4820\f
4821/* Adjust the cost of a scheduling dependency. Return the new cost of
4822 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
4823
c237e94a 4824static int
a6f12d7c
RK
4825alpha_adjust_cost (insn, link, dep_insn, cost)
4826 rtx insn;
4827 rtx link;
4828 rtx dep_insn;
4829 int cost;
4830{
74835ed8 4831 rtx set, set_src;
26250081 4832 enum attr_type insn_type, dep_insn_type;
a6f12d7c
RK
4833
4834 /* If the dependence is an anti-dependence, there is no cost. For an
4835 output dependence, there is sometimes a cost, but it doesn't seem
4836 worth handling those few cases. */
a6f12d7c 4837 if (REG_NOTE_KIND (link) != 0)
98791e3a 4838 return cost;
a6f12d7c 4839
26250081
RH
4840 /* If we can't recognize the insns, we can't really do anything. */
4841 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
4842 return cost;
4843
4844 insn_type = get_attr_type (insn);
4845 dep_insn_type = get_attr_type (dep_insn);
4846
bcbbac26 4847 /* Bring in the user-defined memory latency. */
71d9b493
RH
4848 if (dep_insn_type == TYPE_ILD
4849 || dep_insn_type == TYPE_FLD
4850 || dep_insn_type == TYPE_LDSYM)
bcbbac26
RH
4851 cost += alpha_memory_latency-1;
4852
98791e3a 4853 /* Everything else handled in DFA bypasses now. */
74835ed8 4854
a6f12d7c
RK
4855 return cost;
4856}
c237e94a 4857
98791e3a
RH
4858/* The number of instructions that can be issued per cycle. */
4859
c237e94a
ZW
4860static int
4861alpha_issue_rate ()
4862{
4863 return (alpha_cpu == PROCESSOR_EV4 ? 2 : 4);
4864}
4865
4866static int
98791e3a 4867alpha_use_dfa_pipeline_interface ()
c237e94a 4868{
98791e3a 4869 return true;
c237e94a
ZW
4870}
4871
98791e3a
RH
4872/* How many alternative schedules to try. This should be as wide as the
4873 scheduling freedom in the DFA, but no wider. Making this value too
4874 large results extra work for the scheduler.
4875
4876 For EV4, loads can be issued to either IB0 or IB1, thus we have 2
4877 alternative schedules. For EV5, we can choose between E0/E1 and
4878 FA/FM. For EV6, an arithmatic insn can be issued to U0/U1/L0/L1. */
4879
4880static int
4881alpha_multipass_dfa_lookahead ()
4882{
4883 return (alpha_cpu == PROCESSOR_EV6 ? 4 : 2);
4884}
9ecc37f0 4885\f
30102605
RH
4886/* Register global variables and machine-specific functions with the
4887 garbage collector. */
4888
4889#if TARGET_ABI_UNICOSMK
4890static void
4891alpha_init_machine_status (p)
4892 struct function *p;
4893{
4894 p->machine =
4895 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
4896
4897 p->machine->first_ciw = NULL_RTX;
4898 p->machine->last_ciw = NULL_RTX;
4899 p->machine->ciw_count = 0;
4900 p->machine->addr_list = NULL_RTX;
4901}
4902
4903static void
4904alpha_mark_machine_status (p)
4905 struct function *p;
4906{
4907 struct machine_function *machine = p->machine;
4908
4909 if (machine)
4910 {
4911 ggc_mark_rtx (machine->first_ciw);
4912 ggc_mark_rtx (machine->addr_list);
4913 }
4914}
4915
4916static void
4917alpha_free_machine_status (p)
4918 struct function *p;
4919{
4920 free (p->machine);
4921 p->machine = NULL;
4922}
4923#endif /* TARGET_ABI_UNICOSMK */
4924
9ecc37f0
RH
4925/* Functions to save and restore alpha_return_addr_rtx. */
4926
9ecc37f0
RH
4927/* Start the ball rolling with RETURN_ADDR_RTX. */
4928
4929rtx
4930alpha_return_addr (count, frame)
4931 int count;
3c303f52 4932 rtx frame ATTRIBUTE_UNUSED;
9ecc37f0 4933{
9ecc37f0
RH
4934 if (count != 0)
4935 return const0_rtx;
4936
b91055dd 4937 return get_hard_reg_initial_val (Pmode, REG_RA);
9ecc37f0
RH
4938}
4939
ccb83cbc
RH
4940/* Return or create a pseudo containing the gp value for the current
4941 function. Needed only if TARGET_LD_BUGGY_LDGP. */
4942
4943rtx
4944alpha_gp_save_rtx ()
4945{
1eefb6c1
RH
4946 rtx r = get_hard_reg_initial_val (DImode, 29);
4947 if (GET_CODE (r) != MEM)
4948 r = gen_mem_addressof (r, NULL_TREE);
4949 return r;
ccb83cbc
RH
4950}
4951
9ecc37f0
RH
4952static int
4953alpha_ra_ever_killed ()
4954{
6abc6f40
RH
4955 rtx top;
4956
b91055dd 4957 if (!has_hard_reg_initial_val (Pmode, REG_RA))
9ecc37f0
RH
4958 return regs_ever_live[REG_RA];
4959
6abc6f40
RH
4960 push_topmost_sequence ();
4961 top = get_insns ();
4962 pop_topmost_sequence ();
4963
4964 return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX);
9ecc37f0
RH
4965}
4966
a6f12d7c 4967\f
be7560ea 4968/* Return the trap mode suffix applicable to the current
285a5742 4969 instruction, or NULL. */
a6f12d7c 4970
be7560ea
RH
4971static const char *
4972get_trap_mode_suffix ()
a6f12d7c 4973{
be7560ea 4974 enum attr_trap_suffix s = get_attr_trap_suffix (current_output_insn);
a6f12d7c 4975
be7560ea 4976 switch (s)
a6f12d7c 4977 {
be7560ea
RH
4978 case TRAP_SUFFIX_NONE:
4979 return NULL;
6245e3df 4980
be7560ea 4981 case TRAP_SUFFIX_SU:
981a828e 4982 if (alpha_fptm >= ALPHA_FPTM_SU)
be7560ea
RH
4983 return "su";
4984 return NULL;
6245e3df 4985
be7560ea
RH
4986 case TRAP_SUFFIX_SUI:
4987 if (alpha_fptm >= ALPHA_FPTM_SUI)
4988 return "sui";
4989 return NULL;
4990
4991 case TRAP_SUFFIX_V_SV:
e83015a9
RH
4992 switch (alpha_fptm)
4993 {
4994 case ALPHA_FPTM_N:
be7560ea 4995 return NULL;
e83015a9 4996 case ALPHA_FPTM_U:
be7560ea 4997 return "v";
e83015a9
RH
4998 case ALPHA_FPTM_SU:
4999 case ALPHA_FPTM_SUI:
be7560ea 5000 return "sv";
e83015a9
RH
5001 }
5002 break;
5003
be7560ea 5004 case TRAP_SUFFIX_V_SV_SVI:
0022a940
DMT
5005 switch (alpha_fptm)
5006 {
5007 case ALPHA_FPTM_N:
be7560ea 5008 return NULL;
0022a940 5009 case ALPHA_FPTM_U:
be7560ea 5010 return "v";
0022a940 5011 case ALPHA_FPTM_SU:
be7560ea 5012 return "sv";
0022a940 5013 case ALPHA_FPTM_SUI:
be7560ea 5014 return "svi";
0022a940
DMT
5015 }
5016 break;
5017
be7560ea 5018 case TRAP_SUFFIX_U_SU_SUI:
6245e3df
RK
5019 switch (alpha_fptm)
5020 {
5021 case ALPHA_FPTM_N:
be7560ea 5022 return NULL;
6245e3df 5023 case ALPHA_FPTM_U:
be7560ea 5024 return "u";
6245e3df 5025 case ALPHA_FPTM_SU:
be7560ea 5026 return "su";
6245e3df 5027 case ALPHA_FPTM_SUI:
be7560ea 5028 return "sui";
6245e3df
RK
5029 }
5030 break;
be7560ea
RH
5031 }
5032 abort ();
5033}
6245e3df 5034
be7560ea 5035/* Return the rounding mode suffix applicable to the current
285a5742 5036 instruction, or NULL. */
be7560ea
RH
5037
5038static const char *
5039get_round_mode_suffix ()
5040{
5041 enum attr_round_suffix s = get_attr_round_suffix (current_output_insn);
5042
5043 switch (s)
5044 {
5045 case ROUND_SUFFIX_NONE:
5046 return NULL;
5047 case ROUND_SUFFIX_NORMAL:
5048 switch (alpha_fprm)
6245e3df 5049 {
be7560ea
RH
5050 case ALPHA_FPRM_NORM:
5051 return NULL;
5052 case ALPHA_FPRM_MINF:
5053 return "m";
5054 case ALPHA_FPRM_CHOP:
5055 return "c";
5056 case ALPHA_FPRM_DYN:
5057 return "d";
6245e3df
RK
5058 }
5059 break;
5060
be7560ea
RH
5061 case ROUND_SUFFIX_C:
5062 return "c";
5063 }
5064 abort ();
5065}
5066
5067/* Print an operand. Recognize special options, documented below. */
5068
5069void
5070print_operand (file, x, code)
5071 FILE *file;
5072 rtx x;
5073 int code;
5074{
5075 int i;
5076
5077 switch (code)
5078 {
5079 case '~':
5080 /* Print the assembler name of the current function. */
5081 assemble_name (file, alpha_fnname);
5082 break;
5083
5084 case '/':
5085 {
5086 const char *trap = get_trap_mode_suffix ();
5087 const char *round = get_round_mode_suffix ();
5088
5089 if (trap || round)
30102605
RH
5090 fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"),
5091 (trap ? trap : ""), (round ? round : ""));
be7560ea
RH
5092 break;
5093 }
5094
89cfc2c6
RK
5095 case ',':
5096 /* Generates single precision instruction suffix. */
be7560ea 5097 fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
89cfc2c6
RK
5098 break;
5099
5100 case '-':
5101 /* Generates double precision instruction suffix. */
be7560ea 5102 fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
89cfc2c6
RK
5103 break;
5104
1eb356b9
RH
5105 case '#':
5106 if (alpha_this_literal_sequence_number == 0)
5107 alpha_this_literal_sequence_number = alpha_next_sequence_number++;
5108 fprintf (file, "%d", alpha_this_literal_sequence_number);
5109 break;
5110
5111 case '*':
5112 if (alpha_this_gpdisp_sequence_number == 0)
5113 alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
5114 fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
5115 break;
5116
5117 case 'H':
5118 if (GET_CODE (x) == HIGH)
133d3133 5119 output_addr_const (file, XEXP (x, 0));
1eb356b9
RH
5120 else
5121 output_operand_lossage ("invalid %%H value");
5122 break;
5123
40571d67
RH
5124 case 'J':
5125 if (GET_CODE (x) == CONST_INT)
5126 {
5127 if (INTVAL (x) != 0)
5128 fprintf (file, "\t\t!lituse_jsr!%d", (int) INTVAL (x));
5129 }
5130 else
5131 output_operand_lossage ("invalid %%J value");
5132 break;
5133
a6f12d7c
RK
5134 case 'r':
5135 /* If this operand is the constant zero, write it as "$31". */
5136 if (GET_CODE (x) == REG)
5137 fprintf (file, "%s", reg_names[REGNO (x)]);
5138 else if (x == CONST0_RTX (GET_MODE (x)))
5139 fprintf (file, "$31");
5140 else
5141 output_operand_lossage ("invalid %%r value");
a6f12d7c
RK
5142 break;
5143
5144 case 'R':
5145 /* Similar, but for floating-point. */
5146 if (GET_CODE (x) == REG)
5147 fprintf (file, "%s", reg_names[REGNO (x)]);
5148 else if (x == CONST0_RTX (GET_MODE (x)))
5149 fprintf (file, "$f31");
5150 else
5151 output_operand_lossage ("invalid %%R value");
a6f12d7c
RK
5152 break;
5153
5154 case 'N':
5155 /* Write the 1's complement of a constant. */
5156 if (GET_CODE (x) != CONST_INT)
5157 output_operand_lossage ("invalid %%N value");
5158
0bc8ae6e 5159 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
a6f12d7c
RK
5160 break;
5161
5162 case 'P':
5163 /* Write 1 << C, for a constant C. */
5164 if (GET_CODE (x) != CONST_INT)
5165 output_operand_lossage ("invalid %%P value");
5166
0bc8ae6e 5167 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x));
a6f12d7c
RK
5168 break;
5169
5170 case 'h':
5171 /* Write the high-order 16 bits of a constant, sign-extended. */
5172 if (GET_CODE (x) != CONST_INT)
5173 output_operand_lossage ("invalid %%h value");
5174
0bc8ae6e 5175 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) >> 16);
a6f12d7c
RK
5176 break;
5177
5178 case 'L':
5179 /* Write the low-order 16 bits of a constant, sign-extended. */
5180 if (GET_CODE (x) != CONST_INT)
5181 output_operand_lossage ("invalid %%L value");
5182
0bc8ae6e
RK
5183 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5184 (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
a6f12d7c
RK
5185 break;
5186
5187 case 'm':
5188 /* Write mask for ZAP insn. */
5189 if (GET_CODE (x) == CONST_DOUBLE)
5190 {
5191 HOST_WIDE_INT mask = 0;
5192 HOST_WIDE_INT value;
5193
5194 value = CONST_DOUBLE_LOW (x);
5195 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5196 i++, value >>= 8)
5197 if (value & 0xff)
5198 mask |= (1 << i);
5199
5200 value = CONST_DOUBLE_HIGH (x);
5201 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5202 i++, value >>= 8)
5203 if (value & 0xff)
5204 mask |= (1 << (i + sizeof (int)));
5205
0bc8ae6e 5206 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
a6f12d7c
RK
5207 }
5208
5209 else if (GET_CODE (x) == CONST_INT)
5210 {
5211 HOST_WIDE_INT mask = 0, value = INTVAL (x);
5212
5213 for (i = 0; i < 8; i++, value >>= 8)
5214 if (value & 0xff)
5215 mask |= (1 << i);
5216
0bc8ae6e 5217 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask);
a6f12d7c
RK
5218 }
5219 else
5220 output_operand_lossage ("invalid %%m value");
5221 break;
5222
5223 case 'M':
6c174fc0 5224 /* 'b', 'w', 'l', or 'q' as the value of the constant. */
a6f12d7c 5225 if (GET_CODE (x) != CONST_INT
6c174fc0
RH
5226 || (INTVAL (x) != 8 && INTVAL (x) != 16
5227 && INTVAL (x) != 32 && INTVAL (x) != 64))
a6f12d7c
RK
5228 output_operand_lossage ("invalid %%M value");
5229
5230 fprintf (file, "%s",
6c174fc0
RH
5231 (INTVAL (x) == 8 ? "b"
5232 : INTVAL (x) == 16 ? "w"
5233 : INTVAL (x) == 32 ? "l"
5234 : "q"));
a6f12d7c
RK
5235 break;
5236
5237 case 'U':
5238 /* Similar, except do it from the mask. */
5239 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0xff)
5240 fprintf (file, "b");
5241 else if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0xffff)
5242 fprintf (file, "w");
11ea364a
JW
5243 else if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0xffffffff)
5244 fprintf (file, "l");
a6f12d7c
RK
5245#if HOST_BITS_PER_WIDE_INT == 32
5246 else if (GET_CODE (x) == CONST_DOUBLE
5247 && CONST_DOUBLE_HIGH (x) == 0
5248 && CONST_DOUBLE_LOW (x) == -1)
5249 fprintf (file, "l");
6c174fc0
RH
5250 else if (GET_CODE (x) == CONST_DOUBLE
5251 && CONST_DOUBLE_HIGH (x) == -1
5252 && CONST_DOUBLE_LOW (x) == -1)
5253 fprintf (file, "q");
a6f12d7c 5254#else
3873d24b 5255 else if (GET_CODE (x) == CONST_INT && INTVAL (x) == -1)
6c174fc0
RH
5256 fprintf (file, "q");
5257 else if (GET_CODE (x) == CONST_DOUBLE
5258 && CONST_DOUBLE_HIGH (x) == 0
5259 && CONST_DOUBLE_LOW (x) == -1)
5260 fprintf (file, "q");
a6f12d7c
RK
5261#endif
5262 else
5263 output_operand_lossage ("invalid %%U value");
5264 break;
5265
5266 case 's':
30102605
RH
5267 /* Write the constant value divided by 8 for little-endian mode or
5268 (56 - value) / 8 for big-endian mode. */
5269
a6f12d7c 5270 if (GET_CODE (x) != CONST_INT
30102605
RH
5271 || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
5272 ? 56
5273 : 64)
5274 || (INTVAL (x) & 7) != 0)
a6f12d7c
RK
5275 output_operand_lossage ("invalid %%s value");
5276
30102605
RH
5277 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5278 WORDS_BIG_ENDIAN
5279 ? (56 - INTVAL (x)) / 8
5280 : INTVAL (x) / 8);
a6f12d7c
RK
5281 break;
5282
5283 case 'S':
5284 /* Same, except compute (64 - c) / 8 */
5285
5286 if (GET_CODE (x) != CONST_INT
5287 && (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
5288 && (INTVAL (x) & 7) != 8)
5289 output_operand_lossage ("invalid %%s value");
5290
0bc8ae6e 5291 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
a6f12d7c
RK
5292 break;
5293
30102605
RH
5294 case 't':
5295 {
5296 /* On Unicos/Mk systems: use a DEX expression if the symbol
5297 clashes with a register name. */
5298 int dex = unicosmk_need_dex (x);
5299 if (dex)
5300 fprintf (file, "DEX(%d)", dex);
5301 else
5302 output_addr_const (file, x);
5303 }
5304 break;
5305
bdd4c95a 5306 case 'C': case 'D': case 'c': case 'd':
a6f12d7c 5307 /* Write out comparison name. */
bdd4c95a
RK
5308 {
5309 enum rtx_code c = GET_CODE (x);
5310
5311 if (GET_RTX_CLASS (c) != '<')
5312 output_operand_lossage ("invalid %%C value");
5313
948068e2 5314 else if (code == 'D')
bdd4c95a
RK
5315 c = reverse_condition (c);
5316 else if (code == 'c')
5317 c = swap_condition (c);
5318 else if (code == 'd')
5319 c = swap_condition (reverse_condition (c));
5320
5321 if (c == LEU)
5322 fprintf (file, "ule");
5323 else if (c == LTU)
5324 fprintf (file, "ult");
1eb8759b
RH
5325 else if (c == UNORDERED)
5326 fprintf (file, "un");
bdd4c95a
RK
5327 else
5328 fprintf (file, "%s", GET_RTX_NAME (c));
5329 }
ab561e66
RK
5330 break;
5331
a6f12d7c
RK
5332 case 'E':
5333 /* Write the divide or modulus operator. */
5334 switch (GET_CODE (x))
5335 {
5336 case DIV:
5337 fprintf (file, "div%s", GET_MODE (x) == SImode ? "l" : "q");
5338 break;
5339 case UDIV:
5340 fprintf (file, "div%su", GET_MODE (x) == SImode ? "l" : "q");
5341 break;
5342 case MOD:
5343 fprintf (file, "rem%s", GET_MODE (x) == SImode ? "l" : "q");
5344 break;
5345 case UMOD:
5346 fprintf (file, "rem%su", GET_MODE (x) == SImode ? "l" : "q");
5347 break;
5348 default:
5349 output_operand_lossage ("invalid %%E value");
5350 break;
5351 }
5352 break;
5353
a6f12d7c
RK
5354 case 'A':
5355 /* Write "_u" for unaligned access. */
5356 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
5357 fprintf (file, "_u");
5358 break;
5359
5360 case 0:
5361 if (GET_CODE (x) == REG)
5362 fprintf (file, "%s", reg_names[REGNO (x)]);
5363 else if (GET_CODE (x) == MEM)
5364 output_address (XEXP (x, 0));
5365 else
5366 output_addr_const (file, x);
5367 break;
5368
5369 default:
5370 output_operand_lossage ("invalid %%xn code");
5371 }
5372}
714b019c
RH
5373
5374void
5375print_operand_address (file, addr)
5376 FILE *file;
5377 rtx addr;
5378{
e03ec28f 5379 int basereg = 31;
714b019c
RH
5380 HOST_WIDE_INT offset = 0;
5381
5382 if (GET_CODE (addr) == AND)
5383 addr = XEXP (addr, 0);
714b019c 5384
e03ec28f
RH
5385 if (GET_CODE (addr) == PLUS
5386 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
714b019c
RH
5387 {
5388 offset = INTVAL (XEXP (addr, 1));
e03ec28f 5389 addr = XEXP (addr, 0);
714b019c 5390 }
1eb356b9
RH
5391
5392 if (GET_CODE (addr) == LO_SUM)
5393 {
5394 output_addr_const (file, XEXP (addr, 1));
5395 if (offset)
5396 {
5397 fputc ('+', file);
5398 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
5399 }
5400
5401 addr = XEXP (addr, 0);
5402 if (GET_CODE (addr) == REG)
5403 basereg = REGNO (addr);
5404 else if (GET_CODE (addr) == SUBREG
5405 && GET_CODE (SUBREG_REG (addr)) == REG)
5406 basereg = subreg_regno (addr);
5407 else
5408 abort ();
133d3133
RH
5409
5410 fprintf (file, "($%d)\t\t!%s", basereg,
5411 (basereg == 29 ? "gprel" : "gprellow"));
1eb356b9
RH
5412 return;
5413 }
5414
e03ec28f
RH
5415 if (GET_CODE (addr) == REG)
5416 basereg = REGNO (addr);
5417 else if (GET_CODE (addr) == SUBREG
5418 && GET_CODE (SUBREG_REG (addr)) == REG)
1eb356b9 5419 basereg = subreg_regno (addr);
e03ec28f
RH
5420 else if (GET_CODE (addr) == CONST_INT)
5421 offset = INTVAL (addr);
714b019c
RH
5422 else
5423 abort ();
5424
5425 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
e03ec28f 5426 fprintf (file, "($%d)", basereg);
714b019c 5427}
a6f12d7c 5428\f
9ec36da5
JL
5429/* Emit RTL insns to initialize the variable parts of a trampoline at
5430 TRAMP. FNADDR is an RTX for the address of the function's pure
5431 code. CXT is an RTX for the static chain value for the function.
c714f03d
RH
5432
5433 The three offset parameters are for the individual template's
5434 layout. A JMPOFS < 0 indicates that the trampoline does not
5435 contain instructions at all.
5436
9ec36da5
JL
5437 We assume here that a function will be called many more times than
5438 its address is taken (e.g., it might be passed to qsort), so we
5439 take the trouble to initialize the "hint" field in the JMP insn.
5440 Note that the hint field is PC (new) + 4 * bits 13:0. */
5441
5442void
c714f03d
RH
5443alpha_initialize_trampoline (tramp, fnaddr, cxt, fnofs, cxtofs, jmpofs)
5444 rtx tramp, fnaddr, cxt;
5445 int fnofs, cxtofs, jmpofs;
9ec36da5
JL
5446{
5447 rtx temp, temp1, addr;
d2692ef8 5448 /* VMS really uses DImode pointers in memory at this point. */
be7b80f4 5449 enum machine_mode mode = TARGET_ABI_OPEN_VMS ? Pmode : ptr_mode;
9ec36da5 5450
d2692ef8
DT
5451#ifdef POINTERS_EXTEND_UNSIGNED
5452 fnaddr = convert_memory_address (mode, fnaddr);
5453 cxt = convert_memory_address (mode, cxt);
5454#endif
5455
9ec36da5 5456 /* Store function address and CXT. */
d420e567 5457 addr = memory_address (mode, plus_constant (tramp, fnofs));
c5c76735 5458 emit_move_insn (gen_rtx_MEM (mode, addr), fnaddr);
d420e567 5459 addr = memory_address (mode, plus_constant (tramp, cxtofs));
c5c76735 5460 emit_move_insn (gen_rtx_MEM (mode, addr), cxt);
c714f03d
RH
5461
5462 /* This has been disabled since the hint only has a 32k range, and in
285a5742 5463 no existing OS is the stack within 32k of the text segment. */
c714f03d
RH
5464 if (0 && jmpofs >= 0)
5465 {
5466 /* Compute hint value. */
5467 temp = force_operand (plus_constant (tramp, jmpofs+4), NULL_RTX);
5468 temp = expand_binop (DImode, sub_optab, fnaddr, temp, temp, 1,
5469 OPTAB_WIDEN);
5470 temp = expand_shift (RSHIFT_EXPR, Pmode, temp,
5471 build_int_2 (2, 0), NULL_RTX, 1);
22273300
JJ
5472 temp = expand_and (SImode, gen_lowpart (SImode, temp),
5473 GEN_INT (0x3fff), 0);
c714f03d
RH
5474
5475 /* Merge in the hint. */
5476 addr = memory_address (SImode, plus_constant (tramp, jmpofs));
c5c76735 5477 temp1 = force_reg (SImode, gen_rtx_MEM (SImode, addr));
22273300 5478 temp1 = expand_and (SImode, temp1, GEN_INT (0xffffc000), NULL_RTX);
c714f03d
RH
5479 temp1 = expand_binop (SImode, ior_optab, temp1, temp, temp1, 1,
5480 OPTAB_WIDEN);
c5c76735 5481 emit_move_insn (gen_rtx_MEM (SImode, addr), temp1);
c714f03d 5482 }
9ec36da5
JL
5483
5484#ifdef TRANSFER_FROM_TRAMPOLINE
c5c76735 5485 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__enable_execute_stack"),
9ec36da5
JL
5486 0, VOIDmode, 1, addr, Pmode);
5487#endif
5488
c714f03d
RH
5489 if (jmpofs >= 0)
5490 emit_insn (gen_imb ());
9ec36da5
JL
5491}
5492\f
5495cc55
RH
5493/* Determine where to put an argument to a function.
5494 Value is zero to push the argument on the stack,
5495 or a hard register in which to store the argument.
5496
5497 MODE is the argument's machine mode.
5498 TYPE is the data type of the argument (as a tree).
5499 This is null for libcalls where that information may
5500 not be available.
5501 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5502 the preceding args and about the function being called.
5503 NAMED is nonzero if this argument is a named parameter
5504 (otherwise it is an extra parameter matching an ellipsis).
5505
5506 On Alpha the first 6 words of args are normally in registers
5507 and the rest are pushed. */
5508
5509rtx
c5e1237f 5510function_arg (cum, mode, type, named)
5495cc55
RH
5511 CUMULATIVE_ARGS cum;
5512 enum machine_mode mode;
5513 tree type;
5514 int named ATTRIBUTE_UNUSED;
5515{
5516 int basereg;
a82c7f05 5517 int num_args;
5495cc55 5518
30102605
RH
5519 /* Set up defaults for FP operands passed in FP registers, and
5520 integral operands passed in integer registers. */
5521 if (TARGET_FPREGS
5522 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
5523 || GET_MODE_CLASS (mode) == MODE_FLOAT))
5524 basereg = 32 + 16;
5525 else
5526 basereg = 16;
5527
5528 /* ??? Irritatingly, the definition of CUMULATIVE_ARGS is different for
5529 the three platforms, so we can't avoid conditional compilation. */
be7b80f4 5530#if TARGET_ABI_OPEN_VMS
30102605
RH
5531 {
5532 if (mode == VOIDmode)
5533 return alpha_arg_info_reg_val (cum);
be7b80f4 5534
30102605
RH
5535 num_args = cum.num_args;
5536 if (num_args >= 6 || MUST_PASS_IN_STACK (mode, type))
5537 return NULL_RTX;
5538 }
be7b80f4 5539#else
30102605
RH
5540#if TARGET_ABI_UNICOSMK
5541 {
5542 int size;
5495cc55 5543
30102605
RH
5544 /* If this is the last argument, generate the call info word (CIW). */
5545 /* ??? We don't include the caller's line number in the CIW because
5546 I don't know how to determine it if debug infos are turned off. */
5547 if (mode == VOIDmode)
5548 {
5549 int i;
5550 HOST_WIDE_INT lo;
5551 HOST_WIDE_INT hi;
5552 rtx ciw;
5553
5554 lo = 0;
5555
5556 for (i = 0; i < cum.num_reg_words && i < 5; i++)
5557 if (cum.reg_args_type[i])
5558 lo |= (1 << (7 - i));
5559
5560 if (cum.num_reg_words == 6 && cum.reg_args_type[5])
5561 lo |= 7;
5562 else
5563 lo |= cum.num_reg_words;
5564
5565#if HOST_BITS_PER_WIDE_INT == 32
5566 hi = (cum.num_args << 20) | cum.num_arg_words;
5567#else
999c746f
KG
5568 lo = lo | ((HOST_WIDE_INT) cum.num_args << 52)
5569 | ((HOST_WIDE_INT) cum.num_arg_words << 32);
30102605
RH
5570 hi = 0;
5571#endif
5572 ciw = immed_double_const (lo, hi, DImode);
5573
5574 return gen_rtx_UNSPEC (DImode, gen_rtvec (1, ciw),
5575 UNSPEC_UMK_LOAD_CIW);
5576 }
5577
5578 size = ALPHA_ARG_SIZE (mode, type, named);
5579 num_args = cum.num_reg_words;
5580 if (MUST_PASS_IN_STACK (mode, type)
5581 || cum.num_reg_words + size > 6 || cum.force_stack)
5582 return NULL_RTX;
5583 else if (type && TYPE_MODE (type) == BLKmode)
5584 {
5585 rtx reg1, reg2;
5586
5587 reg1 = gen_rtx_REG (DImode, num_args + 16);
5588 reg1 = gen_rtx_EXPR_LIST (DImode, reg1, const0_rtx);
5589
5590 /* The argument fits in two registers. Note that we still need to
5591 reserve a register for empty structures. */
5592 if (size == 0)
5593 return NULL_RTX;
5594 else if (size == 1)
5595 return gen_rtx_PARALLEL (mode, gen_rtvec (1, reg1));
5596 else
5597 {
5598 reg2 = gen_rtx_REG (DImode, num_args + 17);
5599 reg2 = gen_rtx_EXPR_LIST (DImode, reg2, GEN_INT (8));
5600 return gen_rtx_PARALLEL (mode, gen_rtvec (2, reg1, reg2));
5601 }
5602 }
5603 }
5604#else
5605 {
5606 if (cum >= 6)
5607 return NULL_RTX;
5608 num_args = cum;
5609
5610 /* VOID is passed as a special flag for "last argument". */
5611 if (type == void_type_node)
5612 basereg = 16;
5613 else if (MUST_PASS_IN_STACK (mode, type))
5614 return NULL_RTX;
5615 else if (FUNCTION_ARG_PASS_BY_REFERENCE (cum, mode, type, named))
5616 basereg = 16;
5617 }
5618#endif /* TARGET_ABI_UNICOSMK */
be7b80f4 5619#endif /* TARGET_ABI_OPEN_VMS */
5495cc55 5620
a82c7f05 5621 return gen_rtx_REG (mode, num_args + basereg);
5495cc55
RH
5622}
5623
63966b3b
RH
5624tree
5625alpha_build_va_list ()
a6f12d7c 5626{
d4b15af9 5627 tree base, ofs, record, type_decl;
a6f12d7c 5628
30102605 5629 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
63966b3b
RH
5630 return ptr_type_node;
5631
f1e639b1 5632 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
d4b15af9
RH
5633 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5634 TREE_CHAIN (record) = type_decl;
5635 TYPE_NAME (record) = type_decl;
5636
63966b3b 5637 /* C++? SET_IS_AGGR_TYPE (record, 1); */
a6f12d7c 5638
63966b3b
RH
5639 ofs = build_decl (FIELD_DECL, get_identifier ("__offset"),
5640 integer_type_node);
5641 DECL_FIELD_CONTEXT (ofs) = record;
29587b1c 5642
63966b3b
RH
5643 base = build_decl (FIELD_DECL, get_identifier ("__base"),
5644 ptr_type_node);
5645 DECL_FIELD_CONTEXT (base) = record;
5646 TREE_CHAIN (base) = ofs;
29587b1c 5647
63966b3b
RH
5648 TYPE_FIELDS (record) = base;
5649 layout_type (record);
5650
5651 return record;
5652}
5653
5654void
5655alpha_va_start (stdarg_p, valist, nextarg)
5656 int stdarg_p;
5657 tree valist;
5658 rtx nextarg ATTRIBUTE_UNUSED;
5659{
5660 HOST_WIDE_INT offset;
5661 tree t, offset_field, base_field;
29587b1c 5662
bdb429a5
RK
5663 if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK)
5664 return;
5665
f7130778 5666 if (TARGET_ABI_UNICOSMK)
63966b3b
RH
5667 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
5668
5669 /* For Unix, SETUP_INCOMING_VARARGS moves the starting address base
5670 up by 48, storing fp arg registers in the first 48 bytes, and the
5671 integer arg registers in the next 48 bytes. This is only done,
5672 however, if any integer registers need to be stored.
5673
5674 If no integer registers need be stored, then we must subtract 48
5675 in order to account for the integer arg registers which are counted
5676 in argsize above, but which are not actually stored on the stack. */
5677
5678 if (NUM_ARGS <= 5 + stdarg_p)
f7130778 5679 offset = TARGET_ABI_OPEN_VMS ? UNITS_PER_WORD : 6 * UNITS_PER_WORD;
89cfc2c6 5680 else
63966b3b
RH
5681 offset = -6 * UNITS_PER_WORD;
5682
f7130778
DR
5683 if (TARGET_ABI_OPEN_VMS)
5684 {
5685 nextarg = plus_constant (nextarg, offset);
5686 nextarg = plus_constant (nextarg, NUM_ARGS * UNITS_PER_WORD);
5687 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
5688 make_tree (ptr_type_node, nextarg));
5689 TREE_SIDE_EFFECTS (t) = 1;
63966b3b 5690
f7130778
DR
5691 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5692 }
5693 else
5694 {
5695 base_field = TYPE_FIELDS (TREE_TYPE (valist));
5696 offset_field = TREE_CHAIN (base_field);
5697
5698 base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
5699 valist, base_field);
5700 offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
5701 valist, offset_field);
5702
5703 t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
5704 t = build (PLUS_EXPR, ptr_type_node, t, build_int_2 (offset, 0));
5705 t = build (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
5706 TREE_SIDE_EFFECTS (t) = 1;
5707 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5708
5709 t = build_int_2 (NUM_ARGS * UNITS_PER_WORD, 0);
5710 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field, t);
5711 TREE_SIDE_EFFECTS (t) = 1;
5712 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5713 }
63966b3b
RH
5714}
5715
5716rtx
5717alpha_va_arg (valist, type)
5718 tree valist, type;
5719{
63966b3b 5720 rtx addr;
1fcd592b 5721 tree t, type_size, rounded_size;
63966b3b
RH
5722 tree offset_field, base_field, addr_tree, addend;
5723 tree wide_type, wide_ofs;
09e98324 5724 int indirect = 0;
63966b3b 5725
30102605 5726 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
63966b3b 5727 return std_expand_builtin_va_arg (valist, type);
a6f12d7c 5728
1fcd592b
RH
5729 if (type == error_mark_node
5730 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
5731 || TREE_OVERFLOW (type_size))
5732 rounded_size = size_zero_node;
5733 else
5734 rounded_size = fold (build (MULT_EXPR, sizetype,
5735 fold (build (TRUNC_DIV_EXPR, sizetype,
5736 fold (build (PLUS_EXPR, sizetype,
5737 type_size,
5738 size_int (7))),
5739 size_int (8))),
5740 size_int (8)));
1d783d31 5741
63966b3b
RH
5742 base_field = TYPE_FIELDS (TREE_TYPE (valist));
5743 offset_field = TREE_CHAIN (base_field);
1d783d31 5744
63966b3b
RH
5745 base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
5746 valist, base_field);
5747 offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
5748 valist, offset_field);
5749
8e5fe23f
RH
5750 /* If the type could not be passed in registers, skip the block
5751 reserved for the registers. */
5752 if (MUST_PASS_IN_STACK (TYPE_MODE (type), type))
5753 {
5754 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
5755 build (MAX_EXPR, TREE_TYPE (offset_field),
5756 offset_field, build_int_2 (6*8, 0)));
5757 TREE_SIDE_EFFECTS (t) = 1;
5758 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5759 }
5760
63966b3b
RH
5761 wide_type = make_signed_type (64);
5762 wide_ofs = save_expr (build1 (CONVERT_EXPR, wide_type, offset_field));
5763
5764 addend = wide_ofs;
09e98324
RO
5765
5766 if (TYPE_MODE (type) == TFmode || TYPE_MODE (type) == TCmode)
5767 {
5768 indirect = 1;
1fcd592b 5769 rounded_size = size_int (UNITS_PER_WORD);
09e98324
RO
5770 }
5771 else if (FLOAT_TYPE_P (type))
89cfc2c6 5772 {
63966b3b 5773 tree fpaddend, cond;
89cfc2c6 5774
63966b3b
RH
5775 fpaddend = fold (build (PLUS_EXPR, TREE_TYPE (addend),
5776 addend, build_int_2 (-6*8, 0)));
89cfc2c6 5777
63966b3b
RH
5778 cond = fold (build (LT_EXPR, integer_type_node,
5779 wide_ofs, build_int_2 (6*8, 0)));
89cfc2c6 5780
63966b3b
RH
5781 addend = fold (build (COND_EXPR, TREE_TYPE (addend), cond,
5782 fpaddend, addend));
89cfc2c6 5783 }
63966b3b
RH
5784
5785 addr_tree = build (PLUS_EXPR, TREE_TYPE (base_field),
5786 base_field, addend);
5787
5788 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
5789 addr = copy_to_reg (addr);
5790
5791 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
5792 build (PLUS_EXPR, TREE_TYPE (offset_field),
1fcd592b 5793 offset_field, rounded_size));
63966b3b
RH
5794 TREE_SIDE_EFFECTS (t) = 1;
5795 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5796
09e98324
RO
5797 if (indirect)
5798 {
5799 addr = force_reg (Pmode, addr);
5800 addr = gen_rtx_MEM (Pmode, addr);
5801 }
5802
63966b3b 5803 return addr;
a6f12d7c
RK
5804}
/* This page contains routines that are used to determine what the function
   prologue and epilogue code will do and write them out.  */

/* These variables are used for communication between the following
   functions.  They indicate various things about the current function
   being compiled that are used to tell what kind of prologue, epilogue
   and procedure descriptor to generate.  */

/* Kind of procedure frame being generated: no frame at all, a
   register frame (all state in registers), or a full stack frame.  */
enum alpha_procedure_types {PT_NULL = 0, PT_REGISTER = 1, PT_STACK = 2};
static enum alpha_procedure_types alpha_procedure_type;

/* Register number (either FP or SP) that is used to unwind the frame.  */
static int vms_unwind_regno;

/* Register number used to save FP.  We need not have one for RA since
   we don't modify it for register procedures.  This is only defined
   for register frame procedures.  */
static int vms_save_fp_regno;

/* Register number used to reference objects off our PV.  */
static int vms_base_regno;
5832
5833static void
5834alpha_sa_mask (imaskP, fmaskP)
5835 unsigned long *imaskP;
5836 unsigned long *fmaskP;
5837{
5838 unsigned long imask = 0;
5839 unsigned long fmask = 0;
1eb356b9 5840 unsigned int i;
89cfc2c6 5841
14691f8d
RH
5842 /* Irritatingly, there are two kinds of thunks -- those created with
5843 ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go through
5844 the regular part of the compiler. In the ASM_OUTPUT_MI_THUNK case
5845 we don't have valid register life info, but assemble_start_function
5846 wants to output .frame and .mask directives. */
b76b08ef 5847 if (current_function_is_thunk && !no_new_pseudos)
acd92049 5848 {
14691f8d
RH
5849 *imaskP = 0;
5850 *fmaskP = 0;
5851 return;
5852 }
89cfc2c6 5853
c2ea1ac6 5854 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
14691f8d 5855 imask |= (1L << HARD_FRAME_POINTER_REGNUM);
89cfc2c6 5856
14691f8d
RH
5857 /* One for every register we have to save. */
5858 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5859 if (! fixed_regs[i] && ! call_used_regs[i]
5860 && regs_ever_live[i] && i != REG_RA
5861 && (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
5862 {
5863 if (i < 32)
5864 imask |= (1L << i);
5865 else
5866 fmask |= (1L << (i - 32));
5867 }
5868
5869 /* We need to restore these for the handler. */
5870 if (current_function_calls_eh_return)
5871 for (i = 0; ; ++i)
5872 {
5873 unsigned regno = EH_RETURN_DATA_REGNO (i);
5874 if (regno == INVALID_REGNUM)
5875 break;
5876 imask |= 1L << regno;
5877 }
30102605 5878
14691f8d
RH
5879 /* If any register spilled, then spill the return address also. */
5880 /* ??? This is required by the Digital stack unwind specification
5881 and isn't needed if we're doing Dwarf2 unwinding. */
5882 if (imask || fmask || alpha_ra_ever_killed ())
5883 imask |= (1L << REG_RA);
9c0e94a5 5884
89cfc2c6
RK
5885 *imaskP = imask;
5886 *fmaskP = fmask;
89cfc2c6
RK
5887}
5888
5889int
5890alpha_sa_size ()
5891{
61334ebe 5892 unsigned long mask[2];
89cfc2c6 5893 int sa_size = 0;
61334ebe 5894 int i, j;
89cfc2c6 5895
61334ebe
RH
5896 alpha_sa_mask (&mask[0], &mask[1]);
5897
5898 if (TARGET_ABI_UNICOSMK)
5899 {
5900 if (mask[0] || mask[1])
5901 sa_size = 14;
5902 }
acd92049 5903 else
acd92049 5904 {
61334ebe
RH
5905 for (j = 0; j < 2; ++j)
5906 for (i = 0; i < 32; ++i)
5907 if ((mask[j] >> i) & 1)
5908 sa_size++;
acd92049 5909 }
89cfc2c6 5910
30102605
RH
5911 if (TARGET_ABI_UNICOSMK)
5912 {
5913 /* We might not need to generate a frame if we don't make any calls
5914 (including calls to __T3E_MISMATCH if this is a vararg function),
5915 don't have any local variables which require stack slots, don't
5916 use alloca and have not determined that we need a frame for other
5917 reasons. */
5918
c2ea1ac6
DR
5919 alpha_procedure_type
5920 = (sa_size || get_frame_size() != 0
5921 || current_function_outgoing_args_size || current_function_varargs
5922 || current_function_stdarg || current_function_calls_alloca
5923 || frame_pointer_needed)
5924 ? PT_STACK : PT_REGISTER;
30102605
RH
5925
5926 /* Always reserve space for saving callee-saved registers if we
5927 need a frame as required by the calling convention. */
c2ea1ac6 5928 if (alpha_procedure_type == PT_STACK)
30102605
RH
5929 sa_size = 14;
5930 }
5931 else if (TARGET_ABI_OPEN_VMS)
9c0e94a5
RH
5932 {
5933 /* Start by assuming we can use a register procedure if we don't
5934 make any calls (REG_RA not used) or need to save any
5935 registers and a stack procedure if we do. */
c2ea1ac6
DR
5936 if ((mask[0] >> REG_RA) & 1)
5937 alpha_procedure_type = PT_STACK;
5938 else if (get_frame_size() != 0)
5939 alpha_procedure_type = PT_REGISTER;
5940 else
5941 alpha_procedure_type = PT_NULL;
61334ebe 5942
cb9a8e97 5943 /* Don't reserve space for saving FP & RA yet. Do that later after we've
61334ebe 5944 made the final decision on stack procedure vs register procedure. */
c2ea1ac6 5945 if (alpha_procedure_type == PT_STACK)
cb9a8e97 5946 sa_size -= 2;
9c0e94a5
RH
5947
5948 /* Decide whether to refer to objects off our PV via FP or PV.
5949 If we need FP for something else or if we receive a nonlocal
5950 goto (which expects PV to contain the value), we must use PV.
5951 Otherwise, start by assuming we can use FP. */
c2ea1ac6
DR
5952
5953 vms_base_regno
5954 = (frame_pointer_needed
5955 || current_function_has_nonlocal_label
5956 || alpha_procedure_type == PT_STACK
5957 || current_function_outgoing_args_size)
5958 ? REG_PV : HARD_FRAME_POINTER_REGNUM;
9c0e94a5
RH
5959
5960 /* If we want to copy PV into FP, we need to find some register
5961 in which to save FP. */
5962
5963 vms_save_fp_regno = -1;
5964 if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
5965 for (i = 0; i < 32; i++)
5966 if (! fixed_regs[i] && call_used_regs[i] && ! regs_ever_live[i])
5967 vms_save_fp_regno = i;
5968
c2ea1ac6
DR
5969 if (vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)
5970 vms_base_regno = REG_PV, alpha_procedure_type = PT_STACK;
5971 else if (alpha_procedure_type == PT_NULL)
5972 vms_base_regno = REG_PV;
9c0e94a5
RH
5973
5974 /* Stack unwinding should be done via FP unless we use it for PV. */
5975 vms_unwind_regno = (vms_base_regno == REG_PV
5976 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
5977
5978 /* If this is a stack procedure, allow space for saving FP and RA. */
c2ea1ac6 5979 if (alpha_procedure_type == PT_STACK)
9c0e94a5
RH
5980 sa_size += 2;
5981 }
5982 else
5983 {
9c0e94a5
RH
5984 /* Our size must be even (multiple of 16 bytes). */
5985 if (sa_size & 1)
5986 sa_size++;
5987 }
89cfc2c6
RK
5988
5989 return sa_size * 8;
5990}
5991
5992int
5993alpha_pv_save_size ()
5994{
5995 alpha_sa_size ();
c2ea1ac6 5996 return alpha_procedure_type == PT_STACK ? 8 : 0;
89cfc2c6
RK
5997}
5998
5999int
6000alpha_using_fp ()
6001{
6002 alpha_sa_size ();
9c0e94a5 6003 return vms_unwind_regno == HARD_FRAME_POINTER_REGNUM;
89cfc2c6
RK
6004}
6005
#if TARGET_ABI_OPEN_VMS

/* Machine attributes recognized when targeting VMS.  All are
   zero-argument, declaration-required attributes with no handler.  */
const struct attribute_spec vms_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "overlaid",   0, 0, true,  false, false, NULL },
  { "global",     0, 0, true,  false, false, NULL },
  { "initialize", 0, 0, true,  false, false, NULL },
  { NULL,         0, 0, false, false, false, NULL }
};

#endif
1eb356b9
RH
6019static int
6020find_lo_sum (px, data)
6021 rtx *px;
6022 void *data ATTRIBUTE_UNUSED;
6023{
6024 return GET_CODE (*px) == LO_SUM;
6025}
6026
9c0e94a5
RH
6027static int
6028alpha_does_function_need_gp ()
6029{
6030 rtx insn;
a6f12d7c 6031
30102605
RH
6032 /* The GP being variable is an OSF abi thing. */
6033 if (! TARGET_ABI_OSF)
9c0e94a5 6034 return 0;
a6f12d7c 6035
70f4f91c 6036 if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
9c0e94a5 6037 return 1;
d60a05a1 6038
acd92049
RH
6039 if (current_function_is_thunk)
6040 return 1;
acd92049 6041
9c0e94a5
RH
6042 /* If we need a GP (we have a LDSYM insn or a CALL_INSN), load it first.
6043 Even if we are a static function, we still need to do this in case
6044 our address is taken and passed to something like qsort. */
a6f12d7c 6045
9c0e94a5
RH
6046 push_topmost_sequence ();
6047 insn = get_insns ();
6048 pop_topmost_sequence ();
89cfc2c6 6049
9c0e94a5 6050 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 6051 if (INSN_P (insn)
9c0e94a5
RH
6052 && GET_CODE (PATTERN (insn)) != USE
6053 && GET_CODE (PATTERN (insn)) != CLOBBER)
6054 {
6055 enum attr_type type = get_attr_type (insn);
6056 if (type == TYPE_LDSYM || type == TYPE_JSR)
6057 return 1;
1eb356b9
RH
6058 if (TARGET_EXPLICIT_RELOCS
6059 && for_each_rtx (&PATTERN (insn), find_lo_sum, NULL) > 0)
6060 return 1;
9c0e94a5 6061 }
a6f12d7c 6062
9c0e94a5 6063 return 0;
a6f12d7c
RK
6064}
6065
0f33506c
RK
6066/* Write a version stamp. Don't write anything if we are running as a
6067 cross-compiler. Otherwise, use the versions in /usr/include/stamp.h. */
6068
9d654bba 6069#ifdef HAVE_STAMP_H
0f33506c
RK
6070#include <stamp.h>
6071#endif
6072
6073void
6074alpha_write_verstamp (file)
4c020733 6075 FILE *file ATTRIBUTE_UNUSED;
0f33506c
RK
6076{
6077#ifdef MS_STAMP
aec4ca5e 6078 fprintf (file, "\t.verstamp %d %d\n", MS_STAMP, LS_STAMP);
0f33506c
RK
6079#endif
6080}
ec6840c1 6081\f
6abc6f40
RH
6082/* Helper function to set RTX_FRAME_RELATED_P on instructions, including
6083 sequences. */
6084
6085static rtx
6086set_frame_related_p ()
6087{
6088 rtx seq = gen_sequence ();
6089 end_sequence ();
6090
6091 if (GET_CODE (seq) == SEQUENCE)
6092 {
6093 int i = XVECLEN (seq, 0);
6094 while (--i >= 0)
6095 RTX_FRAME_RELATED_P (XVECEXP (seq, 0, i)) = 1;
6096 return emit_insn (seq);
6097 }
6098 else
6099 {
6100 seq = emit_insn (seq);
6101 RTX_FRAME_RELATED_P (seq) = 1;
6102 return seq;
6103 }
6104}
6105
6106#define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
6107
/* Write function prologue.  */

/* On vms we have two kinds of functions:

   - stack frame (PROC_STACK)
	these are 'normal' functions with local vars and which are
	calling other functions
   - register frame (PROC_REGISTER)
	keeps all data in registers, needs no stack

   We must pass this to the assembler so it can generate the
   proper pdsc (procedure descriptor)
   This is done with the '.pdesc' command.

   On not-vms, we don't really differentiate between the two, as we can
   simply allocate stack without saving registers.  */

void
alpha_expand_prologue ()
{
  /* Registers to save.  */
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  rtx sa_reg, mem;
  int i;

  sa_size = alpha_sa_size ();

  /* Compute the total frame size; the layout differs per ABI.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    /* We have to allocate space for the DSIB if we generate a frame.  */
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
				+ current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  if (TARGET_ABI_OPEN_VMS)
    reg_offset = 8;
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  alpha_sa_mask (&imask, &fmask);

  /* Emit an insn to reload GP, if needed.  */
  if (TARGET_ABI_OSF)
    {
      alpha_function_needs_gp = alpha_does_function_need_gp ();
      if (alpha_function_needs_gp)
	emit_insn (gen_prologue_ldgp ());
    }

  /* TARGET_PROFILING_NEEDS_GP actually implies that we need to insert
     the call to mcount ourselves, rather than having the linker do it
     magically in response to -pg.  Since _mcount has special linkage,
     don't represent the call as a call.  */
  if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
    emit_insn (gen_prologue_mcount ());

  if (TARGET_ABI_UNICOSMK)
    unicosmk_gen_dsib (&imask);

  /* Adjust the stack by the frame size.  If the frame size is > 4096
     bytes, we need to be sure we probe somewhere in the first and last
     4096 bytes (we can probably get away without the latter test) and
     every 8192 bytes in between.  If the frame size is > 32768, we
     do this in a loop.  Otherwise, we generate the explicit probe
     instructions.

     Note that we are only allowed to adjust sp once in the prologue.  */

  if (frame_size <= 32768)
    {
      if (frame_size > 4096)
	{
	  int probed = 4096;

	  do
	    emit_insn (gen_probe_stack (GEN_INT (TARGET_ABI_UNICOSMK
						 ? -probed + 64
						 : -probed)));
	  while ((probed += 8192) < frame_size);

	  /* We only have to do this probe if we aren't saving registers.  */
	  if (sa_size == 0 && probed + 4096 < frame_size)
	    emit_insn (gen_probe_stack (GEN_INT (-frame_size)));
	}

      if (frame_size != 0)
	FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
				    GEN_INT (TARGET_ABI_UNICOSMK
					     ? -frame_size + 64
					     : -frame_size))));
    }
  else
    {
      /* Here we generate code to set R22 to SP + 4096 and set R23 to the
	 number of 8192 byte blocks to probe.  We then probe each block
	 in the loop and then set SP to the proper location.  If the
	 amount remaining is > 4096, we have to do one more probe if we
	 are not saving any registers.  */

      HOST_WIDE_INT blocks = (frame_size + 4096) / 8192;
      HOST_WIDE_INT leftover = frame_size + 4096 - blocks * 8192;
      rtx ptr = gen_rtx_REG (DImode, 22);
      rtx count = gen_rtx_REG (DImode, 23);
      rtx seq;

      emit_move_insn (count, GEN_INT (blocks));
      emit_insn (gen_adddi3 (ptr, stack_pointer_rtx,
			     GEN_INT (TARGET_ABI_UNICOSMK ? 4096 - 64 : 4096)));

      /* Because of the difficulty in emitting a new basic block this
	 late in the compilation, generate the loop as a single insn.  */
      emit_insn (gen_prologue_stack_probe_loop (count, ptr));

      if (leftover > 4096 && sa_size == 0)
	{
	  rtx last = gen_rtx_MEM (DImode, plus_constant (ptr, -leftover));
	  MEM_VOLATILE_P (last) = 1;
	  emit_move_insn (last, const0_rtx);
	}

      if (TARGET_ABI_WINDOWS_NT)
	{
	  /* For NT stack unwind (done by 'reverse execution'), it's
	     not OK to take the result of a loop, even though the value
	     is already in ptr, so we reload it via a single operation
	     and subtract it to sp.

	     Yes, that's correct -- we have to reload the whole constant
	     into a temporary via ldah+lda then subtract from sp.  To
	     ensure we get ldah+lda, we use a special pattern.  */

	  HOST_WIDE_INT lo, hi;
	  lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
	  hi = frame_size - lo;

	  emit_move_insn (ptr, GEN_INT (hi));
	  emit_insn (gen_nt_lda (ptr, GEN_INT (lo)));
	  seq = emit_insn (gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx,
				       ptr));
	}
      else
	{
	  seq = emit_insn (gen_adddi3 (stack_pointer_rtx, ptr,
				       GEN_INT (-leftover)));
	}

      /* This alternative is special, because the DWARF code cannot
	 possibly intuit through the loop above.  So we invent this
	 note it looks at instead.  */
      RTX_FRAME_RELATED_P (seq) = 1;
      REG_NOTES (seq)
	= gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			     gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					      GEN_INT (TARGET_ABI_UNICOSMK
						       ? -frame_size + 64
						       : -frame_size))),
			     REG_NOTES (seq));
    }

  if (!TARGET_ABI_UNICOSMK)
    {
      /* Cope with very large offsets to the register save area.  */
      sa_reg = stack_pointer_rtx;
      if (reg_offset + sa_size > 0x8000)
	{
	  int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
	  HOST_WIDE_INT bias;

	  if (low + sa_size <= 0x8000)
	    bias = reg_offset - low, reg_offset = low;
	  else
	    bias = reg_offset, reg_offset = 0;

	  sa_reg = gen_rtx_REG (DImode, 24);
	  FRP (emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx,
				      GEN_INT (bias))));
	}

      /* Save regs in stack order.  Beginning with VMS PV.  */
      if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
	{
	  mem = gen_rtx_MEM (DImode, stack_pointer_rtx);
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_PV)));
	}

      /* Save register RA next.  */
      if (imask & (1L << REG_RA))
	{
	  mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
	  imask &= ~(1L << REG_RA);
	  reg_offset += 8;
	}

      /* Now save any other registers required to be saved.  */
      for (i = 0; i < 32; i++)
	if (imask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
	    reg_offset += 8;
	  }

      /* Floating-point saves follow the integer saves.  */
      for (i = 0; i < 32; i++)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant (sa_reg, reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
	    reg_offset += 8;
	  }
    }
  else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
    {
      /* The standard frame on the T3E includes space for saving registers.
	 We just have to use it. We don't have to save the return address and
	 the old frame pointer here - they are saved in the DSIB.  */

      reg_offset = -56;
      for (i = 9; i < 15; i++)
	if (imask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
						     reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
	    reg_offset -= 8;
	  }
      for (i = 2; i < 10; i++)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant (hard_frame_pointer_rtx,
						      reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
	    reg_offset -= 8;
	  }
    }

  if (TARGET_ABI_OPEN_VMS)
    {
      if (alpha_procedure_type == PT_REGISTER)
	/* Register frame procedures save the fp.
	   ?? Ought to have a dwarf2 save for this.  */
	emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno),
			hard_frame_pointer_rtx);

      if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV)
	emit_insn (gen_force_movdi (gen_rtx_REG (DImode, vms_base_regno),
				    gen_rtx_REG (DImode, REG_PV)));

      if (alpha_procedure_type != PT_NULL
	  && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
	FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));

      /* If we have to allocate space for outgoing args, do it now.  */
      if (current_function_outgoing_args_size != 0)
	FRP (emit_move_insn
	     (stack_pointer_rtx,
	      plus_constant (hard_frame_pointer_rtx,
			     - (ALPHA_ROUND
				(current_function_outgoing_args_size)))));
    }
  else if (!TARGET_ABI_UNICOSMK)
    {
      /* If we need a frame pointer, set it from the stack pointer.  */
      if (frame_pointer_needed)
	{
	  if (TARGET_CAN_FAULT_IN_PROLOGUE)
	    FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
	  else
	    /* This must always be the last instruction in the
	       prologue, thus we emit a special move + clobber.  */
	      FRP (emit_insn (gen_init_fp (hard_frame_pointer_rtx,
				           stack_pointer_rtx, sa_reg)));
	}
    }

  /* The ABIs for VMS and OSF/1 say that while we can schedule insns into
     the prologue, for exception handling reasons, we cannot do this for
     any insn that might fault.  We could prevent this for mems with a
     (clobber:BLK (scratch)), but this doesn't work for fp insns.  So we
     have to prevent all such scheduling with a blockage.

     Linux, on the other hand, never bothered to implement OSF/1's
     exception handling, and so doesn't care about such things.  Anyone
     planning to use dwarf2 frame-unwind info can also omit the blockage.  */

  if (! TARGET_CAN_FAULT_IN_PROLOGUE)
    emit_insn (gen_blockage ());
}
6421
/* Output the textual info surrounding the prologue.  */

void
alpha_start_function (file, fnname, decl)
     FILE *file;
     const char *fnname;
     tree decl ATTRIBUTE_UNUSED;
{
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  char *entry_label = (char *) alloca (strlen (fnname) + 6);
  int i;

  /* Don't emit an extern directive for functions defined in the same file.  */
  if (TARGET_ABI_UNICOSMK)
    {
      tree name_tree;
      name_tree = get_identifier (fnname);
      TREE_ASM_WRITTEN (name_tree) = 1;
    }

  alpha_fnname = fnname;
  sa_size = alpha_sa_size ();

  /* NOTE: this frame-size computation mirrors alpha_expand_prologue;
     the two must stay in sync.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
				+ current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  if (TARGET_ABI_OPEN_VMS)
    reg_offset = 8;
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  alpha_sa_mask (&imask, &fmask);

  /* Ecoff can handle multiple .file directives, so put out file and lineno.
     We have to do that before the .ent directive as we cannot switch
     files within procedures with native ecoff because line numbers are
     linked to procedure descriptors.
     Outputting the lineno helps debugging of one line functions as they
     would otherwise get no line number at all. Please note that we would
     like to put out last_linenum from final.c, but it is not accessible.  */

  if (write_symbols == SDB_DEBUG)
    {
#ifdef ASM_OUTPUT_SOURCE_FILENAME
      ASM_OUTPUT_SOURCE_FILENAME (file,
				  DECL_SOURCE_FILE (current_function_decl));
#endif
#ifdef ASM_OUTPUT_SOURCE_LINE
      if (debug_info_level != DINFO_LEVEL_TERSE)
	ASM_OUTPUT_SOURCE_LINE (file,
				DECL_SOURCE_LINE (current_function_decl));
#endif
    }

  /* Issue function start and label.  */
  if (TARGET_ABI_OPEN_VMS
      || (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive))
    {
      fputs ("\t.ent ", file);
      assemble_name (file, fnname);
      putc ('\n', file);

      /* If the function needs GP, we'll write the "..ng" label there.
	 Otherwise, do it here.  */
      if (TARGET_ABI_OSF
          && ! alpha_function_needs_gp
	  && ! current_function_is_thunk)
	{
	  putc ('$', file);
	  assemble_name (file, fnname);
	  fputs ("..ng:\n", file);
	}
    }

  strcpy (entry_label, fnname);
  if (TARGET_ABI_OPEN_VMS)
    strcat (entry_label, "..en");

  /* For public functions, the label must be globalized by appending an
     additional colon.  */
  if (TARGET_ABI_UNICOSMK && TREE_PUBLIC (decl))
    strcat (entry_label, ":");

  ASM_OUTPUT_LABEL (file, entry_label);
  inside_function = TRUE;

  if (TARGET_ABI_OPEN_VMS)
    fprintf (file, "\t.base $%d\n", vms_base_regno);

  if (!TARGET_ABI_OPEN_VMS && !TARGET_ABI_UNICOSMK && TARGET_IEEE_CONFORMANT
      && !flag_inhibit_size_directive)
    {
      /* Set flags in procedure descriptor to request IEEE-conformant
	 math-library routines.  The value we set it to is PDSC_EXC_IEEE
	 (/usr/include/pdsc.h).  */
      fputs ("\t.eflag 48\n", file);
    }

  /* Set up offsets to alpha virtual arg/local debugging pointer.  */
  alpha_auto_offset = -frame_size + current_function_pretend_args_size;
  alpha_arg_offset = -frame_size + 48;

  /* Describe our frame.  If the frame size is larger than an integer,
     print it as zero to avoid an assembler error.  We won't be
     properly describing such a frame, but that's the best we can do.  */
  if (TARGET_ABI_UNICOSMK)
    ;
  else if (TARGET_ABI_OPEN_VMS)
    {
      fprintf (file, "\t.frame $%d,", vms_unwind_regno);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
	       frame_size >= ((HOST_WIDE_INT) 1 << 31) ? 0 : frame_size);
      fputs (",$26,", file);
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, reg_offset);
      fputs ("\n", file);
    }
  else if (!flag_inhibit_size_directive)
    {
      fprintf (file, "\t.frame $%d,",
	       (frame_pointer_needed
		? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM));
      fprintf (file, HOST_WIDE_INT_PRINT_DEC,
	       frame_size >= (1l << 31) ? 0 : frame_size);
      fprintf (file, ",$26,%d\n", current_function_pretend_args_size);
    }

  /* Describe which registers were spilled.  */
  if (TARGET_ABI_UNICOSMK)
    ;
  else if (TARGET_ABI_OPEN_VMS)
    {
      if (imask)
	/* ??? Does VMS care if mask contains ra?  The old code didn't
	   set it, so I don't here.  */
	fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1L << REG_RA));
      if (fmask)
	fprintf (file, "\t.fmask 0x%lx,0\n", fmask);
      if (alpha_procedure_type == PT_REGISTER)
	fprintf (file, "\t.fp_save $%d\n", vms_save_fp_regno);
    }
  else if (!flag_inhibit_size_directive)
    {
      if (imask)
	{
	  fprintf (file, "\t.mask 0x%lx,", imask);
	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
	  putc ('\n', file);

	  /* Step past the integer save area so the .fmask offset below
	     points at the start of the FP saves.  */
	  for (i = 0; i < 32; ++i)
	    if (imask & (1L << i))
	      reg_offset += 8;
	}

      if (fmask)
	{
	  fprintf (file, "\t.fmask 0x%lx,", fmask);
	  fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		   frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
	  putc ('\n', file);
	}
    }

#if TARGET_ABI_OPEN_VMS
  /* Ifdef'ed cause readonly_section and link_section are only
     available then.  */
  readonly_section ();
  fprintf (file, "\t.align 3\n");
  assemble_name (file, fnname); fputs ("..na:\n", file);
  fputs ("\t.ascii \"", file);
  assemble_name (file, fnname);
  fputs ("\\0\"\n", file);

  link_section ();
  fprintf (file, "\t.align 3\n");
  fputs ("\t.name ", file);
  assemble_name (file, fnname);
  fputs ("..na\n", file);
  ASM_OUTPUT_LABEL (file, fnname);
  fprintf (file, "\t.pdesc ");
  assemble_name (file, fnname);
  fprintf (file, "..en,%s\n",
	   alpha_procedure_type == PT_STACK ? "stack"
	   : alpha_procedure_type == PT_REGISTER ? "reg" : "null");
  alpha_need_linkage (fnname, 1);
  text_section ();
#endif
}
a6f12d7c 6631
9c0e94a5 6632/* Emit the .prologue note at the scheduled end of the prologue. */
0f33506c 6633
b4c25db2
NB
6634static void
6635alpha_output_function_end_prologue (file)
9c0e94a5
RH
6636 FILE *file;
6637{
30102605
RH
6638 if (TARGET_ABI_UNICOSMK)
6639 ;
6640 else if (TARGET_ABI_OPEN_VMS)
9c0e94a5 6641 fputs ("\t.prologue\n", file);
be7b80f4 6642 else if (TARGET_ABI_WINDOWS_NT)
9c0e94a5
RH
6643 fputs ("\t.prologue 0\n", file);
6644 else if (!flag_inhibit_size_directive)
14691f8d
RH
6645 fprintf (file, "\t.prologue %d\n",
6646 alpha_function_needs_gp || current_function_is_thunk);
a6f12d7c
RK
6647}
6648
/* Write function epilogue.  */

/* ??? At some point we will want to support full unwind, and so will
   need to mark the epilogue as well.  At the moment, we just confuse
   dwarf2out.  */
#undef FRP
#define FRP(exp) exp

void
alpha_expand_epilogue ()
{
  /* Registers to save.  */
  unsigned long imask = 0;
  unsigned long fmask = 0;
  /* Stack space needed for pushing registers clobbered by us.  */
  HOST_WIDE_INT sa_size;
  /* Complete stack size needed.  */
  HOST_WIDE_INT frame_size;
  /* Offset from base reg to register save area.  */
  HOST_WIDE_INT reg_offset;
  int fp_is_frame_pointer, fp_offset;
  rtx sa_reg, sa_reg_exp = NULL;
  rtx sp_adj1, sp_adj2, mem;
  rtx eh_ofs;
  int i;

  sa_size = alpha_sa_size ();

  /* NOTE: this frame-size computation mirrors alpha_expand_prologue;
     the two must stay in sync.  */
  frame_size = get_frame_size ();
  if (TARGET_ABI_OPEN_VMS)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 8 : 0)
			      + frame_size
			      + current_function_pretend_args_size);
  else if (TARGET_ABI_UNICOSMK)
    frame_size = ALPHA_ROUND (sa_size
			      + (alpha_procedure_type == PT_STACK ? 48 : 0))
		 + ALPHA_ROUND (frame_size
				+ current_function_outgoing_args_size);
  else
    frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
		  + sa_size
		  + ALPHA_ROUND (frame_size
				 + current_function_pretend_args_size));

  if (TARGET_ABI_OPEN_VMS)
    {
      if (alpha_procedure_type == PT_STACK)
	reg_offset = 8;
      else
	reg_offset = 0;
    }
  else
    reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);

  alpha_sa_mask (&imask, &fmask);

  fp_is_frame_pointer
    = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
       || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed));
  fp_offset = 0;
  sa_reg = stack_pointer_rtx;

  if (current_function_calls_eh_return)
    eh_ofs = EH_RETURN_STACKADJ_RTX;
  else
    eh_ofs = NULL_RTX;

  if (!TARGET_ABI_UNICOSMK && sa_size)
    {
      /* If we have a frame pointer, restore SP from it.  */
      if ((TARGET_ABI_OPEN_VMS
	   && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
	  || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed))
	FRP (emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx));

      /* Cope with very large offsets to the register save area.  */
      if (reg_offset + sa_size > 0x8000)
	{
	  int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
	  HOST_WIDE_INT bias;

	  if (low + sa_size <= 0x8000)
	    bias = reg_offset - low, reg_offset = low;
	  else
	    bias = reg_offset, reg_offset = 0;

	  sa_reg = gen_rtx_REG (DImode, 22);
	  sa_reg_exp = plus_constant (stack_pointer_rtx, bias);

	  FRP (emit_move_insn (sa_reg, sa_reg_exp));
	}

      /* Restore registers in order, excepting a true frame pointer.  */

      mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
      if (! eh_ofs)
	set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));

      reg_offset += 8;
      imask &= ~(1L << REG_RA);

      for (i = 0; i < 32; ++i)
	if (imask & (1L << i))
	  {
	    /* The frame pointer itself is restored last; just remember
	       where its save slot is.  */
	    if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer)
	      fp_offset = reg_offset;
	    else
	      {
		mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset));
		set_mem_alias_set (mem, alpha_sr_alias_set);
		FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
	      }
	    reg_offset += 8;
	  }

      for (i = 0; i < 32; ++i)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
	    reg_offset += 8;
	  }
    }
  else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
    {
      /* Restore callee-saved general-purpose registers.  */

      reg_offset = -56;

      for (i = 9; i < 15; i++)
	if (imask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
						     reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
	    reg_offset -= 8;
	  }

      for (i = 2; i < 10; i++)
	if (fmask & (1L << i))
	  {
	    mem = gen_rtx_MEM (DFmode, plus_constant(hard_frame_pointer_rtx,
						     reg_offset));
	    set_mem_alias_set (mem, alpha_sr_alias_set);
	    FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
	    reg_offset -= 8;
	  }

      /* Restore the return address from the DSIB.  */

      mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx, -8));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));
    }

  if (frame_size || eh_ofs)
    {
      sp_adj1 = stack_pointer_rtx;

      if (eh_ofs)
	{
	  sp_adj1 = gen_rtx_REG (DImode, 23);
	  emit_move_insn (sp_adj1,
			  gen_rtx_PLUS (Pmode, stack_pointer_rtx, eh_ofs));
	}

      /* If the stack size is large, begin computation into a temporary
	 register so as not to interfere with a potential fp restore,
	 which must be consecutive with an SP restore.  */
      if (frame_size < 32768
	  && ! (TARGET_ABI_UNICOSMK && current_function_calls_alloca))
	sp_adj2 = GEN_INT (frame_size);
      else if (TARGET_ABI_UNICOSMK)
	{
	  sp_adj1 = gen_rtx_REG (DImode, 23);
	  FRP (emit_move_insn (sp_adj1, hard_frame_pointer_rtx));
	  sp_adj2 = const0_rtx;
	}
      else if (frame_size < 0x40007fffL)
	{
	  int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;

	  sp_adj2 = plus_constant (sp_adj1, frame_size - low);
	  if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2))
	    sp_adj1 = sa_reg;
	  else
	    {
	      sp_adj1 = gen_rtx_REG (DImode, 23);
	      FRP (emit_move_insn (sp_adj1, sp_adj2));
	    }
	  sp_adj2 = GEN_INT (low);
	}
      else
	{
	  rtx tmp = gen_rtx_REG (DImode, 23);
	  FRP (sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3));
	  if (!sp_adj2)
	    {
	      /* We can't drop new things to memory this late, afaik,
		 so build it up by pieces.  */
	      FRP (sp_adj2 = alpha_emit_set_long_const (tmp, frame_size,
							-(frame_size < 0)));
	      if (!sp_adj2)
		abort ();
	    }
	}

      /* From now on, things must be in order.  So emit blockages.  */

      /* Restore the frame pointer.  */
      if (TARGET_ABI_UNICOSMK)
	{
	  emit_insn (gen_blockage ());
	  mem = gen_rtx_MEM (DImode,
			     plus_constant (hard_frame_pointer_rtx, -16));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
	}
      else if (fp_is_frame_pointer)
	{
	  emit_insn (gen_blockage ());
	  mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
	  set_mem_alias_set (mem, alpha_sr_alias_set);
	  FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
	}
      else if (TARGET_ABI_OPEN_VMS)
	{
	  emit_insn (gen_blockage ());
	  FRP (emit_move_insn (hard_frame_pointer_rtx,
			       gen_rtx_REG (DImode, vms_save_fp_regno)));
	}

      /* Restore the stack pointer.  */
      emit_insn (gen_blockage ());
      if (sp_adj2 == const0_rtx)
	FRP (emit_move_insn (stack_pointer_rtx, sp_adj1));
      else
	FRP (emit_move_insn (stack_pointer_rtx,
			     gen_rtx_PLUS (DImode, sp_adj1, sp_adj2)));
    }
  else
    {
      if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_REGISTER)
	{
	  emit_insn (gen_blockage ());
	  FRP (emit_move_insn (hard_frame_pointer_rtx,
			       gen_rtx_REG (DImode, vms_save_fp_regno)));
	}
      else if (TARGET_ABI_UNICOSMK && alpha_procedure_type != PT_STACK)
	{
	  /* Decrement the frame pointer if the function does not have a
	     frame.  */

	  emit_insn (gen_blockage ());
	  FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
				      hard_frame_pointer_rtx, GEN_INT (-1))));
	}
    }
}
6912
/* Output the rest of the textual info surrounding the epilogue.  */

void
alpha_end_function (file, fnname, decl)
     FILE *file;
     const char *fnname;
     tree decl ATTRIBUTE_UNUSED;
{
  /* End the function.  Closes the .ent emitted by alpha_start_function.  */
  if (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive)
    {
      fputs ("\t.end ", file);
      assemble_name (file, fnname);
      putc ('\n', file);
    }
  inside_function = FALSE;

  /* Show that we know this function if it is called again.

     Don't do this for global functions in object files destined for a
     shared library because the function may be overridden by the application
     or other libraries.  Similarly, don't do this for weak functions.

     Don't do this for functions not defined in the .text section, as
     otherwise it's not unlikely that the destination is out of range
     for a direct branch.  */

  if (!DECL_WEAK (current_function_decl)
      && (!flag_pic || !TREE_PUBLIC (current_function_decl))
      && decl_in_text_section (current_function_decl))
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (current_function_decl), 0)) = 1;

  /* Output jump tables and the static subroutine information block.  */
  if (TARGET_ABI_UNICOSMK)
    {
      unicosmk_output_ssib (file, fnname);
      unicosmk_output_deferred_case_vectors (file);
    }
}
14691f8d
RH
6952
6953/* Emit a tail call to FUNCTION after adjusting THIS by DELTA.
6954
6955 In order to avoid the hordes of differences between generated code
6956 with and without TARGET_EXPLICIT_RELOCS, and to avoid duplicating
6957 lots of code loading up large constants, generate rtl and emit it
6958 instead of going straight to text.
6959
6960 Not sure why this idea hasn't been explored before... */
6961
6962void
6963alpha_output_mi_thunk_osf (file, thunk_fndecl, delta, function)
6964 FILE *file;
6965 tree thunk_fndecl ATTRIBUTE_UNUSED;
6966 HOST_WIDE_INT delta;
6967 tree function;
6968{
6969 HOST_WIDE_INT hi, lo;
6970 rtx this, insn, funexp;
6971
6972 /* We always require a valid GP. */
6973 emit_insn (gen_prologue_ldgp ());
6974 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
6975
6976 /* Find the "this" pointer. If the function returns a structure,
6977 the structure return pointer is in $16. */
6978 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
6979 this = gen_rtx_REG (Pmode, 17);
6980 else
6981 this = gen_rtx_REG (Pmode, 16);
6982
6983 /* Add DELTA. When possible we use ldah+lda. Otherwise load the
6984 entire constant for the add. */
6985 lo = ((delta & 0xffff) ^ 0x8000) - 0x8000;
6986 hi = (((delta - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
6987 if (hi + lo == delta)
6988 {
6989 if (hi)
6990 emit_insn (gen_adddi3 (this, this, GEN_INT (hi)));
6991 if (lo)
6992 emit_insn (gen_adddi3 (this, this, GEN_INT (lo)));
6993 }
6994 else
6995 {
6996 rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0),
6997 delta, -(delta < 0));
6998 emit_insn (gen_adddi3 (this, this, tmp));
6999 }
7000
7001 /* Generate a tail call to the target function. */
7002 if (! TREE_USED (function))
7003 {
7004 assemble_external (function);
7005 TREE_USED (function) = 1;
7006 }
7007 funexp = XEXP (DECL_RTL (function), 0);
7008 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
7009 insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
7010 SIBLING_CALL_P (insn) = 1;
7011
7012 /* Run just enough of rest_of_compilation to get the insns emitted.
7013 There's not really enough bulk here to make other passes such as
7014 instruction scheduling worth while. Note that use_thunk calls
7015 assemble_start_function and assemble_end_function. */
7016 insn = get_insns ();
7017 shorten_branches (insn);
7018 final_start_function (insn, file, 1);
7019 final (insn, file, 1, 0);
7020 final_end_function ();
7021}
48f6bfac
RK
7022\f
7023/* Debugging support. */
7024
7025#include "gstab.h"
7026
7027/* Count the number of sdb related labels are generated (to find block
7028 start and end boundaries). */
7029
7030int sdb_label_count = 0;
7031
7032/* Next label # for each statement. */
7033
7034static int sym_lineno = 0;
7035
7036/* Count the number of .file directives, so that .loc is up to date. */
7037
7038static int num_source_filenames = 0;
7039
7040/* Name of the file containing the current function. */
7041
df45c7ea 7042static const char *current_function_file = "";
48f6bfac
RK
7043
7044/* Offsets to alpha virtual arg/local debugging pointers. */
7045
7046long alpha_arg_offset;
7047long alpha_auto_offset;
7048\f
7049/* Emit a new filename to a stream. */
7050
7051void
7052alpha_output_filename (stream, name)
7053 FILE *stream;
aa388f29 7054 const char *name;
48f6bfac
RK
7055{
7056 static int first_time = TRUE;
7057 char ltext_label_name[100];
7058
7059 if (first_time)
7060 {
7061 first_time = FALSE;
7062 ++num_source_filenames;
7063 current_function_file = name;
7064 fprintf (stream, "\t.file\t%d ", num_source_filenames);
7065 output_quoted_string (stream, name);
7066 fprintf (stream, "\n");
7067 if (!TARGET_GAS && write_symbols == DBX_DEBUG)
7068 fprintf (stream, "\t#@stabs\n");
7069 }
7070
6af601b3 7071 else if (write_symbols == DBX_DEBUG)
48f6bfac
RK
7072 {
7073 ASM_GENERATE_INTERNAL_LABEL (ltext_label_name, "Ltext", 0);
8202cda0 7074 fprintf (stream, "%s", ASM_STABS_OP);
48f6bfac
RK
7075 output_quoted_string (stream, name);
7076 fprintf (stream, ",%d,0,0,%s\n", N_SOL, &ltext_label_name[1]);
7077 }
7078
7079 else if (name != current_function_file
5665caa2 7080 && strcmp (name, current_function_file) != 0)
48f6bfac
RK
7081 {
7082 if (inside_function && ! TARGET_GAS)
7083 fprintf (stream, "\t#.file\t%d ", num_source_filenames);
7084 else
7085 {
7086 ++num_source_filenames;
7087 current_function_file = name;
7088 fprintf (stream, "\t.file\t%d ", num_source_filenames);
7089 }
7090
7091 output_quoted_string (stream, name);
7092 fprintf (stream, "\n");
7093 }
7094}
7095\f
7096/* Emit a linenumber to a stream. */
7097
7098void
7099alpha_output_lineno (stream, line)
7100 FILE *stream;
7101 int line;
7102{
6af601b3 7103 if (write_symbols == DBX_DEBUG)
48f6bfac
RK
7104 {
7105 /* mips-tfile doesn't understand .stabd directives. */
7106 ++sym_lineno;
8202cda0 7107 fprintf (stream, "$LM%d:\n%s%d,0,%d,$LM%d\n",
48f6bfac
RK
7108 sym_lineno, ASM_STABN_OP, N_SLINE, line, sym_lineno);
7109 }
7110 else
40828e35 7111 fprintf (stream, "\n\t.loc\t%d %d\n", num_source_filenames, line);
48f6bfac 7112}
6245e3df
RK
7113\f
7114/* Structure to show the current status of registers and memory. */
7115
7116struct shadow_summary
7117{
7118 struct {
1d11bf18
RH
7119 unsigned int i : 31; /* Mask of int regs */
7120 unsigned int fp : 31; /* Mask of fp regs */
7121 unsigned int mem : 1; /* mem == imem | fpmem */
6245e3df
RK
7122 } used, defd;
7123};
7124
f6da8bc3
KG
7125static void summarize_insn PARAMS ((rtx, struct shadow_summary *, int));
7126static void alpha_handle_trap_shadows PARAMS ((rtx));
9c0e94a5 7127
6245e3df
RK
7128/* Summary the effects of expression X on the machine. Update SUM, a pointer
7129 to the summary structure. SET is nonzero if the insn is setting the
7130 object, otherwise zero. */
7131
7132static void
7133summarize_insn (x, sum, set)
7134 rtx x;
7135 struct shadow_summary *sum;
7136 int set;
7137{
6f7d635c 7138 const char *format_ptr;
6245e3df
RK
7139 int i, j;
7140
7141 if (x == 0)
7142 return;
7143
7144 switch (GET_CODE (x))
7145 {
7146 /* ??? Note that this case would be incorrect if the Alpha had a
7147 ZERO_EXTRACT in SET_DEST. */
7148 case SET:
7149 summarize_insn (SET_SRC (x), sum, 0);
7150 summarize_insn (SET_DEST (x), sum, 1);
7151 break;
7152
7153 case CLOBBER:
7154 summarize_insn (XEXP (x, 0), sum, 1);
7155 break;
7156
7157 case USE:
7158 summarize_insn (XEXP (x, 0), sum, 0);
7159 break;
7160
f4e31cf5
RH
7161 case ASM_OPERANDS:
7162 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7163 summarize_insn (ASM_OPERANDS_INPUT (x, i), sum, 0);
7164 break;
7165
6245e3df 7166 case PARALLEL:
8fed04e5 7167 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6245e3df
RK
7168 summarize_insn (XVECEXP (x, 0, i), sum, 0);
7169 break;
7170
f4e31cf5 7171 case SUBREG:
9c0e94a5
RH
7172 summarize_insn (SUBREG_REG (x), sum, 0);
7173 break;
f4e31cf5 7174
6245e3df
RK
7175 case REG:
7176 {
7177 int regno = REGNO (x);
948068e2 7178 unsigned long mask = ((unsigned long) 1) << (regno % 32);
6245e3df
RK
7179
7180 if (regno == 31 || regno == 63)
7181 break;
7182
7183 if (set)
7184 {
7185 if (regno < 32)
7186 sum->defd.i |= mask;
7187 else
7188 sum->defd.fp |= mask;
7189 }
7190 else
7191 {
7192 if (regno < 32)
7193 sum->used.i |= mask;
7194 else
7195 sum->used.fp |= mask;
7196 }
7197 }
7198 break;
7199
7200 case MEM:
7201 if (set)
7202 sum->defd.mem = 1;
7203 else
7204 sum->used.mem = 1;
7205
7206 /* Find the regs used in memory address computation: */
7207 summarize_insn (XEXP (x, 0), sum, 0);
7208 break;
7209
8ba46994
RK
7210 case CONST_INT: case CONST_DOUBLE:
7211 case SYMBOL_REF: case LABEL_REF: case CONST:
368a1647 7212 case SCRATCH: case ASM_INPUT:
8ba46994
RK
7213 break;
7214
6245e3df
RK
7215 /* Handle common unary and binary ops for efficiency. */
7216 case COMPARE: case PLUS: case MINUS: case MULT: case DIV:
7217 case MOD: case UDIV: case UMOD: case AND: case IOR:
7218 case XOR: case ASHIFT: case ROTATE: case ASHIFTRT: case LSHIFTRT:
7219 case ROTATERT: case SMIN: case SMAX: case UMIN: case UMAX:
7220 case NE: case EQ: case GE: case GT: case LE:
7221 case LT: case GEU: case GTU: case LEU: case LTU:
7222 summarize_insn (XEXP (x, 0), sum, 0);
7223 summarize_insn (XEXP (x, 1), sum, 0);
7224 break;
7225
7226 case NEG: case NOT: case SIGN_EXTEND: case ZERO_EXTEND:
7227 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: case FLOAT:
7228 case FIX: case UNSIGNED_FLOAT: case UNSIGNED_FIX: case ABS:
7229 case SQRT: case FFS:
7230 summarize_insn (XEXP (x, 0), sum, 0);
7231 break;
7232
7233 default:
7234 format_ptr = GET_RTX_FORMAT (GET_CODE (x));
8fed04e5 7235 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
bed95fa1 7236 switch (format_ptr[i])
6245e3df
RK
7237 {
7238 case 'e':
7239 summarize_insn (XEXP (x, i), sum, 0);
7240 break;
7241
7242 case 'E':
8fed04e5 7243 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6245e3df
RK
7244 summarize_insn (XVECEXP (x, i, j), sum, 0);
7245 break;
7246
2b01d264
RH
7247 case 'i':
7248 break;
7249
6245e3df
RK
7250 default:
7251 abort ();
7252 }
7253 }
7254}
6245e3df 7255
9c0e94a5
RH
7256/* Ensure a sufficient number of `trapb' insns are in the code when
7257 the user requests code with a trap precision of functions or
7258 instructions.
7259
7260 In naive mode, when the user requests a trap-precision of
7261 "instruction", a trapb is needed after every instruction that may
7262 generate a trap. This ensures that the code is resumption safe but
7263 it is also slow.
7264
7265 When optimizations are turned on, we delay issuing a trapb as long
7266 as possible. In this context, a trap shadow is the sequence of
7267 instructions that starts with a (potentially) trap generating
7268 instruction and extends to the next trapb or call_pal instruction
7269 (but GCC never generates call_pal by itself). We can delay (and
7270 therefore sometimes omit) a trapb subject to the following
7271 conditions:
7272
7273 (a) On entry to the trap shadow, if any Alpha register or memory
7274 location contains a value that is used as an operand value by some
7275 instruction in the trap shadow (live on entry), then no instruction
7276 in the trap shadow may modify the register or memory location.
7277
7278 (b) Within the trap shadow, the computation of the base register
7279 for a memory load or store instruction may not involve using the
7280 result of an instruction that might generate an UNPREDICTABLE
7281 result.
7282
7283 (c) Within the trap shadow, no register may be used more than once
7284 as a destination register. (This is to make life easier for the
7285 trap-handler.)
6245e3df 7286
2ea844d3 7287 (d) The trap shadow may not include any branch instructions. */
6245e3df 7288
2ea844d3
RH
7289static void
7290alpha_handle_trap_shadows (insns)
7291 rtx insns;
6245e3df 7292{
2ea844d3
RH
7293 struct shadow_summary shadow;
7294 int trap_pending, exception_nesting;
68aed21b 7295 rtx i, n;
6245e3df 7296
2ea844d3
RH
7297 trap_pending = 0;
7298 exception_nesting = 0;
7299 shadow.used.i = 0;
7300 shadow.used.fp = 0;
7301 shadow.used.mem = 0;
7302 shadow.defd = shadow.used;
7303
7304 for (i = insns; i ; i = NEXT_INSN (i))
7305 {
7306 if (GET_CODE (i) == NOTE)
7307 {
7308 switch (NOTE_LINE_NUMBER (i))
7309 {
7310 case NOTE_INSN_EH_REGION_BEG:
7311 exception_nesting++;
7312 if (trap_pending)
7313 goto close_shadow;
7314 break;
7315
7316 case NOTE_INSN_EH_REGION_END:
7317 exception_nesting--;
7318 if (trap_pending)
7319 goto close_shadow;
7320 break;
7321
7322 case NOTE_INSN_EPILOGUE_BEG:
7323 if (trap_pending && alpha_tp >= ALPHA_TP_FUNC)
7324 goto close_shadow;
7325 break;
7326 }
7327 }
7328 else if (trap_pending)
7329 {
7330 if (alpha_tp == ALPHA_TP_FUNC)
7331 {
7332 if (GET_CODE (i) == JUMP_INSN
7333 && GET_CODE (PATTERN (i)) == RETURN)
7334 goto close_shadow;
7335 }
7336 else if (alpha_tp == ALPHA_TP_INSN)
7337 {
7338 if (optimize > 0)
7339 {
7340 struct shadow_summary sum;
7341
7342 sum.used.i = 0;
7343 sum.used.fp = 0;
7344 sum.used.mem = 0;
f4e31cf5 7345 sum.defd = sum.used;
2ea844d3
RH
7346
7347 switch (GET_CODE (i))
7348 {
7349 case INSN:
bb02e7ea
RH
7350 /* Annoyingly, get_attr_trap will abort on these. */
7351 if (GET_CODE (PATTERN (i)) == USE
7352 || GET_CODE (PATTERN (i)) == CLOBBER)
2ea844d3
RH
7353 break;
7354
7355 summarize_insn (PATTERN (i), &sum, 0);
7356
7357 if ((sum.defd.i & shadow.defd.i)
7358 || (sum.defd.fp & shadow.defd.fp))
7359 {
7360 /* (c) would be violated */
7361 goto close_shadow;
7362 }
7363
7364 /* Combine shadow with summary of current insn: */
7365 shadow.used.i |= sum.used.i;
7366 shadow.used.fp |= sum.used.fp;
7367 shadow.used.mem |= sum.used.mem;
7368 shadow.defd.i |= sum.defd.i;
7369 shadow.defd.fp |= sum.defd.fp;
7370 shadow.defd.mem |= sum.defd.mem;
7371
7372 if ((sum.defd.i & shadow.used.i)
7373 || (sum.defd.fp & shadow.used.fp)
7374 || (sum.defd.mem & shadow.used.mem))
7375 {
7376 /* (a) would be violated (also takes care of (b)) */
7377 if (get_attr_trap (i) == TRAP_YES
7378 && ((sum.defd.i & sum.used.i)
7379 || (sum.defd.fp & sum.used.fp)))
7380 abort ();
7381
7382 goto close_shadow;
7383 }
7384 break;
7385
7386 case JUMP_INSN:
7387 case CALL_INSN:
7388 case CODE_LABEL:
7389 goto close_shadow;
7390
7391 default:
6245e3df 7392 abort ();
2ea844d3
RH
7393 }
7394 }
7395 else
7396 {
7397 close_shadow:
68aed21b
RH
7398 n = emit_insn_before (gen_trapb (), i);
7399 PUT_MODE (n, TImode);
7400 PUT_MODE (i, TImode);
2ea844d3
RH
7401 trap_pending = 0;
7402 shadow.used.i = 0;
7403 shadow.used.fp = 0;
7404 shadow.used.mem = 0;
7405 shadow.defd = shadow.used;
7406 }
7407 }
7408 }
6245e3df 7409
4f3f5e9f
RH
7410 if ((exception_nesting > 0 || alpha_tp >= ALPHA_TP_FUNC)
7411 && GET_CODE (i) == INSN
7412 && GET_CODE (PATTERN (i)) != USE
7413 && GET_CODE (PATTERN (i)) != CLOBBER
7414 && get_attr_trap (i) == TRAP_YES)
7415 {
7416 if (optimize && !trap_pending)
7417 summarize_insn (PATTERN (i), &shadow, 0);
7418 trap_pending = 1;
7419 }
6245e3df
RK
7420 }
7421}
68aed21b 7422\f
68aed21b
RH
7423/* Alpha can only issue instruction groups simultaneously if they are
7424 suitibly aligned. This is very processor-specific. */
7425
3873d24b
RH
7426enum alphaev4_pipe {
7427 EV4_STOP = 0,
7428 EV4_IB0 = 1,
7429 EV4_IB1 = 2,
7430 EV4_IBX = 4
7431};
7432
68aed21b
RH
7433enum alphaev5_pipe {
7434 EV5_STOP = 0,
7435 EV5_NONE = 1,
7436 EV5_E01 = 2,
7437 EV5_E0 = 4,
7438 EV5_E1 = 8,
7439 EV5_FAM = 16,
7440 EV5_FA = 32,
7441 EV5_FM = 64
7442};
7443
f6da8bc3
KG
7444static enum alphaev4_pipe alphaev4_insn_pipe PARAMS ((rtx));
7445static enum alphaev5_pipe alphaev5_insn_pipe PARAMS ((rtx));
b81f53a1
RK
7446static rtx alphaev4_next_group PARAMS ((rtx, int *, int *));
7447static rtx alphaev5_next_group PARAMS ((rtx, int *, int *));
7448static rtx alphaev4_next_nop PARAMS ((int *));
7449static rtx alphaev5_next_nop PARAMS ((int *));
3873d24b
RH
7450
7451static void alpha_align_insns
b81f53a1 7452 PARAMS ((rtx, unsigned int, rtx (*)(rtx, int *, int *), rtx (*)(int *)));
3873d24b
RH
7453
7454static enum alphaev4_pipe
7455alphaev4_insn_pipe (insn)
7456 rtx insn;
7457{
7458 if (recog_memoized (insn) < 0)
7459 return EV4_STOP;
7460 if (get_attr_length (insn) != 4)
7461 return EV4_STOP;
7462
7463 switch (get_attr_type (insn))
7464 {
7465 case TYPE_ILD:
7466 case TYPE_FLD:
7467 return EV4_IBX;
7468
7469 case TYPE_LDSYM:
7470 case TYPE_IADD:
7471 case TYPE_ILOG:
7472 case TYPE_ICMOV:
7473 case TYPE_ICMP:
7474 case TYPE_IST:
7475 case TYPE_FST:
7476 case TYPE_SHIFT:
7477 case TYPE_IMUL:
7478 case TYPE_FBR:
7479 return EV4_IB0;
7480
7481 case TYPE_MISC:
7482 case TYPE_IBR:
7483 case TYPE_JSR:
7484 case TYPE_FCPYS:
7485 case TYPE_FCMOV:
7486 case TYPE_FADD:
7487 case TYPE_FDIV:
7488 case TYPE_FMUL:
7489 return EV4_IB1;
7490
7491 default:
b81f53a1 7492 abort ();
3873d24b
RH
7493 }
7494}
7495
68aed21b
RH
7496static enum alphaev5_pipe
7497alphaev5_insn_pipe (insn)
7498 rtx insn;
7499{
7500 if (recog_memoized (insn) < 0)
7501 return EV5_STOP;
7502 if (get_attr_length (insn) != 4)
7503 return EV5_STOP;
7504
7505 switch (get_attr_type (insn))
7506 {
7507 case TYPE_ILD:
7508 case TYPE_FLD:
7509 case TYPE_LDSYM:
7510 case TYPE_IADD:
7511 case TYPE_ILOG:
7512 case TYPE_ICMOV:
7513 case TYPE_ICMP:
7514 return EV5_E01;
7515
7516 case TYPE_IST:
7517 case TYPE_FST:
7518 case TYPE_SHIFT:
7519 case TYPE_IMUL:
7520 case TYPE_MISC:
7521 case TYPE_MVI:
7522 return EV5_E0;
7523
7524 case TYPE_IBR:
7525 case TYPE_JSR:
7526 return EV5_E1;
7527
7528 case TYPE_FCPYS:
7529 return EV5_FAM;
7530
7531 case TYPE_FBR:
7532 case TYPE_FCMOV:
7533 case TYPE_FADD:
7534 case TYPE_FDIV:
7535 return EV5_FA;
7536
7537 case TYPE_FMUL:
7538 return EV5_FM;
2c01018f
RH
7539
7540 default:
7541 abort();
68aed21b 7542 }
68aed21b
RH
7543}
7544
3873d24b
RH
7545/* IN_USE is a mask of the slots currently filled within the insn group.
7546 The mask bits come from alphaev4_pipe above. If EV4_IBX is set, then
7547 the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1.
7548
7549 LEN is, of course, the length of the group in bytes. */
7550
7551static rtx
7552alphaev4_next_group (insn, pin_use, plen)
7553 rtx insn;
7554 int *pin_use, *plen;
7555{
7556 int len, in_use;
7557
7558 len = in_use = 0;
7559
2c3c49de 7560 if (! INSN_P (insn)
3873d24b
RH
7561 || GET_CODE (PATTERN (insn)) == CLOBBER
7562 || GET_CODE (PATTERN (insn)) == USE)
7563 goto next_and_done;
7564
7565 while (1)
7566 {
7567 enum alphaev4_pipe pipe;
7568
7569 pipe = alphaev4_insn_pipe (insn);
7570 switch (pipe)
7571 {
7572 case EV4_STOP:
7573 /* Force complex instructions to start new groups. */
7574 if (in_use)
7575 goto done;
7576
7577 /* If this is a completely unrecognized insn, its an asm.
7578 We don't know how long it is, so record length as -1 to
7579 signal a needed realignment. */
7580 if (recog_memoized (insn) < 0)
7581 len = -1;
7582 else
7583 len = get_attr_length (insn);
7584 goto next_and_done;
7585
7586 case EV4_IBX:
7587 if (in_use & EV4_IB0)
7588 {
7589 if (in_use & EV4_IB1)
7590 goto done;
7591 in_use |= EV4_IB1;
7592 }
7593 else
7594 in_use |= EV4_IB0 | EV4_IBX;
7595 break;
7596
7597 case EV4_IB0:
7598 if (in_use & EV4_IB0)
7599 {
7600 if (!(in_use & EV4_IBX) || (in_use & EV4_IB1))
7601 goto done;
7602 in_use |= EV4_IB1;
7603 }
7604 in_use |= EV4_IB0;
7605 break;
7606
7607 case EV4_IB1:
7608 if (in_use & EV4_IB1)
7609 goto done;
7610 in_use |= EV4_IB1;
7611 break;
7612
7613 default:
7614 abort();
7615 }
7616 len += 4;
7617
7618 /* Haifa doesn't do well scheduling branches. */
7619 if (GET_CODE (insn) == JUMP_INSN)
7620 goto next_and_done;
7621
7622 next:
7623 insn = next_nonnote_insn (insn);
7624
2c3c49de 7625 if (!insn || ! INSN_P (insn))
3873d24b
RH
7626 goto done;
7627
7628 /* Let Haifa tell us where it thinks insn group boundaries are. */
7629 if (GET_MODE (insn) == TImode)
7630 goto done;
7631
7632 if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
7633 goto next;
7634 }
7635
7636 next_and_done:
7637 insn = next_nonnote_insn (insn);
7638
7639 done:
7640 *plen = len;
7641 *pin_use = in_use;
7642 return insn;
7643}
7644
7645/* IN_USE is a mask of the slots currently filled within the insn group.
7646 The mask bits come from alphaev5_pipe above. If EV5_E01 is set, then
7647 the insn in EV5_E0 can be swapped by the hardware into EV5_E1.
68aed21b
RH
7648
7649 LEN is, of course, the length of the group in bytes. */
7650
7651static rtx
7652alphaev5_next_group (insn, pin_use, plen)
7653 rtx insn;
7654 int *pin_use, *plen;
7655{
7656 int len, in_use;
7657
7658 len = in_use = 0;
7659
2c3c49de 7660 if (! INSN_P (insn)
2c01018f
RH
7661 || GET_CODE (PATTERN (insn)) == CLOBBER
7662 || GET_CODE (PATTERN (insn)) == USE)
7663 goto next_and_done;
68aed21b 7664
2c01018f 7665 while (1)
68aed21b
RH
7666 {
7667 enum alphaev5_pipe pipe;
68aed21b
RH
7668
7669 pipe = alphaev5_insn_pipe (insn);
7670 switch (pipe)
7671 {
7672 case EV5_STOP:
7673 /* Force complex instructions to start new groups. */
7674 if (in_use)
7675 goto done;
7676
7677 /* If this is a completely unrecognized insn, its an asm.
7678 We don't know how long it is, so record length as -1 to
7679 signal a needed realignment. */
7680 if (recog_memoized (insn) < 0)
7681 len = -1;
7682 else
7683 len = get_attr_length (insn);
2c01018f 7684 goto next_and_done;
68aed21b
RH
7685
7686 /* ??? Most of the places below, we would like to abort, as
7687 it would indicate an error either in Haifa, or in the
7688 scheduling description. Unfortunately, Haifa never
7689 schedules the last instruction of the BB, so we don't
7690 have an accurate TI bit to go off. */
7691 case EV5_E01:
7692 if (in_use & EV5_E0)
7693 {
7694 if (in_use & EV5_E1)
7695 goto done;
7696 in_use |= EV5_E1;
7697 }
7698 else
7699 in_use |= EV5_E0 | EV5_E01;
7700 break;
7701
7702 case EV5_E0:
7703 if (in_use & EV5_E0)
7704 {
3873d24b 7705 if (!(in_use & EV5_E01) || (in_use & EV5_E1))
68aed21b
RH
7706 goto done;
7707 in_use |= EV5_E1;
7708 }
7709 in_use |= EV5_E0;
7710 break;
7711
7712 case EV5_E1:
7713 if (in_use & EV5_E1)
7714 goto done;
7715 in_use |= EV5_E1;
7716 break;
7717
7718 case EV5_FAM:
7719 if (in_use & EV5_FA)
7720 {
7721 if (in_use & EV5_FM)
7722 goto done;
7723 in_use |= EV5_FM;
7724 }
7725 else
7726 in_use |= EV5_FA | EV5_FAM;
7727 break;
7728
7729 case EV5_FA:
7730 if (in_use & EV5_FA)
7731 goto done;
7732 in_use |= EV5_FA;
7733 break;
7734
7735 case EV5_FM:
7736 if (in_use & EV5_FM)
7737 goto done;
7738 in_use |= EV5_FM;
7739 break;
7740
7741 case EV5_NONE:
7742 break;
7743
7744 default:
7745 abort();
7746 }
7747 len += 4;
7748
7749 /* Haifa doesn't do well scheduling branches. */
7750 /* ??? If this is predicted not-taken, slotting continues, except
7751 that no more IBR, FBR, or JSR insns may be slotted. */
7752 if (GET_CODE (insn) == JUMP_INSN)
2c01018f 7753 goto next_and_done;
68aed21b 7754
2c01018f 7755 next:
68aed21b
RH
7756 insn = next_nonnote_insn (insn);
7757
2c3c49de 7758 if (!insn || ! INSN_P (insn))
68aed21b 7759 goto done;
a874dd18 7760
68aed21b
RH
7761 /* Let Haifa tell us where it thinks insn group boundaries are. */
7762 if (GET_MODE (insn) == TImode)
7763 goto done;
7764
2c01018f
RH
7765 if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
7766 goto next;
68aed21b 7767 }
2c01018f
RH
7768
7769 next_and_done:
7770 insn = next_nonnote_insn (insn);
68aed21b
RH
7771
7772 done:
7773 *plen = len;
7774 *pin_use = in_use;
7775 return insn;
68aed21b
RH
7776}
7777
3873d24b
RH
7778static rtx
7779alphaev4_next_nop (pin_use)
7780 int *pin_use;
7781{
7782 int in_use = *pin_use;
7783 rtx nop;
7784
7785 if (!(in_use & EV4_IB0))
7786 {
7787 in_use |= EV4_IB0;
7788 nop = gen_nop ();
7789 }
7790 else if ((in_use & (EV4_IBX|EV4_IB1)) == EV4_IBX)
7791 {
7792 in_use |= EV4_IB1;
7793 nop = gen_nop ();
7794 }
7795 else if (TARGET_FP && !(in_use & EV4_IB1))
7796 {
7797 in_use |= EV4_IB1;
7798 nop = gen_fnop ();
7799 }
7800 else
7801 nop = gen_unop ();
7802
7803 *pin_use = in_use;
7804 return nop;
7805}
7806
7807static rtx
7808alphaev5_next_nop (pin_use)
7809 int *pin_use;
7810{
7811 int in_use = *pin_use;
7812 rtx nop;
7813
7814 if (!(in_use & EV5_E1))
7815 {
7816 in_use |= EV5_E1;
7817 nop = gen_nop ();
7818 }
7819 else if (TARGET_FP && !(in_use & EV5_FA))
7820 {
7821 in_use |= EV5_FA;
7822 nop = gen_fnop ();
7823 }
7824 else if (TARGET_FP && !(in_use & EV5_FM))
7825 {
7826 in_use |= EV5_FM;
7827 nop = gen_fnop ();
7828 }
7829 else
7830 nop = gen_unop ();
7831
7832 *pin_use = in_use;
7833 return nop;
7834}
7835
7836/* The instruction group alignment main loop. */
7837
68aed21b 7838static void
b81f53a1 7839alpha_align_insns (insns, max_align, next_group, next_nop)
68aed21b 7840 rtx insns;
b81f53a1
RK
7841 unsigned int max_align;
7842 rtx (*next_group) PARAMS ((rtx, int *, int *));
7843 rtx (*next_nop) PARAMS ((int *));
68aed21b
RH
7844{
7845 /* ALIGN is the known alignment for the insn group. */
b81f53a1 7846 unsigned int align;
68aed21b
RH
7847 /* OFS is the offset of the current insn in the insn group. */
7848 int ofs;
7849 int prev_in_use, in_use, len;
7850 rtx i, next;
7851
7852 /* Let shorten branches care for assigning alignments to code labels. */
7853 shorten_branches (insns);
7854
30864e14
RH
7855 if (align_functions < 4)
7856 align = 4;
21cb9e60 7857 else if ((unsigned int) align_functions < max_align)
30864e14
RH
7858 align = align_functions;
7859 else
7860 align = max_align;
80db34d8 7861
68aed21b 7862 ofs = prev_in_use = 0;
68aed21b
RH
7863 i = insns;
7864 if (GET_CODE (i) == NOTE)
7865 i = next_nonnote_insn (i);
7866
7867 while (i)
7868 {
b81f53a1 7869 next = (*next_group) (i, &in_use, &len);
68aed21b
RH
7870
7871 /* When we see a label, resync alignment etc. */
7872 if (GET_CODE (i) == CODE_LABEL)
7873 {
b81f53a1
RK
7874 unsigned int new_align = 1 << label_to_alignment (i);
7875
68aed21b
RH
7876 if (new_align >= align)
7877 {
3873d24b 7878 align = new_align < max_align ? new_align : max_align;
68aed21b
RH
7879 ofs = 0;
7880 }
b81f53a1 7881
68aed21b
RH
7882 else if (ofs & (new_align-1))
7883 ofs = (ofs | (new_align-1)) + 1;
7884 if (len != 0)
7885 abort();
7886 }
7887
7888 /* Handle complex instructions special. */
7889 else if (in_use == 0)
7890 {
7891 /* Asms will have length < 0. This is a signal that we have
7892 lost alignment knowledge. Assume, however, that the asm
7893 will not mis-align instructions. */
7894 if (len < 0)
7895 {
7896 ofs = 0;
7897 align = 4;
7898 len = 0;
7899 }
7900 }
7901
7902 /* If the known alignment is smaller than the recognized insn group,
7903 realign the output. */
1eb356b9 7904 else if ((int) align < len)
68aed21b 7905 {
b81f53a1 7906 unsigned int new_log_align = len > 8 ? 4 : 3;
11cb1475 7907 rtx prev, where;
68aed21b 7908
11cb1475 7909 where = prev = prev_nonnote_insn (i);
68aed21b
RH
7910 if (!where || GET_CODE (where) != CODE_LABEL)
7911 where = i;
7912
11cb1475
RH
7913 /* Can't realign between a call and its gp reload. */
7914 if (! (TARGET_EXPLICIT_RELOCS
7915 && prev && GET_CODE (prev) == CALL_INSN))
7916 {
7917 emit_insn_before (gen_realign (GEN_INT (new_log_align)), where);
7918 align = 1 << new_log_align;
7919 ofs = 0;
7920 }
68aed21b
RH
7921 }
7922
7923 /* If the group won't fit in the same INT16 as the previous,
7924 we need to add padding to keep the group together. Rather
7925 than simply leaving the insn filling to the assembler, we
7926 can make use of the knowledge of what sorts of instructions
7927 were issued in the previous group to make sure that all of
7928 the added nops are really free. */
1eb356b9 7929 else if (ofs + len > (int) align)
68aed21b
RH
7930 {
7931 int nop_count = (align - ofs) / 4;
7932 rtx where;
7933
11cb1475
RH
7934 /* Insert nops before labels, branches, and calls to truely merge
7935 the execution of the nops with the previous instruction group. */
68aed21b 7936 where = prev_nonnote_insn (i);
3873d24b 7937 if (where)
68aed21b 7938 {
3873d24b 7939 if (GET_CODE (where) == CODE_LABEL)
68aed21b 7940 {
3873d24b
RH
7941 rtx where2 = prev_nonnote_insn (where);
7942 if (where2 && GET_CODE (where2) == JUMP_INSN)
7943 where = where2;
68aed21b 7944 }
11cb1475 7945 else if (GET_CODE (where) == INSN)
3873d24b 7946 where = i;
68aed21b 7947 }
3873d24b
RH
7948 else
7949 where = i;
7950
7951 do
7952 emit_insn_before ((*next_nop)(&prev_in_use), where);
68aed21b
RH
7953 while (--nop_count);
7954 ofs = 0;
7955 }
7956
7957 ofs = (ofs + len) & (align - 1);
7958 prev_in_use = in_use;
7959 i = next;
7960 }
7961}
68aed21b 7962\f
f5143c46 7963/* Machine dependent reorg pass. */
2ea844d3
RH
7964
7965void
7966alpha_reorg (insns)
7967 rtx insns;
7968{
68aed21b
RH
7969 if (alpha_tp != ALPHA_TP_PROG || flag_exceptions)
7970 alpha_handle_trap_shadows (insns);
7971
68aed21b
RH
7972 /* Due to the number of extra trapb insns, don't bother fixing up
7973 alignment when trap precision is instruction. Moreover, we can
b81f53a1 7974 only do our job when sched2 is run. */
68aed21b
RH
7975 if (optimize && !optimize_size
7976 && alpha_tp != ALPHA_TP_INSN
7977 && flag_schedule_insns_after_reload)
7978 {
3873d24b 7979 if (alpha_cpu == PROCESSOR_EV4)
b81f53a1 7980 alpha_align_insns (insns, 8, alphaev4_next_group, alphaev4_next_nop);
3873d24b 7981 else if (alpha_cpu == PROCESSOR_EV5)
b81f53a1 7982 alpha_align_insns (insns, 16, alphaev5_next_group, alphaev5_next_nop);
68aed21b 7983 }
2ea844d3 7984}
2ea844d3 7985\f
a874dd18
RK
7986/* Check a floating-point value for validity for a particular machine mode. */
7987
e03c5670 7988static const char * const float_strings[] =
a874dd18 7989{
39d78b32 7990 /* These are for FLOAT_VAX. */
a874dd18
RK
7991 "1.70141173319264430e+38", /* 2^127 (2^24 - 1) / 2^24 */
7992 "-1.70141173319264430e+38",
7993 "2.93873587705571877e-39", /* 2^-128 */
39d78b32
RK
7994 "-2.93873587705571877e-39",
7995 /* These are for the default broken IEEE mode, which traps
7996 on infinity or denormal numbers. */
7997 "3.402823466385288598117e+38", /* 2^128 (1 - 2^-24) */
7998 "-3.402823466385288598117e+38",
7999 "1.1754943508222875079687e-38", /* 2^-126 */
8000 "-1.1754943508222875079687e-38",
a874dd18
RK
8001};
8002
39d78b32 8003static REAL_VALUE_TYPE float_values[8];
a874dd18
RK
8004static int inited_float_values = 0;
8005
8006int
8007check_float_value (mode, d, overflow)
8008 enum machine_mode mode;
8009 REAL_VALUE_TYPE *d;
3c303f52 8010 int overflow ATTRIBUTE_UNUSED;
a874dd18
RK
8011{
8012
8013 if (TARGET_IEEE || TARGET_IEEE_CONFORMANT || TARGET_IEEE_WITH_INEXACT)
8014 return 0;
8015
8016 if (inited_float_values == 0)
8017 {
8018 int i;
39d78b32 8019 for (i = 0; i < 8; i++)
a874dd18
RK
8020 float_values[i] = REAL_VALUE_ATOF (float_strings[i], DFmode);
8021
8022 inited_float_values = 1;
8023 }
8024
8025 if (mode == SFmode)
8026 {
8027 REAL_VALUE_TYPE r;
39d78b32
RK
8028 REAL_VALUE_TYPE *fvptr;
8029
8030 if (TARGET_FLOAT_VAX)
8031 fvptr = &float_values[0];
8032 else
8033 fvptr = &float_values[4];
a874dd18 8034
4e135bdd 8035 memcpy (&r, d, sizeof (REAL_VALUE_TYPE));
39d78b32 8036 if (REAL_VALUES_LESS (fvptr[0], r))
a874dd18 8037 {
6439a1f2 8038 memcpy (d, &fvptr[0], sizeof (REAL_VALUE_TYPE));
a874dd18
RK
8039 return 1;
8040 }
39d78b32 8041 else if (REAL_VALUES_LESS (r, fvptr[1]))
a874dd18 8042 {
6439a1f2 8043 memcpy (d, &fvptr[1], sizeof (REAL_VALUE_TYPE));
a874dd18
RK
8044 return 1;
8045 }
8046 else if (REAL_VALUES_LESS (dconst0, r)
39d78b32 8047 && REAL_VALUES_LESS (r, fvptr[2]))
a874dd18 8048 {
6439a1f2 8049 memcpy (d, &dconst0, sizeof (REAL_VALUE_TYPE));
a874dd18
RK
8050 return 1;
8051 }
8052 else if (REAL_VALUES_LESS (r, dconst0)
39d78b32 8053 && REAL_VALUES_LESS (fvptr[3], r))
a874dd18 8054 {
6439a1f2 8055 memcpy (d, &dconst0, sizeof (REAL_VALUE_TYPE));
a874dd18
RK
8056 return 1;
8057 }
8058 }
8059
8060 return 0;
8061}
30102605 8062\f
be7b80f4 8063#if TARGET_ABI_OPEN_VMS
89cfc2c6 8064
e9a25f70 8065/* Return the VMS argument type corresponding to MODE. */
89cfc2c6 8066
e9a25f70
JL
8067enum avms_arg_type
8068alpha_arg_type (mode)
8069 enum machine_mode mode;
8070{
8071 switch (mode)
89cfc2c6 8072 {
e9a25f70
JL
8073 case SFmode:
8074 return TARGET_FLOAT_VAX ? FF : FS;
8075 case DFmode:
8076 return TARGET_FLOAT_VAX ? FD : FT;
8077 default:
8078 return I64;
89cfc2c6 8079 }
e9a25f70 8080}
89cfc2c6 8081
e9a25f70
JL
8082/* Return an rtx for an integer representing the VMS Argument Information
8083 register value. */
89cfc2c6 8084
aa388f29 8085rtx
e9a25f70
JL
8086alpha_arg_info_reg_val (cum)
8087 CUMULATIVE_ARGS cum;
8088{
8089 unsigned HOST_WIDE_INT regval = cum.num_args;
8090 int i;
89cfc2c6 8091
e9a25f70
JL
8092 for (i = 0; i < 6; i++)
8093 regval |= ((int) cum.atypes[i]) << (i * 3 + 8);
89cfc2c6 8094
e9a25f70
JL
8095 return GEN_INT (regval);
8096}
8097\f
a82c7f05
RH
8098#include <splay-tree.h>
8099
89cfc2c6
RK
8100/* Structure to collect function names for final output
8101 in link section. */
8102
8103enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN};
8104
a82c7f05
RH
8105struct alpha_links
8106{
8107 rtx linkage;
89cfc2c6
RK
8108 enum links_kind kind;
8109};
8110
a82c7f05
RH
8111static splay_tree alpha_links;
8112
8113static int mark_alpha_links_node PARAMS ((splay_tree_node, void *));
8114static void mark_alpha_links PARAMS ((void *));
8115static int alpha_write_one_linkage PARAMS ((splay_tree_node, void *));
8116
8117/* Protect alpha_links from garbage collection. */
8118
8119static int
8120mark_alpha_links_node (node, data)
8121 splay_tree_node node;
8122 void *data ATTRIBUTE_UNUSED;
8123{
8124 struct alpha_links *links = (struct alpha_links *) node->value;
8125 ggc_mark_rtx (links->linkage);
8126 return 0;
8127}
8128
8129static void
8130mark_alpha_links (ptr)
8131 void *ptr;
8132{
8133 splay_tree tree = *(splay_tree *) ptr;
8134 splay_tree_foreach (tree, mark_alpha_links_node, NULL);
8135}
89cfc2c6
RK
8136
8137/* Make (or fake) .linkage entry for function call.
8138
a82c7f05 8139 IS_LOCAL is 0 if name is used in call, 1 if name is used in definition.
89cfc2c6 8140
a82c7f05
RH
8141 Return an SYMBOL_REF rtx for the linkage. */
8142
8143rtx
89cfc2c6 8144alpha_need_linkage (name, is_local)
e03c5670 8145 const char *name;
89cfc2c6
RK
8146 int is_local;
8147{
a82c7f05
RH
8148 splay_tree_node node;
8149 struct alpha_links *al;
89cfc2c6
RK
8150
8151 if (name[0] == '*')
8152 name++;
8153
a82c7f05
RH
8154 if (alpha_links)
8155 {
8156 /* Is this name already defined? */
89cfc2c6 8157
a82c7f05
RH
8158 node = splay_tree_lookup (alpha_links, (splay_tree_key) name);
8159 if (node)
8160 {
8161 al = (struct alpha_links *) node->value;
8162 if (is_local)
8163 {
8164 /* Defined here but external assumed. */
8165 if (al->kind == KIND_EXTERN)
8166 al->kind = KIND_LOCAL;
8167 }
8168 else
8169 {
8170 /* Used here but unused assumed. */
8171 if (al->kind == KIND_UNUSED)
8172 al->kind = KIND_LOCAL;
8173 }
8174 return al->linkage;
8175 }
8176 }
8177 else
8178 {
8179 alpha_links = splay_tree_new ((splay_tree_compare_fn) strcmp,
8180 (splay_tree_delete_key_fn) free,
8181 (splay_tree_delete_key_fn) free);
8182 ggc_add_root (&alpha_links, 1, 1, mark_alpha_links);
8183 }
89cfc2c6 8184
a82c7f05
RH
8185 al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
8186 name = xstrdup (name);
89cfc2c6
RK
8187
8188 /* Assume external if no definition. */
a82c7f05 8189 al->kind = (is_local ? KIND_UNUSED : KIND_EXTERN);
89cfc2c6 8190
a82c7f05 8191 /* Ensure we have an IDENTIFIER so assemble_name can mark it used. */
9398dc27
RK
8192 get_identifier (name);
8193
a82c7f05
RH
8194 /* Construct a SYMBOL_REF for us to call. */
8195 {
8196 size_t name_len = strlen (name);
520a57c8 8197 char *linksym = alloca (name_len + 6);
a82c7f05
RH
8198 linksym[0] = '$';
8199 memcpy (linksym + 1, name, name_len);
8200 memcpy (linksym + 1 + name_len, "..lk", 5);
520a57c8
ZW
8201 al->linkage = gen_rtx_SYMBOL_REF (Pmode,
8202 ggc_alloc_string (linksym, name_len + 5));
a82c7f05
RH
8203 }
8204
8205 splay_tree_insert (alpha_links, (splay_tree_key) name,
8206 (splay_tree_value) al);
89cfc2c6 8207
a82c7f05 8208 return al->linkage;
89cfc2c6
RK
8209}
8210
a82c7f05
RH
/* Splay-tree callback: write the linkage entry for one name to the
   FILE passed in DATA.  Entries that were never referenced (or whose
   kind is still KIND_UNUSED) are skipped.  Always returns 0 so the
   traversal visits every node.  */

static int
alpha_write_one_linkage (node, data)
     splay_tree_node node;
     void *data;
{
  const char *const name = (const char *) node->key;
  struct alpha_links *links = (struct alpha_links *) node->value;
  FILE *stream = (FILE *) data;

  if (links->kind == KIND_UNUSED
      || ! TREE_SYMBOL_REFERENCED (get_identifier (name)))
    return 0;

  /* The label matches the "$<name>..lk" SYMBOL_REF built in
     alpha_need_linkage.  */
  fprintf (stream, "$%s..lk:\n", name);
  if (links->kind == KIND_LOCAL)
    {
      /* Local and used, build linkage pair.  */
      fprintf (stream, "\t.quad %s..en\n", name);
      fprintf (stream, "\t.quad %s\n", name);
    }
  else
    {
      /* External and used, request linkage pair.  */
      fprintf (stream, "\t.linkage %s\n", name);
    }

  return 0;
}
89cfc2c6
RK
8239
8240void
8241alpha_write_linkage (stream)
8242 FILE *stream;
8243{
c1bd46a8
DR
8244 if (alpha_links)
8245 {
8246 readonly_section ();
8247 fprintf (stream, "\t.align 3\n");
8248 splay_tree_foreach (alpha_links, alpha_write_one_linkage, stream);
8249 }
89cfc2c6
RK
8250}
8251
7c262518
RH
8252/* Given a decl, a section name, and whether the decl initializer
8253 has relocs, choose attributes for the section. */
8254
8255#define SECTION_VMS_OVERLAY SECTION_FORGET
c1bd46a8
DR
8256#define SECTION_VMS_GLOBAL SECTION_MACH_DEP
8257#define SECTION_VMS_INITIALIZE (SECTION_VMS_GLOBAL << 1)
7c262518
RH
8258
8259static unsigned int
8260vms_section_type_flags (decl, name, reloc)
8261 tree decl;
8262 const char *name;
8263 int reloc;
8264{
8265 unsigned int flags = default_section_type_flags (decl, name, reloc);
8266
91d231cb
JM
8267 if (decl && DECL_ATTRIBUTES (decl)
8268 && lookup_attribute ("overlaid", DECL_ATTRIBUTES (decl)))
7c262518 8269 flags |= SECTION_VMS_OVERLAY;
c1bd46a8
DR
8270 if (decl && DECL_ATTRIBUTES (decl)
8271 && lookup_attribute ("global", DECL_ATTRIBUTES (decl)))
8272 flags |= SECTION_VMS_GLOBAL;
8273 if (decl && DECL_ATTRIBUTES (decl)
8274 && lookup_attribute ("initialize", DECL_ATTRIBUTES (decl)))
8275 flags |= SECTION_VMS_INITIALIZE;
7c262518
RH
8276
8277 return flags;
8278}
8279
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.  */

static void
vms_asm_named_section (name, flags)
     const char *name;
     unsigned int flags;
{
  fputc ('\n', asm_out_file);
  fprintf (asm_out_file, ".section\t%s", name);

  /* Append the VMS section attributes chosen by
     vms_section_type_flags above.  */
  if (flags & SECTION_VMS_OVERLAY)
    fprintf (asm_out_file, ",OVR");
  if (flags & SECTION_VMS_GLOBAL)
    fprintf (asm_out_file, ",GBL");
  if (flags & SECTION_VMS_INITIALIZE)
    fprintf (asm_out_file, ",NOMOD");
  if (flags & SECTION_DEBUG)
    fprintf (asm_out_file, ",NOWRT");

  fputc ('\n', asm_out_file);
}
8303
2cc07db4
RH
/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.

   Differs from default_ctors_section_asm_out_constructor in that the
   width of the .ctors entry is always 64 bits, rather than the 32 bits
   used by a normal pointer.  */

static void
vms_asm_out_constructor (symbol, priority)
     rtx symbol;
     int priority ATTRIBUTE_UNUSED;
{
  ctors_section ();
  assemble_align (BITS_PER_WORD);
  /* Emit a full 64-bit (UNITS_PER_WORD) entry.  */
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
8321
/* Like vms_asm_out_constructor, but record a global destructor entry
   in the .dtors section, again 64 bits wide.  */

static void
vms_asm_out_destructor (symbol, priority)
     rtx symbol;
     int priority ATTRIBUTE_UNUSED;
{
  dtors_section ();
  assemble_align (BITS_PER_WORD);
  assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
}
89cfc2c6
RK
8331#else
8332
/* Non-VMS stub: no .linkage entries exist, so there is never a
   linkage SYMBOL_REF to return.  */

rtx
alpha_need_linkage (name, is_local)
     const char *name ATTRIBUTE_UNUSED;
     int is_local ATTRIBUTE_UNUSED;
{
  return NULL_RTX;
}
8340
be7b80f4 8341#endif /* TARGET_ABI_OPEN_VMS */
30102605
RH
8342\f
8343#if TARGET_ABI_UNICOSMK
8344
8345static void unicosmk_output_module_name PARAMS ((FILE *));
8346static void unicosmk_output_default_externs PARAMS ((FILE *));
8347static void unicosmk_output_dex PARAMS ((FILE *));
8348static void unicosmk_output_externs PARAMS ((FILE *));
8349static void unicosmk_output_addr_vec PARAMS ((FILE *, rtx));
8350static const char *unicosmk_ssib_name PARAMS ((void));
950a3816 8351static int unicosmk_special_name PARAMS ((const char *));
30102605
RH
8352
/* Define the offset between two registers, one to be eliminated, and the
   other its replacement, at the start of a routine.  */

int
unicosmk_initial_elimination_offset (from, to)
     int from;
     int to;
{
  int fixed_size;

  /* Register save area size; a non-empty save area carries an extra
     48 bytes of fixed overhead.  NOTE(review): presumably DSIB-related
     (unicosmk_gen_dsib allocates a 64-byte DSIB) -- confirm.  */
  fixed_size = alpha_sa_size();
  if (fixed_size != 0)
    fixed_size += 48;

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    return -fixed_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    return 0;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return (ALPHA_ROUND (current_function_outgoing_args_size)
	    + ALPHA_ROUND (get_frame_size()));
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return (ALPHA_ROUND (fixed_size)
	    + ALPHA_ROUND (get_frame_size()
			   + current_function_outgoing_args_size));
  else
    /* Any other elimination pair is invalid for this target.  */
    abort ();
}
8381
8382/* Output the module name for .ident and .end directives. We have to strip
8383 directories and add make sure that the module name starts with a letter
8384 or '$'. */
8385
8386static void
8387unicosmk_output_module_name (file)
8388 FILE *file;
8389{
8390 const char *name;
8391
8392 /* Strip directories. */
8393
8394 name = strrchr (main_input_filename, '/');
8395 if (name)
8396 ++name;
8397 else
8398 name = main_input_filename;
8399
8400 /* CAM only accepts module names that start with a letter or '$'. We
8401 prefix the module name with a '$' if necessary. */
8402
8403 if (!ISALPHA (*name))
8404 fprintf (file, "$%s", name);
8405 else
8406 fputs (name, file);
8407}
8408
/* Output text to appear at the beginning of an assembler file.  */

void
unicosmk_asm_file_start (file)
     FILE *file;
{
  int i;

  fputs ("\t.ident\t", file);
  unicosmk_output_module_name (file);
  fputs ("\n\n", file);

  /* The Unicos/Mk assembler uses different register names.  Instead of trying
     to support them, we simply use micro definitions.  */

  /* CAM has different register names: rN for the integer register N and fN
     for the floating-point register N.  Instead of trying to use these in
     alpha.md, we define the symbols $N and $fN to refer to the appropriate
     register.  */

  for (i = 0; i < 32; ++i)
    fprintf (file, "$%d <- r%d\n", i, i);

  for (i = 0; i < 32; ++i)
    fprintf (file, "$f%d <- f%d\n", i, i);

  putc ('\n', file);

  /* The .align directive fill unused space with zeroes which does not work
     in code sections.  We define the macro 'gcc@code@align' which uses nops
     instead.  Note that it assumes that code sections always have the
     biggest possible alignment since . refers to the current offset from
     the beginning of the section.  */

  fputs ("\t.macro gcc@code@align n\n", file);
  fputs ("gcc@n@bytes = 1 << n\n", file);
  fputs ("gcc@here = . % gcc@n@bytes\n", file);
  fputs ("\t.if ne, gcc@here, 0\n", file);
  fputs ("\t.repeat (gcc@n@bytes - gcc@here) / 4\n", file);
  fputs ("\tbis r31,r31,r31\n", file);
  fputs ("\t.endr\n", file);
  fputs ("\t.endif\n", file);
  fputs ("\t.endm gcc@code@align\n\n", file);

  /* Output extern declarations which should always be visible.  */
  unicosmk_output_default_externs (file);

  /* Open a dummy section.  We always need to be inside a section for the
     section-switching code to work correctly.
     ??? This should be a module id or something like that.  I still have to
     figure out what the rules for those are.  */
  fputs ("\n\t.psect\t$SG00000,data\n", file);
}
8462
/* Output text to appear at the end of an assembler file.  This includes all
   pending extern declarations and DEX expressions.  */

void
unicosmk_asm_file_end (file)
     FILE *file;
{
  /* Close the section opened (or last switched to) earlier.  */
  fputs ("\t.endp\n\n", file);

  /* Output all pending externs.  */

  unicosmk_output_externs (file);

  /* Output dex definitions used for functions whose names conflict with
     register names.  */

  unicosmk_output_dex (file);

  /* Terminate the module; .end takes the same module name as .ident.  */
  fputs ("\t.end\t", file);
  unicosmk_output_module_name (file);
  putc ('\n', file);
}
8485
8486/* Output the definition of a common variable. */
8487
8488void
8489unicosmk_output_common (file, name, size, align)
8490 FILE *file;
8491 const char *name;
8492 int size;
8493 int align;
8494{
8495 tree name_tree;
8496 printf ("T3E__: common %s\n", name);
8497
8498 common_section ();
8499 fputs("\t.endp\n\n\t.psect ", file);
8500 assemble_name(file, name);
8501 fprintf(file, ",%d,common\n", floor_log2 (align / BITS_PER_UNIT));
8502 fprintf(file, "\t.byte\t0:%d\n", size);
8503
8504 /* Mark the symbol as defined in this module. */
8505 name_tree = get_identifier (name);
8506 TREE_ASM_WRITTEN (name_tree) = 1;
8507}
8508
8509#define SECTION_PUBLIC SECTION_MACH_DEP
8510#define SECTION_MAIN (SECTION_PUBLIC << 1)
8511static int current_section_align;
8512
/* Compute section flags for DECL and remember (in the file-static
   current_section_align) the alignment its section will need; that
   value is consumed by unicosmk_asm_named_section.  */

static unsigned int
unicosmk_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc ATTRIBUTE_UNUSED;
{
  unsigned int flags = default_section_type_flags (decl, name, reloc);

  if (!decl)
    return flags;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      current_section_align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
      if (align_functions_log > current_section_align)
	current_section_align = align_functions_log;

      /* "main" gets SECTION_MAIN so unicosmk_asm_named_section emits a
	 .start directive for it.  */
      if (! strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "main"))
	flags |= SECTION_MAIN;
    }
  else
    current_section_align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);

  if (TREE_PUBLIC (decl))
    flags |= SECTION_PUBLIC;

  return flags;
}
8541
/* Generate a section name for decl and associate it with the
   declaration.  */

void
unicosmk_unique_section (decl, reloc)
     tree decl;
     int reloc ATTRIBUTE_UNUSED;
{
  const char *name;
  int len;

  if (!decl)
    abort ();

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  STRIP_NAME_ENCODING (name, name);
  len = strlen (name);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      char *string;

      /* It is essential that we prefix the section name here because
	 otherwise the section names generated for constructors and
	 destructors confuse collect2.  */

      /* len + "code@" (5) + NUL = len + 6.  */
      string = alloca (len + 6);
      sprintf (string, "code@%s", name);
      DECL_SECTION_NAME (decl) = build_string (len + 5, string);
    }
  else if (TREE_PUBLIC (decl))
    /* Public data keeps its own name as the section name.  */
    DECL_SECTION_NAME (decl) = build_string (len, name);
  else
    {
      char *string;

      string = alloca (len + 6);
      sprintf (string, "data@%s", name);
      DECL_SECTION_NAME (decl) = build_string (len + 5, string);
    }
}
8583
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.  */

static void
unicosmk_asm_named_section (name, flags)
     const char *name;
     unsigned int flags;
{
  const char *kind;

  /* Close the previous section.  */

  fputs ("\t.endp\n\n", asm_out_file);

  /* Find out what kind of section we are opening.  */

  /* SECTION_MAIN is set by unicosmk_section_type_flags for "main".  */
  if (flags & SECTION_MAIN)
    fputs ("\t.start\tmain\n", asm_out_file);

  if (flags & SECTION_CODE)
    kind = "code";
  else if (flags & SECTION_PUBLIC)
    kind = "common";
  else
    kind = "data";

  /* current_section_align was recorded by unicosmk_section_type_flags.  */
  if (current_section_align != 0)
    fprintf (asm_out_file, "\t.psect\t%s,%d,%s\n", name,
	     current_section_align, kind);
  else
    fprintf (asm_out_file, "\t.psect\t%s,%s\n", name, kind);
}
8617
/* Give every public decl and every function its own section (see
   unicosmk_unique_section) before its attributes are processed.  */

static void
unicosmk_insert_attributes (decl, attr_ptr)
     tree decl;
     tree *attr_ptr ATTRIBUTE_UNUSED;
{
  if (DECL_P (decl)
      && (TREE_PUBLIC (decl) || TREE_CODE (decl) == FUNCTION_DECL))
    UNIQUE_SECTION (decl, 0);
}
8627
8628/* Output an alignment directive. We have to use the macro 'gcc@code@align'
8629 in code sections because .align fill unused space with zeroes. */
8630
8631void
8632unicosmk_output_align (file, align)
8633 FILE *file;
8634 int align;
8635{
8636 if (inside_function)
8637 fprintf (file, "\tgcc@code@align\t%d\n", align);
8638 else
8639 fprintf (file, "\t.align\t%d\n", align);
8640}
8641
/* Add a case vector to the current function's list of deferred case
   vectors.  Case vectors have to be put into a separate section because CAM
   does not allow data definitions in code sections.  */

void
unicosmk_defer_case_vector (lab, vec)
     rtx lab;
     rtx vec;
{
  struct machine_function *machine = cfun->machine;

  /* Pair the label with the vector and push onto the per-function list;
     unicosmk_output_deferred_case_vectors drains it later.  */
  vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
  machine->addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec,
					  machine->addr_list);
}
8657
/* Output a case vector.  VEC is an EXPR_LIST pairing the vector's label
   with the ADDR_VEC body (see unicosmk_defer_case_vector).  */

static void
unicosmk_output_addr_vec (file, vec)
     FILE *file;
     rtx vec;
{
  rtx lab = XEXP (vec, 0);
  rtx body = XEXP (vec, 1);
  int vlen = XVECLEN (body, 0);
  int idx;

  ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (lab));

  for (idx = 0; idx < vlen; idx++)
    {
      ASM_OUTPUT_ADDR_VEC_ELT
	(file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
    }
}
8678
/* Output current function's deferred case vectors.  They are emitted
   into the data section because CAM does not allow data definitions in
   code sections (see unicosmk_defer_case_vector).  */

static void
unicosmk_output_deferred_case_vectors (file)
     FILE *file;
{
  struct machine_function *machine = cfun->machine;
  rtx t;

  if (machine->addr_list == NULL_RTX)
    return;

  data_section ();
  for (t = machine->addr_list; t; t = XEXP (t, 1))
    unicosmk_output_addr_vec (file, XEXP (t, 0));
}
8695
/* Set up the dynamic subprogram information block (DSIB) and update the
   frame pointer register ($15) for subroutines which have a frame.  If the
   subroutine doesn't have a frame, simply increment $15.

   IMASKP points to the register save mask; bits for registers stored in
   the DSIB are cleared here -- presumably so the generic prologue does
   not save them again (NOTE(review): confirm against alpha_expand_prologue).  */

static void
unicosmk_gen_dsib (imaskP)
     unsigned long * imaskP;
{
  if (alpha_procedure_type == PT_STACK)
    {
      const char *ssib_name;
      rtx mem;

      /* Allocate 64 bytes for the DSIB.  */

      FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
				  GEN_INT (-64))));
      emit_insn (gen_blockage ());

      /* Save the return address.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 56));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
      (*imaskP) &= ~(1L << REG_RA);

      /* Save the old frame pointer.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 48));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, hard_frame_pointer_rtx));
      (*imaskP) &= ~(1L << HARD_FRAME_POINTER_REGNUM);

      emit_insn (gen_blockage ());

      /* Store the SSIB pointer.  */

      ssib_name = ggc_strdup (unicosmk_ssib_name ());
      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 32));
      set_mem_alias_set (mem, alpha_sr_alias_set);

      FRP (emit_move_insn (gen_rtx_REG (DImode, 5),
			   gen_rtx_SYMBOL_REF (Pmode, ssib_name)));
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 5)));

      /* Save the CIW index.  */

      mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 24));
      set_mem_alias_set (mem, alpha_sr_alias_set);
      FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 25)));

      emit_insn (gen_blockage ());

      /* Set the new frame pointer.  */

      FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
				  stack_pointer_rtx, GEN_INT (64))));

    }
  else
    {
      /* Increment the frame pointer register to indicate that we do not
	 have a frame.  */

      FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
				  hard_frame_pointer_rtx, GEN_INT (1))));
    }
}
8764
8765#define SSIB_PREFIX "__SSIB_"
8766#define SSIB_PREFIX_LEN 7
8767
/* Generate the name of the SSIB section for the current function.
   Returns a pointer to a static buffer, so the result must be copied
   (e.g. via ggc_strdup) before the next call.  */

static const char *
unicosmk_ssib_name ()
{
  /* This is ok since CAM won't be able to deal with names longer than that
     anyway.  */

  static char name[256];

  rtx x;
  const char *fnname;
  int len;

  /* Extract the function's symbol name from its DECL_RTL (MEM (SYMBOL_REF)).  */
  x = DECL_RTL (cfun->decl);
  if (GET_CODE (x) != MEM)
    abort ();
  x = XEXP (x, 0);
  if (GET_CODE (x) != SYMBOL_REF)
    abort ();
  fnname = XSTR (x, 0);
  STRIP_NAME_ENCODING (fnname, fnname);

  /* Truncate so prefix + name + NUL fits in the 256-byte buffer.  */
  len = strlen (fnname);
  if (len + SSIB_PREFIX_LEN > 255)
    len = 255 - SSIB_PREFIX_LEN;

  strcpy (name, SSIB_PREFIX);
  strncpy (name + SSIB_PREFIX_LEN, fnname, len);
  name[len + SSIB_PREFIX_LEN] = 0;

  return name;
}
8801
/* Output the static subroutine information block for the current
   function.  FNNAME is the (already encoded-stripped) function name.  */

static void
unicosmk_output_ssib (file, fnname)
     FILE *file;
     const char *fnname;
{
  int len;
  int i;
  rtx x;
  rtx ciw;
  struct machine_function *machine = cfun->machine;

  ssib_section ();
  fprintf (file, "\t.endp\n\n\t.psect\t%s%s,data\n", user_label_prefix,
	   unicosmk_ssib_name ());

  /* Some required stuff and the function name length.  */

  len = strlen (fnname);
  fprintf (file, "\t.quad\t^X20008%2.2X28\n", len);

  /* Saved registers
     ??? We don't do that yet.  */

  fputs ("\t.quad\t0\n", file);

  /* Function address.  */

  fputs ("\t.quad\t", file);
  assemble_name (file, fnname);
  putc ('\n', file);

  fputs ("\t.quad\t0\n", file);
  fputs ("\t.quad\t0\n", file);

  /* Function name.
     ??? We do it the same way Cray CC does it but this could be
     simplified.  */

  for( i = 0; i < len; i++ )
    fprintf (file, "\t.byte\t%d\n", (int)(fnname[i]));
  /* Pad the name out to a whole number of quadwords.  */
  if( (len % 8) == 0 )
    fputs ("\t.quad\t0\n", file);
  else
    fprintf (file, "\t.bits\t%d : 0\n", (8 - (len % 8))*8);

  /* All call information words used in the function.  */

  for (x = machine->first_ciw; x; x = XEXP (x, 1))
    {
      ciw = XEXP (x, 0);
      fprintf (file, "\t.quad\t");
#if HOST_BITS_PER_WIDE_INT == 32
      fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
	       CONST_DOUBLE_HIGH (ciw), CONST_DOUBLE_LOW (ciw));
#else
      fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (ciw));
#endif
      fprintf (file, "\n");
    }
}
8865
/* Add a call information word (CIW) to the list of the current function's
   CIWs and return its index.

   X is a CONST_INT or CONST_DOUBLE representing the CIW.  */

rtx
unicosmk_add_call_info_word (x)
     rtx x;
{
  rtx node;
  struct machine_function *machine = cfun->machine;

  /* Append to the singly-linked EXPR_LIST of CIWs.  */
  node = gen_rtx_EXPR_LIST (VOIDmode, x, NULL_RTX);
  if (machine->first_ciw == NULL_RTX)
    machine->first_ciw = node;
  else
    XEXP (machine->last_ciw, 1) = node;

  machine->last_ciw = node;
  ++machine->ciw_count;

  /* The returned index is offset by the SSIB entries that precede the
     CIW list -- presumably the fixed quadwords plus the quadwords
     holding the function name emitted by unicosmk_output_ssib
     (NOTE(review): confirm the "+ 5" against that layout).  */
  return GEN_INT (machine->ciw_count
		  + strlen (current_function_name)/8 + 5);
}
8890
8891static char unicosmk_section_buf[100];
8892
8893char *
8894unicosmk_text_section ()
8895{
8896 static int count = 0;
8897 sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@text___%d,code",
8898 count++);
8899 return unicosmk_section_buf;
8900}
8901
8902char *
8903unicosmk_data_section ()
8904{
8905 static int count = 1;
8906 sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@data___%d,data",
8907 count++);
8908 return unicosmk_section_buf;
8909}
8910
8911/* The Cray assembler doesn't accept extern declarations for symbols which
8912 are defined in the same file. We have to keep track of all global
8913 symbols which are referenced and/or defined in a source file and output
8914 extern declarations for those which are referenced but not defined at
8915 the end of file. */
8916
8917/* List of identifiers for which an extern declaration might have to be
8918 emitted. */
8919
8920struct unicosmk_extern_list
8921{
8922 struct unicosmk_extern_list *next;
8923 const char *name;
8924};
8925
8926static struct unicosmk_extern_list *unicosmk_extern_head = 0;
8927
/* Output extern declarations which are required for every asm file.  */

static void
unicosmk_output_default_externs (file)
     FILE *file;
{
  static const char *const externs[] =
    { "__T3E_MISMATCH" };

  size_t i;

  for (i = 0; i < ARRAY_SIZE (externs); ++i)
    fprintf (file, "\t.extern\t%s\n", externs[i]);
}
8945
/* Output extern declarations for global symbols which are have been
   referenced but not defined.  */

static void
unicosmk_output_externs (file)
     FILE *file;
{
  struct unicosmk_extern_list *p;
  const char *real_name;
  int len;
  tree name_tree;

  len = strlen (user_label_prefix);
  for (p = unicosmk_extern_head; p != 0; p = p->next)
    {
      /* We have to strip the encoding and possibly remove user_label_prefix
	 from the identifier in order to handle -fleading-underscore and
	 explicit asm names correctly (cf. gcc.dg/asm-names-1.c).  */
      STRIP_NAME_ENCODING (real_name, p->name);
      if (len && p->name[0] == '*'
	  && !memcmp (real_name, user_label_prefix, len))
	real_name += len;

      /* TREE_ASM_WRITTEN is set for symbols defined in this module
	 (see unicosmk_output_common); only undefined ones get .extern.  */
      name_tree = get_identifier (real_name);
      if (! TREE_ASM_WRITTEN (name_tree))
	{
	  TREE_ASM_WRITTEN (name_tree) = 1;
	  fputs ("\t.extern\t", file);
	  assemble_name (file, p->name);
	  putc ('\n', file);
	}
    }
}
8979
8980/* Record an extern. */
8981
8982void
8983unicosmk_add_extern (name)
8984 const char *name;
8985{
8986 struct unicosmk_extern_list *p;
8987
8988 p = (struct unicosmk_extern_list *)
8989 permalloc (sizeof (struct unicosmk_extern_list));
8990 p->next = unicosmk_extern_head;
8991 p->name = name;
8992 unicosmk_extern_head = p;
8993}
8994
8995/* The Cray assembler generates incorrect code if identifiers which
8996 conflict with register names are used as instruction operands. We have
8997 to replace such identifiers with DEX expressions. */
8998
8999/* Structure to collect identifiers which have been replaced by DEX
9000 expressions. */
9001
9002struct unicosmk_dex {
9003 struct unicosmk_dex *next;
9004 const char *name;
9005};
9006
9007/* List of identifiers which have been replaced by DEX expressions. The DEX
9008 number is determined by the position in the list. */
9009
9010static struct unicosmk_dex *unicosmk_dex_list = NULL;
9011
9012/* The number of elements in the DEX list. */
9013
9014static int unicosmk_dex_count = 0;
9015
/* Check if NAME must be replaced by a DEX expression, i.e. whether it
   collides with a CAM register name: r0..r31 or f0..f31 (upper or
   lower case), optionally behind '*' and/or '$' prefixes.  */

static int
unicosmk_special_name (name)
     const char *name;
{
  /* Skip encoding and user-label prefixes.  */
  if (*name == '*')
    ++name;
  if (*name == '$')
    ++name;

  if (*name != 'r' && *name != 'f' && *name != 'R' && *name != 'F')
    return 0;

  switch (name[1])
    {
    case '1': case '2':
      /* One digit, or two digits 10..29.  */
      return (name[2] == '\0' || (ISDIGIT (name[2]) && name[3] == '\0'));

    case '3':
      /* 3, 30 or 31 only.  */
      return (name[2] == '\0'
	      || ((name[2] == '0' || name[2] == '1') && name[3] == '\0'));

    default:
      /* Any other single digit.  */
      return (ISDIGIT (name[1]) && name[2] == '\0');
    }
}
9044
/* Return the DEX number if X must be replaced by a DEX expression and 0
   otherwise.  A new entry is added to the list on first sight of a
   qualifying name; its DEX number is its position counted from the
   list tail.  */

static int
unicosmk_need_dex (x)
     rtx x;
{
  struct unicosmk_dex *dex;
  const char *name;
  int i;

  if (GET_CODE (x) != SYMBOL_REF)
    return 0;

  name = XSTR (x,0);
  if (! unicosmk_special_name (name))
    return 0;

  /* Search the existing list; entries are numbered from the tail, so
     the head has number unicosmk_dex_count.  */
  i = unicosmk_dex_count;
  for (dex = unicosmk_dex_list; dex; dex = dex->next)
    {
      if (! strcmp (name, dex->name))
	return i;
      --i;
    }

  /* Not found: prepend a new entry and return its (new highest) number.  */
  dex = (struct unicosmk_dex *) permalloc (sizeof (struct unicosmk_dex));
  dex->name = name;
  dex->next = unicosmk_dex_list;
  unicosmk_dex_list = dex;

  ++unicosmk_dex_count;
  return unicosmk_dex_count;
}
9079
/* Output the DEX definitions for this file.  Numbers count down from
   unicosmk_dex_count at the list head, matching unicosmk_need_dex.  */

static void
unicosmk_output_dex (file)
     FILE *file;
{
  struct unicosmk_dex *dex;
  int i;

  if (unicosmk_dex_list == NULL)
    return;

  fprintf (file, "\t.dexstart\n");

  i = unicosmk_dex_count;
  for (dex = unicosmk_dex_list; dex; dex = dex->next)
    {
      fprintf (file, "\tDEX (%d) = ", i);
      assemble_name (file, dex->name);
      putc ('\n', file);
      --i;
    }

  fprintf (file, "\t.dexend\n");
}
9105
9106#else
9107
/* Non-Unicos/Mk stub: no deferred case vectors to output.  */
static void
unicosmk_output_deferred_case_vectors (file)
     FILE *file ATTRIBUTE_UNUSED;
{}
9112
/* Non-Unicos/Mk stub: no DSIB is generated.  */
static void
unicosmk_gen_dsib (imaskP)
     unsigned long * imaskP ATTRIBUTE_UNUSED;
{}
9117
/* Non-Unicos/Mk stub: no SSIB is output.  */
static void
unicosmk_output_ssib (file, fnname)
     FILE * file ATTRIBUTE_UNUSED;
     const char * fnname ATTRIBUTE_UNUSED;
{}
9123
/* Non-Unicos/Mk stub: call information words are never used.  */
rtx
unicosmk_add_call_info_word (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return NULL_RTX;
}
9130
/* Non-Unicos/Mk stub: a DEX replacement is never needed.  */
static int
unicosmk_need_dex (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return 0;
}
9137
9138#endif /* TARGET_ABI_UNICOSMK */
This page took 2.451375 seconds and 5 git commands to generate.