]> gcc.gnu.org Git - gcc.git/blame - gcc/config/s390/s390.c
s390.c (s390_mainpool_start): Delete the main pool placeholder insn when chunkifying...
[gcc.git] / gcc / config / s390 / s390.c
CommitLineData
/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
24#include "config.h"
9db1d521 25#include "system.h"
4977bab6
ZW
26#include "coretypes.h"
27#include "tm.h"
9db1d521
HP
28#include "rtl.h"
29#include "tree.h"
30#include "tm_p.h"
31#include "regs.h"
32#include "hard-reg-set.h"
33#include "real.h"
34#include "insn-config.h"
35#include "conditions.h"
36#include "output.h"
37#include "insn-attr.h"
38#include "flags.h"
39#include "except.h"
40#include "function.h"
41#include "recog.h"
42#include "expr.h"
7c82a1ed 43#include "reload.h"
9db1d521
HP
44#include "toplev.h"
45#include "basic-block.h"
4023fb28 46#include "integrate.h"
9db1d521
HP
47#include "ggc.h"
48#include "target.h"
49#include "target-def.h"
0d3c08b6 50#include "debug.h"
f1e639b1 51#include "langhooks.h"
a41c6c53 52#include "optabs.h"
9db1d521 53
114278e7
RH
54/* Machine-specific symbol_ref flags. */
55#define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
56
57
9c808aad
AJ
58static bool s390_assemble_integer (rtx, unsigned int, int);
59static void s390_select_rtx_section (enum machine_mode, rtx,
60 unsigned HOST_WIDE_INT);
61static void s390_encode_section_info (tree, rtx, int);
62static bool s390_cannot_force_const_mem (rtx);
63static rtx s390_delegitimize_address (rtx);
8c17530e 64static bool s390_return_in_memory (tree, tree);
9c808aad
AJ
65static void s390_init_builtins (void);
66static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
67static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
68 HOST_WIDE_INT, tree);
69static enum attr_type s390_safe_attr_type (rtx);
70
71static int s390_adjust_cost (rtx, rtx, rtx, int);
72static int s390_adjust_priority (rtx, int);
73static int s390_issue_rate (void);
74static int s390_use_dfa_pipeline_interface (void);
75static int s390_first_cycle_multipass_dfa_lookahead (void);
9c808aad
AJ
76static bool s390_rtx_costs (rtx, int, int, int *);
77static int s390_address_cost (rtx);
78static void s390_reorg (void);
79static bool s390_valid_pointer_mode (enum machine_mode);
c35d187f 80static tree s390_build_builtin_va_list (void);
52609473 81
301d03af
RS
82#undef TARGET_ASM_ALIGNED_HI_OP
83#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
84#undef TARGET_ASM_ALIGNED_DI_OP
85#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
86#undef TARGET_ASM_INTEGER
87#define TARGET_ASM_INTEGER s390_assemble_integer
88
f314b9b1
UW
89#undef TARGET_ASM_OPEN_PAREN
90#define TARGET_ASM_OPEN_PAREN ""
91
92#undef TARGET_ASM_CLOSE_PAREN
93#define TARGET_ASM_CLOSE_PAREN ""
94
b64a1b53
RH
95#undef TARGET_ASM_SELECT_RTX_SECTION
96#define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
97
fb49053f
RH
98#undef TARGET_ENCODE_SECTION_INFO
99#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
fd3cd001
UW
100
101#ifdef HAVE_AS_TLS
102#undef TARGET_HAVE_TLS
103#define TARGET_HAVE_TLS true
104#endif
105#undef TARGET_CANNOT_FORCE_CONST_MEM
106#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
107
69bd9368
RS
108#undef TARGET_DELEGITIMIZE_ADDRESS
109#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
110
8c17530e
UW
111#undef TARGET_RETURN_IN_MEMORY
112#define TARGET_RETURN_IN_MEMORY s390_return_in_memory
113
fd3cd001
UW
114#undef TARGET_INIT_BUILTINS
115#define TARGET_INIT_BUILTINS s390_init_builtins
116#undef TARGET_EXPAND_BUILTIN
117#define TARGET_EXPAND_BUILTIN s390_expand_builtin
fb49053f 118
3961e8fe
RH
119#undef TARGET_ASM_OUTPUT_MI_THUNK
120#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
121#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
122#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
c590b625 123
077dab3b
HP
124#undef TARGET_SCHED_ADJUST_COST
125#define TARGET_SCHED_ADJUST_COST s390_adjust_cost
52609473
HP
126#undef TARGET_SCHED_ADJUST_PRIORITY
127#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
077dab3b
HP
128#undef TARGET_SCHED_ISSUE_RATE
129#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
130#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
131#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
52609473
HP
132#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
133#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
077dab3b 134
3c50106f
RH
135#undef TARGET_RTX_COSTS
136#define TARGET_RTX_COSTS s390_rtx_costs
dcefdf67
RH
137#undef TARGET_ADDRESS_COST
138#define TARGET_ADDRESS_COST s390_address_cost
077dab3b 139
18dbd950
RS
140#undef TARGET_MACHINE_DEPENDENT_REORG
141#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
142
c7453384
EC
143#undef TARGET_VALID_POINTER_MODE
144#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
145
c35d187f
RH
146#undef TARGET_BUILD_BUILTIN_VA_LIST
147#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
148
101ad855
KH
149#undef TARGET_PROMOTE_FUNCTION_ARGS
150#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
151#undef TARGET_PROMOTE_FUNCTION_RETURN
152#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
153
9db1d521
HP
154struct gcc_target targetm = TARGET_INITIALIZER;
155
156extern int reload_completed;
157
4023fb28
UW
158/* The alias set for prologue/epilogue register save/restore. */
159static int s390_sr_alias_set = 0;
160
9db1d521
HP
161/* Save information from a "cmpxx" operation until the branch or scc is
162 emitted. */
163rtx s390_compare_op0, s390_compare_op1;
164
994fe660
UW
165/* Structure used to hold the components of a S/390 memory
166 address. A legitimate address on S/390 is of the general
167 form
168 base + index + displacement
169 where any of the components is optional.
170
171 base and index are registers of the class ADDR_REGS,
172 displacement is an unsigned 12-bit immediate constant. */
9db1d521
HP
173
174struct s390_address
175{
176 rtx base;
177 rtx indx;
178 rtx disp;
f3e9edff 179 int pointer;
9db1d521
HP
180};
181
be2c2a4b 182/* Which cpu are we tuning for. */
f13e0d4e
UW
183enum processor_type s390_tune;
184enum processor_flags s390_tune_flags;
1fec52be
HP
185/* Which instruction set architecture to use. */
186enum processor_type s390_arch;
f13e0d4e 187enum processor_flags s390_arch_flags;
1fec52be
HP
188
189/* Strings to hold which cpu and instruction set architecture to use. */
be2c2a4b 190const char *s390_tune_string; /* for -mtune=<xxx> */
1fec52be
HP
191const char *s390_arch_string; /* for -march=<xxx> */
192
29742ba4 193/* Define the structure for the machine field in struct function. */
4023fb28 194
29742ba4 195struct machine_function GTY(())
4023fb28 196{
29742ba4 197 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
4023fb28 198 int save_fprs_p;
29742ba4 199
545d16ff 200 /* Set if return address needs to be saved. */
416cf582
UW
201 bool save_return_addr_p;
202
29742ba4 203 /* Number of first and last gpr to be saved, restored. */
4023fb28
UW
204 int first_save_gpr;
205 int first_restore_gpr;
206 int last_save_gpr;
4023fb28 207
29742ba4 208 /* Size of stack frame. */
4023fb28 209 HOST_WIDE_INT frame_size;
fd3cd001
UW
210
211 /* Some local-dynamic TLS symbol name. */
212 const char *some_ld_name;
4023fb28
UW
213};
214
9c808aad
AJ
215static int s390_match_ccmode_set (rtx, enum machine_mode);
216static int s390_branch_condition_mask (rtx);
217static const char *s390_branch_condition_mnemonic (rtx, int);
218static int check_mode (rtx, enum machine_mode *);
219static int general_s_operand (rtx, enum machine_mode, int);
220static int s390_short_displacement (rtx);
221static int s390_decompose_address (rtx, struct s390_address *);
222static rtx get_thread_pointer (void);
223static rtx legitimize_tls_address (rtx, rtx);
ac32b25e 224static void print_shift_count_operand (FILE *, rtx);
9c808aad
AJ
225static const char *get_some_local_dynamic_name (void);
226static int get_some_local_dynamic_name_1 (rtx *, void *);
227static int reg_used_in_mem_p (int, rtx);
228static int addr_generation_dependency_p (rtx, rtx);
545d16ff 229static int s390_split_branches (void);
9c808aad
AJ
230static void find_constant_pool_ref (rtx, rtx *);
231static void replace_constant_pool_ref (rtx *, rtx, rtx);
232static rtx find_ltrel_base (rtx);
233static void replace_ltrel_base (rtx *, rtx);
545d16ff 234static void s390_optimize_prolog (bool);
9c808aad
AJ
235static int find_unused_clobbered_reg (void);
236static void s390_frame_info (void);
237static rtx save_fpr (rtx, int, int);
238static rtx restore_fpr (rtx, int, int);
239static rtx save_gprs (rtx, int, int, int);
240static rtx restore_gprs (rtx, int, int, int);
241static int s390_function_arg_size (enum machine_mode, tree);
242static bool s390_function_arg_float (enum machine_mode, tree);
243static struct machine_function * s390_init_machine_status (void);
d3632d41
UW
244
245/* Check whether integer displacement is in range. */
246#define DISP_IN_RANGE(d) \
247 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
248 : ((d) >= 0 && (d) <= 4095))
c7453384 249
994fe660 250/* Return true if SET either doesn't set the CC register, or else
c7453384 251 the source and destination have matching CC modes and that
994fe660 252 CC mode is at least as constrained as REQ_MODE. */
c7453384 253
9db1d521 254static int
9c808aad 255s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
9db1d521 256{
994fe660 257 enum machine_mode set_mode;
9db1d521
HP
258
259 if (GET_CODE (set) != SET)
994fe660 260 abort ();
9db1d521
HP
261
262 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
263 return 1;
264
265 set_mode = GET_MODE (SET_DEST (set));
266 switch (set_mode)
267 {
9db1d521 268 case CCSmode:
07893d4f 269 case CCSRmode:
9db1d521 270 case CCUmode:
07893d4f 271 case CCURmode:
ba956982 272 case CCLmode:
07893d4f
UW
273 case CCL1mode:
274 case CCL2mode:
275 case CCT1mode:
276 case CCT2mode:
277 case CCT3mode:
278 if (req_mode != set_mode)
ba956982
UW
279 return 0;
280 break;
07893d4f 281
9db1d521 282 case CCZmode:
07893d4f
UW
283 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
284 && req_mode != CCSRmode && req_mode != CCURmode)
9db1d521
HP
285 return 0;
286 break;
0a3bdf9d
UW
287
288 case CCAPmode:
289 case CCANmode:
290 if (req_mode != CCAmode)
291 return 0;
292 break;
c7453384 293
9db1d521
HP
294 default:
295 abort ();
296 }
c7453384 297
9db1d521
HP
298 return (GET_MODE (SET_SRC (set)) == set_mode);
299}
300
c7453384
EC
301/* Return true if every SET in INSN that sets the CC register
302 has source and destination with matching CC modes and that
303 CC mode is at least as constrained as REQ_MODE.
07893d4f 304 If REQ_MODE is VOIDmode, always return false. */
c7453384 305
9db1d521 306int
9c808aad 307s390_match_ccmode (rtx insn, enum machine_mode req_mode)
9db1d521
HP
308{
309 int i;
310
07893d4f
UW
311 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
312 if (req_mode == VOIDmode)
313 return 0;
314
9db1d521
HP
315 if (GET_CODE (PATTERN (insn)) == SET)
316 return s390_match_ccmode_set (PATTERN (insn), req_mode);
317
318 if (GET_CODE (PATTERN (insn)) == PARALLEL)
319 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
320 {
321 rtx set = XVECEXP (PATTERN (insn), 0, i);
322 if (GET_CODE (set) == SET)
323 if (!s390_match_ccmode_set (set, req_mode))
324 return 0;
325 }
326
327 return 1;
328}
329
c7453384 330/* If a test-under-mask instruction can be used to implement
07893d4f 331 (compare (and ... OP1) OP2), return the CC mode required
c7453384 332 to do that. Otherwise, return VOIDmode.
07893d4f
UW
333 MIXED is true if the instruction can distinguish between
334 CC1 and CC2 for mixed selected bits (TMxx), it is false
335 if the instruction cannot (TM). */
336
337enum machine_mode
9c808aad 338s390_tm_ccmode (rtx op1, rtx op2, int mixed)
07893d4f
UW
339{
340 int bit0, bit1;
341
342 /* ??? Fixme: should work on CONST_DOUBLE as well. */
343 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
344 return VOIDmode;
345
346 /* Selected bits all zero: CC0. */
347 if (INTVAL (op2) == 0)
348 return CCTmode;
349
350 /* Selected bits all one: CC3. */
351 if (INTVAL (op2) == INTVAL (op1))
352 return CCT3mode;
353
354 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
355 if (mixed)
356 {
357 bit1 = exact_log2 (INTVAL (op2));
358 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
359 if (bit0 != -1 && bit1 != -1)
360 return bit0 > bit1 ? CCT1mode : CCT2mode;
361 }
362
363 return VOIDmode;
364}
365
c7453384
EC
366/* Given a comparison code OP (EQ, NE, etc.) and the operands
367 OP0 and OP1 of a COMPARE, return the mode to be used for the
ba956982
UW
368 comparison. */
369
370enum machine_mode
9c808aad 371s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
ba956982
UW
372{
373 switch (code)
374 {
375 case EQ:
376 case NE:
0a3bdf9d 377 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
f19a9af7 378 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
0a3bdf9d 379 return CCAPmode;
3ef093a8
AK
380 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
381 || GET_CODE (op1) == NEG)
382 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
ba956982
UW
383 return CCLmode;
384
07893d4f
UW
385 if (GET_CODE (op0) == AND)
386 {
387 /* Check whether we can potentially do it via TM. */
388 enum machine_mode ccmode;
389 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
390 if (ccmode != VOIDmode)
391 {
392 /* Relax CCTmode to CCZmode to allow fall-back to AND
393 if that turns out to be beneficial. */
394 return ccmode == CCTmode ? CCZmode : ccmode;
395 }
396 }
397
c7453384 398 if (register_operand (op0, HImode)
07893d4f
UW
399 && GET_CODE (op1) == CONST_INT
400 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
401 return CCT3mode;
c7453384 402 if (register_operand (op0, QImode)
07893d4f
UW
403 && GET_CODE (op1) == CONST_INT
404 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
405 return CCT3mode;
406
ba956982
UW
407 return CCZmode;
408
409 case LE:
410 case LT:
411 case GE:
412 case GT:
0a3bdf9d 413 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
f19a9af7 414 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
0a3bdf9d
UW
415 {
416 if (INTVAL (XEXP((op0), 1)) < 0)
417 return CCANmode;
418 else
419 return CCAPmode;
420 }
ba956982
UW
421 case UNORDERED:
422 case ORDERED:
423 case UNEQ:
424 case UNLE:
425 case UNLT:
426 case UNGE:
427 case UNGT:
428 case LTGT:
07893d4f
UW
429 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
430 && GET_CODE (op1) != CONST_INT)
431 return CCSRmode;
ba956982
UW
432 return CCSmode;
433
ba956982
UW
434 case LTU:
435 case GEU:
3ef093a8
AK
436 if (GET_CODE (op0) == PLUS
437 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
07893d4f
UW
438 return CCL1mode;
439
440 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
441 && GET_CODE (op1) != CONST_INT)
442 return CCURmode;
443 return CCUmode;
444
445 case LEU:
ba956982 446 case GTU:
3ef093a8
AK
447 if (GET_CODE (op0) == MINUS
448 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
07893d4f
UW
449 return CCL2mode;
450
451 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
452 && GET_CODE (op1) != CONST_INT)
453 return CCURmode;
ba956982
UW
454 return CCUmode;
455
456 default:
457 abort ();
458 }
459}
460
e69166de
UW
461/* Return nonzero if OP is a valid comparison operator
462 for an ALC condition in mode MODE. */
463
464int
465s390_alc_comparison (rtx op, enum machine_mode mode)
466{
467 if (mode != VOIDmode && mode != GET_MODE (op))
468 return 0;
469
470 if (GET_RTX_CLASS (GET_CODE (op)) != '<')
471 return 0;
472
473 if (GET_CODE (XEXP (op, 0)) != REG
474 || REGNO (XEXP (op, 0)) != CC_REGNUM
475 || XEXP (op, 1) != const0_rtx)
476 return 0;
477
478 switch (GET_MODE (XEXP (op, 0)))
479 {
480 case CCL1mode:
481 return GET_CODE (op) == LTU;
482
483 case CCL2mode:
484 return GET_CODE (op) == LEU;
485
486 case CCUmode:
487 return GET_CODE (op) == GTU;
488
489 case CCURmode:
490 return GET_CODE (op) == LTU;
491
492 case CCSmode:
493 return GET_CODE (op) == UNGT;
494
495 case CCSRmode:
496 return GET_CODE (op) == UNLT;
497
498 default:
499 return 0;
500 }
501}
502
503/* Return nonzero if OP is a valid comparison operator
504 for an SLB condition in mode MODE. */
505
506int
507s390_slb_comparison (rtx op, enum machine_mode mode)
508{
509 if (mode != VOIDmode && mode != GET_MODE (op))
510 return 0;
511
512 if (GET_RTX_CLASS (GET_CODE (op)) != '<')
513 return 0;
514
515 if (GET_CODE (XEXP (op, 0)) != REG
516 || REGNO (XEXP (op, 0)) != CC_REGNUM
517 || XEXP (op, 1) != const0_rtx)
518 return 0;
519
520 switch (GET_MODE (XEXP (op, 0)))
521 {
522 case CCL1mode:
523 return GET_CODE (op) == GEU;
524
525 case CCL2mode:
526 return GET_CODE (op) == GTU;
527
528 case CCUmode:
529 return GET_CODE (op) == LEU;
530
531 case CCURmode:
532 return GET_CODE (op) == GEU;
533
534 case CCSmode:
535 return GET_CODE (op) == LE;
536
537 case CCSRmode:
538 return GET_CODE (op) == GE;
539
540 default:
541 return 0;
542 }
543}
544
c7453384 545/* Return branch condition mask to implement a branch
ba956982
UW
546 specified by CODE. */
547
548static int
9c808aad 549s390_branch_condition_mask (rtx code)
c7453384 550{
ba956982
UW
551 const int CC0 = 1 << 3;
552 const int CC1 = 1 << 2;
553 const int CC2 = 1 << 1;
554 const int CC3 = 1 << 0;
555
556 if (GET_CODE (XEXP (code, 0)) != REG
557 || REGNO (XEXP (code, 0)) != CC_REGNUM
558 || XEXP (code, 1) != const0_rtx)
559 abort ();
560
561 switch (GET_MODE (XEXP (code, 0)))
562 {
563 case CCZmode:
564 switch (GET_CODE (code))
565 {
566 case EQ: return CC0;
567 case NE: return CC1 | CC2 | CC3;
568 default:
569 abort ();
570 }
571 break;
572
07893d4f
UW
573 case CCT1mode:
574 switch (GET_CODE (code))
575 {
576 case EQ: return CC1;
577 case NE: return CC0 | CC2 | CC3;
578 default:
579 abort ();
580 }
581 break;
582
583 case CCT2mode:
584 switch (GET_CODE (code))
585 {
586 case EQ: return CC2;
587 case NE: return CC0 | CC1 | CC3;
588 default:
589 abort ();
590 }
591 break;
592
593 case CCT3mode:
594 switch (GET_CODE (code))
595 {
596 case EQ: return CC3;
597 case NE: return CC0 | CC1 | CC2;
598 default:
599 abort ();
600 }
601 break;
602
ba956982
UW
603 case CCLmode:
604 switch (GET_CODE (code))
605 {
606 case EQ: return CC0 | CC2;
607 case NE: return CC1 | CC3;
07893d4f
UW
608 default:
609 abort ();
610 }
611 break;
612
613 case CCL1mode:
614 switch (GET_CODE (code))
615 {
616 case LTU: return CC2 | CC3; /* carry */
617 case GEU: return CC0 | CC1; /* no carry */
618 default:
619 abort ();
620 }
621 break;
622
623 case CCL2mode:
624 switch (GET_CODE (code))
625 {
626 case GTU: return CC0 | CC1; /* borrow */
627 case LEU: return CC2 | CC3; /* no borrow */
ba956982
UW
628 default:
629 abort ();
630 }
631 break;
632
633 case CCUmode:
634 switch (GET_CODE (code))
635 {
636 case EQ: return CC0;
637 case NE: return CC1 | CC2 | CC3;
638 case LTU: return CC1;
639 case GTU: return CC2;
640 case LEU: return CC0 | CC1;
641 case GEU: return CC0 | CC2;
642 default:
643 abort ();
644 }
645 break;
646
07893d4f
UW
647 case CCURmode:
648 switch (GET_CODE (code))
649 {
650 case EQ: return CC0;
651 case NE: return CC2 | CC1 | CC3;
652 case LTU: return CC2;
653 case GTU: return CC1;
654 case LEU: return CC0 | CC2;
655 case GEU: return CC0 | CC1;
656 default:
657 abort ();
658 }
659 break;
660
0a3bdf9d
UW
661 case CCAPmode:
662 switch (GET_CODE (code))
663 {
664 case EQ: return CC0;
665 case NE: return CC1 | CC2 | CC3;
666 case LT: return CC1 | CC3;
667 case GT: return CC2;
668 case LE: return CC0 | CC1 | CC3;
669 case GE: return CC0 | CC2;
670 default:
671 abort ();
672 }
673 break;
674
675 case CCANmode:
676 switch (GET_CODE (code))
677 {
678 case EQ: return CC0;
679 case NE: return CC1 | CC2 | CC3;
680 case LT: return CC1;
681 case GT: return CC2 | CC3;
682 case LE: return CC0 | CC1;
683 case GE: return CC0 | CC2 | CC3;
684 default:
685 abort ();
686 }
687 break;
688
ba956982
UW
689 case CCSmode:
690 switch (GET_CODE (code))
691 {
692 case EQ: return CC0;
693 case NE: return CC1 | CC2 | CC3;
694 case LT: return CC1;
695 case GT: return CC2;
696 case LE: return CC0 | CC1;
697 case GE: return CC0 | CC2;
698 case UNORDERED: return CC3;
699 case ORDERED: return CC0 | CC1 | CC2;
700 case UNEQ: return CC0 | CC3;
701 case UNLT: return CC1 | CC3;
702 case UNGT: return CC2 | CC3;
703 case UNLE: return CC0 | CC1 | CC3;
704 case UNGE: return CC0 | CC2 | CC3;
705 case LTGT: return CC1 | CC2;
706 default:
707 abort ();
708 }
07893d4f
UW
709 break;
710
711 case CCSRmode:
712 switch (GET_CODE (code))
713 {
714 case EQ: return CC0;
715 case NE: return CC2 | CC1 | CC3;
716 case LT: return CC2;
717 case GT: return CC1;
718 case LE: return CC0 | CC2;
719 case GE: return CC0 | CC1;
720 case UNORDERED: return CC3;
721 case ORDERED: return CC0 | CC2 | CC1;
722 case UNEQ: return CC0 | CC3;
723 case UNLT: return CC2 | CC3;
724 case UNGT: return CC1 | CC3;
725 case UNLE: return CC0 | CC2 | CC3;
726 case UNGE: return CC0 | CC1 | CC3;
727 case LTGT: return CC2 | CC1;
728 default:
729 abort ();
730 }
731 break;
ba956982
UW
732
733 default:
734 abort ();
735 }
736}
737
c7453384
EC
738/* If INV is false, return assembler mnemonic string to implement
739 a branch specified by CODE. If INV is true, return mnemonic
ba956982
UW
740 for the corresponding inverted branch. */
741
742static const char *
9c808aad 743s390_branch_condition_mnemonic (rtx code, int inv)
ba956982 744{
0139adca 745 static const char *const mnemonic[16] =
ba956982
UW
746 {
747 NULL, "o", "h", "nle",
748 "l", "nhe", "lh", "ne",
749 "e", "nlh", "he", "nl",
750 "le", "nh", "no", NULL
751 };
752
753 int mask = s390_branch_condition_mask (code);
754
755 if (inv)
756 mask ^= 15;
757
758 if (mask < 1 || mask > 14)
759 abort ();
760
761 return mnemonic[mask];
762}
763
f19a9af7
AK
764/* Return the part of op which has a value different from def.
765 The size of the part is determined by mode.
766 Use this function only if you already know that op really
767 contains such a part. */
4023fb28 768
f19a9af7
AK
769unsigned HOST_WIDE_INT
770s390_extract_part (rtx op, enum machine_mode mode, int def)
4023fb28 771{
f19a9af7
AK
772 unsigned HOST_WIDE_INT value = 0;
773 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
774 int part_bits = GET_MODE_BITSIZE (mode);
775 unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
776 int i;
777
778 for (i = 0; i < max_parts; i++)
4023fb28 779 {
f19a9af7
AK
780 if (i == 0)
781 value = (unsigned HOST_WIDE_INT) INTVAL (op);
4023fb28 782 else
f19a9af7
AK
783 value >>= part_bits;
784
785 if ((value & part_mask) != (def & part_mask))
786 return value & part_mask;
4023fb28 787 }
f19a9af7 788
4023fb28
UW
789 abort ();
790}
791
792/* If OP is an integer constant of mode MODE with exactly one
f19a9af7
AK
793 part of mode PART_MODE unequal to DEF, return the number of that
794 part. Otherwise, return -1. */
4023fb28
UW
795
796int
f19a9af7
AK
797s390_single_part (rtx op,
798 enum machine_mode mode,
799 enum machine_mode part_mode,
800 int def)
801{
802 unsigned HOST_WIDE_INT value = 0;
803 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
804 unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
805 int i, part = -1;
806
807 if (GET_CODE (op) != CONST_INT)
808 return -1;
809
810 for (i = 0; i < n_parts; i++)
811 {
812 if (i == 0)
813 value = (unsigned HOST_WIDE_INT) INTVAL (op);
4023fb28 814 else
f19a9af7
AK
815 value >>= GET_MODE_BITSIZE (part_mode);
816
817 if ((value & part_mask) != (def & part_mask))
818 {
819 if (part != -1)
820 return -1;
821 else
822 part = i;
823 }
4023fb28 824 }
f19a9af7 825 return part == -1 ? -1 : n_parts - 1 - part;
4023fb28
UW
826}
827
c7453384
EC
828/* Check whether we can (and want to) split a double-word
829 move in mode MODE from SRC to DST into two single-word
dc65c307
UW
830 moves, moving the subword FIRST_SUBWORD first. */
831
832bool
9c808aad 833s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
dc65c307
UW
834{
835 /* Floating point registers cannot be split. */
836 if (FP_REG_P (src) || FP_REG_P (dst))
837 return false;
838
fae778eb 839 /* We don't need to split if operands are directly accessible. */
dc65c307
UW
840 if (s_operand (src, mode) || s_operand (dst, mode))
841 return false;
842
843 /* Non-offsettable memory references cannot be split. */
844 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
845 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
846 return false;
847
848 /* Moving the first subword must not clobber a register
849 needed to move the second subword. */
850 if (register_operand (dst, mode))
851 {
852 rtx subreg = operand_subword (dst, first_subword, 0, mode);
853 if (reg_overlap_mentioned_p (subreg, src))
854 return false;
855 }
856
857 return true;
858}
859
ba956982 860
c7453384 861/* Change optimizations to be performed, depending on the
994fe660
UW
862 optimization level.
863
864 LEVEL is the optimization level specified; 2 if `-O2' is
865 specified, 1 if `-O' is specified, and 0 if neither is specified.
866
5e7a8ee0 867 SIZE is nonzero if `-Os' is specified and zero otherwise. */
9db1d521
HP
868
869void
9c808aad 870optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
9db1d521 871{
8e509cf9
UW
872 /* ??? There are apparently still problems with -fcaller-saves. */
873 flag_caller_saves = 0;
2120e3cd
UW
874
875 /* By default, always emit DWARF-2 unwind info. This allows debugging
876 without maintaining a stack frame back-chain. */
877 flag_asynchronous_unwind_tables = 1;
9db1d521
HP
878}
879
4023fb28 880void
9c808aad 881override_options (void)
4023fb28 882{
1fec52be 883 int i;
1fec52be
HP
884 static struct pta
885 {
886 const char *const name; /* processor name or nickname. */
887 const enum processor_type processor;
f13e0d4e 888 const enum processor_flags flags;
1fec52be
HP
889 }
890 const processor_alias_table[] =
891 {
f13e0d4e
UW
892 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
893 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
894 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
c7453384 895 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
f13e0d4e 896 | PF_LONG_DISPLACEMENT},
1fec52be
HP
897 };
898
899 int const pta_size = ARRAY_SIZE (processor_alias_table);
900
4023fb28
UW
901 /* Acquire a unique set number for our register saves and restores. */
902 s390_sr_alias_set = new_alias_set ();
4023fb28 903
29742ba4
HP
904 /* Set up function hooks. */
905 init_machine_status = s390_init_machine_status;
f13e0d4e
UW
906
907 /* Architecture mode defaults according to ABI. */
908 if (!(target_flags_explicit & MASK_ZARCH))
909 {
910 if (TARGET_64BIT)
911 target_flags |= MASK_ZARCH;
912 else
913 target_flags &= ~MASK_ZARCH;
914 }
915
916 /* Determine processor architectural level. */
1fec52be 917 if (!s390_arch_string)
f13e0d4e 918 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1fec52be
HP
919
920 for (i = 0; i < pta_size; i++)
921 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
922 {
923 s390_arch = processor_alias_table[i].processor;
f13e0d4e 924 s390_arch_flags = processor_alias_table[i].flags;
1fec52be
HP
925 break;
926 }
1fec52be 927 if (i == pta_size)
f13e0d4e 928 error ("Unknown cpu used in -march=%s.", s390_arch_string);
1fec52be 929
f13e0d4e
UW
930 /* Determine processor to tune for. */
931 if (!s390_tune_string)
1fec52be 932 {
f13e0d4e
UW
933 s390_tune = s390_arch;
934 s390_tune_flags = s390_arch_flags;
935 s390_tune_string = s390_arch_string;
936 }
937 else
938 {
939 for (i = 0; i < pta_size; i++)
940 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
941 {
942 s390_tune = processor_alias_table[i].processor;
943 s390_tune_flags = processor_alias_table[i].flags;
944 break;
945 }
946 if (i == pta_size)
947 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1fec52be
HP
948 }
949
f13e0d4e
UW
950 /* Sanity checks. */
951 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
952 error ("z/Architecture mode not supported on %s.", s390_arch_string);
953 if (TARGET_64BIT && !TARGET_ZARCH)
954 error ("64-bit ABI not supported in ESA/390 mode.");
29742ba4 955}
9db1d521
HP
956
957/* Map for smallest class containing reg regno. */
958
0139adca 959const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
9db1d521
HP
960{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
961 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
962 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
963 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
964 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
965 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
966 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
967 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
c7453384 968 ADDR_REGS, NO_REGS, ADDR_REGS
9db1d521
HP
969};
970
077dab3b
HP
971/* Return attribute type of insn. */
972
973static enum attr_type
9c808aad 974s390_safe_attr_type (rtx insn)
077dab3b
HP
975{
976 if (recog_memoized (insn) >= 0)
977 return get_attr_type (insn);
978 else
979 return TYPE_NONE;
980}
9db1d521 981
994fe660
UW
982/* Return true if OP a (const_int 0) operand.
983 OP is the current operation.
984 MODE is the current operation mode. */
c7453384 985
9db1d521 986int
9c808aad 987const0_operand (register rtx op, enum machine_mode mode)
9db1d521
HP
988{
989 return op == CONST0_RTX (mode);
990}
991
b2ccb744
UW
992/* Return true if OP is constant.
993 OP is the current operation.
994 MODE is the current operation mode. */
995
996int
9c808aad 997consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
b2ccb744
UW
998{
999 return CONSTANT_P (op);
1000}
1001
994fe660 1002/* Return true if the mode of operand OP matches MODE.
c7453384 1003 If MODE is set to VOIDmode, set it to the mode of OP. */
9db1d521
HP
1004
1005static int
9c808aad 1006check_mode (register rtx op, enum machine_mode *mode)
9db1d521
HP
1007{
1008 if (*mode == VOIDmode)
1009 *mode = GET_MODE (op);
1010 else
1011 {
1012 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1013 return 0;
1014 }
1015 return 1;
1016}
1017
/* Return true if OP is a valid operand for the LARL instruction.
   LARL is PC-relative, so only even offsets are representable
   (hence the ALIGN1 and odd-constant rejections below) and, on
   hosts with wide HOST_WIDE_INT, the added offset must fit the
   signed 33-bit relative range.
   OP is the current operation.
   MODE is the current operation mode.  */

int
larl_operand (register rtx op, enum machine_mode mode)
{
  if (! check_mode (op, &mode))
    return 0;

  /* Allow labels and local symbols.  Symbols must not be 1-byte
     aligned, must not be TLS, and under PIC must bind locally.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
	    && SYMBOL_REF_TLS_MODEL (op) == 0
	    && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  /* Allow adding *even* in-range constants.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
	  || (INTVAL (XEXP (op, 1)) & 1) != 0)
	return 0;
#if HOST_BITS_PER_WIDE_INT > 32
      /* Offset must fit within +/- 2^32.  */
      if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
	  || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
	return 0;
#endif
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
	    && SYMBOL_REF_TLS_MODEL (op) == 0
	    && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Now we must have a @GOTENT offset or @PLT stub
     or an @INDNTPOFF TLS offset.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_GOTENT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_PLT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_INDNTPOFF)
    return 1;

  return 0;
}
1077
/* Helper routine to implement s_operand and s_imm_operand:
   accept operands addressable in S format (base + displacement,
   no index register).
   OP is the current operation.
   MODE is the current operation mode.
   ALLOW_IMMEDIATE specifies whether immediate operands should
   be accepted or not.  */

static int
general_s_operand (register rtx op, enum machine_mode mode,
		   int allow_immediate)
{
  struct s390_address addr;

  /* Call general_operand first, so that we don't have to
     check for many special cases.  */
  if (!general_operand (op, mode))
    return 0;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
      /* Constants are OK as s-operand if ALLOW_IMMEDIATE
	 is true and we are still before reload.  */
      case CONST_INT:
      case CONST_DOUBLE:
	if (!allow_immediate || reload_completed)
	  return 0;
	return 1;

      /* Memory operands are OK unless they already use an
	 index register.  */
      case MEM:
	if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
	  return 1;
	if (!s390_decompose_address (XEXP (op, 0), &addr))
	  return 0;
	if (addr.indx)
	  return 0;
	/* Do not allow literal pool references unless ALLOW_IMMEDIATE
	   is true.  This prevents compares between two literal pool
	   entries from being accepted.  */
	if (!allow_immediate
	    && addr.base && REGNO (addr.base) == BASE_REGISTER)
	  return 0;
	return 1;

      default:
	break;
    }

  return 0;
}
1135
4023fb28 1136/* Return true if OP is a valid S-type operand.
994fe660
UW
1137 OP is the current operation.
1138 MODE is the current operation mode. */
9db1d521
HP
1139
1140int
9c808aad 1141s_operand (register rtx op, enum machine_mode mode)
9db1d521 1142{
4023fb28 1143 return general_s_operand (op, mode, 0);
9db1d521
HP
1144}
1145
c7453384
EC
1146/* Return true if OP is a valid S-type operand or an immediate
1147 operand that can be addressed as S-type operand by forcing
4023fb28 1148 it into the literal pool.
994fe660
UW
1149 OP is the current operation.
1150 MODE is the current operation mode. */
9db1d521
HP
1151
1152int
9c808aad 1153s_imm_operand (register rtx op, enum machine_mode mode)
9db1d521 1154{
4023fb28 1155 return general_s_operand (op, mode, 1);
9db1d521
HP
1156}
1157
ac32b25e
UW
/* Return true if OP a valid shift count operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
shift_count_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset = 0;

  if (! check_mode (op, &mode))
    return 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  Note that reload already checks
     that any register present is an address register, so
     we just check for any register here.  */
  if (GET_CODE (op) == CONST_INT)
    {
      offset = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      offset = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  /* Strip any number of SUBREG wrappers from the register part.  */
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (op && GET_CODE (op) != REG)
    return 0;

  /* Unfortunately we have to reject constants that are invalid
     for an address, or else reload will get confused.  */
  if (!DISP_IN_RANGE (offset))
    return 0;

  return 1;
}
1196
d3632d41
UW
1197/* Return true if DISP is a valid short displacement. */
1198
1199static int
9c808aad 1200s390_short_displacement (rtx disp)
d3632d41
UW
1201{
1202 /* No displacement is OK. */
1203 if (!disp)
1204 return 1;
1205
1206 /* Integer displacement in range. */
1207 if (GET_CODE (disp) == CONST_INT)
1208 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1209
1210 /* GOT offset is not OK, the GOT can be large. */
1211 if (GET_CODE (disp) == CONST
1212 && GET_CODE (XEXP (disp, 0)) == UNSPEC
fd7643fb 1213 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
d3632d41
UW
1214 return 0;
1215
1216 /* All other symbolic constants are literal pool references,
1217 which are OK as the literal pool must be small. */
1218 if (GET_CODE (disp) == CONST)
1219 return 1;
1220
1221 return 0;
1222}
1223
/* Return true if OP is a valid operand for a C constraint.
   C is the constraint letter, STR points at it within the full
   constraint string (C must equal STR[0]).

   The memory constraints differ in whether an index register is
   allowed and whether a short (12-bit) or long displacement is
   required; 'U'/'W' test a bare address rather than a MEM.  */

int
s390_extra_constraint_str (rtx op, int c, const char * str)
{
  struct s390_address addr;

  if (c != str[0])
    abort ();

  switch (c)
    {
    /* 'Q': memory, no index register, short displacement.  */
    case 'Q':
      if (GET_CODE (op) != MEM)
	return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
	return 0;
      if (addr.indx)
	return 0;

      if (TARGET_LONG_DISPLACEMENT)
	{
	  if (!s390_short_displacement (addr.disp))
	    return 0;
	}
      break;

    /* 'R': memory, index allowed, short displacement.  */
    case 'R':
      if (GET_CODE (op) != MEM)
	return 0;

      if (TARGET_LONG_DISPLACEMENT)
	{
	  if (!s390_decompose_address (XEXP (op, 0), &addr))
	    return 0;
	  if (!s390_short_displacement (addr.disp))
	    return 0;
	}
      break;

    /* 'S': memory, no index register, long displacement only.  */
    case 'S':
      if (!TARGET_LONG_DISPLACEMENT)
	return 0;
      if (GET_CODE (op) != MEM)
	return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
	return 0;
      if (addr.indx)
	return 0;
      if (s390_short_displacement (addr.disp))
	return 0;
      break;

    /* 'T': memory, index allowed, long displacement only.  */
    case 'T':
      if (!TARGET_LONG_DISPLACEMENT)
	return 0;
      if (GET_CODE (op) != MEM)
	return 0;
      /* Any invalid address here will be fixed up by reload,
	 so accept it for the most generic constraint.  */
      if (s390_decompose_address (XEXP (op, 0), &addr)
	  && s390_short_displacement (addr.disp))
	return 0;
      break;

    /* 'U': bare address with short displacement.  */
    case 'U':
      if (TARGET_LONG_DISPLACEMENT)
	{
	  if (!s390_decompose_address (op, &addr))
	    return 0;
	  if (!s390_short_displacement (addr.disp))
	    return 0;
	}
      break;

    /* 'W': bare address with long displacement only.  */
    case 'W':
      if (!TARGET_LONG_DISPLACEMENT)
	return 0;
      /* Any invalid address here will be fixed up by reload,
	 so accept it for the most generic constraint.  */
      if (s390_decompose_address (op, &addr)
	  && s390_short_displacement (addr.disp))
	return 0;
      break;

    /* 'Y': valid shift count operand.  */
    case 'Y':
      return shift_count_operand (op, VOIDmode);

    default:
      return 0;
    }

  return 1;
}
1318
f19a9af7
AK
/* Return true if VALUE matches the constraint STR.
   C is the constraint letter (must equal STR[0]).

   'I' — unsigned 8-bit constant.
   'J' — unsigned 12-bit constant.
   'K' — signed 16-bit constant.
   'L' — value valid as an address displacement (range depends on
	 TARGET_LONG_DISPLACEMENT).
   'M' — exactly 2147483647 (2^31 - 1).
   'N' — multi-character constraint: VALUE affects only a single
	 HImode/QImode part (digit selects which) of the given mode,
	 with the remaining parts all-zero ('0') or all-one ('F').  */

int
s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
				int c,
				const char * str)
{
  enum machine_mode mode, part_mode;
  int def;
  unsigned char part;

  if (c != str[0])
    abort ();

  switch (str[0])
    {
    case 'I':
      return (unsigned int)value < 256;

    case 'J':
      return (unsigned int)value < 4096;

    case 'K':
      return value >= -32768 && value < 32768;

    case 'L':
      return (TARGET_LONG_DISPLACEMENT ?
	      (value >= -524288 && value <= 524287)
	      : (value >= 0 && value <= 4095));
    case 'M':
      return value == 2147483647;

    case 'N':
      /* STR[1]: part index; STR[2]: part mode; STR[3]: container
	 mode; STR[4]: background ('0' = zeros, 'F' = ones).  */
      part = str[1] - '0';

      switch (str[2])
	{
	case 'H': part_mode = HImode; break;
	case 'Q': part_mode = QImode; break;
	default:  return 0;
	}

      switch (str[3])
	{
	case 'H': mode = HImode; break;
	case 'S': mode = SImode; break;
	case 'D': mode = DImode; break;
	default:  return 0;
	}

      switch (str[4])
	{
	case '0': def = 0;  break;
	case 'F': def = -1; break;
	default:  return 0;
	}

      /* The part must be strictly smaller than the container.  */
      if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
	return 0;

      if (s390_single_part (GEN_INT (value), mode, part_mode, def) != part)
	return 0;

      break;

    default:
      return 0;
    }

  return 1;
}
1390
3c50106f
RH
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
   CODE is GET_CODE (X); OUTER_CODE is the code of the containing
   expression, used to special-case constants inside a PLUS.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST:
      /* Symbol-difference constants are made very expensive.  */
      if (GET_CODE (XEXP (x, 0)) == MINUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	*total = 1000;
      else
	*total = 0;
      return true;

    case CONST_INT:
      /* Force_const_mem does not work out of reload, because the
	 saveable_obstack is set to reload_obstack, which does not
	 live long enough.  Because of this we cannot use force_const_mem
	 in addsi3.  This leads to problems with gen_add2_insn with a
	 constant greater than a short.  Because of that we give an
	 addition of greater constants a cost of 3 (reload1.c 10096).  */
      /* ??? saveable_obstack no longer exists.  */
      if (outer_code == PLUS
	  && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
	*total = COSTS_N_INSNS (3);
      else
	*total = 0;
      return true;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = 0;
      return true;

    /* Simple arithmetic and logical operations: one instruction.  */
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case PLUS:
    case AND:
    case IOR:
    case XOR:
    case MINUS:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      /* 64-bit multiply is far more expensive than 32-bit.  */
      if (GET_MODE (XEXP (x, 0)) == DImode)
	*total = COSTS_N_INSNS (40);
      else
	*total = COSTS_N_INSNS (7);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (33);
      return true;

    default:
      return false;
    }
}
1460
dea09b1b
UW
1461/* Return the cost of an address rtx ADDR. */
1462
dcefdf67 1463static int
9c808aad 1464s390_address_cost (rtx addr)
dea09b1b
UW
1465{
1466 struct s390_address ad;
1467 if (!s390_decompose_address (addr, &ad))
1468 return 1000;
1469
1470 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1471}
1472
994fe660 1473/* Return true if OP is a valid operand for the BRAS instruction.
9db1d521
HP
1474 OP is the current operation.
1475 MODE is the current operation mode. */
1476
1477int
9c808aad 1478bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9db1d521
HP
1479{
1480 register enum rtx_code code = GET_CODE (op);
1481
1482 /* Allow SYMBOL_REFs. */
1483 if (code == SYMBOL_REF)
1484 return 1;
1485
1486 /* Allow @PLT stubs. */
1487 if (code == CONST
1488 && GET_CODE (XEXP (op, 0)) == UNSPEC
fd7643fb 1489 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
9db1d521
HP
1490 return 1;
1491 return 0;
1492}
1493
fd3cd001
UW
1494/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1495 otherwise return 0. */
1496
1497int
9c808aad 1498tls_symbolic_operand (register rtx op)
fd3cd001 1499{
fd3cd001
UW
1500 if (GET_CODE (op) != SYMBOL_REF)
1501 return 0;
114278e7 1502 return SYMBOL_REF_TLS_MODEL (op);
fd3cd001 1503}
9db1d521 1504\f
/* Return true if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   A valid load-multiple is a PARALLEL of SETs loading consecutive
   registers from consecutive memory locations at a common base
   (optionally plus a constant displacement).
   OP is the current operation.
   MODE is the current operation mode.  */

int
load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;


  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  /* First element fixes the register sequence start, the base
     address, and the element mode for all remaining elements.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
	   && GET_CODE (XEXP (src_addr, 0)) == REG
	   && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  /* Eliminable registers would change the offsets later on.  */
  if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
    return 0;

  /* Element I must load register DEST_REGNO+I from the same base at
     offset OFF + I * element size, all in ELT_MODE.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != elt_mode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != elt_mode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
	     != off + i * GET_MODE_SIZE (elt_mode))
	return 0;
    }

  return 1;
}
1568
/* Return true if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   Mirror image of load_multiple_operation: a PARALLEL of SETs storing
   consecutive registers to consecutive memory locations.
   OP is the current operation.
   MODE is the current operation mode.  */

int
store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  /* First element fixes the register sequence start, the base
     address, and the element mode for all remaining elements.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));

  /* Check, is base, or base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
	   && GET_CODE (XEXP (dest_addr, 0)) == REG
	   && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return 0;

  /* Eliminable registers would change the offsets later on.  */
  if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
    return 0;

  /* Element I must store register SRC_REGNO+I to the same base at
     offset OFF + I * element size, all in ELT_MODE.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != elt_mode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != elt_mode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
	     != off + i * GET_MODE_SIZE (elt_mode))
	return 0;
    }
  return 1;
}
1630
1631
994fe660 1632/* Return true if OP contains a symbol reference */
9db1d521
HP
1633
1634int
9c808aad 1635symbolic_reference_mentioned_p (rtx op)
9db1d521 1636{
994fe660 1637 register const char *fmt;
9db1d521
HP
1638 register int i;
1639
1640 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1641 return 1;
1642
1643 fmt = GET_RTX_FORMAT (GET_CODE (op));
1644 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1645 {
1646 if (fmt[i] == 'E')
1647 {
1648 register int j;
1649
1650 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1651 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1652 return 1;
1653 }
1654
1655 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1656 return 1;
1657 }
1658
1659 return 0;
1660}
1661
fd3cd001
UW
1662/* Return true if OP contains a reference to a thread-local symbol. */
1663
1664int
9c808aad 1665tls_symbolic_reference_mentioned_p (rtx op)
fd3cd001
UW
1666{
1667 register const char *fmt;
1668 register int i;
1669
1670 if (GET_CODE (op) == SYMBOL_REF)
1671 return tls_symbolic_operand (op);
1672
1673 fmt = GET_RTX_FORMAT (GET_CODE (op));
1674 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1675 {
1676 if (fmt[i] == 'E')
1677 {
1678 register int j;
1679
1680 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1681 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1682 return 1;
1683 }
1684
1685 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
1686 return 1;
1687 }
1688
1689 return 0;
1690}
1691
9db1d521 1692
c7453384
EC
1693/* Return true if OP is a legitimate general operand when
1694 generating PIC code. It is given that flag_pic is on
994fe660
UW
1695 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1696
9db1d521 1697int
9c808aad 1698legitimate_pic_operand_p (register rtx op)
9db1d521 1699{
4023fb28 1700 /* Accept all non-symbolic constants. */
9db1d521
HP
1701 if (!SYMBOLIC_CONST (op))
1702 return 1;
1703
c7453384 1704 /* Reject everything else; must be handled
fd3cd001 1705 via emit_symbolic_move. */
9db1d521
HP
1706 return 0;
1707}
1708
994fe660
UW
/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_constant_p (register rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Accept immediate LARL operands.  */
  if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
    return 1;

  /* Thread-local symbols are never legal constants.  This is
     so that emit_call knows that computing such addresses
     might require a function call.  */
  if (TLS_SYMBOLIC_CONST (op))
    return 0;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_symbolic_move.  */
  if (flag_pic)
    return 1;

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return 0;
}
1739
fd3cd001
UW
/* Determine if it's legal to put X into the constant pool.  This
   is not possible if X contains the address of a symbol that is
   not constant (TLS) or not known at final link time (PIC).
   Recurses through CONST/PLUS/MINUS wrappers.  */

static bool
s390_cannot_force_const_mem (rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Accept all non-symbolic constants.  */
      return false;

    case LABEL_REF:
      /* Labels are OK iff we are non-PIC.  */
      return flag_pic != 0;

    case SYMBOL_REF:
      /* 'Naked' TLS symbol references are never OK,
	 non-TLS symbols are OK iff we are non-PIC.  */
      if (tls_symbolic_operand (x))
	return true;
      else
	return flag_pic != 0;

    case CONST:
      return s390_cannot_force_const_mem (XEXP (x, 0));
    case PLUS:
    case MINUS:
      /* An expression is rejected if either operand is.  */
      return s390_cannot_force_const_mem (XEXP (x, 0))
	     || s390_cannot_force_const_mem (XEXP (x, 1));

    case UNSPEC:
      switch (XINT (x, 1))
	{
	/* Only lt-relative or GOT-relative UNSPECs are OK.  */
	case UNSPEC_LTREL_OFFSET:
	case UNSPEC_GOT:
	case UNSPEC_GOTOFF:
	case UNSPEC_PLTOFF:
	case UNSPEC_TLSGD:
	case UNSPEC_TLSLDM:
	case UNSPEC_NTPOFF:
	case UNSPEC_DTPOFF:
	case UNSPEC_GOTNTPOFF:
	case UNSPEC_INDNTPOFF:
	  return false;

	default:
	  return true;
	}
      break;

    default:
      abort ();
    }
}
1798
/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference to
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.  */

int
legitimate_reload_constant_p (register rtx op)
{
  /* Accept la(y) operands.  */
  if (GET_CODE (op) == CONST_INT
      && DISP_IN_RANGE (INTVAL (op)))
    return 1;

  /* Accept l(g)hi operands (signed 16-bit immediates).  */
  if (GET_CODE (op) == CONST_INT
      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
    return 1;

  /* Accept lliXX operands (load logical immediate, one
     16-bit part set, rest zero).  */
  if (TARGET_ZARCH
      && s390_single_part (op, DImode, HImode, 0) >= 0)
    return 1;

  /* Accept larl operands.  */
  if (TARGET_CPU_ZARCH
      && larl_operand (op, VOIDmode))
    return 1;

  /* Everything else cannot be handled without reload.  */
  return 0;
}
1831
/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
   return the class of reg to actually use.  */

enum reg_class
s390_preferred_reload_class (rtx op, enum reg_class class)
{
  /* This can happen if a floating point constant is being
     reloaded into an integer register.  Leave well alone.  */
  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
      && class != FP_REGS)
    return class;

  switch (GET_CODE (op))
    {
      /* Constants we cannot reload must be forced into the
	 literal pool.  */

      case CONST_DOUBLE:
      case CONST_INT:
	if (legitimate_reload_constant_p (op))
	  return class;
	else
	  /* NO_REGS forces the constant into memory.  */
	  return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
	 it is most likely being used as an address, so
	 prefer ADDR_REGS.  If 'class' is not a superset
	 of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case PLUS:
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
	if (reg_class_subset_p (ADDR_REGS, class))
	  return ADDR_REGS;
	else
	  return NO_REGS;

      default:
	break;
    }

  return class;
}
9db1d521 1875
f3e9edff
UW
1876/* Return the register class of a scratch register needed to
1877 load IN into a register of class CLASS in MODE.
1878
1879 We need a temporary when loading a PLUS expression which
1880 is not a legitimate operand of the LOAD ADDRESS instruction. */
1881
1882enum reg_class
9c808aad
AJ
1883s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
1884 enum machine_mode mode, rtx in)
f3e9edff
UW
1885{
1886 if (s390_plus_operand (in, mode))
1887 return ADDR_REGS;
1888
1889 return NO_REGS;
1890}
1891
dc65c307
UW
1892/* Return the register class of a scratch register needed to
1893 store a register of class CLASS in MODE into OUT:
1894
c7453384 1895 We need a temporary when storing a double-word to a
dc65c307
UW
1896 non-offsettable memory address. */
1897
1898enum reg_class
9c808aad
AJ
1899s390_secondary_output_reload_class (enum reg_class class,
1900 enum machine_mode mode, rtx out)
dc65c307
UW
1901{
1902 if ((TARGET_64BIT ? mode == TImode
1903 : (mode == DImode || mode == DFmode))
1904 && reg_classes_intersect_p (GENERAL_REGS, class)
1905 && GET_CODE (out) == MEM
1906 && !offsettable_memref_p (out)
1907 && !s_operand (out, VOIDmode))
1908 return ADDR_REGS;
1909
1910 return NO_REGS;
1911}
1912
f3e9edff 1913/* Return true if OP is a PLUS that is not a legitimate
c7453384 1914 operand for the LA instruction.
f3e9edff
UW
1915 OP is the current operation.
1916 MODE is the current operation mode. */
1917
1918int
9c808aad 1919s390_plus_operand (register rtx op, enum machine_mode mode)
f3e9edff
UW
1920{
1921 if (!check_mode (op, &mode) || mode != Pmode)
1922 return FALSE;
1923
1924 if (GET_CODE (op) != PLUS)
1925 return FALSE;
1926
1927 if (legitimate_la_operand_p (op))
1928 return FALSE;
1929
1930 return TRUE;
1931}
1932
/* Generate code to load SRC, which is PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (register rtx target, register rtx src,
			  register rtx scratch)
{
  rtx sum1, sum2;
  struct s390_address ad;

  /* src must be a PLUS; get its two operands.  */
  if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
    abort ();

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));
  src = gen_rtx_PLUS (Pmode, sum1, sum2);

  /* If the address is already strictly valid, there's nothing to do.  */
  if (!s390_decompose_address (src, &ad)
      || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
      || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
    {
      /* Otherwise, one of the operands cannot be an address register;
	 we reload its value into the scratch register.  The 1..15
	 range excludes GPR 0, which cannot act as an address register
	 in s390 addressing.  */
      if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
	{
	  emit_move_insn (scratch, sum1);
	  sum1 = scratch;
	}
      if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
	{
	  emit_move_insn (scratch, sum2);
	  sum2 = scratch;
	}

      /* According to the way these invalid addresses are generated
	 in reload.c, it should never happen (at least on s390) that
	 *neither* of the PLUS components, after find_replacements
	 was applied, is an address register.  */
      if (sum1 == scratch && sum2 == scratch)
	{
	  debug_rtx (src);
	  abort ();
	}

      src = gen_rtx_PLUS (Pmode, sum1, sum2);
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  s390_load_address (target, src);
}
1991
1992
994fe660 1993/* Decompose a RTL expression ADDR for a memory address into
b808c04c
UW
1994 its components, returned in OUT.
1995
994fe660
UW
1996 Returns 0 if ADDR is not a valid memory address, nonzero
1997 otherwise. If OUT is NULL, don't return the components,
1998 but check for validity only.
9db1d521 1999
994fe660
UW
2000 Note: Only addresses in canonical form are recognized.
2001 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2002 canonical form so that they will be recognized. */
9db1d521
HP
2003
2004static int
9c808aad 2005s390_decompose_address (register rtx addr, struct s390_address *out)
9db1d521
HP
2006{
2007 rtx base = NULL_RTX;
2008 rtx indx = NULL_RTX;
2009 rtx disp = NULL_RTX;
f3e9edff 2010 int pointer = FALSE;
fd7643fb
UW
2011 int base_ptr = FALSE;
2012 int indx_ptr = FALSE;
9db1d521
HP
2013
2014 /* Decompose address into base + index + displacement. */
2015
2016 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2017 base = addr;
2018
2019 else if (GET_CODE (addr) == PLUS)
2020 {
2021 rtx op0 = XEXP (addr, 0);
2022 rtx op1 = XEXP (addr, 1);
2023 enum rtx_code code0 = GET_CODE (op0);
2024 enum rtx_code code1 = GET_CODE (op1);
2025
2026 if (code0 == REG || code0 == UNSPEC)
2027 {
2028 if (code1 == REG || code1 == UNSPEC)
2029 {
2030 indx = op0; /* index + base */
2031 base = op1;
2032 }
2033
2034 else
2035 {
2036 base = op0; /* base + displacement */
2037 disp = op1;
2038 }
2039 }
2040
2041 else if (code0 == PLUS)
2042 {
2043 indx = XEXP (op0, 0); /* index + base + disp */
2044 base = XEXP (op0, 1);
2045 disp = op1;
2046 }
2047
2048 else
2049 {
2050 return FALSE;
2051 }
2052 }
2053
2054 else
2055 disp = addr; /* displacement */
2056
2057
2058 /* Validate base register. */
2059 if (base)
2060 {
2061 if (GET_CODE (base) == UNSPEC)
2062 {
fd7643fb
UW
2063 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != UNSPEC_LTREL_BASE)
2064 return FALSE;
2065 base = gen_rtx_REG (Pmode, BASE_REGISTER);
9db1d521
HP
2066 }
2067
2068 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
fd7643fb 2069 return FALSE;
9db1d521 2070
f3e9edff
UW
2071 if (REGNO (base) == BASE_REGISTER
2072 || REGNO (base) == STACK_POINTER_REGNUM
2073 || REGNO (base) == FRAME_POINTER_REGNUM
2074 || ((reload_completed || reload_in_progress)
2075 && frame_pointer_needed
2076 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
a41c6c53 2077 || REGNO (base) == ARG_POINTER_REGNUM
f3e9edff
UW
2078 || (flag_pic
2079 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
fd7643fb 2080 pointer = base_ptr = TRUE;
9db1d521
HP
2081 }
2082
2083 /* Validate index register. */
2084 if (indx)
2085 {
2086 if (GET_CODE (indx) == UNSPEC)
2087 {
fd7643fb
UW
2088 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != UNSPEC_LTREL_BASE)
2089 return FALSE;
2090 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
9db1d521
HP
2091 }
2092
2093 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
fd7643fb 2094 return FALSE;
9db1d521 2095
f3e9edff
UW
2096 if (REGNO (indx) == BASE_REGISTER
2097 || REGNO (indx) == STACK_POINTER_REGNUM
2098 || REGNO (indx) == FRAME_POINTER_REGNUM
2099 || ((reload_completed || reload_in_progress)
2100 && frame_pointer_needed
2101 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
a41c6c53 2102 || REGNO (indx) == ARG_POINTER_REGNUM
f3e9edff
UW
2103 || (flag_pic
2104 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
fd7643fb
UW
2105 pointer = indx_ptr = TRUE;
2106 }
2107
2108 /* Prefer to use pointer as base, not index. */
2109 if (base && indx && !base_ptr
2110 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2111 {
2112 rtx tmp = base;
2113 base = indx;
2114 indx = tmp;
9db1d521
HP
2115 }
2116
2117 /* Validate displacement. */
2118 if (disp)
2119 {
2120 /* Allow integer constant in range. */
2121 if (GET_CODE (disp) == CONST_INT)
2122 {
522ec94e
UW
2123 /* If the argument pointer is involved, the displacement will change
2124 later anyway as the argument pointer gets eliminated. This could
2125 make a valid displacement invalid, but it is more likely to make
2126 an invalid displacement valid, because we sometimes access the
2127 register save area via negative offsets to the arg pointer.
2128 Thus we don't check the displacement for validity here. If after
2129 elimination the displacement turns out to be invalid after all,
2130 this is fixed up by reload in any case. */
e86e721f
UW
2131 if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
2132 {
d3632d41 2133 if (!DISP_IN_RANGE (INTVAL (disp)))
e86e721f
UW
2134 return FALSE;
2135 }
9db1d521
HP
2136 }
2137
fd7643fb 2138 /* In the small-PIC case, the linker converts @GOT
fd3cd001 2139 and @GOTNTPOFF offsets to possible displacements. */
9db1d521
HP
2140 else if (GET_CODE (disp) == CONST
2141 && GET_CODE (XEXP (disp, 0)) == UNSPEC
fd7643fb 2142 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
fd3cd001 2143 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
9db1d521
HP
2144 {
2145 if (flag_pic != 1)
2146 return FALSE;
f3e9edff
UW
2147
2148 pointer = TRUE;
9db1d521
HP
2149 }
2150
b2ccb744
UW
2151 /* Accept chunkfied literal pool symbol references. */
2152 else if (GET_CODE (disp) == CONST
2153 && GET_CODE (XEXP (disp, 0)) == MINUS
2154 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
2155 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
2156 {
2157 pointer = TRUE;
2158 }
c7453384 2159
b2ccb744
UW
2160 /* Likewise if a constant offset is present. */
2161 else if (GET_CODE (disp) == CONST
2162 && GET_CODE (XEXP (disp, 0)) == PLUS
2163 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
2164 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
2165 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
2166 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
2167 {
2168 pointer = TRUE;
2169 }
2170
c7453384 2171 /* We can convert literal pool addresses to
9db1d521
HP
2172 displacements by basing them off the base register. */
2173 else
2174 {
2175 /* In some cases, we can accept an additional
2176 small constant offset. Split these off here. */
2177
994fe660 2178 unsigned int offset = 0;
9db1d521
HP
2179
2180 if (GET_CODE (disp) == CONST
2181 && GET_CODE (XEXP (disp, 0)) == PLUS
2182 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2183 {
2184 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2185 disp = XEXP (XEXP (disp, 0), 0);
2186 }
2187
2188 /* Now we must have a literal pool address. */
2189 if (GET_CODE (disp) != SYMBOL_REF
2190 || !CONSTANT_POOL_ADDRESS_P (disp))
2191 return FALSE;
2192
9db1d521
HP
2193 /* If we have an offset, make sure it does not
2194 exceed the size of the constant pool entry. */
2195 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
2196 return FALSE;
2197
c7453384 2198 /* Either base or index must be free to
9db1d521
HP
2199 hold the base register. */
2200 if (base && indx)
2201 return FALSE;
2202
2203 /* Convert the address. */
2204 if (base)
2205 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
2206 else
2207 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2208
c7453384 2209 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
fd7643fb 2210 UNSPEC_LTREL_OFFSET);
9db1d521
HP
2211 disp = gen_rtx_CONST (Pmode, disp);
2212
2213 if (offset)
2214 disp = plus_constant (disp, offset);
f3e9edff
UW
2215
2216 pointer = TRUE;
9db1d521
HP
2217 }
2218 }
2219
f3e9edff
UW
2220 if (!base && !indx)
2221 pointer = TRUE;
c7453384 2222
9db1d521
HP
2223 if (out)
2224 {
2225 out->base = base;
2226 out->indx = indx;
2227 out->disp = disp;
f3e9edff 2228 out->pointer = pointer;
9db1d521
HP
2229 }
2230
2231 return TRUE;
2232}
2233
994fe660
UW
2234/* Return nonzero if ADDR is a valid memory address.
2235 STRICT specifies whether strict register checking applies. */
2236
9db1d521 2237int
9c808aad
AJ
2238legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2239 register rtx addr, int strict)
9db1d521 2240{
b808c04c
UW
2241 struct s390_address ad;
2242 if (!s390_decompose_address (addr, &ad))
2243 return FALSE;
2244
2245 if (strict)
2246 {
2247 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2248 return FALSE;
2249 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2250 return FALSE;
2251 }
2252 else
2253 {
2254 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2255 return FALSE;
2256 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2257 return FALSE;
2258 }
2259
2260 return TRUE;
9db1d521
HP
2261}
2262
ba956982
UW
2263/* Return 1 if OP is a valid operand for the LA instruction.
2264 In 31-bit, we need to prove that the result is used as an
2265 address, as LA performs only a 31-bit addition. */
2266
2267int
9c808aad 2268legitimate_la_operand_p (register rtx op)
ba956982
UW
2269{
2270 struct s390_address addr;
b808c04c 2271 if (!s390_decompose_address (op, &addr))
ba956982
UW
2272 return FALSE;
2273
f3e9edff 2274 if (TARGET_64BIT || addr.pointer)
ba956982
UW
2275 return TRUE;
2276
f3e9edff
UW
2277 return FALSE;
2278}
ba956982 2279
100c7420 2280/* Return 1 if OP is a valid operand for the LA instruction,
4888ec5d 2281 and we prefer to use LA over addition to compute it. */
c7453384 2282
100c7420 2283int
9c808aad 2284preferred_la_operand_p (register rtx op)
100c7420
UW
2285{
2286 struct s390_address addr;
2287 if (!s390_decompose_address (op, &addr))
2288 return FALSE;
2289
2290 if (!TARGET_64BIT && !addr.pointer)
2291 return FALSE;
2292
2293 if (addr.pointer)
2294 return TRUE;
2295
4888ec5d
UW
2296 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2297 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2298 return TRUE;
100c7420
UW
2299
2300 return FALSE;
2301}
2302
a41c6c53
UW
2303/* Emit a forced load-address operation to load SRC into DST.
2304 This will use the LOAD ADDRESS instruction even in situations
2305 where legitimate_la_operand_p (SRC) returns false. */
ba956982 2306
a41c6c53 2307void
9c808aad 2308s390_load_address (rtx dst, rtx src)
f3e9edff 2309{
a41c6c53
UW
2310 if (TARGET_64BIT)
2311 emit_move_insn (dst, src);
2312 else
2313 emit_insn (gen_force_la_31 (dst, src));
ba956982
UW
2314}
2315
9db1d521
HP
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      returned.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_FLAG_LOCAL set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
   reg also appears in the address.  */

rtx
legitimize_pic_address (rtx orig, rtx reg)
{
  rtx addr = orig;
  rtx new = orig;
  rtx base;

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
    {
      /* This is a local symbol.  */
      if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
        {
          /* Access local symbols PC-relative via LARL.
             This is the same as in the non-PIC case, so it is
             handled automatically ...  */
        }
      else
        {
          /* Access local symbols relative to the GOT.  */

          rtx temp = reg? reg : gen_reg_rtx (Pmode);

          /* Once reload has started, record the implicit use of the
             PIC register explicitly.  */
          if (reload_in_progress || reload_completed)
            regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

          /* Materialize the @GOTOFF constant via the literal pool.  */
          addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
          addr = gen_rtx_CONST (Pmode, addr);
          addr = force_const_mem (Pmode, addr);
          emit_move_insn (temp, addr);

          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
          if (reg != 0)
            {
              emit_move_insn (reg, new);
              new = reg;
            }
        }
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      if (reg == 0)
        reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
        {
          /* Assume GOT offset < 4k.  This is handled the same way
             in both 31- and 64-bit code (@GOT).  */

          if (reload_in_progress || reload_completed)
            regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
          new = gen_rtx_CONST (Pmode, new);
          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
          new = gen_rtx_MEM (Pmode, new);
          /* The GOT slot load is treated as constant memory.  */
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else if (TARGET_CPU_ZARCH)
        {
          /* If the GOT offset might be >= 4k, we determine the position
             of the GOT entry via a PC-relative LARL (@GOTENT).  */

          rtx temp = gen_reg_rtx (Pmode);

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
          new = gen_rtx_CONST (Pmode, new);
          emit_move_insn (temp, new);

          new = gen_rtx_MEM (Pmode, temp);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else
        {
          /* If the GOT offset might be >= 4k, we have to load it
             from the literal pool (@GOT).  */

          rtx temp = gen_reg_rtx (Pmode);

          if (reload_in_progress || reload_completed)
            regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

          addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
          addr = gen_rtx_CONST (Pmode, addr);
          addr = force_const_mem (Pmode, addr);
          emit_move_insn (temp, addr);

          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
    }
  else
    {
      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          if (GET_CODE (addr) == UNSPEC)
            {
              if (XVECLEN (addr, 0) != 1)
                abort ();
              switch (XINT (addr, 1))
                {
                /* If someone moved a GOT-relative UNSPEC
                   out of the literal pool, force them back in.  */
                case UNSPEC_GOTOFF:
                case UNSPEC_PLTOFF:
                  new = force_const_mem (Pmode, orig);
                  break;

                /* @GOT is OK as is if small.  */
                case UNSPEC_GOT:
                  if (flag_pic == 2)
                    new = force_const_mem (Pmode, orig);
                  break;

                /* @GOTENT is OK as is.  */
                case UNSPEC_GOTENT:
                  break;

                /* @PLT is OK as is on 64-bit, must be converted to
                   GOT-relative @PLTOFF on 31-bit.  */
                case UNSPEC_PLT:
                  if (!TARGET_CPU_ZARCH)
                    {
                      rtx temp = reg? reg : gen_reg_rtx (Pmode);

                      if (reload_in_progress || reload_completed)
                        regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

                      addr = XVECEXP (addr, 0, 0);
                      addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
                                             UNSPEC_PLTOFF);
                      addr = gen_rtx_CONST (Pmode, addr);
                      addr = force_const_mem (Pmode, addr);
                      emit_move_insn (temp, addr);

                      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
                      if (reg != 0)
                        {
                          emit_move_insn (reg, new);
                          new = reg;
                        }
                    }
                  break;

                /* Everything else cannot happen.  */
                default:
                  abort ();
                }
            }
          else if (GET_CODE (addr) != PLUS)
            abort ();
        }
      if (GET_CODE (addr) == PLUS)
        {
          rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
          /* Check first to see if this is a constant offset
             from a local symbol reference.  */
          if ((GET_CODE (op0) == LABEL_REF
               || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
              && GET_CODE (op1) == CONST_INT)
            {
              if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
                {
                  if (INTVAL (op1) & 1)
                    {
                      /* LARL can't handle odd offsets, so emit a
                         pair of LARL and LA.  */
                      rtx temp = reg? reg : gen_reg_rtx (Pmode);

                      /* If the odd offset is also out of LA's displacement
                         range, fold all but 1 into the LARL operand.  */
                      if (!DISP_IN_RANGE (INTVAL (op1)))
                        {
                          int even = INTVAL (op1) - 1;
                          op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
                          op0 = gen_rtx_CONST (Pmode, op0);
                          op1 = const1_rtx;
                        }

                      emit_move_insn (temp, op0);
                      new = gen_rtx_PLUS (Pmode, temp, op1);

                      if (reg != 0)
                        {
                          emit_move_insn (reg, new);
                          new = reg;
                        }
                    }
                  else
                    {
                      /* If the offset is even, we can just use LARL.
                         This will happen automatically.  */
                    }
                }
              else
                {
                  /* Access local symbols relative to the GOT.  */

                  rtx temp = reg? reg : gen_reg_rtx (Pmode);

                  if (reload_in_progress || reload_completed)
                    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

                  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
                                         UNSPEC_GOTOFF);
                  addr = gen_rtx_PLUS (Pmode, addr, op1);
                  addr = gen_rtx_CONST (Pmode, addr);
                  addr = force_const_mem (Pmode, addr);
                  emit_move_insn (temp, addr);

                  new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
                  if (reg != 0)
                    {
                      emit_move_insn (reg, new);
                      new = reg;
                    }
                }
            }

          /* Now, check whether it is a GOT relative symbol plus offset
             that was pulled out of the literal pool.  Force it back in.  */

          else if (GET_CODE (op0) == UNSPEC
                   && GET_CODE (op1) == CONST_INT)
            {
              if (XVECLEN (op0, 0) != 1)
                abort ();
              if (XINT (op0, 1) != UNSPEC_GOTOFF)
                abort ();

              new = force_const_mem (Pmode, orig);
            }

          /* Otherwise, compute the sum.  */
          else
            {
              base = legitimize_pic_address (XEXP (addr, 0), reg);
              new = legitimize_pic_address (XEXP (addr, 1),
                                            base == reg ? NULL_RTX : reg);
              if (GET_CODE (new) == CONST_INT)
                new = plus_constant (base, INTVAL (new));
              else
                {
                  /* Reassociate so the constant part ends up outermost.  */
                  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
                    {
                      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
                      new = XEXP (new, 1);
                    }
                  new = gen_rtx_PLUS (Pmode, base, new);
                }

              if (GET_CODE (new) == CONST)
                new = XEXP (new, 0);
              new = force_operand (new, 0);
            }
        }
    }
  return new;
}
2599
fd3cd001
UW
2600/* Load the thread pointer into a register. */
2601
2602static rtx
9c808aad 2603get_thread_pointer (void)
fd3cd001
UW
2604{
2605 rtx tp;
2606
2607 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
2608 tp = force_reg (Pmode, tp);
2609 mark_reg_pointer (tp, BITS_PER_WORD);
2610
2611 return tp;
2612}
2613
2614/* Construct the SYMBOL_REF for the tls_get_offset function. */
2615
2616static GTY(()) rtx s390_tls_symbol;
2617rtx
9c808aad 2618s390_tls_get_offset (void)
fd3cd001
UW
2619{
2620 if (!s390_tls_symbol)
2621 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
2622
2623 return s390_tls_symbol;
2624}
2625
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  REG may be used as temporary.  */

static rtx
legitimize_tls_address (rtx addr, rtx reg)
{
  rtx new, tls_call, temp, base, r2, insn;

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (tls_symbolic_operand (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
        /* Call __tls_get_offset with the @TLSGD constant in r2;
           the call returns the symbol's offset in r2.  */
        start_sequence ();
        r2 = gen_rtx_REG (Pmode, 2);
        tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
        new = gen_rtx_CONST (Pmode, tls_call);
        new = force_const_mem (Pmode, new);
        emit_move_insn (r2, new);
        emit_call_insn (gen_call_value_tls (r2, tls_call));
        insn = get_insns ();
        end_sequence ();

        /* Wrap the whole call sequence as a libcall block equivalent
           to the @NTPOFF value, so it can be CSE'd.  */
        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
        temp = gen_reg_rtx (Pmode);
        emit_libcall_block (insn, temp, r2, new);

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      case TLS_MODEL_LOCAL_DYNAMIC:
        /* Call __tls_get_offset once for the module base ...  */
        start_sequence ();
        r2 = gen_rtx_REG (Pmode, 2);
        tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
        new = gen_rtx_CONST (Pmode, tls_call);
        new = force_const_mem (Pmode, new);
        emit_move_insn (r2, new);
        emit_call_insn (gen_call_value_tls (r2, tls_call));
        insn = get_insns ();
        end_sequence ();

        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
        temp = gen_reg_rtx (Pmode);
        emit_libcall_block (insn, temp, r2, new);

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        base = gen_reg_rtx (Pmode);
        s390_load_address (base, new);

        /* ... then add the symbol's @DTPOFF offset to that base.  */
        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
        new = gen_rtx_CONST (Pmode, new);
        new = force_const_mem (Pmode, new);
        temp = gen_reg_rtx (Pmode);
        emit_move_insn (temp, new);

        new = gen_rtx_PLUS (Pmode, base, temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      case TLS_MODEL_INITIAL_EXEC:
        if (flag_pic == 1)
          {
            /* Assume GOT offset < 4k.  This is handled the same way
               in both 31- and 64-bit code.  */

            if (reload_in_progress || reload_completed)
              regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
            new = gen_rtx_MEM (Pmode, new);
            RTX_UNCHANGING_P (new) = 1;
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);
          }
        else if (TARGET_CPU_ZARCH)
          {
            /* If the GOT offset might be >= 4k, we determine the position
               of the GOT entry via a PC-relative LARL.  */

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);

            new = gen_rtx_MEM (Pmode, temp);
            RTX_UNCHANGING_P (new) = 1;
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);
          }
        else if (flag_pic)
          {
            /* If the GOT offset might be >= 4k, we have to load it
               from the literal pool.  */

            if (reload_in_progress || reload_completed)
              regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            new = force_const_mem (Pmode, new);
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);

            new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
            new = gen_rtx_MEM (Pmode, new);
            RTX_UNCHANGING_P (new) = 1;

            /* Keep the load tied to ADDR via UNSPEC_TLS_LOAD so the
               linker can recognize the relocation.  */
            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
            temp = gen_reg_rtx (Pmode);
            emit_insn (gen_rtx_SET (Pmode, temp, new));
          }
        else
          {
            /* In position-dependent code, load the absolute address of
               the GOT entry from the literal pool.  */

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
            new = gen_rtx_CONST (Pmode, new);
            new = force_const_mem (Pmode, new);
            temp = gen_reg_rtx (Pmode);
            emit_move_insn (temp, new);

            new = temp;
            new = gen_rtx_MEM (Pmode, new);
            RTX_UNCHANGING_P (new) = 1;

            new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
            temp = gen_reg_rtx (Pmode);
            emit_insn (gen_rtx_SET (Pmode, temp, new));
          }

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      case TLS_MODEL_LOCAL_EXEC:
        /* The offset is known at link time; load @NTPOFF from the
           literal pool and add the thread pointer.  */
        new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
        new = gen_rtx_CONST (Pmode, new);
        new = force_const_mem (Pmode, new);
        temp = gen_reg_rtx (Pmode);
        emit_move_insn (temp, new);

        new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
        if (reg != 0)
          {
            s390_load_address (reg, new);
            new = reg;
          }
        break;

      default:
        abort ();
      }

  else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (addr, 0), 1))
        {
        case UNSPEC_INDNTPOFF:
          if (TARGET_CPU_ZARCH)
            new = addr;
          else
            abort ();
          break;

        default:
          abort ();
        }
    }

  else
    abort ();  /* for now ... */

  return new;
}
2815
9db1d521
HP
2816/* Emit insns to move operands[1] into operands[0]. */
2817
2818void
9c808aad 2819emit_symbolic_move (rtx *operands)
9db1d521 2820{
4023fb28 2821 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
9db1d521 2822
fd3cd001 2823 if (GET_CODE (operands[0]) == MEM)
9db1d521 2824 operands[1] = force_reg (Pmode, operands[1]);
fd3cd001
UW
2825 else if (TLS_SYMBOLIC_CONST (operands[1]))
2826 operands[1] = legitimize_tls_address (operands[1], temp);
2827 else if (flag_pic)
9db1d521
HP
2828 operands[1] = legitimize_pic_address (operands[1], temp);
2829}
2830
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the operand pointed to by X.

   When -fpic is used, special handling is needed for symbolic references.
   See comments by legitimize_pic_address for details.  */

rtx
legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
                    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  rtx constant_term = const0_rtx;

  /* TLS and PIC symbolic addresses get their own legitimization;
     if the result is already valid, we are done.  */
  if (TLS_SYMBOLIC_CONST (x))
    {
      x = legitimize_tls_address (x, 0);

      if (legitimate_address_p (mode, x, FALSE))
        return x;
    }
  else if (flag_pic)
    {
      if (SYMBOLIC_CONST (x)
          || (GET_CODE (x) == PLUS
              && (SYMBOLIC_CONST (XEXP (x, 0))
                  || SYMBOLIC_CONST (XEXP (x, 1)))))
          x = legitimize_pic_address (x, 0);

      if (legitimate_address_p (mode, x, FALSE))
        return x;
    }

  /* Strip off the constant part; it is re-added at the end.  */
  x = eliminate_constant_term (x, &constant_term);

  /* Optimize loading of large displacements by splitting them
     into the multiple of 4K and the rest; this allows the
     former to be CSE'd if possible.

     Don't do this if the displacement is added to a register
     pointing into the stack frame, as the offsets will
     change later anyway.  */

  if (GET_CODE (constant_term) == CONST_INT
      && !TARGET_LONG_DISPLACEMENT
      && !DISP_IN_RANGE (INTVAL (constant_term))
      && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
    {
      HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
      HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;

      rtx temp = gen_reg_rtx (Pmode);
      rtx val = force_operand (GEN_INT (upper), temp);
      if (val != temp)
        emit_move_insn (temp, val);

      x = gen_rtx_PLUS (Pmode, x, temp);
      constant_term = GEN_INT (lower);
    }

  /* Force the non-register operand of a PLUS into a register so the
     sum has base+index form.  */
  if (GET_CODE (x) == PLUS)
    {
      if (GET_CODE (XEXP (x, 0)) == REG)
        {
          register rtx temp = gen_reg_rtx (Pmode);
          register rtx val = force_operand (XEXP (x, 1), temp);
          if (val != temp)
            emit_move_insn (temp, val);

          x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
        }

      else if (GET_CODE (XEXP (x, 1)) == REG)
        {
          register rtx temp = gen_reg_rtx (Pmode);
          register rtx val = force_operand (XEXP (x, 0), temp);
          if (val != temp)
            emit_move_insn (temp, val);

          x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
        }
    }

  if (constant_term != const0_rtx)
    x = gen_rtx_PLUS (Pmode, x, constant_term);

  return x;
}
2922
a41c6c53
UW
/* Emit code to copy LEN bytes from SRC to DST.
   (The emitted movstr patterns move memory at SRC to DST.)  */

void
s390_expand_movstr (rtx dst, rtx src, rtx len)
{
  /* Short constant lengths fit a single MVC; the pattern encodes
     length - 1.  */
  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
        emit_insn (gen_movstr_short (dst, src, GEN_INT (INTVAL (len) - 1)));
    }

  else if (TARGET_MVCLE)
    {
      /* MVCLE handles arbitrary lengths in one instruction.  */
      emit_insn (gen_movstr_long (dst, src, convert_to_mode (Pmode, len, 1)));
    }

  else
    {
      /* Otherwise, emit a loop moving 256-byte blocks, followed by
         one final MVC for the remainder.  */
      rtx dst_addr, src_addr, count, blocks, temp;
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;
      tree type;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
        mode = Pmode;

      type = lang_hooks.types.type_for_mode (mode, 1);
      if (!type)
        abort ();

      dst_addr = gen_reg_rtx (Pmode);
      src_addr = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      /* Nothing to do for a zero length.  */
      emit_cmp_and_jump_insns (count, const0_rtx,
                               EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
      emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
      dst = change_address (dst, VOIDmode, dst_addr);
      src = change_address (src, VOIDmode, src_addr);

      /* count = len - 1; blocks = count >> 8.  */
      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
      if (temp != count)
        emit_move_insn (count, temp);

      temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_start_loop (1);
      expand_exit_loop_top_cond (0, build (NE_EXPR, type,
                                           make_tree (type, blocks),
                                           make_tree (type, const0_rtx)));

      /* Move one full 256-byte block and advance both addresses.  */
      emit_insn (gen_movstr_short (dst, src, GEN_INT (255)));
      s390_load_address (dst_addr,
                         gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
      s390_load_address (src_addr,
                         gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_end_loop ();

      /* Move the remaining (count mod 256) + 1 bytes.  */
      emit_insn (gen_movstr_short (dst, src,
                                   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);
    }
}
2998
2999/* Emit code to clear LEN bytes at DST. */
3000
3001void
9c808aad 3002s390_expand_clrstr (rtx dst, rtx len)
a41c6c53 3003{
a41c6c53
UW
3004 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3005 {
3006 if (INTVAL (len) > 0)
b9404c99 3007 emit_insn (gen_clrstr_short (dst, GEN_INT (INTVAL (len) - 1)));
a41c6c53
UW
3008 }
3009
3010 else if (TARGET_MVCLE)
3011 {
b9404c99 3012 emit_insn (gen_clrstr_long (dst, convert_to_mode (Pmode, len, 1)));
a41c6c53
UW
3013 }
3014
3015 else
3016 {
3017 rtx dst_addr, src_addr, count, blocks, temp;
3018 rtx end_label = gen_label_rtx ();
3019 enum machine_mode mode;
3020 tree type;
3021
3022 mode = GET_MODE (len);
3023 if (mode == VOIDmode)
b9404c99 3024 mode = Pmode;
a41c6c53 3025
47798692 3026 type = lang_hooks.types.type_for_mode (mode, 1);
a41c6c53
UW
3027 if (!type)
3028 abort ();
3029
3030 dst_addr = gen_reg_rtx (Pmode);
3031 src_addr = gen_reg_rtx (Pmode);
3032 count = gen_reg_rtx (mode);
3033 blocks = gen_reg_rtx (mode);
3034
3035 convert_move (count, len, 1);
c7453384 3036 emit_cmp_and_jump_insns (count, const0_rtx,
a41c6c53
UW
3037 EQ, NULL_RTX, mode, 1, end_label);
3038
3039 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3040 dst = change_address (dst, VOIDmode, dst_addr);
c7453384 3041
a41c6c53
UW
3042 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3043 if (temp != count)
3044 emit_move_insn (count, temp);
3045
3046 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3047 if (temp != blocks)
3048 emit_move_insn (blocks, temp);
3049
3050 expand_start_loop (1);
3051 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3052 make_tree (type, blocks),
3053 make_tree (type, const0_rtx)));
3054
b9404c99 3055 emit_insn (gen_clrstr_short (dst, GEN_INT (255)));
c7453384 3056 s390_load_address (dst_addr,
a41c6c53 3057 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
c7453384 3058
a41c6c53
UW
3059 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3060 if (temp != blocks)
3061 emit_move_insn (blocks, temp);
3062
3063 expand_end_loop ();
3064
b9404c99 3065 emit_insn (gen_clrstr_short (dst, convert_to_mode (Pmode, count, 1)));
a41c6c53
UW
3066 emit_label (end_label);
3067 }
3068}
3069
/* Emit code to compare LEN bytes at OP0 with those at OP1,
   and return the result in TARGET.  */

void
s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
{
  /* Select the result-extraction pattern matching TARGET's mode.  */
  rtx (*gen_result) (rtx) =
    GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;

  op0 = protect_from_queue (op0, 0);
  op1 = protect_from_queue (op1, 0);
  len = protect_from_queue (len, 0);

  /* Short constant lengths fit a single CLC; the pattern encodes
     length - 1.  */
  if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
    {
      if (INTVAL (len) > 0)
        {
          emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
          emit_insn (gen_result (target));
        }
      else
        /* Zero-length comparison always yields "equal".  */
        emit_move_insn (target, const0_rtx);
    }

  else /* if (TARGET_MVCLE) */
    {
      emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
      emit_insn (gen_result (target));
    }

#if 0
  /* Deactivate for now as profile code cannot cope with
     CC being live across basic block boundaries.  */
  else
    {
      rtx addr0, addr1, count, blocks, temp;
      rtx end_label = gen_label_rtx ();
      enum machine_mode mode;
      tree type;

      mode = GET_MODE (len);
      if (mode == VOIDmode)
        mode = Pmode;

      type = lang_hooks.types.type_for_mode (mode, 1);
      if (!type)
        abort ();

      addr0 = gen_reg_rtx (Pmode);
      addr1 = gen_reg_rtx (Pmode);
      count = gen_reg_rtx (mode);
      blocks = gen_reg_rtx (mode);

      convert_move (count, len, 1);
      emit_cmp_and_jump_insns (count, const0_rtx,
                               EQ, NULL_RTX, mode, 1, end_label);

      emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
      emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
      op0 = change_address (op0, VOIDmode, addr0);
      op1 = change_address (op1, VOIDmode, addr1);

      /* count = len - 1; blocks = count >> 8.  */
      temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
      if (temp != count)
        emit_move_insn (count, temp);

      temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_start_loop (1);
      expand_exit_loop_top_cond (0, build (NE_EXPR, type,
                                           make_tree (type, blocks),
                                           make_tree (type, const0_rtx)));

      /* Compare one 256-byte block; exit the loop early on the first
         difference (CC != 0).  */
      emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
      temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
      temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
                        gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
      temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
      emit_jump_insn (temp);

      s390_load_address (addr0,
                         gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
      s390_load_address (addr1,
                         gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));

      temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
      if (temp != blocks)
        emit_move_insn (blocks, temp);

      expand_end_loop ();

      emit_insn (gen_cmpmem_short (op0, op1,
                                   convert_to_mode (Pmode, count, 1)));
      emit_label (end_label);

      emit_insn (gen_result (target));
    }
#endif
}
3171
6b2300b3
JJ
3172/* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3173 We need to emit DTP-relative relocations. */
3174
3175void
9c808aad 3176s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
6b2300b3
JJ
3177{
3178 switch (size)
3179 {
3180 case 4:
3181 fputs ("\t.long\t", file);
3182 break;
3183 case 8:
3184 fputs ("\t.quad\t", file);
3185 break;
3186 default:
3187 abort ();
3188 }
3189 output_addr_const (file, x);
3190 fputs ("@DTPOFF", file);
3191}
3192
4c8c0dec
JJ
3193/* In the name of slightly smaller debug output, and to cater to
3194 general assembler losage, recognize various UNSPEC sequences
3195 and turn them back into a direct symbol reference. */
3196
69bd9368 3197static rtx
9c808aad 3198s390_delegitimize_address (rtx orig_x)
4c8c0dec
JJ
3199{
3200 rtx x = orig_x, y;
3201
3202 if (GET_CODE (x) != MEM)
3203 return orig_x;
3204
3205 x = XEXP (x, 0);
3206 if (GET_CODE (x) == PLUS
3207 && GET_CODE (XEXP (x, 1)) == CONST
3208 && GET_CODE (XEXP (x, 0)) == REG
3209 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3210 {
3211 y = XEXP (XEXP (x, 1), 0);
3212 if (GET_CODE (y) == UNSPEC
fd7643fb 3213 && XINT (y, 1) == UNSPEC_GOT)
4c8c0dec
JJ
3214 return XVECEXP (y, 0, 0);
3215 return orig_x;
3216 }
3217
3218 if (GET_CODE (x) == CONST)
3219 {
3220 y = XEXP (x, 0);
3221 if (GET_CODE (y) == UNSPEC
fd7643fb 3222 && XINT (y, 1) == UNSPEC_GOTENT)
4c8c0dec
JJ
3223 return XVECEXP (y, 0, 0);
3224 return orig_x;
3225 }
3226
c7453384 3227 return orig_x;
4c8c0dec 3228}
ba956982 3229
ac32b25e
UW
3230/* Output shift count operand OP to stdio stream FILE. */
3231
3232static void
3233print_shift_count_operand (FILE *file, rtx op)
3234{
3235 HOST_WIDE_INT offset = 0;
3236
3237 /* We can have an integer constant, an address register,
3238 or a sum of the two. */
3239 if (GET_CODE (op) == CONST_INT)
3240 {
3241 offset = INTVAL (op);
3242 op = NULL_RTX;
3243 }
3244 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
3245 {
3246 offset = INTVAL (XEXP (op, 1));
3247 op = XEXP (op, 0);
3248 }
3249 while (op && GET_CODE (op) == SUBREG)
3250 op = SUBREG_REG (op);
3251
3252 /* Sanity check. */
3253 if (op && (GET_CODE (op) != REG
3254 || REGNO (op) >= FIRST_PSEUDO_REGISTER
3255 || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
3256 abort ();
3257
3258 /* Shift counts are truncated to the low six bits anyway. */
3259 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
3260 if (op)
3261 fprintf (file, "(%s)", reg_names[REGNO (op)]);
3262}
3263
fd3cd001
UW
3264/* Locate some local-dynamic symbol still in use by this function
3265 so that we can print its name in local-dynamic base patterns. */
3266
3267static const char *
9c808aad 3268get_some_local_dynamic_name (void)
fd3cd001
UW
3269{
3270 rtx insn;
3271
3272 if (cfun->machine->some_ld_name)
3273 return cfun->machine->some_ld_name;
3274
3275 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3276 if (INSN_P (insn)
3277 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3278 return cfun->machine->some_ld_name;
3279
3280 abort ();
3281}
3282
3283static int
9c808aad 3284get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
fd3cd001
UW
3285{
3286 rtx x = *px;
3287
3288 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3289 {
3290 x = get_pool_constant (x);
3291 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3292 }
3293
3294 if (GET_CODE (x) == SYMBOL_REF
3295 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3296 {
3297 cfun->machine->some_ld_name = XSTR (x, 0);
3298 return 1;
3299 }
3300
3301 return 0;
3302}
3303
faeb9bb6
UW
3304/* Output machine-dependent UNSPECs occurring in address constant X
3305 in assembler syntax to stdio stream FILE. Returns true if the
3306 constant X could be recognized, false otherwise. */
9db1d521 3307
faeb9bb6
UW
3308bool
3309s390_output_addr_const_extra (FILE *file, rtx x)
9db1d521 3310{
faeb9bb6
UW
3311 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
3312 switch (XINT (x, 1))
3313 {
3314 case UNSPEC_GOTENT:
3315 output_addr_const (file, XVECEXP (x, 0, 0));
3316 fprintf (file, "@GOTENT");
3317 return true;
3318 case UNSPEC_GOT:
3319 output_addr_const (file, XVECEXP (x, 0, 0));
3320 fprintf (file, "@GOT");
3321 return true;
3322 case UNSPEC_GOTOFF:
3323 output_addr_const (file, XVECEXP (x, 0, 0));
3324 fprintf (file, "@GOTOFF");
3325 return true;
3326 case UNSPEC_PLT:
3327 output_addr_const (file, XVECEXP (x, 0, 0));
3328 fprintf (file, "@PLT");
3329 return true;
3330 case UNSPEC_PLTOFF:
3331 output_addr_const (file, XVECEXP (x, 0, 0));
3332 fprintf (file, "@PLTOFF");
3333 return true;
3334 case UNSPEC_TLSGD:
3335 output_addr_const (file, XVECEXP (x, 0, 0));
3336 fprintf (file, "@TLSGD");
3337 return true;
3338 case UNSPEC_TLSLDM:
3339 assemble_name (file, get_some_local_dynamic_name ());
3340 fprintf (file, "@TLSLDM");
3341 return true;
3342 case UNSPEC_DTPOFF:
3343 output_addr_const (file, XVECEXP (x, 0, 0));
3344 fprintf (file, "@DTPOFF");
3345 return true;
3346 case UNSPEC_NTPOFF:
3347 output_addr_const (file, XVECEXP (x, 0, 0));
3348 fprintf (file, "@NTPOFF");
3349 return true;
3350 case UNSPEC_GOTNTPOFF:
3351 output_addr_const (file, XVECEXP (x, 0, 0));
3352 fprintf (file, "@GOTNTPOFF");
3353 return true;
3354 case UNSPEC_INDNTPOFF:
3355 output_addr_const (file, XVECEXP (x, 0, 0));
3356 fprintf (file, "@INDNTPOFF");
3357 return true;
3358 }
9db1d521 3359
faeb9bb6 3360 return false;
9db1d521
HP
3361}
3362
c7453384 3363/* Output address operand ADDR in assembler syntax to
994fe660 3364 stdio stream FILE. */
9db1d521
HP
3365
3366void
9c808aad 3367print_operand_address (FILE *file, rtx addr)
9db1d521
HP
3368{
3369 struct s390_address ad;
3370
b808c04c
UW
3371 if (!s390_decompose_address (addr, &ad)
3372 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3373 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
4023fb28 3374 output_operand_lossage ("Cannot decompose address.");
c7453384 3375
9db1d521 3376 if (ad.disp)
faeb9bb6 3377 output_addr_const (file, ad.disp);
9db1d521
HP
3378 else
3379 fprintf (file, "0");
3380
3381 if (ad.base && ad.indx)
3382 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
3383 reg_names[REGNO (ad.base)]);
3384 else if (ad.base)
3385 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
3386}
3387
c7453384
EC
3388/* Output operand X in assembler syntax to stdio stream FILE.
3389 CODE specified the format flag. The following format flags
994fe660
UW
3390 are recognized:
3391
3392 'C': print opcode suffix for branch condition.
3393 'D': print opcode suffix for inverse branch condition.
fd3cd001 3394 'J': print tls_load/tls_gdcall/tls_ldcall suffix
994fe660
UW
3395 'O': print only the displacement of a memory reference.
3396 'R': print only the base register of a memory reference.
3397 'N': print the second word of a DImode operand.
3398 'M': print the second word of a TImode operand.
ac32b25e 3399 'Y': print shift count operand.
994fe660 3400
5519a4f9
KH
3401 'b': print integer X as if it's an unsigned byte.
3402 'x': print integer X as if it's an unsigned word.
f19a9af7
AK
3403 'h': print integer X as if it's a signed word.
3404 'i': print the first nonzero HImode part of X.
3405 'j': print the first HImode part unequal to 0xffff of X. */
9db1d521
HP
3406
3407void
9c808aad 3408print_operand (FILE *file, rtx x, int code)
9db1d521
HP
3409{
3410 switch (code)
3411 {
3412 case 'C':
ba956982 3413 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
9db1d521
HP
3414 return;
3415
3416 case 'D':
ba956982 3417 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
9db1d521
HP
3418 return;
3419
fd3cd001
UW
3420 case 'J':
3421 if (GET_CODE (x) == SYMBOL_REF)
3422 {
3423 fprintf (file, "%s", ":tls_load:");
3424 output_addr_const (file, x);
3425 }
3426 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3427 {
3428 fprintf (file, "%s", ":tls_gdcall:");
3429 output_addr_const (file, XVECEXP (x, 0, 0));
3430 }
3431 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3432 {
3433 fprintf (file, "%s", ":tls_ldcall:");
3434 assemble_name (file, get_some_local_dynamic_name ());
3435 }
3436 else
3437 abort ();
3438 return;
3439
9db1d521
HP
3440 case 'O':
3441 {
3442 struct s390_address ad;
3443
3444 if (GET_CODE (x) != MEM
b808c04c
UW
3445 || !s390_decompose_address (XEXP (x, 0), &ad)
3446 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
9db1d521 3447 || ad.indx)
994fe660 3448 abort ();
9db1d521
HP
3449
3450 if (ad.disp)
faeb9bb6 3451 output_addr_const (file, ad.disp);
9db1d521
HP
3452 else
3453 fprintf (file, "0");
3454 }
3455 return;
3456
3457 case 'R':
3458 {
3459 struct s390_address ad;
3460
3461 if (GET_CODE (x) != MEM
b808c04c
UW
3462 || !s390_decompose_address (XEXP (x, 0), &ad)
3463 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
9db1d521 3464 || ad.indx)
994fe660 3465 abort ();
9db1d521
HP
3466
3467 if (ad.base)
3468 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3469 else
3470 fprintf (file, "0");
3471 }
3472 return;
3473
3474 case 'N':
3475 if (GET_CODE (x) == REG)
3476 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3477 else if (GET_CODE (x) == MEM)
3478 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
3479 else
994fe660 3480 abort ();
9db1d521
HP
3481 break;
3482
3483 case 'M':
3484 if (GET_CODE (x) == REG)
3485 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3486 else if (GET_CODE (x) == MEM)
3487 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
3488 else
994fe660 3489 abort ();
9db1d521 3490 break;
ac32b25e
UW
3491
3492 case 'Y':
3493 print_shift_count_operand (file, x);
3494 return;
9db1d521
HP
3495 }
3496
3497 switch (GET_CODE (x))
3498 {
3499 case REG:
3500 fprintf (file, "%s", reg_names[REGNO (x)]);
3501 break;
3502
3503 case MEM:
3504 output_address (XEXP (x, 0));
3505 break;
3506
3507 case CONST:
3508 case CODE_LABEL:
3509 case LABEL_REF:
3510 case SYMBOL_REF:
faeb9bb6 3511 output_addr_const (file, x);
9db1d521
HP
3512 break;
3513
3514 case CONST_INT:
3515 if (code == 'b')
4023fb28
UW
3516 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3517 else if (code == 'x')
3518 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3519 else if (code == 'h')
3520 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
f19a9af7
AK
3521 else if (code == 'i')
3522 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
3523 s390_extract_part (x, HImode, 0));
3524 else if (code == 'j')
3525 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
3526 s390_extract_part (x, HImode, -1));
4023fb28
UW
3527 else
3528 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3529 break;
3530
3531 case CONST_DOUBLE:
3532 if (GET_MODE (x) != VOIDmode)
3533 abort ();
3534 if (code == 'b')
3535 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
9db1d521 3536 else if (code == 'x')
4023fb28 3537 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
9db1d521 3538 else if (code == 'h')
4023fb28 3539 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
9db1d521 3540 else
4023fb28 3541 abort ();
9db1d521
HP
3542 break;
3543
3544 default:
3545 fatal_insn ("UNKNOWN in print_operand !?", x);
3546 break;
3547 }
3548}
3549
301d03af
RS
3550/* Target hook for assembling integer objects. We need to define it
3551 here to work a round a bug in some versions of GAS, which couldn't
3552 handle values smaller than INT_MIN when printed in decimal. */
3553
3554static bool
9c808aad 3555s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af
RS
3556{
3557 if (size == 8 && aligned_p
3558 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
3559 {
4a0a75dd
KG
3560 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
3561 INTVAL (x));
301d03af
RS
3562 return true;
3563 }
3564 return default_assemble_integer (x, size, aligned_p);
3565}
3566
c7453384 3567/* Returns true if register REGNO is used for forming
994fe660 3568 a memory address in expression X. */
9db1d521
HP
3569
3570static int
9c808aad 3571reg_used_in_mem_p (int regno, rtx x)
9db1d521
HP
3572{
3573 enum rtx_code code = GET_CODE (x);
3574 int i, j;
3575 const char *fmt;
c7453384 3576
9db1d521
HP
3577 if (code == MEM)
3578 {
3579 if (refers_to_regno_p (regno, regno+1,
3580 XEXP (x, 0), 0))
3581 return 1;
3582 }
c7453384 3583 else if (code == SET
4023fb28
UW
3584 && GET_CODE (SET_DEST (x)) == PC)
3585 {
3586 if (refers_to_regno_p (regno, regno+1,
3587 SET_SRC (x), 0))
3588 return 1;
3589 }
9db1d521
HP
3590
3591 fmt = GET_RTX_FORMAT (code);
3592 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3593 {
3594 if (fmt[i] == 'e'
3595 && reg_used_in_mem_p (regno, XEXP (x, i)))
3596 return 1;
c7453384 3597
9db1d521
HP
3598 else if (fmt[i] == 'E')
3599 for (j = 0; j < XVECLEN (x, i); j++)
3600 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3601 return 1;
3602 }
3603 return 0;
3604}
3605
d65f7478 3606/* Returns true if expression DEP_RTX sets an address register
994fe660 3607 used by instruction INSN to address memory. */
9db1d521 3608
c7453384 3609static int
9c808aad 3610addr_generation_dependency_p (rtx dep_rtx, rtx insn)
9db1d521 3611{
4023fb28 3612 rtx target, pat;
9db1d521 3613
077dab3b
HP
3614 if (GET_CODE (dep_rtx) == INSN)
3615 dep_rtx = PATTERN (dep_rtx);
3616
9db1d521
HP
3617 if (GET_CODE (dep_rtx) == SET)
3618 {
3619 target = SET_DEST (dep_rtx);
cc7ab9b7
UW
3620 if (GET_CODE (target) == STRICT_LOW_PART)
3621 target = XEXP (target, 0);
3622 while (GET_CODE (target) == SUBREG)
3623 target = SUBREG_REG (target);
3624
9db1d521
HP
3625 if (GET_CODE (target) == REG)
3626 {
3627 int regno = REGNO (target);
3628
077dab3b 3629 if (s390_safe_attr_type (insn) == TYPE_LA)
4023fb28
UW
3630 {
3631 pat = PATTERN (insn);
3632 if (GET_CODE (pat) == PARALLEL)
3633 {
3634 if (XVECLEN (pat, 0) != 2)
3635 abort();
3636 pat = XVECEXP (pat, 0, 0);
3637 }
3638 if (GET_CODE (pat) == SET)
3639 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
3640 else
3641 abort();
3642 }
077dab3b 3643 else if (get_attr_atype (insn) == ATYPE_AGEN)
4023fb28
UW
3644 return reg_used_in_mem_p (regno, PATTERN (insn));
3645 }
9db1d521
HP
3646 }
3647 return 0;
3648}
3649
077dab3b
HP
3650/* Return 1, if dep_insn sets register used in insn in the agen unit. */
3651
c7453384 3652int
9c808aad 3653s390_agen_dep_p (rtx dep_insn, rtx insn)
c7453384 3654{
077dab3b
HP
3655 rtx dep_rtx = PATTERN (dep_insn);
3656 int i;
c7453384
EC
3657
3658 if (GET_CODE (dep_rtx) == SET
077dab3b
HP
3659 && addr_generation_dependency_p (dep_rtx, insn))
3660 return 1;
3661 else if (GET_CODE (dep_rtx) == PARALLEL)
3662 {
3663 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3664 {
3665 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3666 return 1;
3667 }
3668 }
3669 return 0;
3670}
3671
994fe660 3672/* Return the modified cost of the dependency of instruction INSN
c7453384 3673 on instruction DEP_INSN through the link LINK. COST is the
994fe660
UW
3674 default cost of that dependency.
3675
3676 Data dependencies are all handled without delay. However, if a
c7453384 3677 register is modified and subsequently used as base or index
994fe660 3678 register of a memory reference, at least 4 cycles need to pass
c7453384 3679 between setting and using the register to avoid pipeline stalls.
d65f7478 3680 An exception is the LA instruction. An address generated by LA can
f2d3c02a 3681 be used by introducing only a one cycle stall on the pipeline. */
9db1d521 3682
c237e94a 3683static int
9c808aad 3684s390_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
9db1d521 3685{
994fe660 3686 rtx dep_rtx;
9db1d521
HP
3687 int i;
3688
3689 /* If the dependence is an anti-dependence, there is no cost. For an
3690 output dependence, there is sometimes a cost, but it doesn't seem
3691 worth handling those few cases. */
3692
3693 if (REG_NOTE_KIND (link) != 0)
3694 return 0;
3695
3696 /* If we can't recognize the insns, we can't really do anything. */
3697 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3698 return cost;
3699
077dab3b
HP
3700 /* DFA based scheduling checks address dependency in md file. */
3701 if (s390_use_dfa_pipeline_interface ())
52609473 3702 {
c7453384 3703 /* Operand forward in case of lr, load and la. */
52609473
HP
3704 if (s390_tune == PROCESSOR_2084_Z990
3705 && cost == 1
3706 && (s390_safe_attr_type (dep_insn) == TYPE_LA
3707 || s390_safe_attr_type (dep_insn) == TYPE_LR
3708 || s390_safe_attr_type (dep_insn) == TYPE_LOAD))
3709 return 0;
3710 return cost;
3711 }
077dab3b 3712
9db1d521
HP
3713 dep_rtx = PATTERN (dep_insn);
3714
c7453384 3715 if (GET_CODE (dep_rtx) == SET
077dab3b 3716 && addr_generation_dependency_p (dep_rtx, insn))
c7453384 3717 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
9db1d521
HP
3718 else if (GET_CODE (dep_rtx) == PARALLEL)
3719 {
3720 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3721 {
077dab3b 3722 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
c7453384 3723 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
9db1d521
HP
3724 }
3725 }
3726
f2d3c02a
HP
3727 return cost;
3728}
52609473
HP
3729/* A C statement (sans semicolon) to update the integer scheduling priority
3730 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3731 reduce the priority to execute INSN later. Do not define this macro if
c7453384 3732 you do not need to adjust the scheduling priorities of insns.
52609473 3733
c7453384 3734 A STD instruction should be scheduled earlier,
52609473
HP
3735 in order to use the bypass. */
3736
3737static int
9c808aad 3738s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
52609473
HP
3739{
3740 if (! INSN_P (insn))
3741 return priority;
3742
3743 if (s390_tune != PROCESSOR_2084_Z990)
3744 return priority;
3745
3746 switch (s390_safe_attr_type (insn))
3747 {
3748 case TYPE_FSTORED:
3749 case TYPE_FSTORES:
3750 priority = priority << 3;
3751 break;
3752 case TYPE_STORE:
3753 priority = priority << 1;
3754 break;
3755 default:
3756 break;
3757 }
3758 return priority;
3759}
f2d3c02a 3760
077dab3b 3761/* The number of instructions that can be issued per cycle. */
f2d3c02a 3762
077dab3b 3763static int
9c808aad 3764s390_issue_rate (void)
077dab3b 3765{
c7453384 3766 if (s390_tune == PROCESSOR_2084_Z990)
52609473 3767 return 3;
077dab3b
HP
3768 return 1;
3769}
f2d3c02a 3770
077dab3b
HP
3771/* If the following function returns TRUE, we will use the the DFA
3772 insn scheduler. */
f2d3c02a
HP
3773
3774static int
9c808aad 3775s390_use_dfa_pipeline_interface (void)
f2d3c02a 3776{
52609473
HP
3777 if (s390_tune == PROCESSOR_2064_Z900
3778 || s390_tune == PROCESSOR_2084_Z990)
077dab3b 3779 return 1;
52609473 3780
077dab3b 3781 return 0;
52609473
HP
3782}
3783
static int
s390_first_cycle_multipass_dfa_lookahead (void)
{
  /* Look ahead 4 insns when DFA scheduling is active, else none.  */
  return s390_use_dfa_pipeline_interface () ? 4 : 0;
}
3789
52609473 3790
c7453384 3791/* Split all branches that exceed the maximum distance.
545d16ff 3792 Returns true if this created a new literal pool entry. */
9db1d521 3793
c7453384 3794static int
545d16ff 3795s390_split_branches (void)
9db1d521 3796{
545d16ff 3797 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
aee4e0db 3798 int new_literal = 0;
0a3bdf9d
UW
3799 rtx insn, pat, tmp, target;
3800 rtx *label;
9db1d521 3801
c3cc6b78
UW
3802 /* We need correct insn addresses. */
3803
3804 shorten_branches (get_insns ());
3805
13e58269 3806 /* Find all branches that exceed 64KB, and split them. */
9db1d521 3807
13e58269
UW
3808 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3809 {
3810 if (GET_CODE (insn) != JUMP_INSN)
3811 continue;
9db1d521 3812
13e58269 3813 pat = PATTERN (insn);
0a3bdf9d
UW
3814 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3815 pat = XVECEXP (pat, 0, 0);
3816 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
13e58269 3817 continue;
9db1d521 3818
c7453384 3819 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
9db1d521 3820 {
0a3bdf9d 3821 label = &SET_SRC (pat);
c7453384
EC
3822 }
3823 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
9db1d521 3824 {
c7453384 3825 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
0a3bdf9d 3826 label = &XEXP (SET_SRC (pat), 1);
c7453384 3827 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
0a3bdf9d 3828 label = &XEXP (SET_SRC (pat), 2);
13e58269
UW
3829 else
3830 continue;
3831 }
3832 else
3833 continue;
3834
545d16ff 3835 if (get_attr_length (insn) <= 4)
13e58269
UW
3836 continue;
3837
545d16ff
UW
3838 /* We are going to use the return register as scratch register,
3839 make sure it will be saved/restored by the prologue/epilogue. */
3840 cfun->machine->save_return_addr_p = 1;
c3cc6b78 3841
545d16ff 3842 if (!flag_pic)
9db1d521 3843 {
aee4e0db 3844 new_literal = 1;
0a3bdf9d
UW
3845 tmp = force_const_mem (Pmode, *label);
3846 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3847 INSN_ADDRESSES_NEW (tmp, -1);
13e58269 3848
0a3bdf9d
UW
3849 target = temp_reg;
3850 }
3851 else
13e58269 3852 {
aee4e0db 3853 new_literal = 1;
c7453384 3854 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
fd7643fb
UW
3855 UNSPEC_LTREL_OFFSET);
3856 target = gen_rtx_CONST (Pmode, target);
3857 target = force_const_mem (Pmode, target);
3858 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
0a3bdf9d
UW
3859 INSN_ADDRESSES_NEW (tmp, -1);
3860
c7453384 3861 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (target, 0)),
fd7643fb
UW
3862 UNSPEC_LTREL_BASE);
3863 target = gen_rtx_PLUS (Pmode, temp_reg, target);
13e58269
UW
3864 }
3865
0a3bdf9d
UW
3866 if (!validate_change (insn, label, target, 0))
3867 abort ();
9db1d521 3868 }
aee4e0db
UW
3869
3870 return new_literal;
9db1d521
HP
3871}
3872
b2ccb744 3873
c7453384
EC
3874/* Find a literal pool symbol referenced in RTX X, and store
3875 it at REF. Will abort if X contains references to more than
b2ccb744 3876 one such pool symbol; multiple references to the same symbol
c7453384 3877 are allowed, however.
b2ccb744 3878
c7453384 3879 The rtx pointed to by REF must be initialized to NULL_RTX
b2ccb744
UW
3880 by the caller before calling this routine. */
3881
3882static void
9c808aad 3883find_constant_pool_ref (rtx x, rtx *ref)
b2ccb744
UW
3884{
3885 int i, j;
3886 const char *fmt;
3887
fd7643fb
UW
3888 /* Ignore LTREL_BASE references. */
3889 if (GET_CODE (x) == UNSPEC
3890 && XINT (x, 1) == UNSPEC_LTREL_BASE)
3891 return;
5af2f3d3
UW
3892 /* Likewise POOL_ENTRY insns. */
3893 if (GET_CODE (x) == UNSPEC_VOLATILE
3894 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
3895 return;
fd7643fb 3896
b2ccb744
UW
3897 if (GET_CODE (x) == SYMBOL_REF
3898 && CONSTANT_POOL_ADDRESS_P (x))
3899 {
3900 if (*ref == NULL_RTX)
3901 *ref = x;
3902 else if (*ref != x)
3903 abort();
3904 }
3905
3906 fmt = GET_RTX_FORMAT (GET_CODE (x));
3907 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3908 {
3909 if (fmt[i] == 'e')
3910 {
3911 find_constant_pool_ref (XEXP (x, i), ref);
3912 }
3913 else if (fmt[i] == 'E')
3914 {
3915 for (j = 0; j < XVECLEN (x, i); j++)
3916 find_constant_pool_ref (XVECEXP (x, i, j), ref);
3917 }
3918 }
3919}
3920
3921/* Replace every reference to the literal pool symbol REF
3922 in X by the address ADDR. Fix up MEMs as required. */
3923
3924static void
9c808aad 3925replace_constant_pool_ref (rtx *x, rtx ref, rtx addr)
b2ccb744
UW
3926{
3927 int i, j;
3928 const char *fmt;
3929
3930 if (*x == ref)
3931 abort ();
3932
3933 /* Literal pool references can only occur inside a MEM ... */
3934 if (GET_CODE (*x) == MEM)
3935 {
3936 rtx memref = XEXP (*x, 0);
3937
3938 if (memref == ref)
3939 {
3940 *x = replace_equiv_address (*x, addr);
3941 return;
3942 }
3943
3944 if (GET_CODE (memref) == CONST
3945 && GET_CODE (XEXP (memref, 0)) == PLUS
3946 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3947 && XEXP (XEXP (memref, 0), 0) == ref)
3948 {
3949 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3950 *x = replace_equiv_address (*x, plus_constant (addr, off));
3951 return;
3952 }
3953 }
3954
3955 /* ... or a load-address type pattern. */
3956 if (GET_CODE (*x) == SET)
3957 {
3958 rtx addrref = SET_SRC (*x);
3959
3960 if (addrref == ref)
3961 {
3962 SET_SRC (*x) = addr;
3963 return;
3964 }
3965
3966 if (GET_CODE (addrref) == CONST
3967 && GET_CODE (XEXP (addrref, 0)) == PLUS
3968 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3969 && XEXP (XEXP (addrref, 0), 0) == ref)
3970 {
3971 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3972 SET_SRC (*x) = plus_constant (addr, off);
3973 return;
3974 }
3975 }
3976
3977 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3978 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3979 {
3980 if (fmt[i] == 'e')
3981 {
3982 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3983 }
3984 else if (fmt[i] == 'E')
3985 {
3986 for (j = 0; j < XVECLEN (*x, i); j++)
3987 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3988 }
3989 }
3990}
3991
c7453384 3992/* Check whether X contains an UNSPEC_LTREL_BASE.
fd7643fb 3993 Return its constant pool symbol if found, NULL_RTX otherwise. */
aee4e0db 3994
fd7643fb 3995static rtx
9c808aad 3996find_ltrel_base (rtx x)
aee4e0db 3997{
aee4e0db
UW
3998 int i, j;
3999 const char *fmt;
4000
fd7643fb
UW
4001 if (GET_CODE (x) == UNSPEC
4002 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4003 return XVECEXP (x, 0, 0);
aee4e0db
UW
4004
4005 fmt = GET_RTX_FORMAT (GET_CODE (x));
4006 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4007 {
4008 if (fmt[i] == 'e')
4009 {
fd7643fb
UW
4010 rtx fnd = find_ltrel_base (XEXP (x, i));
4011 if (fnd)
4012 return fnd;
aee4e0db
UW
4013 }
4014 else if (fmt[i] == 'E')
4015 {
4016 for (j = 0; j < XVECLEN (x, i); j++)
fd7643fb
UW
4017 {
4018 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4019 if (fnd)
4020 return fnd;
4021 }
aee4e0db
UW
4022 }
4023 }
4024
fd7643fb 4025 return NULL_RTX;
aee4e0db
UW
4026}
4027
fd7643fb 4028/* Replace any occurrence of UNSPEC_LTREL_BASE in X with BASE. */
aee4e0db
UW
4029
4030static void
9c808aad 4031replace_ltrel_base (rtx *x, rtx base)
aee4e0db 4032{
fd7643fb 4033 int i, j;
aee4e0db
UW
4034 const char *fmt;
4035
fd7643fb
UW
4036 if (GET_CODE (*x) == UNSPEC
4037 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
aee4e0db 4038 {
fd7643fb
UW
4039 *x = base;
4040 return;
aee4e0db
UW
4041 }
4042
4043 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4044 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4045 {
4046 if (fmt[i] == 'e')
4047 {
fd7643fb 4048 replace_ltrel_base (&XEXP (*x, i), base);
aee4e0db
UW
4049 }
4050 else if (fmt[i] == 'E')
4051 {
4052 for (j = 0; j < XVECLEN (*x, i); j++)
fd7643fb 4053 replace_ltrel_base (&XVECEXP (*x, i, j), base);
aee4e0db
UW
4054 }
4055 }
4056}
4057
4058
fd7643fb 4059/* We keep a list of constants which we have to add to internal
b2ccb744
UW
4060 constant tables in the middle of large functions. */
4061
fd7643fb 4062#define NR_C_MODES 7
c7453384 4063enum machine_mode constant_modes[NR_C_MODES] =
b2ccb744 4064{
fd7643fb 4065 TImode,
b2ccb744
UW
4066 DFmode, DImode,
4067 SFmode, SImode,
4068 HImode,
4069 QImode
4070};
4071
b2ccb744
UW
4072struct constant
4073{
4074 struct constant *next;
4075 rtx value;
4076 rtx label;
4077};
4078
4079struct constant_pool
4080{
4081 struct constant_pool *next;
4082 rtx first_insn;
aee4e0db
UW
4083 rtx pool_insn;
4084 bitmap insns;
b2ccb744
UW
4085
4086 struct constant *constants[NR_C_MODES];
4087 rtx label;
4088 int size;
4089};
4090
5af2f3d3
UW
4091static struct constant_pool * s390_mainpool_start (void);
4092static void s390_mainpool_finish (struct constant_pool *, rtx base_reg);
4093static void s390_mainpool_cancel (struct constant_pool *);
4094
4095static struct constant_pool * s390_chunkify_start (rtx base_reg);
4096static void s390_chunkify_finish (struct constant_pool *, rtx base_reg);
9c808aad 4097static void s390_chunkify_cancel (struct constant_pool *);
aee4e0db 4098
9c808aad
AJ
4099static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4100static void s390_end_pool (struct constant_pool *, rtx);
4101static void s390_add_pool_insn (struct constant_pool *, rtx);
4102static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4103static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4104static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
5af2f3d3
UW
4105static rtx s390_dump_pool (struct constant_pool *, bool);
4106static struct constant_pool *s390_alloc_pool (void);
9c808aad 4107static void s390_free_pool (struct constant_pool *);
b2ccb744
UW
4108
4109/* Create new constant pool covering instructions starting at INSN
4110 and chain it to the end of POOL_LIST. */
4111
4112static struct constant_pool *
9c808aad 4113s390_start_pool (struct constant_pool **pool_list, rtx insn)
b2ccb744
UW
4114{
4115 struct constant_pool *pool, **prev;
b2ccb744 4116
5af2f3d3 4117 pool = s390_alloc_pool ();
b2ccb744 4118 pool->first_insn = insn;
aee4e0db 4119
b2ccb744
UW
4120 for (prev = pool_list; *prev; prev = &(*prev)->next)
4121 ;
4122 *prev = pool;
4123
4124 return pool;
4125}
4126
aee4e0db
UW
4127/* End range of instructions covered by POOL at INSN and emit
4128 placeholder insn representing the pool. */
b2ccb744
UW
4129
4130static void
9c808aad 4131s390_end_pool (struct constant_pool *pool, rtx insn)
b2ccb744 4132{
aee4e0db
UW
4133 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4134
4135 if (!insn)
4136 insn = get_last_insn ();
4137
4138 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4139 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4140}
4141
4142/* Add INSN to the list of insns covered by POOL. */
4143
4144static void
9c808aad 4145s390_add_pool_insn (struct constant_pool *pool, rtx insn)
aee4e0db
UW
4146{
4147 bitmap_set_bit (pool->insns, INSN_UID (insn));
b2ccb744
UW
4148}
4149
4150/* Return pool out of POOL_LIST that covers INSN. */
4151
4152static struct constant_pool *
9c808aad 4153s390_find_pool (struct constant_pool *pool_list, rtx insn)
b2ccb744 4154{
b2ccb744
UW
4155 struct constant_pool *pool;
4156
b2ccb744 4157 for (pool = pool_list; pool; pool = pool->next)
aee4e0db 4158 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
b2ccb744
UW
4159 break;
4160
4161 return pool;
4162}
4163
aee4e0db 4164/* Add constant VAL of mode MODE to the constant pool POOL. */
b2ccb744 4165
aee4e0db 4166static void
9c808aad 4167s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
b2ccb744
UW
4168{
4169 struct constant *c;
b2ccb744
UW
4170 int i;
4171
4172 for (i = 0; i < NR_C_MODES; i++)
4173 if (constant_modes[i] == mode)
4174 break;
4175 if (i == NR_C_MODES)
4176 abort ();
4177
4178 for (c = pool->constants[i]; c != NULL; c = c->next)
4179 if (rtx_equal_p (val, c->value))
4180 break;
4181
4182 if (c == NULL)
4183 {
4184 c = (struct constant *) xmalloc (sizeof *c);
4185 c->value = val;
4186 c->label = gen_label_rtx ();
4187 c->next = pool->constants[i];
4188 pool->constants[i] = c;
4189 pool->size += GET_MODE_SIZE (mode);
4190 }
aee4e0db 4191}
b2ccb744 4192
aee4e0db
UW
4193/* Find constant VAL of mode MODE in the constant pool POOL.
4194 Return an RTX describing the distance from the start of
4195 the pool to the location of the new constant. */
c7453384 4196
aee4e0db 4197static rtx
9c808aad
AJ
4198s390_find_constant (struct constant_pool *pool, rtx val,
4199 enum machine_mode mode)
aee4e0db
UW
4200{
4201 struct constant *c;
4202 rtx offset;
4203 int i;
c7453384 4204
aee4e0db
UW
4205 for (i = 0; i < NR_C_MODES; i++)
4206 if (constant_modes[i] == mode)
4207 break;
4208 if (i == NR_C_MODES)
4209 abort ();
c7453384 4210
aee4e0db
UW
4211 for (c = pool->constants[i]; c != NULL; c = c->next)
4212 if (rtx_equal_p (val, c->value))
4213 break;
c7453384 4214
aee4e0db
UW
4215 if (c == NULL)
4216 abort ();
c7453384 4217
aee4e0db
UW
4218 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4219 gen_rtx_LABEL_REF (Pmode, pool->label));
b2ccb744
UW
4220 offset = gen_rtx_CONST (Pmode, offset);
4221 return offset;
4222}
4223
5af2f3d3
UW
/* Dump out the constants in POOL.  If REMOTE_LABEL is true,
   do not emit the pool base label.  Returns the last insn
   emitted (the barrier following the pool).  */

static rtx
s390_dump_pool (struct constant_pool *pool, bool remote_label)
{
  struct constant *c;
  rtx insn;
  int i;

  /* Pool start insn switches to proper section
     and guarantees necessary alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
  else
    insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* The base label is emitted here only when the pool is local;
     a remote caller (e.g. the zarch .rodata case) emits it itself.  */
  if (!remote_label)
    {
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);
    }

  /* Dump constants in descending alignment requirement order,
     ensuring proper alignment for every constant.  NOTE(review):
     this relies on constant_modes[] being sorted by alignment —
     confirm against its definition elsewhere in this file.  */
  for (i = 0; i < NR_C_MODES; i++)
    for (c = pool->constants[i]; c; c = c->next)
      {
	/* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative
	   references, i.e. (target-label - pool-label).  */
	rtx value = c->value;
	if (GET_CODE (value) == CONST
	    && GET_CODE (XEXP (value, 0)) == UNSPEC
	    && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
	    && XVECLEN (XEXP (value, 0), 0) == 1)
	  {
	    value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
				   gen_rtx_LABEL_REF (VOIDmode, pool->label));
	    value = gen_rtx_CONST (VOIDmode, value);
	  }

	/* Each constant gets its own label so references can address
	   it relative to the pool base.  */
	insn = emit_label_after (c->label, insn);
	INSN_ADDRESSES_NEW (insn, -1);

	/* Wrap the value in an UNSPECV_POOL_ENTRY so later passes
	   cannot touch it; the actual bytes are emitted by the
	   corresponding output routine.  */
	value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
					 gen_rtvec (1, value),
					 UNSPECV_POOL_ENTRY);
	insn = emit_insn_after (value, insn);
	INSN_ADDRESSES_NEW (insn, -1);
      }

  /* Pool end insn switches back to previous section
     and guarantees necessary alignment.  */
  if (TARGET_CPU_ZARCH)
    insn = emit_insn_after (gen_pool_end_64 (), insn);
  else
    insn = emit_insn_after (gen_pool_end_31 (), insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* A barrier marks the pool as unreachable code.  */
  insn = emit_barrier_after (insn);
  INSN_ADDRESSES_NEW (insn, -1);

  /* Remove placeholder insn; the pool insns emitted above
     replace it in the stream.  */
  remove_insn (pool->pool_insn);

  return insn;
}
4291
5af2f3d3
UW
4292/* Allocate new constant_pool structure. */
4293
4294static struct constant_pool *
4295s390_alloc_pool (void)
4296{
4297 struct constant_pool *pool;
4298 int i;
4299
4300 pool = (struct constant_pool *) xmalloc (sizeof *pool);
4301 pool->next = NULL;
4302 for (i = 0; i < NR_C_MODES; i++)
4303 pool->constants[i] = NULL;
4304
4305 pool->label = gen_label_rtx ();
4306 pool->first_insn = NULL_RTX;
4307 pool->pool_insn = NULL_RTX;
4308 pool->insns = BITMAP_XMALLOC ();
4309 pool->size = 0;
4310
4311 return pool;
4312}
4313
b2ccb744
UW
4314/* Free all memory used by POOL. */
4315
4316static void
9c808aad 4317s390_free_pool (struct constant_pool *pool)
b2ccb744
UW
4318{
4319 int i;
4320
4321 for (i = 0; i < NR_C_MODES; i++)
4322 {
4323 struct constant *c = pool->constants[i];
4324 while (c != NULL)
4325 {
4326 struct constant *next = c->next;
4327 free (c);
4328 c = next;
4329 }
4330 }
4331
aee4e0db 4332 BITMAP_XFREE (pool->insns);
b2ccb744 4333 free (pool);
c7453384 4334}
b2ccb744 4335
b2ccb744 4336
5af2f3d3
UW
/* Collect main literal pool.  Return NULL on overflow
   (pool size >= 4096 bytes, beyond short-displacement reach),
   in which case the caller must fall back to chunkification.  */

static struct constant_pool *
s390_mainpool_start (void)
{
  struct constant_pool *pool;
  rtx insn;

  pool = s390_alloc_pool ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* The prologue emitted exactly one UNSPECV_MAIN_POOL placeholder;
	 remember it as the spot where the pool will materialize.  */
      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
	  && XINT (PATTERN (insn), 1) == UNSPECV_MAIN_POOL)
	{
	  if (pool->pool_insn)
	    abort ();
	  pool->pool_insn = insn;
	}

      /* Collect every constant-pool reference into our own pool.  */
      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx pool_ref = NULL_RTX;
	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
	  if (pool_ref)
	    {
	      rtx constant = get_pool_constant (pool_ref);
	      enum machine_mode mode = get_pool_mode (pool_ref);
	      s390_add_constant (pool, constant, mode);
	    }
	}
    }

  if (!pool->pool_insn)
    abort ();

  if (pool->size >= 4096)
    {
      /* We're going to chunkify the pool, so remove the main
	 pool placeholder insn.  */
      remove_insn (pool->pool_insn);

      s390_free_pool (pool);
      pool = NULL;
    }

  return pool;
}
4386
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   Modify the current function to output the pool constants as well as
   the pool register setup instruction.  BASE_REG is the register to
   be used as pool base register.  */

static void
s390_mainpool_finish (struct constant_pool *pool, rtx base_reg)
{
  rtx insn;

  /* If the pool is empty, we're done.  */
  if (pool->size == 0)
    {
      remove_insn (pool->pool_insn);
      s390_free_pool (pool);
      return;
    }

  /* We need correct insn addresses.  */
  shorten_branches (get_insns ());

  /* On zSeries, we use a LARL to load the pool register.  The pool is
     located in the .rodata section, so we emit it after the function.  */
  if (TARGET_CPU_ZARCH)
    {
      insn = gen_main_base_64 (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      /* Re-anchor the pool placeholder at the very end of the
	 function; s390_dump_pool will expand and delete it.  */
      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      /* remote_label == 0: the base label is emitted with the pool.
	 NOTE(review): this looks inconsistent with the comment in
	 s390_dump_pool about remote labels for .rodata pools —
	 confirm which emission the main_base_64 pattern expects.  */
      s390_dump_pool (pool, 0);
    }

  /* On S/390, if the total size of the function's code plus literal pool
     does not exceed 4096 bytes, we use BASR to set up a function base
     pointer, and emit the literal pool at the end of the function.  */
  else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
	   + pool->size + 8 /* alignment slop */ < 4096)
    {
      insn = gen_main_base_31_small (base_reg, pool->label);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      /* The base label sits right after the BASR, not at the pool.  */
      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      insn = get_last_insn ();
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      /* remote_label == 1: the label was already emitted above.  */
      s390_dump_pool (pool, 1);
    }

  /* Otherwise, we emit an inline literal pool and use BASR to branch
     over it, setting up the pool register at the same time.  */
  else
    {
      rtx pool_end = gen_label_rtx ();

      insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
      insn = emit_insn_after (insn, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);
      remove_insn (pool->pool_insn);

      insn = emit_label_after (pool->label, insn);
      INSN_ADDRESSES_NEW (insn, -1);

      /* The pool is placed inline, directly behind its base label,
	 and the POOL_END label is the branch target past it.  */
      pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
      INSN_ADDRESSES_NEW (pool->pool_insn, -1);

      insn = emit_label_after (pool_end, pool->pool_insn);
      INSN_ADDRESSES_NEW (insn, -1);

      s390_dump_pool (pool, 1);
    }


  /* Replace all literal pool references by (base_reg + offset)
     addresses into the pool we just emitted.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	replace_ltrel_base (&PATTERN (insn), base_reg);

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
        {
          rtx addr, pool_ref = NULL_RTX;
          find_constant_pool_ref (PATTERN (insn), &pool_ref);
          if (pool_ref)
            {
              addr = s390_find_constant (pool, get_pool_constant (pool_ref),
                                         get_pool_mode (pool_ref));
              addr = gen_rtx_PLUS (Pmode, base_reg, addr);
              replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
              /* Force re-recognition since the pattern changed.  */
              INSN_CODE (insn) = -1;
            }
        }
    }


  /* Free the pool.  */
  s390_free_pool (pool);
}
4495
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */
static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* Collecting the main pool did not modify the insn stream, so
     reverting simply means releasing the pool data structure.  */
  s390_free_pool (pool);
}
4506
4507
/* Chunkify the literal pool.  BASE_REG is to be used as pool
   register.  Returns the list of pool chunks created; the caller
   must later invoke either s390_chunkify_finish or
   s390_chunkify_cancel on it.  */

/* Lower/upper bound on the size of a single pool chunk, in bytes.
   The maximum stays below 4096 so every entry remains reachable
   with a short displacement from the chunk base.  */
#define S390_POOL_CHUNK_MIN 0xc00
#define S390_POOL_CHUNK_MAX 0xe00

static struct constant_pool *
s390_chunkify_start (rtx base_reg)
{
  struct constant_pool *curr_pool = NULL, *pool_list = NULL;
  int extra_size = 0;
  bitmap far_labels;
  rtx pending_ltrel = NULL_RTX;
  rtx insn;

  /* Pick the reload-base pattern matching the target architecture.  */
  rtx (*gen_reload_base) (rtx, rtx) =
    TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;


  /* We need correct insn addresses.  */

  shorten_branches (get_insns ());

  /* Scan all insns and move literals to pool chunks.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Check for pending LTREL_BASE: once the matching base use is
	 seen, the LTREL_OFFSET/LTREL_BASE pair is complete and the
	 chunk may be split again.  */
      if (INSN_P (insn))
	{
	  rtx ltrel_base = find_ltrel_base (PATTERN (insn));
	  if (ltrel_base)
	    {
	      if (ltrel_base == pending_ltrel)
		pending_ltrel = NULL_RTX;
	      else
		abort ();
	    }
	}

      if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx pool_ref = NULL_RTX;
	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
	  if (pool_ref)
	    {
	      rtx constant = get_pool_constant (pool_ref);
	      enum machine_mode mode = get_pool_mode (pool_ref);

	      /* Open a new chunk at the first literal reference.  */
	      if (!curr_pool)
		curr_pool = s390_start_pool (&pool_list, insn);

	      s390_add_constant (curr_pool, constant, mode);
	      s390_add_pool_insn (curr_pool, insn);

	      /* Don't split the pool chunk between a LTREL_OFFSET load
		 and the corresponding LTREL_BASE.  */
	      if (GET_CODE (constant) == CONST
		  && GET_CODE (XEXP (constant, 0)) == UNSPEC
		  && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
		{
		  if (pending_ltrel)
		    abort ();
		  pending_ltrel = pool_ref;
		}
	    }
	}

      if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
	{
	  if (curr_pool)
	    s390_add_pool_insn (curr_pool, insn);
	  /* An LTREL_BASE must follow within the same basic block.  */
	  if (pending_ltrel)
	    abort ();
	}

      /* Without an open chunk or a valid address for INSN there is
	 nothing to decide about splitting here.  */
      if (!curr_pool
	  || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
	  || INSN_ADDRESSES (INSN_UID (insn)) == -1)
	continue;

      if (TARGET_CPU_ZARCH)
	{
	  /* On zarch only the chunk size itself matters; the base is
	     reloaded with LARL, so distance is no concern.  */
	  if (curr_pool->size < S390_POOL_CHUNK_MAX)
	    continue;

	  s390_end_pool (curr_pool, NULL_RTX);
	  curr_pool = NULL;
	}
      else
	{
	  /* On 31-bit, the code covered by a chunk must also stay
	     within displacement range of the chunk base.  */
	  int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
			   - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
			   + extra_size;

	  /* We will later have to insert base register reload insns.
	     Those will have an effect on code size, which we need to
	     consider here.  This calculation makes rather pessimistic
	     worst-case assumptions.  */
	  if (GET_CODE (insn) == CODE_LABEL)
	    extra_size += 6;

	  if (chunk_size < S390_POOL_CHUNK_MIN
	      && curr_pool->size < S390_POOL_CHUNK_MIN)
	    continue;

	  /* Pool chunks can only be inserted after BARRIERs ...  */
	  if (GET_CODE (insn) == BARRIER)
	    {
	      s390_end_pool (curr_pool, insn);
	      curr_pool = NULL;
	      extra_size = 0;
	    }

	  /* ... so if we don't find one in time, create one.  */
	  else if ((chunk_size > S390_POOL_CHUNK_MAX
		   || curr_pool->size > S390_POOL_CHUNK_MAX))
	    {
	      rtx label, jump, barrier;

	      /* We can insert the barrier only after a 'real' insn.  */
	      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
		continue;
	      if (get_attr_length (insn) == 0)
		continue;

	      /* Don't separate LTREL_BASE from the corresponding
		 LTREL_OFFSET load.  */
	      if (pending_ltrel)
		continue;

	      /* Emit jump-over-pool: jump, barrier, continuation label.
		 s390_chunkify_cancel recognizes and undoes exactly this
		 three-insn shape.  */
	      label = gen_label_rtx ();
	      jump = emit_jump_insn_after (gen_jump (label), insn);
	      barrier = emit_barrier_after (jump);
	      insn = emit_label_after (label, barrier);
	      JUMP_LABEL (jump) = label;
	      LABEL_NUSES (label) = 1;

	      INSN_ADDRESSES_NEW (jump, -1);
	      INSN_ADDRESSES_NEW (barrier, -1);
	      INSN_ADDRESSES_NEW (insn, -1);

	      s390_end_pool (curr_pool, barrier);
	      curr_pool = NULL;
	      extra_size = 0;
	    }
	}
    }

  if (curr_pool)
    s390_end_pool (curr_pool, NULL_RTX);
  if (pending_ltrel)
    abort ();


  /* Find all labels that are branched into
     from an insn belonging to a different chunk.  */

  far_labels = BITMAP_XMALLOC ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Labels marked with LABEL_PRESERVE_P can be target
	 of non-local jumps, so we have to mark them.
	 The same holds for named labels.

	 Don't do that, however, if it is the label before
	 a jump table.  */

      if (GET_CODE (insn) == CODE_LABEL
	  && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
	{
	  rtx vec_insn = next_real_insn (insn);
	  rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
			PATTERN (vec_insn) : NULL_RTX;
	  if (!vec_pat
	      || !(GET_CODE (vec_pat) == ADDR_VEC
		   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
	    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
	}

      /* If we have a direct jump (conditional or unconditional)
	 or a casesi jump, check all potential targets.  */
      else if (GET_CODE (insn) == JUMP_INSN)
	{
	  rtx pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
	    pat = XVECEXP (pat, 0, 0);

	  if (GET_CODE (pat) == SET)
	    {
	      rtx label = JUMP_LABEL (insn);
	      if (label)
		{
		  if (s390_find_pool (pool_list, label)
		      != s390_find_pool (pool_list, insn))
		    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
		}
	    }
	  else if (GET_CODE (pat) == PARALLEL
		   && XVECLEN (pat, 0) == 2
		   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
		   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
		   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
	    {
	      /* Find the jump table used by this casesi jump.  */
	      rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
	      rtx vec_insn = next_real_insn (vec_label);
	      rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
			    PATTERN (vec_insn) : NULL_RTX;
	      if (vec_pat
		  && (GET_CODE (vec_pat) == ADDR_VEC
		      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
		{
		  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;

		  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
		    {
		      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);

		      if (s390_find_pool (pool_list, label)
			  != s390_find_pool (pool_list, insn))
			bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
		    }
		}
	    }
	}
    }

  /* Insert base register reload insns before every pool.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
      rtx insn = curr_pool->first_insn;
      INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
    }

  /* Insert base register reload insns at every far label, since
     control may arrive there from a different chunk whose base
     value is wrong for the code that follows.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
      {
	struct constant_pool *pool = s390_find_pool (pool_list, insn);
	if (pool)
	  {
	    rtx new_insn = gen_reload_base (base_reg, pool->label);
	    INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
	  }
      }


  BITMAP_XFREE (far_labels);


  /* Recompute insn addresses.  */

  init_insn_lengths ();
  shorten_branches (get_insns ());

  return pool_list;
}
9db1d521 4772
aee4e0db 4773/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
c7453384 4774 After we have decided to use this list, finish implementing
5af2f3d3
UW
4775 all changes to the current function as required. BASE_REG is
4776 to be used as pool base register. */
c7453384 4777
aee4e0db 4778static void
5af2f3d3 4779s390_chunkify_finish (struct constant_pool *pool_list, rtx base_reg)
aee4e0db 4780{
aee4e0db
UW
4781 struct constant_pool *curr_pool = NULL;
4782 rtx insn;
c7453384
EC
4783
4784
aee4e0db
UW
4785 /* Replace all literal pool references. */
4786
c7453384 4787 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
aee4e0db 4788 {
fd7643fb
UW
4789 if (INSN_P (insn))
4790 replace_ltrel_base (&PATTERN (insn), base_reg);
4791
aee4e0db
UW
4792 curr_pool = s390_find_pool (pool_list, insn);
4793 if (!curr_pool)
4794 continue;
4795
4796 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4797 {
4798 rtx addr, pool_ref = NULL_RTX;
4799 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4800 if (pool_ref)
4801 {
4802 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
4803 get_pool_mode (pool_ref));
4804 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
4805 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
4806 INSN_CODE (insn) = -1;
4807 }
aee4e0db
UW
4808 }
4809 }
4810
4811 /* Dump out all literal pools. */
c7453384 4812
aee4e0db 4813 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5af2f3d3 4814 s390_dump_pool (curr_pool, 0);
c7453384 4815
aee4e0db
UW
4816 /* Free pool list. */
4817
4818 while (pool_list)
4819 {
4820 struct constant_pool *next = pool_list->next;
4821 s390_free_pool (pool_list);
4822 pool_list = next;
4823 }
4824}
4825
/* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
   We have decided we cannot use this list, so revert all changes
   to the current function that were done by s390_chunkify_start.  */

static void
s390_chunkify_cancel (struct constant_pool *pool_list)
{
  struct constant_pool *curr_pool = NULL;
  rtx insn;

  /* Remove all pool placeholder insns.  */

  for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
    {
      /* Did we insert an extra barrier?  Remove it.  The pattern
	 matched here is exactly the jump/barrier/label triple that
	 s390_chunkify_start emits when forcing a chunk split.  */
      rtx barrier = PREV_INSN (curr_pool->pool_insn);
      rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
      rtx label = NEXT_INSN (curr_pool->pool_insn);

      if (jump && GET_CODE (jump) == JUMP_INSN
	  && barrier && GET_CODE (barrier) == BARRIER
	  && label && GET_CODE (label) == CODE_LABEL
	  && GET_CODE (PATTERN (jump)) == SET
	  && SET_DEST (PATTERN (jump)) == pc_rtx
	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
	  && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
	{
	  remove_insn (jump);
	  remove_insn (barrier);
	  remove_insn (label);
	}

      remove_insn (curr_pool->pool_insn);
    }

  /* Remove all base register reload insns (recognized by their
     UNSPEC_RELOAD_BASE source).  */

  for (insn = get_insns (); insn; )
    {
      /* Fetch the successor first; INSN may be deleted below.  */
      rtx next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
	  && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
	remove_insn (insn);

      insn = next_insn;
    }

  /* Free pool list.  */

  while (pool_list)
    {
      struct constant_pool *next = pool_list->next;
      s390_free_pool (pool_list);
      pool_list = next;
    }
}
4885
b2ccb744 4886
faeb9bb6 4887/* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
416cf582
UW
4888
4889void
faeb9bb6 4890s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
416cf582
UW
4891{
4892 REAL_VALUE_TYPE r;
4893
4894 switch (GET_MODE_CLASS (mode))
4895 {
4896 case MODE_FLOAT:
4897 if (GET_CODE (exp) != CONST_DOUBLE)
4898 abort ();
4899
4900 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
4901 assemble_real (r, mode, align);
4902 break;
4903
4904 case MODE_INT:
faeb9bb6 4905 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
416cf582
UW
4906 break;
4907
4908 default:
4909 abort ();
4910 }
4911}
4912
4913
/* Rework the prolog/epilog to avoid saving/restoring
   registers unnecessarily.  BASE_USED specifies whether
   the literal pool base register needs to be saved.  */

static void
s390_optimize_prolog (bool base_used)
{
  int save_first, save_last, restore_first, restore_last;
  int i, j;
  rtx insn, new_insn, next_insn;

  /* Recompute regs_ever_live data for special registers.  */
  regs_ever_live[BASE_REGISTER] = base_used;
  regs_ever_live[RETURN_REGNUM] = cfun->machine->save_return_addr_p;
  regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;


  /* Find first and last gpr to be saved.  Global registers are
     skipped unless they are one of the special registers the
     ABI requires us to handle anyway.  */

  for (i = 6; i < 16; i++)
    if (regs_ever_live[i])
      if (!global_regs[i]
	  || i == STACK_POINTER_REGNUM
          || i == RETURN_REGNUM
          || i == BASE_REGISTER
          || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
	break;

  for (j = 15; j > i; j--)
    if (regs_ever_live[j])
      if (!global_regs[j]
	  || j == STACK_POINTER_REGNUM
          || j == RETURN_REGNUM
          || j == BASE_REGISTER
          || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
	break;

  if (i == 16)
    {
      /* Nothing to save/restore.  */
      save_first = restore_first = -1;
      save_last = restore_last = -1;
    }
  else
    {
      /* Save/restore from i to j.  */
      save_first = restore_first = i;
      save_last = restore_last = j;
    }

  /* Varargs functions need to save gprs 2 to 6.  */
  if (current_function_stdarg)
    {
      save_first = 2;
      if (save_last < 6)
        save_last = 6;
    }


  /* If all special registers are in fact used, there's nothing we
     can do, so no point in walking the insn list.  */
  if (i <= BASE_REGISTER && j >= BASE_REGISTER
      && (TARGET_CPU_ZARCH || (i <= RETURN_REGNUM && j >= RETURN_REGNUM)))
    return;


  /* Search for prolog/epilog insns and replace them.  Four insn
     shapes are recognized: store-multiple and load-multiple
     covering BASE_REGISTER, and single stores/loads of it.  */

  for (insn = get_insns (); insn; insn = next_insn)
    {
      int first, last, off;
      rtx set, base, offset;

      next_insn = NEXT_INSN (insn);

      if (GET_CODE (insn) != INSN)
	continue;

      /* Case 1: a store-multiple saving a gpr range that includes
	 BASE_REGISTER — replace it with a save of our range.  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL
	  && store_multiple_operation (PATTERN (insn), VOIDmode))
	{
	  set = XVECEXP (PATTERN (insn), 0, 0);
	  first = REGNO (SET_SRC (set));
	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
	  off = INTVAL (offset) - first * UNITS_PER_WORD;

	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (first > BASE_REGISTER || last < BASE_REGISTER)
	    continue;

	  if (save_first != -1)
	    {
	      new_insn = save_gprs (base, off, save_first, save_last);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	  continue;
	}

      /* Case 2: a single store of BASE_REGISTER to memory.  */
      if (GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_SRC (PATTERN (insn))) == REG
	  && REGNO (SET_SRC (PATTERN (insn))) == BASE_REGISTER
	  && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
	{
	  set = PATTERN (insn);
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
	  off = INTVAL (offset) - BASE_REGISTER * UNITS_PER_WORD;

	  if (GET_CODE (base) != REG || off < 0)
	    continue;

	  if (save_first != -1)
	    {
	      new_insn = save_gprs (base, off, save_first, save_last);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	  continue;
	}

      /* Case 3: a load-multiple restoring a gpr range that includes
	 BASE_REGISTER — replace it with a restore of our range.  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL
	  && load_multiple_operation (PATTERN (insn), VOIDmode))
	{
	  set = XVECEXP (PATTERN (insn), 0, 0);
	  first = REGNO (SET_DEST (set));
	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
	  off = INTVAL (offset) - first * UNITS_PER_WORD;

	  if (GET_CODE (base) != REG || off < 0)
	    continue;
	  if (first > BASE_REGISTER || last < BASE_REGISTER)
	    continue;

	  if (restore_first != -1)
	    {
	      new_insn = restore_gprs (base, off, restore_first, restore_last);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	  continue;
	}

      /* Case 4: a single load of BASE_REGISTER from memory.  */
      if (GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == REG
	  && REGNO (SET_DEST (PATTERN (insn))) == BASE_REGISTER
	  && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
	{
	  set = PATTERN (insn);
	  offset = const0_rtx;
	  base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
	  off = INTVAL (offset) - BASE_REGISTER * UNITS_PER_WORD;

	  if (GET_CODE (base) != REG || off < 0)
	    continue;

	  if (restore_first != -1)
	    {
	      new_insn = restore_gprs (base, off, restore_first, restore_last);
	      new_insn = emit_insn_before (new_insn, insn);
	      INSN_ADDRESSES_NEW (new_insn, -1);
	    }

	  remove_insn (insn);
	  continue;
	}
    }
}
5093
/* Perform machine-dependent processing: literal pool placement
   (possibly chunkified), branch splitting on 31-bit targets, and
   prolog/epilog optimization.  */

static void
s390_reorg (void)
{
  rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
  bool base_used = false;
  bool pool_overflow = false;

  /* Make sure all splits have been performed; splits after
     machine_dependent_reorg might confuse insn length counts.  */
  split_all_insns_noflow ();


  /* In small leaf functions, try to use an unused call-clobbered
     register as base register to avoid save/restore overhead.  */
  if (current_function_is_leaf && !regs_ever_live[5])
    base_reg = gen_rtx_REG (Pmode, 5);


  /* Install the main literal pool and the associated base
     register load insns.

     In addition, there are two problematic situations we need
     to correct:

     - the literal pool might be > 4096 bytes in size, so that
       some of its elements cannot be directly accessed

     - a branch target might be > 64K away from the branch, so that
       it is not possible to use a PC-relative instruction.

     To fix those, we split the single literal pool into multiple
     pool chunks, reloading the pool base register at various
     points throughout the function to ensure it always points to
     the pool chunk the following code expects, and / or replace
     PC-relative branches by absolute branches.

     However, the two problems are interdependent: splitting the
     literal pool can move a branch further away from its target,
     causing the 64K limit to overflow, and on the other hand,
     replacing a PC-relative branch by an absolute branch means
     we need to put the branch target address into the literal
     pool, possibly causing it to overflow.

     So, we loop trying to fix up both problems until we manage
     to satisfy both conditions at the same time.  Note that the
     loop is guaranteed to terminate as every pass of the loop
     strictly decreases the total number of PC-relative branches
     in the function.  (This is not completely true as there
     might be branch-over-pool insns introduced by chunkify_start.
     Those never need to be split however.)  */

  for (;;)
    {
      struct constant_pool *pool = NULL;

      /* Collect the literal pool.  */
      if (!pool_overflow)
	{
	  pool = s390_mainpool_start ();
	  if (!pool)
	    pool_overflow = true;
	}

      /* If literal pool overflowed, start to chunkify it.  */
      if (pool_overflow)
        pool = s390_chunkify_start (base_reg);

      /* Split out-of-range branches.  If this has created new
	 literal pool entries, cancel current chunk list and
	 recompute it.  zSeries machines have large branch
	 instructions, so we never need to split a branch.  */
      if (!TARGET_CPU_ZARCH && s390_split_branches ())
        {
          if (pool_overflow)
            s390_chunkify_cancel (pool);
	  else
            s390_mainpool_cancel (pool);

          continue;
        }

      /* If we made it up to here, both conditions are satisfied.
	 Finish up literal pool related changes.  The base register
	 only needs saving when it actually is BASE_REGISTER (i.e.
	 we did not pick the call-clobbered alternative above) and
	 the pool is nonempty or chunkified.  */
      if ((pool_overflow || pool->size > 0)
	  && REGNO (base_reg) == BASE_REGISTER)
	base_used = true;

      if (pool_overflow)
	s390_chunkify_finish (pool, base_reg);
      else
	s390_mainpool_finish (pool, base_reg);

      break;
    }

  s390_optimize_prolog (base_used);
}
5193
c3cc6b78 5194
5d4d885c
UW
5195/* Return an RTL expression representing the value of the return address
5196 for the frame COUNT steps up from the current frame. FRAME is the
5197 frame pointer of that frame. */
5198
5199rtx
9c808aad 5200s390_return_addr_rtx (int count, rtx frame)
5d4d885c
UW
5201{
5202 rtx addr;
5203
590fcf48
UW
5204 /* Without backchain, we fail for all but the current frame. */
5205
5206 if (!TARGET_BACKCHAIN && count > 0)
5207 return NULL_RTX;
5208
416cf582
UW
5209 /* For the current frame, we need to make sure the initial
5210 value of RETURN_REGNUM is actually saved. */
5d4d885c
UW
5211
5212 if (count == 0)
416cf582 5213 cfun->machine->save_return_addr_p = true;
5d4d885c 5214
416cf582 5215 /* To retrieve the return address we read the stack slot where the
5d4d885c
UW
5216 corresponding RETURN_REGNUM value was saved. */
5217
5218 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
5219 addr = memory_address (Pmode, addr);
5220 return gen_rtx_MEM (Pmode, addr);
c7453384 5221}
5d4d885c 5222
4023fb28
UW
5223/* Find first call clobbered register unsused in a function.
5224 This could be used as base register in a leaf function
5225 or for holding the return address before epilogue. */
9db1d521 5226
4023fb28 5227static int
9c808aad 5228find_unused_clobbered_reg (void)
4023fb28
UW
5229{
5230 int i;
5231 for (i = 0; i < 6; i++)
5232 if (!regs_ever_live[i])
5233 return i;
5234 return 0;
5235}
5236
/* Fill cfun->machine with info about the frame of the current function:
   whether call-saved FPRs need saving (64-bit ABI), the total frame
   size, whether the return address must be saved, and the range of
   GPRs to save/restore.

   NOTE: this function mutates the global regs_ever_live[] array to
   force the base register (and on S/390 the return register) to be
   treated as live; see the comment below for why.  */

static void
s390_frame_info (void)
{
  int i, j;
  HOST_WIDE_INT fsize = get_frame_size ();

  /* 31-bit addressing cannot represent larger frames.  */
  if (!TARGET_64BIT && fsize > 0x7fff0000)
    fatal_error ("Total size of local variables exceeds architecture limit.");

  /* fprs 8 - 15 are caller saved for 64 Bit ABI; if any of them is
     used (and not fixed as a global register), an extra 64-byte
     save area is needed.  */
  cfun->machine->save_fprs_p = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
	{
	  cfun->machine->save_fprs_p = 1;
	  break;
	}

  cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;

  /* Does function need to setup frame and save area.  */

  if (! current_function_is_leaf
      || cfun->machine->frame_size > 0
      || current_function_calls_alloca
      || current_function_stdarg)
    cfun->machine->frame_size += STARTING_FRAME_OFFSET;

  /* If we use the return register, we'll need to make sure
     it is going to be saved/restored.  */

  if (!current_function_is_leaf
      || regs_ever_live[RETURN_REGNUM])
    cfun->machine->save_return_addr_p = 1;

  /* Find first and last gpr to be saved.  Note that at this point,
     we assume the base register and -on S/390- the return register
     always need to be saved.  This is done because the usage of these
     register might change even after the prolog was emitted.
     If it turns out later that we really don't need them, the
     prolog/epilog code is modified again.  */

  regs_ever_live[BASE_REGISTER] = 1;
  if (!TARGET_CPU_ZARCH || cfun->machine->save_return_addr_p)
    regs_ever_live[RETURN_REGNUM] = 1;
  /* The stack pointer counts as live only when a frame exists.  */
  regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;

  /* Scan upward for the first GPR needing a save slot; globals are
     skipped except for the special registers listed, which must be
     saved/restored in any case.  */
  for (i = 6; i < 16; i++)
    if (regs_ever_live[i])
      if (!global_regs[i]
	  || i == STACK_POINTER_REGNUM
	  || i == RETURN_REGNUM
	  || i == BASE_REGISTER
	  || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
	break;

  /* Scan downward for the last such GPR.  */
  for (j = 15; j > i; j--)
    if (regs_ever_live[j])
      if (!global_regs[j]
	  || j == STACK_POINTER_REGNUM
	  || j == RETURN_REGNUM
	  || j == BASE_REGISTER
	  || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
	break;

  /* Save / Restore from gpr i to j.  */
  cfun->machine->first_save_gpr = i;
  cfun->machine->first_restore_gpr = i;
  cfun->machine->last_save_gpr = j;

  /* Varargs functions need to save gprs 2 to 6.  */
  if (current_function_stdarg)
    cfun->machine->first_save_gpr = 2;
}
5314
/* Return offset between argument pointer and frame pointer
   initially after prologue, i.e. frame size + STACK_POINTER_OFFSET.

   NOTE(review): this recomputes the frame-size logic of
   s390_frame_info instead of calling it (which would clobber
   regs_ever_live[]); the two must be kept in sync.  */

HOST_WIDE_INT
s390_arg_frame_offset (void)
{
  HOST_WIDE_INT fsize = get_frame_size ();
  int save_fprs_p, i;

  /* fprs 8 - 15 are caller saved for 64 Bit ABI; if any is used,
     a 64-byte FPR save area is part of the frame.  */
  save_fprs_p = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
	{
	  save_fprs_p = 1;
	  break;
	}

  fsize = fsize + save_fprs_p * 64;

  /* Does function need to setup frame and save area.  */

  if (! current_function_is_leaf
      || fsize > 0
      || current_function_calls_alloca
      || current_function_stdarg)
    fsize += STARTING_FRAME_OFFSET;
  return fsize + STACK_POINTER_OFFSET;
}
5345
4023fb28 5346/* Emit insn to save fpr REGNUM at offset OFFSET relative
c7453384 5347 to register BASE. Return generated insn. */
994fe660 5348
9db1d521 5349static rtx
9c808aad 5350save_fpr (rtx base, int offset, int regnum)
9db1d521 5351{
4023fb28
UW
5352 rtx addr;
5353 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5354 set_mem_alias_set (addr, s390_sr_alias_set);
9db1d521 5355
4023fb28
UW
5356 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
5357}
9db1d521 5358
4023fb28 5359/* Emit insn to restore fpr REGNUM from offset OFFSET relative
c7453384 5360 to register BASE. Return generated insn. */
9db1d521 5361
4023fb28 5362static rtx
9c808aad 5363restore_fpr (rtx base, int offset, int regnum)
4023fb28
UW
5364{
5365 rtx addr;
5366 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5367 set_mem_alias_set (addr, s390_sr_alias_set);
9db1d521 5368
4023fb28 5369 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
9db1d521
HP
5370}
5371
/* Generate insn to save registers FIRST to LAST into
   the register save area located at offset OFFSET
   relative to register BASE.  The returned insn is marked
   RTX_FRAME_RELATED_P for DWARF CFI generation.  */

static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;

  addr = plus_constant (base, offset + first * UNITS_PER_WORD);
  addr = gen_rtx_MEM (Pmode, addr);
  set_mem_alias_set (addr, s390_sr_alias_set);

  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
        insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
        insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }


  insn = gen_store_multiple (addr,
			     gen_rtx_REG (Pmode, first),
			     GEN_INT (last - first + 1));


  /* We need to set the FRAME_RELATED flag on all SETs
     inside the store-multiple pattern.

     However, we must not emit DWARF records for registers 2..5
     if they are stored for use by variable arguments ...

     ??? Unfortunately, it is not enough to simply not set the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as REG_FRAME_RELATED_EXPR note.  */

  if (first >= 6)
    {
      /* No varargs registers involved: mark every SET.  */
      rtx pat = PATTERN (insn);

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else if (last >= 6)
    {
      /* Range straddles gpr 6: describe only gprs 6..LAST in a
	 separate store-multiple pattern attached as a
	 REG_FRAME_RELATED_EXPR note.  */
      addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
      note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
				 gen_rtx_REG (Pmode, 6),
				 GEN_INT (last - 6 + 1));
      note = PATTERN (note);

      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   note, REG_NOTES (insn));

      for (i = 0; i < XVECLEN (note, 0); i++)
	if (GET_CODE (XVECEXP (note, 0, i)) == SET)
	  RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;

      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
9db1d521 5447
c3cc6b78 5448/* Generate insn to restore registers FIRST to LAST from
c7453384 5449 the register save area located at offset OFFSET
c3cc6b78 5450 relative to register BASE. */
9db1d521 5451
c3cc6b78 5452static rtx
9c808aad 5453restore_gprs (rtx base, int offset, int first, int last)
4023fb28 5454{
c3cc6b78
UW
5455 rtx addr, insn;
5456
5457 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5458 addr = gen_rtx_MEM (Pmode, addr);
5459 set_mem_alias_set (addr, s390_sr_alias_set);
5460
5461 /* Special-case single register. */
5462 if (first == last)
5463 {
5464 if (TARGET_64BIT)
5465 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
5466 else
5467 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
5468
5469 return insn;
5470 }
5471
5472 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
5473 addr,
5474 GEN_INT (last - first + 1));
5475 return insn;
4023fb28 5476}
9db1d521 5477
/* Emit code to load the GOT register.  If MAYBE_DEAD is true,
   annotate generated insns with REG_MAYBE_DEAD notes so later
   passes may delete them if the GOT pointer turns out unused.  */

static GTY(()) rtx got_symbol;
void
s390_load_got (int maybe_dead)
{
  /* Lazily create the _GLOBAL_OFFSET_TABLE_ symbol (cached across
     calls; GTY keeps it alive over garbage collections).  */
  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  if (TARGET_CPU_ZARCH)
    {
      /* z/Architecture can load the GOT address with a single
	 (PC-relative) move.  */
      rtx insn = emit_move_insn (pic_offset_table_rtx, got_symbol);
      if (maybe_dead)
	REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					     REG_NOTES (insn));
    }
  else
    {
      /* On older CPUs, load the GOT offset from the literal pool,
	 then add the literal pool base (two insns; the UNSPECs are
	 resolved during literal pool layout).  */
      rtx offset, insn;

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
			       UNSPEC_LTREL_OFFSET);
      offset = gen_rtx_CONST (Pmode, offset);
      offset = force_const_mem (Pmode, offset);

      insn = emit_move_insn (pic_offset_table_rtx, offset);
      if (maybe_dead)
	REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					     REG_NOTES (insn));

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
			       UNSPEC_LTREL_BASE);
      offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);

      insn = emit_move_insn (pic_offset_table_rtx, offset);
      if (maybe_dead)
	REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					     REG_NOTES (insn));
    }
}
5522
/* Expand the prologue into a bunch of separate insns:
   save call-saved GPRs/FPRs, allocate the stack frame, set the
   backchain, establish the frame pointer and GOT pointer as needed,
   and emit the TPF tracing intercept.  */

void
s390_emit_prologue (void)
{
  rtx insn, addr;
  rtx temp_reg;
  int i;

  /* Compute frame_info (fills cfun->machine).  */

  s390_frame_info ();

  /* Choose best register to use for temp use within prologue.
     See below for why TPF must use the register 1.  */

  if (!current_function_is_leaf
      && !TARGET_TPF)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);

  /* Save call saved gprs.  */

  insn = save_gprs (stack_pointer_rtx, 0,
		    cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
  emit_insn (insn);

  /* Dummy insn to mark literal pool slot (replaced by the real pool
     during machine-dependent reorg).  */

  emit_insn (gen_main_pool ());

  /* Save fprs for variable args (f0/f2, plus f4/f6 on 64-bit).  */

  if (current_function_stdarg)
    for (i = 16; i < (TARGET_64BIT ? 20 : 18); i++)
      save_fpr (stack_pointer_rtx, 16*UNITS_PER_WORD + 8*(i-16), i);

  /* Save fprs 4 and 6 if used (31 bit ABI).  */

  if (!TARGET_64BIT)
    for (i = 18; i < 20; i++)
      if (regs_ever_live[i] && !global_regs[i])
	{
	  insn = save_fpr (stack_pointer_rtx, 16*UNITS_PER_WORD + 8*(i-16), i);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}

  /* Decrement stack pointer.  */

  if (cfun->machine->frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun->machine->frame_size);

      /* Save incoming stack pointer into temp reg (needed later
	 for the backchain and/or as FPR save area base).  */

      if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
	{
	  insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
	}

      /* Subtract frame size from stack pointer.  */

      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  /* Offset too large even for an add-immediate: take the
	     constant from the literal pool instead.  */
	  if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
	}

      /* Attach the canonical frame-size SET as a note so DWARF CFI
	 is correct regardless of which insn form was emitted.  */
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			     gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			       GEN_INT (-cfun->machine->frame_size))),
			   REG_NOTES (insn));

      /* Set backchain (store the old stack pointer at the new
	 stack bottom).  */

      if (TARGET_BACKCHAIN)
	{
	  addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  insn = emit_insn (gen_move_insn (addr, temp_reg));
	}

      /* If we support asynchronous exceptions (e.g. for Java),
	 we need to make sure the backchain pointer is set up
	 before any possibly trapping memory access.  */

      if (TARGET_BACKCHAIN && flag_non_call_exceptions)
	{
	  addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
	}
    }

  /* Save fprs 8 - 15 (64 bit ABI); temp_reg still holds the incoming
     stack pointer, so the save area sits just below the old stack
     bottom.  */

  if (cfun->machine->save_fprs_p)
    {
      insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));

      for (i = 24; i < 32; i++)
	if (regs_ever_live[i] && !global_regs[i])
	  {
	    /* ADDR expresses the slot relative to the NEW stack
	       pointer for the DWARF note below.  */
	    rtx addr = plus_constant (stack_pointer_rtx,
				      cfun->machine->frame_size - 64 + (i-24)*8);

	    insn = save_fpr (temp_reg, (i-24)*8, i);
	    RTX_FRAME_RELATED_P (insn) = 1;
	    REG_NOTES (insn) =
	      gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		gen_rtx_SET (VOIDmode,
			     gen_rtx_MEM (DFmode, addr),
			     gen_rtx_REG (DFmode, i)),
		REG_NOTES (insn));
	  }
    }

  /* Set frame pointer, if needed.  */

  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up got pointer, if needed.  */

  if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    s390_load_got(true);

  if (TARGET_TPF)
    {
      /* Generate a BAS instruction to serve as a function
	 entry intercept to facilitate the use of tracing
	 algorithms located at the branch target.

	 This must use register 1.  */
      rtx addr;
      rtx unkn;
      rtx link;

      addr = GEN_INT (0xfe0);
      unkn = CONST0_RTX (SImode);
      link = gen_rtx_REG (Pmode, 1);

      emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));

      /* Emit a blockage here so that all code
	 lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());
    }
}
9db1d521 5687
/* Expand the epilogue into a bunch of separate insns: the TPF exit
   intercept, restoring call-saved FPRs/GPRs (adjusting the frame
   pointer first when the save area is out of displacement range),
   and the final return jump.  */

void
s390_emit_epilogue (void)
{
  rtx frame_pointer, return_reg;
  int area_bottom, area_top, offset = 0;
  rtvec p;
  int i;

  if (TARGET_TPF)
    {

      /* Generate a BAS instruction to serve as a function
	 entry intercept to facilitate the use of tracing
	 algorithms located at the branch target.

	 This must use register 1.  */

      rtx addr;
      rtx unkn;
      rtx link;

      addr = GEN_INT (0xfe6);
      unkn = CONST0_RTX (SImode);
      link = gen_rtx_REG (Pmode, 1);

      /* Emit a blockage here so that all code
	 lies between the profiling mechanisms.  */
      emit_insn (gen_blockage ());

      emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
    }

  /* Check whether to use frame or stack pointer for restore.  */

  frame_pointer = frame_pointer_needed ?
    hard_frame_pointer_rtx : stack_pointer_rtx;

  /* Compute which parts of the save area we need to access:
     [area_bottom, area_top) as offsets from the frame bottom.  */

  if (cfun->machine->first_restore_gpr != -1)
    {
      area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
      area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
    }
  else
    {
      /* Empty range until the FPR checks below possibly widen it.  */
      area_bottom = INT_MAX;
      area_top = INT_MIN;
    }

  if (TARGET_64BIT)
    {
      /* The 64-bit FPR save area lies in the 64 bytes below the
	 frame bottom.  */
      if (cfun->machine->save_fprs_p)
	{
	  if (area_bottom > -64)
	    area_bottom = -64;
	  if (area_top < 0)
	    area_top = 0;
	}
    }
  else
    {
      /* 31-bit ABI: f4/f6 live in the register save area.  */
      for (i = 18; i < 20; i++)
	if (regs_ever_live[i] && !global_regs[i])
	  {
	    if (area_bottom > 16*UNITS_PER_WORD + 8*(i-16))
	      area_bottom = 16*UNITS_PER_WORD + 8*(i-16);
	    if (area_top < 16*UNITS_PER_WORD + 8*(i-16) + 8)
	      area_top = 16*UNITS_PER_WORD + 8*(i-16) + 8;
	  }
    }

  /* Check whether we can access the register save area.
     If not, increment the frame pointer as required.  */

  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
  else if (DISP_IN_RANGE (cfun->machine->frame_size + area_bottom)
	   && DISP_IN_RANGE (cfun->machine->frame_size + area_top-1))
    {
      /* Area is in range.  */
      offset = cfun->machine->frame_size;
    }
  else
    {
      /* Out of displacement range: advance FRAME_POINTER so the
	 whole area becomes addressable; OFFSET tracks the remaining
	 distance to the frame bottom.  */
      rtx insn, frame_off;

      offset = area_bottom < 0 ? -area_bottom : 0;
      frame_off = GEN_INT (cfun->machine->frame_size - offset);

      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, frame_pointer,
			      gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  /* Constant too large for add-immediate: fetch it from the
	     literal pool.  */
	  if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
	}
    }

  /* Restore call saved fprs.  */

  if (TARGET_64BIT)
    {
      if (cfun->machine->save_fprs_p)
	for (i = 24; i < 32; i++)
	  if (regs_ever_live[i] && !global_regs[i])
	    restore_fpr (frame_pointer,
			 offset - 64 + (i-24) * 8, i);
    }
  else
    {
      for (i = 18; i < 20; i++)
	if (regs_ever_live[i] && !global_regs[i])
	  restore_fpr (frame_pointer,
		       offset + 16*UNITS_PER_WORD + 8*(i-16), i);
    }

  /* Return register.  */

  return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);

  /* Restore call saved gprs.  */

  if (cfun->machine->first_restore_gpr != -1)
    {
      rtx insn, addr;
      int i;

      /* Check for global register and save them
	 to stack location from where they get restored.  This keeps
	 the later load-multiple from changing their value.  */

      for (i = cfun->machine->first_restore_gpr;
	   i <= cfun->machine->last_save_gpr;
	   i++)
	{
	  /* These registers are special and need to be
	     restored in any case.  */
	  if (i == STACK_POINTER_REGNUM
	      || i == RETURN_REGNUM
	      || i == BASE_REGISTER
	      || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
	    continue;

	  if (global_regs[i])
	    {
	      addr = plus_constant (frame_pointer,
		     offset + i * UNITS_PER_WORD);
	      addr = gen_rtx_MEM (Pmode, addr);
	      set_mem_alias_set (addr, s390_sr_alias_set);
	      emit_move_insn (addr, gen_rtx_REG (Pmode, i));
	    }
	}

      /* Fetch return address from stack before load multiple,
	 this will do good for scheduling.  */

      if (cfun->machine->save_return_addr_p
	  || (cfun->machine->first_restore_gpr < BASE_REGISTER
	      && cfun->machine->last_save_gpr > RETURN_REGNUM))
	{
	  /* Use any unused call-clobbered register, falling back
	     to gpr 4.  */
	  int return_regnum = find_unused_clobbered_reg();
	  if (!return_regnum)
	    return_regnum = 4;
	  return_reg = gen_rtx_REG (Pmode, return_regnum);

	  addr = plus_constant (frame_pointer,
				offset + RETURN_REGNUM * UNITS_PER_WORD);
	  addr = gen_rtx_MEM (Pmode, addr);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  emit_move_insn (return_reg, addr);
	}

      /* ??? As references to the base register are not made
	 explicit in insn RTX code, we have to add a barrier here
	 to prevent incorrect scheduling.  */

      emit_insn (gen_blockage());

      insn = restore_gprs (frame_pointer, offset,
			   cfun->machine->first_restore_gpr,
			   cfun->machine->last_save_gpr);
      emit_insn (insn);
    }

  /* Return to caller: a RETURN with an explicit USE of the register
     holding the return address.  */

  p = rtvec_alloc (2);

  RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
  RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
}
5890
9db1d521 5891
c7453384 5892/* Return the size in bytes of a function argument of
994fe660
UW
5893 type TYPE and/or mode MODE. At least one of TYPE or
5894 MODE must be specified. */
9db1d521
HP
5895
5896static int
9c808aad 5897s390_function_arg_size (enum machine_mode mode, tree type)
9db1d521
HP
5898{
5899 if (type)
5900 return int_size_in_bytes (type);
5901
d65f7478 5902 /* No type info available for some library calls ... */
9db1d521
HP
5903 if (mode != BLKmode)
5904 return GET_MODE_SIZE (mode);
5905
5906 /* If we have neither type nor mode, abort */
994fe660 5907 abort ();
9db1d521
HP
5908}
5909
82b1c974
UW
5910/* Return true if a function argument of type TYPE and mode MODE
5911 is to be passed in a floating-point register, if available. */
5912
5913static bool
9c808aad 5914s390_function_arg_float (enum machine_mode mode, tree type)
82b1c974 5915{
8c17530e
UW
5916 int size = s390_function_arg_size (mode, type);
5917 if (size > 8)
5918 return false;
5919
82b1c974
UW
5920 /* Soft-float changes the ABI: no floating-point registers are used. */
5921 if (TARGET_SOFT_FLOAT)
5922 return false;
5923
5924 /* No type info available for some library calls ... */
5925 if (!type)
5926 return mode == SFmode || mode == DFmode;
5927
5928 /* The ABI says that record types with a single member are treated
5929 just like that member would be. */
5930 while (TREE_CODE (type) == RECORD_TYPE)
5931 {
5932 tree field, single = NULL_TREE;
5933
5934 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5935 {
5936 if (TREE_CODE (field) != FIELD_DECL)
5937 continue;
5938
5939 if (single == NULL_TREE)
5940 single = TREE_TYPE (field);
5941 else
5942 return false;
5943 }
5944
5945 if (single == NULL_TREE)
5946 return false;
5947 else
5948 type = single;
5949 }
5950
5951 return TREE_CODE (type) == REAL_TYPE;
5952}
5953
8c17530e
UW
5954/* Return true if a function argument of type TYPE and mode MODE
5955 is to be passed in an integer register, or a pair of integer
5956 registers, if available. */
5957
5958static bool
5959s390_function_arg_integer (enum machine_mode mode, tree type)
5960{
5961 int size = s390_function_arg_size (mode, type);
5962 if (size > 8)
5963 return false;
5964
5965 /* No type info available for some library calls ... */
5966 if (!type)
5967 return GET_MODE_CLASS (mode) == MODE_INT
5968 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
5969
5970 /* We accept small integral (and similar) types. */
5971 if (INTEGRAL_TYPE_P (type)
5972 || POINTER_TYPE_P (type)
5973 || TREE_CODE (type) == OFFSET_TYPE
5974 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
5975 return true;
5976
5977 /* We also accept structs of size 1, 2, 4, 8 that are not
5978 passed in floating-point registers. */
5979 if (AGGREGATE_TYPE_P (type)
5980 && exact_log2 (size) >= 0
5981 && !s390_function_arg_float (mode, type))
5982 return true;
5983
5984 return false;
5985}
5986
994fe660
UW
5987/* Return 1 if a function argument of type TYPE and mode MODE
5988 is to be passed by reference. The ABI specifies that only
5989 structures of size 1, 2, 4, or 8 bytes are passed by value,
5990 all other structures (and complex numbers) are passed by
5991 reference. */
5992
9db1d521 5993int
9c808aad 5994s390_function_arg_pass_by_reference (enum machine_mode mode, tree type)
9db1d521
HP
5995{
5996 int size = s390_function_arg_size (mode, type);
8c17530e
UW
5997 if (size > 8)
5998 return true;
9db1d521
HP
5999
6000 if (type)
6001 {
8c17530e 6002 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
9db1d521
HP
6003 return 1;
6004
8c17530e
UW
6005 if (TREE_CODE (type) == COMPLEX_TYPE
6006 || TREE_CODE (type) == VECTOR_TYPE)
9db1d521
HP
6007 return 1;
6008 }
c7453384 6009
9db1d521 6010 return 0;
9db1d521
HP
6011}
6012
6013/* Update the data in CUM to advance over an argument of mode MODE and
6014 data type TYPE. (TYPE is null for libcalls where that information
994fe660
UW
6015 may not be available.). The boolean NAMED specifies whether the
6016 argument is a named argument (as opposed to an unnamed argument
6017 matching an ellipsis). */
9db1d521
HP
6018
6019void
9c808aad
AJ
6020s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6021 tree type, int named ATTRIBUTE_UNUSED)
9db1d521 6022{
82b1c974 6023 if (s390_function_arg_pass_by_reference (mode, type))
9db1d521 6024 {
82b1c974 6025 cum->gprs += 1;
9db1d521 6026 }
82b1c974 6027 else if (s390_function_arg_float (mode, type))
9db1d521 6028 {
82b1c974 6029 cum->fprs += 1;
9db1d521 6030 }
8c17530e 6031 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
6032 {
6033 int size = s390_function_arg_size (mode, type);
6034 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
6035 }
8c17530e
UW
6036 else
6037 abort ();
9db1d521
HP
6038}
6039
994fe660
UW
6040/* Define where to put the arguments to a function.
6041 Value is zero to push the argument on the stack,
6042 or a hard register in which to store the argument.
6043
6044 MODE is the argument's machine mode.
6045 TYPE is the data type of the argument (as a tree).
6046 This is null for libcalls where that information may
6047 not be available.
6048 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6049 the preceding args and about the function being called.
6050 NAMED is nonzero if this argument is a named parameter
c7453384 6051 (otherwise it is an extra parameter matching an ellipsis).
994fe660
UW
6052
6053 On S/390, we use general purpose registers 2 through 6 to
6054 pass integer, pointer, and certain structure arguments, and
6055 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
6056 to pass floating point arguments. All remaining arguments
6057 are pushed to the stack. */
9db1d521
HP
6058
6059rtx
9c808aad
AJ
6060s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
6061 int named ATTRIBUTE_UNUSED)
9db1d521
HP
6062{
6063 if (s390_function_arg_pass_by_reference (mode, type))
6064 return 0;
6065
82b1c974 6066 if (s390_function_arg_float (mode, type))
9db1d521
HP
6067 {
6068 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
6069 return 0;
6070 else
f1c25d3b 6071 return gen_rtx_REG (mode, cum->fprs + 16);
9db1d521 6072 }
8c17530e 6073 else if (s390_function_arg_integer (mode, type))
9db1d521
HP
6074 {
6075 int size = s390_function_arg_size (mode, type);
6076 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
6077
6078 if (cum->gprs + n_gprs > 5)
6079 return 0;
6080 else
f1c25d3b 6081 return gen_rtx_REG (mode, cum->gprs + 2);
9db1d521 6082 }
8c17530e
UW
6083
6084 /* After the real arguments, expand_call calls us once again
6085 with a void_type_node type. Whatever we return here is
6086 passed as operand 2 to the call expanders.
6087
6088 We don't need this feature ... */
6089 else if (type == void_type_node)
6090 return const0_rtx;
6091
6092 abort ();
6093}
6094
6095/* Return true if return values of type TYPE should be returned
6096 in a memory buffer whose address is passed by the caller as
6097 hidden first argument. */
6098
6099static bool
6100s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
6101{
6102 /* We accept small integral (and similar) types. */
6103 if (INTEGRAL_TYPE_P (type)
6104 || POINTER_TYPE_P (type)
6105 || TREE_CODE (type) == OFFSET_TYPE
6106 || TREE_CODE (type) == REAL_TYPE)
6107 return int_size_in_bytes (type) > 8;
6108
6109 /* Aggregates and similar constructs are always returned
6110 in memory. */
6111 if (AGGREGATE_TYPE_P (type)
6112 || TREE_CODE (type) == COMPLEX_TYPE
6113 || TREE_CODE (type) == VECTOR_TYPE)
6114 return true;
6115
6116 /* ??? We get called on all sorts of random stuff from
6117 aggregate_value_p. We can't abort, but it's not clear
6118 what's safe to return. Pretend it's a struct I guess. */
6119 return true;
6120}
6121
6122/* Define where to return a (scalar) value of type TYPE.
6123 If TYPE is null, define where to return a (scalar)
6124 value of mode MODE from a libcall. */
6125
6126rtx
6127s390_function_value (tree type, enum machine_mode mode)
6128{
6129 if (type)
6130 {
6131 int unsignedp = TREE_UNSIGNED (type);
6132 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
6133 }
6134
6135 if (GET_MODE_CLASS (mode) != MODE_INT
6136 && GET_MODE_CLASS (mode) != MODE_FLOAT)
6137 abort ();
6138 if (GET_MODE_SIZE (mode) > 8)
6139 abort ();
6140
6141 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
6142 return gen_rtx_REG (mode, 16);
6143 else
6144 return gen_rtx_REG (mode, 2);
9db1d521
HP
6145}
6146
6147
994fe660
UW
6148/* Create and return the va_list datatype.
6149
6150 On S/390, va_list is an array type equivalent to
6151
6152 typedef struct __va_list_tag
6153 {
6154 long __gpr;
6155 long __fpr;
6156 void *__overflow_arg_area;
6157 void *__reg_save_area;
994fe660
UW
6158 } va_list[1];
6159
6160 where __gpr and __fpr hold the number of general purpose
6161 or floating point arguments used up to now, respectively,
c7453384 6162 __overflow_arg_area points to the stack location of the
994fe660
UW
6163 next argument passed on the stack, and __reg_save_area
6164 always points to the start of the register area in the
6165 call frame of the current function. The function prologue
6166 saves all registers used for argument passing into this
6167 area if the function uses variable arguments. */
9db1d521 6168
c35d187f
RH
6169static tree
6170s390_build_builtin_va_list (void)
9db1d521
HP
6171{
6172 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
6173
47798692 6174 record = lang_hooks.types.make_type (RECORD_TYPE);
9db1d521
HP
6175
6176 type_decl =
6177 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
6178
c7453384 6179 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
9db1d521 6180 long_integer_type_node);
c7453384 6181 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
9db1d521
HP
6182 long_integer_type_node);
6183 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
6184 ptr_type_node);
6185 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
6186 ptr_type_node);
6187
6188 DECL_FIELD_CONTEXT (f_gpr) = record;
6189 DECL_FIELD_CONTEXT (f_fpr) = record;
6190 DECL_FIELD_CONTEXT (f_ovf) = record;
6191 DECL_FIELD_CONTEXT (f_sav) = record;
6192
6193 TREE_CHAIN (record) = type_decl;
6194 TYPE_NAME (record) = type_decl;
6195 TYPE_FIELDS (record) = f_gpr;
6196 TREE_CHAIN (f_gpr) = f_fpr;
6197 TREE_CHAIN (f_fpr) = f_ovf;
6198 TREE_CHAIN (f_ovf) = f_sav;
6199
6200 layout_type (record);
6201
6202 /* The correct type is an array type of one element. */
6203 return build_array_type (record, build_index_type (size_zero_node));
6204}

/* Implement va_start by filling the va_list structure VALIST.
   STDARG_P is always true, and ignored.
   NEXTARG points to the first anonymous stack argument.

   The following global variables are used to initialize
   the va_list structure:

     current_function_args_info:
       holds number of gprs and fprs used for named arguments.
     current_function_arg_offset_rtx:
       holds the offset of the first anonymous stack argument
       (relative to the virtual arg pointer).  */

void
s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Fetch the four field decls of __va_list_tag in declaration
     order (see s390_build_builtin_va_list).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* va_list is an array of one struct; build COMPONENT_REFs for
     each member of that struct.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */

  n_gpr = current_function_args_info.gprs;
  n_fpr = current_function_args_info.fprs;

  /* __gpr = n_gpr  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* __fpr = n_fpr  */
  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);

  /* Clamp a negative offset to zero: the overflow area cannot
     start before the incoming-args pointer.  */
  off = INTVAL (current_function_arg_offset_rtx);
  off = off < 0 ? 0 : off;
  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
	     (int)n_gpr, (int)n_fpr, off);

  t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));

  /* __overflow_arg_area = virtual_incoming_args + off  */
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.
     __reg_save_area = virtual_incoming_args - STACK_POINTER_OFFSET  */
  t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-STACK_POINTER_OFFSET, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Implement va_arg by updating the va_list structure
   VALIST as required to retrieve an argument of type
   TYPE, and returning that argument.

   Generates code equivalent to:

   if (integral value) {
     if (size  <= 4 && args.gpr < 5 ||
	 size  > 4 && args.gpr < 4 )
       ret = args.reg_save_area[args.gpr+8]
     else
       ret = *args.overflow_arg_area++;
   } else if (float value) {
     if (args.fgpr < 2)
       ret = args.reg_save_area[args.fpr+64]
     else
       ret = *args.overflow_arg_area++;
   } else if (aggregate value) {
     if (args.gpr < 5)
       ret = *args.reg_save_area[args.gpr]
     else
       ret = **args.overflow_arg_area++;
   } */

rtx
s390_va_arg (tree valist, tree type)
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Fetch the __va_list_tag field decls in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);

  /* Classify the argument and set up:
       reg       - the register counter field (__gpr or __fpr),
       n_reg     - registers consumed per argument,
       sav_ofs   - offset of this class's slots in __reg_save_area,
       sav_scale - slot size in the save area,
       max_reg   - highest counter value still passed in registers,
       indirect_p - whether the slot holds a pointer to the value.  */
  if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: aggregate type");
	  debug_tree (type);
	}

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 2 * UNITS_PER_WORD;
      sav_scale = UNITS_PER_WORD;
      size = UNITS_PER_WORD;
      max_reg = 4;
    }
  else if (s390_function_arg_float (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: float type");
	  debug_tree (type);
	}

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_WORD;
      sav_scale = 8;
      /* TARGET_64BIT has up to 4 parameter in fprs */
      max_reg = TARGET_64BIT ? 3 : 1;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: other type");
	  debug_tree (type);
	}

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      sav_ofs = 2 * UNITS_PER_WORD;

      /* Small values are right-aligned within their word slot.  */
      if (size < UNITS_PER_WORD)
	sav_ofs += UNITS_PER_WORD - size;

      sav_scale = UNITS_PER_WORD;
      /* A two-register argument must not start in the last GPR.  */
      if (n_reg > 1)
	max_reg = 3;
      else
	max_reg = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* If the register counter exceeds max_reg, take the overflow path.  */
  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
			   GEN_INT (max_reg),
			   GT, const1_rtx, Pmode, 0, lab_false);

  if (sav_ofs)
    t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
  else
    t = sav;

  /* addr = sav + sav_ofs + reg * sav_scale  */
  u = build (MULT_EXPR, long_integer_type_node,
	     reg, build_int_2 (sav_scale, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  t = build (PLUS_EXPR, ptr_type_node, t, u);
  TREE_SIDE_EFFECTS (t) = 1;

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);


  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);

  /* ... Otherwise out of the overflow area.  */

  t = save_expr (ovf);


  /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated.  */
  if (size < UNITS_PER_WORD)
    {
      /* Skip the padding so the value is right-aligned in its slot,
	 and update __overflow_arg_area accordingly.  */
      t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
      t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = save_expr (ovf);
    }

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* __overflow_arg_area += size  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* If less than max_regs a registers are retrieved out
     of register save area, increment.  */

  u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
	     build_int_2 (n_reg, 0));
  TREE_SIDE_EFFECTS (u) = 1;
  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* For by-reference arguments, dereference the slot to get the
     actual address of the value.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }


  return addr_rtx;
}


/* Builtins.  */

/* Machine-specific builtin function codes; used to index the
   code_for_builtin_* tables below.  */
enum s390_builtin
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};

/* Insn codes implementing each builtin in 64-bit mode.  */
static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

/* Insn codes implementing each builtin in 31-bit mode.  */
static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};
6475
6476static void
9c808aad 6477s390_init_builtins (void)
fd3cd001
UW
6478{
6479 tree ftype;
6480
6481 ftype = build_function_type (ptr_type_node, void_list_node);
6482 builtin_function ("__builtin_thread_pointer", ftype,
6483 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6484 NULL, NULL_TREE);
6485
6486 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6487 builtin_function ("__builtin_set_thread_pointer", ftype,
6488 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6489 NULL, NULL_TREE);
6490}
6491
6492/* Expand an expression EXP that calls a built-in function,
6493 with result going to TARGET if that's convenient
6494 (and in mode MODE if that's convenient).
6495 SUBTARGET may be used as the target for computing one of EXP's operands.
6496 IGNORE is nonzero if the value is to be ignored. */
6497
6498static rtx
9c808aad
AJ
6499s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6500 enum machine_mode mode ATTRIBUTE_UNUSED,
6501 int ignore ATTRIBUTE_UNUSED)
fd3cd001
UW
6502{
6503#define MAX_ARGS 2
6504
c7453384 6505 unsigned int const *code_for_builtin =
fd3cd001
UW
6506 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6507
6508 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6509 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6510 tree arglist = TREE_OPERAND (exp, 1);
6511 enum insn_code icode;
6512 rtx op[MAX_ARGS], pat;
6513 int arity;
6514 bool nonvoid;
6515
6516 if (fcode >= S390_BUILTIN_max)
6517 internal_error ("bad builtin fcode");
6518 icode = code_for_builtin[fcode];
6519 if (icode == 0)
6520 internal_error ("bad builtin fcode");
6521
6522 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6523
6524 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6525 arglist;
6526 arglist = TREE_CHAIN (arglist), arity++)
6527 {
6528 const struct insn_operand_data *insn_op;
6529
6530 tree arg = TREE_VALUE (arglist);
6531 if (arg == error_mark_node)
6532 return NULL_RTX;
6533 if (arity > MAX_ARGS)
6534 return NULL_RTX;
6535
6536 insn_op = &insn_data[icode].operand[arity + nonvoid];
6537
6538 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6539
6540 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6541 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6542 }
6543
6544 if (nonvoid)
6545 {
6546 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6547 if (!target
6548 || GET_MODE (target) != tmode
6549 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6550 target = gen_reg_rtx (tmode);
6551 }
6552
6553 switch (arity)
6554 {
6555 case 0:
6556 pat = GEN_FCN (icode) (target);
6557 break;
6558 case 1:
6559 if (nonvoid)
6560 pat = GEN_FCN (icode) (target, op[0]);
6561 else
6562 pat = GEN_FCN (icode) (op[0]);
6563 break;
6564 case 2:
6565 pat = GEN_FCN (icode) (target, op[0], op[1]);
6566 break;
6567 default:
6568 abort ();
6569 }
6570 if (!pat)
6571 return NULL_RTX;
6572 emit_insn (pat);
6573
6574 if (nonvoid)
6575 return target;
6576 else
6577 return const0_rtx;
6578}
6579
6580
994fe660
UW
6581/* Output assembly code for the trampoline template to
6582 stdio stream FILE.
6583
6584 On S/390, we use gpr 1 internally in the trampoline code;
6585 gpr 0 is used to hold the static chain. */
9db1d521
HP
6586
6587void
9c808aad 6588s390_trampoline_template (FILE *file)
9db1d521
HP
6589{
6590 if (TARGET_64BIT)
6591 {
6592 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
6593 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
6594 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
6595 fprintf (file, "br\t%s\n", reg_names[1]);
6596 fprintf (file, "0:\t.quad\t0\n");
6597 fprintf (file, ".quad\t0\n");
6598 }
6599 else
6600 {
6601 fprintf (file, "basr\t%s,0\n", reg_names[1]);
6602 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
6603 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
6604 fprintf (file, "br\t%s\n", reg_names[1]);
6605 fprintf (file, ".long\t0\n");
6606 fprintf (file, ".long\t0\n");
6607 }
6608}
6609
994fe660
UW
6610/* Emit RTL insns to initialize the variable parts of a trampoline.
6611 FNADDR is an RTX for the address of the function's pure code.
6612 CXT is an RTX for the static chain value for the function. */
6613
9db1d521 6614void
9c808aad 6615s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
9db1d521 6616{
a322288b 6617 emit_move_insn (gen_rtx_MEM (Pmode,
c7453384 6618 memory_address (Pmode,
994fe660 6619 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
a322288b 6620 emit_move_insn (gen_rtx_MEM (Pmode,
c7453384 6621 memory_address (Pmode,
994fe660 6622 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
9db1d521 6623}
4023fb28
UW
6624
6625/* Return rtx for 64-bit constant formed from the 32-bit subwords
6626 LOW and HIGH, independent of the host word size. */
6627
6628rtx
9c808aad 6629s390_gen_rtx_const_DI (int high, int low)
4023fb28
UW
6630{
6631#if HOST_BITS_PER_WIDE_INT >= 64
6632 HOST_WIDE_INT val;
6633 val = (HOST_WIDE_INT)high;
6634 val <<= 32;
6635 val |= (HOST_WIDE_INT)low;
c7453384 6636
4023fb28
UW
6637 return GEN_INT (val);
6638#else
6639#if HOST_BITS_PER_WIDE_INT >= 32
6640 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
6641#else
6642 abort ();
6643#endif
6644#endif
c7453384 6645}

/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler \n");

  /* op[0] - return register; op[1] - its save slot in the caller's
     frame (one word above the stack pointer).  */
  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  /* op[2] - scratch register; op[3] - the per-call-site counter label.  */
  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  /* op[4] - the _mcount entry point, via the PLT when PIC.  */
  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      /* Save return register, load counter address, call _mcount,
	 restore return register.  */
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      /* 31-bit non-PIC: bras skips over two inline address literals
	 (_mcount and the counter label) while making them addressable
	 via %2.  */
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      /* 31-bit PIC: the literals are label differences relative to
	 %l5, so the absolute addresses are reconstructed by adding
	 the base in %2 at runtime.  */
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}
6714
b64a1b53
RH
6715/* Select section for constant in constant pool. In 32-bit mode,
6716 constants go in the function section; in 64-bit mode in .rodata. */
6717
6718static void
9c808aad
AJ
6719s390_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
6720 rtx x ATTRIBUTE_UNUSED,
6721 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
b64a1b53 6722{
9e8327e3 6723 if (TARGET_CPU_ZARCH)
b64a1b53
RH
6724 readonly_data_section ();
6725 else
6726 function_section (current_function_decl);
6727}
fb49053f 6728
fd3cd001 6729/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
114278e7 6730 into its SYMBOL_REF_FLAGS. */
fb49053f
RH
6731
6732static void
9c808aad 6733s390_encode_section_info (tree decl, rtx rtl, int first)
fb49053f 6734{
c6a2438a 6735 default_encode_section_info (decl, rtl, first);
e23795ea 6736
114278e7
RH
6737 /* If a variable has a forced alignment to < 2 bytes, mark it with
6738 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
c7453384 6739 if (TREE_CODE (decl) == VAR_DECL
114278e7 6740 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
c6a2438a 6741 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
fd3cd001
UW
6742}
6743
3062825f 6744/* Output thunk to FILE that implements a C++ virtual function call (with
c7453384 6745 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
3062825f
UW
6746 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6747 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6748 relative to the resulting this pointer. */
6749
c590b625 6750static void
9c808aad
AJ
6751s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
6752 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
6753 tree function)
483ab821 6754{
89ce1c8f
JJ
6755 rtx op[10];
6756 int nonlocal = 0;
3062825f
UW
6757
6758 /* Operand 0 is the target function. */
6759 op[0] = XEXP (DECL_RTL (function), 0);
114278e7 6760 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
3062825f 6761 {
89ce1c8f
JJ
6762 nonlocal = 1;
6763 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
fd7643fb 6764 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
3062825f
UW
6765 op[0] = gen_rtx_CONST (Pmode, op[0]);
6766 }
6767
6768 /* Operand 1 is the 'this' pointer. */
61f71b34 6769 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
3062825f
UW
6770 op[1] = gen_rtx_REG (Pmode, 3);
6771 else
6772 op[1] = gen_rtx_REG (Pmode, 2);
6773
6774 /* Operand 2 is the delta. */
6775 op[2] = GEN_INT (delta);
6776
6777 /* Operand 3 is the vcall_offset. */
6778 op[3] = GEN_INT (vcall_offset);
6779
6780 /* Operand 4 is the temporary register. */
6781 op[4] = gen_rtx_REG (Pmode, 1);
6782
6783 /* Operands 5 to 8 can be used as labels. */
6784 op[5] = NULL_RTX;
6785 op[6] = NULL_RTX;
6786 op[7] = NULL_RTX;
6787 op[8] = NULL_RTX;
6788
89ce1c8f
JJ
6789 /* Operand 9 can be used for temporary register. */
6790 op[9] = NULL_RTX;
6791
3062825f
UW
6792 /* Generate code. */
6793 if (TARGET_64BIT)
6794 {
6795 /* Setup literal pool pointer if required. */
c7453384 6796 if ((!DISP_IN_RANGE (delta)
f19a9af7 6797 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
c7453384 6798 || (!DISP_IN_RANGE (vcall_offset)
f19a9af7 6799 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
3062825f
UW
6800 {
6801 op[5] = gen_label_rtx ();
6802 output_asm_insn ("larl\t%4,%5", op);
6803 }
6804
6805 /* Add DELTA to this pointer. */
6806 if (delta)
6807 {
f19a9af7 6808 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
3062825f 6809 output_asm_insn ("la\t%1,%2(%1)", op);
d3632d41
UW
6810 else if (DISP_IN_RANGE (delta))
6811 output_asm_insn ("lay\t%1,%2(%1)", op);
f19a9af7 6812 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
3062825f
UW
6813 output_asm_insn ("aghi\t%1,%2", op);
6814 else
6815 {
6816 op[6] = gen_label_rtx ();
6817 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
6818 }
6819 }
6820
6821 /* Perform vcall adjustment. */
6822 if (vcall_offset)
6823 {
d3632d41 6824 if (DISP_IN_RANGE (vcall_offset))
3062825f
UW
6825 {
6826 output_asm_insn ("lg\t%4,0(%1)", op);
6827 output_asm_insn ("ag\t%1,%3(%4)", op);
6828 }
f19a9af7 6829 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
3062825f
UW
6830 {
6831 output_asm_insn ("lghi\t%4,%3", op);
6832 output_asm_insn ("ag\t%4,0(%1)", op);
6833 output_asm_insn ("ag\t%1,0(%4)", op);
6834 }
6835 else
6836 {
6837 op[7] = gen_label_rtx ();
6838 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
6839 output_asm_insn ("ag\t%4,0(%1)", op);
6840 output_asm_insn ("ag\t%1,0(%4)", op);
6841 }
6842 }
c7453384 6843
3062825f
UW
6844 /* Jump to target. */
6845 output_asm_insn ("jg\t%0", op);
6846
6847 /* Output literal pool if required. */
6848 if (op[5])
6849 {
6850 output_asm_insn (".align\t4", op);
47798692
UW
6851 targetm.asm_out.internal_label (file, "L",
6852 CODE_LABEL_NUMBER (op[5]));
3062825f
UW
6853 }
6854 if (op[6])
6855 {
47798692
UW
6856 targetm.asm_out.internal_label (file, "L",
6857 CODE_LABEL_NUMBER (op[6]));
3062825f
UW
6858 output_asm_insn (".long\t%2", op);
6859 }
6860 if (op[7])
6861 {
47798692
UW
6862 targetm.asm_out.internal_label (file, "L",
6863 CODE_LABEL_NUMBER (op[7]));
3062825f
UW
6864 output_asm_insn (".long\t%3", op);
6865 }
6866 }
6867 else
6868 {
6869 /* Setup base pointer if required. */
6870 if (!vcall_offset
d3632d41 6871 || (!DISP_IN_RANGE (delta)
f19a9af7 6872 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
d3632d41 6873 || (!DISP_IN_RANGE (delta)
f19a9af7 6874 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
3062825f
UW
6875 {
6876 op[5] = gen_label_rtx ();
6877 output_asm_insn ("basr\t%4,0", op);
47798692
UW
6878 targetm.asm_out.internal_label (file, "L",
6879 CODE_LABEL_NUMBER (op[5]));
3062825f
UW
6880 }
6881
6882 /* Add DELTA to this pointer. */
6883 if (delta)
6884 {
f19a9af7 6885 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
3062825f 6886 output_asm_insn ("la\t%1,%2(%1)", op);
d3632d41
UW
6887 else if (DISP_IN_RANGE (delta))
6888 output_asm_insn ("lay\t%1,%2(%1)", op);
f19a9af7 6889 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
3062825f
UW
6890 output_asm_insn ("ahi\t%1,%2", op);
6891 else
6892 {
6893 op[6] = gen_label_rtx ();
6894 output_asm_insn ("a\t%1,%6-%5(%4)", op);
6895 }
6896 }
6897
6898 /* Perform vcall adjustment. */
6899 if (vcall_offset)
6900 {
f19a9af7 6901 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
3062825f
UW
6902 {
6903 output_asm_insn ("lg\t%4,0(%1)", op);
6904 output_asm_insn ("a\t%1,%3(%4)", op);
6905 }
d3632d41
UW
6906 else if (DISP_IN_RANGE (vcall_offset))
6907 {
6908 output_asm_insn ("lg\t%4,0(%1)", op);
6909 output_asm_insn ("ay\t%1,%3(%4)", op);
6910 }
f19a9af7 6911 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
3062825f
UW
6912 {
6913 output_asm_insn ("lhi\t%4,%3", op);
6914 output_asm_insn ("a\t%4,0(%1)", op);
6915 output_asm_insn ("a\t%1,0(%4)", op);
6916 }
6917 else
6918 {
6919 op[7] = gen_label_rtx ();
6920 output_asm_insn ("l\t%4,%7-%5(%4)", op);
6921 output_asm_insn ("a\t%4,0(%1)", op);
6922 output_asm_insn ("a\t%1,0(%4)", op);
6923 }
6924
6925 /* We had to clobber the base pointer register.
6926 Re-setup the base pointer (with a different base). */
6927 op[5] = gen_label_rtx ();
6928 output_asm_insn ("basr\t%4,0", op);
47798692
UW
6929 targetm.asm_out.internal_label (file, "L",
6930 CODE_LABEL_NUMBER (op[5]));
3062825f
UW
6931 }
6932
6933 /* Jump to target. */
6934 op[8] = gen_label_rtx ();
89ce1c8f 6935
3062825f
UW
6936 if (!flag_pic)
6937 output_asm_insn ("l\t%4,%8-%5(%4)", op);
89ce1c8f 6938 else if (!nonlocal)
3062825f 6939 output_asm_insn ("a\t%4,%8-%5(%4)", op);
89ce1c8f
JJ
6940 /* We cannot call through .plt, since .plt requires %r12 loaded. */
6941 else if (flag_pic == 1)
6942 {
6943 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6944 output_asm_insn ("l\t%4,%0(%4)", op);
6945 }
6946 else if (flag_pic == 2)
6947 {
6948 op[9] = gen_rtx_REG (Pmode, 0);
6949 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
6950 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6951 output_asm_insn ("ar\t%4,%9", op);
6952 output_asm_insn ("l\t%4,0(%4)", op);
6953 }
6954
3062825f
UW
6955 output_asm_insn ("br\t%4", op);
6956
6957 /* Output literal pool. */
6958 output_asm_insn (".align\t4", op);
89ce1c8f
JJ
6959
6960 if (nonlocal && flag_pic == 2)
6961 output_asm_insn (".long\t%0", op);
6962 if (nonlocal)
6963 {
6964 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6965 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
6966 }
6967
47798692 6968 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
3062825f
UW
6969 if (!flag_pic)
6970 output_asm_insn (".long\t%0", op);
6971 else
6972 output_asm_insn (".long\t%0-%5", op);
6973
6974 if (op[6])
6975 {
47798692
UW
6976 targetm.asm_out.internal_label (file, "L",
6977 CODE_LABEL_NUMBER (op[6]));
3062825f
UW
6978 output_asm_insn (".long\t%2", op);
6979 }
6980 if (op[7])
6981 {
47798692
UW
6982 targetm.asm_out.internal_label (file, "L",
6983 CODE_LABEL_NUMBER (op[7]));
3062825f
UW
6984 output_asm_insn (".long\t%3", op);
6985 }
6986 }
483ab821 6987}
3062825f 6988
c7453384 6989bool
9c808aad 6990s390_valid_pointer_mode (enum machine_mode mode)
c7453384
EC
6991{
6992 return (mode == SImode || (TARGET_64BIT && mode == DImode));
6993}

/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  /* Garbage-collected, zero-initialized allocation.  */
  return ggc_alloc_cleared (sizeof (struct machine_function));
}
7002
7003#include "gt-s390.h"
This page took 1.581198 seconds and 5 git commands to generate.