/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
#include "tree-gimple.h"

/* Machine-specific symbol_ref flags.  */
#define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)


static bool s390_assemble_integer (rtx, unsigned int, int);
static void s390_encode_section_info (tree, rtx, int);
static bool s390_cannot_force_const_mem (rtx);
static rtx s390_delegitimize_address (rtx);
static bool s390_return_in_memory (tree, tree);
static void s390_init_builtins (void);
static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                  HOST_WIDE_INT, tree);
static enum attr_type s390_safe_attr_type (rtx);

static int s390_adjust_priority (rtx, int);
static int s390_issue_rate (void);
static int s390_first_cycle_multipass_dfa_lookahead (void);
static bool s390_cannot_copy_insn_p (rtx);
static bool s390_rtx_costs (rtx, int, int, int *);
static int s390_address_cost (rtx);
static void s390_reorg (void);
static bool s390_valid_pointer_mode (enum machine_mode);
static tree s390_build_builtin_va_list (void);
static tree s390_gimplify_va_arg (tree, tree, tree *, tree *);
static bool s390_function_ok_for_sibcall (tree, tree);
static bool s390_call_saved_register_used (tree);
static bool s390_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
                                    tree, bool);
static bool s390_fixed_condition_code_regs (unsigned int *, unsigned int *);

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#ifdef HAVE_AS_TLS
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true
#endif
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY s390_return_in_memory

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead

#undef TARGET_CANNOT_COPY_INSN_P
#define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg

#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE s390_pass_by_reference

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall

#undef TARGET_FIXED_CONDITION_CODE_REGS
#define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs

struct gcc_target targetm = TARGET_INITIALIZER;

extern int reload_completed;

/* The alias set for prologue/epilogue register save/restore.  */
static int s390_sr_alias_set = 0;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* Structure used to hold the components of an S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant
   (or a signed 20-bit constant when long displacement is available,
   cf. DISP_IN_RANGE below).  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  int pointer;
};
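
/* For illustration (not part of the original sources): the memory
   operand of an RX-format instruction such as "l %r1,100(%r2,%r3)"
   decomposes into base %r3, index %r2, and displacement 100; any of
   the three components may be absent.  */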

/* Which cpu are we tuning for.  */
enum processor_type s390_tune;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

/* Strings to hold which cpu and instruction set architecture to use.  */
const char *s390_tune_string;  /* for -mtune=<xxx> */
const char *s390_arch_string;  /* for -march=<xxx> */

/* String to specify backchain mode:
   "" no-backchain, "1" backchain, "2" kernel-backchain.  */
const char *s390_backchain_string = TARGET_DEFAULT_BACKCHAIN;

const char *s390_warn_framesize_string;
const char *s390_warn_dynamicstack_string;
const char *s390_stack_size_string;
const char *s390_stack_guard_string;

HOST_WIDE_INT s390_warn_framesize = 0;
bool s390_warn_dynamicstack_p = 0;
HOST_WIDE_INT s390_stack_size = 0;
HOST_WIDE_INT s390_stack_guard = 0;

/* The following structure is embedded in the machine
   specific part of struct function.  */

struct s390_frame_layout GTY (())
{
  /* Offset within stack frame.  */
  HOST_WIDE_INT gprs_offset;
  HOST_WIDE_INT f0_offset;
  HOST_WIDE_INT f4_offset;
  HOST_WIDE_INT f8_offset;
  HOST_WIDE_INT backchain_offset;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Bits standing for floating point registers.  Set if the
     respective register has to be saved.  Starting with reg 16 (f0)
     at the rightmost bit.
     Bit 15 -  8  7  6  5  4  3  2  1  0
     fpr 15 -  8  7  5  3  1  6  4  2  0
     reg 31 - 24 23 22 21 20 19 18 17 16  */
  unsigned int fpr_bitmap;

  /* Number of floating point registers f8-f15 which must be saved.  */
  int high_fprs;

  /* Set if return address needs to be saved.  */
  bool save_return_addr_p;

  /* Set if backchain needs to be saved.  */
  bool save_backchain_p;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;
};

/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  struct s390_frame_layout frame_layout;

  /* Literal pool base register.  */
  rtx base_reg;

  /* True if we may need to perform branch splitting.  */
  bool split_branches_pending_p;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;
};

/* A few accessor macros for struct cfun->machine->s390_frame_layout.  */

#define cfun_frame_layout (cfun->machine->frame_layout)
#define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
#define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr -      \
  cfun_frame_layout.first_save_gpr + 1) * UNITS_PER_WORD)
#define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
  (1 << (BITNUM)))
#define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
  (1 << (BITNUM))))

static int s390_match_ccmode_set (rtx, enum machine_mode);
static int s390_branch_condition_mask (rtx);
static const char *s390_branch_condition_mnemonic (rtx, int);
static int check_mode (rtx, enum machine_mode *);
static int s390_short_displacement (rtx);
static int s390_decompose_address (rtx, struct s390_address *);
static rtx get_thread_pointer (void);
static rtx legitimize_tls_address (rtx, rtx);
static void print_shift_count_operand (FILE *, rtx);
static const char *get_some_local_dynamic_name (void);
static int get_some_local_dynamic_name_1 (rtx *, void *);
static int reg_used_in_mem_p (int, rtx);
static int addr_generation_dependency_p (rtx, rtx);
static int s390_split_branches (void);
static void annotate_constant_pool_refs (rtx *x);
static void find_constant_pool_ref (rtx, rtx *);
static void replace_constant_pool_ref (rtx *, rtx, rtx);
static rtx find_ltrel_base (rtx);
static void replace_ltrel_base (rtx *);
static void s390_optimize_prologue (void);
static int find_unused_clobbered_reg (void);
static void s390_frame_area (int *, int *);
static void s390_register_info (int []);
static void s390_frame_info (void);
static void s390_init_frame_layout (void);
static void s390_update_frame_layout (void);
static rtx save_fpr (rtx, int, int);
static rtx restore_fpr (rtx, int, int);
static rtx save_gprs (rtx, int, int, int);
static rtx restore_gprs (rtx, int, int, int);
static int s390_function_arg_size (enum machine_mode, tree);
static bool s390_function_arg_float (enum machine_mode, tree);
static struct machine_function * s390_init_machine_status (void);

/* Check whether integer displacement is in range.  */
#define DISP_IN_RANGE(d) \
  (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
                           : ((d) >= 0 && (d) <= 4095))

/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static int
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  if (GET_CODE (set) != SET)
    abort ();

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCL3mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      abort ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

int
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return 0;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return 0;
      }

  return 1;
}
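
/* For example (illustrative, not from the original sources): an insn
   that sets the CC register in CCZmode as the result of a compare
   with zero also satisfies a request for CCSmode or CCUmode above,
   because the CCZmode result is at least as constrained as either of
   those modes.  */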

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, int mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.  */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.  */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
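
/* Worked example (illustrative, not from the original sources): for
   OP1 == 0x0c and OP2 == 0x04, two bits are selected and exactly one
   of them is set in OP2.  Then bit1 == exact_log2 (0x04) == 2 and
   bit0 == exact_log2 (0x0c ^ 0x04) == 3, so bit0 > bit1 and the
   result is CCT1mode.  */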

/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCAPmode;
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
        {
          if (INTVAL (XEXP (op0, 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* Fall through.  */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      abort ();
    }
}
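
/* For example (illustrative, not from the original sources): for
   (compare (plus X 1000) 0) under GE, the addend 1000 satisfies the
   'K' constraint (signed 16-bit) and is positive, so CCAPmode is
   chosen; with addend -1000 the result would be CCANmode.  */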

/* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
   that we can implement more efficiently.  */

void
s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
{
  /* Convert ZERO_EXTRACT back to AND to enable TM patterns.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == ZERO_EXTRACT
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && GET_CODE (XEXP (*op0, 2)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
      HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
      HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));

      if (len > 0 && len < modesize
          && pos >= 0 && pos + len <= modesize
          && modesize <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT block;
          block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          block <<= modesize - pos - len;

          *op0 = gen_rtx_AND (GET_MODE (inner), inner,
                              gen_int_mode (block, GET_MODE (inner)));
        }
    }

  /* Narrow AND of memory against immediate to enable TM.  */
  if ((*code == EQ || *code == NE)
      && *op1 == const0_rtx
      && GET_CODE (*op0) == AND
      && GET_CODE (XEXP (*op0, 1)) == CONST_INT
      && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
    {
      rtx inner = XEXP (*op0, 0);
      rtx mask = XEXP (*op0, 1);

      /* Ignore paradoxical SUBREGs if all extra bits are masked out.  */
      if (GET_CODE (inner) == SUBREG
          && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
          && (GET_MODE_SIZE (GET_MODE (inner))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
          && ((INTVAL (mask)
               & GET_MODE_MASK (GET_MODE (inner))
               & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
              == 0))
        inner = SUBREG_REG (inner);

      /* Do not change volatile MEMs.  */
      if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
        {
          int part = s390_single_part (XEXP (*op0, 1),
                                       GET_MODE (inner), QImode, 0);
          if (part >= 0)
            {
              mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
              inner = adjust_address_nv (inner, QImode, part);
              *op0 = gen_rtx_AND (QImode, inner, mask);
            }
        }
    }

  /* Narrow comparisons against 0xffff to HImode if possible.  */
  if ((*code == EQ || *code == NE)
      && GET_CODE (*op1) == CONST_INT
      && INTVAL (*op1) == 0xffff
      && SCALAR_INT_MODE_P (GET_MODE (*op0))
      && (nonzero_bits (*op0, GET_MODE (*op0))
          & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
    {
      *op0 = gen_lowpart (HImode, *op0);
      *op1 = constm1_rtx;
    }


  /* Remove redundant UNSPEC_CMPINT conversions if possible.  */
  if (GET_CODE (*op0) == UNSPEC
      && XINT (*op0, 1) == UNSPEC_CMPINT
      && XVECLEN (*op0, 0) == 1
      && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
      && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
      && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
      && *op1 == const0_rtx)
    {
      enum rtx_code new_code = UNKNOWN;
      switch (*code)
        {
        case EQ: new_code = EQ;  break;
        case NE: new_code = NE;  break;
        case LT: new_code = GTU; break;
        case GT: new_code = LTU; break;
        case LE: new_code = GEU; break;
        case GE: new_code = LEU; break;
        default: break;
        }

      if (new_code != UNKNOWN)
        {
          *op0 = XVECEXP (*op0, 0, 0);
          *code = new_code;
        }
    }
}
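
/* Worked example for the ZERO_EXTRACT conversion above (illustrative,
   not from the original sources): testing (zero_extract X 1 7) in
   SImode against zero gives len == 1, pos == 7, modesize == 32, so
   block == 1 << (32 - 7 - 1) == 0x01000000, and the comparison
   becomes (and X 0x01000000) against zero, a TM candidate.  */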

/* Emit a compare instruction suitable to implement the comparison
   OP0 CODE OP1.  Return the correct condition RTL to be placed in
   the IF_THEN_ELSE of the conditional branch testing the result.  */

rtx
s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = s390_select_ccmode (code, op0, op1);
  rtx cc = gen_rtx_REG (mode, CC_REGNUM);

  emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
  return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
}

/* Emit a jump instruction to TARGET.  If COND is NULL_RTX, emit an
   unconditional jump, else a conditional jump under condition COND.  */

void
s390_emit_jump (rtx target, rtx cond)
{
  rtx insn;

  target = gen_rtx_LABEL_REF (VOIDmode, target);
  if (cond)
    target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);

  insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
  emit_jump_insn (insn);
}

/* Return nonzero if OP is a valid comparison operator
   for a branch condition in mode MODE.  */

int
s390_comparison (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (!COMPARISON_P (op))
    return 0;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return 0;

  return s390_branch_condition_mask (op) >= 0;
}

/* Return nonzero if OP is a valid comparison operator
   for an ALC condition in mode MODE.  */

int
s390_alc_comparison (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
    op = XEXP (op, 0);

  if (!COMPARISON_P (op))
    return 0;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return 0;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case CCL1mode:
      return GET_CODE (op) == LTU;

    case CCL2mode:
      return GET_CODE (op) == LEU;

    case CCL3mode:
      return GET_CODE (op) == GEU;

    case CCUmode:
      return GET_CODE (op) == GTU;

    case CCURmode:
      return GET_CODE (op) == LTU;

    case CCSmode:
      return GET_CODE (op) == UNGT;

    case CCSRmode:
      return GET_CODE (op) == UNLT;

    default:
      return 0;
    }
}

/* Return nonzero if OP is a valid comparison operator
   for an SLB condition in mode MODE.  */

int
s390_slb_comparison (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
    op = XEXP (op, 0);

  if (!COMPARISON_P (op))
    return 0;

  if (GET_CODE (XEXP (op, 0)) != REG
      || REGNO (XEXP (op, 0)) != CC_REGNUM
      || XEXP (op, 1) != const0_rtx)
    return 0;

  switch (GET_MODE (XEXP (op, 0)))
    {
    case CCL1mode:
      return GET_CODE (op) == GEU;

    case CCL2mode:
      return GET_CODE (op) == GTU;

    case CCL3mode:
      return GET_CODE (op) == LTU;

    case CCUmode:
      return GET_CODE (op) == LEU;

    case CCURmode:
      return GET_CODE (op) == GEU;

    case CCSmode:
      return GET_CODE (op) == LE;

    case CCSRmode:
      return GET_CODE (op) == GE;

    default:
      return 0;
    }
}

/* Return branch condition mask to implement a branch
   specified by CODE.  Return -1 for invalid comparisons.  */

static int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  if (GET_CODE (XEXP (code, 0)) != REG
      || REGNO (XEXP (code, 0)) != CC_REGNUM
      || XEXP (code, 1) != const0_rtx)
    abort ();

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC1;
        case NE: return CC0 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC2;
        case NE: return CC0 | CC1 | CC3;
        default: return -1;
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC3;
        case NE: return CC0 | CC1 | CC2;
        default: return -1;
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0 | CC2;
        case NE: return CC1 | CC3;
        default: return -1;
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU: return CC2 | CC3;  /* carry */
        case GEU: return CC0 | CC1;  /* no carry */
        default:  return -1;
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU: return CC0 | CC1;  /* borrow */
        case LEU: return CC2 | CC3;  /* no borrow */
        default:  return -1;
        }
      break;

    case CCL3mode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0 | CC2;
        case NE:  return CC1 | CC3;
        case LTU: return CC1;
        case GTU: return CC3;
        case LEU: return CC1 | CC2;
        case GEU: return CC2 | CC3;
        default:  return -1;
        }

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LTU: return CC1;
        case GTU: return CC2;
        case LEU: return CC0 | CC1;
        case GEU: return CC0 | CC2;
        default:  return -1;
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC2 | CC1 | CC3;
        case LTU: return CC2;
        case GTU: return CC1;
        case LEU: return CC0 | CC2;
        case GEU: return CC0 | CC1;
        default:  return -1;
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        case LT: return CC1 | CC3;
        case GT: return CC2;
        case LE: return CC0 | CC1 | CC3;
        case GE: return CC0 | CC2;
        default: return -1;
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        case LT: return CC1;
        case GT: return CC2 | CC3;
        case LE: return CC0 | CC1;
        case GE: return CC0 | CC2 | CC3;
        default: return -1;
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC1 | CC2 | CC3;
        case LT:        return CC1;
        case GT:        return CC2;
        case LE:        return CC0 | CC1;
        case GE:        return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC1 | CC2;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC1 | CC3;
        case UNGT:      return CC2 | CC3;
        case UNLE:      return CC0 | CC1 | CC3;
        case UNGE:      return CC0 | CC2 | CC3;
        case LTGT:      return CC1 | CC2;
        default:        return -1;
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:        return CC0;
        case NE:        return CC2 | CC1 | CC3;
        case LT:        return CC2;
        case GT:        return CC1;
        case LE:        return CC0 | CC2;
        case GE:        return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC2 | CC1;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC2 | CC3;
        case UNGT:      return CC1 | CC3;
        case UNLE:      return CC0 | CC2 | CC3;
        case UNGE:      return CC0 | CC1 | CC3;
        case LTGT:      return CC2 | CC1;
        default:        return -1;
        }
      break;

    default:
      return -1;
    }
}

/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  int mask = s390_branch_condition_mask (code);
  gcc_assert (mask >= 0);

  if (inv)
    mask ^= 15;

  if (mask < 1 || mask > 14)
    abort ();

  return mnemonic[mask];
}
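
/* For example (illustrative, not from the original sources): EQ in
   CCZmode yields mask CC0 == 8, which maps to the mnemonic "e"; the
   inverted mask 8 ^ 15 == 7 maps to "ne".  */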

/* Return the part of OP which has a value different from DEF.
   The size of the part is determined by MODE.
   Use this function only if you already know that OP really
   contains such a part.  */

unsigned HOST_WIDE_INT
s390_extract_part (rtx op, enum machine_mode mode, int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
  int part_bits = GET_MODE_BITSIZE (mode);
  unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
  int i;

  for (i = 0; i < max_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= part_bits;

      if ((value & part_mask) != (def & part_mask))
        return value & part_mask;
    }

  abort ();
}
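
/* Worked example (illustrative, not from the original sources): for
   OP == 0x12340000, MODE == HImode and DEF == 0, the low part 0x0000
   equals DEF, so the scan moves on and returns the next HImode part,
   0x1234.  */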

/* If OP is an integer constant of mode MODE with exactly one
   part of mode PART_MODE unequal to DEF, return the number of that
   part.  Otherwise, return -1.  */

int
s390_single_part (rtx op,
                  enum machine_mode mode,
                  enum machine_mode part_mode,
                  int def)
{
  unsigned HOST_WIDE_INT value = 0;
  int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
  unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
  int i, part = -1;

  if (GET_CODE (op) != CONST_INT)
    return -1;

  for (i = 0; i < n_parts; i++)
    {
      if (i == 0)
        value = (unsigned HOST_WIDE_INT) INTVAL (op);
      else
        value >>= GET_MODE_BITSIZE (part_mode);

      if ((value & part_mask) != (def & part_mask))
        {
          if (part != -1)
            return -1;
          else
            part = i;
        }
    }
  return part == -1 ? -1 : n_parts - 1 - part;
}
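
/* For instance (illustrative, not from the original sources):
   OP == 0x00120000 in DImode with PART_MODE == HImode and DEF == 0
   has exactly one nonzero HImode part, found at scan index 1, so the
   function returns 4 - 1 - 1 == 2, numbering parts from the most
   significant end.  For QImode parts this numbering equals the
   big-endian byte offset, which is how callers like
   s390_narrow_logical_operator use it.  */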

/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}

/* Check whether the address of memory reference MEM2 equals exactly
   the address of memory reference MEM1 plus DELTA.  Return true if
   we can prove this to be the case, false otherwise.  */

bool
s390_offset_p (rtx mem1, rtx mem2, rtx delta)
{
  rtx addr1, addr2, addr_delta;

  if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
    return false;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
  if (!addr_delta || !rtx_equal_p (addr_delta, delta))
    return false;

  return true;
}
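
/* For example (illustrative, not from the original sources): with
   MEM1 at (plus %r1 8) and MEM2 at (plus %r1 12), the simplified
   difference is (const_int 4), so s390_offset_p returns true for
   DELTA == (const_int 4).  */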

/* Expand logical operator CODE in mode MODE with operands OPERANDS.  */

void
s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
                              rtx *operands)
{
  enum machine_mode wmode = mode;
  rtx dst = operands[0];
  rtx src1 = operands[1];
  rtx src2 = operands[2];
  rtx op, clob, tem;

  /* If we cannot handle the operation directly, use a temp register.  */
  if (!s390_logical_operator_ok_p (operands))
    dst = gen_reg_rtx (mode);

  /* QImode and HImode patterns make sense only if we have a destination
     in memory.  Otherwise perform the operation in SImode.  */
  if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
    wmode = SImode;

  /* Widen operands if required.  */
  if (mode != wmode)
    {
      if (GET_CODE (dst) == SUBREG
          && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
        dst = tem;
      else if (REG_P (dst))
        dst = gen_rtx_SUBREG (wmode, dst, 0);
      else
        dst = gen_reg_rtx (wmode);

      if (GET_CODE (src1) == SUBREG
          && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
        src1 = tem;
      else if (GET_MODE (src1) != VOIDmode)
        src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);

      if (GET_CODE (src2) == SUBREG
          && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
        src2 = tem;
      else if (GET_MODE (src2) != VOIDmode)
        src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
    }

  /* Emit the instruction.  */
  op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
  clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
  emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));

  /* Fix up the destination if needed.  */
  if (dst != operands[0])
    emit_move_insn (operands[0], gen_lowpart (mode, dst));
}

/* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR).  */

bool
s390_logical_operator_ok_p (rtx *operands)
{
  /* If the destination operand is in memory, it needs to coincide
     with one of the source operands.  After reload, it has to be
     the first source operand.  */
  if (GET_CODE (operands[0]) == MEM)
    return rtx_equal_p (operands[0], operands[1])
           || (!reload_completed && rtx_equal_p (operands[0], operands[2]));

  return true;
}

/* Narrow logical operation CODE of memory operand MEMOP with immediate
   operand IMMOP to switch from SS to SI type instructions.  */

void
s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
{
  int def = code == AND ? -1 : 0;
  HOST_WIDE_INT mask;
  int part;

  gcc_assert (GET_CODE (*memop) == MEM);
  gcc_assert (!MEM_VOLATILE_P (*memop));

  mask = s390_extract_part (*immop, QImode, def);
  part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
  gcc_assert (part >= 0);

  *memop = adjust_address (*memop, QImode, part);
  *immop = gen_int_mode (mask, QImode);
}


/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;
}

void
override_options (void)
{
  int i;
  static struct pta
    {
      const char *const name;  /* processor name or nickname.  */
      const enum processor_type processor;
      const enum processor_flags flags;
    }
  const processor_alias_table[] =
    {
      {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
      {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
      {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
      {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
                                    | PF_LONG_DISPLACEMENT},
    };
  int const pta_size = ARRAY_SIZE (processor_alias_table);

  /* Acquire a unique set number for our register saves and restores.  */
  s390_sr_alias_set = new_alias_set ();

  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
        target_flags |= MASK_ZARCH;
      else
        target_flags &= ~MASK_ZARCH;
    }

  /* Determine processor architectural level.  */
  if (!s390_arch_string)
    s390_arch_string = TARGET_ZARCH? "z900" : "g5";

  for (i = 0; i < pta_size; i++)
    if (! strcmp (s390_arch_string, processor_alias_table[i].name))
      {
        s390_arch = processor_alias_table[i].processor;
        s390_arch_flags = processor_alias_table[i].flags;
        break;
      }
  if (i == pta_size)
    error ("Unknown cpu used in -march=%s.", s390_arch_string);

  /* Determine processor to tune for.  */
  if (!s390_tune_string)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
      s390_tune_string = s390_arch_string;
    }
  else
    {
      for (i = 0; i < pta_size; i++)
        if (! strcmp (s390_tune_string, processor_alias_table[i].name))
          {
            s390_tune = processor_alias_table[i].processor;
            s390_tune_flags = processor_alias_table[i].flags;
            break;
          }
      if (i == pta_size)
        error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
    error ("z/Architecture mode not supported on %s.", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode.");

  if (s390_warn_framesize_string)
    {
      if (sscanf (s390_warn_framesize_string, HOST_WIDE_INT_PRINT_DEC,
                  &s390_warn_framesize) != 1)
        error ("invalid value for -mwarn-framesize");
    }

  if (s390_warn_dynamicstack_string)
    s390_warn_dynamicstack_p = 1;

  if (s390_stack_size_string)
    {
      if (sscanf (s390_stack_size_string, HOST_WIDE_INT_PRINT_DEC,
                  &s390_stack_size) != 1)
        error ("invalid value for -mstack-size");

      if (exact_log2 (s390_stack_size) == -1)
        error ("stack size must be an exact power of 2");

      if (s390_stack_guard_string)
        {
          if (sscanf (s390_stack_guard_string, HOST_WIDE_INT_PRINT_DEC,
                      &s390_stack_guard) != 1)
            error ("invalid value for -mstack-guard");

          if (s390_stack_guard >= s390_stack_size)
            error ("stack size must be greater than the stack guard value");

          if (exact_log2 (s390_stack_guard) == -1)
            error ("stack guard value must be an exact power of 2");
        }
      else
        error ("-mstack-size implies use of -mstack-guard");
    }

  if (s390_stack_guard_string && !s390_stack_size_string)
    error ("-mstack-guard implies use of -mstack-size");
}

/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    CC_REGS,   ADDR_REGS, ADDR_REGS,
  ACCESS_REGS,  ACCESS_REGS
};

/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}

/* Return true if OP is a (const_int 0) operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
const0_operand (register rtx op, enum machine_mode mode)
{
  return op == CONST0_RTX (mode);
}

/* Return true if OP is constant.
   OP is the current operation.
   MODE is the current operation mode.  */

int
consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return CONSTANT_P (op);
}

/* Return true if the mode of operand OP matches MODE.
   If MODE is set to VOIDmode, set it to the mode of OP.  */

static int
check_mode (register rtx op, enum machine_mode *mode)
{
  if (*mode == VOIDmode)
    *mode = GET_MODE (op);
  else
    {
      if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
        return 0;
    }
  return 1;
}

/* Return true if OP is a valid operand for the LARL instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
larl_operand (register rtx op, enum machine_mode mode)
{
  if (! check_mode (op, &mode))
    return 0;

  /* Allow labels and local symbols.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  /* Allow adding *even* in-range constants.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
          || (INTVAL (XEXP (op, 1)) & 1) != 0)
        return 0;
#if HOST_BITS_PER_WIDE_INT > 32
      if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
          || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
        return 0;
#endif
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
            && SYMBOL_REF_TLS_MODEL (op) == 0
            && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Now we must have a @GOTENT offset or @PLT stub
     or an @INDNTPOFF TLS offset.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_GOTENT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_PLT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_INDNTPOFF)
    return 1;

  return 0;
}
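
/* For example (illustrative, not from the original sources): a local
   SYMBOL_REF is accepted, as is (const (plus SYMBOL_REF 2)), while
   (const (plus SYMBOL_REF 3)) is rejected: LARL encodes a
   halfword-relative offset, so only even addends can be expressed.  */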

/* Return true if OP is a valid S-type operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_operand (rtx op, enum machine_mode mode)
{
  struct s390_address addr;

  /* Call general_operand first, so that we don't have to
     check for many special cases.  */
  if (!general_operand (op, mode))
    return 0;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  if (GET_CODE (op) != MEM)
    return 0;
  if (!s390_decompose_address (XEXP (op, 0), &addr))
    return 0;
  if (addr.indx)
    return 0;

  return 1;
}

/* Return true if OP is a memory operand pointing to the
   literal pool, or an immediate operand.  */

bool
s390_pool_operand (rtx op)
{
  struct s390_address addr;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return true;

    case MEM:
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return false;
      if (addr.base && REG_P (addr.base) && REGNO (addr.base) == BASE_REGNUM)
        return true;
      if (addr.indx && REG_P (addr.indx) && REGNO (addr.indx) == BASE_REGNUM)
        return true;
      return false;

    default:
      return false;
    }
}

/* Return true if OP is a valid shift count operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
shift_count_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset = 0;

  if (! check_mode (op, &mode))
    return 0;

  /* We can have an integer constant, an address register,
     or a sum of the two.  Note that reload already checks
     that any register present is an address register, so
     we just check for any register here.  */
  if (GET_CODE (op) == CONST_INT)
    {
      offset = INTVAL (op);
      op = NULL_RTX;
    }
  if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
    {
      offset = INTVAL (XEXP (op, 1));
      op = XEXP (op, 0);
    }
  while (op && GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (op && GET_CODE (op) != REG)
    return 0;

  /* Unfortunately we have to reject constants that are invalid
     for an address, or else reload will get confused.  */
  if (!DISP_IN_RANGE (offset))
    return 0;

  return 1;
}

/* Return true if DISP is a valid short displacement.  */

static int
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return 1;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
    return 0;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return 1;

  return 0;
}

/* Return true if OP is a valid operand for a C constraint.  */

int
s390_extra_constraint_str (rtx op, int c, const char * str)
{
  struct s390_address addr;

  if (c != str[0])
    abort ();

  /* Check for offsettable variants of memory constraints.  */
  if (c == 'A')
    {
      /* Only accept non-volatile MEMs.  */
      if (!MEM_P (op) || MEM_VOLATILE_P (op))
        return 0;

      if ((reload_completed || reload_in_progress)
          ? !offsettable_memref_p (op)
          : !offsettable_nonstrict_memref_p (op))
        return 0;

      c = str[1];
    }

  switch (c)
    {
    case 'Q':
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'R':
      if (GET_CODE (op) != MEM)
        return 0;

      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (XEXP (op, 0), &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'S':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
        return 0;
      if (addr.indx)
        return 0;
      if (s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'T':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      if (GET_CODE (op) != MEM)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if (s390_decompose_address (XEXP (op, 0), &addr)
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'U':
      if (TARGET_LONG_DISPLACEMENT)
        {
          if (!s390_decompose_address (op, &addr))
            return 0;
          if (!s390_short_displacement (addr.disp))
            return 0;
        }
      break;

    case 'W':
      if (!TARGET_LONG_DISPLACEMENT)
        return 0;
      /* Any invalid address here will be fixed up by reload,
         so accept it for the most generic constraint.  */
      if (s390_decompose_address (op, &addr)
          && s390_short_displacement (addr.disp))
        return 0;
      break;

    case 'Y':
      return shift_count_operand (op, VOIDmode);

    default:
      return 0;
    }

  return 1;
}

/* Return true if VALUE matches the constraint STR.  */

int
s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
                                int c,
                                const char * str)
{
  enum machine_mode mode, part_mode;
  int def;
  int part, part_goal;

  if (c != str[0])
    abort ();

  switch (str[0])
    {
    case 'I':
      return (unsigned int)value < 256;

    case 'J':
      return (unsigned int)value < 4096;

    case 'K':
      return value >= -32768 && value < 32768;

    case 'L':
      return (TARGET_LONG_DISPLACEMENT ?
              (value >= -524288 && value <= 524287)
              : (value >= 0 && value <= 4095));

    case 'M':
      return value == 2147483647;

    case 'N':
      if (str[1] == 'x')
        part_goal = -1;
      else
        part_goal = str[1] - '0';

      switch (str[2])
        {
        case 'H': part_mode = HImode; break;
        case 'Q': part_mode = QImode; break;
        default:  return 0;
        }

      switch (str[3])
        {
        case 'H': mode = HImode; break;
        case 'S': mode = SImode; break;
        case 'D': mode = DImode; break;
        default:  return 0;
        }

      switch (str[4])
        {
        case '0': def = 0;  break;
        case 'F': def = -1; break;
        default:  return 0;
        }

      if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
        return 0;

      part = s390_single_part (GEN_INT (value), mode, part_mode, def);
      if (part < 0)
        return 0;
      if (part_goal != -1 && part_goal != part)
        return 0;

      break;

    default:
      return 0;
    }

  return 1;
}
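
/* For example (illustrative, not from the original sources): the
   constraint string "NxQS0" accepts an SImode value in which exactly
   one QImode part differs from 0x00, with any part number allowed.
   The value 0x0000ff00 qualifies, since exactly one of its four
   bytes is nonzero.  */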

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST:
      if (GET_CODE (XEXP (x, 0)) == MINUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
        *total = 1000;
      else
        *total = 0;
      return true;

    case CONST_INT:
      /* Force_const_mem does not work out of reload, because the
         saveable_obstack is set to reload_obstack, which does not
         live long enough.  Because of this we cannot use force_const_mem
         in addsi3.  This leads to problems with gen_add2_insn with a
         constant greater than a short.  Because of that we give an
         addition of greater constants a cost of 3 (reload1.c 10096).  */
      /* ??? saveable_obstack no longer exists.  */
      if (outer_code == PLUS
          && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
        *total = COSTS_N_INSNS (3);
      else
        *total = 0;
      return true;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case PLUS:
    case AND:
    case IOR:
    case XOR:
    case MINUS:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      if (GET_MODE (XEXP (x, 0)) == DImode)
        *total = COSTS_N_INSNS (40);
      else
        *total = COSTS_N_INSNS (7);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (33);
      return true;

    default:
      return false;
    }
}

/* Return the cost of an address rtx ADDR.  */

static int
s390_address_cost (rtx addr)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return 1000;

  return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
}

/* Return true if OP is a valid operand for the BRAS instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  register enum rtx_code code = GET_CODE (op);

  /* Allow SYMBOL_REFs.  */
  if (code == SYMBOL_REF)
    return 1;

  /* Allow @PLT stubs.  */
  if (code == CONST
      && GET_CODE (XEXP (op, 0)) == UNSPEC
      && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
    return 1;
  return 0;
}

/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
   otherwise return 0.  */

int
tls_symbolic_operand (register rtx op)
{
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;
  return SYMBOL_REF_TLS_MODEL (op);
}
\f
/* Return true if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;


  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));

  /* Check whether the address is a plain base register, or base +
     displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
           && GET_CODE (XEXP (src_addr, 0)) == REG
           && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != elt_mode
          || REGNO (SET_DEST (elt)) != dest_regno + i
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != elt_mode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
             != off + i * GET_MODE_SIZE (elt_mode))
        return 0;
    }

  return 1;
}
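
/* An operand accepted here looks like (illustrative, not from the
   original sources):
     (parallel [(set (reg 6) (mem (reg 1)))
                (set (reg 7) (mem (plus (reg 1) (const_int 4))))])
   i.e. consecutive registers loaded from consecutive addresses, as
   implemented by the LM/LMG instructions.  */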

/* Return true if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum machine_mode elt_mode;
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
  elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));

2000 /* Check whether the address is base, or base + displacement. */
2001
2002 if (GET_CODE (dest_addr) == REG)
2003 off = 0;
2004 else if (GET_CODE (dest_addr) == PLUS
2005 && GET_CODE (XEXP (dest_addr, 0)) == REG
2006 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
2007 {
2008 off = INTVAL (XEXP (dest_addr, 1));
2009 dest_addr = XEXP (dest_addr, 0);
2010 }
2011 else
2012 return 0;
2013
2014 for (i = 1; i < count; i++)
2015 {
2016 rtx elt = XVECEXP (op, 0, i);
2017
2018 if (GET_CODE (elt) != SET
2019 || GET_CODE (SET_SRC (elt)) != REG
2020 || GET_MODE (SET_SRC (elt)) != elt_mode
2021 || REGNO (SET_SRC (elt)) != src_regno + i
2022 || GET_CODE (SET_DEST (elt)) != MEM
2023 || GET_MODE (SET_DEST (elt)) != elt_mode
2024 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2025 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2026 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2027 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
2028 != off + i * GET_MODE_SIZE (elt_mode))
2029 return 0;
2030 }
2031 return 1;
2032 }
2033
2034 /* Split DImode access register reference REG (on 64-bit) into its constituent
2035 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2036 gen_highpart cannot be used as they assume all registers are word-sized,
2037 while our access registers have only half that size. */
2038
2039 void
2040 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2041 {
2042 gcc_assert (TARGET_64BIT);
2043 gcc_assert (ACCESS_REG_P (reg));
2044 gcc_assert (GET_MODE (reg) == DImode);
2045 gcc_assert (!(REGNO (reg) & 1));
2046
2047 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2048 *hi = gen_rtx_REG (SImode, REGNO (reg));
2049 }
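
/* Example (illustrative): for a DImode value held in the access
   register pair a0/a1, REGNO (reg) names the even register a0; the
   high word lives in a0 and the low word in a1, so the function sets
   *hi to a0 and *lo to a1.  */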
2050
2051 /* Return true if OP contains a symbol reference. */
2052
2053 int
2054 symbolic_reference_mentioned_p (rtx op)
2055 {
2056 register const char *fmt;
2057 register int i;
2058
2059 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2060 return 1;
2061
2062 fmt = GET_RTX_FORMAT (GET_CODE (op));
2063 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2064 {
2065 if (fmt[i] == 'E')
2066 {
2067 register int j;
2068
2069 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2070 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2071 return 1;
2072 }
2073
2074 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2075 return 1;
2076 }
2077
2078 return 0;
2079 }
2080
2081 /* Return true if OP contains a reference to a thread-local symbol. */
2082
2083 int
2084 tls_symbolic_reference_mentioned_p (rtx op)
2085 {
2086 register const char *fmt;
2087 register int i;
2088
2089 if (GET_CODE (op) == SYMBOL_REF)
2090 return tls_symbolic_operand (op);
2091
2092 fmt = GET_RTX_FORMAT (GET_CODE (op));
2093 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2094 {
2095 if (fmt[i] == 'E')
2096 {
2097 register int j;
2098
2099 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2100 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2101 return 1;
2102 }
2103
2104 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2105 return 1;
2106 }
2107
2108 return 0;
2109 }
2110
2111
2112 /* Return true if OP is a legitimate general operand when
2113 generating PIC code. It is given that flag_pic is on
2114 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2115
2116 int
2117 legitimate_pic_operand_p (register rtx op)
2118 {
2119 /* Accept all non-symbolic constants. */
2120 if (!SYMBOLIC_CONST (op))
2121 return 1;
2122
2123 /* Reject everything else; must be handled
2124 via emit_symbolic_move. */
2125 return 0;
2126 }
2127
2128 /* Returns true if the constant value OP is a legitimate general operand.
2129 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2130
2131 int
2132 legitimate_constant_p (register rtx op)
2133 {
2134 /* Accept all non-symbolic constants. */
2135 if (!SYMBOLIC_CONST (op))
2136 return 1;
2137
2138 /* Accept immediate LARL operands. */
2139 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2140 return 1;
2141
2142 /* Thread-local symbols are never legal constants. This is
2143 so that emit_call knows that computing such addresses
2144 might require a function call. */
2145 if (TLS_SYMBOLIC_CONST (op))
2146 return 0;
2147
2148 /* In the PIC case, symbolic constants must *not* be
2149 forced into the literal pool. We accept them here,
2150 so that they will be handled by emit_symbolic_move. */
2151 if (flag_pic)
2152 return 1;
2153
2154 /* All remaining non-PIC symbolic constants are
2155 forced into the literal pool. */
2156 return 0;
2157 }
2158
2159 /* Determine if it's legal to put X into the constant pool. This
2160 is not possible if X contains the address of a symbol that is
2161 not constant (TLS) or not known at final link time (PIC). */
2162
2163 static bool
2164 s390_cannot_force_const_mem (rtx x)
2165 {
2166 switch (GET_CODE (x))
2167 {
2168 case CONST_INT:
2169 case CONST_DOUBLE:
2170 /* Accept all non-symbolic constants. */
2171 return false;
2172
2173 case LABEL_REF:
2174 /* Labels are OK iff we are non-PIC. */
2175 return flag_pic != 0;
2176
2177 case SYMBOL_REF:
2178 /* 'Naked' TLS symbol references are never OK,
2179 non-TLS symbols are OK iff we are non-PIC. */
2180 if (tls_symbolic_operand (x))
2181 return true;
2182 else
2183 return flag_pic != 0;
2184
2185 case CONST:
2186 return s390_cannot_force_const_mem (XEXP (x, 0));
2187 case PLUS:
2188 case MINUS:
2189 return s390_cannot_force_const_mem (XEXP (x, 0))
2190 || s390_cannot_force_const_mem (XEXP (x, 1));
2191
2192 case UNSPEC:
2193 switch (XINT (x, 1))
2194 {
2195 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2196 case UNSPEC_LTREL_OFFSET:
2197 case UNSPEC_GOT:
2198 case UNSPEC_GOTOFF:
2199 case UNSPEC_PLTOFF:
2200 case UNSPEC_TLSGD:
2201 case UNSPEC_TLSLDM:
2202 case UNSPEC_NTPOFF:
2203 case UNSPEC_DTPOFF:
2204 case UNSPEC_GOTNTPOFF:
2205 case UNSPEC_INDNTPOFF:
2206 return false;
2207
2208 /* If the literal pool shares the code section, execute
2209 template placeholders can be put into the pool as well. */
2210 case UNSPEC_INSN:
2211 return TARGET_CPU_ZARCH;
2212
2213 default:
2214 return true;
2215 }
2216 break;
2217
2218 default:
2219 abort ();
2220 }
2221 }
2222
2223 /* Returns true if the constant value OP is a legitimate general
2224 operand during and after reload. The difference to
2225 legitimate_constant_p is that this function will not accept
2226 a constant that would need to be forced to the literal pool
2227 before it can be used as operand. */
2228
2229 int
2230 legitimate_reload_constant_p (register rtx op)
2231 {
2232 /* Accept la(y) operands. */
2233 if (GET_CODE (op) == CONST_INT
2234 && DISP_IN_RANGE (INTVAL (op)))
2235 return 1;
2236
2237 /* Accept l(g)hi operands. */
2238 if (GET_CODE (op) == CONST_INT
2239 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
2240 return 1;
2241
2242 /* Accept lliXX operands. */
2243 if (TARGET_ZARCH
2244 && s390_single_part (op, DImode, HImode, 0) >= 0)
2245 return 1;
2246
2247 /* Accept larl operands. */
2248 if (TARGET_CPU_ZARCH
2249 && larl_operand (op, VOIDmode))
2250 return 1;
2251
2252 /* Everything else cannot be handled without reload. */
2253 return 0;
2254 }
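
/* A few illustrative cases (an addition): 42 lies in la/lay
   displacement range; -30000 satisfies the 'K' constraint and loads
   via lhi/lghi; on 64-bit, 0x10000 has exactly one nonzero HImode
   part and loads via one of the llill/llilh/llihl/llihh insns;
   anything else has to come from the literal pool.  */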
2255
2256 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2257 return the class of reg to actually use. */
2258
2259 enum reg_class
2260 s390_preferred_reload_class (rtx op, enum reg_class class)
2261 {
2262 switch (GET_CODE (op))
2263 {
2264 /* Constants we cannot reload must be forced into the
2265 literal pool. */
2266
2267 case CONST_DOUBLE:
2268 case CONST_INT:
2269 if (legitimate_reload_constant_p (op))
2270 return class;
2271 else
2272 return NO_REGS;
2273
2274 /* If a symbolic constant or a PLUS is reloaded,
2275 it is most likely being used as an address, so
2276 prefer ADDR_REGS. If 'class' is not a superset
2277 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2278 case PLUS:
2279 case LABEL_REF:
2280 case SYMBOL_REF:
2281 case CONST:
2282 if (reg_class_subset_p (ADDR_REGS, class))
2283 return ADDR_REGS;
2284 else
2285 return NO_REGS;
2286
2287 default:
2288 break;
2289 }
2290
2291 return class;
2292 }
2293
2294 /* Return the register class of a scratch register needed to
2295 load IN into a register of class CLASS in MODE.
2296
2297 We need a temporary when loading a PLUS expression which
2298 is not a legitimate operand of the LOAD ADDRESS instruction. */
2299
2300 enum reg_class
2301 s390_secondary_input_reload_class (enum reg_class class,
2302 enum machine_mode mode, rtx in)
2303 {
2304 if (s390_plus_operand (in, mode))
2305 return ADDR_REGS;
2306
2307 if (reg_classes_intersect_p (CC_REGS, class))
2308 return GENERAL_REGS;
2309
2310 return NO_REGS;
2311 }
2312
2313 /* Return the register class of a scratch register needed to
2314 store a register of class CLASS in MODE into OUT:
2315
2316 We need a temporary when storing a double-word to a
2317 non-offsettable memory address. */
2318
2319 enum reg_class
2320 s390_secondary_output_reload_class (enum reg_class class,
2321 enum machine_mode mode, rtx out)
2322 {
2323 if ((TARGET_64BIT ? mode == TImode
2324 : (mode == DImode || mode == DFmode))
2325 && reg_classes_intersect_p (GENERAL_REGS, class)
2326 && GET_CODE (out) == MEM
2327 && !offsettable_memref_p (out)
2328 && !s_operand (out, VOIDmode))
2329 return ADDR_REGS;
2330
2331 if (reg_classes_intersect_p (CC_REGS, class))
2332 return GENERAL_REGS;
2333
2334 return NO_REGS;
2335 }
2336
2337 /* Return true if OP is a PLUS that is not a legitimate
2338 operand for the LA instruction.
2339 OP is the current operation.
2340 MODE is the current operation mode. */
2341
2342 int
2343 s390_plus_operand (register rtx op, enum machine_mode mode)
2344 {
2345 if (!check_mode (op, &mode) || mode != Pmode)
2346 return FALSE;
2347
2348 if (GET_CODE (op) != PLUS)
2349 return FALSE;
2350
2351 if (legitimate_la_operand_p (op))
2352 return FALSE;
2353
2354 return TRUE;
2355 }
2356
2357 /* Generate code to load SRC, which is PLUS that is not a
2358 legitimate operand for the LA instruction, into TARGET.
2359 SCRATCH may be used as scratch register. */
2360
2361 void
2362 s390_expand_plus_operand (register rtx target, register rtx src,
2363 register rtx scratch)
2364 {
2365 rtx sum1, sum2;
2366 struct s390_address ad;
2367
2368 /* src must be a PLUS; get its two operands. */
2369 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
2370 abort ();
2371
2372 /* Check if any of the two operands is already scheduled
2373 for replacement by reload. This can happen e.g. when
2374 float registers occur in an address. */
2375 sum1 = find_replacement (&XEXP (src, 0));
2376 sum2 = find_replacement (&XEXP (src, 1));
2377 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2378
2379 /* If the address is already strictly valid, there's nothing to do. */
2380 if (!s390_decompose_address (src, &ad)
2381 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2382 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2383 {
2384 /* Otherwise, one of the operands cannot be an address register;
2385 we reload its value into the scratch register. */
2386 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2387 {
2388 emit_move_insn (scratch, sum1);
2389 sum1 = scratch;
2390 }
2391 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2392 {
2393 emit_move_insn (scratch, sum2);
2394 sum2 = scratch;
2395 }
2396
2397 /* According to the way these invalid addresses are generated
2398 in reload.c, it should never happen (at least on s390) that
2399 *neither* of the PLUS components, after find_replacement
2400 was applied, is an address register. */
2401 if (sum1 == scratch && sum2 == scratch)
2402 {
2403 debug_rtx (src);
2404 abort ();
2405 }
2406
2407 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2408 }
2409
2410 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2411 is only ever performed on addresses, so we can mark the
2412 sum as legitimate for LA in any case. */
2413 s390_load_address (target, src);
2414 }
2415
2416
2417 /* Decompose an RTL expression ADDR for a memory address into
2418 its components, returned in OUT.
2419
2420 Returns 0 if ADDR is not a valid memory address, nonzero
2421 otherwise. If OUT is NULL, don't return the components,
2422 but check for validity only.
2423
2424 Note: Only addresses in canonical form are recognized.
2425 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2426 canonical form so that they will be recognized. */
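
/* For reference (an added note), the canonical shapes recognized
   below are

     (reg base)
     (plus (reg base) disp)
     (plus (reg indx) (reg base))
     (plus (plus (reg indx) (reg base)) disp)

   where disp is a CONST_INT or a (const ...) expression, plus the
   UNSPEC-wrapped literal-pool variants handled in the function
   body.  */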
2427
2428 static int
2429 s390_decompose_address (register rtx addr, struct s390_address *out)
2430 {
2431 HOST_WIDE_INT offset = 0;
2432 rtx base = NULL_RTX;
2433 rtx indx = NULL_RTX;
2434 rtx disp = NULL_RTX;
2435 rtx orig_disp;
2436 int pointer = FALSE;
2437 int base_ptr = FALSE;
2438 int indx_ptr = FALSE;
2439
2440 /* Decompose address into base + index + displacement. */
2441
2442 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2443 base = addr;
2444
2445 else if (GET_CODE (addr) == PLUS)
2446 {
2447 rtx op0 = XEXP (addr, 0);
2448 rtx op1 = XEXP (addr, 1);
2449 enum rtx_code code0 = GET_CODE (op0);
2450 enum rtx_code code1 = GET_CODE (op1);
2451
2452 if (code0 == REG || code0 == UNSPEC)
2453 {
2454 if (code1 == REG || code1 == UNSPEC)
2455 {
2456 indx = op0; /* index + base */
2457 base = op1;
2458 }
2459
2460 else
2461 {
2462 base = op0; /* base + displacement */
2463 disp = op1;
2464 }
2465 }
2466
2467 else if (code0 == PLUS)
2468 {
2469 indx = XEXP (op0, 0); /* index + base + disp */
2470 base = XEXP (op0, 1);
2471 disp = op1;
2472 }
2473
2474 else
2475 {
2476 return FALSE;
2477 }
2478 }
2479
2480 else
2481 disp = addr; /* displacement */
2482
2483 /* Extract integer part of displacement. */
2484 orig_disp = disp;
2485 if (disp)
2486 {
2487 if (GET_CODE (disp) == CONST_INT)
2488 {
2489 offset = INTVAL (disp);
2490 disp = NULL_RTX;
2491 }
2492 else if (GET_CODE (disp) == CONST
2493 && GET_CODE (XEXP (disp, 0)) == PLUS
2494 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2495 {
2496 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2497 disp = XEXP (XEXP (disp, 0), 0);
2498 }
2499 }
2500
2501 /* Strip off CONST here to avoid special case tests later. */
2502 if (disp && GET_CODE (disp) == CONST)
2503 disp = XEXP (disp, 0);
2504
2505 /* We can convert literal pool addresses to
2506 displacements by basing them off the base register. */
2507 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2508 {
2509 /* Either base or index must be free to hold the base register. */
2510 if (!base)
2511 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2512 else if (!indx)
2513 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2514 else
2515 return FALSE;
2516
2517 /* Mark up the displacement. */
2518 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2519 UNSPEC_LTREL_OFFSET);
2520 }
2521
2522 /* Validate base register. */
2523 if (base)
2524 {
2525 if (GET_CODE (base) == UNSPEC)
2526 switch (XINT (base, 1))
2527 {
2528 case UNSPEC_LTREF:
2529 if (!disp)
2530 disp = gen_rtx_UNSPEC (Pmode,
2531 gen_rtvec (1, XVECEXP (base, 0, 0)),
2532 UNSPEC_LTREL_OFFSET);
2533 else
2534 return FALSE;
2535
2536 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2537 break;
2538
2539 case UNSPEC_LTREL_BASE:
2540 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2541 break;
2542
2543 default:
2544 return FALSE;
2545 }
2546
2547 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
2548 return FALSE;
2549
2550 if (REGNO (base) == BASE_REGNUM
2551 || REGNO (base) == STACK_POINTER_REGNUM
2552 || REGNO (base) == FRAME_POINTER_REGNUM
2553 || ((reload_completed || reload_in_progress)
2554 && frame_pointer_needed
2555 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2556 || REGNO (base) == ARG_POINTER_REGNUM
2557 || (flag_pic
2558 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
2559 pointer = base_ptr = TRUE;
2560 }
2561
2562 /* Validate index register. */
2563 if (indx)
2564 {
2565 if (GET_CODE (indx) == UNSPEC)
2566 switch (XINT (indx, 1))
2567 {
2568 case UNSPEC_LTREF:
2569 if (!disp)
2570 disp = gen_rtx_UNSPEC (Pmode,
2571 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2572 UNSPEC_LTREL_OFFSET);
2573 else
2574 return FALSE;
2575
2576 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2577 break;
2578
2579 case UNSPEC_LTREL_BASE:
2580 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2581 break;
2582
2583 default:
2584 return FALSE;
2585 }
2586
2587 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2588 return FALSE;
2589
2590 if (REGNO (indx) == BASE_REGNUM
2591 || REGNO (indx) == STACK_POINTER_REGNUM
2592 || REGNO (indx) == FRAME_POINTER_REGNUM
2593 || ((reload_completed || reload_in_progress)
2594 && frame_pointer_needed
2595 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2596 || REGNO (indx) == ARG_POINTER_REGNUM
2597 || (flag_pic
2598 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2599 pointer = indx_ptr = TRUE;
2600 }
2601
2602 /* Prefer to use pointer as base, not index. */
2603 if (base && indx && !base_ptr
2604 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2605 {
2606 rtx tmp = base;
2607 base = indx;
2608 indx = tmp;
2609 }
2610
2611 /* Validate displacement. */
2612 if (!disp)
2613 {
2614 /* If the argument pointer or the return address pointer are involved,
2615 the displacement will change later anyway as the virtual registers get
2616 eliminated. This could make a valid displacement invalid, but it is
2617 more likely to make an invalid displacement valid, because we sometimes
2618 access the register save area via negative offsets to one of those
2619 registers.
2620 Thus we don't check the displacement for validity here. If after
2621 elimination the displacement turns out to be invalid after all,
2622 this is fixed up by reload in any case. */
2623 if (base != arg_pointer_rtx
2624 && indx != arg_pointer_rtx
2625 && base != return_address_pointer_rtx
2626 && indx != return_address_pointer_rtx)
2627 if (!DISP_IN_RANGE (offset))
2628 return FALSE;
2629 }
2630 else
2631 {
2632 /* All the special cases are pointers. */
2633 pointer = TRUE;
2634
2635 /* In the small-PIC case, the linker converts @GOT
2636 and @GOTNTPOFF offsets to possible displacements. */
2637 if (GET_CODE (disp) == UNSPEC
2638 && (XINT (disp, 1) == UNSPEC_GOT
2639 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
2640 && offset == 0
2641 && flag_pic == 1)
2642 {
2643 ;
2644 }
2645
2646 /* Accept chunkified literal pool symbol references. */
2647 else if (GET_CODE (disp) == MINUS
2648 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
2649 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
2650 {
2651 ;
2652 }
2653
2654 /* Accept literal pool references. */
2655 else if (GET_CODE (disp) == UNSPEC
2656 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
2657 {
2658 orig_disp = gen_rtx_CONST (Pmode, disp);
2659 if (offset)
2660 {
2661 /* If we have an offset, make sure it does not
2662 exceed the size of the constant pool entry. */
2663 rtx sym = XVECEXP (disp, 0, 0);
2664 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
2665 return FALSE;
2666
2667 orig_disp = plus_constant (orig_disp, offset);
2668 }
2669 }
2670
2671 else
2672 return FALSE;
2673 }
2674
2675 if (!base && !indx)
2676 pointer = TRUE;
2677
2678 if (out)
2679 {
2680 out->base = base;
2681 out->indx = indx;
2682 out->disp = orig_disp;
2683 out->pointer = pointer;
2684 }
2685
2686 return TRUE;
2687 }
2688
2689 /* Return nonzero if ADDR is a valid memory address.
2690 STRICT specifies whether strict register checking applies. */
2691
2692 int
2693 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2694 register rtx addr, int strict)
2695 {
2696 struct s390_address ad;
2697 if (!s390_decompose_address (addr, &ad))
2698 return FALSE;
2699
2700 if (strict)
2701 {
2702 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2703 return FALSE;
2704 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2705 return FALSE;
2706 }
2707 else
2708 {
2709 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2710 return FALSE;
2711 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2712 return FALSE;
2713 }
2714
2715 return TRUE;
2716 }
2717
2718 /* Return 1 if OP is a valid operand for the LA instruction.
2719 In 31-bit, we need to prove that the result is used as an
2720 address, as LA performs only a 31-bit addition. */
2721
2722 int
2723 legitimate_la_operand_p (register rtx op)
2724 {
2725 struct s390_address addr;
2726 if (!s390_decompose_address (op, &addr))
2727 return FALSE;
2728
2729 if (TARGET_64BIT || addr.pointer)
2730 return TRUE;
2731
2732 return FALSE;
2733 }
2734
2735 /* Return 1 if it is valid *and* preferable to use LA to
2736 compute the sum of OP1 and OP2. */
2737
2738 int
2739 preferred_la_operand_p (rtx op1, rtx op2)
2740 {
2741 struct s390_address addr;
2742
2743 if (op2 != const0_rtx)
2744 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2745
2746 if (!s390_decompose_address (op1, &addr))
2747 return FALSE;
2748 if (addr.base && !REG_OK_FOR_BASE_STRICT_P (addr.base))
2749 return FALSE;
2750 if (addr.indx && !REG_OK_FOR_INDEX_STRICT_P (addr.indx))
2751 return FALSE;
2752
2753 if (!TARGET_64BIT && !addr.pointer)
2754 return FALSE;
2755
2756 if (addr.pointer)
2757 return TRUE;
2758
2759 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2760 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2761 return TRUE;
2762
2763 return FALSE;
2764 }
2765
2766 /* Emit a forced load-address operation to load SRC into DST.
2767 This will use the LOAD ADDRESS instruction even in situations
2768 where legitimate_la_operand_p (SRC) returns false. */
2769
2770 void
2771 s390_load_address (rtx dst, rtx src)
2772 {
2773 if (TARGET_64BIT)
2774 emit_move_insn (dst, src);
2775 else
2776 emit_insn (gen_force_la_31 (dst, src));
2777 }
2778
2779 /* Return a legitimate reference for ORIG (an address) using the
2780 register REG. If REG is 0, a new pseudo is generated.
2781
2782 There are two types of references that must be handled:
2783
2784 1. Global data references must load the address from the GOT, via
2785 the PIC reg. An insn is emitted to do this load, and the reg is
2786 returned.
2787
2788 2. Static data references, constant pool addresses, and code labels
2789 compute the address as an offset from the GOT, whose base is in
2790 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2791 differentiate them from global data objects. The returned
2792 address is the PIC reg + an unspec constant.
2793
2794 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2795 reg also appears in the address. */
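
/* Illustrative asm (an addition; actual register choice is up to the
   register allocator): with flag_pic == 1 a global symbol "foo" is
   loaded from its GOT slot via a small displacement, roughly
   "l %r1,foo@GOT(%r12)"; with TARGET_CPU_ZARCH the GOT entry address
   comes from "larl %r1,foo@GOTENT" followed by a load from
   0(%r1).  */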
2796
2797 rtx
2798 legitimize_pic_address (rtx orig, rtx reg)
2799 {
2800 rtx addr = orig;
2801 rtx new = orig;
2802 rtx base;
2803
2804 if (GET_CODE (addr) == LABEL_REF
2805 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2806 {
2807 /* This is a local symbol. */
2808 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2809 {
2810 /* Access local symbols PC-relative via LARL.
2811 This is the same as in the non-PIC case, so it is
2812 handled automatically ... */
2813 }
2814 else
2815 {
2816 /* Access local symbols relative to the GOT. */
2817
2818 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2819
2820 if (reload_in_progress || reload_completed)
2821 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2822
2823 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2824 addr = gen_rtx_CONST (Pmode, addr);
2825 addr = force_const_mem (Pmode, addr);
2826 emit_move_insn (temp, addr);
2827
2828 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2829 if (reg != 0)
2830 {
2831 emit_move_insn (reg, new);
2832 new = reg;
2833 }
2834 }
2835 }
2836 else if (GET_CODE (addr) == SYMBOL_REF)
2837 {
2838 if (reg == 0)
2839 reg = gen_reg_rtx (Pmode);
2840
2841 if (flag_pic == 1)
2842 {
2843 /* Assume GOT offset < 4k. This is handled the same way
2844 in both 31- and 64-bit code (@GOT). */
2845
2846 if (reload_in_progress || reload_completed)
2847 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2848
2849 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2850 new = gen_rtx_CONST (Pmode, new);
2851 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2852 new = gen_const_mem (Pmode, new);
2853 emit_move_insn (reg, new);
2854 new = reg;
2855 }
2856 else if (TARGET_CPU_ZARCH)
2857 {
2858 /* If the GOT offset might be >= 4k, we determine the position
2859 of the GOT entry via a PC-relative LARL (@GOTENT). */
2860
2861 rtx temp = gen_reg_rtx (Pmode);
2862
2863 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2864 new = gen_rtx_CONST (Pmode, new);
2865 emit_move_insn (temp, new);
2866
2867 new = gen_const_mem (Pmode, temp);
2868 emit_move_insn (reg, new);
2869 new = reg;
2870 }
2871 else
2872 {
2873 /* If the GOT offset might be >= 4k, we have to load it
2874 from the literal pool (@GOT). */
2875
2876 rtx temp = gen_reg_rtx (Pmode);
2877
2878 if (reload_in_progress || reload_completed)
2879 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2880
2881 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2882 addr = gen_rtx_CONST (Pmode, addr);
2883 addr = force_const_mem (Pmode, addr);
2884 emit_move_insn (temp, addr);
2885
2886 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2887 new = gen_const_mem (Pmode, new);
2888 emit_move_insn (reg, new);
2889 new = reg;
2890 }
2891 }
2892 else
2893 {
2894 if (GET_CODE (addr) == CONST)
2895 {
2896 addr = XEXP (addr, 0);
2897 if (GET_CODE (addr) == UNSPEC)
2898 {
2899 if (XVECLEN (addr, 0) != 1)
2900 abort ();
2901 switch (XINT (addr, 1))
2902 {
2903 /* If someone moved a GOT-relative UNSPEC
2904 out of the literal pool, force them back in. */
2905 case UNSPEC_GOTOFF:
2906 case UNSPEC_PLTOFF:
2907 new = force_const_mem (Pmode, orig);
2908 break;
2909
2910 /* @GOT is OK as is in small-PIC code (flag_pic == 1). */
2911 case UNSPEC_GOT:
2912 if (flag_pic == 2)
2913 new = force_const_mem (Pmode, orig);
2914 break;
2915
2916 /* @GOTENT is OK as is. */
2917 case UNSPEC_GOTENT:
2918 break;
2919
2920 /* @PLT is OK as is on 64-bit, must be converted to
2921 GOT-relative @PLTOFF on 31-bit. */
2922 case UNSPEC_PLT:
2923 if (!TARGET_CPU_ZARCH)
2924 {
2925 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2926
2927 if (reload_in_progress || reload_completed)
2928 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2929
2930 addr = XVECEXP (addr, 0, 0);
2931 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2932 UNSPEC_PLTOFF);
2933 addr = gen_rtx_CONST (Pmode, addr);
2934 addr = force_const_mem (Pmode, addr);
2935 emit_move_insn (temp, addr);
2936
2937 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2938 if (reg != 0)
2939 {
2940 emit_move_insn (reg, new);
2941 new = reg;
2942 }
2943 }
2944 break;
2945
2946 /* Everything else cannot happen. */
2947 default:
2948 abort ();
2949 }
2950 }
2951 else if (GET_CODE (addr) != PLUS)
2952 abort ();
2953 }
2954 if (GET_CODE (addr) == PLUS)
2955 {
2956 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2957 /* Check first to see if this is a constant offset
2958 from a local symbol reference. */
2959 if ((GET_CODE (op0) == LABEL_REF
2960 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2961 && GET_CODE (op1) == CONST_INT)
2962 {
2963 if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
2964 {
2965 if (INTVAL (op1) & 1)
2966 {
2967 /* LARL can't handle odd offsets, so emit a
2968 pair of LARL and LA. */
2969 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2970
2971 if (!DISP_IN_RANGE (INTVAL (op1)))
2972 {
2973 int even = INTVAL (op1) - 1;
2974 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2975 op0 = gen_rtx_CONST (Pmode, op0);
2976 op1 = const1_rtx;
2977 }
2978
2979 emit_move_insn (temp, op0);
2980 new = gen_rtx_PLUS (Pmode, temp, op1);
2981
2982 if (reg != 0)
2983 {
2984 emit_move_insn (reg, new);
2985 new = reg;
2986 }
2987 }
2988 else
2989 {
2990 /* If the offset is even, we can just use LARL.
2991 This will happen automatically. */
2992 }
2993 }
2994 else
2995 {
2996 /* Access local symbols relative to the GOT. */
2997
2998 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2999
3000 if (reload_in_progress || reload_completed)
3001 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3002
3003 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
3004 UNSPEC_GOTOFF);
3005 addr = gen_rtx_PLUS (Pmode, addr, op1);
3006 addr = gen_rtx_CONST (Pmode, addr);
3007 addr = force_const_mem (Pmode, addr);
3008 emit_move_insn (temp, addr);
3009
3010 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3011 if (reg != 0)
3012 {
3013 emit_move_insn (reg, new);
3014 new = reg;
3015 }
3016 }
3017 }
3018
3019 /* Now, check whether it is a GOT relative symbol plus offset
3020 that was pulled out of the literal pool. Force it back in. */
3021
3022 else if (GET_CODE (op0) == UNSPEC
3023 && GET_CODE (op1) == CONST_INT
3024 && XINT (op0, 1) == UNSPEC_GOTOFF)
3025 {
3026 if (XVECLEN (op0, 0) != 1)
3027 abort ();
3028
3029 new = force_const_mem (Pmode, orig);
3030 }
3031
3032 /* Otherwise, compute the sum. */
3033 else
3034 {
3035 base = legitimize_pic_address (XEXP (addr, 0), reg);
3036 new = legitimize_pic_address (XEXP (addr, 1),
3037 base == reg ? NULL_RTX : reg);
3038 if (GET_CODE (new) == CONST_INT)
3039 new = plus_constant (base, INTVAL (new));
3040 else
3041 {
3042 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3043 {
3044 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3045 new = XEXP (new, 1);
3046 }
3047 new = gen_rtx_PLUS (Pmode, base, new);
3048 }
3049
3050 if (GET_CODE (new) == CONST)
3051 new = XEXP (new, 0);
3052 new = force_operand (new, 0);
3053 }
3054 }
3055 }
3056 return new;
3057 }
3058
3059 /* Load the thread pointer into a register. */
3060
3061 static rtx
3062 get_thread_pointer (void)
3063 {
3064 rtx tp = gen_reg_rtx (Pmode);
3065
3066 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3067 mark_reg_pointer (tp, BITS_PER_WORD);
3068
3069 return tp;
3070 }
3071
3072 /* Emit a TLS call insn. The call target is the SYMBOL_REF stored
3073 in s390_tls_symbol, which always refers to __tls_get_offset.
3074 The returned offset is written to RESULT_REG and a USE rtx is
3075 generated for TLS_CALL. */
3076
3077 static GTY(()) rtx s390_tls_symbol;
3078
3079 static void
3080 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3081 {
3082 rtx insn;
3083
3084 if (!flag_pic)
3085 abort ();
3086
3087 if (!s390_tls_symbol)
3088 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3089
3090 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3091 gen_rtx_REG (Pmode, RETURN_REGNUM));
3092
3093 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3094 CONST_OR_PURE_CALL_P (insn) = 1;
3095 }
3096
3097 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3098 this (thread-local) address. REG may be used as temporary. */
3099
3100 static rtx
3101 legitimize_tls_address (rtx addr, rtx reg)
3102 {
3103 rtx new, tls_call, temp, base, r2, insn;
3104
3105 if (GET_CODE (addr) == SYMBOL_REF)
3106 switch (tls_symbolic_operand (addr))
3107 {
3108 case TLS_MODEL_GLOBAL_DYNAMIC:
3109 start_sequence ();
3110 r2 = gen_rtx_REG (Pmode, 2);
3111 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3112 new = gen_rtx_CONST (Pmode, tls_call);
3113 new = force_const_mem (Pmode, new);
3114 emit_move_insn (r2, new);
3115 s390_emit_tls_call_insn (r2, tls_call);
3116 insn = get_insns ();
3117 end_sequence ();
3118
3119 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3120 temp = gen_reg_rtx (Pmode);
3121 emit_libcall_block (insn, temp, r2, new);
3122
3123 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3124 if (reg != 0)
3125 {
3126 s390_load_address (reg, new);
3127 new = reg;
3128 }
3129 break;
3130
3131 case TLS_MODEL_LOCAL_DYNAMIC:
3132 start_sequence ();
3133 r2 = gen_rtx_REG (Pmode, 2);
3134 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3135 new = gen_rtx_CONST (Pmode, tls_call);
3136 new = force_const_mem (Pmode, new);
3137 emit_move_insn (r2, new);
3138 s390_emit_tls_call_insn (r2, tls_call);
3139 insn = get_insns ();
3140 end_sequence ();
3141
3142 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3143 temp = gen_reg_rtx (Pmode);
3144 emit_libcall_block (insn, temp, r2, new);
3145
3146 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3147 base = gen_reg_rtx (Pmode);
3148 s390_load_address (base, new);
3149
3150 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3151 new = gen_rtx_CONST (Pmode, new);
3152 new = force_const_mem (Pmode, new);
3153 temp = gen_reg_rtx (Pmode);
3154 emit_move_insn (temp, new);
3155
3156 new = gen_rtx_PLUS (Pmode, base, temp);
3157 if (reg != 0)
3158 {
3159 s390_load_address (reg, new);
3160 new = reg;
3161 }
3162 break;
3163
3164 case TLS_MODEL_INITIAL_EXEC:
3165 if (flag_pic == 1)
3166 {
3167 /* Assume GOT offset < 4k. This is handled the same way
3168 in both 31- and 64-bit code. */
3169
3170 if (reload_in_progress || reload_completed)
3171 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3172
3173 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3174 new = gen_rtx_CONST (Pmode, new);
3175 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3176 new = gen_const_mem (Pmode, new);
3177 temp = gen_reg_rtx (Pmode);
3178 emit_move_insn (temp, new);
3179 }
3180 else if (TARGET_CPU_ZARCH)
3181 {
3182 /* If the GOT offset might be >= 4k, we determine the position
3183 of the GOT entry via a PC-relative LARL. */
3184
3185 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3186 new = gen_rtx_CONST (Pmode, new);
3187 temp = gen_reg_rtx (Pmode);
3188 emit_move_insn (temp, new);
3189
3190 new = gen_const_mem (Pmode, temp);
3191 temp = gen_reg_rtx (Pmode);
3192 emit_move_insn (temp, new);
3193 }
3194 else if (flag_pic)
3195 {
3196 /* If the GOT offset might be >= 4k, we have to load it
3197 from the literal pool. */
3198
3199 if (reload_in_progress || reload_completed)
3200 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3201
3202 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3203 new = gen_rtx_CONST (Pmode, new);
3204 new = force_const_mem (Pmode, new);
3205 temp = gen_reg_rtx (Pmode);
3206 emit_move_insn (temp, new);
3207
3208 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3209 new = gen_const_mem (Pmode, new);
3210
3211 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3212 temp = gen_reg_rtx (Pmode);
3213 emit_insn (gen_rtx_SET (Pmode, temp, new));
3214 }
3215 else
3216 {
3217 /* In position-dependent code, load the absolute address of
3218 the GOT entry from the literal pool. */
3219
3220 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3221 new = gen_rtx_CONST (Pmode, new);
3222 new = force_const_mem (Pmode, new);
3223 temp = gen_reg_rtx (Pmode);
3224 emit_move_insn (temp, new);
3225
3226 new = temp;
3227 new = gen_const_mem (Pmode, new);
3228 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3229 temp = gen_reg_rtx (Pmode);
3230 emit_insn (gen_rtx_SET (Pmode, temp, new));
3231 }
3232
3233 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3234 if (reg != 0)
3235 {
3236 s390_load_address (reg, new);
3237 new = reg;
3238 }
3239 break;
3240
3241 case TLS_MODEL_LOCAL_EXEC:
3242 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3243 new = gen_rtx_CONST (Pmode, new);
3244 new = force_const_mem (Pmode, new);
3245 temp = gen_reg_rtx (Pmode);
3246 emit_move_insn (temp, new);
3247
3248 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3249 if (reg != 0)
3250 {
3251 s390_load_address (reg, new);
3252 new = reg;
3253 }
3254 break;
3255
3256 default:
3257 abort ();
3258 }
3259
3260 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3261 {
3262 switch (XINT (XEXP (addr, 0), 1))
3263 {
3264 case UNSPEC_INDNTPOFF:
3265 if (TARGET_CPU_ZARCH)
3266 new = addr;
3267 else
3268 abort ();
3269 break;
3270
3271 default:
3272 abort ();
3273 }
3274 }
3275
3276 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3277 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3278 {
3279 new = XEXP (XEXP (addr, 0), 0);
3280 if (GET_CODE (new) != SYMBOL_REF)
3281 new = gen_rtx_CONST (Pmode, new);
3282
3283 new = legitimize_tls_address (new, reg);
3284 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3285 new = force_operand (new, 0);
3286 }
3287
3288 else
3289 abort (); /* for now ... */
3290
3291 return new;
3292 }
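
/* Worked example (illustrative): for the local-exec model of a
   variable "foo", the code above emits roughly

     temp = <literal pool entry holding foo@NTPOFF>;
     addr = thread_pointer + temp;

   so no GOT access and no call to __tls_get_offset is needed.  */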
3293
3294 /* Emit insns to move operands[1] into operands[0]. */
3295
3296 void
3297 emit_symbolic_move (rtx *operands)
3298 {
3299 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
3300
3301 if (GET_CODE (operands[0]) == MEM)
3302 operands[1] = force_reg (Pmode, operands[1]);
3303 else if (TLS_SYMBOLIC_CONST (operands[1]))
3304 operands[1] = legitimize_tls_address (operands[1], temp);
3305 else if (flag_pic)
3306 operands[1] = legitimize_pic_address (operands[1], temp);
3307 }
3308
3309 /* Try machine-dependent ways of modifying an illegitimate address X
3310 to be legitimate. If we find one, return the new, valid address.
3311
3312 OLDX is the address as it was before break_out_memory_refs was called.
3313 In some cases it is useful to look at this to decide what needs to be done.
3314
3315 MODE is the mode of the operand pointed to by X.
3316
3317 When -fpic is used, special handling is needed for symbolic references.
3318 See comments by legitimize_pic_address for details. */
3319
3320 rtx
3321 legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
3322 enum machine_mode mode ATTRIBUTE_UNUSED)
3323 {
3324 rtx constant_term = const0_rtx;
3325
3326 if (TLS_SYMBOLIC_CONST (x))
3327 {
3328 x = legitimize_tls_address (x, 0);
3329
3330 if (legitimate_address_p (mode, x, FALSE))
3331 return x;
3332 }
3333 else if (flag_pic)
3334 {
3335 if (SYMBOLIC_CONST (x)
3336 || (GET_CODE (x) == PLUS
3337 && (SYMBOLIC_CONST (XEXP (x, 0))
3338 || SYMBOLIC_CONST (XEXP (x, 1)))))
3339 x = legitimize_pic_address (x, 0);
3340
3341 if (legitimate_address_p (mode, x, FALSE))
3342 return x;
3343 }
3344
3345 x = eliminate_constant_term (x, &constant_term);
3346
3347 /* Optimize loading of large displacements by splitting them
3348 into the multiple of 4K and the rest; this allows the
3349 former to be CSE'd if possible.
3350
3351 Don't do this if the displacement is added to a register
3352 pointing into the stack frame, as the offsets will
3353 change later anyway. */
3354
3355 if (GET_CODE (constant_term) == CONST_INT
3356 && !TARGET_LONG_DISPLACEMENT
3357 && !DISP_IN_RANGE (INTVAL (constant_term))
3358 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3359 {
3360 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3361 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3362
3363 rtx temp = gen_reg_rtx (Pmode);
3364 rtx val = force_operand (GEN_INT (upper), temp);
3365 if (val != temp)
3366 emit_move_insn (temp, val);
3367
3368 x = gen_rtx_PLUS (Pmode, x, temp);
3369 constant_term = GEN_INT (lower);
3370 }
3371
3372 if (GET_CODE (x) == PLUS)
3373 {
3374 if (GET_CODE (XEXP (x, 0)) == REG)
3375 {
3376 register rtx temp = gen_reg_rtx (Pmode);
3377 register rtx val = force_operand (XEXP (x, 1), temp);
3378 if (val != temp)
3379 emit_move_insn (temp, val);
3380
3381 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3382 }
3383
3384 else if (GET_CODE (XEXP (x, 1)) == REG)
3385 {
3386 register rtx temp = gen_reg_rtx (Pmode);
3387 register rtx val = force_operand (XEXP (x, 0), temp);
3388 if (val != temp)
3389 emit_move_insn (temp, val);
3390
3391 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3392 }
3393 }
3394
3395 if (constant_term != const0_rtx)
3396 x = gen_rtx_PLUS (Pmode, x, constant_term);
3397
3398 return x;
3399 }
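
/* Worked example (illustrative) of the 4K splitting above: without
   long displacements, base + 0x12345 becomes

     lower = 0x12345 & 0xfff  ->  0x345
     upper = 0x12345 ^ lower  ->  0x12000

     temp = base + 0x12000    (CSE can reuse this sum)
     addr = temp + 0x345      (fits the 12-bit displacement field)

   legitimize_reload_address below plays the same trick during
   reload.  */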
3400
3401 /* Try a machine-dependent way of reloading an illegitimate address AD
3402 operand. If we find one, push the reload and return the new address.
3403
3404 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3405 and TYPE is the reload type of the current reload. */
3406
3407 rtx
3408 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3409 int opnum, int type)
3410 {
3411 if (!optimize || TARGET_LONG_DISPLACEMENT)
3412 return NULL_RTX;
3413
3414 if (GET_CODE (ad) == PLUS)
3415 {
3416 rtx tem = simplify_binary_operation (PLUS, Pmode,
3417 XEXP (ad, 0), XEXP (ad, 1));
3418 if (tem)
3419 ad = tem;
3420 }
3421
3422 if (GET_CODE (ad) == PLUS
3423 && GET_CODE (XEXP (ad, 0)) == REG
3424 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3425 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3426 {
3427 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3428 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3429 rtx cst, tem, new;
3430
3431 cst = GEN_INT (upper);
3432 if (!legitimate_reload_constant_p (cst))
3433 cst = force_const_mem (Pmode, cst);
3434
3435 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3436 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3437
3438 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3439 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3440 opnum, (enum reload_type) type);
3441 return new;
3442 }
3443
3444 return NULL_RTX;
3445 }
3446
3447 /* Emit code to copy LEN bytes from SRC to DST. */
3448
3449 void
3450 s390_expand_movmem (rtx dst, rtx src, rtx len)
3451 {
3452 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3453 {
3454 if (INTVAL (len) > 0)
3455 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3456 }
3457
3458 else if (TARGET_MVCLE)
3459 {
3460 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3461 }
3462
3463 else
3464 {
3465 rtx dst_addr, src_addr, count, blocks, temp;
3466 rtx loop_start_label = gen_label_rtx ();
3467 rtx loop_end_label = gen_label_rtx ();
3468 rtx end_label = gen_label_rtx ();
3469 enum machine_mode mode;
3470
3471 mode = GET_MODE (len);
3472 if (mode == VOIDmode)
3473 mode = Pmode;
3474
3475 dst_addr = gen_reg_rtx (Pmode);
3476 src_addr = gen_reg_rtx (Pmode);
3477 count = gen_reg_rtx (mode);
3478 blocks = gen_reg_rtx (mode);
3479
3480 convert_move (count, len, 1);
3481 emit_cmp_and_jump_insns (count, const0_rtx,
3482 EQ, NULL_RTX, mode, 1, end_label);
3483
3484 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3485 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3486 dst = change_address (dst, VOIDmode, dst_addr);
3487 src = change_address (src, VOIDmode, src_addr);
3488
3489 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3490 if (temp != count)
3491 emit_move_insn (count, temp);
3492
3493 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3494 if (temp != blocks)
3495 emit_move_insn (blocks, temp);
3496
3497 emit_cmp_and_jump_insns (blocks, const0_rtx,
3498 EQ, NULL_RTX, mode, 1, loop_end_label);
3499
3500 emit_label (loop_start_label);
3501
3502 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3503 s390_load_address (dst_addr,
3504 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3505 s390_load_address (src_addr,
3506 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3507
3508 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3509 if (temp != blocks)
3510 emit_move_insn (blocks, temp);
3511
3512 emit_cmp_and_jump_insns (blocks, const0_rtx,
3513 EQ, NULL_RTX, mode, 1, loop_end_label);
3514
3515 emit_jump (loop_start_label);
3516 emit_label (loop_end_label);
3517
3518 emit_insn (gen_movmem_short (dst, src,
3519 convert_to_mode (Pmode, count, 1)));
3520 emit_label (end_label);
3521 }
3522 }
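
/* Worked example (illustrative): MVC moves at most 256 bytes, with
   the length encoded as length - 1.  For LEN = 700 the loop above
   sets count = 699 and blocks = count >> 8 = 2, so it emits two full
   256-byte blocks followed by a final block covering the remaining
   (699 & 255) + 1 = 188 bytes.  s390_expand_clrmem and
   s390_expand_cmpmem below share this block structure.  */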
3523
3524 /* Emit code to clear LEN bytes at DST. */
3525
3526 void
3527 s390_expand_clrmem (rtx dst, rtx len)
3528 {
3529 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3530 {
3531 if (INTVAL (len) > 0)
3532 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3533 }
3534
3535 else if (TARGET_MVCLE)
3536 {
3537 emit_insn (gen_clrmem_long (dst, convert_to_mode (Pmode, len, 1)));
3538 }
3539
3540 else
3541 {
3542 rtx dst_addr, src_addr, count, blocks, temp;
3543 rtx loop_start_label = gen_label_rtx ();
3544 rtx loop_end_label = gen_label_rtx ();
3545 rtx end_label = gen_label_rtx ();
3546 enum machine_mode mode;
3547
3548 mode = GET_MODE (len);
3549 if (mode == VOIDmode)
3550 mode = Pmode;
3551
3552 dst_addr = gen_reg_rtx (Pmode);
3553 src_addr = gen_reg_rtx (Pmode);
3554 count = gen_reg_rtx (mode);
3555 blocks = gen_reg_rtx (mode);
3556
3557 convert_move (count, len, 1);
3558 emit_cmp_and_jump_insns (count, const0_rtx,
3559 EQ, NULL_RTX, mode, 1, end_label);
3560
3561 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3562 dst = change_address (dst, VOIDmode, dst_addr);
3563
3564 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3565 if (temp != count)
3566 emit_move_insn (count, temp);
3567
3568 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3569 if (temp != blocks)
3570 emit_move_insn (blocks, temp);
3571
3572 emit_cmp_and_jump_insns (blocks, const0_rtx,
3573 EQ, NULL_RTX, mode, 1, loop_end_label);
3574
3575 emit_label (loop_start_label);
3576
3577 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3578 s390_load_address (dst_addr,
3579 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3580
3581 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3582 if (temp != blocks)
3583 emit_move_insn (blocks, temp);
3584
3585 emit_cmp_and_jump_insns (blocks, const0_rtx,
3586 EQ, NULL_RTX, mode, 1, loop_end_label);
3587
3588 emit_jump (loop_start_label);
3589 emit_label (loop_end_label);
3590
3591 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3592 emit_label (end_label);
3593 }
3594 }
3595
3596 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3597 and return the result in TARGET. */
3598
3599 void
3600 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3601 {
3602 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3603 rtx tmp;
3604
3605 /* As the result of CMPINT is inverted compared to what we need,
3606 we have to swap the operands. */
3607 tmp = op0; op0 = op1; op1 = tmp;
3608
3609 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3610 {
3611 if (INTVAL (len) > 0)
3612 {
3613 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3614 emit_insn (gen_cmpint (target, ccreg));
3615 }
3616 else
3617 emit_move_insn (target, const0_rtx);
3618 }
3619 else if (TARGET_MVCLE)
3620 {
3621 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3622 emit_insn (gen_cmpint (target, ccreg));
3623 }
3624 else
3625 {
3626 rtx addr0, addr1, count, blocks, temp;
3627 rtx loop_start_label = gen_label_rtx ();
3628 rtx loop_end_label = gen_label_rtx ();
3629 rtx end_label = gen_label_rtx ();
3630 enum machine_mode mode;
3631
3632 mode = GET_MODE (len);
3633 if (mode == VOIDmode)
3634 mode = Pmode;
3635
3636 addr0 = gen_reg_rtx (Pmode);
3637 addr1 = gen_reg_rtx (Pmode);
3638 count = gen_reg_rtx (mode);
3639 blocks = gen_reg_rtx (mode);
3640
3641 convert_move (count, len, 1);
3642 emit_cmp_and_jump_insns (count, const0_rtx,
3643 EQ, NULL_RTX, mode, 1, end_label);
3644
3645 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3646 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3647 op0 = change_address (op0, VOIDmode, addr0);
3648 op1 = change_address (op1, VOIDmode, addr1);
3649
3650 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3651 if (temp != count)
3652 emit_move_insn (count, temp);
3653
3654 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3655 if (temp != blocks)
3656 emit_move_insn (blocks, temp);
3657
3658 emit_cmp_and_jump_insns (blocks, const0_rtx,
3659 EQ, NULL_RTX, mode, 1, loop_end_label);
3660
3661 emit_label (loop_start_label);
3662
3663 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3664 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3665 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3666 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3667 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3668 emit_jump_insn (temp);
3669
3670 s390_load_address (addr0,
3671 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3672 s390_load_address (addr1,
3673 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3674
3675 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3676 if (temp != blocks)
3677 emit_move_insn (blocks, temp);
3678
3679 emit_cmp_and_jump_insns (blocks, const0_rtx,
3680 EQ, NULL_RTX, mode, 1, loop_end_label);
3681
3682 emit_jump (loop_start_label);
3683 emit_label (loop_end_label);
3684
3685 emit_insn (gen_cmpmem_short (op0, op1,
3686 convert_to_mode (Pmode, count, 1)));
3687 emit_label (end_label);
3688
3689 emit_insn (gen_cmpint (target, ccreg));
3690 }
3691 }
3692
3693
3694 /* Expand conditional increment or decrement using alc/slb instructions.
3695 Should generate code setting DST to either SRC or SRC + INCREMENT,
3696 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3697 Returns true if successful, false otherwise. */
3698
3699 bool
3700 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3701 rtx dst, rtx src, rtx increment)
3702 {
3703 enum machine_mode cmp_mode;
3704 enum machine_mode cc_mode;
3705 rtx op_res;
3706 rtx insn;
3707 rtvec p;
3708
3709 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3710 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3711 cmp_mode = SImode;
3712 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3713 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3714 cmp_mode = DImode;
3715 else
3716 return false;
3717
3718 /* Try ADD LOGICAL WITH CARRY. */
3719 if (increment == const1_rtx)
3720 {
3721 /* Determine CC mode to use. */
3722 if (cmp_code == EQ || cmp_code == NE)
3723 {
3724 if (cmp_op1 != const0_rtx)
3725 {
3726 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3727 NULL_RTX, 0, OPTAB_WIDEN);
3728 cmp_op1 = const0_rtx;
3729 }
3730
3731 cmp_code = cmp_code == EQ ? LEU : GTU;
3732 }
3733
3734 if (cmp_code == LTU || cmp_code == LEU)
3735 {
3736 rtx tem = cmp_op0;
3737 cmp_op0 = cmp_op1;
3738 cmp_op1 = tem;
3739 cmp_code = swap_condition (cmp_code);
3740 }
3741
3742 switch (cmp_code)
3743 {
3744 case GTU:
3745 cc_mode = CCUmode;
3746 break;
3747
3748 case GEU:
3749 cc_mode = CCL3mode;
3750 break;
3751
3752 default:
3753 return false;
3754 }
3755
3756 /* Emit comparison instruction pattern. */
3757 if (!register_operand (cmp_op0, cmp_mode))
3758 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3759
3760 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3761 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3762 /* We use insn_invalid_p here to add clobbers if required. */
3763 if (insn_invalid_p (emit_insn (insn)))
3764 abort ();
3765
3766 /* Emit ALC instruction pattern. */
3767 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3768 gen_rtx_REG (cc_mode, CC_REGNUM),
3769 const0_rtx);
3770
3771 if (src != const0_rtx)
3772 {
3773 if (!register_operand (src, GET_MODE (dst)))
3774 src = force_reg (GET_MODE (dst), src);
3775
3776 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3777 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3778 }
3779
3780 p = rtvec_alloc (2);
3781 RTVEC_ELT (p, 0) =
3782 gen_rtx_SET (VOIDmode, dst, op_res);
3783 RTVEC_ELT (p, 1) =
3784 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3785 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3786
3787 return true;
3788 }
3789
3790 /* Try SUBTRACT LOGICAL WITH BORROW. */
3791 if (increment == constm1_rtx)
3792 {
3793 /* Determine CC mode to use. */
3794 if (cmp_code == EQ || cmp_code == NE)
3795 {
3796 if (cmp_op1 != const0_rtx)
3797 {
3798 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3799 NULL_RTX, 0, OPTAB_WIDEN);
3800 cmp_op1 = const0_rtx;
3801 }
3802
3803 cmp_code = cmp_code == EQ ? LEU : GTU;
3804 }
3805
3806 if (cmp_code == GTU || cmp_code == GEU)
3807 {
3808 rtx tem = cmp_op0;
3809 cmp_op0 = cmp_op1;
3810 cmp_op1 = tem;
3811 cmp_code = swap_condition (cmp_code);
3812 }
3813
3814 switch (cmp_code)
3815 {
3816 case LEU:
3817 cc_mode = CCUmode;
3818 break;
3819
3820 case LTU:
3821 cc_mode = CCL3mode;
3822 break;
3823
3824 default:
3825 return false;
3826 }
3827
3828 /* Emit comparison instruction pattern. */
3829 if (!register_operand (cmp_op0, cmp_mode))
3830 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3831
3832 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3833 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3834 /* We use insn_invalid_p here to add clobbers if required. */
3835 if (insn_invalid_p (emit_insn (insn)))
3836 abort ();
3837
3838 /* Emit SLB instruction pattern. */
3839 if (!register_operand (src, GET_MODE (dst)))
3840 src = force_reg (GET_MODE (dst), src);
3841
3842 op_res = gen_rtx_MINUS (GET_MODE (dst),
3843 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3844 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3845 gen_rtx_REG (cc_mode, CC_REGNUM),
3846 const0_rtx));
3847 p = rtvec_alloc (2);
3848 RTVEC_ELT (p, 0) =
3849 gen_rtx_SET (VOIDmode, dst, op_res);
3850 RTVEC_ELT (p, 1) =
3851 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3852 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3853
3854 return true;
3855 }
3856
3857 return false;
3858 }
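
/* For illustration (an addition): for dst = src + (a GTU b) the code
   above emits a CCUmode compare of a and b followed by an ALC pattern
   of the shape

     (parallel
       [(set (reg dst) (plus (plus (reg src) (const_int 0))
                             (gtu (reg:CCU 33) (const_int 0))))
        (clobber (reg:CC 33))])

   assuming CC_REGNUM is 33 as on this target; the SLB case mirrors
   this with MINUS and a borrow.  */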
3859
3860
3861 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3862 We need to emit DTP-relative relocations. */
3863
3864 void
3865 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
3866 {
3867 switch (size)
3868 {
3869 case 4:
3870 fputs ("\t.long\t", file);
3871 break;
3872 case 8:
3873 fputs ("\t.quad\t", file);
3874 break;
3875 default:
3876 abort ();
3877 }
3878 output_addr_const (file, x);
3879 fputs ("@DTPOFF", file);
3880 }
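
/* Example output (illustrative): for size 8 and a symbol "foo" this
   emits "\t.quad\tfoo@DTPOFF".  */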
3881
3882 /* In the name of slightly smaller debug output, and to cater to
3883 general assembler lossage, recognize various UNSPEC sequences
3884 and turn them back into a direct symbol reference. */
3885
3886 static rtx
3887 s390_delegitimize_address (rtx orig_x)
3888 {
3889 rtx x = orig_x, y;
3890
3891 if (GET_CODE (x) != MEM)
3892 return orig_x;
3893
3894 x = XEXP (x, 0);
3895 if (GET_CODE (x) == PLUS
3896 && GET_CODE (XEXP (x, 1)) == CONST
3897 && GET_CODE (XEXP (x, 0)) == REG
3898 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3899 {
3900 y = XEXP (XEXP (x, 1), 0);
3901 if (GET_CODE (y) == UNSPEC
3902 && XINT (y, 1) == UNSPEC_GOT)
3903 return XVECEXP (y, 0, 0);
3904 return orig_x;
3905 }
3906
3907 if (GET_CODE (x) == CONST)
3908 {
3909 y = XEXP (x, 0);
3910 if (GET_CODE (y) == UNSPEC
3911 && XINT (y, 1) == UNSPEC_GOTENT)
3912 return XVECEXP (y, 0, 0);
3913 return orig_x;
3914 }
3915
3916 return orig_x;
3917 }
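
/* Example (illustrative): a GOT load such as

     (mem (plus (reg 12) (const (unspec [(symbol_ref "foo")] UNSPEC_GOT))))

   is turned back into (symbol_ref "foo") for debug output; the
   @GOTENT form is handled the same way.  */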
3918
3919 /* Output shift count operand OP to stdio stream FILE. */
3920
3921 static void
3922 print_shift_count_operand (FILE *file, rtx op)
3923 {
3924 HOST_WIDE_INT offset = 0;
3925
3926 /* We can have an integer constant, an address register,
3927 or a sum of the two. */
3928 if (GET_CODE (op) == CONST_INT)
3929 {
3930 offset = INTVAL (op);
3931 op = NULL_RTX;
3932 }
3933 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
3934 {
3935 offset = INTVAL (XEXP (op, 1));
3936 op = XEXP (op, 0);
3937 }
3938 while (op && GET_CODE (op) == SUBREG)
3939 op = SUBREG_REG (op);
3940
3941 /* Sanity check. */
3942 if (op && (GET_CODE (op) != REG
3943 || REGNO (op) >= FIRST_PSEUDO_REGISTER
3944 || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
3945 abort ();
3946
3947 /* Shift counts are truncated to the low six bits anyway. */
3948 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
3949 if (op)
3950 fprintf (file, "(%s)", reg_names[REGNO (op)]);
3951 }
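
/* Worked example (illustrative): the operand
   (plus (reg %r3) (const_int 70)) prints as "6(%r3)", since
   70 & 63 == 6 and shift counts are truncated to six bits.  */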
3952
3953 /* Locate some local-dynamic symbol still in use by this function
3954 so that we can print its name in local-dynamic base patterns. */
3955
3956 static const char *
3957 get_some_local_dynamic_name (void)
3958 {
3959 rtx insn;
3960
3961 if (cfun->machine->some_ld_name)
3962 return cfun->machine->some_ld_name;
3963
3964 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3965 if (INSN_P (insn)
3966 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3967 return cfun->machine->some_ld_name;
3968
3969 abort ();
3970 }
3971
3972 static int
3973 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
3974 {
3975 rtx x = *px;
3976
3977 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3978 {
3979 x = get_pool_constant (x);
3980 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3981 }
3982
3983 if (GET_CODE (x) == SYMBOL_REF
3984 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3985 {
3986 cfun->machine->some_ld_name = XSTR (x, 0);
3987 return 1;
3988 }
3989
3990 return 0;
3991 }
3992
3993 /* Output machine-dependent UNSPECs occurring in address constant X
3994 in assembler syntax to stdio stream FILE. Returns true if the
3995 constant X could be recognized, false otherwise. */
3996
3997 bool
3998 s390_output_addr_const_extra (FILE *file, rtx x)
3999 {
4000 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4001 switch (XINT (x, 1))
4002 {
4003 case UNSPEC_GOTENT:
4004 output_addr_const (file, XVECEXP (x, 0, 0));
4005 fprintf (file, "@GOTENT");
4006 return true;
4007 case UNSPEC_GOT:
4008 output_addr_const (file, XVECEXP (x, 0, 0));
4009 fprintf (file, "@GOT");
4010 return true;
4011 case UNSPEC_GOTOFF:
4012 output_addr_const (file, XVECEXP (x, 0, 0));
4013 fprintf (file, "@GOTOFF");
4014 return true;
4015 case UNSPEC_PLT:
4016 output_addr_const (file, XVECEXP (x, 0, 0));
4017 fprintf (file, "@PLT");
4018 return true;
4019 case UNSPEC_PLTOFF:
4020 output_addr_const (file, XVECEXP (x, 0, 0));
4021 fprintf (file, "@PLTOFF");
4022 return true;
4023 case UNSPEC_TLSGD:
4024 output_addr_const (file, XVECEXP (x, 0, 0));
4025 fprintf (file, "@TLSGD");
4026 return true;
4027 case UNSPEC_TLSLDM:
4028 assemble_name (file, get_some_local_dynamic_name ());
4029 fprintf (file, "@TLSLDM");
4030 return true;
4031 case UNSPEC_DTPOFF:
4032 output_addr_const (file, XVECEXP (x, 0, 0));
4033 fprintf (file, "@DTPOFF");
4034 return true;
4035 case UNSPEC_NTPOFF:
4036 output_addr_const (file, XVECEXP (x, 0, 0));
4037 fprintf (file, "@NTPOFF");
4038 return true;
4039 case UNSPEC_GOTNTPOFF:
4040 output_addr_const (file, XVECEXP (x, 0, 0));
4041 fprintf (file, "@GOTNTPOFF");
4042 return true;
4043 case UNSPEC_INDNTPOFF:
4044 output_addr_const (file, XVECEXP (x, 0, 0));
4045 fprintf (file, "@INDNTPOFF");
4046 return true;
4047 }
4048
4049 return false;
4050 }
4051
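/* Editorial example (symbol name illustrative): an operand of the form
   (unspec [(symbol_ref "foo")] UNSPEC_PLT) is printed as "foo@PLT",
   matching the relocation suffix syntax the assembler expects.  */
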
4052 /* Output address operand ADDR in assembler syntax to
4053 stdio stream FILE. */
4054
4055 void
4056 print_operand_address (FILE *file, rtx addr)
4057 {
4058 struct s390_address ad;
4059
4060 if (!s390_decompose_address (addr, &ad)
4061 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4062 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
4063 output_operand_lossage ("cannot decompose address");
4064
4065 if (ad.disp)
4066 output_addr_const (file, ad.disp);
4067 else
4068 fprintf (file, "0");
4069
4070 if (ad.base && ad.indx)
4071 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4072 reg_names[REGNO (ad.base)]);
4073 else if (ad.base)
4074 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4075 }
4076
4077 /* Output operand X in assembler syntax to stdio stream FILE.
4078 CODE specifies the format flag. The following format flags
4079 are recognized:
4080
4081 'C': print opcode suffix for branch condition.
4082 'D': print opcode suffix for inverse branch condition.
4083 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4084 'O': print only the displacement of a memory reference.
4085 'R': print only the base register of a memory reference.
4086 'S': print S-type memory reference (base+displacement).
4087 'N': print the second word of a DImode operand.
4088 'M': print the second word of a TImode operand.
4089 'Y': print shift count operand.
4090
4091 'b': print integer X as if it's an unsigned byte.
4092 'x': print integer X as if it's an unsigned word.
4093 'h': print integer X as if it's a signed word.
4094 'i': print the first nonzero HImode part of X.
4095 'j': print the first HImode part of X not equal to 0xffff. */
4096
4097 void
4098 print_operand (FILE *file, rtx x, int code)
4099 {
4100 switch (code)
4101 {
4102 case 'C':
4103 fputs (s390_branch_condition_mnemonic (x, FALSE), file);
4104 return;
4105
4106 case 'D':
4107 fputs (s390_branch_condition_mnemonic (x, TRUE), file);
4108 return;
4109
4110 case 'J':
4111 if (GET_CODE (x) == SYMBOL_REF)
4112 {
4113 fprintf (file, "%s", ":tls_load:");
4114 output_addr_const (file, x);
4115 }
4116 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4117 {
4118 fprintf (file, "%s", ":tls_gdcall:");
4119 output_addr_const (file, XVECEXP (x, 0, 0));
4120 }
4121 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4122 {
4123 fprintf (file, "%s", ":tls_ldcall:");
4124 assemble_name (file, get_some_local_dynamic_name ());
4125 }
4126 else
4127 abort ();
4128 return;
4129
4130 case 'O':
4131 {
4132 struct s390_address ad;
4133
4134 if (GET_CODE (x) != MEM
4135 || !s390_decompose_address (XEXP (x, 0), &ad)
4136 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4137 || ad.indx)
4138 abort ();
4139
4140 if (ad.disp)
4141 output_addr_const (file, ad.disp);
4142 else
4143 fprintf (file, "0");
4144 }
4145 return;
4146
4147 case 'R':
4148 {
4149 struct s390_address ad;
4150
4151 if (GET_CODE (x) != MEM
4152 || !s390_decompose_address (XEXP (x, 0), &ad)
4153 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4154 || ad.indx)
4155 abort ();
4156
4157 if (ad.base)
4158 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4159 else
4160 fprintf (file, "0");
4161 }
4162 return;
4163
4164 case 'S':
4165 {
4166 struct s390_address ad;
4167
4168 if (GET_CODE (x) != MEM
4169 || !s390_decompose_address (XEXP (x, 0), &ad)
4170 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4171 || ad.indx)
4172 abort ();
4173
4174 if (ad.disp)
4175 output_addr_const (file, ad.disp);
4176 else
4177 fprintf (file, "0");
4178
4179 if (ad.base)
4180 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4181 }
4182 return;
4183
4184 case 'N':
4185 if (GET_CODE (x) == REG)
4186 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4187 else if (GET_CODE (x) == MEM)
4188 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4189 else
4190 abort ();
4191 break;
4192
4193 case 'M':
4194 if (GET_CODE (x) == REG)
4195 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4196 else if (GET_CODE (x) == MEM)
4197 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4198 else
4199 abort ();
4200 break;
4201
4202 case 'Y':
4203 print_shift_count_operand (file, x);
4204 return;
4205 }
4206
4207 switch (GET_CODE (x))
4208 {
4209 case REG:
4210 fprintf (file, "%s", reg_names[REGNO (x)]);
4211 break;
4212
4213 case MEM:
4214 output_address (XEXP (x, 0));
4215 break;
4216
4217 case CONST:
4218 case CODE_LABEL:
4219 case LABEL_REF:
4220 case SYMBOL_REF:
4221 output_addr_const (file, x);
4222 break;
4223
4224 case CONST_INT:
4225 if (code == 'b')
4226 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4227 else if (code == 'x')
4228 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4229 else if (code == 'h')
4230 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4231 else if (code == 'i')
4232 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4233 s390_extract_part (x, HImode, 0));
4234 else if (code == 'j')
4235 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4236 s390_extract_part (x, HImode, -1));
4237 else
4238 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4239 break;
4240
4241 case CONST_DOUBLE:
4242 if (GET_MODE (x) != VOIDmode)
4243 abort ();
4244 if (code == 'b')
4245 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4246 else if (code == 'x')
4247 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4248 else if (code == 'h')
4249 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4250 else
4251 abort ();
4252 break;
4253
4254 default:
4255 fatal_insn ("UNKNOWN in print_operand !?", x);
4256 break;
4257 }
4258 }
4259
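/* Editorial worked example for the 'h' flag above: for X = 0xfffe the
   expression ((0xfffe & 0xffff) ^ 0x8000) - 0x8000 evaluates to
   0x7ffe - 0x8000 = -2, i.e. the low 16 bits of X reinterpreted as a
   signed halfword.  */
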
4260 /* Target hook for assembling integer objects. We need to define it
4261 here to work around a bug in some versions of GAS, which couldn't
4262 handle values smaller than INT_MIN when printed in decimal. */
4263
4264 static bool
4265 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4266 {
4267 if (size == 8 && aligned_p
4268 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4269 {
4270 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4271 INTVAL (x));
4272 return true;
4273 }
4274 return default_assemble_integer (x, size, aligned_p);
4275 }
4276
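/* Editorial worked example (illustrative): for the CONST_INT
   -2147483649 (one below INT_MIN) the hook prints

       .quad   0xffffffff7fffffff

   so that affected GAS versions never see a decimal value smaller
   than INT_MIN.  */
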
4277 /* Returns true if register REGNO is used for forming
4278 a memory address in expression X. */
4279
4280 static int
4281 reg_used_in_mem_p (int regno, rtx x)
4282 {
4283 enum rtx_code code = GET_CODE (x);
4284 int i, j;
4285 const char *fmt;
4286
4287 if (code == MEM)
4288 {
4289 if (refers_to_regno_p (regno, regno+1,
4290 XEXP (x, 0), 0))
4291 return 1;
4292 }
4293 else if (code == SET
4294 && GET_CODE (SET_DEST (x)) == PC)
4295 {
4296 if (refers_to_regno_p (regno, regno+1,
4297 SET_SRC (x), 0))
4298 return 1;
4299 }
4300
4301 fmt = GET_RTX_FORMAT (code);
4302 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4303 {
4304 if (fmt[i] == 'e'
4305 && reg_used_in_mem_p (regno, XEXP (x, i)))
4306 return 1;
4307
4308 else if (fmt[i] == 'E')
4309 for (j = 0; j < XVECLEN (x, i); j++)
4310 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4311 return 1;
4312 }
4313 return 0;
4314 }
4315
4316 /* Returns true if expression DEP_RTX sets an address register
4317 used by instruction INSN to address memory. */
4318
4319 static int
4320 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4321 {
4322 rtx target, pat;
4323
4324 if (GET_CODE (dep_rtx) == INSN)
4325 dep_rtx = PATTERN (dep_rtx);
4326
4327 if (GET_CODE (dep_rtx) == SET)
4328 {
4329 target = SET_DEST (dep_rtx);
4330 if (GET_CODE (target) == STRICT_LOW_PART)
4331 target = XEXP (target, 0);
4332 while (GET_CODE (target) == SUBREG)
4333 target = SUBREG_REG (target);
4334
4335 if (GET_CODE (target) == REG)
4336 {
4337 int regno = REGNO (target);
4338
4339 if (s390_safe_attr_type (insn) == TYPE_LA)
4340 {
4341 pat = PATTERN (insn);
4342 if (GET_CODE (pat) == PARALLEL)
4343 {
4344 if (XVECLEN (pat, 0) != 2)
4345 abort ();
4346 pat = XVECEXP (pat, 0, 0);
4347 }
4348 if (GET_CODE (pat) == SET)
4349 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4350 else
4351 abort ();
4352 }
4353 else if (get_attr_atype (insn) == ATYPE_AGEN)
4354 return reg_used_in_mem_p (regno, PATTERN (insn));
4355 }
4356 }
4357 return 0;
4358 }
4359
4360 /* Return 1 if DEP_INSN sets a register used by INSN in the agen unit. */
4361
4362 int
4363 s390_agen_dep_p (rtx dep_insn, rtx insn)
4364 {
4365 rtx dep_rtx = PATTERN (dep_insn);
4366 int i;
4367
4368 if (GET_CODE (dep_rtx) == SET
4369 && addr_generation_dependency_p (dep_rtx, insn))
4370 return 1;
4371 else if (GET_CODE (dep_rtx) == PARALLEL)
4372 {
4373 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4374 {
4375 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4376 return 1;
4377 }
4378 }
4379 return 0;
4380 }
4381
4382 /* Implement the TARGET_SCHED_ADJUST_PRIORITY hook: update the integer
4383 scheduling priority INSN_PRIORITY (INSN). Increasing the priority
4384 makes INSN execute earlier; reducing it makes INSN execute later.
4385 Priorities of insns the hook does not care about are left unchanged.
4386 
4387 An STD instruction should be scheduled earlier,
4388 in order to use the bypass. */
4389
4390 static int
4391 s390_adjust_priority (rtx insn, int priority)
4392 {
4393 if (! INSN_P (insn))
4394 return priority;
4395
4396 if (s390_tune != PROCESSOR_2084_Z990)
4397 return priority;
4398
4399 switch (s390_safe_attr_type (insn))
4400 {
4401 case TYPE_FSTORED:
4402 case TYPE_FSTORES:
4403 priority = priority << 3;
4404 break;
4405 case TYPE_STORE:
4406 case TYPE_STM:
4407 priority = priority << 1;
4408 break;
4409 default:
4410 break;
4411 }
4412 return priority;
4413 }
4414
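/* Editorial worked example: when tuning for the z990, an incoming
   priority of 2 becomes 4 (<< 1) for TYPE_STORE/TYPE_STM insns and
   16 (<< 3) for TYPE_FSTORED/TYPE_FSTORES insns, moving stores
   earlier in the ready list.  */
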
4415 /* The number of instructions that can be issued per cycle. */
4416
4417 static int
4418 s390_issue_rate (void)
4419 {
4420 if (s390_tune == PROCESSOR_2084_Z990)
4421 return 3;
4422 return 1;
4423 }
4424
4425 static int
4426 s390_first_cycle_multipass_dfa_lookahead (void)
4427 {
4428 return 4;
4429 }
4430
4431
4432 /* Split all branches that exceed the maximum distance.
4433 Returns true if this created a new literal pool entry. */
4434
4435 static int
4436 s390_split_branches (void)
4437 {
4438 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4439 int new_literal = 0;
4440 rtx insn, pat, tmp, target;
4441 rtx *label;
4442
4443 /* We need correct insn addresses. */
4444
4445 shorten_branches (get_insns ());
4446
4447 /* Find all branches that exceed 64KB, and split them. */
4448
4449 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4450 {
4451 if (GET_CODE (insn) != JUMP_INSN)
4452 continue;
4453
4454 pat = PATTERN (insn);
4455 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4456 pat = XVECEXP (pat, 0, 0);
4457 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
4458 continue;
4459
4460 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
4461 {
4462 label = &SET_SRC (pat);
4463 }
4464 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
4465 {
4466 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
4467 label = &XEXP (SET_SRC (pat), 1);
4468 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
4469 label = &XEXP (SET_SRC (pat), 2);
4470 else
4471 continue;
4472 }
4473 else
4474 continue;
4475
4476 if (get_attr_length (insn) <= 4)
4477 continue;
4478
4479 /* We are going to use the return register as a scratch register;
4480 make sure it will be saved/restored by the prologue/epilogue. */
4481 cfun_frame_layout.save_return_addr_p = 1;
4482
4483 if (!flag_pic)
4484 {
4485 new_literal = 1;
4486 tmp = force_const_mem (Pmode, *label);
4487 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
4488 INSN_ADDRESSES_NEW (tmp, -1);
4489 annotate_constant_pool_refs (&PATTERN (tmp));
4490
4491 target = temp_reg;
4492 }
4493 else
4494 {
4495 new_literal = 1;
4496 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
4497 UNSPEC_LTREL_OFFSET);
4498 target = gen_rtx_CONST (Pmode, target);
4499 target = force_const_mem (Pmode, target);
4500 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
4501 INSN_ADDRESSES_NEW (tmp, -1);
4502 annotate_constant_pool_refs (&PATTERN (tmp));
4503
4504 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
4505 cfun->machine->base_reg),
4506 UNSPEC_LTREL_BASE);
4507 target = gen_rtx_PLUS (Pmode, temp_reg, target);
4508 }
4509
4510 if (!validate_change (insn, label, target, 0))
4511 abort ();
4512 }
4513
4514 return new_literal;
4515 }
4516
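/* Editorial note: the short 4-byte relative branch forms encode a
   signed 16-bit halfword offset, i.e. a reach of +-64KB from the
   branch site; only branches whose length attribute exceeds 4 bytes
   above need to be rewritten through the literal pool.  */
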
4517 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4518 Fix up MEMs as required. */
4519
4520 static void
4521 annotate_constant_pool_refs (rtx *x)
4522 {
4523 int i, j;
4524 const char *fmt;
4525
4526 if (GET_CODE (*x) == SYMBOL_REF
4527 && CONSTANT_POOL_ADDRESS_P (*x))
4528 abort ();
4529
4530 /* Literal pool references can only occur inside a MEM ... */
4531 if (GET_CODE (*x) == MEM)
4532 {
4533 rtx memref = XEXP (*x, 0);
4534
4535 if (GET_CODE (memref) == SYMBOL_REF
4536 && CONSTANT_POOL_ADDRESS_P (memref))
4537 {
4538 rtx base = cfun->machine->base_reg;
4539 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4540 UNSPEC_LTREF);
4541
4542 *x = replace_equiv_address (*x, addr);
4543 return;
4544 }
4545
4546 if (GET_CODE (memref) == CONST
4547 && GET_CODE (XEXP (memref, 0)) == PLUS
4548 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4549 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4550 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4551 {
4552 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4553 rtx sym = XEXP (XEXP (memref, 0), 0);
4554 rtx base = cfun->machine->base_reg;
4555 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4556 UNSPEC_LTREF);
4557
4558 *x = replace_equiv_address (*x, plus_constant (addr, off));
4559 return;
4560 }
4561 }
4562
4563 /* ... or a load-address type pattern. */
4564 if (GET_CODE (*x) == SET)
4565 {
4566 rtx addrref = SET_SRC (*x);
4567
4568 if (GET_CODE (addrref) == SYMBOL_REF
4569 && CONSTANT_POOL_ADDRESS_P (addrref))
4570 {
4571 rtx base = cfun->machine->base_reg;
4572 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
4573 UNSPEC_LTREF);
4574
4575 SET_SRC (*x) = addr;
4576 return;
4577 }
4578
4579 if (GET_CODE (addrref) == CONST
4580 && GET_CODE (XEXP (addrref, 0)) == PLUS
4581 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4582 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
4583 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
4584 {
4585 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4586 rtx sym = XEXP (XEXP (addrref, 0), 0);
4587 rtx base = cfun->machine->base_reg;
4588 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4589 UNSPEC_LTREF);
4590
4591 SET_SRC (*x) = plus_constant (addr, off);
4592 return;
4593 }
4594 }
4595
4596 /* Annotate LTREL_BASE as well. */
4597 if (GET_CODE (*x) == UNSPEC
4598 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4599 {
4600 rtx base = cfun->machine->base_reg;
4601 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
4602 UNSPEC_LTREL_BASE);
4603 return;
4604 }
4605
4606 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4607 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4608 {
4609 if (fmt[i] == 'e')
4610 {
4611 annotate_constant_pool_refs (&XEXP (*x, i));
4612 }
4613 else if (fmt[i] == 'E')
4614 {
4615 for (j = 0; j < XVECLEN (*x, i); j++)
4616 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
4617 }
4618 }
4619 }
4620
4621
4622 /* Find an annotated literal pool symbol referenced in RTX X,
4623 and store it at REF. Will abort if X contains references to
4624 more than one such pool symbol; multiple references to the same
4625 symbol are allowed, however.
4626
4627 The rtx pointed to by REF must be initialized to NULL_RTX
4628 by the caller before calling this routine. */
4629
4630 static void
4631 find_constant_pool_ref (rtx x, rtx *ref)
4632 {
4633 int i, j;
4634 const char *fmt;
4635
4636 /* Ignore LTREL_BASE references. */
4637 if (GET_CODE (x) == UNSPEC
4638 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4639 return;
4640 /* Likewise POOL_ENTRY insns. */
4641 if (GET_CODE (x) == UNSPEC_VOLATILE
4642 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
4643 return;
4644
4645 if (GET_CODE (x) == SYMBOL_REF
4646 && CONSTANT_POOL_ADDRESS_P (x))
4647 abort ();
4648
4649 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
4650 {
4651 rtx sym = XVECEXP (x, 0, 0);
4652 if (GET_CODE (sym) != SYMBOL_REF
4653 || !CONSTANT_POOL_ADDRESS_P (sym))
4654 abort ();
4655
4656 if (*ref == NULL_RTX)
4657 *ref = sym;
4658 else if (*ref != sym)
4659 abort ();
4660
4661 return;
4662 }
4663
4664 fmt = GET_RTX_FORMAT (GET_CODE (x));
4665 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4666 {
4667 if (fmt[i] == 'e')
4668 {
4669 find_constant_pool_ref (XEXP (x, i), ref);
4670 }
4671 else if (fmt[i] == 'E')
4672 {
4673 for (j = 0; j < XVECLEN (x, i); j++)
4674 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4675 }
4676 }
4677 }
4678
4679 /* Replace every reference to the annotated literal pool
4680 symbol REF in X by its base plus OFFSET. */
4681
4682 static void
4683 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
4684 {
4685 int i, j;
4686 const char *fmt;
4687
4688 if (*x == ref)
4689 abort ();
4690
4691 if (GET_CODE (*x) == UNSPEC
4692 && XINT (*x, 1) == UNSPEC_LTREF
4693 && XVECEXP (*x, 0, 0) == ref)
4694 {
4695 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
4696 return;
4697 }
4698
4699 if (GET_CODE (*x) == PLUS
4700 && GET_CODE (XEXP (*x, 1)) == CONST_INT
4701 && GET_CODE (XEXP (*x, 0)) == UNSPEC
4702 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
4703 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
4704 {
4705 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
4706 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
4707 return;
4708 }
4709
4710 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4711 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4712 {
4713 if (fmt[i] == 'e')
4714 {
4715 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
4716 }
4717 else if (fmt[i] == 'E')
4718 {
4719 for (j = 0; j < XVECLEN (*x, i); j++)
4720 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
4721 }
4722 }
4723 }
4724
4725 /* Check whether X contains an UNSPEC_LTREL_BASE.
4726 Return its constant pool symbol if found, NULL_RTX otherwise. */
4727
4728 static rtx
4729 find_ltrel_base (rtx x)
4730 {
4731 int i, j;
4732 const char *fmt;
4733
4734 if (GET_CODE (x) == UNSPEC
4735 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4736 return XVECEXP (x, 0, 0);
4737
4738 fmt = GET_RTX_FORMAT (GET_CODE (x));
4739 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4740 {
4741 if (fmt[i] == 'e')
4742 {
4743 rtx fnd = find_ltrel_base (XEXP (x, i));
4744 if (fnd)
4745 return fnd;
4746 }
4747 else if (fmt[i] == 'E')
4748 {
4749 for (j = 0; j < XVECLEN (x, i); j++)
4750 {
4751 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4752 if (fnd)
4753 return fnd;
4754 }
4755 }
4756 }
4757
4758 return NULL_RTX;
4759 }
4760
4761 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4762
4763 static void
4764 replace_ltrel_base (rtx *x)
4765 {
4766 int i, j;
4767 const char *fmt;
4768
4769 if (GET_CODE (*x) == UNSPEC
4770 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4771 {
4772 *x = XVECEXP (*x, 0, 1);
4773 return;
4774 }
4775
4776 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4777 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4778 {
4779 if (fmt[i] == 'e')
4780 {
4781 replace_ltrel_base (&XEXP (*x, i));
4782 }
4783 else if (fmt[i] == 'E')
4784 {
4785 for (j = 0; j < XVECLEN (*x, i); j++)
4786 replace_ltrel_base (&XVECEXP (*x, i, j));
4787 }
4788 }
4789 }
4790
4791
4792 /* We keep a list of constants which we have to add to internal
4793 constant tables in the middle of large functions. */
4794
4795 #define NR_C_MODES 7
4796 enum machine_mode constant_modes[NR_C_MODES] =
4797 {
4798 TImode,
4799 DFmode, DImode,
4800 SFmode, SImode,
4801 HImode,
4802 QImode
4803 };
4804
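/* Editorial note: the modes above are ordered by decreasing size
   (16, 8, 8, 4, 4, 2, 1 bytes), so that s390_dump_pool can emit pool
   entries in array order and thereby satisfy each constant's
   alignment requirement automatically.  */
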
4805 struct constant
4806 {
4807 struct constant *next;
4808 rtx value;
4809 rtx label;
4810 };
4811
4812 struct constant_pool
4813 {
4814 struct constant_pool *next;
4815 rtx first_insn;
4816 rtx pool_insn;
4817 bitmap insns;
4818
4819 struct constant *constants[NR_C_MODES];
4820 struct constant *execute;
4821 rtx label;
4822 int size;
4823 };
4824
4825 static struct constant_pool * s390_mainpool_start (void);
4826 static void s390_mainpool_finish (struct constant_pool *);
4827 static void s390_mainpool_cancel (struct constant_pool *);
4828
4829 static struct constant_pool * s390_chunkify_start (void);
4830 static void s390_chunkify_finish (struct constant_pool *);
4831 static void s390_chunkify_cancel (struct constant_pool *);
4832
4833 static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4834 static void s390_end_pool (struct constant_pool *, rtx);
4835 static void s390_add_pool_insn (struct constant_pool *, rtx);
4836 static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4837 static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4838 static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
4839 static void s390_add_execute (struct constant_pool *, rtx);
4840 static rtx s390_find_execute (struct constant_pool *, rtx);
4841 static rtx s390_execute_label (rtx);
4842 static rtx s390_execute_target (rtx);
4843 static void s390_dump_pool (struct constant_pool *, bool);
4844 static void s390_dump_execute (struct constant_pool *);
4845 static struct constant_pool *s390_alloc_pool (void);
4846 static void s390_free_pool (struct constant_pool *);
4847
4848 /* Create new constant pool covering instructions starting at INSN
4849 and chain it to the end of POOL_LIST. */
4850
4851 static struct constant_pool *
4852 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4853 {
4854 struct constant_pool *pool, **prev;
4855
4856 pool = s390_alloc_pool ();
4857 pool->first_insn = insn;
4858
4859 for (prev = pool_list; *prev; prev = &(*prev)->next)
4860 ;
4861 *prev = pool;
4862
4863 return pool;
4864 }
4865
4866 /* End range of instructions covered by POOL at INSN and emit
4867 placeholder insn representing the pool. */
4868
4869 static void
4870 s390_end_pool (struct constant_pool *pool, rtx insn)
4871 {
4872 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4873
4874 if (!insn)
4875 insn = get_last_insn ();
4876
4877 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4878 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4879 }
4880
4881 /* Add INSN to the list of insns covered by POOL. */
4882
4883 static void
4884 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
4885 {
4886 bitmap_set_bit (pool->insns, INSN_UID (insn));
4887 }
4888
4889 /* Return pool out of POOL_LIST that covers INSN. */
4890
4891 static struct constant_pool *
4892 s390_find_pool (struct constant_pool *pool_list, rtx insn)
4893 {
4894 struct constant_pool *pool;
4895
4896 for (pool = pool_list; pool; pool = pool->next)
4897 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4898 break;
4899
4900 return pool;
4901 }
4902
4903 /* Add constant VAL of mode MODE to the constant pool POOL. */
4904
4905 static void
4906 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
4907 {
4908 struct constant *c;
4909 int i;
4910
4911 for (i = 0; i < NR_C_MODES; i++)
4912 if (constant_modes[i] == mode)
4913 break;
4914 if (i == NR_C_MODES)
4915 abort ();
4916
4917 for (c = pool->constants[i]; c != NULL; c = c->next)
4918 if (rtx_equal_p (val, c->value))
4919 break;
4920
4921 if (c == NULL)
4922 {
4923 c = (struct constant *) xmalloc (sizeof *c);
4924 c->value = val;
4925 c->label = gen_label_rtx ();
4926 c->next = pool->constants[i];
4927 pool->constants[i] = c;
4928 pool->size += GET_MODE_SIZE (mode);
4929 }
4930 }
4931
4932 /* Find constant VAL of mode MODE in the constant pool POOL.
4933 Return an RTX describing the distance from the start of
4934 the pool to the location of the constant. */
4935
4936 static rtx
4937 s390_find_constant (struct constant_pool *pool, rtx val,
4938 enum machine_mode mode)
4939 {
4940 struct constant *c;
4941 rtx offset;
4942 int i;
4943
4944 for (i = 0; i < NR_C_MODES; i++)
4945 if (constant_modes[i] == mode)
4946 break;
4947 if (i == NR_C_MODES)
4948 abort ();
4949
4950 for (c = pool->constants[i]; c != NULL; c = c->next)
4951 if (rtx_equal_p (val, c->value))
4952 break;
4953
4954 if (c == NULL)
4955 abort ();
4956
4957 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4958 gen_rtx_LABEL_REF (Pmode, pool->label));
4959 offset = gen_rtx_CONST (Pmode, offset);
4960 return offset;
4961 }
4962
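/* Editorial note: the RTX returned above has the shape

       (const (minus (label_ref Lconst) (label_ref Lpool)))

   i.e. the byte offset of the constant's label from the pool base
   label, which becomes a known value once both labels are placed.  */
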
4963 /* Add execute target for INSN to the constant pool POOL. */
4964
4965 static void
4966 s390_add_execute (struct constant_pool *pool, rtx insn)
4967 {
4968 struct constant *c;
4969
4970 for (c = pool->execute; c != NULL; c = c->next)
4971 if (INSN_UID (insn) == INSN_UID (c->value))
4972 break;
4973
4974 if (c == NULL)
4975 {
4976 rtx label = s390_execute_label (insn);
4977 gcc_assert (label);
4978
4979 c = (struct constant *) xmalloc (sizeof *c);
4980 c->value = insn;
4981 c->label = label == const0_rtx ? gen_label_rtx () : XEXP (label, 0);
4982 c->next = pool->execute;
4983 pool->execute = c;
4984 pool->size += label == const0_rtx ? 6 : 0;
4985 }
4986 }
4987
4988 /* Find execute target for INSN in the constant pool POOL.
4989 Return an RTX describing the distance from the start of
4990 the pool to the location of the execute target. */
4991
4992 static rtx
4993 s390_find_execute (struct constant_pool *pool, rtx insn)
4994 {
4995 struct constant *c;
4996 rtx offset;
4997
4998 for (c = pool->execute; c != NULL; c = c->next)
4999 if (INSN_UID (insn) == INSN_UID (c->value))
5000 break;
5001
5002 if (c == NULL)
5003 abort ();
5004
5005 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5006 gen_rtx_LABEL_REF (Pmode, pool->label));
5007 offset = gen_rtx_CONST (Pmode, offset);
5008 return offset;
5009 }
5010
5011 /* Check whether INSN is an execute. Return the label_ref to its
5012 execute target template if so, NULL_RTX otherwise. */
5013
5014 static rtx
5015 s390_execute_label (rtx insn)
5016 {
5017 if (GET_CODE (insn) == INSN
5018 && GET_CODE (PATTERN (insn)) == PARALLEL
5019 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5020 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5021 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5022
5023 return NULL_RTX;
5024 }
5025
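/* Editorial sketch of the pattern matched above:

       (parallel
         [(unspec [op0 op1 (label_ref Ltemplate)] UNSPEC_EXECUTE)
          ...])

   element 2 of the UNSPEC vector is the returned label_ref, or
   const0_rtx for an execute whose target template lives in the
   literal pool.  */
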
5026 /* For an execute INSN, extract the execute target template. */
5027
5028 static rtx
5029 s390_execute_target (rtx insn)
5030 {
5031 rtx pattern = PATTERN (insn);
5032 gcc_assert (s390_execute_label (insn));
5033
5034 if (XVECLEN (pattern, 0) == 2)
5035 {
5036 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5037 }
5038 else
5039 {
5040 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5041 int i;
5042
5043 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5044 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5045
5046 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5047 }
5048
5049 return pattern;
5050 }
5051
5052 /* Indicate that INSN cannot be duplicated. This is the case for
5053 execute insns that carry a unique label. */
5054
5055 static bool
5056 s390_cannot_copy_insn_p (rtx insn)
5057 {
5058 rtx label = s390_execute_label (insn);
5059 return label && label != const0_rtx;
5060 }
5061
5062 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5063 do not emit the pool base label. */
5064
5065 static void
5066 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5067 {
5068 struct constant *c;
5069 rtx insn = pool->pool_insn;
5070 int i;
5071
5072 /* Switch to rodata section. */
5073 if (TARGET_CPU_ZARCH)
5074 {
5075 insn = emit_insn_after (gen_pool_section_start (), insn);
5076 INSN_ADDRESSES_NEW (insn, -1);
5077 }
5078
5079 /* Ensure minimum pool alignment. */
5080 if (TARGET_CPU_ZARCH)
5081 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5082 else
5083 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5084 INSN_ADDRESSES_NEW (insn, -1);
5085
5086 /* Emit pool base label. */
5087 if (!remote_label)
5088 {
5089 insn = emit_label_after (pool->label, insn);
5090 INSN_ADDRESSES_NEW (insn, -1);
5091 }
5092
5093 /* Dump constants in descending alignment requirement order,
5094 ensuring proper alignment for every constant. */
5095 for (i = 0; i < NR_C_MODES; i++)
5096 for (c = pool->constants[i]; c; c = c->next)
5097 {
5098 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5099 rtx value = c->value;
5100 if (GET_CODE (value) == CONST
5101 && GET_CODE (XEXP (value, 0)) == UNSPEC
5102 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5103 && XVECLEN (XEXP (value, 0), 0) == 1)
5104 {
5105 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5106 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5107 value = gen_rtx_CONST (VOIDmode, value);
5108 }
5109
5110 insn = emit_label_after (c->label, insn);
5111 INSN_ADDRESSES_NEW (insn, -1);
5112
5113 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5114 gen_rtvec (1, value),
5115 UNSPECV_POOL_ENTRY);
5116 insn = emit_insn_after (value, insn);
5117 INSN_ADDRESSES_NEW (insn, -1);
5118 }
5119
5120 /* Ensure minimum alignment for instructions. */
5121 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5122 INSN_ADDRESSES_NEW (insn, -1);
5123
5124 /* Output in-pool execute template insns. */
5125 for (c = pool->execute; c; c = c->next)
5126 {
5127 if (s390_execute_label (c->value) != const0_rtx)
5128 continue;
5129
5130 insn = emit_label_after (c->label, insn);
5131 INSN_ADDRESSES_NEW (insn, -1);
5132
5133 insn = emit_insn_after (s390_execute_target (c->value), insn);
5134 INSN_ADDRESSES_NEW (insn, -1);
5135 }
5136
5137 /* Switch back to previous section. */
5138 if (TARGET_CPU_ZARCH)
5139 {
5140 insn = emit_insn_after (gen_pool_section_end (), insn);
5141 INSN_ADDRESSES_NEW (insn, -1);
5142 }
5143
5144 insn = emit_barrier_after (insn);
5145 INSN_ADDRESSES_NEW (insn, -1);
5146
5147 /* Remove placeholder insn. */
5148 remove_insn (pool->pool_insn);
5149
5150 /* Output out-of-pool execute template insns. */
5151 s390_dump_execute (pool);
5152 }
5153
5154 /* Dump out the out-of-pool execute template insns in POOL
5155 at the end of the instruction stream. */
5156
5157 static void
5158 s390_dump_execute (struct constant_pool *pool)
5159 {
5160 struct constant *c;
5161 rtx insn;
5162
5163 for (c = pool->execute; c; c = c->next)
5164 {
5165 if (s390_execute_label (c->value) == const0_rtx)
5166 continue;
5167
5168 insn = emit_label (c->label);
5169 INSN_ADDRESSES_NEW (insn, -1);
5170
5171 insn = emit_insn (s390_execute_target (c->value));
5172 INSN_ADDRESSES_NEW (insn, -1);
5173 }
5174 }
5175
5176 /* Allocate new constant_pool structure. */
5177
5178 static struct constant_pool *
5179 s390_alloc_pool (void)
5180 {
5181 struct constant_pool *pool;
5182 int i;
5183
5184 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5185 pool->next = NULL;
5186 for (i = 0; i < NR_C_MODES; i++)
5187 pool->constants[i] = NULL;
5188
5189 pool->execute = NULL;
5190 pool->label = gen_label_rtx ();
5191 pool->first_insn = NULL_RTX;
5192 pool->pool_insn = NULL_RTX;
5193 pool->insns = BITMAP_XMALLOC ();
5194 pool->size = 0;
5195
5196 return pool;
5197 }
5198
5199 /* Free all memory used by POOL. */
5200
5201 static void
5202 s390_free_pool (struct constant_pool *pool)
5203 {
5204 struct constant *c, *next;
5205 int i;
5206
5207 for (i = 0; i < NR_C_MODES; i++)
5208 for (c = pool->constants[i]; c; c = next)
5209 {
5210 next = c->next;
5211 free (c);
5212 }
5213
5214 for (c = pool->execute; c; c = next)
5215 {
5216 next = c->next;
5217 free (c);
5218 }
5219
5220 BITMAP_XFREE (pool->insns);
5221 free (pool);
5222 }
5223
5224
5225 /* Collect main literal pool. Return NULL on overflow. */
5226
5227 static struct constant_pool *
5228 s390_mainpool_start (void)
5229 {
5230 struct constant_pool *pool;
5231 rtx insn;
5232
5233 pool = s390_alloc_pool ();
5234
5235 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5236 {
5237 if (GET_CODE (insn) == INSN
5238 && GET_CODE (PATTERN (insn)) == SET
5239 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5240 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5241 {
5242 if (pool->pool_insn)
5243 abort ();
5244 pool->pool_insn = insn;
5245 }
5246
5247 if (s390_execute_label (insn))
5248 {
5249 s390_add_execute (pool, insn);
5250 }
5251 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5252 {
5253 rtx pool_ref = NULL_RTX;
5254 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5255 if (pool_ref)
5256 {
5257 rtx constant = get_pool_constant (pool_ref);
5258 enum machine_mode mode = get_pool_mode (pool_ref);
5259 s390_add_constant (pool, constant, mode);
5260 }
5261 }
5262 }
5263
5264 if (!pool->pool_insn && pool->size > 0)
5265 abort ();
5266
5267 if (pool->size >= 4096)
5268 {
5269 /* We're going to chunkify the pool, so remove the main
5270 pool placeholder insn. */
5271 remove_insn (pool->pool_insn);
5272
5273 s390_free_pool (pool);
5274 pool = NULL;
5275 }
5276
5277 return pool;
5278 }
5279
5280 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5281 Modify the current function to output the pool constants as well as
5282 the pool register setup instruction. */
5283
5284 static void
5285 s390_mainpool_finish (struct constant_pool *pool)
5286 {
5287 rtx base_reg = cfun->machine->base_reg;
5288 rtx insn;
5289
5290 /* If the pool is empty, we're done. */
5291 if (pool->size == 0)
5292 {
5293 /* However, we may have out-of-pool execute templates. */
5294 s390_dump_execute (pool);
5295
5296 /* We don't actually need a base register after all. */
5297 cfun->machine->base_reg = NULL_RTX;
5298
5299 if (pool->pool_insn)
5300 remove_insn (pool->pool_insn);
5301 s390_free_pool (pool);
5302 return;
5303 }
5304
5305 /* We need correct insn addresses. */
5306 shorten_branches (get_insns ());
5307
5308 /* On zSeries, we use a LARL to load the pool register. The pool is
5309 located in the .rodata section, so we emit it after the function. */
5310 if (TARGET_CPU_ZARCH)
5311 {
5312 insn = gen_main_base_64 (base_reg, pool->label);
5313 insn = emit_insn_after (insn, pool->pool_insn);
5314 INSN_ADDRESSES_NEW (insn, -1);
5315 remove_insn (pool->pool_insn);
5316
5317 insn = get_last_insn ();
5318 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5319 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5320
5321 s390_dump_pool (pool, 0);
5322 }
5323
5324 /* On S/390, if the total size of the function's code plus literal pool
5325 does not exceed 4096 bytes, we use BASR to set up a function base
5326 pointer, and emit the literal pool at the end of the function. */
5327 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5328 + pool->size + 8 /* alignment slop */ < 4096)
5329 {
5330 insn = gen_main_base_31_small (base_reg, pool->label);
5331 insn = emit_insn_after (insn, pool->pool_insn);
5332 INSN_ADDRESSES_NEW (insn, -1);
5333 remove_insn (pool->pool_insn);
5334
5335 insn = emit_label_after (pool->label, insn);
5336 INSN_ADDRESSES_NEW (insn, -1);
5337
5338 insn = get_last_insn ();
5339 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5340 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5341
5342 s390_dump_pool (pool, 1);
5343 }
5344
5345 /* Otherwise, we emit an inline literal pool and use BASR to branch
5346 over it, setting up the pool register at the same time. */
5347 else
5348 {
5349 rtx pool_end = gen_label_rtx ();
5350
5351 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5352 insn = emit_insn_after (insn, pool->pool_insn);
5353 INSN_ADDRESSES_NEW (insn, -1);
5354 remove_insn (pool->pool_insn);
5355
5356 insn = emit_label_after (pool->label, insn);
5357 INSN_ADDRESSES_NEW (insn, -1);
5358
5359 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5360 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5361
5362 insn = emit_label_after (pool_end, pool->pool_insn);
5363 INSN_ADDRESSES_NEW (insn, -1);
5364
5365 s390_dump_pool (pool, 1);
5366 }
5367
5368
5369 /* Replace all literal pool references. */
5370
5371 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5372 {
5373 if (INSN_P (insn))
5374 replace_ltrel_base (&PATTERN (insn));
5375
5376 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5377 {
5378 rtx addr, pool_ref = NULL_RTX;
5379 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5380 if (pool_ref)
5381 {
5382 if (s390_execute_label (insn))
5383 addr = s390_find_execute (pool, insn);
5384 else
5385 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5386 get_pool_mode (pool_ref));
5387
5388 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5389 INSN_CODE (insn) = -1;
5390 }
5391 }
5392 }
5393
5394
5395 /* Free the pool. */
5396 s390_free_pool (pool);
5397 }
5398
5399 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5400 We have decided we cannot use this pool, so revert all changes
5401 to the current function that were done by s390_mainpool_start. */
5402 static void
5403 s390_mainpool_cancel (struct constant_pool *pool)
5404 {
5405 /* We didn't actually change the instruction stream, so simply
5406 free the pool memory. */
5407 s390_free_pool (pool);
5408 }
5409
5410
5411 /* Chunkify the literal pool. */
5412
5413 #define S390_POOL_CHUNK_MIN 0xc00
5414 #define S390_POOL_CHUNK_MAX 0xe00
5415
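/* Editorial gloss on the constants below: a pool chunk must remain
   addressable through the 12-bit unsigned displacement (0 .. 4095
   bytes) of base+displacement operands.  MAX = 0xe00 (3584) leaves
   slack below that limit for alignment padding and execute templates,
   while chunks are not closed before MIN = 0xc00 (3072) to avoid
   creating many small pools.  */
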
5416 static struct constant_pool *
5417 s390_chunkify_start (void)
5418 {
5419 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5420 int extra_size = 0;
5421 bitmap far_labels;
5422 rtx pending_ltrel = NULL_RTX;
5423 rtx insn;
5424
5425 rtx (*gen_reload_base) (rtx, rtx) =
5426 TARGET_CPU_ZARCH ? gen_reload_base_64 : gen_reload_base_31;
5427
5428
5429 /* We need correct insn addresses. */
5430
5431 shorten_branches (get_insns ());
5432
5433 /* Scan all insns and move literals to pool chunks. */
5434
5435 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5436 {
5437 /* Check for pending LTREL_BASE. */
5438 if (INSN_P (insn))
5439 {
5440 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5441 if (ltrel_base)
5442 {
5443 if (ltrel_base == pending_ltrel)
5444 pending_ltrel = NULL_RTX;
5445 else
5446 abort ();
5447 }
5448 }
5449
5450 if (s390_execute_label (insn))
5451 {
5452 if (!curr_pool)
5453 curr_pool = s390_start_pool (&pool_list, insn);
5454
5455 s390_add_execute (curr_pool, insn);
5456 s390_add_pool_insn (curr_pool, insn);
5457 }
5458 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5459 {
5460 rtx pool_ref = NULL_RTX;
5461 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5462 if (pool_ref)
5463 {
5464 rtx constant = get_pool_constant (pool_ref);
5465 enum machine_mode mode = get_pool_mode (pool_ref);
5466
5467 if (!curr_pool)
5468 curr_pool = s390_start_pool (&pool_list, insn);
5469
5470 s390_add_constant (curr_pool, constant, mode);
5471 s390_add_pool_insn (curr_pool, insn);
5472
5473 /* Don't split the pool chunk between a LTREL_OFFSET load
5474 and the corresponding LTREL_BASE. */
5475 if (GET_CODE (constant) == CONST
5476 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5477 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5478 {
5479 if (pending_ltrel)
5480 abort ();
5481 pending_ltrel = pool_ref;
5482 }
5483 }
5484 }
5485
5486 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5487 {
5488 if (curr_pool)
5489 s390_add_pool_insn (curr_pool, insn);
5490 /* An LTREL_BASE must follow within the same basic block. */
5491 if (pending_ltrel)
5492 abort ();
5493 }
5494
5495 if (!curr_pool
5496 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5497 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5498 continue;
5499
5500 if (TARGET_CPU_ZARCH)
5501 {
5502 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5503 continue;
5504
5505 s390_end_pool (curr_pool, NULL_RTX);
5506 curr_pool = NULL;
5507 }
5508 else
5509 {
5510 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5511 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5512 + extra_size;
5513
5514 /* We will later have to insert base register reload insns.
5515 Those will have an effect on code size, which we need to
5516 consider here. This calculation makes rather pessimistic
5517 worst-case assumptions. */
5518 if (GET_CODE (insn) == CODE_LABEL)
5519 extra_size += 6;
5520
5521 if (chunk_size < S390_POOL_CHUNK_MIN
5522 && curr_pool->size < S390_POOL_CHUNK_MIN)
5523 continue;
5524
5525 /* Pool chunks can only be inserted after BARRIERs ... */
5526 if (GET_CODE (insn) == BARRIER)
5527 {
5528 s390_end_pool (curr_pool, insn);
5529 curr_pool = NULL;
5530 extra_size = 0;
5531 }
5532
5533 /* ... so if we don't find one in time, create one. */
5534 else if ((chunk_size > S390_POOL_CHUNK_MAX
5535 || curr_pool->size > S390_POOL_CHUNK_MAX))
5536 {
5537 rtx label, jump, barrier;
5538
5539 /* We can insert the barrier only after a 'real' insn. */
5540 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5541 continue;
5542 if (get_attr_length (insn) == 0)
5543 continue;
5544
5545 /* Don't separate LTREL_BASE from the corresponding
5546 LTREL_OFFSET load. */
5547 if (pending_ltrel)
5548 continue;
5549
5550 label = gen_label_rtx ();
5551 jump = emit_jump_insn_after (gen_jump (label), insn);
5552 barrier = emit_barrier_after (jump);
5553 insn = emit_label_after (label, barrier);
5554 JUMP_LABEL (jump) = label;
5555 LABEL_NUSES (label) = 1;
5556
5557 INSN_ADDRESSES_NEW (jump, -1);
5558 INSN_ADDRESSES_NEW (barrier, -1);
5559 INSN_ADDRESSES_NEW (insn, -1);
5560
5561 s390_end_pool (curr_pool, barrier);
5562 curr_pool = NULL;
5563 extra_size = 0;
5564 }
5565 }
5566 }
5567
5568 if (curr_pool)
5569 s390_end_pool (curr_pool, NULL_RTX);
5570 if (pending_ltrel)
5571 abort ();
5572
5573
5574 /* Find all labels that are branched into
5575 from an insn belonging to a different chunk. */
5576
5577 far_labels = BITMAP_XMALLOC ();
5578
5579 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5580 {
5581 /* Labels marked with LABEL_PRESERVE_P can be the target
5582 of non-local jumps, so we have to mark them.
5583 The same holds for named labels.
5584
5585 Don't do that, however, if it is the label before
5586 a jump table. */
5587
5588 if (GET_CODE (insn) == CODE_LABEL
5589 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
5590 {
5591 rtx vec_insn = next_real_insn (insn);
5592 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5593 PATTERN (vec_insn) : NULL_RTX;
5594 if (!vec_pat
5595 || !(GET_CODE (vec_pat) == ADDR_VEC
5596 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5597 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
5598 }
5599
5600 /* If we have a direct jump (conditional or unconditional)
5601 or a casesi jump, check all potential targets. */
5602 else if (GET_CODE (insn) == JUMP_INSN)
5603 {
5604 rtx pat = PATTERN (insn);
5605 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5606 pat = XVECEXP (pat, 0, 0);
5607
5608 if (GET_CODE (pat) == SET)
5609 {
5610 rtx label = JUMP_LABEL (insn);
5611 if (label)
5612 {
5613 if (s390_find_pool (pool_list, label)
5614 != s390_find_pool (pool_list, insn))
5615 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5616 }
5617 }
5618 else if (GET_CODE (pat) == PARALLEL
5619 && XVECLEN (pat, 0) == 2
5620 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5621 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
5622 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
5623 {
5624 /* Find the jump table used by this casesi jump. */
5625 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
5626 rtx vec_insn = next_real_insn (vec_label);
5627 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5628 PATTERN (vec_insn) : NULL_RTX;
5629 if (vec_pat
5630 && (GET_CODE (vec_pat) == ADDR_VEC
5631 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5632 {
5633 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
5634
5635 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
5636 {
5637 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
5638
5639 if (s390_find_pool (pool_list, label)
5640 != s390_find_pool (pool_list, insn))
5641 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5642 }
5643 }
5644 }
5645 }
5646 }
5647
5648 /* Insert base register reload insns before every pool. */
5649
5650 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5651 {
5652 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5653 curr_pool->label);
5654 rtx insn = curr_pool->first_insn;
5655 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
5656 }
5657
5658 /* Insert base register reload insns at every far label. */
5659
5660 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5661 if (GET_CODE (insn) == CODE_LABEL
5662 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
5663 {
5664 struct constant_pool *pool = s390_find_pool (pool_list, insn);
5665 if (pool)
5666 {
5667 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5668 pool->label);
5669 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
5670 }
5671 }
5672
5673
5674 BITMAP_XFREE (far_labels);
5675
5676
5677 /* Recompute insn addresses. */
5678
5679 init_insn_lengths ();
5680 shorten_branches (get_insns ());
5681
5682 return pool_list;
5683 }
5684
5685 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5686 After we have decided to use this list, finish implementing
5687 all changes to the current function as required. */
5688
5689 static void
5690 s390_chunkify_finish (struct constant_pool *pool_list)
5691 {
5692 struct constant_pool *curr_pool = NULL;
5693 rtx insn;
5694
5695
5696 /* Replace all literal pool references. */
5697
5698 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5699 {
5700 if (INSN_P (insn))
5701 replace_ltrel_base (&PATTERN (insn));
5702
5703 curr_pool = s390_find_pool (pool_list, insn);
5704 if (!curr_pool)
5705 continue;
5706
5707 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5708 {
5709 rtx addr, pool_ref = NULL_RTX;
5710 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5711 if (pool_ref)
5712 {
5713 if (s390_execute_label (insn))
5714 addr = s390_find_execute (curr_pool, insn);
5715 else
5716 addr = s390_find_constant (curr_pool,
5717 get_pool_constant (pool_ref),
5718 get_pool_mode (pool_ref));
5719
5720 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5721 INSN_CODE (insn) = -1;
5722 }
5723 }
5724 }
5725
5726 /* Dump out all literal pools. */
5727
5728 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5729 s390_dump_pool (curr_pool, 0);
5730
5731 /* Free pool list. */
5732
5733 while (pool_list)
5734 {
5735 struct constant_pool *next = pool_list->next;
5736 s390_free_pool (pool_list);
5737 pool_list = next;
5738 }
5739 }
5740
5741 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5742 We have decided we cannot use this list, so revert all changes
5743 to the current function that were done by s390_chunkify_start. */
5744
5745 static void
5746 s390_chunkify_cancel (struct constant_pool *pool_list)
5747 {
5748 struct constant_pool *curr_pool = NULL;
5749 rtx insn;
5750
5751 /* Remove all pool placeholder insns. */
5752
5753 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5754 {
5755 /* Did we insert an extra barrier? Remove it. */
5756 rtx barrier = PREV_INSN (curr_pool->pool_insn);
5757 rtx jump = barrier ? PREV_INSN (barrier) : NULL_RTX;
5758 rtx label = NEXT_INSN (curr_pool->pool_insn);
5759
5760 if (jump && GET_CODE (jump) == JUMP_INSN
5761 && barrier && GET_CODE (barrier) == BARRIER
5762 && label && GET_CODE (label) == CODE_LABEL
5763 && GET_CODE (PATTERN (jump)) == SET
5764 && SET_DEST (PATTERN (jump)) == pc_rtx
5765 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
5766 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
5767 {
5768 remove_insn (jump);
5769 remove_insn (barrier);
5770 remove_insn (label);
5771 }
5772
5773 remove_insn (curr_pool->pool_insn);
5774 }
5775
5776 /* Remove all base register reload insns. */
5777
5778 for (insn = get_insns (); insn; )
5779 {
5780 rtx next_insn = NEXT_INSN (insn);
5781
5782 if (GET_CODE (insn) == INSN
5783 && GET_CODE (PATTERN (insn)) == SET
5784 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
5785 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
5786 remove_insn (insn);
5787
5788 insn = next_insn;
5789 }
5790
5791 /* Free pool list. */
5792
5793 while (pool_list)
5794 {
5795 struct constant_pool *next = pool_list->next;
5796 s390_free_pool (pool_list);
5797 pool_list = next;
5798 }
5799 }
5800
5801
5802 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
5803
5804 void
5805 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
5806 {
5807 REAL_VALUE_TYPE r;
5808
5809 switch (GET_MODE_CLASS (mode))
5810 {
5811 case MODE_FLOAT:
5812 if (GET_CODE (exp) != CONST_DOUBLE)
5813 abort ();
5814
5815 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
5816 assemble_real (r, mode, align);
5817 break;
5818
5819 case MODE_INT:
5820 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
5821 break;
5822
5823 default:
5824 abort ();
5825 }
5826 }
5827
5828
5829 /* Rework the prologue/epilogue to avoid saving/restoring
5830 registers unnecessarily. */
5831
5832 static void
5833 s390_optimize_prologue (void)
5834 {
5835 rtx insn, new_insn, next_insn;
5836
5837 /* Do a final recompute of the frame-related data. */
5838
5839 s390_update_frame_layout ();
5840
5841 /* If all special registers are in fact used, there's nothing we
5842 can do, so no point in walking the insn list. */
5843
5844 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
5845 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
5846 && (TARGET_CPU_ZARCH
5847 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
5848 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
5849 return;
5850
5851 /* Search for prologue/epilogue insns and replace them. */
5852
5853 for (insn = get_insns (); insn; insn = next_insn)
5854 {
5855 int first, last, off;
5856 rtx set, base, offset;
5857
5858 next_insn = NEXT_INSN (insn);
5859
5860 if (GET_CODE (insn) != INSN)
5861 continue;
5862
5863 if (GET_CODE (PATTERN (insn)) == PARALLEL
5864 && store_multiple_operation (PATTERN (insn), VOIDmode))
5865 {
5866 set = XVECEXP (PATTERN (insn), 0, 0);
5867 first = REGNO (SET_SRC (set));
5868 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5869 offset = const0_rtx;
5870 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5871 off = INTVAL (offset);
5872
5873 if (GET_CODE (base) != REG || off < 0)
5874 continue;
5875 if (REGNO (base) != STACK_POINTER_REGNUM
5876 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5877 continue;
5878 if (first > BASE_REGNUM || last < BASE_REGNUM)
5879 continue;
5880
5881 if (cfun_frame_layout.first_save_gpr != -1)
5882 {
5883 new_insn = save_gprs (base,
5884 off + (cfun_frame_layout.first_save_gpr
5885 - first) * UNITS_PER_WORD,
5886 cfun_frame_layout.first_save_gpr,
5887 cfun_frame_layout.last_save_gpr);
5888 new_insn = emit_insn_before (new_insn, insn);
5889 INSN_ADDRESSES_NEW (new_insn, -1);
5890 }
5891
5892 remove_insn (insn);
5893 continue;
5894 }
5895
5896 if (GET_CODE (PATTERN (insn)) == SET
5897 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
5898 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
5899 || (!TARGET_CPU_ZARCH
5900 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
5901 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
5902 {
5903 set = PATTERN (insn);
5904 first = REGNO (SET_SRC (set));
5905 offset = const0_rtx;
5906 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5907 off = INTVAL (offset);
5908
5909 if (GET_CODE (base) != REG || off < 0)
5910 continue;
5911 if (REGNO (base) != STACK_POINTER_REGNUM
5912 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5913 continue;
5914 if (cfun_frame_layout.first_save_gpr != -1)
5915 {
5916 new_insn = save_gprs (base,
5917 off + (cfun_frame_layout.first_save_gpr
5918 - first) * UNITS_PER_WORD,
5919 cfun_frame_layout.first_save_gpr,
5920 cfun_frame_layout.last_save_gpr);
5921 new_insn = emit_insn_before (new_insn, insn);
5922 INSN_ADDRESSES_NEW (new_insn, -1);
5923 }
5924
5925 remove_insn (insn);
5926 continue;
5927 }
5928
5929 if (GET_CODE (PATTERN (insn)) == PARALLEL
5930 && load_multiple_operation (PATTERN (insn), VOIDmode))
5931 {
5932 set = XVECEXP (PATTERN (insn), 0, 0);
5933 first = REGNO (SET_DEST (set));
5934 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5935 offset = const0_rtx;
5936 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5937 off = INTVAL (offset);
5938
5939 if (GET_CODE (base) != REG || off < 0)
5940 continue;
5941 if (REGNO (base) != STACK_POINTER_REGNUM
5942 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5943 continue;
5944 if (first > BASE_REGNUM || last < BASE_REGNUM)
5945 continue;
5946
5947 if (cfun_frame_layout.first_restore_gpr != -1)
5948 {
5949 new_insn = restore_gprs (base,
5950 off + (cfun_frame_layout.first_restore_gpr
5951 - first) * UNITS_PER_WORD,
5952 cfun_frame_layout.first_restore_gpr,
5953 cfun_frame_layout.last_restore_gpr);
5954 new_insn = emit_insn_before (new_insn, insn);
5955 INSN_ADDRESSES_NEW (new_insn, -1);
5956 }
5957
5958 remove_insn (insn);
5959 continue;
5960 }
5961
5962 if (GET_CODE (PATTERN (insn)) == SET
5963 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5964 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
5965 || (!TARGET_CPU_ZARCH
5966 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
5967 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
5968 {
5969 set = PATTERN (insn);
5970 first = REGNO (SET_DEST (set));
5971 offset = const0_rtx;
5972 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5973 off = INTVAL (offset);
5974
5975 if (GET_CODE (base) != REG || off < 0)
5976 continue;
5977 if (REGNO (base) != STACK_POINTER_REGNUM
5978 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5979 continue;
5980 if (cfun_frame_layout.first_restore_gpr != -1)
5981 {
5982 new_insn = restore_gprs (base,
5983 off + (cfun_frame_layout.first_restore_gpr
5984 - first) * UNITS_PER_WORD,
5985 cfun_frame_layout.first_restore_gpr,
5986 cfun_frame_layout.last_restore_gpr);
5987 new_insn = emit_insn_before (new_insn, insn);
5988 INSN_ADDRESSES_NEW (new_insn, -1);
5989 }
5990
5991 remove_insn (insn);
5992 continue;
5993 }
5994 }
5995 }
5996
5997 /* Perform machine-dependent processing. */
5998
5999 static void
6000 s390_reorg (void)
6001 {
6002 bool pool_overflow = false;
6003
6004 /* Make sure all splits have been performed; splits after
6005 machine_dependent_reorg might confuse insn length counts. */
6006 split_all_insns_noflow ();
6007
6008
6009 /* Install the main literal pool and the associated base
6010 register load insns.
6011
6012 In addition, there are two problematic situations we need
6013 to correct:
6014
6015 - the literal pool might be > 4096 bytes in size, so that
6016 some of its elements cannot be directly accessed
6017
6018 - a branch target might be > 64K away from the branch, so that
6019 it is not possible to use a PC-relative instruction.
6020
6021 To fix those, we split the single literal pool into multiple
6022 pool chunks, reloading the pool base register at various
6023 points throughout the function to ensure it always points to
6024 the pool chunk the following code expects, and / or replace
6025 PC-relative branches by absolute branches.
6026
6027 However, the two problems are interdependent: splitting the
6028 literal pool can move a branch further away from its target,
6029 causing the 64K limit to overflow, and on the other hand,
6030 replacing a PC-relative branch by an absolute branch means
6031 we need to put the branch target address into the literal
6032 pool, possibly causing it to overflow.
6033
6034 So, we loop trying to fix up both problems until we manage
6035 to satisfy both conditions at the same time. Note that the
6036 loop is guaranteed to terminate as every pass of the loop
6037 strictly decreases the total number of PC-relative branches
6038 in the function. (This is not completely true as there
6039 might be branch-over-pool insns introduced by chunkify_start.
6040 Those never need to be split however.) */
6041
6042 for (;;)
6043 {
6044 struct constant_pool *pool = NULL;
6045
6046 /* Collect the literal pool. */
6047 if (!pool_overflow)
6048 {
6049 pool = s390_mainpool_start ();
6050 if (!pool)
6051 pool_overflow = true;
6052 }
6053
6054 /* If literal pool overflowed, start to chunkify it. */
6055 if (pool_overflow)
6056 pool = s390_chunkify_start ();
6057
6058 /* Split out-of-range branches. If this has created new
6059 literal pool entries, cancel current chunk list and
6060 recompute it. zSeries machines have large branch
6061 instructions, so we never need to split a branch. */
6062 if (!TARGET_CPU_ZARCH && s390_split_branches ())
6063 {
6064 if (pool_overflow)
6065 s390_chunkify_cancel (pool);
6066 else
6067 s390_mainpool_cancel (pool);
6068
6069 continue;
6070 }
6071
6072 /* If we made it up to here, both conditions are satisfied.
6073 Finish up literal pool related changes. */
6074 if (pool_overflow)
6075 s390_chunkify_finish (pool);
6076 else
6077 s390_mainpool_finish (pool);
6078
6079 /* We're done splitting branches. */
6080 cfun->machine->split_branches_pending_p = false;
6081 break;
6082 }
6083
6084 s390_optimize_prologue ();
6085 }
6086
6087
6088 /* Return an RTL expression representing the value of the return address
6089 for the frame COUNT steps up from the current frame. FRAME is the
6090 frame pointer of that frame. */
6091
6092 rtx
6093 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6094 {
6095 int offset;
6096 rtx addr;
6097
6098 /* Without backchain, we fail for all but the current frame. */
6099
6100 if (!TARGET_BACKCHAIN && !TARGET_KERNEL_BACKCHAIN && count > 0)
6101 return NULL_RTX;
6102
6103 /* For the current frame, we need to make sure the initial
6104 value of RETURN_REGNUM is actually saved. */
6105
6106 if (count == 0)
6107 {
6108 cfun_frame_layout.save_return_addr_p = true;
6109 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6110 }
6111
6112 if (TARGET_BACKCHAIN)
6113 offset = RETURN_REGNUM * UNITS_PER_WORD;
6114 else
6115 offset = -2 * UNITS_PER_WORD;
6116
6117 addr = plus_constant (frame, offset);
6118 addr = memory_address (Pmode, addr);
6119 return gen_rtx_MEM (Pmode, addr);
6120 }
6121
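/* For illustration only: this routine backs __builtin_return_address.
   A minimal sketch of the source-level usage it supports (COUNT > 0
   additionally requires a backchain so that older frames can be found):

     void *own_return_address (void)
     {
       return __builtin_return_address (0);
     }

   For COUNT == 0 the save slot of RETURN_REGNUM in the current frame
   is used and marked as saved; for larger COUNT the address is read
   from the selected frame, at RETURN_REGNUM * UNITS_PER_WORD with the
   standard backchain, or at -2 * UNITS_PER_WORD with the kernel
   backchain.  */
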
6122 /* Return an RTL expression representing the back chain stored in
6123 the current stack frame. */
6124
6125 rtx
6126 s390_back_chain_rtx (void)
6127 {
6128 rtx chain;
6129
6130 gcc_assert (TARGET_BACKCHAIN || TARGET_KERNEL_BACKCHAIN);
6131
6132 if (TARGET_BACKCHAIN)
6133 chain = stack_pointer_rtx;
6134 else
6135 chain = plus_constant (stack_pointer_rtx,
6136 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6137
6138 chain = gen_rtx_MEM (Pmode, chain);
6139 return chain;
6140 }
6141
6142 /* Find the first call-clobbered register unused in the current function.
6143 This could be used as base register in a leaf function
6144 or for holding the return address before the epilogue. */
6145
6146 static int
6147 find_unused_clobbered_reg (void)
6148 {
6149 int i;
6150 for (i = 0; i < 6; i++)
6151 if (!regs_ever_live[i])
6152 return i;
6153 return 0;
6154 }
6155
6156 /* Determine the frame area which actually has to be accessed
6157 in the function epilogue. The values are stored at the
6158 given pointers AREA_BOTTOM (the lowest used stack address)
6159 and AREA_TOP (address of the first item which does
6160 not belong to the stack frame). */
6161
6162 static void
6163 s390_frame_area (int *area_bottom, int *area_top)
6164 {
6165 int b, t;
6166 int i;
6167
6168 b = INT_MAX;
6169 t = INT_MIN;
6170
6171 if (cfun_frame_layout.first_restore_gpr != -1)
6172 {
6173 b = (cfun_frame_layout.gprs_offset
6174 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6175 t = b + (cfun_frame_layout.last_restore_gpr
6176 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6177 }
6178
6179 if (TARGET_64BIT && cfun_save_high_fprs_p)
6180 {
6181 b = MIN (b, cfun_frame_layout.f8_offset);
6182 t = MAX (t, (cfun_frame_layout.f8_offset
6183 + cfun_frame_layout.high_fprs * 8));
6184 }
6185
6186 if (!TARGET_64BIT)
6187 for (i = 2; i < 4; i++)
6188 if (cfun_fpr_bit_p (i))
6189 {
6190 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6191 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6192 }
6193
6194 *area_bottom = b;
6195 *area_top = t;
6196 }
6197
6198 /* Fill cfun->machine with info about register usage of current function.
6199 Return in LIVE_REGS which GPRs are currently considered live. */
6200
6201 static void
6202 s390_register_info (int live_regs[])
6203 {
6204 int i, j;
6205
6206 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6207 cfun_frame_layout.fpr_bitmap = 0;
6208 cfun_frame_layout.high_fprs = 0;
6209 if (TARGET_64BIT)
6210 for (i = 24; i < 32; i++)
6211 if (regs_ever_live[i] && !global_regs[i])
6212 {
6213 cfun_set_fpr_bit (i - 16);
6214 cfun_frame_layout.high_fprs++;
6215 }
6216
6217 /* Find first and last gpr to be saved. We trust regs_ever_live
6218 data, except that we don't save and restore global registers.
6219
6220 Also, all registers with special meaning to the compiler need
6221 to be handled specially. */
6222
6223 for (i = 0; i < 16; i++)
6224 live_regs[i] = regs_ever_live[i] && !global_regs[i];
6225
6226 if (flag_pic)
6227 live_regs[PIC_OFFSET_TABLE_REGNUM]
6228 = regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
6229
6230 live_regs[BASE_REGNUM]
6231 = cfun->machine->base_reg
6232 && REGNO (cfun->machine->base_reg) == BASE_REGNUM;
6233
6234 live_regs[RETURN_REGNUM]
6235 = cfun->machine->split_branches_pending_p
6236 || cfun_frame_layout.save_return_addr_p;
6237
6238 live_regs[STACK_POINTER_REGNUM]
6239 = !current_function_is_leaf
6240 || TARGET_TPF_PROFILING
6241 || cfun_save_high_fprs_p
6242 || get_frame_size () > 0
6243 || current_function_calls_alloca
6244 || current_function_stdarg;
6245
6246 for (i = 6; i < 16; i++)
6247 if (live_regs[i])
6248 break;
6249 for (j = 15; j > i; j--)
6250 if (live_regs[j])
6251 break;
6252
6253 if (i == 16)
6254 {
6255 /* Nothing to save/restore. */
6256 cfun_frame_layout.first_save_gpr = -1;
6257 cfun_frame_layout.first_restore_gpr = -1;
6258 cfun_frame_layout.last_save_gpr = -1;
6259 cfun_frame_layout.last_restore_gpr = -1;
6260 }
6261 else
6262 {
6263 /* Save / Restore from gpr i to j. */
6264 cfun_frame_layout.first_save_gpr = i;
6265 cfun_frame_layout.first_restore_gpr = i;
6266 cfun_frame_layout.last_save_gpr = j;
6267 cfun_frame_layout.last_restore_gpr = j;
6268 }
6269
6270 if (current_function_stdarg)
6271 {
6272 /* Varargs functions need to save gprs 2 to 6. */
6273 if (cfun_frame_layout.first_save_gpr == -1
6274 || cfun_frame_layout.first_save_gpr > 2)
6275 cfun_frame_layout.first_save_gpr = 2;
6276
6277 if (cfun_frame_layout.last_save_gpr == -1
6278 || cfun_frame_layout.last_save_gpr < 6)
6279 cfun_frame_layout.last_save_gpr = 6;
6280
6281 /* Mark f0, f2 for 31 bit and f0, f2, f4, f6 for 64 bit to be saved. */
6282 if (TARGET_HARD_FLOAT)
6283 for (i = 0; i < (TARGET_64BIT ? 4 : 2); i++)
6284 cfun_set_fpr_bit (i);
6285 }
6286
6287 if (!TARGET_64BIT)
6288 for (i = 2; i < 4; i++)
6289 if (regs_ever_live[i + 16] && !global_regs[i + 16])
6290 cfun_set_fpr_bit (i);
6291 }
6292
6293 /* Fill cfun->machine with info about frame of current function. */
6294
6295 static void
6296 s390_frame_info (void)
6297 {
6298 int i;
6299
6300 cfun_frame_layout.frame_size = get_frame_size ();
6301 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6302 fatal_error ("Total size of local variables exceeds architecture limit.");
6303
6304 cfun_frame_layout.save_backchain_p = (TARGET_BACKCHAIN
6305 || TARGET_KERNEL_BACKCHAIN);
6306
6307 if (TARGET_BACKCHAIN)
6308 {
6309 cfun_frame_layout.backchain_offset = 0;
6310 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6311 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6312 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6313 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr
6314 * UNITS_PER_WORD);
6315 }
6316 else if (TARGET_KERNEL_BACKCHAIN)
6317 {
6318 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6319 - UNITS_PER_WORD);
6320 cfun_frame_layout.gprs_offset
6321 = (cfun_frame_layout.backchain_offset
6322 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr + 1)
6323 * UNITS_PER_WORD);
6324
6325 if (TARGET_64BIT)
6326 {
6327 cfun_frame_layout.f4_offset
6328 = (cfun_frame_layout.gprs_offset
6329 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6330
6331 cfun_frame_layout.f0_offset
6332 = (cfun_frame_layout.f4_offset
6333 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6334 }
6335 else
6336 {
6337 /* On 31 bit we have to take care of the alignment of the
6338 floating point register save slots to provide fastest access. */
6339 cfun_frame_layout.f0_offset
6340 = ((cfun_frame_layout.gprs_offset
6341 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6342 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6343
6344 cfun_frame_layout.f4_offset
6345 = (cfun_frame_layout.f0_offset
6346 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6347 }
6348 }
6349 else /* no backchain */
6350 {
6351 cfun_frame_layout.f4_offset
6352 = (STACK_POINTER_OFFSET
6353 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6354
6355 cfun_frame_layout.f0_offset
6356 = (cfun_frame_layout.f4_offset
6357 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6358
6359 cfun_frame_layout.gprs_offset
6360 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6361 }
6362
6363 if (current_function_is_leaf
6364 && !TARGET_TPF_PROFILING
6365 && cfun_frame_layout.frame_size == 0
6366 && !cfun_save_high_fprs_p
6367 && !current_function_calls_alloca
6368 && !current_function_stdarg)
6369 return;
6370
6371 if (TARGET_BACKCHAIN)
6372 cfun_frame_layout.frame_size += (STARTING_FRAME_OFFSET
6373 + cfun_frame_layout.high_fprs * 8);
6374 else
6375 {
6376 cfun_frame_layout.frame_size += (cfun_frame_layout.save_backchain_p
6377 * UNITS_PER_WORD);
6378
6379 /* No alignment trouble here because f8-f15 are only saved under
6380 64 bit. */
6381 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6382 cfun_frame_layout.f4_offset),
6383 cfun_frame_layout.gprs_offset)
6384 - cfun_frame_layout.high_fprs * 8);
6385
6386 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6387
6388 for (i = 0; i < 8; i++)
6389 if (cfun_fpr_bit_p (i))
6390 cfun_frame_layout.frame_size += 8;
6391
6392 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6393
6394 /* If an odd number of gprs has to be saved under 31 bit, we have to adjust
6395 the frame size to sustain 8 byte alignment of stack frames. */
6396 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6397 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6398 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6399
6400 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
6401 }
6402 }
6403
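/* For illustration: the rounding of frame_size above is the usual
   power-of-two alignment idiom.  A minimal sketch, assuming an 8-byte
   stack boundary (STACK_BOUNDARY / BITS_PER_UNIT == 8) as on s390:

     size = (size + 8 - 1) & ~(8 - 1);

   so a raw frame size of 92 bytes is rounded up to 96, while a size
   that is already a multiple of 8 is left unchanged.  */
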
6404 /* Generate frame layout. Fills in register and frame data for the current
6405 function in cfun->machine. This routine can be called multiple times;
6406 it will re-do the complete frame layout every time. */
6407
6408 static void
6409 s390_init_frame_layout (void)
6410 {
6411 HOST_WIDE_INT frame_size;
6412 int base_used;
6413 int live_regs[16];
6414
6415 /* If return address register is explicitly used, we need to save it. */
6416 if (regs_ever_live[RETURN_REGNUM]
6417 || !current_function_is_leaf
6418 || TARGET_TPF_PROFILING
6419 || current_function_stdarg
6420 || current_function_calls_eh_return)
6421 cfun_frame_layout.save_return_addr_p = true;
6422
6423 /* On S/390 machines, we may need to perform branch splitting, which
6424 will require both base and return address register. We have no
6425 choice but to assume we're going to need them until right at the
6426 end of the machine dependent reorg phase. */
6427 if (!TARGET_CPU_ZARCH)
6428 cfun->machine->split_branches_pending_p = true;
6429
6430 do
6431 {
6432 frame_size = cfun_frame_layout.frame_size;
6433
6434 /* Try to predict whether we'll need the base register. */
6435 base_used = cfun->machine->split_branches_pending_p
6436 || current_function_uses_const_pool
6437 || (!DISP_IN_RANGE (-frame_size)
6438 && !CONST_OK_FOR_CONSTRAINT_P (-frame_size, 'K', "K"));
6439
6440 /* Decide which register to use as literal pool base. In small
6441 leaf functions, try to use an unused call-clobbered register
6442 as base register to avoid save/restore overhead. */
6443 if (!base_used)
6444 cfun->machine->base_reg = NULL_RTX;
6445 else if (current_function_is_leaf && !regs_ever_live[5])
6446 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6447 else
6448 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6449
6450 s390_register_info (live_regs);
6451 s390_frame_info ();
6452 }
6453 while (frame_size != cfun_frame_layout.frame_size);
6454 }
6455
6456 /* Update frame layout. Recompute actual register save data based on
6457 current info and update regs_ever_live for the special registers.
6458 May be called multiple times, but may never cause *more* registers
6459 to be saved than s390_init_frame_layout allocated room for. */
6460
6461 static void
6462 s390_update_frame_layout (void)
6463 {
6464 int live_regs[16];
6465
6466 s390_register_info (live_regs);
6467
6468 regs_ever_live[BASE_REGNUM] = live_regs[BASE_REGNUM];
6469 regs_ever_live[RETURN_REGNUM] = live_regs[RETURN_REGNUM];
6470 regs_ever_live[STACK_POINTER_REGNUM] = live_regs[STACK_POINTER_REGNUM];
6471
6472 if (cfun->machine->base_reg)
6473 regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
6474 }
6475
6476 /* Return true if register FROM can be eliminated via register TO. */
6477
6478 bool
6479 s390_can_eliminate (int from, int to)
6480 {
6481 gcc_assert (to == STACK_POINTER_REGNUM
6482 || to == HARD_FRAME_POINTER_REGNUM);
6483
6484 gcc_assert (from == FRAME_POINTER_REGNUM
6485 || from == ARG_POINTER_REGNUM
6486 || from == RETURN_ADDRESS_POINTER_REGNUM);
6487
6488 /* Make sure we actually saved the return address. */
6489 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6490 if (!current_function_calls_eh_return
6491 && !current_function_stdarg
6492 && !cfun_frame_layout.save_return_addr_p)
6493 return false;
6494
6495 return true;
6496 }
6497
6498 /* Return the offset between registers FROM and TO initially after the prologue. */
6499
6500 HOST_WIDE_INT
6501 s390_initial_elimination_offset (int from, int to)
6502 {
6503 HOST_WIDE_INT offset;
6504 int index;
6505
6506 /* ??? Why are we called for non-eliminable pairs? */
6507 if (!s390_can_eliminate (from, to))
6508 return 0;
6509
6510 switch (from)
6511 {
6512 case FRAME_POINTER_REGNUM:
6513 offset = 0;
6514 break;
6515
6516 case ARG_POINTER_REGNUM:
6517 s390_init_frame_layout ();
6518 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6519 break;
6520
6521 case RETURN_ADDRESS_POINTER_REGNUM:
6522 s390_init_frame_layout ();
6523 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr;
6524 gcc_assert (index >= 0);
6525 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6526 offset += index * UNITS_PER_WORD;
6527 break;
6528
6529 default:
6530 gcc_unreachable ();
6531 }
6532
6533 return offset;
6534 }
6535
6536 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6537 to register BASE. Return generated insn. */
6538
6539 static rtx
6540 save_fpr (rtx base, int offset, int regnum)
6541 {
6542 rtx addr;
6543 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6544 set_mem_alias_set (addr, s390_sr_alias_set);
6545
6546 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6547 }
6548
6549 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6550 to register BASE. Return generated insn. */
6551
6552 static rtx
6553 restore_fpr (rtx base, int offset, int regnum)
6554 {
6555 rtx addr;
6556 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6557 set_mem_alias_set (addr, s390_sr_alias_set);
6558
6559 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
6560 }
6561
6562 /* Generate insn to save registers FIRST to LAST into
6563 the register save area located at offset OFFSET
6564 relative to register BASE. */
6565
6566 static rtx
6567 save_gprs (rtx base, int offset, int first, int last)
6568 {
6569 rtx addr, insn, note;
6570 int i;
6571
6572 addr = plus_constant (base, offset);
6573 addr = gen_rtx_MEM (Pmode, addr);
6574 set_mem_alias_set (addr, s390_sr_alias_set);
6575
6576 /* Special-case single register. */
6577 if (first == last)
6578 {
6579 if (TARGET_64BIT)
6580 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
6581 else
6582 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
6583
6584 RTX_FRAME_RELATED_P (insn) = 1;
6585 return insn;
6586 }
6587
6588
6589 insn = gen_store_multiple (addr,
6590 gen_rtx_REG (Pmode, first),
6591 GEN_INT (last - first + 1));
6592
6593
6594 /* We need to set the FRAME_RELATED flag on all SETs
6595 inside the store-multiple pattern.
6596
6597 However, we must not emit DWARF records for registers 2..5
6598 if they are stored for use by variable arguments ...
6599
6600 ??? Unfortunately, it is not enough to simply not set the
6601 FRAME_RELATED flags for those SETs, because the first SET
6602 of the PARALLEL is always treated as if it had the flag
6603 set, even if it does not. Therefore we emit a new pattern
6604 without those registers as REG_FRAME_RELATED_EXPR note. */
6605
6606 if (first >= 6)
6607 {
6608 rtx pat = PATTERN (insn);
6609
6610 for (i = 0; i < XVECLEN (pat, 0); i++)
6611 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
6612 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
6613
6614 RTX_FRAME_RELATED_P (insn) = 1;
6615 }
6616 else if (last >= 6)
6617 {
6618 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
6619 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
6620 gen_rtx_REG (Pmode, 6),
6621 GEN_INT (last - 6 + 1));
6622 note = PATTERN (note);
6623
6624 REG_NOTES (insn) =
6625 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6626 note, REG_NOTES (insn));
6627
6628 for (i = 0; i < XVECLEN (note, 0); i++)
6629 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
6630 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
6631
6632 RTX_FRAME_RELATED_P (insn) = 1;
6633 }
6634
6635 return insn;
6636 }
6637
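/* For illustration: for a varargs function that stores r2-r15 in one
   store-multiple insn (FIRST == 2, LAST == 15), the code above emits
   a REG_FRAME_RELATED_EXPR note describing only the stores of r6-r15,
   so the unwinder never sees the argument registers r2-r5 as saved.  */
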
6638 /* Generate insn to restore registers FIRST to LAST from
6639 the register save area located at offset OFFSET
6640 relative to register BASE. */
6641
6642 static rtx
6643 restore_gprs (rtx base, int offset, int first, int last)
6644 {
6645 rtx addr, insn;
6646
6647 addr = plus_constant (base, offset);
6648 addr = gen_rtx_MEM (Pmode, addr);
6649 set_mem_alias_set (addr, s390_sr_alias_set);
6650
6651 /* Special-case single register. */
6652 if (first == last)
6653 {
6654 if (TARGET_64BIT)
6655 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
6656 else
6657 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
6658
6659 return insn;
6660 }
6661
6662 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
6663 addr,
6664 GEN_INT (last - first + 1));
6665 return insn;
6666 }
6667
6668 /* Return insn sequence to load the GOT register. */
6669
6670 static GTY(()) rtx got_symbol;
6671 rtx
6672 s390_load_got (void)
6673 {
6674 rtx insns;
6675
6676 if (!got_symbol)
6677 {
6678 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6679 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
6680 }
6681
6682 start_sequence ();
6683
6684 if (TARGET_CPU_ZARCH)
6685 {
6686 emit_move_insn (pic_offset_table_rtx, got_symbol);
6687 }
6688 else
6689 {
6690 rtx offset;
6691
6692 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
6693 UNSPEC_LTREL_OFFSET);
6694 offset = gen_rtx_CONST (Pmode, offset);
6695 offset = force_const_mem (Pmode, offset);
6696
6697 emit_move_insn (pic_offset_table_rtx, offset);
6698
6699 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
6700 UNSPEC_LTREL_BASE);
6701 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
6702
6703 emit_move_insn (pic_offset_table_rtx, offset);
6704 }
6705
6706 insns = get_insns ();
6707 end_sequence ();
6708 return insns;
6709 }
6710
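/* For illustration: on TARGET_CPU_ZARCH the sequence built above
   amounts to a single

     larl  %r12,_GLOBAL_OFFSET_TABLE_

   (r12 being the PIC register), while on older CPUs the relative GOT
   offset is loaded from the literal pool and the literal pool base
   is added to it.  */
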
6711 /* Expand the prologue into a bunch of separate insns. */
6712
6713 void
6714 s390_emit_prologue (void)
6715 {
6716 rtx insn, addr;
6717 rtx temp_reg;
6718 int i;
6719 int offset;
6720 int next_fpr = 0;
6721
6722 /* Complete frame layout. */
6723
6724 s390_update_frame_layout ();
6725
6726 /* Annotate all constant pool references to let the scheduler know
6727 they implicitly use the base register. */
6728
6729 push_topmost_sequence ();
6730
6731 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6732 if (INSN_P (insn))
6733 annotate_constant_pool_refs (&PATTERN (insn));
6734
6735 pop_topmost_sequence ();
6736
6737 /* Choose best register to use for temp use within prologue.
6738 See below for why TPF must use the register 1. */
6739
6740 if (!current_function_is_leaf && !TARGET_TPF_PROFILING)
6741 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
6742 else
6743 temp_reg = gen_rtx_REG (Pmode, 1);
6744
6745 /* Save call saved gprs. */
6746 if (cfun_frame_layout.first_save_gpr != -1)
6747 {
6748 insn = save_gprs (stack_pointer_rtx,
6749 cfun_frame_layout.gprs_offset,
6750 cfun_frame_layout.first_save_gpr,
6751 cfun_frame_layout.last_save_gpr);
6752 emit_insn (insn);
6753 }
6754
6755 /* Dummy insn to mark literal pool slot. */
6756
6757 if (cfun->machine->base_reg)
6758 emit_insn (gen_main_pool (cfun->machine->base_reg));
6759
6760 offset = cfun_frame_layout.f0_offset;
6761
6762 /* Save f0 and f2. */
6763 for (i = 0; i < 2; i++)
6764 {
6765 if (cfun_fpr_bit_p (i))
6766 {
6767 save_fpr (stack_pointer_rtx, offset, i + 16);
6768 offset += 8;
6769 }
6770 else if (TARGET_BACKCHAIN)
6771 offset += 8;
6772 }
6773
6774 /* Save f4 and f6. */
6775 offset = cfun_frame_layout.f4_offset;
6776 for (i = 2; i < 4; i++)
6777 {
6778 if (cfun_fpr_bit_p (i))
6779 {
6780 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6781 offset += 8;
6782
6783 /* If f4 and f6 are call-clobbered, they are saved due to stdarg and
6784 are therefore not frame related. */
6785 if (!call_really_used_regs[i + 16])
6786 RTX_FRAME_RELATED_P (insn) = 1;
6787 }
6788 else if (TARGET_BACKCHAIN)
6789 offset += 8;
6790 }
6791
6792 if (!TARGET_BACKCHAIN
6793 && cfun_save_high_fprs_p
6794 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
6795 {
6796 offset = (cfun_frame_layout.f8_offset
6797 + (cfun_frame_layout.high_fprs - 1) * 8);
6798
6799 for (i = 15; i > 7 && offset >= 0; i--)
6800 if (cfun_fpr_bit_p (i))
6801 {
6802 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6803
6804 RTX_FRAME_RELATED_P (insn) = 1;
6805 offset -= 8;
6806 }
6807 if (offset >= cfun_frame_layout.f8_offset)
6808 next_fpr = i + 16;
6809 }
6810
6811 if (TARGET_BACKCHAIN)
6812 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
6813
6814 /* Decrement stack pointer. */
6815
6816 if (cfun_frame_layout.frame_size > 0)
6817 {
6818 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
6819
6820 if (s390_stack_size)
6821 {
6822 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
6823 & ~(s390_stack_guard - 1));
6824 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
6825 GEN_INT (stack_check_mask));
6826
6827 if (TARGET_64BIT)
6828 gen_cmpdi (t, const0_rtx);
6829 else
6830 gen_cmpsi (t, const0_rtx);
6831
6832 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
6833 gen_rtx_REG (CCmode,
6834 CC_REGNUM),
6835 const0_rtx),
6836 const0_rtx));
6837 }
6838
6839 if (s390_warn_framesize > 0
6840 && cfun_frame_layout.frame_size >= s390_warn_framesize)
6841 warning ("frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
6842 current_function_name (), cfun_frame_layout.frame_size);
6843
6844 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
6845 warning ("%qs uses dynamic stack allocation", current_function_name ());
6846
6847 /* Save incoming stack pointer into temp reg. */
6848 if (cfun_frame_layout.save_backchain_p || next_fpr)
6849 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
6850
6851 /* Subtract frame size from stack pointer. */
6852
6853 if (DISP_IN_RANGE (INTVAL (frame_off)))
6854 {
6855 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6856 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6857 frame_off));
6858 insn = emit_insn (insn);
6859 }
6860 else
6861 {
6862 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
6863 frame_off = force_const_mem (Pmode, frame_off);
6864
6865 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
6866 annotate_constant_pool_refs (&PATTERN (insn));
6867 }
6868
6869 RTX_FRAME_RELATED_P (insn) = 1;
6870 REG_NOTES (insn) =
6871 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6872 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6873 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6874 GEN_INT (-cfun_frame_layout.frame_size))),
6875 REG_NOTES (insn));
6876
6877 /* Set backchain. */
6878
6879 if (cfun_frame_layout.save_backchain_p)
6880 {
6881 if (cfun_frame_layout.backchain_offset)
6882 addr = gen_rtx_MEM (Pmode,
6883 plus_constant (stack_pointer_rtx,
6884 cfun_frame_layout.backchain_offset));
6885 else
6886 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
6887 set_mem_alias_set (addr, s390_sr_alias_set);
6888 insn = emit_insn (gen_move_insn (addr, temp_reg));
6889 }
6890
6891 /* If we support asynchronous exceptions (e.g. for Java),
6892 we need to make sure the backchain pointer is set up
6893 before any possibly trapping memory access. */
6894
6895 if (cfun_frame_layout.save_backchain_p && flag_non_call_exceptions)
6896 {
6897 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
6898 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
6899 }
6900 }
6901
6902 /* Save fprs 8 - 15 (64 bit ABI). */
6903
6904 if (cfun_save_high_fprs_p && next_fpr)
6905 {
6906 insn = emit_insn (gen_add2_insn (temp_reg,
6907 GEN_INT (cfun_frame_layout.f8_offset)));
6908
6909 offset = 0;
6910
6911 for (i = 24; i <= next_fpr; i++)
6912 if (cfun_fpr_bit_p (i - 16))
6913 {
6914 rtx addr = plus_constant (stack_pointer_rtx,
6915 cfun_frame_layout.frame_size
6916 + cfun_frame_layout.f8_offset
6917 + offset);
6918
6919 insn = save_fpr (temp_reg, offset, i);
6920 offset += 8;
6921 RTX_FRAME_RELATED_P (insn) = 1;
6922 REG_NOTES (insn) =
6923 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6924 gen_rtx_SET (VOIDmode,
6925 gen_rtx_MEM (DFmode, addr),
6926 gen_rtx_REG (DFmode, i)),
6927 REG_NOTES (insn));
6928 }
6929 }
6930
6931 /* Set frame pointer, if needed. */
6932
6933 if (frame_pointer_needed)
6934 {
6935 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6936 RTX_FRAME_RELATED_P (insn) = 1;
6937 }
6938
6939 /* Set up got pointer, if needed. */
6940
6941 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6942 {
6943 rtx insns = s390_load_got ();
6944
6945 for (insn = insns; insn; insn = NEXT_INSN (insn))
6946 {
6947 annotate_constant_pool_refs (&PATTERN (insn));
6948
6949 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
6950 REG_NOTES (insn));
6951 }
6952
6953 emit_insn (insns);
6954 }
6955
6956 if (TARGET_TPF_PROFILING)
6957 {
6958 /* Generate a BAS instruction to serve as a function
6959 entry intercept to facilitate the use of tracing
6960 algorithms located at the branch target. */
6961 emit_insn (gen_prologue_tpf ());
6962
6963 /* Emit a blockage here so that all code
6964 lies between the profiling mechanisms. */
6965 emit_insn (gen_blockage ());
6966 }
6967 }
6968
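/* For illustration only, a rough sketch of what this expands to for
   a simple non-leaf 31-bit function with TARGET_BACKCHAIN and a
   96-byte frame (exact offsets depend on cfun_frame_layout):

     stm   %r6,%r15,24(%r15)      save call-saved gprs
     lr    %r14,%r15              keep the incoming stack pointer
     ahi   %r15,-96               allocate the frame
     st    %r14,0(%r15)           set the backchain
*/
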
6969 /* Expand the epilogue into a bunch of separate insns. */
6970
6971 void
6972 s390_emit_epilogue (bool sibcall)
6973 {
6974 rtx frame_pointer, return_reg;
6975 int area_bottom, area_top, offset = 0;
6976 int next_offset;
6977 rtvec p;
6978 int i;
6979
6980 if (TARGET_TPF_PROFILING)
6981 {
6982
6983 /* Generate a BAS instruction to serve as a function
6984 entry intercept to facilitate the use of tracing
6985 algorithms located at the branch target. */
6986
6987 /* Emit a blockage here so that all code
6988 lies between the profiling mechanisms. */
6989 emit_insn (gen_blockage ());
6990
6991 emit_insn (gen_epilogue_tpf ());
6992 }
6993
6994 /* Check whether to use frame or stack pointer for restore. */
6995
6996 frame_pointer = (frame_pointer_needed
6997 ? hard_frame_pointer_rtx : stack_pointer_rtx);
6998
6999 s390_frame_area (&area_bottom, &area_top);
7000
7001 /* Check whether we can access the register save area.
7002 If not, increment the frame pointer as required. */
7003
7004 if (area_top <= area_bottom)
7005 {
7006 /* Nothing to restore. */
7007 }
7008 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
7009 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
7010 {
7011 /* Area is in range. */
7012 offset = cfun_frame_layout.frame_size;
7013 }
7014 else
7015 {
7016 rtx insn, frame_off;
7017
7018 offset = area_bottom < 0 ? -area_bottom : 0;
7019 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7020
7021 if (DISP_IN_RANGE (INTVAL (frame_off)))
7022 {
7023 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7024 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7025 insn = emit_insn (insn);
7026 }
7027 else
7028 {
7029 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
7030 frame_off = force_const_mem (Pmode, frame_off);
7031
7032 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7033 annotate_constant_pool_refs (&PATTERN (insn));
7034 }
7035 }
7036
7037 /* Restore call saved fprs. */
7038
7039 if (TARGET_64BIT)
7040 {
7041 if (cfun_save_high_fprs_p)
7042 {
7043 next_offset = cfun_frame_layout.f8_offset;
7044 for (i = 24; i < 32; i++)
7045 {
7046 if (cfun_fpr_bit_p (i - 16))
7047 {
7048 restore_fpr (frame_pointer,
7049 offset + next_offset, i);
7050 next_offset += 8;
7051 }
7052 }
7053 }
7054
7055 }
7056 else
7057 {
7058 next_offset = cfun_frame_layout.f4_offset;
7059 for (i = 18; i < 20; i++)
7060 {
7061 if (cfun_fpr_bit_p (i - 16))
7062 {
7063 restore_fpr (frame_pointer,
7064 offset + next_offset, i);
7065 next_offset += 8;
7066 }
7067 else if (TARGET_BACKCHAIN)
7068 next_offset += 8;
7069 }
7070
7071 }
7072
7073 /* Return register. */
7074
7075 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7076
7077 /* Restore call saved gprs. */
7078
7079 if (cfun_frame_layout.first_restore_gpr != -1)
7080 {
7081 rtx insn, addr;
7082 int i;
7083
7084 /* Check for global registers and save them to the
7085 stack locations from which they get restored. */
7086
7087 for (i = cfun_frame_layout.first_restore_gpr;
7088 i <= cfun_frame_layout.last_restore_gpr;
7089 i++)
7090 {
7091 /* These registers are special and need to be
7092 restored in any case. */
7093 if (i == STACK_POINTER_REGNUM
7094 || i == RETURN_REGNUM
7095 || i == BASE_REGNUM
7096 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
7097 continue;
7098
7099 if (global_regs[i])
7100 {
7101 addr = plus_constant (frame_pointer,
7102 offset + cfun_frame_layout.gprs_offset
7103 + (i - cfun_frame_layout.first_save_gpr)
7104 * UNITS_PER_WORD);
7105 addr = gen_rtx_MEM (Pmode, addr);
7106 set_mem_alias_set (addr, s390_sr_alias_set);
7107 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7108 }
7109 }
7110
7111 if (! sibcall)
7112 {
7113 /* Fetch return address from stack before load multiple;
7114 this helps scheduling. */
7115
7116 if (cfun_frame_layout.save_return_addr_p
7117 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7118 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7119 {
7120 int return_regnum = find_unused_clobbered_reg();
7121 if (!return_regnum)
7122 return_regnum = 4;
7123 return_reg = gen_rtx_REG (Pmode, return_regnum);
7124
7125 addr = plus_constant (frame_pointer,
7126 offset + cfun_frame_layout.gprs_offset
7127 + (RETURN_REGNUM
7128 - cfun_frame_layout.first_save_gpr)
7129 * UNITS_PER_WORD);
7130 addr = gen_rtx_MEM (Pmode, addr);
7131 set_mem_alias_set (addr, s390_sr_alias_set);
7132 emit_move_insn (return_reg, addr);
7133 }
7134 }
7135
7136 insn = restore_gprs (frame_pointer,
7137 offset + cfun_frame_layout.gprs_offset
7138 + (cfun_frame_layout.first_restore_gpr
7139 - cfun_frame_layout.first_save_gpr)
7140 * UNITS_PER_WORD,
7141 cfun_frame_layout.first_restore_gpr,
7142 cfun_frame_layout.last_restore_gpr);
7143 emit_insn (insn);
7144 }
7145
7146 if (! sibcall)
7147 {
7148
7149 /* Return to caller. */
7150
7151 p = rtvec_alloc (2);
7152
7153 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7154 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7155 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7156 }
7157 }
7158
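/* For illustration only, the matching 31-bit epilogue for the
   prologue sketched above would be roughly

     lm    %r6,%r15,120(%r15)     reload gprs, including %r15 itself
     br    %r14                   return to caller

   where 120 = frame size (96) + gprs_offset (24); reloading %r15
   from its save slot restores the caller's stack pointer.  */
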
7159
7160 /* Return the size in bytes of a function argument of
7161 type TYPE and/or mode MODE. At least one of TYPE or
7162 MODE must be specified. */
7163
7164 static int
7165 s390_function_arg_size (enum machine_mode mode, tree type)
7166 {
7167 if (type)
7168 return int_size_in_bytes (type);
7169
7170 /* No type info available for some library calls ... */
7171 if (mode != BLKmode)
7172 return GET_MODE_SIZE (mode);
7173
7174 /* If we have neither type nor mode, abort. */
7175 abort ();
7176 }
7177
7178 /* Return true if a function argument of type TYPE and mode MODE
7179 is to be passed in a floating-point register, if available. */
7180
7181 static bool
7182 s390_function_arg_float (enum machine_mode mode, tree type)
7183 {
7184 int size = s390_function_arg_size (mode, type);
7185 if (size > 8)
7186 return false;
7187
7188 /* Soft-float changes the ABI: no floating-point registers are used. */
7189 if (TARGET_SOFT_FLOAT)
7190 return false;
7191
7192 /* No type info available for some library calls ... */
7193 if (!type)
7194 return mode == SFmode || mode == DFmode;
7195
7196 /* The ABI says that record types with a single member are treated
7197 just like that member would be. */
7198 while (TREE_CODE (type) == RECORD_TYPE)
7199 {
7200 tree field, single = NULL_TREE;
7201
7202 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7203 {
7204 if (TREE_CODE (field) != FIELD_DECL)
7205 continue;
7206
7207 if (single == NULL_TREE)
7208 single = TREE_TYPE (field);
7209 else
7210 return false;
7211 }
7212
7213 if (single == NULL_TREE)
7214 return false;
7215 else
7216 type = single;
7217 }
7218
7219 return TREE_CODE (type) == REAL_TYPE;
7220 }
7221
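/* For illustration: by the single-member record rule above,

     struct wrap { double d; };

   is passed exactly like a plain double, i.e. in a floating-point
   register when one is available, whereas a struct with two members
   of the same total size is not.  */
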
7222 /* Return true if a function argument of type TYPE and mode MODE
7223 is to be passed in an integer register, or a pair of integer
7224 registers, if available. */
7225
7226 static bool
7227 s390_function_arg_integer (enum machine_mode mode, tree type)
7228 {
7229 int size = s390_function_arg_size (mode, type);
7230 if (size > 8)
7231 return false;
7232
7233 /* No type info available for some library calls ... */
7234 if (!type)
7235 return GET_MODE_CLASS (mode) == MODE_INT
7236 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
7237
7238 /* We accept small integral (and similar) types. */
7239 if (INTEGRAL_TYPE_P (type)
7240 || POINTER_TYPE_P (type)
7241 || TREE_CODE (type) == OFFSET_TYPE
7242 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7243 return true;
7244
7245 /* We also accept structs of size 1, 2, 4, 8 that are not
7246 passed in floating-point registers. */
7247 if (AGGREGATE_TYPE_P (type)
7248 && exact_log2 (size) >= 0
7249 && !s390_function_arg_float (mode, type))
7250 return true;
7251
7252 return false;
7253 }
7254
7255 /* Return 1 if a function argument of type TYPE and mode MODE
7256 is to be passed by reference. The ABI specifies that only
7257 structures of size 1, 2, 4, or 8 bytes are passed by value,
7258 all other structures (and complex numbers) are passed by
7259 reference. */
7260
7261 static bool
7262 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7263 enum machine_mode mode, tree type,
7264 bool named ATTRIBUTE_UNUSED)
7265 {
7266 int size = s390_function_arg_size (mode, type);
7267 if (size > 8)
7268 return true;
7269
7270 if (type)
7271 {
7272 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
7273 return 1;
7274
7275 if (TREE_CODE (type) == COMPLEX_TYPE
7276 || TREE_CODE (type) == VECTOR_TYPE)
7277 return 1;
7278 }
7279
7280 return 0;
7281 }
7282
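/* For illustration, a few consequences of the rules above:

     struct { char c[3]; }    size 3, not a power of two
                              -> passed by reference
     struct { char c[4]; }    size 4
                              -> passed by value (in a gpr)
     struct { char c[16]; }   size > 8
                              -> passed by reference
     _Complex double          -> always passed by reference  */
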
7283 /* Update the data in CUM to advance over an argument of mode MODE and
7284 data type TYPE. (TYPE is null for libcalls where that information
7285 may not be available.). The boolean NAMED specifies whether the
7286 argument is a named argument (as opposed to an unnamed argument
7287 matching an ellipsis). */
7288
7289 void
7290 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7291 tree type, int named ATTRIBUTE_UNUSED)
7292 {
7293 if (s390_function_arg_float (mode, type))
7294 {
7295 cum->fprs += 1;
7296 }
7297 else if (s390_function_arg_integer (mode, type))
7298 {
7299 int size = s390_function_arg_size (mode, type);
7300 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7301 }
7302 else
7303 abort ();
7304 }
7305
7306 /* Define where to put the arguments to a function.
7307 Value is zero to push the argument on the stack,
7308 or a hard register in which to store the argument.
7309
7310 MODE is the argument's machine mode.
7311 TYPE is the data type of the argument (as a tree).
7312 This is null for libcalls where that information may
7313 not be available.
7314 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7315 the preceding args and about the function being called.
7316 NAMED is nonzero if this argument is a named parameter
7317 (otherwise it is an extra parameter matching an ellipsis).
7318
7319 On S/390, we use general purpose registers 2 through 6 to
7320 pass integer, pointer, and certain structure arguments, and
7321 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7322 to pass floating point arguments. All remaining arguments
7323 are pushed to the stack. */
7324
7325 rtx
7326 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7327 int named ATTRIBUTE_UNUSED)
7328 {
7329 if (s390_function_arg_float (mode, type))
7330 {
7331 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
7332 return 0;
7333 else
7334 return gen_rtx_REG (mode, cum->fprs + 16);
7335 }
7336 else if (s390_function_arg_integer (mode, type))
7337 {
7338 int size = s390_function_arg_size (mode, type);
7339 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7340
7341 if (cum->gprs + n_gprs > 5)
7342 return 0;
7343 else
7344 return gen_rtx_REG (mode, cum->gprs + 2);
7345 }
7346
7347 /* After the real arguments, expand_call calls us once again
7348 with a void_type_node type. Whatever we return here is
7349 passed as operand 2 to the call expanders.
7350
7351 We don't need this feature ... */
7352 else if (type == void_type_node)
7353 return const0_rtx;
7354
7355 abort ();
7356 }
7357
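/* For illustration: given the rules above, a 31-bit call such as

     void f (int a, double b, int c, void *d);

   passes a in %r2, b in %f0, c in %r3 and d in %r4; two further
   integer arguments would still fit in %r5 and %r6, and anything
   beyond that is pushed to the stack.  */
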
7358 /* Return true if return values of type TYPE should be returned
7359 in a memory buffer whose address is passed by the caller as
7360 hidden first argument. */
7361
7362 static bool
7363 s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7364 {
7365 /* We accept small integral (and similar) types. */
7366 if (INTEGRAL_TYPE_P (type)
7367 || POINTER_TYPE_P (type)
7368 || TREE_CODE (type) == OFFSET_TYPE
7369 || TREE_CODE (type) == REAL_TYPE)
7370 return int_size_in_bytes (type) > 8;
7371
7372 /* Aggregates and similar constructs are always returned
7373 in memory. */
7374 if (AGGREGATE_TYPE_P (type)
7375 || TREE_CODE (type) == COMPLEX_TYPE
7376 || TREE_CODE (type) == VECTOR_TYPE)
7377 return true;
7378
7379 /* ??? We get called on all sorts of random stuff from
7380 aggregate_value_p. We can't abort, but it's not clear
7381 what's safe to return. Pretend it's a struct I guess. */
7382 return true;
7383 }
7384
7385 /* Define where to return a (scalar) value of type TYPE.
7386 If TYPE is null, define where to return a (scalar)
7387 value of mode MODE from a libcall. */
7388
7389 rtx
7390 s390_function_value (tree type, enum machine_mode mode)
7391 {
7392 if (type)
7393 {
7394 int unsignedp = TYPE_UNSIGNED (type);
7395 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7396 }
7397
7398 if (GET_MODE_CLASS (mode) != MODE_INT
7399 && GET_MODE_CLASS (mode) != MODE_FLOAT)
7400 abort ();
7401 if (GET_MODE_SIZE (mode) > 8)
7402 abort ();
7403
7404 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
7405 return gen_rtx_REG (mode, 16);
7406 else
7407 return gen_rtx_REG (mode, 2);
7408 }
7409
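/* For illustration:

     long   f (void);     value returned in %r2
     double g (void);     value returned in %f0 (hard float)

   Anything wider than 8 bytes or non-scalar never reaches this
   function; s390_return_in_memory forces it into a hidden return
   buffer instead.  */
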
7410
7411 /* Create and return the va_list datatype.
7412
7413 On S/390, va_list is an array type equivalent to
7414
7415 typedef struct __va_list_tag
7416 {
7417 long __gpr;
7418 long __fpr;
7419 void *__overflow_arg_area;
7420 void *__reg_save_area;
7421 } va_list[1];
7422
7423 where __gpr and __fpr hold the number of general purpose
7424 or floating point arguments used up to now, respectively,
7425 __overflow_arg_area points to the stack location of the
7426 next argument passed on the stack, and __reg_save_area
7427 always points to the start of the register area in the
7428 call frame of the current function. The function prologue
7429 saves all registers used for argument passing into this
7430 area if the function uses variable arguments. */
7431
7432 static tree
7433 s390_build_builtin_va_list (void)
7434 {
7435 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7436
7437 record = lang_hooks.types.make_type (RECORD_TYPE);
7438
7439 type_decl =
7440 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7441
7442 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7443 long_integer_type_node);
7444 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7445 long_integer_type_node);
7446 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7447 ptr_type_node);
7448 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7449 ptr_type_node);
7450
7451 DECL_FIELD_CONTEXT (f_gpr) = record;
7452 DECL_FIELD_CONTEXT (f_fpr) = record;
7453 DECL_FIELD_CONTEXT (f_ovf) = record;
7454 DECL_FIELD_CONTEXT (f_sav) = record;
7455
7456 TREE_CHAIN (record) = type_decl;
7457 TYPE_NAME (record) = type_decl;
7458 TYPE_FIELDS (record) = f_gpr;
7459 TREE_CHAIN (f_gpr) = f_fpr;
7460 TREE_CHAIN (f_fpr) = f_ovf;
7461 TREE_CHAIN (f_ovf) = f_sav;
7462
7463 layout_type (record);
7464
7465 /* The correct type is an array type of one element. */
7466 return build_array_type (record, build_index_type (size_zero_node));
7467 }
7468
7469 /* Implement va_start by filling the va_list structure VALIST.
7470 STDARG_P is always true, and ignored.
7471 NEXTARG points to the first anonymous stack argument.
7472
7473 The following global variables are used to initialize
7474 the va_list structure:
7475
7476 current_function_args_info:
7477 holds number of gprs and fprs used for named arguments.
7478 current_function_arg_offset_rtx:
7479 holds the offset of the first anonymous stack argument
7480 (relative to the virtual arg pointer). */
7481
7482 void
7483 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
7484 {
7485 HOST_WIDE_INT n_gpr, n_fpr;
7486 int off;
7487 tree f_gpr, f_fpr, f_ovf, f_sav;
7488 tree gpr, fpr, ovf, sav, t;
7489
7490 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7491 f_fpr = TREE_CHAIN (f_gpr);
7492 f_ovf = TREE_CHAIN (f_fpr);
7493 f_sav = TREE_CHAIN (f_ovf);
7494
7495 valist = build_va_arg_indirect_ref (valist);
7496 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7497 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7498 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7499 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7500
7501 /* Count number of gp and fp argument registers used. */
7502
7503 n_gpr = current_function_args_info.gprs;
7504 n_fpr = current_function_args_info.fprs;
7505
7506 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
7507 build_int_cst (NULL_TREE, n_gpr));
7508 TREE_SIDE_EFFECTS (t) = 1;
7509 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7510
7511 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
7512 build_int_cst (NULL_TREE, n_fpr));
7513 TREE_SIDE_EFFECTS (t) = 1;
7514 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7515
7516 /* Find the overflow area. */
7517 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
7518
7519 off = INTVAL (current_function_arg_offset_rtx);
7520 off = off < 0 ? 0 : off;
7521 if (TARGET_DEBUG_ARG)
7522 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
7523 (int)n_gpr, (int)n_fpr, off);
7524
7525 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));
7526
7527 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
7528 TREE_SIDE_EFFECTS (t) = 1;
7529 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7530
7531 /* Find the register save area. */
7532 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
7533 if (TARGET_KERNEL_BACKCHAIN)
7534 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7535 build_int_cst (NULL_TREE,
7536 -(RETURN_REGNUM - 2) * UNITS_PER_WORD
7537 - (TARGET_64BIT ? 4 : 2) * 8));
7538 else
7539 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7540 build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));
7541
7542 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
7543 TREE_SIDE_EFFECTS (t) = 1;
7544 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7545 }
7546
7547 /* Implement va_arg by updating the va_list structure
7548 VALIST as required to retrieve an argument of type
7549 TYPE, and returning that argument.
7550
7551 Generates code equivalent to:
7552
7553 if (integral value) {
7554 if ((size <= 4 && args.gpr < 5)
7555     || (size > 4 && args.gpr < 4))
7556 ret = args.reg_save_area[args.gpr+8]
7557 else
7558 ret = *args.overflow_arg_area++;
7559 } else if (float value) {
7560 if (args.fgpr < 2)
7561 ret = args.reg_save_area[args.fpr+64]
7562 else
7563 ret = *args.overflow_arg_area++;
7564 } else if (aggregate value) {
7565 if (args.gpr < 5)
7566 ret = *args.reg_save_area[args.gpr]
7567 else
7568 ret = **args.overflow_arg_area++;
7569 } */
7570
7571 tree
7572 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
7573 tree *post_p ATTRIBUTE_UNUSED)
7574 {
7575 tree f_gpr, f_fpr, f_ovf, f_sav;
7576 tree gpr, fpr, ovf, sav, reg, t, u;
7577 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
7578 tree lab_false, lab_over, addr;
7579
7580 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7581 f_fpr = TREE_CHAIN (f_gpr);
7582 f_ovf = TREE_CHAIN (f_fpr);
7583 f_sav = TREE_CHAIN (f_ovf);
7584
7585 valist = build_va_arg_indirect_ref (valist);
7586 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7587 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7588 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7589 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7590
7591 size = int_size_in_bytes (type);
7592
7593 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
7594 {
7595 if (TARGET_DEBUG_ARG)
7596 {
7597 fprintf (stderr, "va_arg: aggregate type");
7598 debug_tree (type);
7599 }
7600
7601 /* Aggregates are passed by reference. */
7602 indirect_p = 1;
7603 reg = gpr;
7604 n_reg = 1;
7605
7606 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7607 will be added by s390_frame_info because for va_args an even number
7608 of gprs always has to be saved (r2-r15 = 14 regs). */
7609 sav_ofs = (TARGET_KERNEL_BACKCHAIN
7610 ? (TARGET_64BIT ? 4 : 2) * 8 : 2 * UNITS_PER_WORD);
7611 sav_scale = UNITS_PER_WORD;
7612 size = UNITS_PER_WORD;
7613 max_reg = 4;
7614 }
7615 else if (s390_function_arg_float (TYPE_MODE (type), type))
7616 {
7617 if (TARGET_DEBUG_ARG)
7618 {
7619 fprintf (stderr, "va_arg: float type");
7620 debug_tree (type);
7621 }
7622
7623 /* FP args go in FP registers, if present. */
7624 indirect_p = 0;
7625 reg = fpr;
7626 n_reg = 1;
7627 sav_ofs = TARGET_KERNEL_BACKCHAIN ? 0 : 16 * UNITS_PER_WORD;
7628 sav_scale = 8;
7629 /* TARGET_64BIT passes up to 4 parameters in fprs. */
7630 max_reg = TARGET_64BIT ? 3 : 1;
7631 }
7632 else
7633 {
7634 if (TARGET_DEBUG_ARG)
7635 {
7636 fprintf (stderr, "va_arg: other type");
7637 debug_tree (type);
7638 }
7639
7640 /* Otherwise into GP registers. */
7641 indirect_p = 0;
7642 reg = gpr;
7643 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7644
7645 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7646 will be added by s390_frame_info because for va_args an even number
7647 of gprs always has to be saved (r2-r15 = 14 regs). */
7648 sav_ofs = (TARGET_KERNEL_BACKCHAIN
7649            ? (TARGET_64BIT ? 4 : 2) * 8 : 2 * UNITS_PER_WORD);
7650
7651 if (size < UNITS_PER_WORD)
7652 sav_ofs += UNITS_PER_WORD - size;
7653
7654 sav_scale = UNITS_PER_WORD;
7655 if (n_reg > 1)
7656 max_reg = 3;
7657 else
7658 max_reg = 4;
7659 }
7660
7661 /* Pull the value out of the saved registers ... */
7662
7663 lab_false = create_artificial_label ();
7664 lab_over = create_artificial_label ();
7665 addr = create_tmp_var (ptr_type_node, "addr");
7666
7667 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
7668 t = build2 (GT_EXPR, boolean_type_node, reg, t);
7669 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7670 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7671 gimplify_and_add (t, pre_p);
7672
7673 t = build2 (PLUS_EXPR, ptr_type_node, sav,
7674 fold_convert (ptr_type_node, size_int (sav_ofs)));
7675 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
7676 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
7677 t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
7678
7679 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
7680 gimplify_and_add (t, pre_p);
7681
7682 t = build1 (GOTO_EXPR, void_type_node, lab_over);
7683 gimplify_and_add (t, pre_p);
7684
7685 t = build1 (LABEL_EXPR, void_type_node, lab_false);
7686 append_to_statement_list (t, pre_p);
7687
7688
7689 /* ... Otherwise out of the overflow area. */
7690
7691 t = ovf;
7692 if (size < UNITS_PER_WORD)
7693 t = build2 (PLUS_EXPR, ptr_type_node, t,
7694 fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
7695
7696 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7697
7698 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
7699 gimplify_and_add (u, pre_p);
7700
7701 t = build2 (PLUS_EXPR, ptr_type_node, t,
7702 fold_convert (ptr_type_node, size_int (size)));
7703 t = build2 (MODIFY_EXPR, ptr_type_node, ovf, t);
7704 gimplify_and_add (t, pre_p);
7705
7706 t = build1 (LABEL_EXPR, void_type_node, lab_over);
7707 append_to_statement_list (t, pre_p);
7708
7709
7710 /* Increment register save count. */
7711
7712 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
7713 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
7714 gimplify_and_add (u, pre_p);
7715
7716 if (indirect_p)
7717 {
7718 t = build_pointer_type (build_pointer_type (type));
7719 addr = fold_convert (t, addr);
7720 addr = build_va_arg_indirect_ref (addr);
7721 }
7722 else
7723 {
7724 t = build_pointer_type (type);
7725 addr = fold_convert (t, addr);
7726 }
7727
7728 return build_va_arg_indirect_ref (addr);
7729 }
7730
7731
7732 /* Builtins. */
7733
7734 enum s390_builtin
7735 {
7736 S390_BUILTIN_THREAD_POINTER,
7737 S390_BUILTIN_SET_THREAD_POINTER,
7738
7739 S390_BUILTIN_max
7740 };
7741
7742 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
7743 CODE_FOR_get_tp_64,
7744 CODE_FOR_set_tp_64
7745 };
7746
7747 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
7748 CODE_FOR_get_tp_31,
7749 CODE_FOR_set_tp_31
7750 };
7751
7752 static void
7753 s390_init_builtins (void)
7754 {
7755 tree ftype;
7756
7757 ftype = build_function_type (ptr_type_node, void_list_node);
7758 lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
7759 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
7760 NULL, NULL_TREE);
7761
7762 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
7763 lang_hooks.builtin_function ("__builtin_set_thread_pointer", ftype,
7764 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
7765 NULL, NULL_TREE);
7766 }
7767
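/* For illustration, source-level usage of the two builtins
   registered above:

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);

   On s390 the thread pointer lives in the access registers, which is
   what the get_tp/set_tp insn patterns expand to.  */
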
7768 /* Expand an expression EXP that calls a built-in function,
7769 with result going to TARGET if that's convenient
7770 (and in mode MODE if that's convenient).
7771 SUBTARGET may be used as the target for computing one of EXP's operands.
7772 IGNORE is nonzero if the value is to be ignored. */
7773
7774 static rtx
7775 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7776 enum machine_mode mode ATTRIBUTE_UNUSED,
7777 int ignore ATTRIBUTE_UNUSED)
7778 {
7779 #define MAX_ARGS 2
7780
7781 unsigned int const *code_for_builtin =
7782 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
7783
7784 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7785 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7786 tree arglist = TREE_OPERAND (exp, 1);
7787 enum insn_code icode;
7788 rtx op[MAX_ARGS], pat;
7789 int arity;
7790 bool nonvoid;
7791
7792 if (fcode >= S390_BUILTIN_max)
7793 internal_error ("bad builtin fcode");
7794 icode = code_for_builtin[fcode];
7795 if (icode == 0)
7796 internal_error ("bad builtin fcode");
7797
7798 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
7799
7800 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
7801 arglist;
7802 arglist = TREE_CHAIN (arglist), arity++)
7803 {
7804 const struct insn_operand_data *insn_op;
7805
7806 tree arg = TREE_VALUE (arglist);
7807 if (arg == error_mark_node)
7808 return NULL_RTX;
7809 if (arity > MAX_ARGS)
7810 return NULL_RTX;
7811
7812 insn_op = &insn_data[icode].operand[arity + nonvoid];
7813
7814 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
7815
7816 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
7817 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
7818 }
7819
7820 if (nonvoid)
7821 {
7822 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7823 if (!target
7824 || GET_MODE (target) != tmode
7825 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
7826 target = gen_reg_rtx (tmode);
7827 }
7828
7829 switch (arity)
7830 {
7831 case 0:
7832 pat = GEN_FCN (icode) (target);
7833 break;
7834 case 1:
7835 if (nonvoid)
7836 pat = GEN_FCN (icode) (target, op[0]);
7837 else
7838 pat = GEN_FCN (icode) (op[0]);
7839 break;
7840 case 2:
7841 pat = GEN_FCN (icode) (target, op[0], op[1]);
7842 break;
7843 default:
7844 abort ();
7845 }
7846 if (!pat)
7847 return NULL_RTX;
7848 emit_insn (pat);
7849
7850 if (nonvoid)
7851 return target;
7852 else
7853 return const0_rtx;
7854 }
7855
7856
7857 /* Output assembly code for the trampoline template to
7858 stdio stream FILE.
7859
7860 On S/390, we use gpr 1 internally in the trampoline code;
7861 gpr 0 is used to hold the static chain. */
7862
7863 void
7864 s390_trampoline_template (FILE *file)
7865 {
7866 rtx op[2];
7867 op[0] = gen_rtx_REG (Pmode, 0);
7868 op[1] = gen_rtx_REG (Pmode, 1);
7869
7870 if (TARGET_64BIT)
7871 {
7872 output_asm_insn ("basr\t%1,0", op);
7873 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
7874 output_asm_insn ("br\t%1", op);
7875 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
7876 }
7877 else
7878 {
7879 output_asm_insn ("basr\t%1,0", op);
7880 output_asm_insn ("lm\t%0,%1,6(%1)", op);
7881 output_asm_insn ("br\t%1", op);
7882 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
7883 }
7884 }
7885
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  emit_move_insn (gen_rtx_MEM (Pmode,
                  memory_address (Pmode,
                  plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
  emit_move_insn (gen_rtx_MEM (Pmode,
                  memory_address (Pmode,
                  plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
}

/* Return rtx for 64-bit constant formed from the 32-bit subwords
   LOW and HIGH, independent of the host word size.  */

rtx
s390_gen_rtx_const_DI (int high, int low)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  HOST_WIDE_INT val;
  val = (HOST_WIDE_INT)high;
  val <<= 32;
  /* Zero-extend LOW; a plain (HOST_WIDE_INT) cast would sign-extend
     and smear a set sign bit of LOW into the HIGH half.  */
  val |= (HOST_WIDE_INT)(unsigned int)low;

  return GEN_INT (val);
#else
#if HOST_BITS_PER_WIDE_INT >= 32
  return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
#else
  abort ();
#endif
#endif
}

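/* E.g. s390_gen_rtx_const_DI (0xdeadbeef, 0x12345678) yields an RTX
   representing 0xdeadbeef12345678 -- a const_int on hosts with a
   64-bit HOST_WIDE_INT, a const_double otherwise.  */
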
/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */

void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];

  char label[128];
  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);

  fprintf (file, "# function profiler\n");

  op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
  op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));

  op[2] = gen_rtx_REG (Pmode, 1);
  op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
  SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;

  op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
  if (flag_pic)
    {
      op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
      op[4] = gen_rtx_CONST (Pmode, op[4]);
    }

  if (TARGET_64BIT)
    {
      output_asm_insn ("stg\t%0,%1", op);
      output_asm_insn ("larl\t%2,%3", op);
      output_asm_insn ("brasl\t%0,%4", op);
      output_asm_insn ("lg\t%0,%1", op);
    }
  else if (!flag_pic)
    {
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      output_asm_insn (".long\t%4", op);
      output_asm_insn (".long\t%3", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("l\t%0,0(%2)", op);
      output_asm_insn ("l\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
  else
    {
      op[5] = gen_label_rtx ();
      op[6] = gen_label_rtx ();

      output_asm_insn ("st\t%0,%1", op);
      output_asm_insn ("bras\t%2,%l6", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
      output_asm_insn (".long\t%4-%l5", op);
      output_asm_insn (".long\t%3-%l5", op);
      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
      output_asm_insn ("lr\t%0,%2", op);
      output_asm_insn ("a\t%0,0(%2)", op);
      output_asm_insn ("a\t%2,4(%2)", op);
      output_asm_insn ("basr\t%0,%0", op);
      output_asm_insn ("l\t%0,%1", op);
    }
}

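/* For TARGET_64BIT the emitted sequence is roughly (assuming LABELNO 0
   and the usual `.LP' prefix):

     stg   %r14,8(%r15)   # save return address above the stack pointer
     larl  %r1,.LP0       # address of the profiler counter label
     brasl %r14,_mcount   # call _mcount, clobbering %r14
     lg    %r14,8(%r15)   # restore return address

   The 31-bit variants build the _mcount and label addresses from a
   small literal pool instead of using larl/brasl.  */
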
/* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
   into its SYMBOL_REF_FLAGS.  */

static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  /* If a variable has a forced alignment to < 2 bytes, mark it with
     SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand.  */
  if (TREE_CODE (decl) == VAR_DECL
      && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
}

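/* LARL encodes its immediate as a halfword offset, so it can only form
   even addresses.  A variable forced to 1-byte alignment (DECL_ALIGN is
   in bits, hence the `< 16' test) may land at an odd address and must
   therefore never become a LARL operand.  */
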
/* Output thunk to FILE that implements a C++ virtual function call (with
   multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
   by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
   stored at VCALL_OFFSET in the vtable whose address is located at offset 0
   relative to the resulting this pointer.  */

static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Operand 0 is the target function.  */
  op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
                              TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }

  /* Operand 1 is the 'this' pointer.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);

  /* Operand 2 is the delta.  */
  op[2] = GEN_INT (delta);

  /* Operand 3 is the vcall_offset.  */
  op[3] = GEN_INT (vcall_offset);

  /* Operand 4 is the temporary register.  */
  op[4] = gen_rtx_REG (Pmode, 1);

  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used for temporary register.  */
  op[9] = NULL_RTX;

  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Set up the literal pool pointer if required.  */
      if ((!DISP_IN_RANGE (delta)
           && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("larl\t%4,%5", op);
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
            output_asm_insn ("aghi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("agf\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  */
      if (vcall_offset)
        {
          if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("lg\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
            {
              output_asm_insn ("lghi\t%4,%3", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
              output_asm_insn ("ag\t%4,0(%1)", op);
              output_asm_insn ("ag\t%1,0(%4)", op);
            }
        }

      /* Jump to target.  */
      output_asm_insn ("jg\t%0", op);

      /* Output literal pool if required.  */
      if (op[5])
        {
          output_asm_insn (".align\t4", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }
      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
  else
    {
      /* Set up the base pointer if required.  Note that the second
         clause must test VCALL_OFFSET, not DELTA, against DISP_IN_RANGE;
         testing DELTA twice would skip the base pointer exactly when an
         out-of-range VCALL_OFFSET needs it.  */
      if (!vcall_offset
          || (!DISP_IN_RANGE (delta)
              && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
          || (!DISP_IN_RANGE (vcall_offset)
              && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
        {
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Add DELTA to this pointer.  */
      if (delta)
        {
          if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
            output_asm_insn ("la\t%1,%2(%1)", op);
          else if (DISP_IN_RANGE (delta))
            output_asm_insn ("lay\t%1,%2(%1)", op);
          else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
            output_asm_insn ("ahi\t%1,%2", op);
          else
            {
              op[6] = gen_label_rtx ();
              output_asm_insn ("a\t%1,%6-%5(%4)", op);
            }
        }

      /* Perform vcall adjustment.  This is 31-bit code, so the vtable
         pointer is loaded with `l', not the 64-bit `lg'.  */
      if (vcall_offset)
        {
          if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,%3(%4)", op);
            }
          else if (DISP_IN_RANGE (vcall_offset))
            {
              output_asm_insn ("l\t%4,0(%1)", op);
              output_asm_insn ("ay\t%1,%3(%4)", op);
            }
          else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
            {
              output_asm_insn ("lhi\t%4,%3", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }
          else
            {
              op[7] = gen_label_rtx ();
              output_asm_insn ("l\t%4,%7-%5(%4)", op);
              output_asm_insn ("a\t%4,0(%1)", op);
              output_asm_insn ("a\t%1,0(%4)", op);
            }

          /* We had to clobber the base pointer register.
             Re-set up the base pointer (with a different base).  */
          op[5] = gen_label_rtx ();
          output_asm_insn ("basr\t%4,0", op);
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[5]));
        }

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
        output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
        output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
        {
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("l\t%4,%0(%4)", op);
        }
      else if (flag_pic == 2)
        {
          op[9] = gen_rtx_REG (Pmode, 0);
          output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
          output_asm_insn ("a\t%4,%8-%5(%4)", op);
          output_asm_insn ("ar\t%4,%9", op);
          output_asm_insn ("l\t%4,0(%4)", op);
        }

      output_asm_insn ("br\t%4", op);

      /* Output literal pool.  */
      output_asm_insn (".align\t4", op);

      if (nonlocal && flag_pic == 2)
        output_asm_insn (".long\t%0", op);
      if (nonlocal)
        {
          op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
          SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
        }

      targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
        output_asm_insn (".long\t%0", op);
      else
        output_asm_insn (".long\t%0-%5", op);

      if (op[6])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[6]));
          output_asm_insn (".long\t%2", op);
        }
      if (op[7])
        {
          targetm.asm_out.internal_label (file, "L",
                                          CODE_LABEL_NUMBER (op[7]));
          output_asm_insn (".long\t%3", op);
        }
    }
}

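/* E.g. for a 64-bit thunk with DELTA = 16 and VCALL_OFFSET = 0 (both in
   range), the whole thunk reduces to:

     la  %r2,16(%r2)   # this += 16
     jg  function      # tail jump to the real method

   with %r3 instead of %r2 when the function returns an aggregate in
   memory, since the hidden return-slot pointer then occupies %r2.  */
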
bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}

/* How to allocate a 'struct machine_function'.  */

static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (struct machine_function));
}

/* Checks whether the given ARGUMENT_LIST would use a call-saved
   register.  This is used to decide whether sibling call
   optimization could be performed on the respective function
   call.  */

static bool
s390_call_saved_register_used (tree argument_list)
{
  CUMULATIVE_ARGS cum;
  tree parameter;
  enum machine_mode mode;
  tree type;
  rtx parm_rtx;
  int reg;

  INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);

  while (argument_list)
    {
      parameter = TREE_VALUE (argument_list);
      argument_list = TREE_CHAIN (argument_list);

      if (!parameter)
        abort ();

      /* For an undeclared variable passed as parameter we will get
         an ERROR_MARK node here.  */
      if (TREE_CODE (parameter) == ERROR_MARK)
        return true;

      if (! (type = TREE_TYPE (parameter)))
        abort ();

      if (! (mode = TYPE_MODE (type)))
        abort ();

      if (pass_by_reference (&cum, mode, type, true))
        {
          mode = Pmode;
          type = build_pointer_type (type);
        }

      parm_rtx = s390_function_arg (&cum, mode, type, 0);

      s390_function_arg_advance (&cum, mode, type, 0);

      if (parm_rtx && REG_P (parm_rtx))
        {
          for (reg = 0;
               reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
               reg++)
            if (! call_used_regs[reg + REGNO (parm_rtx)])
              return true;
        }
    }
  return false;
}

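/* On s390 the integer argument registers are %r2..%r6, and %r6 is
   call-saved in both ABIs; so e.g. a call taking five integer or
   pointer arguments places the fifth one in %r6 and is flagged by the
   function above.  */
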
/* Return true if the given call expression can be
   turned into a sibling call.
   DECL holds the declaration of the function to be called whereas
   EXP is the call expression itself.  */

static bool
s390_function_ok_for_sibcall (tree decl, tree exp)
{
  /* The TPF epilogue uses register 1.  */
  if (TARGET_TPF_PROFILING)
    return false;

  /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
     which would have to be restored before the sibcall.  */
  if (!TARGET_64BIT && flag_pic && decl && TREE_PUBLIC (decl))
    return false;

  /* Register 6 on s390 is available as an argument register but is
     call-saved, i.e. its entry value must be intact on return.  A
     sibcall would have to clobber it to pass an argument, so functions
     needing this register for arguments are not suitable for sibcalls.  */
  if (TREE_OPERAND (exp, 1)
      && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
    return false;

  return true;
}

/* Return the fixed registers used for condition codes.  */

static bool
s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
{
  *p1 = CC_REGNUM;
  *p2 = INVALID_REGNUM;

  return true;
}

/* This function is used by the call expanders of the machine description.
   It emits the call insn itself together with the necessary operations
   to adjust the target address and returns the emitted insn.
   ADDR_LOCATION is the target address rtx
   TLS_CALL the location of the thread-local symbol
   RESULT_REG the register where the result of the call should be stored
   RETADDR_REG the register where the return address should be stored
               If this parameter is NULL_RTX the call is considered
               to be a sibling call.  */

rtx
s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
                rtx retaddr_reg)
{
  bool plt_call = false;
  rtx insn;
  rtx call;
  rtx clobber;
  rtvec vec;

  /* Direct function calls need special treatment.  */
  if (GET_CODE (addr_location) == SYMBOL_REF)
    {
      /* When calling a global routine in PIC mode, we must
         replace the symbol itself with the PLT stub.  */
      if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
        {
          addr_location = gen_rtx_UNSPEC (Pmode,
                                          gen_rtvec (1, addr_location),
                                          UNSPEC_PLT);
          addr_location = gen_rtx_CONST (Pmode, addr_location);
          plt_call = true;
        }

      /* Unless we can use the bras(l) insn, force the
         routine address into a register.  */
      if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
        {
          if (flag_pic)
            addr_location = legitimize_pic_address (addr_location, 0);
          else
            addr_location = force_reg (Pmode, addr_location);
        }
    }

  /* If it is already an indirect call or the code above moved the
     SYMBOL_REF to somewhere else, make sure the address can be found
     in register 1.  */
  if (retaddr_reg == NULL_RTX
      && GET_CODE (addr_location) != SYMBOL_REF
      && !plt_call)
    {
      emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
      addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
    }

  addr_location = gen_rtx_MEM (QImode, addr_location);
  call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);

  if (result_reg != NULL_RTX)
    call = gen_rtx_SET (VOIDmode, result_reg, call);

  if (retaddr_reg != NULL_RTX)
    {
      clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);

      if (tls_call != NULL_RTX)
        vec = gen_rtvec (3, call, clobber,
                         gen_rtx_USE (VOIDmode, tls_call));
      else
        vec = gen_rtvec (2, call, clobber);

      call = gen_rtx_PARALLEL (VOIDmode, vec);
    }

  insn = emit_call_insn (call);

  /* 31-bit PLT stubs and tls calls use the GOT register implicitly.  */
  if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
    {
      /* s390_function_ok_for_sibcall should
         have denied sibcalls in this case.  */
      if (retaddr_reg == NULL_RTX)
        abort ();

      use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
    }
  return insn;
}

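/* Illustrative uses matching the contract above (register choices are
   per the s390 ABI, not mandated by this function):

     - a normal call clobbering the return-address register:
         s390_emit_call (addr, NULL_RTX, NULL_RTX,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
     - a sibling call (RETADDR_REG == NULL_RTX, nothing clobbered):
         s390_emit_call (addr, NULL_RTX, NULL_RTX, NULL_RTX);  */
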
/* Implement CONDITIONAL_REGISTER_USAGE.  */

void
s390_conditional_register_usage (void)
{
  int i;

  if (flag_pic)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
  if (TARGET_CPU_ZARCH)
    {
      fixed_regs[RETURN_REGNUM] = 0;
      call_used_regs[RETURN_REGNUM] = 0;
    }
  if (TARGET_64BIT)
    {
      for (i = 24; i < 32; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }
  else
    {
      for (i = 18; i < 20; i++)
        call_used_regs[i] = call_really_used_regs[i] = 0;
    }

  if (TARGET_SOFT_FLOAT)
    {
      for (i = 16; i < 32; i++)
        call_used_regs[i] = fixed_regs[i] = 1;
    }
}

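/* The magic ranges above index GCC's FPR numbering (hard regs 16-31):
   in the 64-bit ABI regs 24-31 correspond to %f8-%f15, which are
   call-saved; in the 31-bit ABI only regs 18-19 (%f4 and %f6) are.  */
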
/* Function corresponding to the eh_return expander; emits the
   TPF-specific call to __tpf_eh_return.  */

static GTY(()) rtx s390_tpf_eh_return_symbol;

void
s390_emit_tpf_eh_return (rtx target)
{
  rtx insn, reg;

  if (!s390_tpf_eh_return_symbol)
    s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");

  reg = gen_rtx_REG (Pmode, 2);

  emit_move_insn (reg, target);
  insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);

  emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
}

#include "gt-s390.h"