1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
53 #include "tree-gimple.h"
55 /* Machine-specific symbol_ref flags. */
56 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
59 static bool s390_assemble_integer (rtx
, unsigned int, int);
60 static void s390_encode_section_info (tree
, rtx
, int);
61 static bool s390_cannot_force_const_mem (rtx
);
62 static rtx
s390_delegitimize_address (rtx
);
63 static bool s390_return_in_memory (tree
, tree
);
64 static void s390_init_builtins (void);
65 static rtx
s390_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
66 static void s390_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
,
68 static enum attr_type
s390_safe_attr_type (rtx
);
70 static int s390_adjust_priority (rtx
, int);
71 static int s390_issue_rate (void);
72 static int s390_first_cycle_multipass_dfa_lookahead (void);
73 static bool s390_cannot_copy_insn_p (rtx
);
74 static bool s390_rtx_costs (rtx
, int, int, int *);
75 static int s390_address_cost (rtx
);
76 static void s390_reorg (void);
77 static bool s390_valid_pointer_mode (enum machine_mode
);
78 static tree
s390_build_builtin_va_list (void);
79 static tree
s390_gimplify_va_arg (tree
, tree
, tree
*, tree
*);
80 static bool s390_function_ok_for_sibcall (tree
, tree
);
81 static bool s390_call_saved_register_used (tree
);
82 static bool s390_pass_by_reference (CUMULATIVE_ARGS
*, enum machine_mode mode
,
84 static bool s390_fixed_condition_code_regs (unsigned int *, unsigned int *);
86 #undef TARGET_ASM_ALIGNED_HI_OP
87 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
88 #undef TARGET_ASM_ALIGNED_DI_OP
89 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
90 #undef TARGET_ASM_INTEGER
91 #define TARGET_ASM_INTEGER s390_assemble_integer
93 #undef TARGET_ASM_OPEN_PAREN
94 #define TARGET_ASM_OPEN_PAREN ""
96 #undef TARGET_ASM_CLOSE_PAREN
97 #define TARGET_ASM_CLOSE_PAREN ""
99 #undef TARGET_ENCODE_SECTION_INFO
100 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
103 #undef TARGET_HAVE_TLS
104 #define TARGET_HAVE_TLS true
106 #undef TARGET_CANNOT_FORCE_CONST_MEM
107 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
109 #undef TARGET_DELEGITIMIZE_ADDRESS
110 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
112 #undef TARGET_RETURN_IN_MEMORY
113 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
115 #undef TARGET_INIT_BUILTINS
116 #define TARGET_INIT_BUILTINS s390_init_builtins
117 #undef TARGET_EXPAND_BUILTIN
118 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
120 #undef TARGET_ASM_OUTPUT_MI_THUNK
121 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
122 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
123 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
125 #undef TARGET_SCHED_ADJUST_PRIORITY
126 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
127 #undef TARGET_SCHED_ISSUE_RATE
128 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
129 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
130 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
132 #undef TARGET_CANNOT_COPY_INSN_P
133 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
134 #undef TARGET_RTX_COSTS
135 #define TARGET_RTX_COSTS s390_rtx_costs
136 #undef TARGET_ADDRESS_COST
137 #define TARGET_ADDRESS_COST s390_address_cost
139 #undef TARGET_MACHINE_DEPENDENT_REORG
140 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
142 #undef TARGET_VALID_POINTER_MODE
143 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
145 #undef TARGET_BUILD_BUILTIN_VA_LIST
146 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
147 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
148 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
150 #undef TARGET_PROMOTE_FUNCTION_ARGS
151 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
152 #undef TARGET_PROMOTE_FUNCTION_RETURN
153 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
154 #undef TARGET_PASS_BY_REFERENCE
155 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
157 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
158 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
160 #undef TARGET_FIXED_CONDITION_CODE_REGS
161 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
163 struct gcc_target targetm
= TARGET_INITIALIZER
;
165 extern int reload_completed
;
167 /* The alias set for prologue/epilogue register save/restore. */
168 static int s390_sr_alias_set
= 0;
170 /* Save information from a "cmpxx" operation until the branch or scc is
172 rtx s390_compare_op0
, s390_compare_op1
;
174 /* Structure used to hold the components of a S/390 memory
175 address. A legitimate address on S/390 is of the general
177 base + index + displacement
178 where any of the components is optional.
180 base and index are registers of the class ADDR_REGS,
181 displacement is an unsigned 12-bit immediate constant. */
191 /* Which cpu are we tuning for. */
192 enum processor_type s390_tune
;
193 enum processor_flags s390_tune_flags
;
194 /* Which instruction set architecture to use. */
195 enum processor_type s390_arch
;
196 enum processor_flags s390_arch_flags
;
198 /* Strings to hold which cpu and instruction set architecture to use. */
199 const char *s390_tune_string
; /* for -mtune=<xxx> */
200 const char *s390_arch_string
; /* for -march=<xxx> */
202 /* String to specify backchain mode:
203 "" no-backchain, "1" backchain, "2" kernel-backchain. */
204 const char *s390_backchain_string
= TARGET_DEFAULT_BACKCHAIN
;
206 const char *s390_warn_framesize_string
;
207 const char *s390_warn_dynamicstack_string
;
208 const char *s390_stack_size_string
;
209 const char *s390_stack_guard_string
;
211 HOST_WIDE_INT s390_warn_framesize
= 0;
212 bool s390_warn_dynamicstack_p
= 0;
213 HOST_WIDE_INT s390_stack_size
= 0;
214 HOST_WIDE_INT s390_stack_guard
= 0;
216 /* The following structure is embedded in the machine
217 specific part of struct function. */
219 struct s390_frame_layout
GTY (())
221 /* Offset within stack frame. */
222 HOST_WIDE_INT gprs_offset
;
223 HOST_WIDE_INT f0_offset
;
224 HOST_WIDE_INT f4_offset
;
225 HOST_WIDE_INT f8_offset
;
226 HOST_WIDE_INT backchain_offset
;
228 /* Number of first and last gpr to be saved, restored. */
230 int first_restore_gpr
;
232 int last_restore_gpr
;
234 /* Bits standing for floating point registers. Set, if the
235 respective register has to be saved. Starting with reg 16 (f0)
236 at the rightmost bit.
237 Bit 15 - 8 7 6 5 4 3 2 1 0
238 fpr 15 - 8 7 5 3 1 6 4 2 0
239 reg 31 - 24 23 22 21 20 19 18 17 16 */
240 unsigned int fpr_bitmap
;
242 /* Number of floating point registers f8-f15 which must be saved. */
245 /* Set if return address needs to be saved. */
246 bool save_return_addr_p
;
248 /* Set if backchain needs to be saved. */
249 bool save_backchain_p
;
251 /* Size of stack frame. */
252 HOST_WIDE_INT frame_size
;
255 /* Define the structure for the machine field in struct function. */
257 struct machine_function
GTY(())
259 struct s390_frame_layout frame_layout
;
261 /* Literal pool base register. */
264 /* True if we may need to perform branch splitting. */
265 bool split_branches_pending_p
;
267 /* Some local-dynamic TLS symbol name. */
268 const char *some_ld_name
;
271 /* Few accessor macros for struct cfun->machine->s390_frame_layout. */
273 #define cfun_frame_layout (cfun->machine->frame_layout)
274 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
275 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr - \
276 cfun_frame_layout.first_save_gpr + 1) * UNITS_PER_WORD)
277 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
279 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
282 static int s390_match_ccmode_set (rtx
, enum machine_mode
);
283 static int s390_branch_condition_mask (rtx
);
284 static const char *s390_branch_condition_mnemonic (rtx
, int);
285 static int check_mode (rtx
, enum machine_mode
*);
286 static int s390_short_displacement (rtx
);
287 static int s390_decompose_address (rtx
, struct s390_address
*);
288 static rtx
get_thread_pointer (void);
289 static rtx
legitimize_tls_address (rtx
, rtx
);
290 static void print_shift_count_operand (FILE *, rtx
);
291 static const char *get_some_local_dynamic_name (void);
292 static int get_some_local_dynamic_name_1 (rtx
*, void *);
293 static int reg_used_in_mem_p (int, rtx
);
294 static int addr_generation_dependency_p (rtx
, rtx
);
295 static int s390_split_branches (void);
296 static void annotate_constant_pool_refs (rtx
*x
);
297 static void find_constant_pool_ref (rtx
, rtx
*);
298 static void replace_constant_pool_ref (rtx
*, rtx
, rtx
);
299 static rtx
find_ltrel_base (rtx
);
300 static void replace_ltrel_base (rtx
*);
301 static void s390_optimize_prologue (void);
302 static int find_unused_clobbered_reg (void);
303 static void s390_frame_area (int *, int *);
304 static void s390_register_info (int []);
305 static void s390_frame_info (void);
306 static void s390_init_frame_layout (void);
307 static void s390_update_frame_layout (void);
308 static rtx
save_fpr (rtx
, int, int);
309 static rtx
restore_fpr (rtx
, int, int);
310 static rtx
save_gprs (rtx
, int, int, int);
311 static rtx
restore_gprs (rtx
, int, int, int);
312 static int s390_function_arg_size (enum machine_mode
, tree
);
313 static bool s390_function_arg_float (enum machine_mode
, tree
);
314 static struct machine_function
* s390_init_machine_status (void);
316 /* Check whether integer displacement is in range. */
317 #define DISP_IN_RANGE(d) \
318 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
319 : ((d) >= 0 && (d) <= 4095))
321 /* Return true if SET either doesn't set the CC register, or else
322 the source and destination have matching CC modes and that
323 CC mode is at least as constrained as REQ_MODE. */
/* Return nonzero iff SET either does not touch the CC register, or sets
   it in a mode compatible with REQ_MODE (per the comment preceding this
   function).
   NOTE(review): this listing has interior lines elided — the embedded
   original line numbers jump (336 -> 350, etc.) — so braces and several
   early returns are missing.  Verify against upstream s390.c before
   editing.  */
326 s390_match_ccmode_set (rtx set
, enum machine_mode req_mode
)
328 enum machine_mode set_mode
;
/* Not a SET at all: cannot affect CC.  */
330 if (GET_CODE (set
) != SET
)
/* Only SETs whose destination is the CC register are examined further.  */
333 if (GET_CODE (SET_DEST (set
)) != REG
|| !CC_REGNO_P (REGNO (SET_DEST (set
))))
336 set_mode
= GET_MODE (SET_DEST (set
));
350 if (req_mode
!= set_mode
)
/* Presumably distinguishes CC-mode families with different matching
   rules — confirm against upstream (elided code between fragments).  */
355 if (req_mode
!= CCSmode
&& req_mode
!= CCUmode
&& req_mode
!= CCTmode
356 && req_mode
!= CCSRmode
&& req_mode
!= CCURmode
)
362 if (req_mode
!= CCAmode
)
/* Compatible iff the source is computed in the destination's CC mode.  */
370 return (GET_MODE (SET_SRC (set
)) == set_mode
);
373 /* Return true if every SET in INSN that sets the CC register
374 has source and destination with matching CC modes and that
375 CC mode is at least as constrained as REQ_MODE.
376 If REQ_MODE is VOIDmode, always return false. */
/* Return true iff every CC-setting SET inside INSN's pattern (a single
   SET or a PARALLEL of SETs) is compatible with REQ_MODE, delegating the
   per-SET check to s390_match_ccmode_set.
   NOTE(review): interior lines elided (braces, declaration of `i`, final
   return) — verify against upstream s390.c.  */
379 s390_match_ccmode (rtx insn
, enum machine_mode req_mode
)
383 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
384 if (req_mode
== VOIDmode
)
/* Simple case: the whole pattern is one SET.  */
387 if (GET_CODE (PATTERN (insn
)) == SET
)
388 return s390_match_ccmode_set (PATTERN (insn
), req_mode
);
/* PARALLEL: every member SET must individually match.  */
390 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
391 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
393 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
394 if (GET_CODE (set
) == SET
)
395 if (!s390_match_ccmode_set (set
, req_mode
))
402 /* If a test-under-mask instruction can be used to implement
403 (compare (and ... OP1) OP2), return the CC mode required
404 to do that. Otherwise, return VOIDmode.
405 MIXED is true if the instruction can distinguish between
406 CC1 and CC2 for mixed selected bits (TMxx), it is false
407 if the instruction cannot (TM). */
/* Return the CC mode needed to implement (compare (and ... OP1) OP2) via
   a test-under-mask instruction, or VOIDmode if not possible (see comment
   above).  MIXED distinguishes TMxx from plain TM.
   NOTE(review): interior lines elided (return values for the CC0/CC3
   cases, declarations of bit0/bit1, final fallback return) — verify
   against upstream s390.c.  */
410 s390_tm_ccmode (rtx op1
, rtx op2
, int mixed
)
414 /* ??? Fixme: should work on CONST_DOUBLE as well. */
415 if (GET_CODE (op1
) != CONST_INT
|| GET_CODE (op2
) != CONST_INT
)
418 /* Selected bits all zero: CC0. */
419 if (INTVAL (op2
) == 0)
422 /* Selected bits all one: CC3. */
423 if (INTVAL (op2
) == INTVAL (op1
))
426 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
429 bit1
= exact_log2 (INTVAL (op2
));
430 bit0
= exact_log2 (INTVAL (op1
) ^ INTVAL (op2
));
431 if (bit0
!= -1 && bit1
!= -1)
432 return bit0
> bit1
? CCT1mode
: CCT2mode
;
438 /* Given a comparison code OP (EQ, NE, etc.) and the operands
439 OP0 and OP1 of a COMPARE, return the mode to be used for the
/* Given comparison code CODE and COMPARE operands OP0/OP1, choose the CC
   mode to use (see comment above).
   NOTE(review): heavily elided — the switch framing over CODE, all
   `return CCxxmode;` statements and most braces are missing (embedded
   line numbers jump: 479 -> 488, 494 -> 508, ...).  Only the condition
   fragments survive; verify against upstream s390.c before editing.  */
443 s390_select_ccmode (enum rtx_code code
, rtx op0
, rtx op1
)
449 if ((GET_CODE (op0
) == NEG
|| GET_CODE (op0
) == ABS
)
450 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
452 if (GET_CODE (op0
) == PLUS
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
453 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0
, 1)), 'K', "K"))
455 if ((GET_CODE (op0
) == PLUS
|| GET_CODE (op0
) == MINUS
456 || GET_CODE (op1
) == NEG
)
457 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
/* AND against OP1: try to use a test-under-mask CC mode.  */
460 if (GET_CODE (op0
) == AND
)
462 /* Check whether we can potentially do it via TM. */
463 enum machine_mode ccmode
;
464 ccmode
= s390_tm_ccmode (XEXP (op0
, 1), op1
, 1);
465 if (ccmode
!= VOIDmode
)
467 /* Relax CCTmode to CCZmode to allow fall-back to AND
468 if that turns out to be beneficial. */
469 return ccmode
== CCTmode
? CCZmode
: ccmode
;
/* Narrow HImode / QImode register compares against all-ones constants.  */
473 if (register_operand (op0
, HImode
)
474 && GET_CODE (op1
) == CONST_INT
475 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 65535))
477 if (register_operand (op0
, QImode
)
478 && GET_CODE (op1
) == CONST_INT
479 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 255))
488 if ((GET_CODE (op0
) == NEG
|| GET_CODE (op0
) == ABS
)
489 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
491 if (GET_CODE (op0
) == PLUS
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
492 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0
, 1)), 'K', "K"))
494 if (INTVAL (XEXP((op0
), 1)) < 0)
508 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
509 && GET_CODE (op1
) != CONST_INT
)
515 if (GET_CODE (op0
) == PLUS
516 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
519 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
520 && GET_CODE (op1
) != CONST_INT
)
526 if (GET_CODE (op0
) == MINUS
527 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
530 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
531 && GET_CODE (op1
) != CONST_INT
)
540 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
541 that we can implement more efficiently. */
544 s390_canonicalize_comparison (enum rtx_code
*code
, rtx
*op0
, rtx
*op1
)
546 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
547 if ((*code
== EQ
|| *code
== NE
)
548 && *op1
== const0_rtx
549 && GET_CODE (*op0
) == ZERO_EXTRACT
550 && GET_CODE (XEXP (*op0
, 1)) == CONST_INT
551 && GET_CODE (XEXP (*op0
, 2)) == CONST_INT
552 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0
, 0))))
554 rtx inner
= XEXP (*op0
, 0);
555 HOST_WIDE_INT modesize
= GET_MODE_BITSIZE (GET_MODE (inner
));
556 HOST_WIDE_INT len
= INTVAL (XEXP (*op0
, 1));
557 HOST_WIDE_INT pos
= INTVAL (XEXP (*op0
, 2));
559 if (len
> 0 && len
< modesize
560 && pos
>= 0 && pos
+ len
<= modesize
561 && modesize
<= HOST_BITS_PER_WIDE_INT
)
563 unsigned HOST_WIDE_INT block
;
564 block
= ((unsigned HOST_WIDE_INT
) 1 << len
) - 1;
565 block
<<= modesize
- pos
- len
;
567 *op0
= gen_rtx_AND (GET_MODE (inner
), inner
,
568 gen_int_mode (block
, GET_MODE (inner
)));
572 /* Narrow AND of memory against immediate to enable TM. */
573 if ((*code
== EQ
|| *code
== NE
)
574 && *op1
== const0_rtx
575 && GET_CODE (*op0
) == AND
576 && GET_CODE (XEXP (*op0
, 1)) == CONST_INT
577 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0
, 0))))
579 rtx inner
= XEXP (*op0
, 0);
580 rtx mask
= XEXP (*op0
, 1);
582 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
583 if (GET_CODE (inner
) == SUBREG
584 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner
)))
585 && (GET_MODE_SIZE (GET_MODE (inner
))
586 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner
))))
588 & GET_MODE_MASK (GET_MODE (inner
))
589 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner
))))
591 inner
= SUBREG_REG (inner
);
593 /* Do not change volatile MEMs. */
594 if (MEM_P (inner
) && !MEM_VOLATILE_P (inner
))
596 int part
= s390_single_part (XEXP (*op0
, 1),
597 GET_MODE (inner
), QImode
, 0);
600 mask
= gen_int_mode (s390_extract_part (mask
, QImode
, 0), QImode
);
601 inner
= adjust_address_nv (inner
, QImode
, part
);
602 *op0
= gen_rtx_AND (QImode
, inner
, mask
);
607 /* Narrow comparisons against 0xffff to HImode if possible. */
608 if ((*code
== EQ
|| *code
== NE
)
609 && GET_CODE (*op1
) == CONST_INT
610 && INTVAL (*op1
) == 0xffff
611 && SCALAR_INT_MODE_P (GET_MODE (*op0
))
612 && (nonzero_bits (*op0
, GET_MODE (*op0
))
613 & ~(unsigned HOST_WIDE_INT
) 0xffff) == 0)
615 *op0
= gen_lowpart (HImode
, *op0
);
620 /* Remove redundant UNSPEC_CMPINT conversions if possible. */
621 if (GET_CODE (*op0
) == UNSPEC
622 && XINT (*op0
, 1) == UNSPEC_CMPINT
623 && XVECLEN (*op0
, 0) == 1
624 && GET_MODE (XVECEXP (*op0
, 0, 0)) == CCUmode
625 && GET_CODE (XVECEXP (*op0
, 0, 0)) == REG
626 && REGNO (XVECEXP (*op0
, 0, 0)) == CC_REGNUM
627 && *op1
== const0_rtx
)
629 enum rtx_code new_code
= UNKNOWN
;
632 case EQ
: new_code
= EQ
; break;
633 case NE
: new_code
= NE
; break;
634 case LT
: new_code
= GTU
; break;
635 case GT
: new_code
= LTU
; break;
636 case LE
: new_code
= GEU
; break;
637 case GE
: new_code
= LEU
; break;
641 if (new_code
!= UNKNOWN
)
643 *op0
= XVECEXP (*op0
, 0, 0);
649 /* Emit a compare instruction suitable to implement the comparison
650 OP0 CODE OP1. Return the correct condition RTL to be placed in
651 the IF_THEN_ELSE of the conditional branch testing the result. */
/* Emit a compare of OP0 against OP1 into the CC register and return the
   condition RTX (CODE applied to cc vs const0) for use inside the
   IF_THEN_ELSE of the conditional branch (see comment above).
   NOTE(review): return-type line and braces elided by the extraction;
   body logic otherwise complete.  */
654 s390_emit_compare (enum rtx_code code
, rtx op0
, rtx op1
)
/* Pick the CC mode appropriate for this comparison.  */
656 enum machine_mode mode
= s390_select_ccmode (code
, op0
, op1
);
657 rtx cc
= gen_rtx_REG (mode
, CC_REGNUM
);
/* cc = COMPARE (op0, op1) in the selected mode.  */
659 emit_insn (gen_rtx_SET (VOIDmode
, cc
, gen_rtx_COMPARE (mode
, op0
, op1
)));
660 return gen_rtx_fmt_ee (code
, VOIDmode
, cc
, const0_rtx
);
663 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
664 unconditional jump, else a conditional jump under condition COND. */
/* Emit a jump to TARGET; unconditional when COND is NULL_RTX, otherwise
   conditional on COND (see comment above).
   NOTE(review): the `if (cond)` guard line and braces are elided here;
   verify against upstream s390.c.  */
667 s390_emit_jump (rtx target
, rtx cond
)
/* Wrap the raw label in a LABEL_REF.  */
671 target
= gen_rtx_LABEL_REF (VOIDmode
, target
);
/* Conditional form: (if_then_else cond target pc).  */
673 target
= gen_rtx_IF_THEN_ELSE (VOIDmode
, cond
, target
, pc_rtx
)
;
675 insn
= gen_rtx_SET (VOIDmode
, pc_rtx
, target
);
676 emit_jump_insn (insn
);
679 /* Return nonzero if OP is a valid comparison operator
680 for a branch condition in mode MODE. */
/* Return nonzero if OP is a valid branch-condition comparison in MODE:
   a comparison of the CC register against const0 whose code maps to a
   valid branch mask (see comment above).
   NOTE(review): intervening `return 0;` lines elided by the extraction.  */
683 s390_comparison (rtx op
, enum machine_mode mode
)
685 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
688 if (!COMPARISON_P (op
))
/* Must compare the CC register against zero.  */
691 if (GET_CODE (XEXP (op
, 0)) != REG
692 || REGNO (XEXP (op
, 0)) != CC_REGNUM
693 || XEXP (op
, 1) != const0_rtx
)
/* Valid iff a branch mask exists for this code/mode pair.  */
696 return s390_branch_condition_mask (op
) >= 0;
699 /* Return nonzero if OP is a valid comparison operator
700 for an ALC condition in mode MODE. */
/* Return nonzero if OP is a comparison usable as an ALC (add logical
   with carry) condition in MODE (see comment above).
   NOTE(review): the `case CCxxmode:` labels of the final switch and the
   strip-extension statement inside the while loop are elided — only the
   per-mode `return GET_CODE (op) == ...;` fragments survive.  Verify
   which CC mode each return belongs to against upstream s390.c.  */
703 s390_alc_comparison (rtx op
, enum machine_mode mode
)
705 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
/* Strip any surrounding extensions.  */
708 while (GET_CODE (op
) == ZERO_EXTEND
|| GET_CODE (op
) == SIGN_EXTEND
)
711 if (!COMPARISON_P (op
))
/* Must compare the CC register against zero.  */
714 if (GET_CODE (XEXP (op
, 0)) != REG
715 || REGNO (XEXP (op
, 0)) != CC_REGNUM
716 || XEXP (op
, 1) != const0_rtx
)
/* Accepted comparison code depends on the CC mode (case labels elided).  */
719 switch (GET_MODE (XEXP (op
, 0)))
722 return GET_CODE (op
) == LTU
;
725 return GET_CODE (op
) == LEU
;
728 return GET_CODE (op
) == GEU
;
731 return GET_CODE (op
) == GTU
;
734 return GET_CODE (op
) == LTU
;
737 return GET_CODE (op
) == UNGT
;
740 return GET_CODE (op
) == UNLT
;
747 /* Return nonzero if OP is a valid comparison operator
748 for an SLB condition in mode MODE. */
/* Return nonzero if OP is a comparison usable as an SLB (subtract
   logical with borrow) condition in MODE (see comment above).
   NOTE(review): mirror of s390_alc_comparison; the `case CCxxmode:`
   labels of the switch are elided — verify each surviving return's mode
   against upstream s390.c.  */
751 s390_slb_comparison (rtx op
, enum machine_mode mode
)
753 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
/* Strip any surrounding extensions.  */
756 while (GET_CODE (op
) == ZERO_EXTEND
|| GET_CODE (op
) == SIGN_EXTEND
)
759 if (!COMPARISON_P (op
))
/* Must compare the CC register against zero.  */
762 if (GET_CODE (XEXP (op
, 0)) != REG
763 || REGNO (XEXP (op
, 0)) != CC_REGNUM
764 || XEXP (op
, 1) != const0_rtx
)
/* Accepted comparison code depends on the CC mode (case labels elided).  */
767 switch (GET_MODE (XEXP (op
, 0)))
770 return GET_CODE (op
) == GEU
;
773 return GET_CODE (op
) == GTU
;
776 return GET_CODE (op
) == LTU
;
779 return GET_CODE (op
) == LEU
;
782 return GET_CODE (op
) == GEU
;
785 return GET_CODE (op
) == LE
;
788 return GET_CODE (op
) == GE
;
795 /* Return branch condition mask to implement a branch
796 specified by CODE. Return -1 for invalid comparisons. */
799 s390_branch_condition_mask (rtx code
)
801 const int CC0
= 1 << 3;
802 const int CC1
= 1 << 2;
803 const int CC2
= 1 << 1;
804 const int CC3
= 1 << 0;
806 if (GET_CODE (XEXP (code
, 0)) != REG
807 || REGNO (XEXP (code
, 0)) != CC_REGNUM
808 || XEXP (code
, 1) != const0_rtx
)
811 switch (GET_MODE (XEXP (code
, 0)))
814 switch (GET_CODE (code
))
817 case NE
: return CC1
| CC2
| CC3
;
823 switch (GET_CODE (code
))
826 case NE
: return CC0
| CC2
| CC3
;
832 switch (GET_CODE (code
))
835 case NE
: return CC0
| CC1
| CC3
;
841 switch (GET_CODE (code
))
844 case NE
: return CC0
| CC1
| CC2
;
850 switch (GET_CODE (code
))
852 case EQ
: return CC0
| CC2
;
853 case NE
: return CC1
| CC3
;
859 switch (GET_CODE (code
))
861 case LTU
: return CC2
| CC3
; /* carry */
862 case GEU
: return CC0
| CC1
; /* no carry */
868 switch (GET_CODE (code
))
870 case GTU
: return CC0
| CC1
; /* borrow */
871 case LEU
: return CC2
| CC3
; /* no borrow */
877 switch (GET_CODE (code
))
879 case EQ
: return CC0
| CC2
;
880 case NE
: return CC1
| CC3
;
881 case LTU
: return CC1
;
882 case GTU
: return CC3
;
883 case LEU
: return CC1
| CC2
;
884 case GEU
: return CC2
| CC3
;
889 switch (GET_CODE (code
))
892 case NE
: return CC1
| CC2
| CC3
;
893 case LTU
: return CC1
;
894 case GTU
: return CC2
;
895 case LEU
: return CC0
| CC1
;
896 case GEU
: return CC0
| CC2
;
902 switch (GET_CODE (code
))
905 case NE
: return CC2
| CC1
| CC3
;
906 case LTU
: return CC2
;
907 case GTU
: return CC1
;
908 case LEU
: return CC0
| CC2
;
909 case GEU
: return CC0
| CC1
;
915 switch (GET_CODE (code
))
918 case NE
: return CC1
| CC2
| CC3
;
919 case LT
: return CC1
| CC3
;
921 case LE
: return CC0
| CC1
| CC3
;
922 case GE
: return CC0
| CC2
;
928 switch (GET_CODE (code
))
931 case NE
: return CC1
| CC2
| CC3
;
933 case GT
: return CC2
| CC3
;
934 case LE
: return CC0
| CC1
;
935 case GE
: return CC0
| CC2
| CC3
;
941 switch (GET_CODE (code
))
944 case NE
: return CC1
| CC2
| CC3
;
947 case LE
: return CC0
| CC1
;
948 case GE
: return CC0
| CC2
;
949 case UNORDERED
: return CC3
;
950 case ORDERED
: return CC0
| CC1
| CC2
;
951 case UNEQ
: return CC0
| CC3
;
952 case UNLT
: return CC1
| CC3
;
953 case UNGT
: return CC2
| CC3
;
954 case UNLE
: return CC0
| CC1
| CC3
;
955 case UNGE
: return CC0
| CC2
| CC3
;
956 case LTGT
: return CC1
| CC2
;
962 switch (GET_CODE (code
))
965 case NE
: return CC2
| CC1
| CC3
;
968 case LE
: return CC0
| CC2
;
969 case GE
: return CC0
| CC1
;
970 case UNORDERED
: return CC3
;
971 case ORDERED
: return CC0
| CC2
| CC1
;
972 case UNEQ
: return CC0
| CC3
;
973 case UNLT
: return CC2
| CC3
;
974 case UNGT
: return CC1
| CC3
;
975 case UNLE
: return CC0
| CC2
| CC3
;
976 case UNGE
: return CC0
| CC1
| CC3
;
977 case LTGT
: return CC2
| CC1
;
987 /* If INV is false, return assembler mnemonic string to implement
988 a branch specified by CODE. If INV is true, return mnemonic
989 for the corresponding inverted branch. */
/* Return the assembler condition mnemonic for CODE; if INV, the inverted
   one (see comment above).  Mask 0 and 15 (always-false / always-true)
   have no mnemonic, hence the NULL table entries and the range check.
   NOTE(review): the code that applies INV (presumably mask ^= 15) and
   the out-of-range handling are elided — verify against upstream.  */
992 s390_branch_condition_mnemonic (rtx code
, int inv
)
/* Mnemonic indexed by 4-bit condition mask (CC0..CC3 bits).  */
994 static const char *const mnemonic
[16] =
996 NULL
, "o", "h", "nle",
997 "l", "nhe", "lh", "ne",
998 "e", "nlh", "he", "nl",
999 "le", "nh", "no", NULL
1002 int mask
= s390_branch_condition_mask (code
);
1003 gcc_assert (mask
>= 0);
1008 if (mask
< 1 || mask
> 14)
1011 return mnemonic
[mask
];
1014 /* Return the part of op which has a value different from def.
1015 The size of the part is determined by mode.
1016 Use this function only if you already know that op really
1017 contains such a part. */
/* Return the MODE-sized part of OP whose value differs from DEF, scanning
   from the most significant part (see comment above).  Caller must know
   such a part exists.
   NOTE(review): braces, the declaration of `i`, the initial full-value
   load vs. per-iteration shift ordering, and the final fallthrough are
   elided — verify against upstream s390.c.  */
1019 unsigned HOST_WIDE_INT
1020 s390_extract_part (rtx op
, enum machine_mode mode
, int def
)
1022 unsigned HOST_WIDE_INT value
= 0;
/* Number of MODE-sized parts in a HOST_WIDE_INT.  */
1023 int max_parts
= HOST_BITS_PER_WIDE_INT
/ GET_MODE_BITSIZE (mode
);
1024 int part_bits
= GET_MODE_BITSIZE (mode
);
1025 unsigned HOST_WIDE_INT part_mask
= (1 << part_bits
) - 1;
1028 for (i
= 0; i
< max_parts
; i
++)
1031 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
1033 value
>>= part_bits
;
/* First part that differs from DEF is the answer.  */
1035 if ((value
& part_mask
) != (def
& part_mask
))
1036 return value
& part_mask
;
1042 /* If OP is an integer constant of mode MODE with exactly one
1043 part of mode PART_MODE unequal to DEF, return the number of that
1044 part. Otherwise, return -1. */
/* If OP (CONST_INT of mode MODE) has exactly one PART_MODE-sized part
   unequal to DEF, return that part's index; otherwise -1 (see comment
   above).
   NOTE(review): the DEF parameter line, `part` declaration/tracking
   (including the duplicate-part bail-out) and braces are elided —
   verify against upstream s390.c.  */
1047 s390_single_part (rtx op
,
1048 enum machine_mode mode
,
1049 enum machine_mode part_mode
,
1052 unsigned HOST_WIDE_INT value
= 0;
1053 int n_parts
= GET_MODE_SIZE (mode
) / GET_MODE_SIZE (part_mode
);
1054 unsigned HOST_WIDE_INT part_mask
= (1 << GET_MODE_BITSIZE (part_mode
)) - 1;
/* Only integer constants have parts.  */
1057 if (GET_CODE (op
) != CONST_INT
)
1060 for (i
= 0; i
< n_parts
; i
++)
1063 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
1065 value
>>= GET_MODE_BITSIZE (part_mode
);
1067 if ((value
& part_mask
) != (def
& part_mask
))
/* Convert scan index to part number counted from the other end.  */
1075 return part
== -1 ? -1 : n_parts
- 1 - part
;
1078 /* Check whether we can (and want to) split a double-word
1079 move in mode MODE from SRC to DST into two single-word
1080 moves, moving the subword FIRST_SUBWORD first. */
/* Decide whether a double-word move MODE: SRC -> DST may be split into
   two single-word moves, moving subword FIRST_SUBWORD first (see comment
   above).
   NOTE(review): every `return true/false;` line and the braces are
   elided; only the guard conditions survive.  Verify against upstream
   s390.c.  */
1083 s390_split_ok_p (rtx dst
, rtx src
, enum machine_mode mode
, int first_subword
)
1085 /* Floating point registers cannot be split. */
1086 if (FP_REG_P (src
) || FP_REG_P (dst
))
1089 /* We don't need to split if operands are directly accessible. */
1090 if (s_operand (src
, mode
) || s_operand (dst
, mode
))
1093 /* Non-offsettable memory references cannot be split. */
1094 if ((GET_CODE (src
) == MEM
&& !offsettable_memref_p (src
))
1095 || (GET_CODE (dst
) == MEM
&& !offsettable_memref_p (dst
)))
1098 /* Moving the first subword must not clobber a register
1099 needed to move the second subword. */
1100 if (register_operand (dst
, mode
))
1102 rtx subreg
= operand_subword (dst
, first_subword
, 0, mode
);
1103 if (reg_overlap_mentioned_p (subreg
, src
))
1110 /* Check whether the address of memory reference MEM2 equals exactly
1111 the address of memory reference MEM1 plus DELTA. Return true if
1112 we can prove this to be the case, false otherwise. */
/* Return true iff we can prove address(MEM2) == address(MEM1) + DELTA
   (see comment above), by symbolically subtracting the two addresses.
   NOTE(review): the `return false;` / `return true;` lines and braces
   are elided — verify against upstream s390.c.  */
1115 s390_offset_p (rtx mem1
, rtx mem2
, rtx delta
)
1117 rtx addr1
, addr2
, addr_delta
;
/* Both operands must be memory references.  */
1119 if (GET_CODE (mem1
) != MEM
|| GET_CODE (mem2
) != MEM
)
1122 addr1
= XEXP (mem1
, 0);
1123 addr2
= XEXP (mem2
, 0);
/* Symbolically compute addr2 - addr1 and compare against DELTA.  */
1125 addr_delta
= simplify_binary_operation (MINUS
, Pmode
, addr2
, addr1
);
1126 if (!addr_delta
|| !rtx_equal_p (addr_delta
, delta
))
1132 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
/* Expand logical operator CODE (AND/IOR/XOR) in MODE on OPERANDS
   [0]=dst, [1]=src1, [2]=src2, widening QI/HImode register operations
   to SImode and clobbering CC (see comment above).
   NOTE(review): the OPERANDS parameter line, the `rtx op, clob, tem;`
   declarations, the `wmode = SImode;` assignment under the QI/HI test,
   the `dst = tem;`/`src1 = tem;`/`src2 = tem;` branches and closing
   braces are elided — verify against upstream s390.c.  */
1135 s390_expand_logical_operator (enum rtx_code code
, enum machine_mode mode
,
1138 enum machine_mode wmode
= mode
;
1139 rtx dst
= operands
[0];
1140 rtx src1
= operands
[1];
1141 rtx src2
= operands
[2];
1144 /* If we cannot handle the operation directly, use a temp register. */
1145 if (!s390_logical_operator_ok_p (operands
))
1146 dst
= gen_reg_rtx (mode
);
1148 /* QImode and HImode patterns make sense only if we have a destination
1149 in memory. Otherwise perform the operation in SImode. */
1150 if ((mode
== QImode
|| mode
== HImode
) && GET_CODE (dst
) != MEM
)
1153 /* Widen operands if required. */
1156 if (GET_CODE (dst
) == SUBREG
1157 && (tem
= simplify_subreg (wmode
, dst
, mode
, 0)) != 0)
1159 else if (REG_P (dst
))
1160 dst
= gen_rtx_SUBREG (wmode
, dst
, 0);
1162 dst
= gen_reg_rtx (wmode
);
1164 if (GET_CODE (src1
) == SUBREG
1165 && (tem
= simplify_subreg (wmode
, src1
, mode
, 0)) != 0)
1167 else if (GET_MODE (src1
) != VOIDmode
)
1168 src1
= gen_rtx_SUBREG (wmode
, force_reg (mode
, src1
), 0);
1170 if (GET_CODE (src2
) == SUBREG
1171 && (tem
= simplify_subreg (wmode
, src2
, mode
, 0)) != 0)
1173 else if (GET_MODE (src2
) != VOIDmode
)
1174 src2
= gen_rtx_SUBREG (wmode
, force_reg (mode
, src2
), 0);
1177 /* Emit the instruction. */
1178 op
= gen_rtx_SET (VOIDmode
, dst
, gen_rtx_fmt_ee (code
, wmode
, src1
, src2
));
/* Logical ops clobber the condition code on S/390.  */
1179 clob
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (CCmode
, CC_REGNUM
));
1180 emit_insn (gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, op
, clob
)));
1182 /* Fix up the destination if needed. */
1183 if (dst
!= operands
[0])
1184 emit_move_insn (operands
[0], gen_lowpart (mode
, dst
));
1187 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
/* Check whether OPERANDS ([0]=dst, [1]/[2]=sources) are OK for a logical
   operation (see comment above).
   NOTE(review): the final `return true;` for the non-MEM case and braces
   are elided — verify against upstream s390.c.  */
1190 s390_logical_operator_ok_p (rtx
*operands
)
1192 /* If the destination operand is in memory, it needs to coincide
1193 with one of the source operands. After reload, it has to be
1194 the first source operand. */
1195 if (GET_CODE (operands
[0]) == MEM
)
1196 return rtx_equal_p (operands
[0], operands
[1])
1197 || (!reload_completed
&& rtx_equal_p (operands
[0], operands
[2]));
1202 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1203 operand IMMOP to switch from SS to SI type instructions. */
/* Narrow logical operation CODE on memory operand *MEMOP with immediate
   *IMMOP down to a single byte (QImode), switching from SS- to SI-type
   instructions (see comment above).  Rewrites both operands in place.
   NOTE(review): the `int mask, part;` (or similar) declarations and
   braces are elided — verify against upstream s390.c.  */
1206 s390_narrow_logical_operator (enum rtx_code code
, rtx
*memop
, rtx
*immop
)
/* Neutral byte value: all-ones for AND, all-zeros for IOR/XOR.  */
1208 int def
= code
== AND
? -1 : 0;
1212 gcc_assert (GET_CODE (*memop
) == MEM
);
1213 gcc_assert (!MEM_VOLATILE_P (*memop
));
/* The byte that differs from DEF, and its position within the operand.  */
1215 mask
= s390_extract_part (*immop
, QImode
, def
);
1216 part
= s390_single_part (*immop
, GET_MODE (*memop
), QImode
, def
);
1217 gcc_assert (part
>= 0);
/* Rewrite the operands to address just that byte.  */
1219 *memop
= adjust_address (*memop
, QImode
, part
);
1220 *immop
= gen_int_mode (mask
, QImode
);
1224 /* Change optimizations to be performed, depending on the
1227 LEVEL is the optimization level specified; 2 if `-O2' is
1228 specified, 1 if `-O' is specified, and 0 if neither is specified.
1230 SIZE is nonzero if `-Os' is specified and zero otherwise. */
/* Adjust optimization-dependent defaults for S/390 (see comment above);
   LEVEL/SIZE are unused here.  */
1233 optimization_options (int level ATTRIBUTE_UNUSED
, int size ATTRIBUTE_UNUSED
)
1235 /* ??? There are apparently still problems with -fcaller-saves. */
1236 flag_caller_saves
= 0;
1238 /* By default, always emit DWARF-2 unwind info. This allows debugging
1239 without maintaining a stack frame back-chain. */
1240 flag_asynchronous_unwind_tables
= 1;
/* Validate and apply the -march=/-mtune= and stack-checking command-line
   options: pick the processor from processor_alias_table, default the tune
   CPU to the arch CPU, and sanity-check -mstack-size/-mstack-guard and
   -mwarn-framesize values parsed with sscanf.
   NOTE(review): extraction dropped some lines here (braces, `else` arms,
   loop-exit handling between the table search and the error() calls) — do
   not assume the visible statements form the complete body.  */
1244 override_options (void)
1249 const char *const name
; /* processor name or nickname. */
1250 const enum processor_type processor
;
1251 const enum processor_flags flags
;
1253 const processor_alias_table
[] =
1255 {"g5", PROCESSOR_9672_G5
, PF_IEEE_FLOAT
},
1256 {"g6", PROCESSOR_9672_G6
, PF_IEEE_FLOAT
},
1257 {"z900", PROCESSOR_2064_Z900
, PF_IEEE_FLOAT
| PF_ZARCH
},
1258 {"z990", PROCESSOR_2084_Z990
, PF_IEEE_FLOAT
| PF_ZARCH
1259 | PF_LONG_DISPLACEMENT
},
1262 int const pta_size
= ARRAY_SIZE (processor_alias_table
);
1264 /* Acquire a unique set number for our register saves and restores. */
1265 s390_sr_alias_set
= new_alias_set ();
1267 /* Set up function hooks. */
1268 init_machine_status
= s390_init_machine_status
;
1270 /* Architecture mode defaults according to ABI. */
1271 if (!(target_flags_explicit
& MASK_ZARCH
))
1274 target_flags
|= MASK_ZARCH
;
1276 target_flags
&= ~MASK_ZARCH
;
1279 /* Determine processor architectural level.  Default arch string
   depends on the ABI mode selected above. */
1280 if (!s390_arch_string
)
1281 s390_arch_string
= TARGET_ZARCH
? "z900" : "g5";
1283 for (i
= 0; i
< pta_size
; i
++)
1284 if (! strcmp (s390_arch_string
, processor_alias_table
[i
].name
))
1286 s390_arch
= processor_alias_table
[i
].processor
;
1287 s390_arch_flags
= processor_alias_table
[i
].flags
;
1291 error ("Unknown cpu used in -march=%s.", s390_arch_string
);
1293 /* Determine processor to tune for.  Without -mtune, tune for the
   -march CPU. */
1294 if (!s390_tune_string
)
1296 s390_tune
= s390_arch
;
1297 s390_tune_flags
= s390_arch_flags
;
1298 s390_tune_string
= s390_arch_string
;
1302 for (i
= 0; i
< pta_size
; i
++)
1303 if (! strcmp (s390_tune_string
, processor_alias_table
[i
].name
))
1305 s390_tune
= processor_alias_table
[i
].processor
;
1306 s390_tune_flags
= processor_alias_table
[i
].flags
;
1310 error ("Unknown cpu used in -mtune=%s.", s390_tune_string
);
1313 /* Sanity checks. */
1314 if (TARGET_ZARCH
&& !(s390_arch_flags
& PF_ZARCH
))
1315 error ("z/Architecture mode not supported on %s.", s390_arch_string
);
1316 if (TARGET_64BIT
&& !TARGET_ZARCH
)
1317 error ("64-bit ABI not supported in ESA/390 mode.");
1319 if (s390_warn_framesize_string
)
1321 if (sscanf (s390_warn_framesize_string
, HOST_WIDE_INT_PRINT_DEC
,
1322 &s390_warn_framesize
) != 1)
1323 error ("invalid value for -mwarn-framesize");
1326 if (s390_warn_dynamicstack_string
)
1327 s390_warn_dynamicstack_p
= 1;
1329 if (s390_stack_size_string
)
1331 if (sscanf (s390_stack_size_string
, HOST_WIDE_INT_PRINT_DEC
,
1332 &s390_stack_size
) != 1)
1333 error ("invalid value for -mstack-size");
1335 if (exact_log2 (s390_stack_size
) == -1)
1336 error ("stack size must be an exact power of 2");
1338 if (s390_stack_guard_string
)
1340 if (sscanf (s390_stack_guard_string
, HOST_WIDE_INT_PRINT_DEC
,
1341 &s390_stack_guard
) != 1)
1342 error ("invalid value for -mstack-guard");
1344 if (s390_stack_guard
>= s390_stack_size
)
1345 error ("stack size must be greater than the stack guard value");
1347 if (exact_log2 (s390_stack_guard
) == -1)
1348 error ("stack guard value must be an exact power of 2");
1351 error ("-mstack-size implies use of -mstack-guard");
1354 if (s390_stack_guard_string
&& !s390_stack_size_string
)
1355 error ("-mstack-guard implies use of -mstack-size");
1358 /* Map for smallest class containing reg regno. */
/* Indexed by hard register number: r0 is GENERAL_REGS (not usable as an
   address base), r1-r15 are ADDR_REGS, then sixteen FP_REGS rows, then the
   special registers (one CC_REGS slot and two ACCESS_REGS at the end).
   NOTE(review): the closing brace/semicolon of the initializer fell outside
   this extraction.  */
1360 const enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
1361 { GENERAL_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1362 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1363 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1364 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
1365 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1366 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1367 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1368 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
1369 ADDR_REGS
, CC_REGS
, ADDR_REGS
, ADDR_REGS
,
1370 ACCESS_REGS
, ACCESS_REGS
1373 /* Return attribute type of insn. */
/* "Safe" variant: only queries get_attr_type when recog_memoized recognized
   the insn.  NOTE(review): the fallback return for unrecognized insns was
   dropped by the extraction.  */
1375 static enum attr_type
1376 s390_safe_attr_type (rtx insn
)
1378 if (recog_memoized (insn
) >= 0)
1379 return get_attr_type (insn
);
1384 /* Return true if OP a (const_int 0) operand.
1385 OP is the current operation.
1386 MODE is the current operation mode. */
/* Predicate: pointer-compares OP against the shared zero rtx for MODE.  */
1389 const0_operand (register rtx op
, enum machine_mode mode
)
1391 return op
== CONST0_RTX (mode
);
1394 /* Return true if OP is constant.
1395 OP is the current operation.
1396 MODE is the current operation mode. */
/* Predicate used for literal-pool entries; MODE is intentionally ignored.  */
1399 consttable_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1401 return CONSTANT_P (op
);
1404 /* Return true if the mode of operand OP matches MODE.
1405 If MODE is set to VOIDmode, set it to the mode of OP. */
/* Helper shared by the operand predicates below; note MODE is an in/out
   parameter.  NOTE(review): the extraction dropped the return statements
   for the match/mismatch cases.  */
1408 check_mode (register rtx op
, enum machine_mode
*mode
)
1410 if (*mode
== VOIDmode
)
1411 *mode
= GET_MODE (op
);
1414 if (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != *mode
)
1420 /* Return true if OP a valid operand for the LARL instruction.
1421 OP is the current operation.
1422 MODE is the current operation mode. */
/* LARL loads a PC-relative address; its target must be even (hence the
   SYMBOL_FLAG_ALIGN1 / odd-constant rejections), non-TLS, and — under PIC —
   local.  NOTE(review): extraction dropped the `return 1;`/`return 0;`
   statements after several of the visible tests.  */
1425 larl_operand (register rtx op
, enum machine_mode mode
)
1427 if (! check_mode (op
, &mode
))
1430 /* Allow labels and local symbols. */
1431 if (GET_CODE (op
) == LABEL_REF
)
1433 if (GET_CODE (op
) == SYMBOL_REF
)
1434 return ((SYMBOL_REF_FLAGS (op
) & SYMBOL_FLAG_ALIGN1
) == 0
1435 && SYMBOL_REF_TLS_MODEL (op
) == 0
1436 && (!flag_pic
|| SYMBOL_REF_LOCAL_P (op
)));
1438 /* Everything else must have a CONST, so strip it. */
1439 if (GET_CODE (op
) != CONST
)
1443 /* Allow adding *even* in-range constants. */
1444 if (GET_CODE (op
) == PLUS
)
1446 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
1447 || (INTVAL (XEXP (op
, 1)) & 1) != 0)
1449 #if HOST_BITS_PER_WIDE_INT > 32
/* On 64-bit hosts, additionally require the addend to fit in 33 bits.  */
1450 if (INTVAL (XEXP (op
, 1)) >= (HOST_WIDE_INT
)1 << 32
1451 || INTVAL (XEXP (op
, 1)) < -((HOST_WIDE_INT
)1 << 32))
1457 /* Labels and local symbols allowed here as well. */
1458 if (GET_CODE (op
) == LABEL_REF
)
1460 if (GET_CODE (op
) == SYMBOL_REF
)
1461 return ((SYMBOL_REF_FLAGS (op
) & SYMBOL_FLAG_ALIGN1
) == 0
1462 && SYMBOL_REF_TLS_MODEL (op
) == 0
1463 && (!flag_pic
|| SYMBOL_REF_LOCAL_P (op
)));
1465 /* Now we must have a @GOTENT offset or @PLT stub
1466 or an @INDNTPOFF TLS offset. */
1467 if (GET_CODE (op
) == UNSPEC
1468 && XINT (op
, 1) == UNSPEC_GOTENT
)
1470 if (GET_CODE (op
) == UNSPEC
1471 && XINT (op
, 1) == UNSPEC_PLT
)
1473 if (GET_CODE (op
) == UNSPEC
1474 && XINT (op
, 1) == UNSPEC_INDNTPOFF
)
1480 /* Return true if OP is a valid S-type operand.
1481 OP is the current operation.
1482 MODE is the current operation mode. */
/* S-type = base+displacement memory only (no index register): the operand
   must be a MEM whose address s390_decompose_address accepts.
   NOTE(review): the final checks on the decomposed address were dropped by
   the extraction.  */
1485 s_operand (rtx op
, enum machine_mode mode
)
1487 struct s390_address addr
;
1489 /* Call general_operand first, so that we don't have to
1490 check for many special cases. */
1491 if (!general_operand (op
, mode
))
1494 /* Just like memory_operand, allow (subreg (mem ...))
1496 if (reload_completed
1497 && GET_CODE (op
) == SUBREG
1498 && GET_CODE (SUBREG_REG (op
)) == MEM
)
1499 op
= SUBREG_REG (op
);
1501 if (GET_CODE (op
) != MEM
)
1503 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1511 /* Return true if OP is a memory operand pointing to the
1512 literal pool, or an immediate operand. */
/* A MEM qualifies when its decomposed address is based (or indexed) off the
   literal-pool base register BASE_REGNUM.  NOTE(review): the switch's case
   labels and the returns after the REGNO tests were dropped by the
   extraction.  */
1515 s390_pool_operand (rtx op
)
1517 struct s390_address addr
;
1519 /* Just like memory_operand, allow (subreg (mem ...))
1521 if (reload_completed
1522 && GET_CODE (op
) == SUBREG
1523 && GET_CODE (SUBREG_REG (op
)) == MEM
)
1524 op
= SUBREG_REG (op
);
1526 switch (GET_CODE (op
))
1533 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1535 if (addr
.base
&& REG_P (addr
.base
) && REGNO (addr
.base
) == BASE_REGNUM
)
1537 if (addr
.indx
&& REG_P (addr
.indx
) && REGNO (addr
.indx
) == BASE_REGNUM
)
1546 /* Return true if OP a valid shift count operand.
1547 OP is the current operation.
1548 MODE is the current operation mode. */
/* Accepts CONST_INT, a (possibly subreg'd) register, or register+CONST_INT;
   the constant part must also be a valid address displacement so reload can
   treat the shift count like an address.  */
1551 shift_count_operand (rtx op
, enum machine_mode mode
)
1553 HOST_WIDE_INT offset
= 0;
1555 if (! check_mode (op
, &mode
))
1558 /* We can have an integer constant, an address register,
1559 or a sum of the two. Note that reload already checks
1560 that any register present is an address register, so
1561 we just check for any register here. */
1562 if (GET_CODE (op
) == CONST_INT
)
1564 offset
= INTVAL (op
);
1567 if (op
&& GET_CODE (op
) == PLUS
&& GET_CODE (XEXP (op
, 1)) == CONST_INT
)
1569 offset
= INTVAL (XEXP (op
, 1));
/* Strip any number of subregs before insisting on a bare REG.  */
1572 while (op
&& GET_CODE (op
) == SUBREG
)
1573 op
= SUBREG_REG (op
);
1574 if (op
&& GET_CODE (op
) != REG
)
1577 /* Unfortunately we have to reject constants that are invalid
1578 for an address, or else reload will get confused. */
1579 if (!DISP_IN_RANGE (offset
))
1585 /* Return true if DISP is a valid short displacement. */
/* "Short" = the classic unsigned 12-bit displacement (0..4095); GOT offsets
   are rejected because the GOT may exceed that range.  */
1588 s390_short_displacement (rtx disp
)
1590 /* No displacement is OK. */
1594 /* Integer displacement in range. */
1595 if (GET_CODE (disp
) == CONST_INT
)
1596 return INTVAL (disp
) >= 0 && INTVAL (disp
) < 4096;
1598 /* GOT offset is not OK, the GOT can be large. */
1599 if (GET_CODE (disp
) == CONST
1600 && GET_CODE (XEXP (disp
, 0)) == UNSPEC
1601 && XINT (XEXP (disp
, 0), 1) == UNSPEC_GOT
)
1604 /* All other symbolic constants are literal pool references,
1605 which are OK as the literal pool must be small. */
1606 if (GET_CODE (disp
) == CONST
)
1612 /* Return true if OP is a valid operand for a C constraint. */
/* Dispatches on the constraint letter(s) to distinguish memory/address
   constraints that require or forbid the short 12-bit displacement form,
   depending on TARGET_LONG_DISPLACEMENT.  NOTE(review): the extraction
   dropped the switch statement, its case labels and most return statements;
   the visible tests are the bodies of the individual constraint cases.  */
1615 s390_extra_constraint_str (rtx op
, int c
, const char * str
)
1617 struct s390_address addr
;
1622 /* Check for offsettable variants of memory constraints. */
1625 /* Only accept non-volatile MEMs. */
1626 if (!MEM_P (op
) || MEM_VOLATILE_P (op
))
1629 if ((reload_completed
|| reload_in_progress
)
1630 ? !offsettable_memref_p (op
)
1631 : !offsettable_nonstrict_memref_p (op
))
1640 if (GET_CODE (op
) != MEM
)
1642 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1647 if (TARGET_LONG_DISPLACEMENT
)
1649 if (!s390_short_displacement (addr
.disp
))
1655 if (GET_CODE (op
) != MEM
)
1658 if (TARGET_LONG_DISPLACEMENT
)
1660 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1662 if (!s390_short_displacement (addr
.disp
))
1668 if (!TARGET_LONG_DISPLACEMENT
)
1670 if (GET_CODE (op
) != MEM
)
1672 if (!s390_decompose_address (XEXP (op
, 0), &addr
))
1676 if (s390_short_displacement (addr
.disp
))
1681 if (!TARGET_LONG_DISPLACEMENT
)
1683 if (GET_CODE (op
) != MEM
)
1685 /* Any invalid address here will be fixed up by reload,
1686 so accept it for the most generic constraint. */
1687 if (s390_decompose_address (XEXP (op
, 0), &addr
)
1688 && s390_short_displacement (addr
.disp
))
1693 if (TARGET_LONG_DISPLACEMENT
)
1695 if (!s390_decompose_address (op
, &addr
))
1697 if (!s390_short_displacement (addr
.disp
))
1703 if (!TARGET_LONG_DISPLACEMENT
)
1705 /* Any invalid address here will be fixed up by reload,
1706 so accept it for the most generic constraint. */
1707 if (s390_decompose_address (op
, &addr
)
1708 && s390_short_displacement (addr
.disp
))
/* Shift-count constraint delegates to the dedicated predicate.  */
1713 return shift_count_operand (op
, VOIDmode
);
1722 /* Return true if VALUE matches the constraint STR. */
/* Handles the integer constraint letters: visible ranges are 8-bit unsigned
   (< 256), 12-bit unsigned (< 4096), 16-bit signed, 20-bit signed vs 12-bit
   unsigned displacement depending on TARGET_LONG_DISPLACEMENT, the exact
   value 2147483647, and the multi-letter 'N' form that decodes part/mode/
   default characters from STR and uses s390_single_part.
   NOTE(review): the switch heads, case labels for the simple ranges, and
   several returns were dropped by the extraction.  */
1725 s390_const_ok_for_constraint_p (HOST_WIDE_INT value
,
1729 enum machine_mode mode
, part_mode
;
1731 int part
, part_goal
;
1739 return (unsigned int)value
< 256;
1742 return (unsigned int)value
< 4096;
1745 return value
>= -32768 && value
< 32768;
1748 return (TARGET_LONG_DISPLACEMENT
?
1749 (value
>= -524288 && value
<= 524287)
1750 : (value
>= 0 && value
<= 4095));
1752 return value
== 2147483647;
/* 'N' constraint: first char selects which part (or '-1' wildcard).  */
1758 part_goal
= str
[1] - '0';
1762 case 'H': part_mode
= HImode
; break;
1763 case 'Q': part_mode
= QImode
; break;
1769 case 'H': mode
= HImode
; break;
1770 case 'S': mode
= SImode
; break;
1771 case 'D': mode
= DImode
; break;
1777 case '0': def
= 0; break;
1778 case 'F': def
= -1; break;
/* The part mode must be strictly smaller than the containing mode.  */
1782 if (GET_MODE_SIZE (mode
) <= GET_MODE_SIZE (part_mode
))
1785 part
= s390_single_part (GEN_INT (value
), mode
, part_mode
, def
);
1788 if (part_goal
!= -1 && part_goal
!= part
)
1800 /* Compute a (partial) cost for rtx X. Return true if the complete
1801 cost has been computed, and false if subexpressions should be
1802 scanned. In either case, *TOTAL contains the cost result. */
/* Target hook TARGET_RTX_COSTS.  NOTE(review): the extraction dropped the
   switch over CODE and its case labels; the visible fragments are the
   bodies of individual cases (constants in PLUS context, generic 1-insn
   ops, divisions/multiplications with larger cycle counts).  */
1805 s390_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
1810 if (GET_CODE (XEXP (x
, 0)) == MINUS
1811 && GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
1818 /* Force_const_mem does not work out of reload, because the
1819 saveable_obstack is set to reload_obstack, which does not
1820 live long enough. Because of this we cannot use force_const_mem
1821 in addsi3. This leads to problems with gen_add2_insn with a
1822 constant greater than a short. Because of that we give an
1823 addition of greater constants a cost of 3 (reload1.c 10096). */
1824 /* ??? saveable_obstack no longer exists. */
1825 if (outer_code
== PLUS
1826 && (INTVAL (x
) > 32767 || INTVAL (x
) < -32768))
1827 *total
= COSTS_N_INSNS (3);
1848 *total
= COSTS_N_INSNS (1);
1852 if (GET_MODE (XEXP (x
, 0)) == DImode
)
1853 *total
= COSTS_N_INSNS (40);
1855 *total
= COSTS_N_INSNS (7);
1862 *total
= COSTS_N_INSNS (33);
1870 /* Return the cost of an address rtx ADDR. */
/* Indexed addresses cost one unit more than plain base+disp addresses.  */
1873 s390_address_cost (rtx addr
)
1875 struct s390_address ad
;
1876 if (!s390_decompose_address (addr
, &ad
))
1879 return ad
.indx
? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1882 /* Return true if OP is a valid operand for the BRAS instruction.
1883 OP is the current operation.
1884 MODE is the current operation mode. */
/* BRAS call targets: a plain SYMBOL_REF, or a CONST wrapping an
   UNSPEC_PLT stub.  NOTE(review): the CONST test on the @PLT branch and
   the return statements were dropped by the extraction.  */
1887 bras_sym_operand (register rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1889 register enum rtx_code code
= GET_CODE (op
);
1891 /* Allow SYMBOL_REFs. */
1892 if (code
== SYMBOL_REF
)
1895 /* Allow @PLT stubs. */
1897 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1898 && XINT (XEXP (op
, 0), 1) == UNSPEC_PLT
)
1903 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1904 otherwise return 0. */
/* Non-SYMBOL_REFs fall through; only SYMBOL_REF_TLS_MODEL is consulted.  */
1907 tls_symbolic_operand (register rtx op
)
1909 if (GET_CODE (op
) != SYMBOL_REF
)
1911 return SYMBOL_REF_TLS_MODEL (op
);
1914 /* Return true if OP is a load multiple operation. It is known to be a
1915 PARALLEL and the first section will be tested.
1916 OP is the current operation.
1917 MODE is the current operation mode. */
/* Recognizes the PARALLEL emitted for LM/LMG: element 0 fixes the first
   destination register, element mode and base address; elements 1..n-1
   must load consecutive registers from consecutive memory slots
   (off + i * element size) relative to the same base.  */
1920 load_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1922 enum machine_mode elt_mode
;
1923 int count
= XVECLEN (op
, 0);
1924 unsigned int dest_regno
;
1929 /* Perform a quick check so we don't blow up below. */
1931 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1932 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
1933 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
1936 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
1937 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
1938 elt_mode
= GET_MODE (SET_DEST (XVECEXP (op
, 0, 0)));
1940 /* Check, is base, or base + displacement. */
1942 if (GET_CODE (src_addr
) == REG
)
1944 else if (GET_CODE (src_addr
) == PLUS
1945 && GET_CODE (XEXP (src_addr
, 0)) == REG
1946 && GET_CODE (XEXP (src_addr
, 1)) == CONST_INT
)
1948 off
= INTVAL (XEXP (src_addr
, 1));
1949 src_addr
= XEXP (src_addr
, 0);
/* Every later element must extend the same consecutive reg/mem pattern.  */
1954 for (i
= 1; i
< count
; i
++)
1956 rtx elt
= XVECEXP (op
, 0, i
);
1958 if (GET_CODE (elt
) != SET
1959 || GET_CODE (SET_DEST (elt
)) != REG
1960 || GET_MODE (SET_DEST (elt
)) != elt_mode
1961 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
1962 || GET_CODE (SET_SRC (elt
)) != MEM
1963 || GET_MODE (SET_SRC (elt
)) != elt_mode
1964 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
1965 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
1966 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
1967 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1))
1968 != off
+ i
* GET_MODE_SIZE (elt_mode
))
1975 /* Return true if OP is a store multiple operation. It is known to be a
1976 PARALLEL and the first section will be tested.
1977 OP is the current operation.
1978 MODE is the current operation mode. */
/* Mirror image of load_multiple_operation for STM/STMG: consecutive source
   registers stored to consecutive memory slots off the same base.  */
1981 store_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1983 enum machine_mode elt_mode
;
1984 int count
= XVECLEN (op
, 0);
1985 unsigned int src_regno
;
1989 /* Perform a quick check so we don't blow up below. */
1991 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1992 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
1993 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
1996 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
1997 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
1998 elt_mode
= GET_MODE (SET_SRC (XVECEXP (op
, 0, 0)));
2000 /* Check, is base, or base + displacement. */
2002 if (GET_CODE (dest_addr
) == REG
)
2004 else if (GET_CODE (dest_addr
) == PLUS
2005 && GET_CODE (XEXP (dest_addr
, 0)) == REG
2006 && GET_CODE (XEXP (dest_addr
, 1)) == CONST_INT
)
2008 off
= INTVAL (XEXP (dest_addr
, 1));
2009 dest_addr
= XEXP (dest_addr
, 0);
/* Every later element must extend the same consecutive reg/mem pattern.  */
2014 for (i
= 1; i
< count
; i
++)
2016 rtx elt
= XVECEXP (op
, 0, i
);
2018 if (GET_CODE (elt
) != SET
2019 || GET_CODE (SET_SRC (elt
)) != REG
2020 || GET_MODE (SET_SRC (elt
)) != elt_mode
2021 || REGNO (SET_SRC (elt
)) != src_regno
+ i
2022 || GET_CODE (SET_DEST (elt
)) != MEM
2023 || GET_MODE (SET_DEST (elt
)) != elt_mode
2024 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
2025 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
2026 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
2027 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1))
2028 != off
+ i
* GET_MODE_SIZE (elt_mode
))
2034 /* Split DImode access register reference REG (on 64-bit) into its constituent
2035 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2036 gen_highpart cannot be used as they assume all registers are word-sized,
2037 while our access registers have only half that size. */
/* Preconditions (asserted): 64-bit target, REG is an even-numbered access
   register in DImode.  The odd register holds the low SImode half, the
   even one the high half.  */
2040 s390_split_access_reg (rtx reg
, rtx
*lo
, rtx
*hi
)
2042 gcc_assert (TARGET_64BIT
);
2043 gcc_assert (ACCESS_REG_P (reg
));
2044 gcc_assert (GET_MODE (reg
) == DImode
);
2045 gcc_assert (!(REGNO (reg
) & 1));
2047 *lo
= gen_rtx_REG (SImode
, REGNO (reg
) + 1);
2048 *hi
= gen_rtx_REG (SImode
, REGNO (reg
));
2051 /* Return true if OP contains a symbol reference */
/* Recursive walk over the rtx using its format string: 'E' entries recurse
   into vectors, 'e' entries into sub-expressions.  */
2054 symbolic_reference_mentioned_p (rtx op
)
2056 register const char *fmt
;
2059 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
2062 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
2063 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
2069 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
2070 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
2074 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
2081 /* Return true if OP contains a reference to a thread-local symbol. */
/* Same recursive format-string walk as symbolic_reference_mentioned_p, but
   leaf SYMBOL_REFs are filtered through tls_symbolic_operand.  */
2084 tls_symbolic_reference_mentioned_p (rtx op
)
2086 register const char *fmt
;
2089 if (GET_CODE (op
) == SYMBOL_REF
)
2090 return tls_symbolic_operand (op
);
2092 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
2093 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
2099 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
2100 if (tls_symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
2104 else if (fmt
[i
] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op
, i
)))
2112 /* Return true if OP is a legitimate general operand when
2113 generating PIC code. It is given that flag_pic is on
2114 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2117 legitimate_pic_operand_p (register rtx op
)
2119 /* Accept all non-symbolic constants. */
2120 if (!SYMBOLIC_CONST (op
))
2123 /* Reject everything else; must be handled
2124 via emit_symbolic_move. */
2128 /* Returns true if the constant value OP is a legitimate general operand.
2129 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* NOTE(review): the `return` statements following each accept/reject
   comment were dropped by the extraction.  */
2132 legitimate_constant_p (register rtx op
)
2134 /* Accept all non-symbolic constants. */
2135 if (!SYMBOLIC_CONST (op
))
2138 /* Accept immediate LARL operands. */
2139 if (TARGET_CPU_ZARCH
&& larl_operand (op
, VOIDmode
))
2142 /* Thread-local symbols are never legal constants. This is
2143 so that emit_call knows that computing such addresses
2144 might require a function call. */
2145 if (TLS_SYMBOLIC_CONST (op
))
2148 /* In the PIC case, symbolic constants must *not* be
2149 forced into the literal pool. We accept them here,
2150 so that they will be handled by emit_symbolic_move. */
2154 /* All remaining non-PIC symbolic constants are
2155 forced into the literal pool. */
2159 /* Determine if it's legal to put X into the constant pool. This
2160 is not possible if X contains the address of a symbol that is
2161 not constant (TLS) or not known at final link time (PIC). */
/* Target hook TARGET_CANNOT_FORCE_CONST_MEM; returns nonzero when X must
   NOT be placed in the pool.  Recurses through CONST and binary PLUS/MINUS
   operands.  NOTE(review): several case labels of both switches were
   dropped by the extraction.  */
2164 s390_cannot_force_const_mem (rtx x
)
2166 switch (GET_CODE (x
))
2170 /* Accept all non-symbolic constants. */
2174 /* Labels are OK iff we are non-PIC. */
2175 return flag_pic
!= 0;
2178 /* 'Naked' TLS symbol references are never OK,
2179 non-TLS symbols are OK iff we are non-PIC. */
2180 if (tls_symbolic_operand (x
))
2183 return flag_pic
!= 0;
2186 return s390_cannot_force_const_mem (XEXP (x
, 0));
2189 return s390_cannot_force_const_mem (XEXP (x
, 0))
2190 || s390_cannot_force_const_mem (XEXP (x
, 1));
2193 switch (XINT (x
, 1))
2195 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2196 case UNSPEC_LTREL_OFFSET
:
2204 case UNSPEC_GOTNTPOFF
:
2205 case UNSPEC_INDNTPOFF
:
2208 /* If the literal pool shares the code section, be put
2209 execute template placeholders into the pool as well. */
2211 return TARGET_CPU_ZARCH
;
2223 /* Returns true if the constant value OP is a legitimate general
2224 operand during and after reload. The difference to
2225 legitimate_constant_p is that this function will not accept
2226 a constant that would need to be forced to the literal pool
2227 before it can be used as operand. */
/* Each accepted class corresponds to an instruction that can materialize
   the constant directly: LA/LAY, LHI/LGHI, LLIxx, LARL.
   NOTE(review): the `return` statements after each test were dropped by
   the extraction, as was the TARGET_64BIT guard line of the lliXX case.  */
2230 legitimate_reload_constant_p (register rtx op
)
2232 /* Accept la(y) operands. */
2233 if (GET_CODE (op
) == CONST_INT
2234 && DISP_IN_RANGE (INTVAL (op
)))
2237 /* Accept l(g)hi operands. */
2238 if (GET_CODE (op
) == CONST_INT
2239 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'K', "K"))
2242 /* Accept lliXX operands. */
2244 && s390_single_part (op
, DImode
, HImode
, 0) >= 0)
2247 /* Accept larl operands. */
2248 if (TARGET_CPU_ZARCH
2249 && larl_operand (op
, VOIDmode
))
2252 /* Everything else cannot be handled without reload. */
2256 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2257 return the class of reg to actually use. */
/* Implements PREFERRED_RELOAD_CLASS.  NOTE(review): the case labels of the
   switch and the return values were dropped by the extraction.  */
2260 s390_preferred_reload_class (rtx op
, enum reg_class
class)
2262 switch (GET_CODE (op
))
2264 /* Constants we cannot reload must be forced into the
2269 if (legitimate_reload_constant_p (op
))
2274 /* If a symbolic constant or a PLUS is reloaded,
2275 it is most likely being used as an address, so
2276 prefer ADDR_REGS. If 'class' is not a superset
2277 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2282 if (reg_class_subset_p (ADDR_REGS
, class))
2294 /* Return the register class of a scratch register needed to
2295 load IN into a register of class CLASS in MODE.
2297 We need a temporary when loading a PLUS expression which
2298 is not a legitimate operand of the LOAD ADDRESS instruction. */
/* A GENERAL_REGS scratch is also needed when reloading into CC_REGS.  */
2301 s390_secondary_input_reload_class (enum reg_class
class,
2302 enum machine_mode mode
, rtx in
)
2304 if (s390_plus_operand (in
, mode
))
2307 if (reg_classes_intersect_p (CC_REGS
, class))
2308 return GENERAL_REGS
;
2313 /* Return the register class of a scratch register needed to
2314 store a register of class CLASS in MODE into OUT:
2316 We need a temporary when storing a double-word to a
2317 non-offsettable memory address. */
/* Double-word = TImode on 64-bit, DImode/DFmode on 31-bit.  A scratch is
   also needed when storing from CC_REGS.  */
2320 s390_secondary_output_reload_class (enum reg_class
class,
2321 enum machine_mode mode
, rtx out
)
2323 if ((TARGET_64BIT
? mode
== TImode
2324 : (mode
== DImode
|| mode
== DFmode
))
2325 && reg_classes_intersect_p (GENERAL_REGS
, class)
2326 && GET_CODE (out
) == MEM
2327 && !offsettable_memref_p (out
)
2328 && !s_operand (out
, VOIDmode
))
2331 if (reg_classes_intersect_p (CC_REGS
, class))
2332 return GENERAL_REGS
;
2337 /* Return true if OP is a PLUS that is not a legitimate
2338 operand for the LA instruction.
2339 OP is the current operation.
2340 MODE is the current operation mode. */
/* Used by the secondary-reload logic above: such a PLUS needs the scratch
   handling in s390_expand_plus_operand.  */
2343 s390_plus_operand (register rtx op
, enum machine_mode mode
)
2345 if (!check_mode (op
, &mode
) || mode
!= Pmode
)
2348 if (GET_CODE (op
) != PLUS
)
2351 if (legitimate_la_operand_p (op
))
2357 /* Generate code to load SRC, which is PLUS that is not a
2358 legitimate operand for the LA instruction, into TARGET.
2359 SCRATCH may be used as scratch register. */
/* Strategy: resolve pending reload replacements in both addends, then if
   the rebuilt sum is not already a strictly valid address, move whichever
   addend is not an address register (hard regs 1..15) into SCRATCH and
   rebuild the sum before emitting the forced LA.  */
2362 s390_expand_plus_operand (register rtx target
, register rtx src
,
2363 register rtx scratch
)
2366 struct s390_address ad
;
2368 /* src must be a PLUS; get its two operands. */
2369 if (GET_CODE (src
) != PLUS
|| GET_MODE (src
) != Pmode
)
2372 /* Check if any of the two operands is already scheduled
2373 for replacement by reload. This can happen e.g. when
2374 float registers occur in an address. */
2375 sum1
= find_replacement (&XEXP (src
, 0));
2376 sum2
= find_replacement (&XEXP (src
, 1));
2377 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
2379 /* If the address is already strictly valid, there's nothing to do. */
2380 if (!s390_decompose_address (src
, &ad
)
2381 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
2382 || (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
)))
2384 /* Otherwise, one of the operands cannot be an address register;
2385 we reload its value into the scratch register. */
2386 if (true_regnum (sum1
) < 1 || true_regnum (sum1
) > 15)
2388 emit_move_insn (scratch
, sum1
);
2391 if (true_regnum (sum2
) < 1 || true_regnum (sum2
) > 15)
2393 emit_move_insn (scratch
, sum2
);
2397 /* According to the way these invalid addresses are generated
2398 in reload.c, it should never happen (at least on s390) that
2399 *neither* of the PLUS components, after find_replacements
2400 was applied, is an address register. */
2401 if (sum1
== scratch
&& sum2
== scratch
)
2407 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
2410 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2411 is only ever performed on addresses, so we can mark the
2412 sum as legitimate for LA in any case. */
2413 s390_load_address (target
, src
);
2417 /* Decompose a RTL expression ADDR for a memory address into
2418 its components, returned in OUT.
2420 Returns 0 if ADDR is not a valid memory address, nonzero
2421 otherwise. If OUT is NULL, don't return the components,
2422 but check for validity only.
2424 Note: Only addresses in canonical form are recognized.
2425 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2426 canonical form so that they will be recognized. */
/* Core address legitimizer: splits ADDR into base register, index register
   and displacement; rewrites literal-pool and LTREL unspec references onto
   the dedicated base register BASE_REGNUM; tracks whether the components
   are known pointers.  NOTE(review): the extraction dropped many lines
   here (else branches, `return FALSE;` rejections, several case labels and
   the trailing out->base/out->indx stores) — treat the visible statements
   as an incomplete skeleton of the original body.  */
2429 s390_decompose_address (register rtx addr
, struct s390_address
*out
)
2431 HOST_WIDE_INT offset
= 0;
2432 rtx base
= NULL_RTX
;
2433 rtx indx
= NULL_RTX
;
2434 rtx disp
= NULL_RTX
;
2436 int pointer
= FALSE
;
2437 int base_ptr
= FALSE
;
2438 int indx_ptr
= FALSE
;
2440 /* Decompose address into base + index + displacement. */
2442 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == UNSPEC
)
2445 else if (GET_CODE (addr
) == PLUS
)
2447 rtx op0
= XEXP (addr
, 0);
2448 rtx op1
= XEXP (addr
, 1);
2449 enum rtx_code code0
= GET_CODE (op0
);
2450 enum rtx_code code1
= GET_CODE (op1
);
2452 if (code0
== REG
|| code0
== UNSPEC
)
2454 if (code1
== REG
|| code1
== UNSPEC
)
2456 indx
= op0
; /* index + base */
2462 base
= op0
; /* base + displacement */
2467 else if (code0
== PLUS
)
2469 indx
= XEXP (op0
, 0); /* index + base + disp */
2470 base
= XEXP (op0
, 1);
2481 disp
= addr
; /* displacement */
2483 /* Extract integer part of displacement. */
2487 if (GET_CODE (disp
) == CONST_INT
)
2489 offset
= INTVAL (disp
);
2492 else if (GET_CODE (disp
) == CONST
2493 && GET_CODE (XEXP (disp
, 0)) == PLUS
2494 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == CONST_INT
)
2496 offset
= INTVAL (XEXP (XEXP (disp
, 0), 1));
2497 disp
= XEXP (XEXP (disp
, 0), 0);
2501 /* Strip off CONST here to avoid special case tests later. */
2502 if (disp
&& GET_CODE (disp
) == CONST
)
2503 disp
= XEXP (disp
, 0);
2505 /* We can convert literal pool addresses to
2506 displacements by basing them off the base register. */
2507 if (disp
&& GET_CODE (disp
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (disp
))
2509 /* Either base or index must be free to hold the base register. */
2511 base
= gen_rtx_REG (Pmode
, BASE_REGNUM
);
2513 indx
= gen_rtx_REG (Pmode
, BASE_REGNUM
);
2517 /* Mark up the displacement. */
2518 disp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, disp
),
2519 UNSPEC_LTREL_OFFSET
);
2522 /* Validate base register. */
2525 if (GET_CODE (base
) == UNSPEC
)
2526 switch (XINT (base
, 1))
2530 disp
= gen_rtx_UNSPEC (Pmode
,
2531 gen_rtvec (1, XVECEXP (base
, 0, 0)),
2532 UNSPEC_LTREL_OFFSET
);
2536 base
= gen_rtx_REG (Pmode
, BASE_REGNUM
);
2539 case UNSPEC_LTREL_BASE
:
2540 base
= gen_rtx_REG (Pmode
, BASE_REGNUM
);
2547 if (GET_CODE (base
) != REG
|| GET_MODE (base
) != Pmode
)
/* Registers that provably hold addresses mark the whole address as a
   pointer (used by LA legitimacy checks).  */
2550 if (REGNO (base
) == BASE_REGNUM
2551 || REGNO (base
) == STACK_POINTER_REGNUM
2552 || REGNO (base
) == FRAME_POINTER_REGNUM
2553 || ((reload_completed
|| reload_in_progress
)
2554 && frame_pointer_needed
2555 && REGNO (base
) == HARD_FRAME_POINTER_REGNUM
)
2556 || REGNO (base
) == ARG_POINTER_REGNUM
2558 && REGNO (base
) == PIC_OFFSET_TABLE_REGNUM
))
2559 pointer
= base_ptr
= TRUE
;
2562 /* Validate index register.  Mirrors the base-register handling.  */
2565 if (GET_CODE (indx
) == UNSPEC
)
2566 switch (XINT (indx
, 1))
2570 disp
= gen_rtx_UNSPEC (Pmode
,
2571 gen_rtvec (1, XVECEXP (indx
, 0, 0)),
2572 UNSPEC_LTREL_OFFSET
);
2576 indx
= gen_rtx_REG (Pmode
, BASE_REGNUM
);
2579 case UNSPEC_LTREL_BASE
:
2580 indx
= gen_rtx_REG (Pmode
, BASE_REGNUM
);
2587 if (GET_CODE (indx
) != REG
|| GET_MODE (indx
) != Pmode
)
2590 if (REGNO (indx
) == BASE_REGNUM
2591 || REGNO (indx
) == STACK_POINTER_REGNUM
2592 || REGNO (indx
) == FRAME_POINTER_REGNUM
2593 || ((reload_completed
|| reload_in_progress
)
2594 && frame_pointer_needed
2595 && REGNO (indx
) == HARD_FRAME_POINTER_REGNUM
)
2596 || REGNO (indx
) == ARG_POINTER_REGNUM
2598 && REGNO (indx
) == PIC_OFFSET_TABLE_REGNUM
))
2599 pointer
= indx_ptr
= TRUE
;
2602 /* Prefer to use pointer as base, not index. */
2603 if (base
&& indx
&& !base_ptr
2604 && (indx_ptr
|| (!REG_POINTER (base
) && REG_POINTER (indx
))))
2611 /* Validate displacement. */
2614 /* If the argument pointer or the return address pointer are involved,
2615 the displacement will change later anyway as the virtual registers get
2616 eliminated. This could make a valid displacement invalid, but it is
2617 more likely to make an invalid displacement valid, because we sometimes
2618 access the register save area via negative offsets to one of those
2620 Thus we don't check the displacement for validity here. If after
2621 elimination the displacement turns out to be invalid after all,
2622 this is fixed up by reload in any case. */
2623 if (base
!= arg_pointer_rtx
2624 && indx
!= arg_pointer_rtx
2625 && base
!= return_address_pointer_rtx
2626 && indx
!= return_address_pointer_rtx
)
2627 if (!DISP_IN_RANGE (offset
))
2632 /* All the special cases are pointers. */
2635 /* In the small-PIC case, the linker converts @GOT
2636 and @GOTNTPOFF offsets to possible displacements. */
2637 if (GET_CODE (disp
) == UNSPEC
2638 && (XINT (disp
, 1) == UNSPEC_GOT
2639 || XINT (disp
, 1) == UNSPEC_GOTNTPOFF
)
2646 /* Accept chunkified literal pool symbol references. */
2647 else if (GET_CODE (disp
) == MINUS
2648 && GET_CODE (XEXP (disp
, 0)) == LABEL_REF
2649 && GET_CODE (XEXP (disp
, 1)) == LABEL_REF
)
2654 /* Accept literal pool references. */
2655 else if (GET_CODE (disp
) == UNSPEC
2656 && XINT (disp
, 1) == UNSPEC_LTREL_OFFSET
)
2658 orig_disp
= gen_rtx_CONST (Pmode
, disp
);
2661 /* If we have an offset, make sure it does not
2662 exceed the size of the constant pool entry. */
2663 rtx sym
= XVECEXP (disp
, 0, 0);
2664 if (offset
>= GET_MODE_SIZE (get_pool_mode (sym
)))
2667 orig_disp
= plus_constant (orig_disp
, offset
);
2682 out
->disp
= orig_disp
;
2683 out
->pointer
= pointer
;
2689 /* Return nonzero if ADDR is a valid memory address.
2690 STRICT specifies whether strict register checking applies. */
/* Implements GO_IF_LEGITIMATE_ADDRESS: decompose first, then apply either
   the strict or non-strict base/index register checks.
   NOTE(review): the if (strict)/else structure and returns were dropped by
   the extraction.  */
2693 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
2694 register rtx addr
, int strict
)
2696 struct s390_address ad
;
2697 if (!s390_decompose_address (addr
, &ad
))
2702 if (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
2704 if (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
))
2709 if (ad
.base
&& !REG_OK_FOR_BASE_NONSTRICT_P (ad
.base
))
2711 if (ad
.indx
&& !REG_OK_FOR_INDEX_NONSTRICT_P (ad
.indx
))
2718 /* Return 1 if OP is a valid operand for the LA instruction.
2719 In 31-bit, we need to prove that the result is used as an
2720 address, as LA performs only a 31-bit addition. */
/* On 64-bit, any decomposable address is fine; on 31-bit the decomposed
   address must additionally be marked as a pointer.  */
2723 legitimate_la_operand_p (register rtx op
)
2725 struct s390_address addr
;
2726 if (!s390_decompose_address (op
, &addr
))
2729 if (TARGET_64BIT
|| addr
.pointer
)
2735 /* Return 1 if it is valid *and* preferable to use LA to
2736 compute the sum of OP1 and OP2. */
/* OP1+OP2 (OP2 may be const0_rtx) must decompose into a strictly valid
   address; LA is then preferred when at least one component register is a
   known pointer.  */
2739 preferred_la_operand_p (rtx op1
, rtx op2
)
2741 struct s390_address addr
;
2743 if (op2
!= const0_rtx
)
2744 op1
= gen_rtx_PLUS (Pmode
, op1
, op2
);
2746 if (!s390_decompose_address (op1
, &addr
))
2748 if (addr
.base
&& !REG_OK_FOR_BASE_STRICT_P (addr
.base
))
2750 if (addr
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (addr
.indx
))
2753 if (!TARGET_64BIT
&& !addr
.pointer
)
2759 if ((addr
.base
&& REG_P (addr
.base
) && REG_POINTER (addr
.base
))
2760 || (addr
.indx
&& REG_P (addr
.indx
) && REG_POINTER (addr
.indx
)))
2766 /* Emit a forced load-address operation to load SRC into DST.
2767 This will use the LOAD ADDRESS instruction even in situations
2768 where legitimate_la_operand_p (SRC) returns false. */
2771 s390_load_address (rtx dst
, rtx src
)
2774 emit_move_insn (dst
, src
);
2776 emit_insn (gen_force_la_31 (dst
, src
));
2779 /* Return a legitimate reference for ORIG (an address) using the
2780 register REG. If REG is 0, a new pseudo is generated.
2782 There are two types of references that must be handled:
2784 1. Global data references must load the address from the GOT, via
2785 the PIC reg. An insn is emitted to do this load, and the reg is
2788 2. Static data references, constant pool addresses, and code labels
2789 compute the address as an offset from the GOT, whose base is in
2790 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2791 differentiate them from global data objects. The returned
2792 address is the PIC reg + an unspec constant.
2794 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2795 reg also appears in the address. */
2798 legitimize_pic_address (rtx orig
, rtx reg
)
2804 if (GET_CODE (addr
) == LABEL_REF
2805 || (GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (addr
)))
2807 /* This is a local symbol. */
2808 if (TARGET_CPU_ZARCH
&& larl_operand (addr
, VOIDmode
))
2810 /* Access local symbols PC-relative via LARL.
2811 This is the same as in the non-PIC case, so it is
2812 handled automatically ... */
2816 /* Access local symbols relative to the GOT. */
2818 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2820 if (reload_in_progress
|| reload_completed
)
2821 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2823 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTOFF
);
2824 addr
= gen_rtx_CONST (Pmode
, addr
);
2825 addr
= force_const_mem (Pmode
, addr
);
2826 emit_move_insn (temp
, addr
);
2828 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2831 emit_move_insn (reg
, new);
2836 else if (GET_CODE (addr
) == SYMBOL_REF
)
2839 reg
= gen_reg_rtx (Pmode
);
2843 /* Assume GOT offset < 4k. This is handled the same way
2844 in both 31- and 64-bit code (@GOT). */
2846 if (reload_in_progress
|| reload_completed
)
2847 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2849 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOT
);
2850 new = gen_rtx_CONST (Pmode
, new);
2851 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
2852 new = gen_const_mem (Pmode
, new);
2853 emit_move_insn (reg
, new);
2856 else if (TARGET_CPU_ZARCH
)
2858 /* If the GOT offset might be >= 4k, we determine the position
2859 of the GOT entry via a PC-relative LARL (@GOTENT). */
2861 rtx temp
= gen_reg_rtx (Pmode
);
2863 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTENT
);
2864 new = gen_rtx_CONST (Pmode
, new);
2865 emit_move_insn (temp
, new);
2867 new = gen_const_mem (Pmode
, temp
);
2868 emit_move_insn (reg
, new);
2873 /* If the GOT offset might be >= 4k, we have to load it
2874 from the literal pool (@GOT). */
2876 rtx temp
= gen_reg_rtx (Pmode
);
2878 if (reload_in_progress
|| reload_completed
)
2879 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2881 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOT
);
2882 addr
= gen_rtx_CONST (Pmode
, addr
);
2883 addr
= force_const_mem (Pmode
, addr
);
2884 emit_move_insn (temp
, addr
);
2886 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2887 new = gen_const_mem (Pmode
, new);
2888 emit_move_insn (reg
, new);
2894 if (GET_CODE (addr
) == CONST
)
2896 addr
= XEXP (addr
, 0);
2897 if (GET_CODE (addr
) == UNSPEC
)
2899 if (XVECLEN (addr
, 0) != 1)
2901 switch (XINT (addr
, 1))
2903 /* If someone moved a GOT-relative UNSPEC
2904 out of the literal pool, force them back in. */
2907 new = force_const_mem (Pmode
, orig
);
2910 /* @GOT is OK as is if small. */
2913 new = force_const_mem (Pmode
, orig
);
2916 /* @GOTENT is OK as is. */
2920 /* @PLT is OK as is on 64-bit, must be converted to
2921 GOT-relative @PLTOFF on 31-bit. */
2923 if (!TARGET_CPU_ZARCH
)
2925 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2927 if (reload_in_progress
|| reload_completed
)
2928 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
2930 addr
= XVECEXP (addr
, 0, 0);
2931 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
),
2933 addr
= gen_rtx_CONST (Pmode
, addr
);
2934 addr
= force_const_mem (Pmode
, addr
);
2935 emit_move_insn (temp
, addr
);
2937 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
2940 emit_move_insn (reg
, new);
2946 /* Everything else cannot happen. */
2951 else if (GET_CODE (addr
) != PLUS
)
2954 if (GET_CODE (addr
) == PLUS
)
2956 rtx op0
= XEXP (addr
, 0), op1
= XEXP (addr
, 1);
2957 /* Check first to see if this is a constant offset
2958 from a local symbol reference. */
2959 if ((GET_CODE (op0
) == LABEL_REF
2960 || (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_LOCAL_P (op0
)))
2961 && GET_CODE (op1
) == CONST_INT
)
2963 if (TARGET_CPU_ZARCH
&& larl_operand (op0
, VOIDmode
))
2965 if (INTVAL (op1
) & 1)
2967 /* LARL can't handle odd offsets, so emit a
2968 pair of LARL and LA. */
2969 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
2971 if (!DISP_IN_RANGE (INTVAL (op1
)))
2973 int even
= INTVAL (op1
) - 1;
2974 op0
= gen_rtx_PLUS (Pmode
, op0
, GEN_INT (even
));
2975 op0
= gen_rtx_CONST (Pmode
, op0
);
2979 emit_move_insn (temp
, op0
);
2980 new = gen_rtx_PLUS (Pmode
, temp
, op1
);
2984 emit_move_insn (reg
, new);
2990 /* If the offset is even, we can just use LARL.
2991 This will happen automatically. */
2996 /* Access local symbols relative to the GOT. */
2998 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
3000 if (reload_in_progress
|| reload_completed
)
3001 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3003 addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op0
),
3005 addr
= gen_rtx_PLUS (Pmode
, addr
, op1
);
3006 addr
= gen_rtx_CONST (Pmode
, addr
);
3007 addr
= force_const_mem (Pmode
, addr
);
3008 emit_move_insn (temp
, addr
);
3010 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
3013 emit_move_insn (reg
, new);
3019 /* Now, check whether it is a GOT relative symbol plus offset
3020 that was pulled out of the literal pool. Force it back in. */
3022 else if (GET_CODE (op0
) == UNSPEC
3023 && GET_CODE (op1
) == CONST_INT
3024 && XINT (op0
, 1) == UNSPEC_GOTOFF
)
3026 if (XVECLEN (op0
, 0) != 1)
3029 new = force_const_mem (Pmode
, orig
);
3032 /* Otherwise, compute the sum. */
3035 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
3036 new = legitimize_pic_address (XEXP (addr
, 1),
3037 base
== reg
? NULL_RTX
: reg
);
3038 if (GET_CODE (new) == CONST_INT
)
3039 new = plus_constant (base
, INTVAL (new));
3042 if (GET_CODE (new) == PLUS
&& CONSTANT_P (XEXP (new, 1)))
3044 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (new, 0));
3045 new = XEXP (new, 1);
3047 new = gen_rtx_PLUS (Pmode
, base
, new);
3050 if (GET_CODE (new) == CONST
)
3051 new = XEXP (new, 0);
3052 new = force_operand (new, 0);
3059 /* Load the thread pointer into a register. */
3062 get_thread_pointer (void)
3064 rtx tp
= gen_reg_rtx (Pmode
);
3066 emit_move_insn (tp
, gen_rtx_REG (Pmode
, TP_REGNUM
));
3067 mark_reg_pointer (tp
, BITS_PER_WORD
);
3072 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3073 in s390_tls_symbol which always refers to __tls_get_offset.
3074 The returned offset is written to RESULT_REG and an USE rtx is
3075 generated for TLS_CALL. */
3077 static GTY(()) rtx s390_tls_symbol
;
3080 s390_emit_tls_call_insn (rtx result_reg
, rtx tls_call
)
3087 if (!s390_tls_symbol
)
3088 s390_tls_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "__tls_get_offset");
3090 insn
= s390_emit_call (s390_tls_symbol
, tls_call
, result_reg
,
3091 gen_rtx_REG (Pmode
, RETURN_REGNUM
));
3093 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), result_reg
);
3094 CONST_OR_PURE_CALL_P (insn
) = 1;
3097 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3098 this (thread-local) address. REG may be used as temporary. */
3101 legitimize_tls_address (rtx addr
, rtx reg
)
3103 rtx
new, tls_call
, temp
, base
, r2
, insn
;
3105 if (GET_CODE (addr
) == SYMBOL_REF
)
3106 switch (tls_symbolic_operand (addr
))
3108 case TLS_MODEL_GLOBAL_DYNAMIC
:
3110 r2
= gen_rtx_REG (Pmode
, 2);
3111 tls_call
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_TLSGD
);
3112 new = gen_rtx_CONST (Pmode
, tls_call
);
3113 new = force_const_mem (Pmode
, new);
3114 emit_move_insn (r2
, new);
3115 s390_emit_tls_call_insn (r2
, tls_call
);
3116 insn
= get_insns ();
3119 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_NTPOFF
);
3120 temp
= gen_reg_rtx (Pmode
);
3121 emit_libcall_block (insn
, temp
, r2
, new);
3123 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
3126 s390_load_address (reg
, new);
3131 case TLS_MODEL_LOCAL_DYNAMIC
:
3133 r2
= gen_rtx_REG (Pmode
, 2);
3134 tls_call
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TLSLDM
);
3135 new = gen_rtx_CONST (Pmode
, tls_call
);
3136 new = force_const_mem (Pmode
, new);
3137 emit_move_insn (r2
, new);
3138 s390_emit_tls_call_insn (r2
, tls_call
);
3139 insn
= get_insns ();
3142 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
), UNSPEC_TLSLDM_NTPOFF
);
3143 temp
= gen_reg_rtx (Pmode
);
3144 emit_libcall_block (insn
, temp
, r2
, new);
3146 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
3147 base
= gen_reg_rtx (Pmode
);
3148 s390_load_address (base
, new);
3150 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_DTPOFF
);
3151 new = gen_rtx_CONST (Pmode
, new);
3152 new = force_const_mem (Pmode
, new);
3153 temp
= gen_reg_rtx (Pmode
);
3154 emit_move_insn (temp
, new);
3156 new = gen_rtx_PLUS (Pmode
, base
, temp
);
3159 s390_load_address (reg
, new);
3164 case TLS_MODEL_INITIAL_EXEC
:
3167 /* Assume GOT offset < 4k. This is handled the same way
3168 in both 31- and 64-bit code. */
3170 if (reload_in_progress
|| reload_completed
)
3171 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3173 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTNTPOFF
);
3174 new = gen_rtx_CONST (Pmode
, new);
3175 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
3176 new = gen_const_mem (Pmode
, new);
3177 temp
= gen_reg_rtx (Pmode
);
3178 emit_move_insn (temp
, new);
3180 else if (TARGET_CPU_ZARCH
)
3182 /* If the GOT offset might be >= 4k, we determine the position
3183 of the GOT entry via a PC-relative LARL. */
3185 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_INDNTPOFF
);
3186 new = gen_rtx_CONST (Pmode
, new);
3187 temp
= gen_reg_rtx (Pmode
);
3188 emit_move_insn (temp
, new);
3190 new = gen_const_mem (Pmode
, temp
);
3191 temp
= gen_reg_rtx (Pmode
);
3192 emit_move_insn (temp
, new);
3196 /* If the GOT offset might be >= 4k, we have to load it
3197 from the literal pool. */
3199 if (reload_in_progress
|| reload_completed
)
3200 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3202 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_GOTNTPOFF
);
3203 new = gen_rtx_CONST (Pmode
, new);
3204 new = force_const_mem (Pmode
, new);
3205 temp
= gen_reg_rtx (Pmode
);
3206 emit_move_insn (temp
, new);
3208 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
3209 new = gen_const_mem (Pmode
, new);
3211 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, new, addr
), UNSPEC_TLS_LOAD
);
3212 temp
= gen_reg_rtx (Pmode
);
3213 emit_insn (gen_rtx_SET (Pmode
, temp
, new));
3217 /* In position-dependent code, load the absolute address of
3218 the GOT entry from the literal pool. */
3220 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_INDNTPOFF
);
3221 new = gen_rtx_CONST (Pmode
, new);
3222 new = force_const_mem (Pmode
, new);
3223 temp
= gen_reg_rtx (Pmode
);
3224 emit_move_insn (temp
, new);
3227 new = gen_const_mem (Pmode
, new);
3228 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, new, addr
), UNSPEC_TLS_LOAD
);
3229 temp
= gen_reg_rtx (Pmode
);
3230 emit_insn (gen_rtx_SET (Pmode
, temp
, new));
3233 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
3236 s390_load_address (reg
, new);
3241 case TLS_MODEL_LOCAL_EXEC
:
3242 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), UNSPEC_NTPOFF
);
3243 new = gen_rtx_CONST (Pmode
, new);
3244 new = force_const_mem (Pmode
, new);
3245 temp
= gen_reg_rtx (Pmode
);
3246 emit_move_insn (temp
, new);
3248 new = gen_rtx_PLUS (Pmode
, get_thread_pointer (), temp
);
3251 s390_load_address (reg
, new);
3260 else if (GET_CODE (addr
) == CONST
&& GET_CODE (XEXP (addr
, 0)) == UNSPEC
)
3262 switch (XINT (XEXP (addr
, 0), 1))
3264 case UNSPEC_INDNTPOFF
:
3265 if (TARGET_CPU_ZARCH
)
3276 else if (GET_CODE (addr
) == CONST
&& GET_CODE (XEXP (addr
, 0)) == PLUS
3277 && GET_CODE (XEXP (XEXP (addr
, 0), 1)) == CONST_INT
)
3279 new = XEXP (XEXP (addr
, 0), 0);
3280 if (GET_CODE (new) != SYMBOL_REF
)
3281 new = gen_rtx_CONST (Pmode
, new);
3283 new = legitimize_tls_address (new, reg
);
3284 new = plus_constant (new, INTVAL (XEXP (XEXP (addr
, 0), 1)));
3285 new = force_operand (new, 0);
3289 abort (); /* for now ... */
3294 /* Emit insns to move operands[1] into operands[0]. */
3297 emit_symbolic_move (rtx
*operands
)
3299 rtx temp
= no_new_pseudos
? operands
[0] : gen_reg_rtx (Pmode
);
3301 if (GET_CODE (operands
[0]) == MEM
)
3302 operands
[1] = force_reg (Pmode
, operands
[1]);
3303 else if (TLS_SYMBOLIC_CONST (operands
[1]))
3304 operands
[1] = legitimize_tls_address (operands
[1], temp
);
3306 operands
[1] = legitimize_pic_address (operands
[1], temp
);
3309 /* Try machine-dependent ways of modifying an illegitimate address X
3310 to be legitimate. If we find one, return the new, valid address.
3312 OLDX is the address as it was before break_out_memory_refs was called.
3313 In some cases it is useful to look at this to decide what needs to be done.
3315 MODE is the mode of the operand pointed to by X.
3317 When -fpic is used, special handling is needed for symbolic references.
3318 See comments by legitimize_pic_address for details. */
3321 legitimize_address (register rtx x
, register rtx oldx ATTRIBUTE_UNUSED
,
3322 enum machine_mode mode ATTRIBUTE_UNUSED
)
3324 rtx constant_term
= const0_rtx
;
3326 if (TLS_SYMBOLIC_CONST (x
))
3328 x
= legitimize_tls_address (x
, 0);
3330 if (legitimate_address_p (mode
, x
, FALSE
))
3335 if (SYMBOLIC_CONST (x
)
3336 || (GET_CODE (x
) == PLUS
3337 && (SYMBOLIC_CONST (XEXP (x
, 0))
3338 || SYMBOLIC_CONST (XEXP (x
, 1)))))
3339 x
= legitimize_pic_address (x
, 0);
3341 if (legitimate_address_p (mode
, x
, FALSE
))
3345 x
= eliminate_constant_term (x
, &constant_term
);
3347 /* Optimize loading of large displacements by splitting them
3348 into the multiple of 4K and the rest; this allows the
3349 former to be CSE'd if possible.
3351 Don't do this if the displacement is added to a register
3352 pointing into the stack frame, as the offsets will
3353 change later anyway. */
3355 if (GET_CODE (constant_term
) == CONST_INT
3356 && !TARGET_LONG_DISPLACEMENT
3357 && !DISP_IN_RANGE (INTVAL (constant_term
))
3358 && !(REG_P (x
) && REGNO_PTR_FRAME_P (REGNO (x
))))
3360 HOST_WIDE_INT lower
= INTVAL (constant_term
) & 0xfff;
3361 HOST_WIDE_INT upper
= INTVAL (constant_term
) ^ lower
;
3363 rtx temp
= gen_reg_rtx (Pmode
);
3364 rtx val
= force_operand (GEN_INT (upper
), temp
);
3366 emit_move_insn (temp
, val
);
3368 x
= gen_rtx_PLUS (Pmode
, x
, temp
);
3369 constant_term
= GEN_INT (lower
);
3372 if (GET_CODE (x
) == PLUS
)
3374 if (GET_CODE (XEXP (x
, 0)) == REG
)
3376 register rtx temp
= gen_reg_rtx (Pmode
);
3377 register rtx val
= force_operand (XEXP (x
, 1), temp
);
3379 emit_move_insn (temp
, val
);
3381 x
= gen_rtx_PLUS (Pmode
, XEXP (x
, 0), temp
);
3384 else if (GET_CODE (XEXP (x
, 1)) == REG
)
3386 register rtx temp
= gen_reg_rtx (Pmode
);
3387 register rtx val
= force_operand (XEXP (x
, 0), temp
);
3389 emit_move_insn (temp
, val
);
3391 x
= gen_rtx_PLUS (Pmode
, temp
, XEXP (x
, 1));
3395 if (constant_term
!= const0_rtx
)
3396 x
= gen_rtx_PLUS (Pmode
, x
, constant_term
);
3401 /* Try a machine-dependent way of reloading an illegitimate address AD
3402 operand. If we find one, push the reload and and return the new address.
3404 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3405 and TYPE is the reload type of the current reload. */
3408 legitimize_reload_address (rtx ad
, enum machine_mode mode ATTRIBUTE_UNUSED
,
3409 int opnum
, int type
)
3411 if (!optimize
|| TARGET_LONG_DISPLACEMENT
)
3414 if (GET_CODE (ad
) == PLUS
)
3416 rtx tem
= simplify_binary_operation (PLUS
, Pmode
,
3417 XEXP (ad
, 0), XEXP (ad
, 1));
3422 if (GET_CODE (ad
) == PLUS
3423 && GET_CODE (XEXP (ad
, 0)) == REG
3424 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
3425 && !DISP_IN_RANGE (INTVAL (XEXP (ad
, 1))))
3427 HOST_WIDE_INT lower
= INTVAL (XEXP (ad
, 1)) & 0xfff;
3428 HOST_WIDE_INT upper
= INTVAL (XEXP (ad
, 1)) ^ lower
;
3431 cst
= GEN_INT (upper
);
3432 if (!legitimate_reload_constant_p (cst
))
3433 cst
= force_const_mem (Pmode
, cst
);
3435 tem
= gen_rtx_PLUS (Pmode
, XEXP (ad
, 0), cst
);
3436 new = gen_rtx_PLUS (Pmode
, tem
, GEN_INT (lower
));
3438 push_reload (XEXP (tem
, 1), 0, &XEXP (tem
, 1), 0,
3439 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3440 opnum
, (enum reload_type
) type
);
3447 /* Emit code to move LEN bytes from DST to SRC. */
3450 s390_expand_movmem (rtx dst
, rtx src
, rtx len
)
3452 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3454 if (INTVAL (len
) > 0)
3455 emit_insn (gen_movmem_short (dst
, src
, GEN_INT (INTVAL (len
) - 1)));
3458 else if (TARGET_MVCLE
)
3460 emit_insn (gen_movmem_long (dst
, src
, convert_to_mode (Pmode
, len
, 1)));
3465 rtx dst_addr
, src_addr
, count
, blocks
, temp
;
3466 rtx loop_start_label
= gen_label_rtx ();
3467 rtx loop_end_label
= gen_label_rtx ();
3468 rtx end_label
= gen_label_rtx ();
3469 enum machine_mode mode
;
3471 mode
= GET_MODE (len
);
3472 if (mode
== VOIDmode
)
3475 dst_addr
= gen_reg_rtx (Pmode
);
3476 src_addr
= gen_reg_rtx (Pmode
);
3477 count
= gen_reg_rtx (mode
);
3478 blocks
= gen_reg_rtx (mode
);
3480 convert_move (count
, len
, 1);
3481 emit_cmp_and_jump_insns (count
, const0_rtx
,
3482 EQ
, NULL_RTX
, mode
, 1, end_label
);
3484 emit_move_insn (dst_addr
, force_operand (XEXP (dst
, 0), NULL_RTX
));
3485 emit_move_insn (src_addr
, force_operand (XEXP (src
, 0), NULL_RTX
));
3486 dst
= change_address (dst
, VOIDmode
, dst_addr
);
3487 src
= change_address (src
, VOIDmode
, src_addr
);
3489 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3491 emit_move_insn (count
, temp
);
3493 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3495 emit_move_insn (blocks
, temp
);
3497 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3498 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3500 emit_label (loop_start_label
);
3502 emit_insn (gen_movmem_short (dst
, src
, GEN_INT (255)));
3503 s390_load_address (dst_addr
,
3504 gen_rtx_PLUS (Pmode
, dst_addr
, GEN_INT (256)));
3505 s390_load_address (src_addr
,
3506 gen_rtx_PLUS (Pmode
, src_addr
, GEN_INT (256)));
3508 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3510 emit_move_insn (blocks
, temp
);
3512 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3513 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3515 emit_jump (loop_start_label
);
3516 emit_label (loop_end_label
);
3518 emit_insn (gen_movmem_short (dst
, src
,
3519 convert_to_mode (Pmode
, count
, 1)));
3520 emit_label (end_label
);
3524 /* Emit code to clear LEN bytes at DST. */
3527 s390_expand_clrmem (rtx dst
, rtx len
)
3529 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3531 if (INTVAL (len
) > 0)
3532 emit_insn (gen_clrmem_short (dst
, GEN_INT (INTVAL (len
) - 1)));
3535 else if (TARGET_MVCLE
)
3537 emit_insn (gen_clrmem_long (dst
, convert_to_mode (Pmode
, len
, 1)));
3542 rtx dst_addr
, src_addr
, count
, blocks
, temp
;
3543 rtx loop_start_label
= gen_label_rtx ();
3544 rtx loop_end_label
= gen_label_rtx ();
3545 rtx end_label
= gen_label_rtx ();
3546 enum machine_mode mode
;
3548 mode
= GET_MODE (len
);
3549 if (mode
== VOIDmode
)
3552 dst_addr
= gen_reg_rtx (Pmode
);
3553 src_addr
= gen_reg_rtx (Pmode
);
3554 count
= gen_reg_rtx (mode
);
3555 blocks
= gen_reg_rtx (mode
);
3557 convert_move (count
, len
, 1);
3558 emit_cmp_and_jump_insns (count
, const0_rtx
,
3559 EQ
, NULL_RTX
, mode
, 1, end_label
);
3561 emit_move_insn (dst_addr
, force_operand (XEXP (dst
, 0), NULL_RTX
));
3562 dst
= change_address (dst
, VOIDmode
, dst_addr
);
3564 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3566 emit_move_insn (count
, temp
);
3568 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3570 emit_move_insn (blocks
, temp
);
3572 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3573 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3575 emit_label (loop_start_label
);
3577 emit_insn (gen_clrmem_short (dst
, GEN_INT (255)));
3578 s390_load_address (dst_addr
,
3579 gen_rtx_PLUS (Pmode
, dst_addr
, GEN_INT (256)));
3581 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3583 emit_move_insn (blocks
, temp
);
3585 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3586 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3588 emit_jump (loop_start_label
);
3589 emit_label (loop_end_label
);
3591 emit_insn (gen_clrmem_short (dst
, convert_to_mode (Pmode
, count
, 1)));
3592 emit_label (end_label
);
3596 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3597 and return the result in TARGET. */
3600 s390_expand_cmpmem (rtx target
, rtx op0
, rtx op1
, rtx len
)
3602 rtx ccreg
= gen_rtx_REG (CCUmode
, CC_REGNUM
);
3605 /* As the result of CMPINT is inverted compared to what we need,
3606 we have to swap the operands. */
3607 tmp
= op0
; op0
= op1
; op1
= tmp
;
3609 if (GET_CODE (len
) == CONST_INT
&& INTVAL (len
) >= 0 && INTVAL (len
) <= 256)
3611 if (INTVAL (len
) > 0)
3613 emit_insn (gen_cmpmem_short (op0
, op1
, GEN_INT (INTVAL (len
) - 1)));
3614 emit_insn (gen_cmpint (target
, ccreg
));
3617 emit_move_insn (target
, const0_rtx
);
3619 else if (TARGET_MVCLE
)
3621 emit_insn (gen_cmpmem_long (op0
, op1
, convert_to_mode (Pmode
, len
, 1)));
3622 emit_insn (gen_cmpint (target
, ccreg
));
3626 rtx addr0
, addr1
, count
, blocks
, temp
;
3627 rtx loop_start_label
= gen_label_rtx ();
3628 rtx loop_end_label
= gen_label_rtx ();
3629 rtx end_label
= gen_label_rtx ();
3630 enum machine_mode mode
;
3632 mode
= GET_MODE (len
);
3633 if (mode
== VOIDmode
)
3636 addr0
= gen_reg_rtx (Pmode
);
3637 addr1
= gen_reg_rtx (Pmode
);
3638 count
= gen_reg_rtx (mode
);
3639 blocks
= gen_reg_rtx (mode
);
3641 convert_move (count
, len
, 1);
3642 emit_cmp_and_jump_insns (count
, const0_rtx
,
3643 EQ
, NULL_RTX
, mode
, 1, end_label
);
3645 emit_move_insn (addr0
, force_operand (XEXP (op0
, 0), NULL_RTX
));
3646 emit_move_insn (addr1
, force_operand (XEXP (op1
, 0), NULL_RTX
));
3647 op0
= change_address (op0
, VOIDmode
, addr0
);
3648 op1
= change_address (op1
, VOIDmode
, addr1
);
3650 temp
= expand_binop (mode
, add_optab
, count
, constm1_rtx
, count
, 1, 0);
3652 emit_move_insn (count
, temp
);
3654 temp
= expand_binop (mode
, ashr_optab
, count
, GEN_INT (8), blocks
, 1, 0);
3656 emit_move_insn (blocks
, temp
);
3658 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3659 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3661 emit_label (loop_start_label
);
3663 emit_insn (gen_cmpmem_short (op0
, op1
, GEN_INT (255)));
3664 temp
= gen_rtx_NE (VOIDmode
, ccreg
, const0_rtx
);
3665 temp
= gen_rtx_IF_THEN_ELSE (VOIDmode
, temp
,
3666 gen_rtx_LABEL_REF (VOIDmode
, end_label
), pc_rtx
);
3667 temp
= gen_rtx_SET (VOIDmode
, pc_rtx
, temp
);
3668 emit_jump_insn (temp
);
3670 s390_load_address (addr0
,
3671 gen_rtx_PLUS (Pmode
, addr0
, GEN_INT (256)));
3672 s390_load_address (addr1
,
3673 gen_rtx_PLUS (Pmode
, addr1
, GEN_INT (256)));
3675 temp
= expand_binop (mode
, add_optab
, blocks
, constm1_rtx
, blocks
, 1, 0);
3677 emit_move_insn (blocks
, temp
);
3679 emit_cmp_and_jump_insns (blocks
, const0_rtx
,
3680 EQ
, NULL_RTX
, mode
, 1, loop_end_label
);
3682 emit_jump (loop_start_label
);
3683 emit_label (loop_end_label
);
3685 emit_insn (gen_cmpmem_short (op0
, op1
,
3686 convert_to_mode (Pmode
, count
, 1)));
3687 emit_label (end_label
);
3689 emit_insn (gen_cmpint (target
, ccreg
));
3694 /* Expand conditional increment or decrement using alc/slb instructions.
3695 Should generate code setting DST to either SRC or SRC + INCREMENT,
3696 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3697 Returns true if successful, false otherwise. */
3700 s390_expand_addcc (enum rtx_code cmp_code
, rtx cmp_op0
, rtx cmp_op1
,
3701 rtx dst
, rtx src
, rtx increment
)
3703 enum machine_mode cmp_mode
;
3704 enum machine_mode cc_mode
;
3709 if ((GET_MODE (cmp_op0
) == SImode
|| GET_MODE (cmp_op0
) == VOIDmode
)
3710 && (GET_MODE (cmp_op1
) == SImode
|| GET_MODE (cmp_op1
) == VOIDmode
))
3712 else if ((GET_MODE (cmp_op0
) == DImode
|| GET_MODE (cmp_op0
) == VOIDmode
)
3713 && (GET_MODE (cmp_op1
) == DImode
|| GET_MODE (cmp_op1
) == VOIDmode
))
3718 /* Try ADD LOGICAL WITH CARRY. */
3719 if (increment
== const1_rtx
)
3721 /* Determine CC mode to use. */
3722 if (cmp_code
== EQ
|| cmp_code
== NE
)
3724 if (cmp_op1
!= const0_rtx
)
3726 cmp_op0
= expand_simple_binop (cmp_mode
, XOR
, cmp_op0
, cmp_op1
,
3727 NULL_RTX
, 0, OPTAB_WIDEN
);
3728 cmp_op1
= const0_rtx
;
3731 cmp_code
= cmp_code
== EQ
? LEU
: GTU
;
3734 if (cmp_code
== LTU
|| cmp_code
== LEU
)
3739 cmp_code
= swap_condition (cmp_code
);
3756 /* Emit comparison instruction pattern. */
3757 if (!register_operand (cmp_op0
, cmp_mode
))
3758 cmp_op0
= force_reg (cmp_mode
, cmp_op0
);
3760 insn
= gen_rtx_SET (VOIDmode
, gen_rtx_REG (cc_mode
, CC_REGNUM
),
3761 gen_rtx_COMPARE (cc_mode
, cmp_op0
, cmp_op1
));
3762 /* We use insn_invalid_p here to add clobbers if required. */
3763 if (insn_invalid_p (emit_insn (insn
)))
3766 /* Emit ALC instruction pattern. */
3767 op_res
= gen_rtx_fmt_ee (cmp_code
, GET_MODE (dst
),
3768 gen_rtx_REG (cc_mode
, CC_REGNUM
),
3771 if (src
!= const0_rtx
)
3773 if (!register_operand (src
, GET_MODE (dst
)))
3774 src
= force_reg (GET_MODE (dst
), src
);
3776 src
= gen_rtx_PLUS (GET_MODE (dst
), src
, const0_rtx
);
3777 op_res
= gen_rtx_PLUS (GET_MODE (dst
), src
, op_res
);
3780 p
= rtvec_alloc (2);
3782 gen_rtx_SET (VOIDmode
, dst
, op_res
);
3784 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (CCmode
, CC_REGNUM
));
3785 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
3790 /* Try SUBTRACT LOGICAL WITH BORROW. */
3791 if (increment
== constm1_rtx
)
3793 /* Determine CC mode to use. */
3794 if (cmp_code
== EQ
|| cmp_code
== NE
)
3796 if (cmp_op1
!= const0_rtx
)
3798 cmp_op0
= expand_simple_binop (cmp_mode
, XOR
, cmp_op0
, cmp_op1
,
3799 NULL_RTX
, 0, OPTAB_WIDEN
);
3800 cmp_op1
= const0_rtx
;
3803 cmp_code
= cmp_code
== EQ
? LEU
: GTU
;
3806 if (cmp_code
== GTU
|| cmp_code
== GEU
)
3811 cmp_code
= swap_condition (cmp_code
);
3828 /* Emit comparison instruction pattern. */
3829 if (!register_operand (cmp_op0
, cmp_mode
))
3830 cmp_op0
= force_reg (cmp_mode
, cmp_op0
);
3832 insn
= gen_rtx_SET (VOIDmode
, gen_rtx_REG (cc_mode
, CC_REGNUM
),
3833 gen_rtx_COMPARE (cc_mode
, cmp_op0
, cmp_op1
));
3834 /* We use insn_invalid_p here to add clobbers if required. */
3835 if (insn_invalid_p (emit_insn (insn
)))
3838 /* Emit SLB instruction pattern. */
3839 if (!register_operand (src
, GET_MODE (dst
)))
3840 src
= force_reg (GET_MODE (dst
), src
);
3842 op_res
= gen_rtx_MINUS (GET_MODE (dst
),
3843 gen_rtx_MINUS (GET_MODE (dst
), src
, const0_rtx
),
3844 gen_rtx_fmt_ee (cmp_code
, GET_MODE (dst
),
3845 gen_rtx_REG (cc_mode
, CC_REGNUM
),
3847 p
= rtvec_alloc (2);
3849 gen_rtx_SET (VOIDmode
, dst
, op_res
);
3851 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (CCmode
, CC_REGNUM
));
3852 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
3861 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3862 We need to emit DTP-relative relocations. */
3865 s390_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
3870 fputs ("\t.long\t", file
);
3873 fputs ("\t.quad\t", file
);
3878 output_addr_const (file
, x
);
3879 fputs ("@DTPOFF", file
);
3882 /* In the name of slightly smaller debug output, and to cater to
3883 general assembler losage, recognize various UNSPEC sequences
3884 and turn them back into a direct symbol reference. */
3887 s390_delegitimize_address (rtx orig_x
)
3891 if (GET_CODE (x
) != MEM
)
3895 if (GET_CODE (x
) == PLUS
3896 && GET_CODE (XEXP (x
, 1)) == CONST
3897 && GET_CODE (XEXP (x
, 0)) == REG
3898 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
3900 y
= XEXP (XEXP (x
, 1), 0);
3901 if (GET_CODE (y
) == UNSPEC
3902 && XINT (y
, 1) == UNSPEC_GOT
)
3903 return XVECEXP (y
, 0, 0);
3907 if (GET_CODE (x
) == CONST
)
3910 if (GET_CODE (y
) == UNSPEC
3911 && XINT (y
, 1) == UNSPEC_GOTENT
)
3912 return XVECEXP (y
, 0, 0);
3919 /* Output shift count operand OP to stdio stream FILE. */
3922 print_shift_count_operand (FILE *file
, rtx op
)
3924 HOST_WIDE_INT offset
= 0;
3926 /* We can have an integer constant, an address register,
3927 or a sum of the two. */
3928 if (GET_CODE (op
) == CONST_INT
)
3930 offset
= INTVAL (op
);
3933 if (op
&& GET_CODE (op
) == PLUS
&& GET_CODE (XEXP (op
, 1)) == CONST_INT
)
3935 offset
= INTVAL (XEXP (op
, 1));
3938 while (op
&& GET_CODE (op
) == SUBREG
)
3939 op
= SUBREG_REG (op
);
3942 if (op
&& (GET_CODE (op
) != REG
3943 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
3944 || REGNO_REG_CLASS (REGNO (op
)) != ADDR_REGS
))
3947 /* Shift counts are truncated to the low six bits anyway. */
3948 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, offset
& 63);
3950 fprintf (file
, "(%s)", reg_names
[REGNO (op
)]);
3953 /* Locate some local-dynamic symbol still in use by this function
3954 so that we can print its name in local-dynamic base patterns. */
3957 get_some_local_dynamic_name (void)
3961 if (cfun
->machine
->some_ld_name
)
3962 return cfun
->machine
->some_ld_name
;
3964 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3966 && for_each_rtx (&PATTERN (insn
), get_some_local_dynamic_name_1
, 0))
3967 return cfun
->machine
->some_ld_name
;
3973 get_some_local_dynamic_name_1 (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
3977 if (GET_CODE (x
) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x
))
3979 x
= get_pool_constant (x
);
3980 return for_each_rtx (&x
, get_some_local_dynamic_name_1
, 0);
3983 if (GET_CODE (x
) == SYMBOL_REF
3984 && tls_symbolic_operand (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
3986 cfun
->machine
->some_ld_name
= XSTR (x
, 0);
3993 /* Output machine-dependent UNSPECs occurring in address constant X
3994 in assembler syntax to stdio stream FILE. Returns true if the
3995 constant X could be recognized, false otherwise. */
3998 s390_output_addr_const_extra (FILE *file
, rtx x
)
4000 if (GET_CODE (x
) == UNSPEC
&& XVECLEN (x
, 0) == 1)
4001 switch (XINT (x
, 1))
4004 output_addr_const (file
, XVECEXP (x
, 0, 0));
4005 fprintf (file
, "@GOTENT");
4008 output_addr_const (file
, XVECEXP (x
, 0, 0));
4009 fprintf (file
, "@GOT");
4012 output_addr_const (file
, XVECEXP (x
, 0, 0));
4013 fprintf (file
, "@GOTOFF");
4016 output_addr_const (file
, XVECEXP (x
, 0, 0));
4017 fprintf (file
, "@PLT");
4020 output_addr_const (file
, XVECEXP (x
, 0, 0));
4021 fprintf (file
, "@PLTOFF");
4024 output_addr_const (file
, XVECEXP (x
, 0, 0));
4025 fprintf (file
, "@TLSGD");
4028 assemble_name (file
, get_some_local_dynamic_name ());
4029 fprintf (file
, "@TLSLDM");
4032 output_addr_const (file
, XVECEXP (x
, 0, 0));
4033 fprintf (file
, "@DTPOFF");
4036 output_addr_const (file
, XVECEXP (x
, 0, 0));
4037 fprintf (file
, "@NTPOFF");
4039 case UNSPEC_GOTNTPOFF
:
4040 output_addr_const (file
, XVECEXP (x
, 0, 0));
4041 fprintf (file
, "@GOTNTPOFF");
4043 case UNSPEC_INDNTPOFF
:
4044 output_addr_const (file
, XVECEXP (x
, 0, 0));
4045 fprintf (file
, "@INDNTPOFF");
4052 /* Output address operand ADDR in assembler syntax to
4053 stdio stream FILE. */
4056 print_operand_address (FILE *file
, rtx addr
)
4058 struct s390_address ad
;
4060 if (!s390_decompose_address (addr
, &ad
)
4061 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
4062 || (ad
.indx
&& !REG_OK_FOR_INDEX_STRICT_P (ad
.indx
)))
4063 output_operand_lossage ("Cannot decompose address.");
4066 output_addr_const (file
, ad
.disp
);
4068 fprintf (file
, "0");
4070 if (ad
.base
&& ad
.indx
)
4071 fprintf (file
, "(%s,%s)", reg_names
[REGNO (ad
.indx
)],
4072 reg_names
[REGNO (ad
.base
)]);
4074 fprintf (file
, "(%s)", reg_names
[REGNO (ad
.base
)]);
4077 /* Output operand X in assembler syntax to stdio stream FILE.
4078 CODE specified the format flag. The following format flags
4081 'C': print opcode suffix for branch condition.
4082 'D': print opcode suffix for inverse branch condition.
4083 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4084 'O': print only the displacement of a memory reference.
4085 'R': print only the base register of a memory reference.
4086 'S': print S-type memory reference (base+displacement).
4087 'N': print the second word of a DImode operand.
4088 'M': print the second word of a TImode operand.
4089 'Y': print shift count operand.
4091 'b': print integer X as if it's an unsigned byte.
4092 'x': print integer X as if it's an unsigned word.
4093 'h': print integer X as if it's a signed word.
4094 'i': print the first nonzero HImode part of X.
4095 'j': print the first HImode part unequal to 0xffff of X. */
4098 print_operand (FILE *file
, rtx x
, int code
)
4103 fprintf (file
, s390_branch_condition_mnemonic (x
, FALSE
));
4107 fprintf (file
, s390_branch_condition_mnemonic (x
, TRUE
));
4111 if (GET_CODE (x
) == SYMBOL_REF
)
4113 fprintf (file
, "%s", ":tls_load:");
4114 output_addr_const (file
, x
);
4116 else if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_TLSGD
)
4118 fprintf (file
, "%s", ":tls_gdcall:");
4119 output_addr_const (file
, XVECEXP (x
, 0, 0));
4121 else if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_TLSLDM
)
4123 fprintf (file
, "%s", ":tls_ldcall:");
4124 assemble_name (file
, get_some_local_dynamic_name ());
4132 struct s390_address ad
;
4134 if (GET_CODE (x
) != MEM
4135 || !s390_decompose_address (XEXP (x
, 0), &ad
)
4136 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
4141 output_addr_const (file
, ad
.disp
);
4143 fprintf (file
, "0");
4149 struct s390_address ad
;
4151 if (GET_CODE (x
) != MEM
4152 || !s390_decompose_address (XEXP (x
, 0), &ad
)
4153 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
4158 fprintf (file
, "%s", reg_names
[REGNO (ad
.base
)]);
4160 fprintf (file
, "0");
4166 struct s390_address ad
;
4168 if (GET_CODE (x
) != MEM
4169 || !s390_decompose_address (XEXP (x
, 0), &ad
)
4170 || (ad
.base
&& !REG_OK_FOR_BASE_STRICT_P (ad
.base
))
4175 output_addr_const (file
, ad
.disp
);
4177 fprintf (file
, "0");
4180 fprintf (file
, "(%s)", reg_names
[REGNO (ad
.base
)]);
4185 if (GET_CODE (x
) == REG
)
4186 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
4187 else if (GET_CODE (x
) == MEM
)
4188 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 4));
4194 if (GET_CODE (x
) == REG
)
4195 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
4196 else if (GET_CODE (x
) == MEM
)
4197 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 8));
4203 print_shift_count_operand (file
, x
);
4207 switch (GET_CODE (x
))
4210 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
4214 output_address (XEXP (x
, 0));
4221 output_addr_const (file
, x
);
4226 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xff);
4227 else if (code
== 'x')
4228 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xffff);
4229 else if (code
== 'h')
4230 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((INTVAL (x
) & 0xffff) ^ 0x8000) - 0x8000);
4231 else if (code
== 'i')
4232 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
4233 s390_extract_part (x
, HImode
, 0));
4234 else if (code
== 'j')
4235 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
4236 s390_extract_part (x
, HImode
, -1));
4238 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
4242 if (GET_MODE (x
) != VOIDmode
)
4245 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xff);
4246 else if (code
== 'x')
4247 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xffff);
4248 else if (code
== 'h')
4249 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((CONST_DOUBLE_LOW (x
) & 0xffff) ^ 0x8000) - 0x8000);
4255 fatal_insn ("UNKNOWN in print_operand !?", x
);
4260 /* Target hook for assembling integer objects. We need to define it
4261 here to work a round a bug in some versions of GAS, which couldn't
4262 handle values smaller than INT_MIN when printed in decimal. */
4265 s390_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4267 if (size
== 8 && aligned_p
4268 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < INT_MIN
)
4270 fprintf (asm_out_file
, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX
"\n",
4274 return default_assemble_integer (x
, size
, aligned_p
);
4277 /* Returns true if register REGNO is used for forming
4278 a memory address in expression X. */
4281 reg_used_in_mem_p (int regno
, rtx x
)
4283 enum rtx_code code
= GET_CODE (x
);
4289 if (refers_to_regno_p (regno
, regno
+1,
4293 else if (code
== SET
4294 && GET_CODE (SET_DEST (x
)) == PC
)
4296 if (refers_to_regno_p (regno
, regno
+1,
4301 fmt
= GET_RTX_FORMAT (code
);
4302 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4305 && reg_used_in_mem_p (regno
, XEXP (x
, i
)))
4308 else if (fmt
[i
] == 'E')
4309 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4310 if (reg_used_in_mem_p (regno
, XVECEXP (x
, i
, j
)))
4316 /* Returns true if expression DEP_RTX sets an address register
4317 used by instruction INSN to address memory. */
4320 addr_generation_dependency_p (rtx dep_rtx
, rtx insn
)
4324 if (GET_CODE (dep_rtx
) == INSN
)
4325 dep_rtx
= PATTERN (dep_rtx
);
4327 if (GET_CODE (dep_rtx
) == SET
)
4329 target
= SET_DEST (dep_rtx
);
4330 if (GET_CODE (target
) == STRICT_LOW_PART
)
4331 target
= XEXP (target
, 0);
4332 while (GET_CODE (target
) == SUBREG
)
4333 target
= SUBREG_REG (target
);
4335 if (GET_CODE (target
) == REG
)
4337 int regno
= REGNO (target
);
4339 if (s390_safe_attr_type (insn
) == TYPE_LA
)
4341 pat
= PATTERN (insn
);
4342 if (GET_CODE (pat
) == PARALLEL
)
4344 if (XVECLEN (pat
, 0) != 2)
4346 pat
= XVECEXP (pat
, 0, 0);
4348 if (GET_CODE (pat
) == SET
)
4349 return refers_to_regno_p (regno
, regno
+1, SET_SRC (pat
), 0);
4353 else if (get_attr_atype (insn
) == ATYPE_AGEN
)
4354 return reg_used_in_mem_p (regno
, PATTERN (insn
));
4360 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
4363 s390_agen_dep_p (rtx dep_insn
, rtx insn
)
4365 rtx dep_rtx
= PATTERN (dep_insn
);
4368 if (GET_CODE (dep_rtx
) == SET
4369 && addr_generation_dependency_p (dep_rtx
, insn
))
4371 else if (GET_CODE (dep_rtx
) == PARALLEL
)
4373 for (i
= 0; i
< XVECLEN (dep_rtx
, 0); i
++)
4375 if (addr_generation_dependency_p (XVECEXP (dep_rtx
, 0, i
), insn
))
4382 /* A C statement (sans semicolon) to update the integer scheduling priority
4383 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4384 reduce the priority to execute INSN later. Do not define this macro if
4385 you do not need to adjust the scheduling priorities of insns.
4387 A STD instruction should be scheduled earlier,
4388 in order to use the bypass. */
4391 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED
, int priority
)
4393 if (! INSN_P (insn
))
4396 if (s390_tune
!= PROCESSOR_2084_Z990
)
4399 switch (s390_safe_attr_type (insn
))
4403 priority
= priority
<< 3;
4407 priority
= priority
<< 1;
4415 /* The number of instructions that can be issued per cycle. */
4418 s390_issue_rate (void)
4420 if (s390_tune
== PROCESSOR_2084_Z990
)
/* Scheduler hook: number of insns the multipass DFA scheduler looks
   ahead on the first cycle.  NOTE(review): the return value was lost
   in this copy; 4 matches the original s390.c — confirm.  */
static int
s390_first_cycle_multipass_dfa_lookahead (void)
{
  return 4;
}
4432 /* Split all branches that exceed the maximum distance.
4433 Returns true if this created a new literal pool entry. */
4436 s390_split_branches (void)
4438 rtx temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
4439 int new_literal
= 0;
4440 rtx insn
, pat
, tmp
, target
;
4443 /* We need correct insn addresses. */
4445 shorten_branches (get_insns ());
4447 /* Find all branches that exceed 64KB, and split them. */
4449 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4451 if (GET_CODE (insn
) != JUMP_INSN
)
4454 pat
= PATTERN (insn
);
4455 if (GET_CODE (pat
) == PARALLEL
&& XVECLEN (pat
, 0) > 2)
4456 pat
= XVECEXP (pat
, 0, 0);
4457 if (GET_CODE (pat
) != SET
|| SET_DEST (pat
) != pc_rtx
)
4460 if (GET_CODE (SET_SRC (pat
)) == LABEL_REF
)
4462 label
= &SET_SRC (pat
);
4464 else if (GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
4466 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) == LABEL_REF
)
4467 label
= &XEXP (SET_SRC (pat
), 1);
4468 else if (GET_CODE (XEXP (SET_SRC (pat
), 2)) == LABEL_REF
)
4469 label
= &XEXP (SET_SRC (pat
), 2);
4476 if (get_attr_length (insn
) <= 4)
4479 /* We are going to use the return register as scratch register,
4480 make sure it will be saved/restored by the prologue/epilogue. */
4481 cfun_frame_layout
.save_return_addr_p
= 1;
4486 tmp
= force_const_mem (Pmode
, *label
);
4487 tmp
= emit_insn_before (gen_rtx_SET (Pmode
, temp_reg
, tmp
), insn
);
4488 INSN_ADDRESSES_NEW (tmp
, -1);
4489 annotate_constant_pool_refs (&PATTERN (tmp
));
4496 target
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, *label
),
4497 UNSPEC_LTREL_OFFSET
);
4498 target
= gen_rtx_CONST (Pmode
, target
);
4499 target
= force_const_mem (Pmode
, target
);
4500 tmp
= emit_insn_before (gen_rtx_SET (Pmode
, temp_reg
, target
), insn
);
4501 INSN_ADDRESSES_NEW (tmp
, -1);
4502 annotate_constant_pool_refs (&PATTERN (tmp
));
4504 target
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, XEXP (target
, 0),
4505 cfun
->machine
->base_reg
),
4507 target
= gen_rtx_PLUS (Pmode
, temp_reg
, target
);
4510 if (!validate_change (insn
, label
, target
, 0))
4517 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4518 Fix up MEMs as required. */
4521 annotate_constant_pool_refs (rtx
*x
)
4526 if (GET_CODE (*x
) == SYMBOL_REF
4527 && CONSTANT_POOL_ADDRESS_P (*x
))
4530 /* Literal pool references can only occur inside a MEM ... */
4531 if (GET_CODE (*x
) == MEM
)
4533 rtx memref
= XEXP (*x
, 0);
4535 if (GET_CODE (memref
) == SYMBOL_REF
4536 && CONSTANT_POOL_ADDRESS_P (memref
))
4538 rtx base
= cfun
->machine
->base_reg
;
4539 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, memref
, base
),
4542 *x
= replace_equiv_address (*x
, addr
);
4546 if (GET_CODE (memref
) == CONST
4547 && GET_CODE (XEXP (memref
, 0)) == PLUS
4548 && GET_CODE (XEXP (XEXP (memref
, 0), 1)) == CONST_INT
4549 && GET_CODE (XEXP (XEXP (memref
, 0), 0)) == SYMBOL_REF
4550 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref
, 0), 0)))
4552 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (memref
, 0), 1));
4553 rtx sym
= XEXP (XEXP (memref
, 0), 0);
4554 rtx base
= cfun
->machine
->base_reg
;
4555 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, sym
, base
),
4558 *x
= replace_equiv_address (*x
, plus_constant (addr
, off
));
4563 /* ... or a load-address type pattern. */
4564 if (GET_CODE (*x
) == SET
)
4566 rtx addrref
= SET_SRC (*x
);
4568 if (GET_CODE (addrref
) == SYMBOL_REF
4569 && CONSTANT_POOL_ADDRESS_P (addrref
))
4571 rtx base
= cfun
->machine
->base_reg
;
4572 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, addrref
, base
),
4575 SET_SRC (*x
) = addr
;
4579 if (GET_CODE (addrref
) == CONST
4580 && GET_CODE (XEXP (addrref
, 0)) == PLUS
4581 && GET_CODE (XEXP (XEXP (addrref
, 0), 1)) == CONST_INT
4582 && GET_CODE (XEXP (XEXP (addrref
, 0), 0)) == SYMBOL_REF
4583 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref
, 0), 0)))
4585 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (addrref
, 0), 1));
4586 rtx sym
= XEXP (XEXP (addrref
, 0), 0);
4587 rtx base
= cfun
->machine
->base_reg
;
4588 rtx addr
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, sym
, base
),
4591 SET_SRC (*x
) = plus_constant (addr
, off
);
4596 /* Annotate LTREL_BASE as well. */
4597 if (GET_CODE (*x
) == UNSPEC
4598 && XINT (*x
, 1) == UNSPEC_LTREL_BASE
)
4600 rtx base
= cfun
->machine
->base_reg
;
4601 *x
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (2, XVECEXP (*x
, 0, 0), base
),
4606 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4607 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4611 annotate_constant_pool_refs (&XEXP (*x
, i
));
4613 else if (fmt
[i
] == 'E')
4615 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4616 annotate_constant_pool_refs (&XVECEXP (*x
, i
, j
));
4622 /* Find an annotated literal pool symbol referenced in RTX X,
4623 and store it at REF. Will abort if X contains references to
4624 more than one such pool symbol; multiple references to the same
4625 symbol are allowed, however.
4627 The rtx pointed to by REF must be initialized to NULL_RTX
4628 by the caller before calling this routine. */
4631 find_constant_pool_ref (rtx x
, rtx
*ref
)
4636 /* Ignore LTREL_BASE references. */
4637 if (GET_CODE (x
) == UNSPEC
4638 && XINT (x
, 1) == UNSPEC_LTREL_BASE
)
4640 /* Likewise POOL_ENTRY insns. */
4641 if (GET_CODE (x
) == UNSPEC_VOLATILE
4642 && XINT (x
, 1) == UNSPECV_POOL_ENTRY
)
4645 if (GET_CODE (x
) == SYMBOL_REF
4646 && CONSTANT_POOL_ADDRESS_P (x
))
4649 if (GET_CODE (x
) == UNSPEC
&& XINT (x
, 1) == UNSPEC_LTREF
)
4651 rtx sym
= XVECEXP (x
, 0, 0);
4652 if (GET_CODE (sym
) != SYMBOL_REF
4653 || !CONSTANT_POOL_ADDRESS_P (sym
))
4656 if (*ref
== NULL_RTX
)
4658 else if (*ref
!= sym
)
4664 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4665 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4669 find_constant_pool_ref (XEXP (x
, i
), ref
);
4671 else if (fmt
[i
] == 'E')
4673 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4674 find_constant_pool_ref (XVECEXP (x
, i
, j
), ref
);
4679 /* Replace every reference to the annotated literal pool
4680 symbol REF in X by its base plus OFFSET. */
4683 replace_constant_pool_ref (rtx
*x
, rtx ref
, rtx offset
)
4691 if (GET_CODE (*x
) == UNSPEC
4692 && XINT (*x
, 1) == UNSPEC_LTREF
4693 && XVECEXP (*x
, 0, 0) == ref
)
4695 *x
= gen_rtx_PLUS (Pmode
, XVECEXP (*x
, 0, 1), offset
);
4699 if (GET_CODE (*x
) == PLUS
4700 && GET_CODE (XEXP (*x
, 1)) == CONST_INT
4701 && GET_CODE (XEXP (*x
, 0)) == UNSPEC
4702 && XINT (XEXP (*x
, 0), 1) == UNSPEC_LTREF
4703 && XVECEXP (XEXP (*x
, 0), 0, 0) == ref
)
4705 rtx addr
= gen_rtx_PLUS (Pmode
, XVECEXP (XEXP (*x
, 0), 0, 1), offset
);
4706 *x
= plus_constant (addr
, INTVAL (XEXP (*x
, 1)));
4710 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4711 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4715 replace_constant_pool_ref (&XEXP (*x
, i
), ref
, offset
);
4717 else if (fmt
[i
] == 'E')
4719 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4720 replace_constant_pool_ref (&XVECEXP (*x
, i
, j
), ref
, offset
);
4725 /* Check whether X contains an UNSPEC_LTREL_BASE.
4726 Return its constant pool symbol if found, NULL_RTX otherwise. */
4729 find_ltrel_base (rtx x
)
4734 if (GET_CODE (x
) == UNSPEC
4735 && XINT (x
, 1) == UNSPEC_LTREL_BASE
)
4736 return XVECEXP (x
, 0, 0);
4738 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
4739 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
4743 rtx fnd
= find_ltrel_base (XEXP (x
, i
));
4747 else if (fmt
[i
] == 'E')
4749 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4751 rtx fnd
= find_ltrel_base (XVECEXP (x
, i
, j
));
4761 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4764 replace_ltrel_base (rtx
*x
)
4769 if (GET_CODE (*x
) == UNSPEC
4770 && XINT (*x
, 1) == UNSPEC_LTREL_BASE
)
4772 *x
= XVECEXP (*x
, 0, 1);
4776 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
4777 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
4781 replace_ltrel_base (&XEXP (*x
, i
));
4783 else if (fmt
[i
] == 'E')
4785 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
4786 replace_ltrel_base (&XVECEXP (*x
, i
, j
));
4792 /* We keep a list of constants which we have to add to internal
4793 constant tables in the middle of large functions. */
4795 #define NR_C_MODES 7
4796 enum machine_mode constant_modes
[NR_C_MODES
] =
4807 struct constant
*next
;
4812 struct constant_pool
4814 struct constant_pool
*next
;
4819 struct constant
*constants
[NR_C_MODES
];
4820 struct constant
*execute
;
4825 static struct constant_pool
* s390_mainpool_start (void);
4826 static void s390_mainpool_finish (struct constant_pool
*);
4827 static void s390_mainpool_cancel (struct constant_pool
*);
4829 static struct constant_pool
* s390_chunkify_start (void);
4830 static void s390_chunkify_finish (struct constant_pool
*);
4831 static void s390_chunkify_cancel (struct constant_pool
*);
4833 static struct constant_pool
*s390_start_pool (struct constant_pool
**, rtx
);
4834 static void s390_end_pool (struct constant_pool
*, rtx
);
4835 static void s390_add_pool_insn (struct constant_pool
*, rtx
);
4836 static struct constant_pool
*s390_find_pool (struct constant_pool
*, rtx
);
4837 static void s390_add_constant (struct constant_pool
*, rtx
, enum machine_mode
);
4838 static rtx
s390_find_constant (struct constant_pool
*, rtx
, enum machine_mode
);
4839 static void s390_add_execute (struct constant_pool
*, rtx
);
4840 static rtx
s390_find_execute (struct constant_pool
*, rtx
);
4841 static rtx
s390_execute_label (rtx
);
4842 static rtx
s390_execute_target (rtx
);
4843 static void s390_dump_pool (struct constant_pool
*, bool);
4844 static void s390_dump_execute (struct constant_pool
*);
4845 static struct constant_pool
*s390_alloc_pool (void);
4846 static void s390_free_pool (struct constant_pool
*);
4848 /* Create new constant pool covering instructions starting at INSN
4849 and chain it to the end of POOL_LIST. */
4851 static struct constant_pool
*
4852 s390_start_pool (struct constant_pool
**pool_list
, rtx insn
)
4854 struct constant_pool
*pool
, **prev
;
4856 pool
= s390_alloc_pool ();
4857 pool
->first_insn
= insn
;
4859 for (prev
= pool_list
; *prev
; prev
= &(*prev
)->next
)
4866 /* End range of instructions covered by POOL at INSN and emit
4867 placeholder insn representing the pool. */
4870 s390_end_pool (struct constant_pool
*pool
, rtx insn
)
4872 rtx pool_size
= GEN_INT (pool
->size
+ 8 /* alignment slop */);
4875 insn
= get_last_insn ();
4877 pool
->pool_insn
= emit_insn_after (gen_pool (pool_size
), insn
);
4878 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
4881 /* Add INSN to the list of insns covered by POOL. */
4884 s390_add_pool_insn (struct constant_pool
*pool
, rtx insn
)
4886 bitmap_set_bit (pool
->insns
, INSN_UID (insn
));
4889 /* Return pool out of POOL_LIST that covers INSN. */
4891 static struct constant_pool
*
4892 s390_find_pool (struct constant_pool
*pool_list
, rtx insn
)
4894 struct constant_pool
*pool
;
4896 for (pool
= pool_list
; pool
; pool
= pool
->next
)
4897 if (bitmap_bit_p (pool
->insns
, INSN_UID (insn
)))
4903 /* Add constant VAL of mode MODE to the constant pool POOL. */
4906 s390_add_constant (struct constant_pool
*pool
, rtx val
, enum machine_mode mode
)
4911 for (i
= 0; i
< NR_C_MODES
; i
++)
4912 if (constant_modes
[i
] == mode
)
4914 if (i
== NR_C_MODES
)
4917 for (c
= pool
->constants
[i
]; c
!= NULL
; c
= c
->next
)
4918 if (rtx_equal_p (val
, c
->value
))
4923 c
= (struct constant
*) xmalloc (sizeof *c
);
4925 c
->label
= gen_label_rtx ();
4926 c
->next
= pool
->constants
[i
];
4927 pool
->constants
[i
] = c
;
4928 pool
->size
+= GET_MODE_SIZE (mode
);
4932 /* Find constant VAL of mode MODE in the constant pool POOL.
4933 Return an RTX describing the distance from the start of
4934 the pool to the location of the new constant. */
4937 s390_find_constant (struct constant_pool
*pool
, rtx val
,
4938 enum machine_mode mode
)
4944 for (i
= 0; i
< NR_C_MODES
; i
++)
4945 if (constant_modes
[i
] == mode
)
4947 if (i
== NR_C_MODES
)
4950 for (c
= pool
->constants
[i
]; c
!= NULL
; c
= c
->next
)
4951 if (rtx_equal_p (val
, c
->value
))
4957 offset
= gen_rtx_MINUS (Pmode
, gen_rtx_LABEL_REF (Pmode
, c
->label
),
4958 gen_rtx_LABEL_REF (Pmode
, pool
->label
));
4959 offset
= gen_rtx_CONST (Pmode
, offset
);
4963 /* Add execute target for INSN to the constant pool POOL. */
4966 s390_add_execute (struct constant_pool
*pool
, rtx insn
)
4970 for (c
= pool
->execute
; c
!= NULL
; c
= c
->next
)
4971 if (INSN_UID (insn
) == INSN_UID (c
->value
))
4976 rtx label
= s390_execute_label (insn
);
4979 c
= (struct constant
*) xmalloc (sizeof *c
);
4981 c
->label
= label
== const0_rtx
? gen_label_rtx () : XEXP (label
, 0);
4982 c
->next
= pool
->execute
;
4984 pool
->size
+= label
== const0_rtx
? 6 : 0;
4988 /* Find execute target for INSN in the constant pool POOL.
4989 Return an RTX describing the distance from the start of
4990 the pool to the location of the execute target. */
4993 s390_find_execute (struct constant_pool
*pool
, rtx insn
)
4998 for (c
= pool
->execute
; c
!= NULL
; c
= c
->next
)
4999 if (INSN_UID (insn
) == INSN_UID (c
->value
))
5005 offset
= gen_rtx_MINUS (Pmode
, gen_rtx_LABEL_REF (Pmode
, c
->label
),
5006 gen_rtx_LABEL_REF (Pmode
, pool
->label
));
5007 offset
= gen_rtx_CONST (Pmode
, offset
);
5011 /* Check whether INSN is an execute. Return the label_ref to its
5012 execute target template if so, NULL_RTX otherwise. */
5015 s390_execute_label (rtx insn
)
5017 if (GET_CODE (insn
) == INSN
5018 && GET_CODE (PATTERN (insn
)) == PARALLEL
5019 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == UNSPEC
5020 && XINT (XVECEXP (PATTERN (insn
), 0, 0), 1) == UNSPEC_EXECUTE
)
5021 return XVECEXP (XVECEXP (PATTERN (insn
), 0, 0), 0, 2);
5026 /* For an execute INSN, extract the execute target template. */
5029 s390_execute_target (rtx insn
)
5031 rtx pattern
= PATTERN (insn
);
5032 gcc_assert (s390_execute_label (insn
));
5034 if (XVECLEN (pattern
, 0) == 2)
5036 pattern
= copy_rtx (XVECEXP (pattern
, 0, 1));
5040 rtvec vec
= rtvec_alloc (XVECLEN (pattern
, 0) - 1);
5043 for (i
= 0; i
< XVECLEN (pattern
, 0) - 1; i
++)
5044 RTVEC_ELT (vec
, i
) = copy_rtx (XVECEXP (pattern
, 0, i
+ 1));
5046 pattern
= gen_rtx_PARALLEL (VOIDmode
, vec
);
5052 /* Indicate that INSN cannot be duplicated. This is the case for
5053 execute insns that carry a unique label. */
5056 s390_cannot_copy_insn_p (rtx insn
)
5058 rtx label
= s390_execute_label (insn
);
5059 return label
&& label
!= const0_rtx
;
5062 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5063 do not emit the pool base label. */
5066 s390_dump_pool (struct constant_pool
*pool
, bool remote_label
)
5069 rtx insn
= pool
->pool_insn
;
5072 /* Switch to rodata section. */
5073 if (TARGET_CPU_ZARCH
)
5075 insn
= emit_insn_after (gen_pool_section_start (), insn
);
5076 INSN_ADDRESSES_NEW (insn
, -1);
5079 /* Ensure minimum pool alignment. */
5080 if (TARGET_CPU_ZARCH
)
5081 insn
= emit_insn_after (gen_pool_align (GEN_INT (8)), insn
);
5083 insn
= emit_insn_after (gen_pool_align (GEN_INT (4)), insn
);
5084 INSN_ADDRESSES_NEW (insn
, -1);
5086 /* Emit pool base label. */
5089 insn
= emit_label_after (pool
->label
, insn
);
5090 INSN_ADDRESSES_NEW (insn
, -1);
5093 /* Dump constants in descending alignment requirement order,
5094 ensuring proper alignment for every constant. */
5095 for (i
= 0; i
< NR_C_MODES
; i
++)
5096 for (c
= pool
->constants
[i
]; c
; c
= c
->next
)
5098 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5099 rtx value
= c
->value
;
5100 if (GET_CODE (value
) == CONST
5101 && GET_CODE (XEXP (value
, 0)) == UNSPEC
5102 && XINT (XEXP (value
, 0), 1) == UNSPEC_LTREL_OFFSET
5103 && XVECLEN (XEXP (value
, 0), 0) == 1)
5105 value
= gen_rtx_MINUS (Pmode
, XVECEXP (XEXP (value
, 0), 0, 0),
5106 gen_rtx_LABEL_REF (VOIDmode
, pool
->label
));
5107 value
= gen_rtx_CONST (VOIDmode
, value
);
5110 insn
= emit_label_after (c
->label
, insn
);
5111 INSN_ADDRESSES_NEW (insn
, -1);
5113 value
= gen_rtx_UNSPEC_VOLATILE (constant_modes
[i
],
5114 gen_rtvec (1, value
),
5115 UNSPECV_POOL_ENTRY
);
5116 insn
= emit_insn_after (value
, insn
);
5117 INSN_ADDRESSES_NEW (insn
, -1);
5120 /* Ensure minimum alignment for instructions. */
5121 insn
= emit_insn_after (gen_pool_align (GEN_INT (2)), insn
);
5122 INSN_ADDRESSES_NEW (insn
, -1);
5124 /* Output in-pool execute template insns. */
5125 for (c
= pool
->execute
; c
; c
= c
->next
)
5127 if (s390_execute_label (c
->value
) != const0_rtx
)
5130 insn
= emit_label_after (c
->label
, insn
);
5131 INSN_ADDRESSES_NEW (insn
, -1);
5133 insn
= emit_insn_after (s390_execute_target (c
->value
), insn
);
5134 INSN_ADDRESSES_NEW (insn
, -1);
5137 /* Switch back to previous section. */
5138 if (TARGET_CPU_ZARCH
)
5140 insn
= emit_insn_after (gen_pool_section_end (), insn
);
5141 INSN_ADDRESSES_NEW (insn
, -1);
5144 insn
= emit_barrier_after (insn
);
5145 INSN_ADDRESSES_NEW (insn
, -1);
5147 /* Remove placeholder insn. */
5148 remove_insn (pool
->pool_insn
);
5150 /* Output out-of-pool execute template isns. */
5151 s390_dump_execute (pool
);
5154 /* Dump out the out-of-pool execute template insns in POOL
5155 at the end of the instruction stream. */
5158 s390_dump_execute (struct constant_pool
*pool
)
5163 for (c
= pool
->execute
; c
; c
= c
->next
)
5165 if (s390_execute_label (c
->value
) == const0_rtx
)
5168 insn
= emit_label (c
->label
);
5169 INSN_ADDRESSES_NEW (insn
, -1);
5171 insn
= emit_insn (s390_execute_target (c
->value
));
5172 INSN_ADDRESSES_NEW (insn
, -1);
5176 /* Allocate new constant_pool structure. */
5178 static struct constant_pool
*
5179 s390_alloc_pool (void)
5181 struct constant_pool
*pool
;
5184 pool
= (struct constant_pool
*) xmalloc (sizeof *pool
);
5186 for (i
= 0; i
< NR_C_MODES
; i
++)
5187 pool
->constants
[i
] = NULL
;
5189 pool
->execute
= NULL
;
5190 pool
->label
= gen_label_rtx ();
5191 pool
->first_insn
= NULL_RTX
;
5192 pool
->pool_insn
= NULL_RTX
;
5193 pool
->insns
= BITMAP_XMALLOC ();
5199 /* Free all memory used by POOL. */
5202 s390_free_pool (struct constant_pool
*pool
)
5204 struct constant
*c
, *next
;
5207 for (i
= 0; i
< NR_C_MODES
; i
++)
5208 for (c
= pool
->constants
[i
]; c
; c
= next
)
5214 for (c
= pool
->execute
; c
; c
= next
)
5220 BITMAP_XFREE (pool
->insns
);
5225 /* Collect main literal pool. Return NULL on overflow. */
5227 static struct constant_pool
*
5228 s390_mainpool_start (void)
5230 struct constant_pool
*pool
;
5233 pool
= s390_alloc_pool ();
5235 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5237 if (GET_CODE (insn
) == INSN
5238 && GET_CODE (PATTERN (insn
)) == SET
5239 && GET_CODE (SET_SRC (PATTERN (insn
))) == UNSPEC_VOLATILE
5240 && XINT (SET_SRC (PATTERN (insn
)), 1) == UNSPECV_MAIN_POOL
)
5242 if (pool
->pool_insn
)
5244 pool
->pool_insn
= insn
;
5247 if (s390_execute_label (insn
))
5249 s390_add_execute (pool
, insn
);
5251 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
5253 rtx pool_ref
= NULL_RTX
;
5254 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
5257 rtx constant
= get_pool_constant (pool_ref
);
5258 enum machine_mode mode
= get_pool_mode (pool_ref
);
5259 s390_add_constant (pool
, constant
, mode
);
5264 if (!pool
->pool_insn
&& pool
->size
> 0)
5267 if (pool
->size
>= 4096)
5269 /* We're going to chunkify the pool, so remove the main
5270 pool placeholder insn. */
5271 remove_insn (pool
->pool_insn
);
5273 s390_free_pool (pool
);
5280 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5281 Modify the current function to output the pool constants as well as
5282 the pool register setup instruction. */
5285 s390_mainpool_finish (struct constant_pool
*pool
)
5287 rtx base_reg
= cfun
->machine
->base_reg
;
5290 /* If the pool is empty, we're done. */
5291 if (pool
->size
== 0)
5293 /* However, we may have out-of-pool execute templates. */
5294 s390_dump_execute (pool
);
5296 /* We don't actually need a base register after all. */
5297 cfun
->machine
->base_reg
= NULL_RTX
;
5299 if (pool
->pool_insn
)
5300 remove_insn (pool
->pool_insn
);
5301 s390_free_pool (pool
);
5305 /* We need correct insn addresses. */
5306 shorten_branches (get_insns ());
5308 /* On zSeries, we use a LARL to load the pool register. The pool is
5309 located in the .rodata section, so we emit it after the function. */
5310 if (TARGET_CPU_ZARCH
)
5312 insn
= gen_main_base_64 (base_reg
, pool
->label
);
5313 insn
= emit_insn_after (insn
, pool
->pool_insn
);
5314 INSN_ADDRESSES_NEW (insn
, -1);
5315 remove_insn (pool
->pool_insn
);
5317 insn
= get_last_insn ();
5318 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
5319 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
5321 s390_dump_pool (pool
, 0);
5324 /* On S/390, if the total size of the function's code plus literal pool
5325 does not exceed 4096 bytes, we use BASR to set up a function base
5326 pointer, and emit the literal pool at the end of the function. */
5327 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5328 + pool
->size
+ 8 /* alignment slop */ < 4096)
5330 insn
= gen_main_base_31_small (base_reg
, pool
->label
);
5331 insn
= emit_insn_after (insn
, pool
->pool_insn
);
5332 INSN_ADDRESSES_NEW (insn
, -1);
5333 remove_insn (pool
->pool_insn
);
5335 insn
= emit_label_after (pool
->label
, insn
);
5336 INSN_ADDRESSES_NEW (insn
, -1);
5338 insn
= get_last_insn ();
5339 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
5340 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
5342 s390_dump_pool (pool
, 1);
5345 /* Otherwise, we emit an inline literal pool and use BASR to branch
5346 over it, setting up the pool register at the same time. */
5349 rtx pool_end
= gen_label_rtx ();
5351 insn
= gen_main_base_31_large (base_reg
, pool
->label
, pool_end
);
5352 insn
= emit_insn_after (insn
, pool
->pool_insn
);
5353 INSN_ADDRESSES_NEW (insn
, -1);
5354 remove_insn (pool
->pool_insn
);
5356 insn
= emit_label_after (pool
->label
, insn
);
5357 INSN_ADDRESSES_NEW (insn
, -1);
5359 pool
->pool_insn
= emit_insn_after (gen_pool (const0_rtx
), insn
);
5360 INSN_ADDRESSES_NEW (pool
->pool_insn
, -1);
5362 insn
= emit_label_after (pool_end
, pool
->pool_insn
);
5363 INSN_ADDRESSES_NEW (insn
, -1);
5365 s390_dump_pool (pool
, 1);
5369 /* Replace all literal pool references. */
5371 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5374 replace_ltrel_base (&PATTERN (insn
));
5376 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
5378 rtx addr
, pool_ref
= NULL_RTX
;
5379 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
5382 if (s390_execute_label (insn
))
5383 addr
= s390_find_execute (pool
, insn
);
5385 addr
= s390_find_constant (pool
, get_pool_constant (pool_ref
),
5386 get_pool_mode (pool_ref
));
5388 replace_constant_pool_ref (&PATTERN (insn
), pool_ref
, addr
);
5389 INSN_CODE (insn
) = -1;
5395 /* Free the pool. */
5396 s390_free_pool (pool
);
/* POOL holds the main literal pool as collected by s390_mainpool_start.
   We have decided we cannot use this pool, so revert all changes
   to the current function that were done by s390_mainpool_start.  */

static void
s390_mainpool_cancel (struct constant_pool *pool)
{
  /* We didn't actually change the instruction stream, so simply
     free the pool memory.  */
  s390_free_pool (pool);
}
/* NOTE(review): garbled extraction — tokens split across lines and several
   original lines (braces, 'if' headers such as the one feeding the '||' clauses
   below, 'continue's) are missing.  Code kept byte-identical.
   Purpose (from visible code): walk all insns, moving literal-pool constants
   into pool chunks bounded by S390_POOL_CHUNK_MIN/MAX, ending chunks at
   BARRIERs (or synthesizing a jump+barrier+label when none arrives in time),
   then mark labels branched into from a different chunk and emit base-register
   reload insns before each pool and at each such far label.  */
5411 /* Chunkify the literal pool. */
5413 #define S390_POOL_CHUNK_MIN 0xc00
5414 #define S390_POOL_CHUNK_MAX 0xe00
5416 static struct constant_pool
*
5417 s390_chunkify_start (void)
5419 struct constant_pool
*curr_pool
= NULL
, *pool_list
= NULL
;
5422 rtx pending_ltrel
= NULL_RTX
;
/* gen_reload_base is selected once per invocation: 64-bit reload pattern on
   z/Architecture, 31-bit pattern otherwise.  */
5425 rtx (*gen_reload_base
) (rtx
, rtx
) =
5426 TARGET_CPU_ZARCH
? gen_reload_base_64
: gen_reload_base_31
;
5429 /* We need correct insn addresses. */
5431 shorten_branches (get_insns ());
5433 /* Scan all insns and move literals to pool chunks. */
5435 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5437 /* Check for pending LTREL_BASE. */
5440 rtx ltrel_base
= find_ltrel_base (PATTERN (insn
));
5443 if (ltrel_base
== pending_ltrel
)
5444 pending_ltrel
= NULL_RTX
;
5450 if (s390_execute_label (insn
))
5453 curr_pool
= s390_start_pool (&pool_list
, insn
);
5455 s390_add_execute (curr_pool
, insn
);
5456 s390_add_pool_insn (curr_pool
, insn
);
5458 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
5460 rtx pool_ref
= NULL_RTX
;
5461 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
5464 rtx constant
= get_pool_constant (pool_ref
);
5465 enum machine_mode mode
= get_pool_mode (pool_ref
);
5468 curr_pool
= s390_start_pool (&pool_list
, insn
);
5470 s390_add_constant (curr_pool
, constant
, mode
);
5471 s390_add_pool_insn (curr_pool
, insn
);
5473 /* Don't split the pool chunk between a LTREL_OFFSET load
5474 and the corresponding LTREL_BASE. */
5475 if (GET_CODE (constant
) == CONST
5476 && GET_CODE (XEXP (constant
, 0)) == UNSPEC
5477 && XINT (XEXP (constant
, 0), 1) == UNSPEC_LTREL_OFFSET
)
5481 pending_ltrel
= pool_ref
;
5486 if (GET_CODE (insn
) == JUMP_INSN
|| GET_CODE (insn
) == CODE_LABEL
)
5489 s390_add_pool_insn (curr_pool
, insn
);
5490 /* An LTREL_BASE must follow within the same basic block. */
/* NOTE(review): the 'if (...' head of this condition was lost in extraction;
   only the trailing '||' clauses survive.  */
5496 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn
)
5497 || INSN_ADDRESSES (INSN_UID (insn
)) == -1)
5500 if (TARGET_CPU_ZARCH
)
5502 if (curr_pool
->size
< S390_POOL_CHUNK_MAX
)
5505 s390_end_pool (curr_pool
, NULL_RTX
);
5510 int chunk_size
= INSN_ADDRESSES (INSN_UID (insn
))
5511 - INSN_ADDRESSES (INSN_UID (curr_pool
->first_insn
))
5514 /* We will later have to insert base register reload insns.
5515 Those will have an effect on code size, which we need to
5516 consider here. This calculation makes rather pessimistic
5517 worst-case assumptions. */
5518 if (GET_CODE (insn
) == CODE_LABEL
)
5521 if (chunk_size
< S390_POOL_CHUNK_MIN
5522 && curr_pool
->size
< S390_POOL_CHUNK_MIN
)
5525 /* Pool chunks can only be inserted after BARRIERs ... */
5526 if (GET_CODE (insn
) == BARRIER
)
5528 s390_end_pool (curr_pool
, insn
);
5533 /* ... so if we don't find one in time, create one. */
5534 else if ((chunk_size
> S390_POOL_CHUNK_MAX
5535 || curr_pool
->size
> S390_POOL_CHUNK_MAX
))
5537 rtx label
, jump
, barrier
;
5539 /* We can insert the barrier only after a 'real' insn. */
5540 if (GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != CALL_INSN
)
5542 if (get_attr_length (insn
) == 0)
5545 /* Don't separate LTREL_BASE from the corresponding
5546 LTREL_OFFSET load. */
/* Synthesize jump-over-pool: label, unconditional jump to it, barrier after
   the jump; the pool chunk ends at the barrier.  */
5550 label
= gen_label_rtx ();
5551 jump
= emit_jump_insn_after (gen_jump (label
), insn
);
5552 barrier
= emit_barrier_after (jump
);
5553 insn
= emit_label_after (label
, barrier
);
5554 JUMP_LABEL (jump
) = label
;
5555 LABEL_NUSES (label
) = 1;
5557 INSN_ADDRESSES_NEW (jump
, -1);
5558 INSN_ADDRESSES_NEW (barrier
, -1);
5559 INSN_ADDRESSES_NEW (insn
, -1);
5561 s390_end_pool (curr_pool
, barrier
);
5569 s390_end_pool (curr_pool
, NULL_RTX
);
5574 /* Find all labels that are branched into
5575 from an insn belonging to a different chunk. */
5577 far_labels
= BITMAP_XMALLOC ();
5579 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5581 /* Labels marked with LABEL_PRESERVE_P can be target
5582 of non-local jumps, so we have to mark them.
5583 The same holds for named labels.
5585 Don't do that, however, if it is the label before
5588 if (GET_CODE (insn
) == CODE_LABEL
5589 && (LABEL_PRESERVE_P (insn
) || LABEL_NAME (insn
)))
5591 rtx vec_insn
= next_real_insn (insn
);
5592 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
5593 PATTERN (vec_insn
) : NULL_RTX
;
5595 || !(GET_CODE (vec_pat
) == ADDR_VEC
5596 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
5597 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (insn
));
5600 /* If we have a direct jump (conditional or unconditional)
5601 or a casesi jump, check all potential targets. */
5602 else if (GET_CODE (insn
) == JUMP_INSN
)
5604 rtx pat
= PATTERN (insn
);
5605 if (GET_CODE (pat
) == PARALLEL
&& XVECLEN (pat
, 0) > 2)
5606 pat
= XVECEXP (pat
, 0, 0);
5608 if (GET_CODE (pat
) == SET
)
5610 rtx label
= JUMP_LABEL (insn
);
5613 if (s390_find_pool (pool_list
, label
)
5614 != s390_find_pool (pool_list
, insn
))
5615 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
5618 else if (GET_CODE (pat
) == PARALLEL
5619 && XVECLEN (pat
, 0) == 2
5620 && GET_CODE (XVECEXP (pat
, 0, 0)) == SET
5621 && GET_CODE (XVECEXP (pat
, 0, 1)) == USE
5622 && GET_CODE (XEXP (XVECEXP (pat
, 0, 1), 0)) == LABEL_REF
)
5624 /* Find the jump table used by this casesi jump. */
5625 rtx vec_label
= XEXP (XEXP (XVECEXP (pat
, 0, 1), 0), 0);
5626 rtx vec_insn
= next_real_insn (vec_label
);
5627 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
5628 PATTERN (vec_insn
) : NULL_RTX
;
5630 && (GET_CODE (vec_pat
) == ADDR_VEC
5631 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
5633 int i
, diff_p
= GET_CODE (vec_pat
) == ADDR_DIFF_VEC
;
5635 for (i
= 0; i
< XVECLEN (vec_pat
, diff_p
); i
++)
5637 rtx label
= XEXP (XVECEXP (vec_pat
, diff_p
, i
), 0);
5639 if (s390_find_pool (pool_list
, label
)
5640 != s390_find_pool (pool_list
, insn
))
5641 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
5648 /* Insert base register reload insns before every pool. */
5650 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
5652 rtx new_insn
= gen_reload_base (cfun
->machine
->base_reg
,
5654 rtx insn
= curr_pool
->first_insn
;
5655 INSN_ADDRESSES_NEW (emit_insn_before (new_insn
, insn
), -1);
5658 /* Insert base register reload insns at every far label. */
5660 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5661 if (GET_CODE (insn
) == CODE_LABEL
5662 && bitmap_bit_p (far_labels
, CODE_LABEL_NUMBER (insn
)))
5664 struct constant_pool
*pool
= s390_find_pool (pool_list
, insn
);
5667 rtx new_insn
= gen_reload_base (cfun
->machine
->base_reg
,
5669 INSN_ADDRESSES_NEW (emit_insn_after (new_insn
, insn
), -1);
5674 BITMAP_XFREE (far_labels
);
5677 /* Recompute insn addresses. */
5679 init_insn_lengths ();
5680 shorten_branches (get_insns ());
/* NOTE(review): garbled extraction — statements split across lines; braces and
   some guard lines missing.  Code kept byte-identical.
   Purpose (from visible code): commit the chunk list — rewrite every literal
   pool reference to point into its chunk, dump each chunk, free the list.  */
5685 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5686 After we have decided to use this list, finish implementing
5687 all changes to the current function as required. */
5690 s390_chunkify_finish (struct constant_pool
*pool_list
)
5692 struct constant_pool
*curr_pool
= NULL
;
5696 /* Replace all literal pool references. */
5698 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5701 replace_ltrel_base (&PATTERN (insn
));
5703 curr_pool
= s390_find_pool (pool_list
, insn
);
5707 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
5709 rtx addr
, pool_ref
= NULL_RTX
;
5710 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
5713 if (s390_execute_label (insn
))
5714 addr
= s390_find_execute (curr_pool
, insn
);
5716 addr
= s390_find_constant (curr_pool
,
5717 get_pool_constant (pool_ref
),
5718 get_pool_mode (pool_ref
));
/* INSN_CODE reset forces re-recognition after the pattern was rewritten.  */
5720 replace_constant_pool_ref (&PATTERN (insn
), pool_ref
, addr
);
5721 INSN_CODE (insn
) = -1;
5726 /* Dump out all literal pools. */
5728 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
5729 s390_dump_pool (curr_pool
, 0);
5731 /* Free pool list. */
5735 struct constant_pool
*next
= pool_list
->next
;
5736 s390_free_pool (pool_list
);
/* NOTE(review): garbled extraction — statements split across lines; braces and
   some lines (e.g. remove_insn of the jump, loop advance) missing.  Code kept
   byte-identical.
   Purpose (from visible code): undo s390_chunkify_start — remove pool
   placeholder insns (and any synthesized jump/barrier/label around them),
   remove all UNSPEC_RELOAD_BASE insns, free the chunk list.  */
5741 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5742 We have decided we cannot use this list, so revert all changes
5743 to the current function that were done by s390_chunkify_start. */
5746 s390_chunkify_cancel (struct constant_pool
*pool_list
)
5748 struct constant_pool
*curr_pool
= NULL
;
5751 /* Remove all pool placeholder insns. */
5753 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
5755 /* Did we insert an extra barrier? Remove it. */
5756 rtx barrier
= PREV_INSN (curr_pool
->pool_insn
);
5757 rtx jump
= barrier
? PREV_INSN (barrier
) : NULL_RTX
;
5758 rtx label
= NEXT_INSN (curr_pool
->pool_insn
);
/* Recognize exactly the jump-over-pool shape built by chunkify_start:
   jump, barrier, pool insn, label; the jump must target that label.  */
5760 if (jump
&& GET_CODE (jump
) == JUMP_INSN
5761 && barrier
&& GET_CODE (barrier
) == BARRIER
5762 && label
&& GET_CODE (label
) == CODE_LABEL
5763 && GET_CODE (PATTERN (jump
)) == SET
5764 && SET_DEST (PATTERN (jump
)) == pc_rtx
5765 && GET_CODE (SET_SRC (PATTERN (jump
))) == LABEL_REF
5766 && XEXP (SET_SRC (PATTERN (jump
)), 0) == label
)
5769 remove_insn (barrier
);
5770 remove_insn (label
);
5773 remove_insn (curr_pool
->pool_insn
);
5776 /* Remove all base register reload insns. */
5778 for (insn
= get_insns (); insn
; )
5780 rtx next_insn
= NEXT_INSN (insn
);
5782 if (GET_CODE (insn
) == INSN
5783 && GET_CODE (PATTERN (insn
)) == SET
5784 && GET_CODE (SET_SRC (PATTERN (insn
))) == UNSPEC
5785 && XINT (SET_SRC (PATTERN (insn
)), 1) == UNSPEC_RELOAD_BASE
)
5791 /* Free pool list. */
5795 struct constant_pool
*next
= pool_list
->next
;
5796 s390_free_pool (pool_list
);
/* NOTE(review): garbled extraction — the switch's case labels, braces, and the
   declaration of 'r' (presumably REAL_VALUE_TYPE) were dropped.  Code kept
   byte-identical.  Floats are emitted via assemble_real, integers via
   assemble_integer at the mode's size.  */
5802 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
5805 s390_output_pool_entry (rtx exp
, enum machine_mode mode
, unsigned int align
)
5809 switch (GET_MODE_CLASS (mode
))
5812 if (GET_CODE (exp
) != CONST_DOUBLE
)
5815 REAL_VALUE_FROM_CONST_DOUBLE (r
, exp
);
5816 assemble_real (r
, mode
, align
);
5820 assemble_integer (exp
, GET_MODE_SIZE (mode
), align
, 1);
/* NOTE(review): garbled extraction — braces, 'continue's and the insn-removal
   lines between the four pattern cases were dropped.  Code kept byte-identical.
   Purpose (from visible code): after final frame layout, find the prologue's
   store-multiple / single-register save insns and the epilogue's load-multiple /
   single-register restore insns, and re-emit them covering only the GPR range
   the final layout actually needs.  */
5829 /* Rework the prologue/epilogue to avoid saving/restoring
5830 registers unnecessarily. */
5833 s390_optimize_prologue (void)
5835 rtx insn
, new_insn
, next_insn
;
5837 /* Do a final recompute of the frame-related data. */
5839 s390_update_frame_layout ();
5841 /* If all special registers are in fact used, there's nothing we
5842 can do, so no point in walking the insn list. */
5844 if (cfun_frame_layout
.first_save_gpr
<= BASE_REGNUM
5845 && cfun_frame_layout
.last_save_gpr
>= BASE_REGNUM
5846 && (TARGET_CPU_ZARCH
5847 || (cfun_frame_layout
.first_save_gpr
<= RETURN_REGNUM
5848 && cfun_frame_layout
.last_save_gpr
>= RETURN_REGNUM
)))
5851 /* Search for prologue/epilogue insns and replace them. */
5853 for (insn
= get_insns (); insn
; insn
= next_insn
)
5855 int first
, last
, off
;
5856 rtx set
, base
, offset
;
5858 next_insn
= NEXT_INSN (insn
);
5860 if (GET_CODE (insn
) != INSN
)
/* Case 1: store-multiple saving a GPR range that spans BASE_REGNUM.  */
5863 if (GET_CODE (PATTERN (insn
)) == PARALLEL
5864 && store_multiple_operation (PATTERN (insn
), VOIDmode
))
5866 set
= XVECEXP (PATTERN (insn
), 0, 0);
5867 first
= REGNO (SET_SRC (set
));
5868 last
= first
+ XVECLEN (PATTERN (insn
), 0) - 1;
5869 offset
= const0_rtx
;
5870 base
= eliminate_constant_term (XEXP (SET_DEST (set
), 0), &offset
);
5871 off
= INTVAL (offset
);
5873 if (GET_CODE (base
) != REG
|| off
< 0)
5875 if (REGNO (base
) != STACK_POINTER_REGNUM
5876 && REGNO (base
) != HARD_FRAME_POINTER_REGNUM
)
5878 if (first
> BASE_REGNUM
|| last
< BASE_REGNUM
)
5881 if (cfun_frame_layout
.first_save_gpr
!= -1)
5883 new_insn
= save_gprs (base
,
5884 off
+ (cfun_frame_layout
.first_save_gpr
5885 - first
) * UNITS_PER_WORD
,
5886 cfun_frame_layout
.first_save_gpr
,
5887 cfun_frame_layout
.last_save_gpr
);
5888 new_insn
= emit_insn_before (new_insn
, insn
);
5889 INSN_ADDRESSES_NEW (new_insn
, -1);
/* Case 2: single-register store of BASE_REGNUM (or RETURN_REGNUM on
   pre-z/Arch) to a stack slot.  */
5896 if (GET_CODE (PATTERN (insn
)) == SET
5897 && GET_CODE (SET_SRC (PATTERN (insn
))) == REG
5898 && (REGNO (SET_SRC (PATTERN (insn
))) == BASE_REGNUM
5899 || (!TARGET_CPU_ZARCH
5900 && REGNO (SET_SRC (PATTERN (insn
))) == RETURN_REGNUM
))
5901 && GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
5903 set
= PATTERN (insn
);
5904 first
= REGNO (SET_SRC (set
));
5905 offset
= const0_rtx
;
5906 base
= eliminate_constant_term (XEXP (SET_DEST (set
), 0), &offset
);
5907 off
= INTVAL (offset
);
5909 if (GET_CODE (base
) != REG
|| off
< 0)
5911 if (REGNO (base
) != STACK_POINTER_REGNUM
5912 && REGNO (base
) != HARD_FRAME_POINTER_REGNUM
)
5914 if (cfun_frame_layout
.first_save_gpr
!= -1)
5916 new_insn
= save_gprs (base
,
5917 off
+ (cfun_frame_layout
.first_save_gpr
5918 - first
) * UNITS_PER_WORD
,
5919 cfun_frame_layout
.first_save_gpr
,
5920 cfun_frame_layout
.last_save_gpr
);
5921 new_insn
= emit_insn_before (new_insn
, insn
);
5922 INSN_ADDRESSES_NEW (new_insn
, -1);
/* Case 3: load-multiple restoring a GPR range that spans BASE_REGNUM.  */
5929 if (GET_CODE (PATTERN (insn
)) == PARALLEL
5930 && load_multiple_operation (PATTERN (insn
), VOIDmode
))
5932 set
= XVECEXP (PATTERN (insn
), 0, 0);
5933 first
= REGNO (SET_DEST (set
));
5934 last
= first
+ XVECLEN (PATTERN (insn
), 0) - 1;
5935 offset
= const0_rtx
;
5936 base
= eliminate_constant_term (XEXP (SET_SRC (set
), 0), &offset
);
5937 off
= INTVAL (offset
);
5939 if (GET_CODE (base
) != REG
|| off
< 0)
5941 if (REGNO (base
) != STACK_POINTER_REGNUM
5942 && REGNO (base
) != HARD_FRAME_POINTER_REGNUM
)
5944 if (first
> BASE_REGNUM
|| last
< BASE_REGNUM
)
5947 if (cfun_frame_layout
.first_restore_gpr
!= -1)
5949 new_insn
= restore_gprs (base
,
5950 off
+ (cfun_frame_layout
.first_restore_gpr
5951 - first
) * UNITS_PER_WORD
,
5952 cfun_frame_layout
.first_restore_gpr
,
5953 cfun_frame_layout
.last_restore_gpr
);
5954 new_insn
= emit_insn_before (new_insn
, insn
);
5955 INSN_ADDRESSES_NEW (new_insn
, -1);
/* Case 4: single-register load of BASE_REGNUM (or RETURN_REGNUM on
   pre-z/Arch) from a stack slot.  */
5962 if (GET_CODE (PATTERN (insn
)) == SET
5963 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
5964 && (REGNO (SET_DEST (PATTERN (insn
))) == BASE_REGNUM
5965 || (!TARGET_CPU_ZARCH
5966 && REGNO (SET_DEST (PATTERN (insn
))) == RETURN_REGNUM
))
5967 && GET_CODE (SET_SRC (PATTERN (insn
))) == MEM
)
5969 set
= PATTERN (insn
);
5970 first
= REGNO (SET_DEST (set
));
5971 offset
= const0_rtx
;
5972 base
= eliminate_constant_term (XEXP (SET_SRC (set
), 0), &offset
);
5973 off
= INTVAL (offset
);
5975 if (GET_CODE (base
) != REG
|| off
< 0)
5977 if (REGNO (base
) != STACK_POINTER_REGNUM
5978 && REGNO (base
) != HARD_FRAME_POINTER_REGNUM
)
5980 if (cfun_frame_layout
.first_restore_gpr
!= -1)
5982 new_insn
= restore_gprs (base
,
5983 off
+ (cfun_frame_layout
.first_restore_gpr
5984 - first
) * UNITS_PER_WORD
,
5985 cfun_frame_layout
.first_restore_gpr
,
5986 cfun_frame_layout
.last_restore_gpr
);
5987 new_insn
= emit_insn_before (new_insn
, insn
);
5988 INSN_ADDRESSES_NEW (new_insn
, -1);
/* NOTE(review): garbled extraction — the function header line itself was
   dropped (presumably 'static void s390_reorg (void)' — TODO confirm against
   upstream), along with braces and loop/goto scaffolding of the retry loop
   described in the big comment below.  Code kept byte-identical.  */
5997 /* Perform machine-dependent processing. */
6002 bool pool_overflow
= false;
6004 /* Make sure all splits have been performed; splits after
6005 machine_dependent_reorg might confuse insn length counts. */
6006 split_all_insns_noflow ();
6009 /* Install the main literal pool and the associated base
6010 register load insns.
6012 In addition, there are two problematic situations we need
6015 - the literal pool might be > 4096 bytes in size, so that
6016 some of its elements cannot be directly accessed
6018 - a branch target might be > 64K away from the branch, so that
6019 it is not possible to use a PC-relative instruction.
6021 To fix those, we split the single literal pool into multiple
6022 pool chunks, reloading the pool base register at various
6023 points throughout the function to ensure it always points to
6024 the pool chunk the following code expects, and / or replace
6025 PC-relative branches by absolute branches.
6027 However, the two problems are interdependent: splitting the
6028 literal pool can move a branch further away from its target,
6029 causing the 64K limit to overflow, and on the other hand,
6030 replacing a PC-relative branch by an absolute branch means
6031 we need to put the branch target address into the literal
6032 pool, possibly causing it to overflow.
6034 So, we loop trying to fix up both problems until we manage
6035 to satisfy both conditions at the same time. Note that the
6036 loop is guaranteed to terminate as every pass of the loop
6037 strictly decreases the total number of PC-relative branches
6038 in the function. (This is not completely true as there
6039 might be branch-over-pool insns introduced by chunkify_start.
6040 Those never need to be split however.) */
6044 struct constant_pool
*pool
= NULL
;
6046 /* Collect the literal pool. */
6049 pool
= s390_mainpool_start ();
6051 pool_overflow
= true;
6054 /* If literal pool overflowed, start to chunkify it. */
6056 pool
= s390_chunkify_start ();
6058 /* Split out-of-range branches. If this has created new
6059 literal pool entries, cancel current chunk list and
6060 recompute it. zSeries machines have large branch
6061 instructions, so we never need to split a branch. */
6062 if (!TARGET_CPU_ZARCH
&& s390_split_branches ())
6065 s390_chunkify_cancel (pool
);
6067 s390_mainpool_cancel (pool
);
6072 /* If we made it up to here, both conditions are satisfied.
6073 Finish up literal pool related changes. */
6075 s390_chunkify_finish (pool
);
6077 s390_mainpool_finish (pool
);
6079 /* We're done splitting branches. */
6080 cfun
->machine
->split_branches_pending_p
= false;
6084 s390_optimize_prologue ();
/* NOTE(review): garbled extraction — return type line, braces, the count==0
   guard and the 'return NULL_RTX' path are missing.  Code kept byte-identical.
   Visible behavior: current frame reads RETURN_REGNUM's save slot via the
   return-address pointer; outer frames are reached through the backchain at
   either RETURN_REGNUM*UNITS_PER_WORD (backchain ABI) or -2 words (kernel).  */
6088 /* Return an RTL expression representing the value of the return address
6089 for the frame COUNT steps up from the current frame. FRAME is the
6090 frame pointer of that frame. */
6093 s390_return_addr_rtx (int count
, rtx frame ATTRIBUTE_UNUSED
)
6098 /* Without backchain, we fail for all but the current frame. */
6100 if (!TARGET_BACKCHAIN
&& !TARGET_KERNEL_BACKCHAIN
&& count
> 0)
6103 /* For the current frame, we need to make sure the initial
6104 value of RETURN_REGNUM is actually saved. */
6108 cfun_frame_layout
.save_return_addr_p
= true;
6109 return gen_rtx_MEM (Pmode
, return_address_pointer_rtx
);
6112 if (TARGET_BACKCHAIN
)
6113 offset
= RETURN_REGNUM
* UNITS_PER_WORD
;
6115 offset
= -2 * UNITS_PER_WORD
;
6117 addr
= plus_constant (frame
, offset
);
6118 addr
= memory_address (Pmode
, addr
);
6119 return gen_rtx_MEM (Pmode
, addr
);
/* NOTE(review): garbled extraction — return type, braces, local declaration of
   'chain' and the final 'return chain;' are missing.  Code kept byte-identical.
   The backchain lives at SP for the classic ABI, or one word below
   STACK_POINTER_OFFSET for the kernel-backchain ABI.  */
6122 /* Return an RTL expression representing the back chain stored in
6123 the current stack frame. */
6126 s390_back_chain_rtx (void)
6130 gcc_assert (TARGET_BACKCHAIN
|| TARGET_KERNEL_BACKCHAIN
);
6132 if (TARGET_BACKCHAIN
)
6133 chain
= stack_pointer_rtx
;
6135 chain
= plus_constant (stack_pointer_rtx
,
6136 STACK_POINTER_OFFSET
- UNITS_PER_WORD
);
6138 chain
= gen_rtx_MEM (Pmode
, chain
);
/* NOTE(review): garbled extraction — return type, braces, 'return i;' inside
   the loop and the no-candidate return are missing.  Code kept byte-identical.
   Scans call-clobbered GPRs 0..5 for one with !regs_ever_live.  */
6142 /* Find first call clobbered register unused in a function.
6143 This could be used as base register in a leaf function
6144 or for holding the return address before epilogue. */
6147 find_unused_clobbered_reg (void)
6150 for (i
= 0; i
< 6; i
++)
6151 if (!regs_ever_live
[i
])
/* NOTE(review): garbled extraction — declarations of b/t/i, braces, and the
   final stores through area_bottom/area_top are missing.  Code kept
   byte-identical.  Computes the [bottom, top) byte range touched by the
   epilogue: restored GPR slots, high FPRs (64-bit), and f4/f6 (31-bit).  */
6156 /* Determine the frame area which actually has to be accessed
6157 in the function epilogue. The values are stored at the
6158 given pointers AREA_BOTTOM (address of the lowest used stack
6159 address) and AREA_TOP (address of the first item which does
6160 not belong to the stack frame). */
6163 s390_frame_area (int *area_bottom
, int *area_top
)
6171 if (cfun_frame_layout
.first_restore_gpr
!= -1)
6173 b
= (cfun_frame_layout
.gprs_offset
6174 + cfun_frame_layout
.first_restore_gpr
* UNITS_PER_WORD
);
6175 t
= b
+ (cfun_frame_layout
.last_restore_gpr
6176 - cfun_frame_layout
.first_restore_gpr
+ 1) * UNITS_PER_WORD
;
6179 if (TARGET_64BIT
&& cfun_save_high_fprs_p
)
6181 b
= MIN (b
, cfun_frame_layout
.f8_offset
);
6182 t
= MAX (t
, (cfun_frame_layout
.f8_offset
6183 + cfun_frame_layout
.high_fprs
* 8));
6187 for (i
= 2; i
< 4; i
++)
6188 if (cfun_fpr_bit_p (i
))
6190 b
= MIN (b
, cfun_frame_layout
.f4_offset
+ (i
- 2) * 8);
6191 t
= MAX (t
, cfun_frame_layout
.f4_offset
+ (i
- 1) * 8);
/* NOTE(review): garbled extraction — braces and several guard lines (e.g. the
   condition selecting the live GPR range i..j, flag_pic guard before the PIC
   register line) are missing.  Code kept byte-identical.
   Visible behavior: record used call-saved FPRs (f8-f15 on 64-bit), compute
   per-GPR liveness honoring global regs, force liveness for the base, return
   and stack-pointer registers under the listed conditions, derive the
   first/last save/restore GPR range, and add varargs requirements.  */
6198 /* Fill cfun->machine with info about register usage of current function.
6199 Return in LIVE_REGS which GPRs are currently considered live. */
6202 s390_register_info (int live_regs
[])
6206 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6207 cfun_frame_layout
.fpr_bitmap
= 0;
6208 cfun_frame_layout
.high_fprs
= 0;
6210 for (i
= 24; i
< 32; i
++)
6211 if (regs_ever_live
[i
] && !global_regs
[i
])
6213 cfun_set_fpr_bit (i
- 16);
6214 cfun_frame_layout
.high_fprs
++;
6217 /* Find first and last gpr to be saved. We trust regs_ever_live
6218 data, except that we don't save and restore global registers.
6220 Also, all registers with special meaning to the compiler need
6221 to be handled extra. */
6223 for (i
= 0; i
< 16; i
++)
6224 live_regs
[i
] = regs_ever_live
[i
] && !global_regs
[i
];
6227 live_regs
[PIC_OFFSET_TABLE_REGNUM
]
6228 = regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
];
6230 live_regs
[BASE_REGNUM
]
6231 = cfun
->machine
->base_reg
6232 && REGNO (cfun
->machine
->base_reg
) == BASE_REGNUM
;
/* Return register is live while branch splitting may still happen or when
   the frame layout says the return address must be saved.  */
6234 live_regs
[RETURN_REGNUM
]
6235 = cfun
->machine
->split_branches_pending_p
6236 || cfun_frame_layout
.save_return_addr_p
;
6238 live_regs
[STACK_POINTER_REGNUM
]
6239 = !current_function_is_leaf
6240 || TARGET_TPF_PROFILING
6241 || cfun_save_high_fprs_p
6242 || get_frame_size () > 0
6243 || current_function_calls_alloca
6244 || current_function_stdarg
;
6246 for (i
= 6; i
< 16; i
++)
6249 for (j
= 15; j
> i
; j
--)
6255 /* Nothing to save/restore. */
6256 cfun_frame_layout
.first_save_gpr
= -1;
6257 cfun_frame_layout
.first_restore_gpr
= -1;
6258 cfun_frame_layout
.last_save_gpr
= -1;
6259 cfun_frame_layout
.last_restore_gpr
= -1;
6263 /* Save / Restore from gpr i to j. */
6264 cfun_frame_layout
.first_save_gpr
= i
;
6265 cfun_frame_layout
.first_restore_gpr
= i
;
6266 cfun_frame_layout
.last_save_gpr
= j
;
6267 cfun_frame_layout
.last_restore_gpr
= j
;
6270 if (current_function_stdarg
)
6272 /* Varargs functions need to save gprs 2 to 6. */
6273 if (cfun_frame_layout
.first_save_gpr
== -1
6274 || cfun_frame_layout
.first_save_gpr
> 2)
6275 cfun_frame_layout
.first_save_gpr
= 2;
6277 if (cfun_frame_layout
.last_save_gpr
== -1
6278 || cfun_frame_layout
.last_save_gpr
< 6)
6279 cfun_frame_layout
.last_save_gpr
= 6;
6281 /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved. */
6282 if (TARGET_HARD_FLOAT
)
6283 for (i
= 0; i
< (TARGET_64BIT
? 4 : 2); i
++)
6284 cfun_set_fpr_bit (i
);
6288 for (i
= 2; i
< 4; i
++)
6289 if (regs_ever_live
[i
+ 16] && !global_regs
[i
+ 16])
6290 cfun_set_fpr_bit (i
);
/* NOTE(review): garbled extraction — braces and several continuation operands
   (e.g. the tail of the gprs_offset computations at original lines 6314, 6319,
   6323-6326, 6374-6377) are missing.  Code kept byte-identical.
   Visible behavior: compute save-slot offsets (backchain, f0/f4/f8, GPR save
   area) under the three ABI layouts (backchain / kernel backchain / none),
   then accumulate the total frame size including FPR slots, GPR save area,
   stack-boundary rounding and outgoing args.  */
6293 /* Fill cfun->machine with info about frame of current function. */
6296 s390_frame_info (void)
6300 cfun_frame_layout
.frame_size
= get_frame_size ();
6301 if (!TARGET_64BIT
&& cfun_frame_layout
.frame_size
> 0x7fff0000)
6302 fatal_error ("Total size of local variables exceeds architecture limit.");
6304 cfun_frame_layout
.save_backchain_p
= (TARGET_BACKCHAIN
6305 || TARGET_KERNEL_BACKCHAIN
);
6307 if (TARGET_BACKCHAIN
)
6309 cfun_frame_layout
.backchain_offset
= 0;
6310 cfun_frame_layout
.f0_offset
= 16 * UNITS_PER_WORD
;
6311 cfun_frame_layout
.f4_offset
= cfun_frame_layout
.f0_offset
+ 2 * 8;
6312 cfun_frame_layout
.f8_offset
= -cfun_frame_layout
.high_fprs
* 8;
6313 cfun_frame_layout
.gprs_offset
= (cfun_frame_layout
.first_save_gpr
6316 else if (TARGET_KERNEL_BACKCHAIN
)
6318 cfun_frame_layout
.backchain_offset
= (STACK_POINTER_OFFSET
6320 cfun_frame_layout
.gprs_offset
6321 = (cfun_frame_layout
.backchain_offset
6322 - (STACK_POINTER_REGNUM
- cfun_frame_layout
.first_save_gpr
+ 1)
6327 cfun_frame_layout
.f4_offset
6328 = (cfun_frame_layout
.gprs_offset
6329 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6331 cfun_frame_layout
.f0_offset
6332 = (cfun_frame_layout
.f4_offset
6333 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6337 /* On 31 bit we have to care about alignment of the
6338 floating point regs to provide fastest access. */
6339 cfun_frame_layout
.f0_offset
6340 = ((cfun_frame_layout
.gprs_offset
6341 & ~(STACK_BOUNDARY
/ BITS_PER_UNIT
- 1))
6342 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6344 cfun_frame_layout
.f4_offset
6345 = (cfun_frame_layout
.f0_offset
6346 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6349 else /* no backchain */
6351 cfun_frame_layout
.f4_offset
6352 = (STACK_POINTER_OFFSET
6353 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6355 cfun_frame_layout
.f0_offset
6356 = (cfun_frame_layout
.f4_offset
6357 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6359 cfun_frame_layout
.gprs_offset
6360 = cfun_frame_layout
.f0_offset
- cfun_gprs_save_area_size
;
/* Leaf functions without frame needs keep frame_size == 0 (early return lost
   in extraction — TODO confirm against upstream).  */
6363 if (current_function_is_leaf
6364 && !TARGET_TPF_PROFILING
6365 && cfun_frame_layout
.frame_size
== 0
6366 && !cfun_save_high_fprs_p
6367 && !current_function_calls_alloca
6368 && !current_function_stdarg
)
6371 if (TARGET_BACKCHAIN
)
6372 cfun_frame_layout
.frame_size
+= (STARTING_FRAME_OFFSET
6373 + cfun_frame_layout
.high_fprs
* 8);
6376 cfun_frame_layout
.frame_size
+= (cfun_frame_layout
.save_backchain_p
6379 /* No alignment trouble here because f8-f15 are only saved under
6381 cfun_frame_layout
.f8_offset
= (MIN (MIN (cfun_frame_layout
.f0_offset
,
6382 cfun_frame_layout
.f4_offset
),
6383 cfun_frame_layout
.gprs_offset
)
6384 - cfun_frame_layout
.high_fprs
* 8);
6386 cfun_frame_layout
.frame_size
+= cfun_frame_layout
.high_fprs
* 8;
6388 for (i
= 0; i
< 8; i
++)
6389 if (cfun_fpr_bit_p (i
))
6390 cfun_frame_layout
.frame_size
+= 8;
6392 cfun_frame_layout
.frame_size
+= cfun_gprs_save_area_size
;
6394 /* If under 31 bit an odd number of gprs has to be saved we have to adjust
6395 the frame size to sustain 8 byte alignment of stack frames. */
6396 cfun_frame_layout
.frame_size
= ((cfun_frame_layout
.frame_size
+
6397 STACK_BOUNDARY
/ BITS_PER_UNIT
- 1)
6398 & ~(STACK_BOUNDARY
/ BITS_PER_UNIT
- 1));
6400 cfun_frame_layout
.frame_size
+= current_function_outgoing_args_size
;
/* NOTE(review): garbled extraction — braces, the declarations of base_used and
   live_regs, the 'do {' opening the fixed-point loop, the '!base_used' guard
   before line 6444, and the s390_frame_info call are missing.  Code kept
   byte-identical.  Iterates until frame_size reaches a fixed point, since the
   choice of base register and the frame size depend on each other.  */
6404 /* Generate frame layout. Fills in register and frame data for the current
6405 function in cfun->machine. This routine can be called multiple times;
6406 it will re-do the complete frame layout every time. */
6409 s390_init_frame_layout (void)
6411 HOST_WIDE_INT frame_size
;
6415 /* If return address register is explicitly used, we need to save it. */
6416 if (regs_ever_live
[RETURN_REGNUM
]
6417 || !current_function_is_leaf
6418 || TARGET_TPF_PROFILING
6419 || current_function_stdarg
6420 || current_function_calls_eh_return
)
6421 cfun_frame_layout
.save_return_addr_p
= true;
6423 /* On S/390 machines, we may need to perform branch splitting, which
6424 will require both base and return address register. We have no
6425 choice but to assume we're going to need them until right at the
6426 end of the machine dependent reorg phase. */
6427 if (!TARGET_CPU_ZARCH
)
6428 cfun
->machine
->split_branches_pending_p
= true;
6432 frame_size
= cfun_frame_layout
.frame_size
;
6434 /* Try to predict whether we'll need the base register. */
6435 base_used
= cfun
->machine
->split_branches_pending_p
6436 || current_function_uses_const_pool
6437 || (!DISP_IN_RANGE (-frame_size
)
6438 && !CONST_OK_FOR_CONSTRAINT_P (-frame_size
, 'K', "K"));
6440 /* Decide which register to use as literal pool base. In small
6441 leaf functions, try to use an unused call-clobbered register
6442 as base register to avoid save/restore overhead. */
6444 cfun
->machine
->base_reg
= NULL_RTX
;
6445 else if (current_function_is_leaf
&& !regs_ever_live
[5])
6446 cfun
->machine
->base_reg
= gen_rtx_REG (Pmode
, 5);
6448 cfun
->machine
->base_reg
= gen_rtx_REG (Pmode
, BASE_REGNUM
);
6450 s390_register_info (live_regs
);
6453 while (frame_size
!= cfun_frame_layout
.frame_size
);
/* NOTE(review): garbled extraction — braces and the live_regs declaration are
   missing.  Code kept byte-identical.  Re-runs register info and mirrors the
   liveness of the base, return and stack-pointer registers (plus the chosen
   base register) back into regs_ever_live.  */
6456 /* Update frame layout. Recompute actual register save data based on
6457 current info and update regs_ever_live for the special registers.
6458 May be called multiple times, but may never cause *more* registers
6459 to be saved than s390_init_frame_layout allocated room for. */
6462 s390_update_frame_layout (void)
6466 s390_register_info (live_regs
);
6468 regs_ever_live
[BASE_REGNUM
] = live_regs
[BASE_REGNUM
];
6469 regs_ever_live
[RETURN_REGNUM
] = live_regs
[RETURN_REGNUM
];
6470 regs_ever_live
[STACK_POINTER_REGNUM
] = live_regs
[STACK_POINTER_REGNUM
];
6472 if (cfun
->machine
->base_reg
)
6473 regs_ever_live
[REGNO (cfun
->machine
->base_reg
)] = 1;
/* NOTE(review): garbled extraction — return type, braces, and the 'return
   false;' / 'return true;' results of the final check are missing.  Code kept
   byte-identical.  Validates the (from, to) pair with asserts, then for the
   return-address pointer requires that the return address is actually saved.  */
6476 /* Return true if register FROM can be eliminated via register TO. */
6479 s390_can_eliminate (int from
, int to
)
6481 gcc_assert (to
== STACK_POINTER_REGNUM
6482 || to
== HARD_FRAME_POINTER_REGNUM
);
6484 gcc_assert (from
== FRAME_POINTER_REGNUM
6485 || from
== ARG_POINTER_REGNUM
6486 || from
== RETURN_ADDRESS_POINTER_REGNUM
);
6488 /* Make sure we actually saved the return address. */
6489 if (from
== RETURN_ADDRESS_POINTER_REGNUM
)
6490 if (!current_function_calls_eh_return
6491 && !current_function_stdarg
6492 && !cfun_frame_layout
.save_return_addr_p
)
/* NOTE(review): garbled extraction — return type, braces, the 'switch (from)'
   head, the FRAME_POINTER case body, 'break's, default case and the final
   'return offset;' are missing.  Code kept byte-identical.  */
6498 /* Return offset between register FROM and TO initially after prolog. */
6501 s390_initial_elimination_offset (int from
, int to
)
6503 HOST_WIDE_INT offset
;
6506 /* ??? Why are we called for non-eliminable pairs? */
6507 if (!s390_can_eliminate (from
, to
))
6512 case FRAME_POINTER_REGNUM
:
6516 case ARG_POINTER_REGNUM
:
6517 s390_init_frame_layout ();
6518 offset
= cfun_frame_layout
.frame_size
+ STACK_POINTER_OFFSET
;
6521 case RETURN_ADDRESS_POINTER_REGNUM
:
6522 s390_init_frame_layout ();
/* Offset of RETURN_REGNUM's slot within the GPR save area.  */
6523 index
= RETURN_REGNUM
- cfun_frame_layout
.first_save_gpr
;
6524 gcc_assert (index
>= 0);
6525 offset
= cfun_frame_layout
.frame_size
+ cfun_frame_layout
.gprs_offset
;
6526 offset
+= index
* UNITS_PER_WORD
;
/* NOTE(review): garbled extraction — return type ('static rtx', presumably),
   braces and the 'rtx addr;' declaration are missing.  Code kept
   byte-identical.  Stores FPR REGNUM as DFmode at BASE+OFFSET, tagging the MEM
   with the save/restore alias set.  */
6536 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6537 to register BASE. Return generated insn. */
6540 save_fpr (rtx base
, int offset
, int regnum
)
6543 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
6544 set_mem_alias_set (addr
, s390_sr_alias_set
);
6546 return emit_move_insn (addr
, gen_rtx_REG (DFmode
, regnum
));
/* NOTE(review): garbled extraction — return type, braces and the 'rtx addr;'
   declaration are missing.  Code kept byte-identical.  Mirror of save_fpr:
   loads FPR REGNUM as DFmode from BASE+OFFSET.  */
6549 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6550 to register BASE. Return generated insn. */
6553 restore_fpr (rtx base
, int offset
, int regnum
)
6556 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
6557 set_mem_alias_set (addr
, s390_sr_alias_set
);
6559 return emit_move_insn (gen_rtx_REG (DFmode
, regnum
), addr
);
/* NOTE(review): garbled extraction — braces, the first==last special-case
   guard, the TARGET_64BIT selector between gen_movdi/gen_movsi, 'return insn;'
   lines and the stdarg condition around the partial-note path are missing.
   Code kept byte-identical.  Emits a single move or a store-multiple for GPRs
   FIRST..LAST at BASE+OFFSET and sets up DWARF frame-related annotations.  */
6562 /* Generate insn to save registers FIRST to LAST into
6563 the register save area located at offset OFFSET
6564 relative to register BASE. */
6567 save_gprs (rtx base
, int offset
, int first
, int last
)
6569 rtx addr
, insn
, note
;
6572 addr
= plus_constant (base
, offset
);
6573 addr
= gen_rtx_MEM (Pmode
, addr
);
6574 set_mem_alias_set (addr
, s390_sr_alias_set
);
6576 /* Special-case single register. */
6580 insn
= gen_movdi (addr
, gen_rtx_REG (Pmode
, first
));
6582 insn
= gen_movsi (addr
, gen_rtx_REG (Pmode
, first
));
6584 RTX_FRAME_RELATED_P (insn
) = 1;
6589 insn
= gen_store_multiple (addr
,
6590 gen_rtx_REG (Pmode
, first
),
6591 GEN_INT (last
- first
+ 1));
6594 /* We need to set the FRAME_RELATED flag on all SETs
6595 inside the store-multiple pattern.
6597 However, we must not emit DWARF records for registers 2..5
6598 if they are stored for use by variable arguments ...
6600 ??? Unfortunately, it is not enough to simply not set the
6601 FRAME_RELATED flags for those SETs, because the first SET
6602 of the PARALLEL is always treated as if it had the flag
6603 set, even if it does not. Therefore we emit a new pattern
6604 without those registers as REG_FRAME_RELATED_EXPR note. */
6608 rtx pat
= PATTERN (insn
);
6610 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6611 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
6612 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, i
)) = 1;
6614 RTX_FRAME_RELATED_P (insn
) = 1;
/* Build a REG_FRAME_RELATED_EXPR note covering only GPRs 6..LAST so DWARF
   omits the varargs registers 2..5.  */
6618 addr
= plus_constant (base
, offset
+ (6 - first
) * UNITS_PER_WORD
);
6619 note
= gen_store_multiple (gen_rtx_MEM (Pmode
, addr
),
6620 gen_rtx_REG (Pmode
, 6),
6621 GEN_INT (last
- 6 + 1));
6622 note
= PATTERN (note
);
6625 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
6626 note
, REG_NOTES (insn
));
6628 for (i
= 0; i
< XVECLEN (note
, 0); i
++)
6629 if (GET_CODE (XVECEXP (note
, 0, i
)) == SET
)
6630 RTX_FRAME_RELATED_P (XVECEXP (note
, 0, i
)) = 1;
6632 RTX_FRAME_RELATED_P (insn
) = 1;
/* NOTE(review): garbled extraction — braces, the single-register guard, the
   64/31-bit selector and 'return insn;' lines are missing.  Code kept
   byte-identical.  Mirror of save_gprs without the DWARF annotations: epilogue
   restores are not frame-related.  */
6638 /* Generate insn to restore registers FIRST to LAST from
6639 the register save area located at offset OFFSET
6640 relative to register BASE. */
6643 restore_gprs (rtx base
, int offset
, int first
, int last
)
6647 addr
= plus_constant (base
, offset
);
6648 addr
= gen_rtx_MEM (Pmode
, addr
);
6649 set_mem_alias_set (addr
, s390_sr_alias_set
);
6651 /* Special-case single register. */
6655 insn
= gen_movdi (gen_rtx_REG (Pmode
, first
), addr
);
6657 insn
= gen_movsi (gen_rtx_REG (Pmode
, first
), addr
);
6662 insn
= gen_load_multiple (gen_rtx_REG (Pmode
, first
),
6664 GEN_INT (last
- first
+ 1));
6668 /* Return insn sequence to load the GOT register. */
6670 static GTY(()) rtx got_symbol
;
6672 s390_load_got (void)
6678 got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
6679 SYMBOL_REF_FLAGS (got_symbol
) = SYMBOL_FLAG_LOCAL
;
6684 if (TARGET_CPU_ZARCH
)
6686 emit_move_insn (pic_offset_table_rtx
, got_symbol
);
6692 offset
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, got_symbol
),
6693 UNSPEC_LTREL_OFFSET
);
6694 offset
= gen_rtx_CONST (Pmode
, offset
);
6695 offset
= force_const_mem (Pmode
, offset
);
6697 emit_move_insn (pic_offset_table_rtx
, offset
);
6699 offset
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, XEXP (offset
, 0)),
6701 offset
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, offset
);
6703 emit_move_insn (pic_offset_table_rtx
, offset
);
6706 insns
= get_insns ();
6711 /* Expand the prologue into a bunch of separate insns. */
6714 s390_emit_prologue (void)
6722 /* Complete frame layout. */
6724 s390_update_frame_layout ();
6726 /* Annotate all constant pool references to let the scheduler know
6727 they implicitly use the base register. */
6729 push_topmost_sequence ();
6731 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
6733 annotate_constant_pool_refs (&PATTERN (insn
));
6735 pop_topmost_sequence ();
6737 /* Choose best register to use for temp use within prologue.
6738 See below for why TPF must use the register 1. */
6740 if (!current_function_is_leaf
&& !TARGET_TPF_PROFILING
)
6741 temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
6743 temp_reg
= gen_rtx_REG (Pmode
, 1);
6745 /* Save call saved gprs. */
6746 if (cfun_frame_layout
.first_save_gpr
!= -1)
6748 insn
= save_gprs (stack_pointer_rtx
,
6749 cfun_frame_layout
.gprs_offset
,
6750 cfun_frame_layout
.first_save_gpr
,
6751 cfun_frame_layout
.last_save_gpr
);
6755 /* Dummy insn to mark literal pool slot. */
6757 if (cfun
->machine
->base_reg
)
6758 emit_insn (gen_main_pool (cfun
->machine
->base_reg
));
6760 offset
= cfun_frame_layout
.f0_offset
;
6762 /* Save f0 and f2. */
6763 for (i
= 0; i
< 2; i
++)
6765 if (cfun_fpr_bit_p (i
))
6767 save_fpr (stack_pointer_rtx
, offset
, i
+ 16);
6770 else if (TARGET_BACKCHAIN
)
6774 /* Save f4 and f6. */
6775 offset
= cfun_frame_layout
.f4_offset
;
6776 for (i
= 2; i
< 4; i
++)
6778 if (cfun_fpr_bit_p (i
))
6780 insn
= save_fpr (stack_pointer_rtx
, offset
, i
+ 16);
6783 /* If f4 and f6 are call clobbered they are saved due to stdargs and
6784 therefore are not frame related. */
6785 if (!call_really_used_regs
[i
+ 16])
6786 RTX_FRAME_RELATED_P (insn
) = 1;
6788 else if (TARGET_BACKCHAIN
)
6792 if (!TARGET_BACKCHAIN
6793 && cfun_save_high_fprs_p
6794 && cfun_frame_layout
.f8_offset
+ cfun_frame_layout
.high_fprs
* 8 > 0)
6796 offset
= (cfun_frame_layout
.f8_offset
6797 + (cfun_frame_layout
.high_fprs
- 1) * 8);
6799 for (i
= 15; i
> 7 && offset
>= 0; i
--)
6800 if (cfun_fpr_bit_p (i
))
6802 insn
= save_fpr (stack_pointer_rtx
, offset
, i
+ 16);
6804 RTX_FRAME_RELATED_P (insn
) = 1;
6807 if (offset
>= cfun_frame_layout
.f8_offset
)
6811 if (TARGET_BACKCHAIN
)
6812 next_fpr
= cfun_save_high_fprs_p
? 31 : 0;
6814 /* Decrement stack pointer. */
6816 if (cfun_frame_layout
.frame_size
> 0)
6818 rtx frame_off
= GEN_INT (-cfun_frame_layout
.frame_size
);
6820 if (s390_stack_size
)
6822 HOST_WIDE_INT stack_check_mask
= ((s390_stack_size
- 1)
6823 & ~(s390_stack_guard
- 1));
6824 rtx t
= gen_rtx_AND (Pmode
, stack_pointer_rtx
,
6825 GEN_INT (stack_check_mask
));
6828 gen_cmpdi (t
, const0_rtx
);
6830 gen_cmpsi (t
, const0_rtx
);
6832 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode
,
6833 gen_rtx_REG (CCmode
,
6839 if (s390_warn_framesize
> 0
6840 && cfun_frame_layout
.frame_size
>= s390_warn_framesize
)
6841 warning ("frame size of %qs is " HOST_WIDE_INT_PRINT_DEC
" bytes",
6842 current_function_name (), cfun_frame_layout
.frame_size
);
6844 if (s390_warn_dynamicstack_p
&& cfun
->calls_alloca
)
6845 warning ("%qs uses dynamic stack allocation", current_function_name ());
6847 /* Save incoming stack pointer into temp reg. */
6848 if (cfun_frame_layout
.save_backchain_p
|| next_fpr
)
6849 insn
= emit_insn (gen_move_insn (temp_reg
, stack_pointer_rtx
));
6851 /* Subtract frame size from stack pointer. */
6853 if (DISP_IN_RANGE (INTVAL (frame_off
)))
6855 insn
= gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
6856 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
6858 insn
= emit_insn (insn
);
6862 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off
), 'K', "K"))
6863 frame_off
= force_const_mem (Pmode
, frame_off
);
6865 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, frame_off
));
6866 annotate_constant_pool_refs (&PATTERN (insn
));
6869 RTX_FRAME_RELATED_P (insn
) = 1;
6871 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
6872 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
6873 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
6874 GEN_INT (-cfun_frame_layout
.frame_size
))),
6877 /* Set backchain. */
6879 if (cfun_frame_layout
.save_backchain_p
)
6881 if (cfun_frame_layout
.backchain_offset
)
6882 addr
= gen_rtx_MEM (Pmode
,
6883 plus_constant (stack_pointer_rtx
,
6884 cfun_frame_layout
.backchain_offset
));
6886 addr
= gen_rtx_MEM (Pmode
, stack_pointer_rtx
);
6887 set_mem_alias_set (addr
, s390_sr_alias_set
);
6888 insn
= emit_insn (gen_move_insn (addr
, temp_reg
));
6891 /* If we support asynchronous exceptions (e.g. for Java),
6892 we need to make sure the backchain pointer is set up
6893 before any possibly trapping memory access. */
6895 if (cfun_frame_layout
.save_backchain_p
&& flag_non_call_exceptions
)
6897 addr
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
6898 emit_insn (gen_rtx_CLOBBER (VOIDmode
, addr
));
6902 /* Save fprs 8 - 15 (64 bit ABI). */
6904 if (cfun_save_high_fprs_p
&& next_fpr
)
6906 insn
= emit_insn (gen_add2_insn (temp_reg
,
6907 GEN_INT (cfun_frame_layout
.f8_offset
)));
6911 for (i
= 24; i
<= next_fpr
; i
++)
6912 if (cfun_fpr_bit_p (i
- 16))
6914 rtx addr
= plus_constant (stack_pointer_rtx
,
6915 cfun_frame_layout
.frame_size
6916 + cfun_frame_layout
.f8_offset
6919 insn
= save_fpr (temp_reg
, offset
, i
);
6921 RTX_FRAME_RELATED_P (insn
) = 1;
6923 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
6924 gen_rtx_SET (VOIDmode
,
6925 gen_rtx_MEM (DFmode
, addr
),
6926 gen_rtx_REG (DFmode
, i
)),
6931 /* Set frame pointer, if needed. */
6933 if (frame_pointer_needed
)
6935 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
6936 RTX_FRAME_RELATED_P (insn
) = 1;
6939 /* Set up got pointer, if needed. */
6941 if (flag_pic
&& regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
])
6943 rtx insns
= s390_load_got ();
6945 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
6947 annotate_constant_pool_refs (&PATTERN (insn
));
6949 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
6956 if (TARGET_TPF_PROFILING
)
6958 /* Generate a BAS instruction to serve as a function
6959 entry intercept to facilitate the use of tracing
6960 algorithms located at the branch target. */
6961 emit_insn (gen_prologue_tpf ());
6963 /* Emit a blockage here so that all code
6964 lies between the profiling mechanisms. */
6965 emit_insn (gen_blockage ());
6969 /* Expand the epilogue into a bunch of separate insns. */
6972 s390_emit_epilogue (bool sibcall
)
6974 rtx frame_pointer
, return_reg
;
6975 int area_bottom
, area_top
, offset
= 0;
6980 if (TARGET_TPF_PROFILING
)
6983 /* Generate a BAS instruction to serve as a function
6984 entry intercept to facilitate the use of tracing
6985 algorithms located at the branch target. */
6987 /* Emit a blockage here so that all code
6988 lies between the profiling mechanisms. */
6989 emit_insn (gen_blockage ());
6991 emit_insn (gen_epilogue_tpf ());
6994 /* Check whether to use frame or stack pointer for restore. */
6996 frame_pointer
= (frame_pointer_needed
6997 ? hard_frame_pointer_rtx
: stack_pointer_rtx
);
6999 s390_frame_area (&area_bottom
, &area_top
);
7001 /* Check whether we can access the register save area.
7002 If not, increment the frame pointer as required. */
7004 if (area_top
<= area_bottom
)
7006 /* Nothing to restore. */
7008 else if (DISP_IN_RANGE (cfun_frame_layout
.frame_size
+ area_bottom
)
7009 && DISP_IN_RANGE (cfun_frame_layout
.frame_size
+ area_top
- 1))
7011 /* Area is in range. */
7012 offset
= cfun_frame_layout
.frame_size
;
7016 rtx insn
, frame_off
;
7018 offset
= area_bottom
< 0 ? -area_bottom
: 0;
7019 frame_off
= GEN_INT (cfun_frame_layout
.frame_size
- offset
);
7021 if (DISP_IN_RANGE (INTVAL (frame_off
)))
7023 insn
= gen_rtx_SET (VOIDmode
, frame_pointer
,
7024 gen_rtx_PLUS (Pmode
, frame_pointer
, frame_off
));
7025 insn
= emit_insn (insn
);
7029 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off
), 'K', "K"))
7030 frame_off
= force_const_mem (Pmode
, frame_off
);
7032 insn
= emit_insn (gen_add2_insn (frame_pointer
, frame_off
));
7033 annotate_constant_pool_refs (&PATTERN (insn
));
7037 /* Restore call saved fprs. */
7041 if (cfun_save_high_fprs_p
)
7043 next_offset
= cfun_frame_layout
.f8_offset
;
7044 for (i
= 24; i
< 32; i
++)
7046 if (cfun_fpr_bit_p (i
- 16))
7048 restore_fpr (frame_pointer
,
7049 offset
+ next_offset
, i
);
7058 next_offset
= cfun_frame_layout
.f4_offset
;
7059 for (i
= 18; i
< 20; i
++)
7061 if (cfun_fpr_bit_p (i
- 16))
7063 restore_fpr (frame_pointer
,
7064 offset
+ next_offset
, i
);
7067 else if (TARGET_BACKCHAIN
)
7073 /* Return register. */
7075 return_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
7077 /* Restore call saved gprs. */
7079 if (cfun_frame_layout
.first_restore_gpr
!= -1)
7084 /* Check for global register and save them
7085 to stack location from where they get restored. */
7087 for (i
= cfun_frame_layout
.first_restore_gpr
;
7088 i
<= cfun_frame_layout
.last_restore_gpr
;
7091 /* These registers are special and need to be
7092 restored in any case. */
7093 if (i
== STACK_POINTER_REGNUM
7094 || i
== RETURN_REGNUM
7096 || (flag_pic
&& i
== (int)PIC_OFFSET_TABLE_REGNUM
))
7101 addr
= plus_constant (frame_pointer
,
7102 offset
+ cfun_frame_layout
.gprs_offset
7103 + (i
- cfun_frame_layout
.first_save_gpr
)
7105 addr
= gen_rtx_MEM (Pmode
, addr
);
7106 set_mem_alias_set (addr
, s390_sr_alias_set
);
7107 emit_move_insn (addr
, gen_rtx_REG (Pmode
, i
));
7113 /* Fetch return address from stack before load multiple,
7114 this will do good for scheduling. */
7116 if (cfun_frame_layout
.save_return_addr_p
7117 || (cfun_frame_layout
.first_restore_gpr
< BASE_REGNUM
7118 && cfun_frame_layout
.last_restore_gpr
> RETURN_REGNUM
))
7120 int return_regnum
= find_unused_clobbered_reg();
7123 return_reg
= gen_rtx_REG (Pmode
, return_regnum
);
7125 addr
= plus_constant (frame_pointer
,
7126 offset
+ cfun_frame_layout
.gprs_offset
7128 - cfun_frame_layout
.first_save_gpr
)
7130 addr
= gen_rtx_MEM (Pmode
, addr
);
7131 set_mem_alias_set (addr
, s390_sr_alias_set
);
7132 emit_move_insn (return_reg
, addr
);
7136 insn
= restore_gprs (frame_pointer
,
7137 offset
+ cfun_frame_layout
.gprs_offset
7138 + (cfun_frame_layout
.first_restore_gpr
7139 - cfun_frame_layout
.first_save_gpr
)
7141 cfun_frame_layout
.first_restore_gpr
,
7142 cfun_frame_layout
.last_restore_gpr
);
7149 /* Return to caller. */
7151 p
= rtvec_alloc (2);
7153 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
7154 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
, return_reg
);
7155 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
7160 /* Return the size in bytes of a function argument of
7161 type TYPE and/or mode MODE. At least one of TYPE or
7162 MODE must be specified. */
7165 s390_function_arg_size (enum machine_mode mode
, tree type
)
7168 return int_size_in_bytes (type
);
7170 /* No type info available for some library calls ... */
7171 if (mode
!= BLKmode
)
7172 return GET_MODE_SIZE (mode
);
7174 /* If we have neither type nor mode, abort */
7178 /* Return true if a function argument of type TYPE and mode MODE
7179 is to be passed in a floating-point register, if available. */
7182 s390_function_arg_float (enum machine_mode mode
, tree type
)
7184 int size
= s390_function_arg_size (mode
, type
);
7188 /* Soft-float changes the ABI: no floating-point registers are used. */
7189 if (TARGET_SOFT_FLOAT
)
7192 /* No type info available for some library calls ... */
7194 return mode
== SFmode
|| mode
== DFmode
;
7196 /* The ABI says that record types with a single member are treated
7197 just like that member would be. */
7198 while (TREE_CODE (type
) == RECORD_TYPE
)
7200 tree field
, single
= NULL_TREE
;
7202 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
7204 if (TREE_CODE (field
) != FIELD_DECL
)
7207 if (single
== NULL_TREE
)
7208 single
= TREE_TYPE (field
);
7213 if (single
== NULL_TREE
)
7219 return TREE_CODE (type
) == REAL_TYPE
;
7222 /* Return true if a function argument of type TYPE and mode MODE
7223 is to be passed in an integer register, or a pair of integer
7224 registers, if available. */
7227 s390_function_arg_integer (enum machine_mode mode
, tree type
)
7229 int size
= s390_function_arg_size (mode
, type
);
7233 /* No type info available for some library calls ... */
7235 return GET_MODE_CLASS (mode
) == MODE_INT
7236 || (TARGET_SOFT_FLOAT
&& GET_MODE_CLASS (mode
) == MODE_FLOAT
);
7238 /* We accept small integral (and similar) types. */
7239 if (INTEGRAL_TYPE_P (type
)
7240 || POINTER_TYPE_P (type
)
7241 || TREE_CODE (type
) == OFFSET_TYPE
7242 || (TARGET_SOFT_FLOAT
&& TREE_CODE (type
) == REAL_TYPE
))
7245 /* We also accept structs of size 1, 2, 4, 8 that are not
7246 passed in floating-point registers. */
7247 if (AGGREGATE_TYPE_P (type
)
7248 && exact_log2 (size
) >= 0
7249 && !s390_function_arg_float (mode
, type
))
7255 /* Return 1 if a function argument of type TYPE and mode MODE
7256 is to be passed by reference. The ABI specifies that only
7257 structures of size 1, 2, 4, or 8 bytes are passed by value,
7258 all other structures (and complex numbers) are passed by
7262 s390_pass_by_reference (CUMULATIVE_ARGS
*ca ATTRIBUTE_UNUSED
,
7263 enum machine_mode mode
, tree type
,
7264 bool named ATTRIBUTE_UNUSED
)
7266 int size
= s390_function_arg_size (mode
, type
);
7272 if (AGGREGATE_TYPE_P (type
) && exact_log2 (size
) < 0)
7275 if (TREE_CODE (type
) == COMPLEX_TYPE
7276 || TREE_CODE (type
) == VECTOR_TYPE
)
7283 /* Update the data in CUM to advance over an argument of mode MODE and
7284 data type TYPE. (TYPE is null for libcalls where that information
7285 may not be available.). The boolean NAMED specifies whether the
7286 argument is a named argument (as opposed to an unnamed argument
7287 matching an ellipsis). */
7290 s390_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
7291 tree type
, int named ATTRIBUTE_UNUSED
)
7293 if (s390_function_arg_float (mode
, type
))
7297 else if (s390_function_arg_integer (mode
, type
))
7299 int size
= s390_function_arg_size (mode
, type
);
7300 cum
->gprs
+= ((size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
);
7306 /* Define where to put the arguments to a function.
7307 Value is zero to push the argument on the stack,
7308 or a hard register in which to store the argument.
7310 MODE is the argument's machine mode.
7311 TYPE is the data type of the argument (as a tree).
7312 This is null for libcalls where that information may
7314 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7315 the preceding args and about the function being called.
7316 NAMED is nonzero if this argument is a named parameter
7317 (otherwise it is an extra parameter matching an ellipsis).
7319 On S/390, we use general purpose registers 2 through 6 to
7320 pass integer, pointer, and certain structure arguments, and
7321 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7322 to pass floating point arguments. All remaining arguments
7323 are pushed to the stack. */
7326 s390_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
7327 int named ATTRIBUTE_UNUSED
)
7329 if (s390_function_arg_float (mode
, type
))
7331 if (cum
->fprs
+ 1 > (TARGET_64BIT
? 4 : 2))
7334 return gen_rtx_REG (mode
, cum
->fprs
+ 16);
7336 else if (s390_function_arg_integer (mode
, type
))
7338 int size
= s390_function_arg_size (mode
, type
);
7339 int n_gprs
= (size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
;
7341 if (cum
->gprs
+ n_gprs
> 5)
7344 return gen_rtx_REG (mode
, cum
->gprs
+ 2);
7347 /* After the real arguments, expand_call calls us once again
7348 with a void_type_node type. Whatever we return here is
7349 passed as operand 2 to the call expanders.
7351 We don't need this feature ... */
7352 else if (type
== void_type_node
)
7358 /* Return true if return values of type TYPE should be returned
7359 in a memory buffer whose address is passed by the caller as
7360 hidden first argument. */
7363 s390_return_in_memory (tree type
, tree fundecl ATTRIBUTE_UNUSED
)
7365 /* We accept small integral (and similar) types. */
7366 if (INTEGRAL_TYPE_P (type
)
7367 || POINTER_TYPE_P (type
)
7368 || TREE_CODE (type
) == OFFSET_TYPE
7369 || TREE_CODE (type
) == REAL_TYPE
)
7370 return int_size_in_bytes (type
) > 8;
7372 /* Aggregates and similar constructs are always returned
7374 if (AGGREGATE_TYPE_P (type
)
7375 || TREE_CODE (type
) == COMPLEX_TYPE
7376 || TREE_CODE (type
) == VECTOR_TYPE
)
7379 /* ??? We get called on all sorts of random stuff from
7380 aggregate_value_p. We can't abort, but it's not clear
7381 what's safe to return. Pretend it's a struct I guess. */
7385 /* Define where to return a (scalar) value of type TYPE.
7386 If TYPE is null, define where to return a (scalar)
7387 value of mode MODE from a libcall. */
7390 s390_function_value (tree type
, enum machine_mode mode
)
7394 int unsignedp
= TYPE_UNSIGNED (type
);
7395 mode
= promote_mode (type
, TYPE_MODE (type
), &unsignedp
, 1);
7398 if (GET_MODE_CLASS (mode
) != MODE_INT
7399 && GET_MODE_CLASS (mode
) != MODE_FLOAT
)
7401 if (GET_MODE_SIZE (mode
) > 8)
7404 if (TARGET_HARD_FLOAT
&& GET_MODE_CLASS (mode
) == MODE_FLOAT
)
7405 return gen_rtx_REG (mode
, 16);
7407 return gen_rtx_REG (mode
, 2);
7411 /* Create and return the va_list datatype.
7413 On S/390, va_list is an array type equivalent to
7415 typedef struct __va_list_tag
7419 void *__overflow_arg_area;
7420 void *__reg_save_area;
7423 where __gpr and __fpr hold the number of general purpose
7424 or floating point arguments used up to now, respectively,
7425 __overflow_arg_area points to the stack location of the
7426 next argument passed on the stack, and __reg_save_area
7427 always points to the start of the register area in the
7428 call frame of the current function. The function prologue
7429 saves all registers used for argument passing into this
7430 area if the function uses variable arguments. */
7433 s390_build_builtin_va_list (void)
7435 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
7437 record
= lang_hooks
.types
.make_type (RECORD_TYPE
);
7440 build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
7442 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("__gpr"),
7443 long_integer_type_node
);
7444 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("__fpr"),
7445 long_integer_type_node
);
7446 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("__overflow_arg_area"),
7448 f_sav
= build_decl (FIELD_DECL
, get_identifier ("__reg_save_area"),
7451 DECL_FIELD_CONTEXT (f_gpr
) = record
;
7452 DECL_FIELD_CONTEXT (f_fpr
) = record
;
7453 DECL_FIELD_CONTEXT (f_ovf
) = record
;
7454 DECL_FIELD_CONTEXT (f_sav
) = record
;
7456 TREE_CHAIN (record
) = type_decl
;
7457 TYPE_NAME (record
) = type_decl
;
7458 TYPE_FIELDS (record
) = f_gpr
;
7459 TREE_CHAIN (f_gpr
) = f_fpr
;
7460 TREE_CHAIN (f_fpr
) = f_ovf
;
7461 TREE_CHAIN (f_ovf
) = f_sav
;
7463 layout_type (record
);
7465 /* The correct type is an array type of one element. */
7466 return build_array_type (record
, build_index_type (size_zero_node
));
7469 /* Implement va_start by filling the va_list structure VALIST.
7470 STDARG_P is always true, and ignored.
7471 NEXTARG points to the first anonymous stack argument.
7473 The following global variables are used to initialize
7474 the va_list structure:
7476 current_function_args_info:
7477 holds number of gprs and fprs used for named arguments.
7478 current_function_arg_offset_rtx:
7479 holds the offset of the first anonymous stack argument
7480 (relative to the virtual arg pointer). */
7483 s390_va_start (tree valist
, rtx nextarg ATTRIBUTE_UNUSED
)
7485 HOST_WIDE_INT n_gpr
, n_fpr
;
7487 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
7488 tree gpr
, fpr
, ovf
, sav
, t
;
7490 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
7491 f_fpr
= TREE_CHAIN (f_gpr
);
7492 f_ovf
= TREE_CHAIN (f_fpr
);
7493 f_sav
= TREE_CHAIN (f_ovf
);
7495 valist
= build_va_arg_indirect_ref (valist
);
7496 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
7497 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
7498 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
7499 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
7501 /* Count number of gp and fp argument registers used. */
7503 n_gpr
= current_function_args_info
.gprs
;
7504 n_fpr
= current_function_args_info
.fprs
;
7506 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
,
7507 build_int_cst (NULL_TREE
, n_gpr
));
7508 TREE_SIDE_EFFECTS (t
) = 1;
7509 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7511 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
,
7512 build_int_cst (NULL_TREE
, n_fpr
));
7513 TREE_SIDE_EFFECTS (t
) = 1;
7514 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7516 /* Find the overflow area. */
7517 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
7519 off
= INTVAL (current_function_arg_offset_rtx
);
7520 off
= off
< 0 ? 0 : off
;
7521 if (TARGET_DEBUG_ARG
)
7522 fprintf (stderr
, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
7523 (int)n_gpr
, (int)n_fpr
, off
);
7525 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
, build_int_cst (NULL_TREE
, off
));
7527 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
7528 TREE_SIDE_EFFECTS (t
) = 1;
7529 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7531 /* Find the register save area. */
7532 t
= make_tree (TREE_TYPE (sav
), return_address_pointer_rtx
);
7533 if (TARGET_KERNEL_BACKCHAIN
)
7534 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
7535 build_int_cst (NULL_TREE
,
7536 -(RETURN_REGNUM
- 2) * UNITS_PER_WORD
7537 - (TARGET_64BIT
? 4 : 2) * 8));
7539 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
7540 build_int_cst (NULL_TREE
, -RETURN_REGNUM
* UNITS_PER_WORD
));
7542 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
7543 TREE_SIDE_EFFECTS (t
) = 1;
7544 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
7547 /* Implement va_arg by updating the va_list structure
7548 VALIST as required to retrieve an argument of type
7549 TYPE, and returning that argument.
7551 Generates code equivalent to:
7553 if (integral value) {
7554 if (size <= 4 && args.gpr < 5 ||
7555 size > 4 && args.gpr < 4 )
7556 ret = args.reg_save_area[args.gpr+8]
7558 ret = *args.overflow_arg_area++;
7559 } else if (float value) {
7561 ret = args.reg_save_area[args.fpr+64]
7563 ret = *args.overflow_arg_area++;
7564 } else if (aggregate value) {
7566 ret = *args.reg_save_area[args.gpr]
7568 ret = **args.overflow_arg_area++;
7572 s390_gimplify_va_arg (tree valist
, tree type
, tree
*pre_p
,
7573 tree
*post_p ATTRIBUTE_UNUSED
)
7575 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
7576 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
7577 int indirect_p
, size
, n_reg
, sav_ofs
, sav_scale
, max_reg
;
7578 tree lab_false
, lab_over
, addr
;
7580 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
7581 f_fpr
= TREE_CHAIN (f_gpr
);
7582 f_ovf
= TREE_CHAIN (f_fpr
);
7583 f_sav
= TREE_CHAIN (f_ovf
);
7585 valist
= build_va_arg_indirect_ref (valist
);
7586 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
7587 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
7588 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
7589 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
7591 size
= int_size_in_bytes (type
);
7593 if (pass_by_reference (NULL
, TYPE_MODE (type
), type
, false))
7595 if (TARGET_DEBUG_ARG
)
7597 fprintf (stderr
, "va_arg: aggregate type");
7601 /* Aggregates are passed by reference. */
7606 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7607 will be added by s390_frame_info because for va_args always an even
7608 number of gprs has to be saved r15-r2 = 14 regs. */
7609 sav_ofs
= (TARGET_KERNEL_BACKCHAIN
7610 ? (TARGET_64BIT
? 4 : 2) * 8 : 2 * UNITS_PER_WORD
);
7611 sav_scale
= UNITS_PER_WORD
;
7612 size
= UNITS_PER_WORD
;
7615 else if (s390_function_arg_float (TYPE_MODE (type
), type
))
7617 if (TARGET_DEBUG_ARG
)
7619 fprintf (stderr
, "va_arg: float type");
7623 /* FP args go in FP registers, if present. */
7627 sav_ofs
= TARGET_KERNEL_BACKCHAIN
? 0 : 16 * UNITS_PER_WORD
;
7629 /* TARGET_64BIT has up to 4 parameters in fprs */
7630 max_reg
= TARGET_64BIT
? 3 : 1;
7634 if (TARGET_DEBUG_ARG
)
7636 fprintf (stderr
, "va_arg: other type");
7640 /* Otherwise into GP registers. */
7643 n_reg
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
7645 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7646 will be added by s390_frame_info because for va_args always an even
7647 number of gprs has to be saved r15-r2 = 14 regs. */
7648 sav_ofs
= TARGET_KERNEL_BACKCHAIN
?
7649 (TARGET_64BIT
? 4 : 2) * 8 : 2*UNITS_PER_WORD
;
7651 if (size
< UNITS_PER_WORD
)
7652 sav_ofs
+= UNITS_PER_WORD
- size
;
7654 sav_scale
= UNITS_PER_WORD
;
7661 /* Pull the value out of the saved registers ... */
7663 lab_false
= create_artificial_label ();
7664 lab_over
= create_artificial_label ();
7665 addr
= create_tmp_var (ptr_type_node
, "addr");
7667 t
= fold_convert (TREE_TYPE (reg
), size_int (max_reg
));
7668 t
= build2 (GT_EXPR
, boolean_type_node
, reg
, t
);
7669 u
= build1 (GOTO_EXPR
, void_type_node
, lab_false
);
7670 t
= build3 (COND_EXPR
, void_type_node
, t
, u
, NULL_TREE
);
7671 gimplify_and_add (t
, pre_p
);
7673 t
= build2 (PLUS_EXPR
, ptr_type_node
, sav
,
7674 fold_convert (ptr_type_node
, size_int (sav_ofs
)));
7675 u
= build2 (MULT_EXPR
, TREE_TYPE (reg
), reg
,
7676 fold_convert (TREE_TYPE (reg
), size_int (sav_scale
)));
7677 t
= build2 (PLUS_EXPR
, ptr_type_node
, t
, fold_convert (ptr_type_node
, u
));
7679 t
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
7680 gimplify_and_add (t
, pre_p
);
7682 t
= build1 (GOTO_EXPR
, void_type_node
, lab_over
);
7683 gimplify_and_add (t
, pre_p
);
7685 t
= build1 (LABEL_EXPR
, void_type_node
, lab_false
);
7686 append_to_statement_list (t
, pre_p
);
7689 /* ... Otherwise out of the overflow area. */
7692 if (size
< UNITS_PER_WORD
)
7693 t
= build2 (PLUS_EXPR
, ptr_type_node
, t
,
7694 fold_convert (ptr_type_node
, size_int (UNITS_PER_WORD
- size
)));
7696 gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
7698 u
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
7699 gimplify_and_add (u
, pre_p
);
7701 t
= build2 (PLUS_EXPR
, ptr_type_node
, t
,
7702 fold_convert (ptr_type_node
, size_int (size
)));
7703 t
= build2 (MODIFY_EXPR
, ptr_type_node
, ovf
, t
);
7704 gimplify_and_add (t
, pre_p
);
7706 t
= build1 (LABEL_EXPR
, void_type_node
, lab_over
);
7707 append_to_statement_list (t
, pre_p
);
7710 /* Increment register save count. */
7712 u
= build2 (PREINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
7713 fold_convert (TREE_TYPE (reg
), size_int (n_reg
)));
7714 gimplify_and_add (u
, pre_p
);
7718 t
= build_pointer_type (build_pointer_type (type
));
7719 addr
= fold_convert (t
, addr
);
7720 addr
= build_va_arg_indirect_ref (addr
);
7724 t
= build_pointer_type (type
);
7725 addr
= fold_convert (t
, addr
);
7728 return build_va_arg_indirect_ref (addr
);
7736 S390_BUILTIN_THREAD_POINTER
,
7737 S390_BUILTIN_SET_THREAD_POINTER
,
7742 static unsigned int const code_for_builtin_64
[S390_BUILTIN_max
] = {
7747 static unsigned int const code_for_builtin_31
[S390_BUILTIN_max
] = {
7753 s390_init_builtins (void)
7757 ftype
= build_function_type (ptr_type_node
, void_list_node
);
7758 lang_hooks
.builtin_function ("__builtin_thread_pointer", ftype
,
7759 S390_BUILTIN_THREAD_POINTER
, BUILT_IN_MD
,
7762 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
7763 lang_hooks
.builtin_function ("__builtin_set_thread_pointer", ftype
,
7764 S390_BUILTIN_SET_THREAD_POINTER
, BUILT_IN_MD
,
7768 /* Expand an expression EXP that calls a built-in function,
7769 with result going to TARGET if that's convenient
7770 (and in mode MODE if that's convenient).
7771 SUBTARGET may be used as the target for computing one of EXP's operands.
7772 IGNORE is nonzero if the value is to be ignored. */
7775 s390_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
7776 enum machine_mode mode ATTRIBUTE_UNUSED
,
7777 int ignore ATTRIBUTE_UNUSED
)
7781 unsigned int const *code_for_builtin
=
7782 TARGET_64BIT
? code_for_builtin_64
: code_for_builtin_31
;
7784 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7785 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
7786 tree arglist
= TREE_OPERAND (exp
, 1);
7787 enum insn_code icode
;
7788 rtx op
[MAX_ARGS
], pat
;
7792 if (fcode
>= S390_BUILTIN_max
)
7793 internal_error ("bad builtin fcode");
7794 icode
= code_for_builtin
[fcode
];
7796 internal_error ("bad builtin fcode");
7798 nonvoid
= TREE_TYPE (TREE_TYPE (fndecl
)) != void_type_node
;
7800 for (arglist
= TREE_OPERAND (exp
, 1), arity
= 0;
7802 arglist
= TREE_CHAIN (arglist
), arity
++)
7804 const struct insn_operand_data
*insn_op
;
7806 tree arg
= TREE_VALUE (arglist
);
7807 if (arg
== error_mark_node
)
7809 if (arity
> MAX_ARGS
)
7812 insn_op
= &insn_data
[icode
].operand
[arity
+ nonvoid
];
7814 op
[arity
] = expand_expr (arg
, NULL_RTX
, insn_op
->mode
, 0);
7816 if (!(*insn_op
->predicate
) (op
[arity
], insn_op
->mode
))
7817 op
[arity
] = copy_to_mode_reg (insn_op
->mode
, op
[arity
]);
7822 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
7824 || GET_MODE (target
) != tmode
7825 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
7826 target
= gen_reg_rtx (tmode
);
7832 pat
= GEN_FCN (icode
) (target
);
7836 pat
= GEN_FCN (icode
) (target
, op
[0]);
7838 pat
= GEN_FCN (icode
) (op
[0]);
7841 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
7857 /* Output assembly code for the trampoline template to
7860 On S/390, we use gpr 1 internally in the trampoline code;
7861 gpr 0 is used to hold the static chain. */
7864 s390_trampoline_template (FILE *file
)
7867 op
[0] = gen_rtx_REG (Pmode
, 0);
7868 op
[1] = gen_rtx_REG (Pmode
, 1);
7872 output_asm_insn ("basr\t%1,0", op
);
7873 output_asm_insn ("lmg\t%0,%1,14(%1)", op
);
7874 output_asm_insn ("br\t%1", op
);
7875 ASM_OUTPUT_SKIP (file
, (HOST_WIDE_INT
)(TRAMPOLINE_SIZE
- 10));
7879 output_asm_insn ("basr\t%1,0", op
);
7880 output_asm_insn ("lm\t%0,%1,6(%1)", op
);
7881 output_asm_insn ("br\t%1", op
);
7882 ASM_OUTPUT_SKIP (file
, (HOST_WIDE_INT
)(TRAMPOLINE_SIZE
- 8));
7886 /* Emit RTL insns to initialize the variable parts of a trampoline.
7887 FNADDR is an RTX for the address of the function's pure code.
7888 CXT is an RTX for the static chain value for the function. */
7891 s390_initialize_trampoline (rtx addr
, rtx fnaddr
, rtx cxt
)
7893 emit_move_insn (gen_rtx_MEM (Pmode
,
7894 memory_address (Pmode
,
7895 plus_constant (addr
, (TARGET_64BIT
? 16 : 8)))), cxt
);
7896 emit_move_insn (gen_rtx_MEM (Pmode
,
7897 memory_address (Pmode
,
7898 plus_constant (addr
, (TARGET_64BIT
? 24 : 12)))), fnaddr
);
7901 /* Return rtx for 64-bit constant formed from the 32-bit subwords
7902 LOW and HIGH, independent of the host word size. */
7905 s390_gen_rtx_const_DI (int high
, int low
)
7907 #if HOST_BITS_PER_WIDE_INT >= 64
7909 val
= (HOST_WIDE_INT
)high
;
7911 val
|= (HOST_WIDE_INT
)low
;
7913 return GEN_INT (val
);
7915 #if HOST_BITS_PER_WIDE_INT >= 32
7916 return immed_double_const ((HOST_WIDE_INT
)low
, (HOST_WIDE_INT
)high
, DImode
);
7923 /* Output assembler code to FILE to increment profiler label # LABELNO
7924 for profiling a function entry. */
7927 s390_function_profiler (FILE *file
, int labelno
)
7932 ASM_GENERATE_INTERNAL_LABEL (label
, "LP", labelno
);
7934 fprintf (file
, "# function profiler \n");
7936 op
[0] = gen_rtx_REG (Pmode
, RETURN_REGNUM
);
7937 op
[1] = gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
7938 op
[1] = gen_rtx_MEM (Pmode
, plus_constant (op
[1], UNITS_PER_WORD
));
7940 op
[2] = gen_rtx_REG (Pmode
, 1);
7941 op
[3] = gen_rtx_SYMBOL_REF (Pmode
, label
);
7942 SYMBOL_REF_FLAGS (op
[3]) = SYMBOL_FLAG_LOCAL
;
7944 op
[4] = gen_rtx_SYMBOL_REF (Pmode
, "_mcount");
7947 op
[4] = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op
[4]), UNSPEC_PLT
);
7948 op
[4] = gen_rtx_CONST (Pmode
, op
[4]);
7953 output_asm_insn ("stg\t%0,%1", op
);
7954 output_asm_insn ("larl\t%2,%3", op
);
7955 output_asm_insn ("brasl\t%0,%4", op
);
7956 output_asm_insn ("lg\t%0,%1", op
);
7960 op
[6] = gen_label_rtx ();
7962 output_asm_insn ("st\t%0,%1", op
);
7963 output_asm_insn ("bras\t%2,%l6", op
);
7964 output_asm_insn (".long\t%4", op
);
7965 output_asm_insn (".long\t%3", op
);
7966 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[6]));
7967 output_asm_insn ("l\t%0,0(%2)", op
);
7968 output_asm_insn ("l\t%2,4(%2)", op
);
7969 output_asm_insn ("basr\t%0,%0", op
);
7970 output_asm_insn ("l\t%0,%1", op
);
7974 op
[5] = gen_label_rtx ();
7975 op
[6] = gen_label_rtx ();
7977 output_asm_insn ("st\t%0,%1", op
);
7978 output_asm_insn ("bras\t%2,%l6", op
);
7979 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[5]));
7980 output_asm_insn (".long\t%4-%l5", op
);
7981 output_asm_insn (".long\t%3-%l5", op
);
7982 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[6]));
7983 output_asm_insn ("lr\t%0,%2", op
);
7984 output_asm_insn ("a\t%0,0(%2)", op
);
7985 output_asm_insn ("a\t%2,4(%2)", op
);
7986 output_asm_insn ("basr\t%0,%0", op
);
7987 output_asm_insn ("l\t%0,%1", op
);
7991 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
7992 into its SYMBOL_REF_FLAGS. */
7995 s390_encode_section_info (tree decl
, rtx rtl
, int first
)
7997 default_encode_section_info (decl
, rtl
, first
);
7999 /* If a variable has a forced alignment to < 2 bytes, mark it with
8000 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
8001 if (TREE_CODE (decl
) == VAR_DECL
8002 && DECL_USER_ALIGN (decl
) && DECL_ALIGN (decl
) < 16)
8003 SYMBOL_REF_FLAGS (XEXP (rtl
, 0)) |= SYMBOL_FLAG_ALIGN1
;
8006 /* Output thunk to FILE that implements a C++ virtual function call (with
8007 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
8008 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
8009 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
8010 relative to the resulting this pointer. */
8013 s390_output_mi_thunk (FILE *file
, tree thunk ATTRIBUTE_UNUSED
,
8014 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
8020 /* Operand 0 is the target function. */
8021 op
[0] = XEXP (DECL_RTL (function
), 0);
8022 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (op
[0]))
8025 op
[0] = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op
[0]),
8026 TARGET_64BIT
? UNSPEC_PLT
: UNSPEC_GOT
);
8027 op
[0] = gen_rtx_CONST (Pmode
, op
[0]);
8030 /* Operand 1 is the 'this' pointer. */
8031 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
8032 op
[1] = gen_rtx_REG (Pmode
, 3);
8034 op
[1] = gen_rtx_REG (Pmode
, 2);
8036 /* Operand 2 is the delta. */
8037 op
[2] = GEN_INT (delta
);
8039 /* Operand 3 is the vcall_offset. */
8040 op
[3] = GEN_INT (vcall_offset
);
8042 /* Operand 4 is the temporary register. */
8043 op
[4] = gen_rtx_REG (Pmode
, 1);
8045 /* Operands 5 to 8 can be used as labels. */
8051 /* Operand 9 can be used for temporary register. */
8054 /* Generate code. */
8057 /* Setup literal pool pointer if required. */
8058 if ((!DISP_IN_RANGE (delta
)
8059 && !CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
8060 || (!DISP_IN_RANGE (vcall_offset
)
8061 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K")))
8063 op
[5] = gen_label_rtx ();
8064 output_asm_insn ("larl\t%4,%5", op
);
8067 /* Add DELTA to this pointer. */
8070 if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'J', "J"))
8071 output_asm_insn ("la\t%1,%2(%1)", op
);
8072 else if (DISP_IN_RANGE (delta
))
8073 output_asm_insn ("lay\t%1,%2(%1)", op
);
8074 else if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
8075 output_asm_insn ("aghi\t%1,%2", op
);
8078 op
[6] = gen_label_rtx ();
8079 output_asm_insn ("agf\t%1,%6-%5(%4)", op
);
8083 /* Perform vcall adjustment. */
8086 if (DISP_IN_RANGE (vcall_offset
))
8088 output_asm_insn ("lg\t%4,0(%1)", op
);
8089 output_asm_insn ("ag\t%1,%3(%4)", op
);
8091 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K"))
8093 output_asm_insn ("lghi\t%4,%3", op
);
8094 output_asm_insn ("ag\t%4,0(%1)", op
);
8095 output_asm_insn ("ag\t%1,0(%4)", op
);
8099 op
[7] = gen_label_rtx ();
8100 output_asm_insn ("llgf\t%4,%7-%5(%4)", op
);
8101 output_asm_insn ("ag\t%4,0(%1)", op
);
8102 output_asm_insn ("ag\t%1,0(%4)", op
);
8106 /* Jump to target. */
8107 output_asm_insn ("jg\t%0", op
);
8109 /* Output literal pool if required. */
8112 output_asm_insn (".align\t4", op
);
8113 targetm
.asm_out
.internal_label (file
, "L",
8114 CODE_LABEL_NUMBER (op
[5]));
8118 targetm
.asm_out
.internal_label (file
, "L",
8119 CODE_LABEL_NUMBER (op
[6]));
8120 output_asm_insn (".long\t%2", op
);
8124 targetm
.asm_out
.internal_label (file
, "L",
8125 CODE_LABEL_NUMBER (op
[7]));
8126 output_asm_insn (".long\t%3", op
);
8131 /* Setup base pointer if required. */
8133 || (!DISP_IN_RANGE (delta
)
8134 && !CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
8135 || (!DISP_IN_RANGE (delta
)
8136 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K")))
8138 op
[5] = gen_label_rtx ();
8139 output_asm_insn ("basr\t%4,0", op
);
8140 targetm
.asm_out
.internal_label (file
, "L",
8141 CODE_LABEL_NUMBER (op
[5]));
8144 /* Add DELTA to this pointer. */
8147 if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'J', "J"))
8148 output_asm_insn ("la\t%1,%2(%1)", op
);
8149 else if (DISP_IN_RANGE (delta
))
8150 output_asm_insn ("lay\t%1,%2(%1)", op
);
8151 else if (CONST_OK_FOR_CONSTRAINT_P (delta
, 'K', "K"))
8152 output_asm_insn ("ahi\t%1,%2", op
);
8155 op
[6] = gen_label_rtx ();
8156 output_asm_insn ("a\t%1,%6-%5(%4)", op
);
8160 /* Perform vcall adjustment. */
8163 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'J', "J"))
8165 output_asm_insn ("lg\t%4,0(%1)", op
);
8166 output_asm_insn ("a\t%1,%3(%4)", op
);
8168 else if (DISP_IN_RANGE (vcall_offset
))
8170 output_asm_insn ("lg\t%4,0(%1)", op
);
8171 output_asm_insn ("ay\t%1,%3(%4)", op
);
8173 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset
, 'K', "K"))
8175 output_asm_insn ("lhi\t%4,%3", op
);
8176 output_asm_insn ("a\t%4,0(%1)", op
);
8177 output_asm_insn ("a\t%1,0(%4)", op
);
8181 op
[7] = gen_label_rtx ();
8182 output_asm_insn ("l\t%4,%7-%5(%4)", op
);
8183 output_asm_insn ("a\t%4,0(%1)", op
);
8184 output_asm_insn ("a\t%1,0(%4)", op
);
8187 /* We had to clobber the base pointer register.
8188 Re-setup the base pointer (with a different base). */
8189 op
[5] = gen_label_rtx ();
8190 output_asm_insn ("basr\t%4,0", op
);
8191 targetm
.asm_out
.internal_label (file
, "L",
8192 CODE_LABEL_NUMBER (op
[5]));
8195 /* Jump to target. */
8196 op
[8] = gen_label_rtx ();
8199 output_asm_insn ("l\t%4,%8-%5(%4)", op
);
8201 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
8202 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8203 else if (flag_pic
== 1)
8205 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
8206 output_asm_insn ("l\t%4,%0(%4)", op
);
8208 else if (flag_pic
== 2)
8210 op
[9] = gen_rtx_REG (Pmode
, 0);
8211 output_asm_insn ("l\t%9,%8-4-%5(%4)", op
);
8212 output_asm_insn ("a\t%4,%8-%5(%4)", op
);
8213 output_asm_insn ("ar\t%4,%9", op
);
8214 output_asm_insn ("l\t%4,0(%4)", op
);
8217 output_asm_insn ("br\t%4", op
);
8219 /* Output literal pool. */
8220 output_asm_insn (".align\t4", op
);
8222 if (nonlocal
&& flag_pic
== 2)
8223 output_asm_insn (".long\t%0", op
);
8226 op
[0] = gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
8227 SYMBOL_REF_FLAGS (op
[0]) = SYMBOL_FLAG_LOCAL
;
8230 targetm
.asm_out
.internal_label (file
, "L", CODE_LABEL_NUMBER (op
[8]));
8232 output_asm_insn (".long\t%0", op
);
8234 output_asm_insn (".long\t%0-%5", op
);
8238 targetm
.asm_out
.internal_label (file
, "L",
8239 CODE_LABEL_NUMBER (op
[6]));
8240 output_asm_insn (".long\t%2", op
);
8244 targetm
.asm_out
.internal_label (file
, "L",
8245 CODE_LABEL_NUMBER (op
[7]));
8246 output_asm_insn (".long\t%3", op
);
8252 s390_valid_pointer_mode (enum machine_mode mode
)
8254 return (mode
== SImode
|| (TARGET_64BIT
&& mode
== DImode
));
8257 /* How to allocate a 'struct machine_function'. */
8259 static struct machine_function
*
8260 s390_init_machine_status (void)
8262 return ggc_alloc_cleared (sizeof (struct machine_function
));
8265 /* Checks whether the given ARGUMENT_LIST would use a caller
8266 saved register. This is used to decide whether sibling call
8267 optimization could be performed on the respective function
8271 s390_call_saved_register_used (tree argument_list
)
8273 CUMULATIVE_ARGS cum
;
8275 enum machine_mode mode
;
8280 INIT_CUMULATIVE_ARGS (cum
, NULL
, NULL
, 0, 0);
8282 while (argument_list
)
8284 parameter
= TREE_VALUE (argument_list
);
8285 argument_list
= TREE_CHAIN (argument_list
);
8290 /* For an undeclared variable passed as parameter we will get
8291 an ERROR_MARK node here. */
8292 if (TREE_CODE (parameter
) == ERROR_MARK
)
8295 if (! (type
= TREE_TYPE (parameter
)))
8298 if (! (mode
= TYPE_MODE (TREE_TYPE (parameter
))))
8301 if (pass_by_reference (&cum
, mode
, type
, true))
8304 type
= build_pointer_type (type
);
8307 parm_rtx
= s390_function_arg (&cum
, mode
, type
, 0);
8309 s390_function_arg_advance (&cum
, mode
, type
, 0);
8311 if (parm_rtx
&& REG_P (parm_rtx
))
8314 reg
< HARD_REGNO_NREGS (REGNO (parm_rtx
), GET_MODE (parm_rtx
));
8316 if (! call_used_regs
[reg
+ REGNO (parm_rtx
)])
8323 /* Return true if the given call expression can be
8324 turned into a sibling call.
8325 DECL holds the declaration of the function to be called whereas
8326 EXP is the call expression itself. */
8329 s390_function_ok_for_sibcall (tree decl
, tree exp
)
8331 /* The TPF epilogue uses register 1. */
8332 if (TARGET_TPF_PROFILING
)
8335 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8336 which would have to be restored before the sibcall. */
8337 if (!TARGET_64BIT
&& flag_pic
&& decl
&& TREE_PUBLIC (decl
))
8340 /* Register 6 on s390 is available as an argument register but unfortunately
8341 "caller saved". This makes functions needing this register for arguments
8342 not suitable for sibcalls. */
8343 if (TREE_OPERAND (exp
, 1)
8344 && s390_call_saved_register_used (TREE_OPERAND (exp
, 1)))
8350 /* Return the fixed registers used for condition codes. */
8353 s390_fixed_condition_code_regs (unsigned int *p1
, unsigned int *p2
)
8356 *p2
= INVALID_REGNUM
;
8361 /* This function is used by the call expanders of the machine description.
8362 It emits the call insn itself together with the necessary operations
8363 to adjust the target address and returns the emitted insn.
8364 ADDR_LOCATION is the target address rtx
8365 TLS_CALL the location of the thread-local symbol
8366 RESULT_REG the register where the result of the call should be stored
8367 RETADDR_REG the register where the return address should be stored
8368 If this parameter is NULL_RTX the call is considered
8369 to be a sibling call. */
8372 s390_emit_call (rtx addr_location
, rtx tls_call
, rtx result_reg
,
8375 bool plt_call
= false;
8381 /* Direct function calls need special treatment. */
8382 if (GET_CODE (addr_location
) == SYMBOL_REF
)
8384 /* When calling a global routine in PIC mode, we must
8385 replace the symbol itself with the PLT stub. */
8386 if (flag_pic
&& !SYMBOL_REF_LOCAL_P (addr_location
))
8388 addr_location
= gen_rtx_UNSPEC (Pmode
,
8389 gen_rtvec (1, addr_location
),
8391 addr_location
= gen_rtx_CONST (Pmode
, addr_location
);
8395 /* Unless we can use the bras(l) insn, force the
8396 routine address into a register. */
8397 if (!TARGET_SMALL_EXEC
&& !TARGET_CPU_ZARCH
)
8400 addr_location
= legitimize_pic_address (addr_location
, 0);
8402 addr_location
= force_reg (Pmode
, addr_location
);
8406 /* If it is already an indirect call or the code above moved the
8407 SYMBOL_REF to somewhere else make sure the address can be found in
8409 if (retaddr_reg
== NULL_RTX
8410 && GET_CODE (addr_location
) != SYMBOL_REF
8413 emit_move_insn (gen_rtx_REG (Pmode
, SIBCALL_REGNUM
), addr_location
);
8414 addr_location
= gen_rtx_REG (Pmode
, SIBCALL_REGNUM
);
8417 addr_location
= gen_rtx_MEM (QImode
, addr_location
);
8418 call
= gen_rtx_CALL (VOIDmode
, addr_location
, const0_rtx
);
8420 if (result_reg
!= NULL_RTX
)
8421 call
= gen_rtx_SET (VOIDmode
, result_reg
, call
);
8423 if (retaddr_reg
!= NULL_RTX
)
8425 clobber
= gen_rtx_CLOBBER (VOIDmode
, retaddr_reg
);
8427 if (tls_call
!= NULL_RTX
)
8428 vec
= gen_rtvec (3, call
, clobber
,
8429 gen_rtx_USE (VOIDmode
, tls_call
));
8431 vec
= gen_rtvec (2, call
, clobber
);
8433 call
= gen_rtx_PARALLEL (VOIDmode
, vec
);
8436 insn
= emit_call_insn (call
);
8438 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8439 if ((!TARGET_64BIT
&& plt_call
) || tls_call
!= NULL_RTX
)
8441 /* s390_function_ok_for_sibcall should
8442 have denied sibcalls in this case. */
8443 if (retaddr_reg
== NULL_RTX
)
8446 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), pic_offset_table_rtx
);
8451 /* Implement CONDITIONAL_REGISTER_USAGE. */
8454 s390_conditional_register_usage (void)
8460 fixed_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
8461 call_used_regs
[PIC_OFFSET_TABLE_REGNUM
] = 1;
8463 if (TARGET_CPU_ZARCH
)
8465 fixed_regs
[RETURN_REGNUM
] = 0;
8466 call_used_regs
[RETURN_REGNUM
] = 0;
8470 for (i
= 24; i
< 32; i
++)
8471 call_used_regs
[i
] = call_really_used_regs
[i
] = 0;
8475 for (i
= 18; i
< 20; i
++)
8476 call_used_regs
[i
] = call_really_used_regs
[i
] = 0;
8479 if (TARGET_SOFT_FLOAT
)
8481 for (i
= 16; i
< 32; i
++)
8482 call_used_regs
[i
] = fixed_regs
[i
] = 1;
8486 /* Corresponding function to eh_return expander. */
8488 static GTY(()) rtx s390_tpf_eh_return_symbol
;
8490 s390_emit_tpf_eh_return (rtx target
)
8494 if (!s390_tpf_eh_return_symbol
)
8495 s390_tpf_eh_return_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "__tpf_eh_return");
8497 reg
= gen_rtx_REG (Pmode
, 2);
8499 emit_move_insn (reg
, target
);
8500 insn
= s390_emit_call (s390_tpf_eh_return_symbol
, NULL_RTX
, reg
,
8501 gen_rtx_REG (Pmode
, RETURN_REGNUM
));
8502 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), reg
);
8504 emit_move_insn (EH_RETURN_HANDLER_RTX
, reg
);
8507 #include "gt-s390.h"