1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option) any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
42 #include "basic-block.h"
43 #include "integrate.h"
46 #include "target-def.h"
48 #include "langhooks.h"
/* Forward declarations of the static functions that implement the
   TARGET_xxx hooks overridden below.  */
50 static bool s390_assemble_integer
PARAMS ((rtx
, unsigned int, int));
51 static int s390_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
52 static int s390_adjust_priority
PARAMS ((rtx
, int));
53 static void s390_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
54 unsigned HOST_WIDE_INT
));
55 static void s390_encode_section_info
PARAMS ((tree
, int));
/* Override the default target hooks with the S/390 implementations;
   these feed the TARGET_INITIALIZER used for `targetm' below.  */
57 #undef TARGET_ASM_ALIGNED_HI_OP
58 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
59 #undef TARGET_ASM_ALIGNED_DI_OP
60 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
61 #undef TARGET_ASM_INTEGER
62 #define TARGET_ASM_INTEGER s390_assemble_integer
64 #undef TARGET_ASM_FUNCTION_PROLOGUE
65 #define TARGET_ASM_FUNCTION_PROLOGUE s390_function_prologue
67 #undef TARGET_ASM_FUNCTION_EPILOGUE
68 #define TARGET_ASM_FUNCTION_EPILOGUE s390_function_epilogue
70 #undef TARGET_ASM_OPEN_PAREN
71 #define TARGET_ASM_OPEN_PAREN ""
73 #undef TARGET_ASM_CLOSE_PAREN
74 #define TARGET_ASM_CLOSE_PAREN ""
76 #undef TARGET_ASM_SELECT_RTX_SECTION
77 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
79 #undef TARGET_SCHED_ADJUST_COST
80 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
82 #undef TARGET_SCHED_ADJUST_PRIORITY
83 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
85 #undef TARGET_ENCODE_SECTION_INFO
86 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
/* The target hook vector; initialized from the TARGET_xxx macro
   overrides above.  */
88 struct gcc_target targetm
 = TARGET_INITIALIZER
;
/* Nonzero once the reload pass has completed (defined in the core
   compiler); tested by the operand predicates below.  */
90 extern int reload_completed
;
92 /* The alias set for prologue/epilogue register save/restore. */
93 static int s390_sr_alias_set
= 0;
95 /* Function count for creating unique internal labels in a compile unit. */
96 int s390_function_count
= 0;
98 /* Save information from a "cmpxx" operation until the branch or scc is emitted.  */
100 rtx s390_compare_op0
, s390_compare_op1
;
102 /* Structure used to hold the components of a S/390 memory
103 address. A legitimate address on S/390 is of the general form
105 base + index + displacement
106 where any of the components is optional.
108 base and index are registers of the class ADDR_REGS,
109 displacement is an unsigned 12-bit immediate constant. */
119 /* Structure containing information for prologue and epilogue. */
124 int return_reg_saved_p
;
127 int first_restore_gpr
;
129 int arg_frame_offset
;
131 HOST_WIDE_INT frame_size
;
/* Prototypes for the static helper functions defined later in this
   file (CC-mode matching, operand analysis, literal-pool handling,
   and prologue/epilogue support).  */
134 static int s390_match_ccmode_set
PARAMS ((rtx
, enum machine_mode
));
135 static int s390_branch_condition_mask
PARAMS ((rtx
));
136 static const char *s390_branch_condition_mnemonic
PARAMS ((rtx
, int));
137 static int check_mode
PARAMS ((rtx
, enum machine_mode
*));
138 static int general_s_operand
PARAMS ((rtx
, enum machine_mode
, int));
139 static int s390_decompose_address
PARAMS ((rtx
, struct s390_address
*, int));
140 static int reg_used_in_mem_p
PARAMS ((int, rtx
));
141 static int addr_generation_dependency_p
PARAMS ((rtx
, rtx
));
142 static void s390_split_branches
PARAMS ((void));
143 static void find_constant_pool_ref
PARAMS ((rtx
, rtx
*));
144 static void replace_constant_pool_ref
PARAMS ((rtx
*, rtx
, rtx
));
145 static void s390_chunkify_pool
PARAMS ((void));
146 static int save_fprs_p
PARAMS ((void));
147 static int find_unused_clobbered_reg
PARAMS ((void));
148 static void s390_frame_info
PARAMS ((struct s390_frame
*));
149 static rtx save_fpr
PARAMS ((rtx
, int, int));
150 static rtx restore_fpr
PARAMS ((rtx
, int, int));
151 static int s390_function_arg_size
PARAMS ((enum machine_mode
, tree
));
154 /* Return true if SET either doesn't set the CC register, or else
155 the source and destination have matching CC modes and that
156 CC mode is at least as constrained as REQ_MODE. */
159 s390_match_ccmode_set (set
, req_mode
)
161 enum machine_mode req_mode
;
163 enum machine_mode set_mode
;
165 if (GET_CODE (set
) != SET
)
168 if (GET_CODE (SET_DEST (set
)) != REG
|| !CC_REGNO_P (REGNO (SET_DEST (set
))))
171 set_mode
= GET_MODE (SET_DEST (set
));
184 if (req_mode
!= set_mode
)
189 if (req_mode
!= CCSmode
&& req_mode
!= CCUmode
&& req_mode
!= CCTmode
190 && req_mode
!= CCSRmode
&& req_mode
!= CCURmode
)
198 return (GET_MODE (SET_SRC (set
)) == set_mode
);
201 /* Return true if every SET in INSN that sets the CC register
202 has source and destination with matching CC modes and that
203 CC mode is at least as constrained as REQ_MODE.
204 If REQ_MODE is VOIDmode, always return false. */
207 s390_match_ccmode (insn
, req_mode
)
209 enum machine_mode req_mode
;
213 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
214 if (req_mode
== VOIDmode
)
217 if (GET_CODE (PATTERN (insn
)) == SET
)
218 return s390_match_ccmode_set (PATTERN (insn
), req_mode
);
220 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
221 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
223 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
224 if (GET_CODE (set
) == SET
)
225 if (!s390_match_ccmode_set (set
, req_mode
))
232 /* If a test-under-mask instruction can be used to implement
233 (compare (and ... OP1) OP2), return the CC mode required
234 to do that. Otherwise, return VOIDmode.
235 MIXED is true if the instruction can distinguish between
236 CC1 and CC2 for mixed selected bits (TMxx), it is false
237 if the instruction cannot (TM). */
240 s390_tm_ccmode (op1
, op2
, mixed
)
247 /* ??? Fixme: should work on CONST_DOUBLE as well. */
248 if (GET_CODE (op1
) != CONST_INT
|| GET_CODE (op2
) != CONST_INT
)
251 /* Selected bits all zero: CC0. */
252 if (INTVAL (op2
) == 0)
255 /* Selected bits all one: CC3. */
256 if (INTVAL (op2
) == INTVAL (op1
))
259 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
262 bit1
= exact_log2 (INTVAL (op2
));
263 bit0
= exact_log2 (INTVAL (op1
) ^ INTVAL (op2
));
264 if (bit0
!= -1 && bit1
!= -1)
265 return bit0
> bit1
? CCT1mode
: CCT2mode
;
271 /* Given a comparison code OP (EQ, NE, etc.) and the operands
272 OP0 and OP1 of a COMPARE, return the mode to be used for the comparison.  */
276 s390_select_ccmode (code
, op0
, op1
)
285 if (GET_CODE (op0
) == PLUS
|| GET_CODE (op0
) == MINUS
286 || GET_CODE (op1
) == NEG
)
289 if (GET_CODE (op0
) == AND
)
291 /* Check whether we can potentially do it via TM. */
292 enum machine_mode ccmode
;
293 ccmode
= s390_tm_ccmode (XEXP (op0
, 1), op1
, 1);
294 if (ccmode
!= VOIDmode
)
296 /* Relax CCTmode to CCZmode to allow fall-back to AND
297 if that turns out to be beneficial. */
298 return ccmode
== CCTmode
? CCZmode
: ccmode
;
302 if (register_operand (op0
, HImode
)
303 && GET_CODE (op1
) == CONST_INT
304 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 65535))
306 if (register_operand (op0
, QImode
)
307 && GET_CODE (op1
) == CONST_INT
308 && (INTVAL (op1
) == -1 || INTVAL (op1
) == 255))
325 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
326 && GET_CODE (op1
) != CONST_INT
)
332 if (GET_CODE (op0
) == PLUS
)
335 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
336 && GET_CODE (op1
) != CONST_INT
)
342 if (GET_CODE (op0
) == MINUS
)
345 if ((GET_CODE (op0
) == SIGN_EXTEND
|| GET_CODE (op0
) == ZERO_EXTEND
)
346 && GET_CODE (op1
) != CONST_INT
)
355 /* Return branch condition mask to implement a branch
356 specified by CODE. */
359 s390_branch_condition_mask (code
)
362 const int CC0
= 1 << 3;
363 const int CC1
= 1 << 2;
364 const int CC2
= 1 << 1;
365 const int CC3
= 1 << 0;
367 if (GET_CODE (XEXP (code
, 0)) != REG
368 || REGNO (XEXP (code
, 0)) != CC_REGNUM
369 || XEXP (code
, 1) != const0_rtx
)
372 switch (GET_MODE (XEXP (code
, 0)))
375 switch (GET_CODE (code
))
378 case NE
: return CC1
| CC2
| CC3
;
385 switch (GET_CODE (code
))
388 case NE
: return CC0
| CC2
| CC3
;
395 switch (GET_CODE (code
))
398 case NE
: return CC0
| CC1
| CC3
;
405 switch (GET_CODE (code
))
408 case NE
: return CC0
| CC1
| CC2
;
415 switch (GET_CODE (code
))
417 case EQ
: return CC0
| CC2
;
418 case NE
: return CC1
| CC3
;
425 switch (GET_CODE (code
))
427 case LTU
: return CC2
| CC3
; /* carry */
428 case GEU
: return CC0
| CC1
; /* no carry */
435 switch (GET_CODE (code
))
437 case GTU
: return CC0
| CC1
; /* borrow */
438 case LEU
: return CC2
| CC3
; /* no borrow */
445 switch (GET_CODE (code
))
448 case NE
: return CC1
| CC2
| CC3
;
449 case LTU
: return CC1
;
450 case GTU
: return CC2
;
451 case LEU
: return CC0
| CC1
;
452 case GEU
: return CC0
| CC2
;
459 switch (GET_CODE (code
))
462 case NE
: return CC2
| CC1
| CC3
;
463 case LTU
: return CC2
;
464 case GTU
: return CC1
;
465 case LEU
: return CC0
| CC2
;
466 case GEU
: return CC0
| CC1
;
473 switch (GET_CODE (code
))
476 case NE
: return CC1
| CC2
| CC3
;
479 case LE
: return CC0
| CC1
;
480 case GE
: return CC0
| CC2
;
481 case UNORDERED
: return CC3
;
482 case ORDERED
: return CC0
| CC1
| CC2
;
483 case UNEQ
: return CC0
| CC3
;
484 case UNLT
: return CC1
| CC3
;
485 case UNGT
: return CC2
| CC3
;
486 case UNLE
: return CC0
| CC1
| CC3
;
487 case UNGE
: return CC0
| CC2
| CC3
;
488 case LTGT
: return CC1
| CC2
;
495 switch (GET_CODE (code
))
498 case NE
: return CC2
| CC1
| CC3
;
501 case LE
: return CC0
| CC2
;
502 case GE
: return CC0
| CC1
;
503 case UNORDERED
: return CC3
;
504 case ORDERED
: return CC0
| CC2
| CC1
;
505 case UNEQ
: return CC0
| CC3
;
506 case UNLT
: return CC2
| CC3
;
507 case UNGT
: return CC1
| CC3
;
508 case UNLE
: return CC0
| CC2
| CC3
;
509 case UNGE
: return CC0
| CC1
| CC3
;
510 case LTGT
: return CC2
| CC1
;
521 /* If INV is false, return assembler mnemonic string to implement
522 a branch specified by CODE. If INV is true, return mnemonic
523 for the corresponding inverted branch. */
526 s390_branch_condition_mnemonic (code
, inv
)
530 static const char *const mnemonic
[16] =
532 NULL
, "o", "h", "nle",
533 "l", "nhe", "lh", "ne",
534 "e", "nlh", "he", "nl",
535 "le", "nh", "no", NULL
538 int mask
= s390_branch_condition_mask (code
);
543 if (mask
< 1 || mask
> 14)
546 return mnemonic
[mask
];
549 /* If OP is an integer constant of mode MODE with exactly one
550 HImode subpart unequal to DEF, return the number of that
551 subpart. As a special case, all HImode subparts of OP are
552 equal to DEF, return zero. Otherwise, return -1. */
555 s390_single_hi (op
, mode
, def
)
557 enum machine_mode mode
;
560 if (GET_CODE (op
) == CONST_INT
)
562 unsigned HOST_WIDE_INT value
;
563 int n_parts
= GET_MODE_SIZE (mode
) / 2;
566 for (i
= 0; i
< n_parts
; i
++)
569 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
573 if ((value
& 0xffff) != (unsigned)(def
& 0xffff))
582 return part
== -1 ? 0 : (n_parts
- 1 - part
);
585 else if (GET_CODE (op
) == CONST_DOUBLE
586 && GET_MODE (op
) == VOIDmode
)
588 unsigned HOST_WIDE_INT value
;
589 int n_parts
= GET_MODE_SIZE (mode
) / 2;
592 for (i
= 0; i
< n_parts
; i
++)
595 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
596 else if (i
== HOST_BITS_PER_WIDE_INT
/ 16)
597 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
);
601 if ((value
& 0xffff) != (unsigned)(def
& 0xffff))
610 return part
== -1 ? 0 : (n_parts
- 1 - part
);
616 /* Extract the HImode part number PART from integer
617 constant OP of mode MODE. */
620 s390_extract_hi (op
, mode
, part
)
622 enum machine_mode mode
;
625 int n_parts
= GET_MODE_SIZE (mode
) / 2;
626 if (part
< 0 || part
>= n_parts
)
629 part
= n_parts
- 1 - part
;
631 if (GET_CODE (op
) == CONST_INT
)
633 unsigned HOST_WIDE_INT value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
634 return ((value
>> (16 * part
)) & 0xffff);
636 else if (GET_CODE (op
) == CONST_DOUBLE
637 && GET_MODE (op
) == VOIDmode
)
639 unsigned HOST_WIDE_INT value
;
640 if (part
< HOST_BITS_PER_WIDE_INT
/ 16)
641 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
643 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
),
644 part
-= HOST_BITS_PER_WIDE_INT
/ 16;
646 return ((value
>> (16 * part
)) & 0xffff);
652 /* If OP is an integer constant of mode MODE with exactly one
653 QImode subpart unequal to DEF, return the number of that
654 subpart. As a special case, all QImode subparts of OP are
655 equal to DEF, return zero. Otherwise, return -1. */
658 s390_single_qi (op
, mode
, def
)
660 enum machine_mode mode
;
663 if (GET_CODE (op
) == CONST_INT
)
665 unsigned HOST_WIDE_INT value
;
666 int n_parts
= GET_MODE_SIZE (mode
);
669 for (i
= 0; i
< n_parts
; i
++)
672 value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
676 if ((value
& 0xff) != (unsigned)(def
& 0xff))
685 return part
== -1 ? 0 : (n_parts
- 1 - part
);
688 else if (GET_CODE (op
) == CONST_DOUBLE
689 && GET_MODE (op
) == VOIDmode
)
691 unsigned HOST_WIDE_INT value
;
692 int n_parts
= GET_MODE_SIZE (mode
);
695 for (i
= 0; i
< n_parts
; i
++)
698 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
699 else if (i
== HOST_BITS_PER_WIDE_INT
/ 8)
700 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
);
704 if ((value
& 0xff) != (unsigned)(def
& 0xff))
713 return part
== -1 ? 0 : (n_parts
- 1 - part
);
719 /* Extract the QImode part number PART from integer
720 constant OP of mode MODE. */
723 s390_extract_qi (op
, mode
, part
)
725 enum machine_mode mode
;
728 int n_parts
= GET_MODE_SIZE (mode
);
729 if (part
< 0 || part
>= n_parts
)
732 part
= n_parts
- 1 - part
;
734 if (GET_CODE (op
) == CONST_INT
)
736 unsigned HOST_WIDE_INT value
= (unsigned HOST_WIDE_INT
) INTVAL (op
);
737 return ((value
>> (8 * part
)) & 0xff);
739 else if (GET_CODE (op
) == CONST_DOUBLE
740 && GET_MODE (op
) == VOIDmode
)
742 unsigned HOST_WIDE_INT value
;
743 if (part
< HOST_BITS_PER_WIDE_INT
/ 8)
744 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_LOW (op
);
746 value
= (unsigned HOST_WIDE_INT
) CONST_DOUBLE_HIGH (op
),
747 part
-= HOST_BITS_PER_WIDE_INT
/ 8;
749 return ((value
>> (8 * part
)) & 0xff);
756 /* Change optimizations to be performed, depending on the optimization level.
759 LEVEL is the optimization level specified; 2 if `-O2' is
760 specified, 1 if `-O' is specified, and 0 if neither is specified.
762 SIZE is non-zero if `-Os' is specified and zero otherwise. */
765 optimization_options (level
, size
)
766 int level ATTRIBUTE_UNUSED
;
767 int size ATTRIBUTE_UNUSED
;
769 #ifdef HAVE_decrement_and_branch_on_count
770 /* When optimizing, enable use of BRCT instruction. */
772 flag_branch_on_count_reg
= 1;
779 /* Acquire a unique set number for our register saves and restores. */
780 s390_sr_alias_set
= new_alias_set ();
784 /* Map for smallest class containing reg regno. */
786 const enum reg_class regclass_map
[FIRST_PSEUDO_REGISTER
] =
787 { GENERAL_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
788 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
789 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
790 ADDR_REGS
, ADDR_REGS
, ADDR_REGS
, ADDR_REGS
,
791 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
792 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
793 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
794 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
795 ADDR_REGS
, NO_REGS
, ADDR_REGS
799 /* Return true if OP is a (const_int 0) operand.
800 OP is the current operation.
801 MODE is the current operation mode. */
804 const0_operand (op
, mode
)
806 enum machine_mode mode
;
808 return op
== CONST0_RTX (mode
);
811 /* Return true if OP is constant.
812 OP is the current operation.
813 MODE is the current operation mode. */
816 consttable_operand (op
, mode
)
818 enum machine_mode mode ATTRIBUTE_UNUSED
;
820 return CONSTANT_P (op
);
823 /* Return true if the mode of operand OP matches MODE.
824 If MODE is set to VOIDmode, set it to the mode of OP. */
827 check_mode (op
, mode
)
829 enum machine_mode
*mode
;
831 if (*mode
== VOIDmode
)
832 *mode
= GET_MODE (op
);
835 if (GET_MODE (op
) != VOIDmode
&& GET_MODE (op
) != *mode
)
841 /* Return true if OP is a valid operand for the LARL instruction.
842 OP is the current operation.
843 MODE is the current operation mode. */
846 larl_operand (op
, mode
)
848 enum machine_mode mode
;
850 if (! check_mode (op
, &mode
))
853 /* Allow labels and local symbols. */
854 if (GET_CODE (op
) == LABEL_REF
)
856 if (GET_CODE (op
) == SYMBOL_REF
857 && (!flag_pic
|| SYMBOL_REF_FLAG (op
)
858 || CONSTANT_POOL_ADDRESS_P (op
)))
861 /* Everything else must have a CONST, so strip it. */
862 if (GET_CODE (op
) != CONST
)
866 /* Allow adding *even* constants. */
867 if (GET_CODE (op
) == PLUS
)
869 if (GET_CODE (XEXP (op
, 1)) != CONST_INT
870 || (INTVAL (XEXP (op
, 1)) & 1) != 0)
875 /* Labels and local symbols allowed here as well. */
876 if (GET_CODE (op
) == LABEL_REF
)
878 if (GET_CODE (op
) == SYMBOL_REF
879 && (!flag_pic
|| SYMBOL_REF_FLAG (op
)
880 || CONSTANT_POOL_ADDRESS_P (op
)))
883 /* Now we must have a @GOTENT offset or @PLT stub. */
884 if (GET_CODE (op
) == UNSPEC
885 && XINT (op
, 1) == 111)
887 if (GET_CODE (op
) == UNSPEC
888 && XINT (op
, 1) == 113)
894 /* Return true if OP is a valid FP-Register.
895 OP is the current operation.
896 MODE is the current operation mode. */
899 fp_operand (op
, mode
)
901 enum machine_mode mode
;
903 register enum rtx_code code
= GET_CODE (op
);
904 if (! check_mode (op
, &mode
))
906 if (code
== REG
&& REGNO_OK_FOR_FP_P (REGNO (op
)))
912 /* Helper routine to implement s_operand and s_imm_operand.
913 OP is the current operation.
914 MODE is the current operation mode.
915 ALLOW_IMMEDIATE specifies whether immediate operands should
916 be accepted or not. */
919 general_s_operand (op
, mode
, allow_immediate
)
921 enum machine_mode mode
;
924 struct s390_address addr
;
926 /* Call general_operand first, so that we don't have to
927 check for many special cases. */
928 if (!general_operand (op
, mode
))
931 /* Just like memory_operand, allow (subreg (mem ...))
934 && GET_CODE (op
) == SUBREG
935 && GET_CODE (SUBREG_REG (op
)) == MEM
)
936 op
= SUBREG_REG (op
);
938 switch (GET_CODE (op
))
940 /* Constants that we are sure will be forced to the
941 literal pool in reload are OK as s-operand. Note
942 that we cannot call s390_preferred_reload_class here
943 because it might not be known yet at this point
944 whether the current function is a leaf or not. */
947 if (!allow_immediate
|| reload_completed
)
949 if (!legitimate_reload_constant_p (op
))
955 /* Memory operands are OK unless they already use an
958 if (GET_CODE (XEXP (op
, 0)) == ADDRESSOF
)
960 if (s390_decompose_address (XEXP (op
, 0), &addr
, FALSE
)
972 /* Return true if OP is a valid S-type operand.
973 OP is the current operation.
974 MODE is the current operation mode. */
979 enum machine_mode mode
;
981 return general_s_operand (op
, mode
, 0);
984 /* Return true if OP is a valid S-type operand or an immediate
985 operand that can be addressed as S-type operand by forcing
986 it into the literal pool.
987 OP is the current operation.
988 MODE is the current operation mode. */
991 s_imm_operand (op
, mode
)
993 enum machine_mode mode
;
995 return general_s_operand (op
, mode
, 1);
998 /* Return true if OP is a valid operand for a 'Q' constraint.
999 This differs from s_operand in that only memory operands
1000 without index register are accepted, nothing else. */
1006 struct s390_address addr
;
1008 if (GET_CODE (op
) != MEM
)
1011 if (!s390_decompose_address (XEXP (op
, 0), &addr
, FALSE
))
1020 /* Return true if OP is a valid operand for the BRAS instruction.
1021 OP is the current operation.
1022 MODE is the current operation mode. */
1025 bras_sym_operand (op
, mode
)
1027 enum machine_mode mode ATTRIBUTE_UNUSED
;
1029 register enum rtx_code code
= GET_CODE (op
);
1031 /* Allow SYMBOL_REFs. */
1032 if (code
== SYMBOL_REF
)
1035 /* Allow @PLT stubs. */
1037 && GET_CODE (XEXP (op
, 0)) == UNSPEC
1038 && XINT (XEXP (op
, 0), 1) == 113)
1044 /* Return true if OP is a load multiple operation. It is known to be a
1045 PARALLEL and the first section will be tested.
1046 OP is the current operation.
1047 MODE is the current operation mode. */
1050 load_multiple_operation (op
, mode
)
1052 enum machine_mode mode ATTRIBUTE_UNUSED
;
1054 int count
= XVECLEN (op
, 0);
1055 unsigned int dest_regno
;
1060 /* Perform a quick check so we don't blow up below. */
1062 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1063 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
1064 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
1067 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
1068 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
1070 /* Check, is base, or base + displacement. */
1072 if (GET_CODE (src_addr
) == REG
)
1074 else if (GET_CODE (src_addr
) == PLUS
1075 && GET_CODE (XEXP (src_addr
, 0)) == REG
1076 && GET_CODE (XEXP (src_addr
, 1)) == CONST_INT
)
1078 off
= INTVAL (XEXP (src_addr
, 1));
1079 src_addr
= XEXP (src_addr
, 0);
1084 if (src_addr
== frame_pointer_rtx
|| src_addr
== arg_pointer_rtx
)
1087 for (i
= 1; i
< count
; i
++)
1089 rtx elt
= XVECEXP (op
, 0, i
);
1091 if (GET_CODE (elt
) != SET
1092 || GET_CODE (SET_DEST (elt
)) != REG
1093 || GET_MODE (SET_DEST (elt
)) != Pmode
1094 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
1095 || GET_CODE (SET_SRC (elt
)) != MEM
1096 || GET_MODE (SET_SRC (elt
)) != Pmode
1097 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
1098 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
1099 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
1100 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1))
1101 != off
+ i
* UNITS_PER_WORD
)
1108 /* Return true if OP is a store multiple operation. It is known to be a
1109 PARALLEL and the first section will be tested.
1110 OP is the current operation.
1111 MODE is the current operation mode. */
1114 store_multiple_operation (op
, mode
)
1116 enum machine_mode mode ATTRIBUTE_UNUSED
;
1118 int count
= XVECLEN (op
, 0);
1119 unsigned int src_regno
;
1123 /* Perform a quick check so we don't blow up below. */
1125 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
1126 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
1127 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
1130 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
1131 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
1133 /* Check, is base, or base + displacement. */
1135 if (GET_CODE (dest_addr
) == REG
)
1137 else if (GET_CODE (dest_addr
) == PLUS
1138 && GET_CODE (XEXP (dest_addr
, 0)) == REG
1139 && GET_CODE (XEXP (dest_addr
, 1)) == CONST_INT
)
1141 off
= INTVAL (XEXP (dest_addr
, 1));
1142 dest_addr
= XEXP (dest_addr
, 0);
1147 if (dest_addr
== frame_pointer_rtx
|| dest_addr
== arg_pointer_rtx
)
1150 for (i
= 1; i
< count
; i
++)
1152 rtx elt
= XVECEXP (op
, 0, i
);
1154 if (GET_CODE (elt
) != SET
1155 || GET_CODE (SET_SRC (elt
)) != REG
1156 || GET_MODE (SET_SRC (elt
)) != Pmode
1157 || REGNO (SET_SRC (elt
)) != src_regno
+ i
1158 || GET_CODE (SET_DEST (elt
)) != MEM
1159 || GET_MODE (SET_DEST (elt
)) != Pmode
1160 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
1161 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
1162 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
1163 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1))
1164 != off
+ i
* UNITS_PER_WORD
)
1171 /* Return true if OP contains a symbol reference */
1174 symbolic_reference_mentioned_p (op
)
1177 register const char *fmt
;
1180 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1183 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1184 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1190 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1191 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1195 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1203 /* Return true if OP is a legitimate general operand when
1204 generating PIC code. It is given that flag_pic is on
1205 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1208 legitimate_pic_operand_p (op
)
1211 /* Accept all non-symbolic constants. */
1212 if (!SYMBOLIC_CONST (op
))
1215 /* Reject everything else; must be handled
1216 via emit_pic_move. */
1220 /* Returns true if the constant value OP is a legitimate general operand.
1221 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1224 legitimate_constant_p (op
)
1227 /* Accept all non-symbolic constants. */
1228 if (!SYMBOLIC_CONST (op
))
1231 /* In the PIC case, symbolic constants must *not* be
1232 forced into the literal pool. We accept them here,
1233 so that they will be handled by emit_pic_move. */
1237 /* Even in the non-PIC case, we can accept immediate
1238 LARL operands here. */
1240 return larl_operand (op
, VOIDmode
);
1242 /* All remaining non-PIC symbolic constants are
1243 forced into the literal pool. */
1247 /* Returns true if the constant value OP is a legitimate general
1248 operand during and after reload. The difference to
1249 legitimate_constant_p is that this function will not accept
1250 a constant that would need to be forced to the literal pool
1251 before it can be used as operand. */
1254 legitimate_reload_constant_p (op
)
1257 /* Accept l(g)hi operands. */
1258 if (GET_CODE (op
) == CONST_INT
1259 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'K'))
1262 /* Accept lliXX operands. */
1264 && s390_single_hi (op
, DImode
, 0) >= 0)
1267 /* Accept larl operands. */
1269 && larl_operand (op
, VOIDmode
))
1272 /* If reload is completed, and we do not already have a
1273 literal pool, and OP must be forced to the literal
1274 pool, then something must have gone wrong earlier.
1275 We *cannot* force the constant any more, because the
1276 prolog generation already decided we don't need to
1277 set up the base register. */
1278 if (reload_completed
&& !regs_ever_live
[BASE_REGISTER
])
1281 /* Everything else cannot be handled without reload. */
1285 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1286 return the class of reg to actually use. */
1289 s390_preferred_reload_class (op
, class)
1291 enum reg_class
class;
1293 /* This can happen if a floating point constant is being
1294 reloaded into an integer register. Leave well alone. */
1295 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1296 && class != FP_REGS
)
1299 switch (GET_CODE (op
))
1301 /* Constants we cannot reload must be forced into the
1302 literal pool. For constants we *could* handle directly,
1303 it might still be preferable to put them in the pool and
1304 use a memory-to-memory instruction.
1306 However, try to avoid needlessly allocating a literal
1307 pool in a routine that wouldn't otherwise need any.
1308 Heuristically, we assume that 64-bit leaf functions
1309 typically don't need a literal pool, all others do. */
1312 if (!legitimate_reload_constant_p (op
))
1315 if (TARGET_64BIT
&& current_function_is_leaf
)
1320 /* If a symbolic constant or a PLUS is reloaded,
1321 it is most likely being used as an address, so
1322 prefer ADDR_REGS. If 'class' is not a superset
1323 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1328 if (reg_class_subset_p (ADDR_REGS
, class))
1340 /* Return the register class of a scratch register needed to
1341 load IN into a register of class CLASS in MODE.
1343 We need a temporary when loading a PLUS expression which
1344 is not a legitimate operand of the LOAD ADDRESS instruction. */
1347 s390_secondary_input_reload_class (class, mode
, in
)
1348 enum reg_class
class ATTRIBUTE_UNUSED
;
1349 enum machine_mode mode
;
1352 if (s390_plus_operand (in
, mode
))
1358 /* Return true if OP is a PLUS that is not a legitimate
1359 operand for the LA instruction.
1360 OP is the current operation.
1361 MODE is the current operation mode. */
1364 s390_plus_operand (op
, mode
)
1366 enum machine_mode mode
;
1368 if (!check_mode (op
, &mode
) || mode
!= Pmode
)
1371 if (GET_CODE (op
) != PLUS
)
1374 if (legitimate_la_operand_p (op
))
1380 /* Generate code to load SRC, which is PLUS that is not a
1381 legitimate operand for the LA instruction, into TARGET.
1382 SCRATCH may be used as scratch register. */
1385 s390_expand_plus_operand (target
, src
, scratch_in
)
1386 register rtx target
;
1388 register rtx scratch_in
;
1390 rtx sum1
, sum2
, scratch
;
1392 /* ??? reload apparently does not ensure that the scratch register
1393 and the target do not overlap. We absolutely require this to be
1394 the case, however. Therefore the reload_in[sd]i patterns ask for
1395 a double-sized scratch register, and if one part happens to be
1396 equal to the target, we use the other one. */
1397 scratch
= gen_rtx_REG (Pmode
, REGNO (scratch_in
));
1398 if (rtx_equal_p (scratch
, target
))
1399 scratch
= gen_rtx_REG (Pmode
, REGNO (scratch_in
) + 1);
1401 /* src must be a PLUS; get its two operands. */
1402 if (GET_CODE (src
) != PLUS
|| GET_MODE (src
) != Pmode
)
1405 /* Check if any of the two operands is already scheduled
1406 for replacement by reload. This can happen e.g. when
1407 float registers occur in an address. */
1408 sum1
= find_replacement (&XEXP (src
, 0));
1409 sum2
= find_replacement (&XEXP (src
, 1));
1411 /* Accept already valid addresses. */
1412 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
1413 if (s390_decompose_address (src
, NULL
, 1))
1415 src
= legitimize_la_operand (src
);
1416 emit_insn (gen_rtx_SET (VOIDmode
, target
, src
));
1420 /* If one of the two operands is equal to the target,
1421 make it the first one. If one is a constant, make
1422 it the second one. */
1423 if (rtx_equal_p (target
, sum2
)
1424 || GET_CODE (sum1
) == CONST_INT
)
1431 /* If the first operand is not an address register,
1432 we reload it into the target. */
1433 if (true_regnum (sum1
) < 1 || true_regnum (sum1
) > 15)
1435 emit_move_insn (target
, sum1
);
1439 /* Likewise for the second operand. However, take
1440 care not to clobber the target if we already used
1441 it for the first operand. Use the scratch instead.
1442 Also, allow an immediate offset if it is in range. */
1443 if ((true_regnum (sum2
) < 1 || true_regnum (sum2
) > 15)
1444 && !(GET_CODE (sum2
) == CONST_INT
1445 && INTVAL (sum2
) >= 0 && INTVAL (sum2
) < 4096))
1447 if (!rtx_equal_p (target
, sum1
))
1449 emit_move_insn (target
, sum2
);
1454 emit_move_insn (scratch
, sum2
);
1459 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1460 is only ever performed on addresses, so we can mark the
1461 sum as legitimate for LA in any case. */
1462 src
= gen_rtx_PLUS (Pmode
, sum1
, sum2
);
1463 src
= legitimize_la_operand (src
);
1464 emit_insn (gen_rtx_SET (VOIDmode
, target
, src
));
1468 /* Decompose a RTL expression ADDR for a memory address into
1469 its components, returned in OUT. The boolean STRICT
1470 specifies whether strict register checking applies.
1471 Returns 0 if ADDR is not a valid memory address, nonzero
1472 otherwise. If OUT is NULL, don't return the components,
1473 but check for validity only.
1475 Note: Only addresses in canonical form are recognized.
1476 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1477 canonical form so that they will be recognized. */
1480 s390_decompose_address (addr
, out
, strict
)
1482 struct s390_address
*out
;
1485 rtx base
= NULL_RTX
;
1486 rtx indx
= NULL_RTX
;
1487 rtx disp
= NULL_RTX
;
1488 int pointer
= FALSE
;
1490 /* Decompose address into base + index + displacement. */
1492 if (GET_CODE (addr
) == REG
|| GET_CODE (addr
) == UNSPEC
)
1495 else if (GET_CODE (addr
) == PLUS
)
1497 rtx op0
= XEXP (addr
, 0);
1498 rtx op1
= XEXP (addr
, 1);
1499 enum rtx_code code0
= GET_CODE (op0
);
1500 enum rtx_code code1
= GET_CODE (op1
);
1502 if (code0
== REG
|| code0
== UNSPEC
)
1504 if (code1
== REG
|| code1
== UNSPEC
)
1506 indx
= op0
; /* index + base */
1512 base
= op0
; /* base + displacement */
1517 else if (code0
== PLUS
)
1519 indx
= XEXP (op0
, 0); /* index + base + disp */
1520 base
= XEXP (op0
, 1);
1531 disp
= addr
; /* displacement */
1534 /* Validate base register. */
1537 if (GET_CODE (base
) == UNSPEC
)
1539 if (XVECLEN (base
, 0) != 1 || XINT (base
, 1) != 101)
1541 base
= XVECEXP (base
, 0, 0);
1545 if (GET_CODE (base
) != REG
|| GET_MODE (base
) != Pmode
)
1548 if ((strict
&& ! REG_OK_FOR_BASE_STRICT_P (base
))
1549 || (! strict
&& ! REG_OK_FOR_BASE_NONSTRICT_P (base
)))
1552 if (REGNO (base
) == BASE_REGISTER
1553 || REGNO (base
) == STACK_POINTER_REGNUM
1554 || REGNO (base
) == FRAME_POINTER_REGNUM
1555 || ((reload_completed
|| reload_in_progress
)
1556 && frame_pointer_needed
1557 && REGNO (base
) == HARD_FRAME_POINTER_REGNUM
)
1559 && REGNO (base
) == PIC_OFFSET_TABLE_REGNUM
))
1563 /* Validate index register. */
1566 if (GET_CODE (indx
) == UNSPEC
)
1568 if (XVECLEN (indx
, 0) != 1 || XINT (indx
, 1) != 101)
1570 indx
= XVECEXP (indx
, 0, 0);
1574 if (GET_CODE (indx
) != REG
|| GET_MODE (indx
) != Pmode
)
1577 if ((strict
&& ! REG_OK_FOR_BASE_STRICT_P (indx
))
1578 || (! strict
&& ! REG_OK_FOR_BASE_NONSTRICT_P (indx
)))
1581 if (REGNO (indx
) == BASE_REGISTER
1582 || REGNO (indx
) == STACK_POINTER_REGNUM
1583 || REGNO (indx
) == FRAME_POINTER_REGNUM
1584 || ((reload_completed
|| reload_in_progress
)
1585 && frame_pointer_needed
1586 && REGNO (indx
) == HARD_FRAME_POINTER_REGNUM
)
1588 && REGNO (indx
) == PIC_OFFSET_TABLE_REGNUM
))
1592 /* Validate displacement. */
1595 /* Allow integer constant in range. */
1596 if (GET_CODE (disp
) == CONST_INT
)
1598 if (INTVAL (disp
) < 0 || INTVAL (disp
) >= 4096)
1602 /* In the small-PIC case, the linker converts @GOT12
1603 offsets to possible displacements. */
1604 else if (GET_CODE (disp
) == CONST
1605 && GET_CODE (XEXP (disp
, 0)) == UNSPEC
1606 && XINT (XEXP (disp
, 0), 1) == 110)
1614 /* Accept chunkfied literal pool symbol references. */
1615 else if (GET_CODE (disp
) == CONST
1616 && GET_CODE (XEXP (disp
, 0)) == MINUS
1617 && GET_CODE (XEXP (XEXP (disp
, 0), 0)) == LABEL_REF
1618 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == LABEL_REF
)
1623 /* Likewise if a constant offset is present. */
1624 else if (GET_CODE (disp
) == CONST
1625 && GET_CODE (XEXP (disp
, 0)) == PLUS
1626 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == CONST_INT
1627 && GET_CODE (XEXP (XEXP (disp
, 0), 0)) == MINUS
1628 && GET_CODE (XEXP (XEXP (XEXP (disp
, 0), 0), 0)) == LABEL_REF
1629 && GET_CODE (XEXP (XEXP (XEXP (disp
, 0), 0), 1)) == LABEL_REF
)
1634 /* We can convert literal pool addresses to
1635 displacements by basing them off the base register. */
1638 /* In some cases, we can accept an additional
1639 small constant offset. Split these off here. */
1641 unsigned int offset
= 0;
1643 if (GET_CODE (disp
) == CONST
1644 && GET_CODE (XEXP (disp
, 0)) == PLUS
1645 && GET_CODE (XEXP (XEXP (disp
, 0), 1)) == CONST_INT
)
1647 offset
= INTVAL (XEXP (XEXP (disp
, 0), 1));
1648 disp
= XEXP (XEXP (disp
, 0), 0);
1651 /* Now we must have a literal pool address. */
1652 if (GET_CODE (disp
) != SYMBOL_REF
1653 || !CONSTANT_POOL_ADDRESS_P (disp
))
1656 /* In 64-bit PIC mode we cannot accept symbolic
1657 constants in the constant pool. */
1658 if (TARGET_64BIT
&& flag_pic
1659 && SYMBOLIC_CONST (get_pool_constant (disp
)))
1662 /* If we have an offset, make sure it does not
1663 exceed the size of the constant pool entry. */
1664 if (offset
&& offset
>= GET_MODE_SIZE (get_pool_mode (disp
)))
1667 /* Either base or index must be free to
1668 hold the base register. */
1672 /* Convert the address. */
1674 indx
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
1676 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
1678 disp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, disp
), 100);
1679 disp
= gen_rtx_CONST (Pmode
, disp
);
1682 disp
= plus_constant (disp
, offset
);
1696 out
->pointer
= pointer
;
1702 /* Return nonzero if ADDR is a valid memory address.
1703 STRICT specifies whether strict register checking applies. */
1706 legitimate_address_p (mode
, addr
, strict
)
1707 enum machine_mode mode ATTRIBUTE_UNUSED
;
1711 return s390_decompose_address (addr
, NULL
, strict
);
1714 /* Return 1 if OP is a valid operand for the LA instruction.
1715 In 31-bit, we need to prove that the result is used as an
1716 address, as LA performs only a 31-bit addition. */
1719 legitimate_la_operand_p (op
)
1722 struct s390_address addr
;
1723 if (!s390_decompose_address (op
, &addr
, FALSE
))
1726 if (TARGET_64BIT
|| addr
.pointer
)
1732 /* Return a modified variant of OP that is guaranteed to
1733 be accepted by legitimate_la_operand_p. */
1736 legitimize_la_operand (op
)
1739 struct s390_address addr
;
1740 if (!s390_decompose_address (op
, &addr
, FALSE
))
1743 if (TARGET_64BIT
|| addr
.pointer
)
1749 op
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
.base
), 101);
1751 op
= gen_rtx_PLUS (Pmode
, op
, addr
.indx
);
1753 op
= gen_rtx_PLUS (Pmode
, op
, addr
.disp
);
1758 /* Return a legitimate reference for ORIG (an address) using the
1759 register REG. If REG is 0, a new pseudo is generated.
1761 There are two types of references that must be handled:
1763 1. Global data references must load the address from the GOT, via
1764 the PIC reg. An insn is emitted to do this load, and the reg is
1767 2. Static data references, constant pool addresses, and code labels
1768 compute the address as an offset from the GOT, whose base is in
1769 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1770 differentiate them from global data objects. The returned
1771 address is the PIC reg + an unspec constant.
1773 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1774 reg also appears in the address. */
1777 legitimize_pic_address (orig
, reg
)
1785 if (GET_CODE (addr
) == LABEL_REF
1786 || (GET_CODE (addr
) == SYMBOL_REF
1787 && (SYMBOL_REF_FLAG (addr
)
1788 || CONSTANT_POOL_ADDRESS_P (addr
))))
1790 /* This is a local symbol. */
1793 /* Access local symbols PC-relative via LARL.
1794 This is the same as in the non-PIC case, so it is
1795 handled automatically ... */
1799 /* Access local symbols relative to the literal pool. */
1801 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
1803 addr
= gen_rtx_UNSPEC (SImode
, gen_rtvec (1, addr
), 100);
1804 addr
= gen_rtx_CONST (SImode
, addr
);
1805 addr
= force_const_mem (SImode
, addr
);
1806 emit_move_insn (temp
, addr
);
1808 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
1809 base
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, base
), 101);
1810 new = gen_rtx_PLUS (Pmode
, base
, temp
);
1814 emit_move_insn (reg
, new);
1819 else if (GET_CODE (addr
) == SYMBOL_REF
)
1822 reg
= gen_reg_rtx (Pmode
);
1826 /* Assume GOT offset < 4k. This is handled the same way
1827 in both 31- and 64-bit code (@GOT12). */
1829 current_function_uses_pic_offset_table
= 1;
1831 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), 110);
1832 new = gen_rtx_CONST (Pmode
, new);
1833 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, new);
1834 new = gen_rtx_MEM (Pmode
, new);
1835 RTX_UNCHANGING_P (new) = 1;
1836 emit_move_insn (reg
, new);
1839 else if (TARGET_64BIT
)
1841 /* If the GOT offset might be >= 4k, we determine the position
1842 of the GOT entry via a PC-relative LARL (@GOTENT). */
1844 rtx temp
= gen_reg_rtx (Pmode
);
1846 new = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), 111);
1847 new = gen_rtx_CONST (Pmode
, new);
1848 emit_move_insn (temp
, new);
1850 new = gen_rtx_MEM (Pmode
, temp
);
1851 RTX_UNCHANGING_P (new) = 1;
1852 emit_move_insn (reg
, new);
1857 /* If the GOT offset might be >= 4k, we have to load it
1858 from the literal pool (@GOT). */
1860 rtx temp
= gen_reg_rtx (Pmode
);
1862 current_function_uses_pic_offset_table
= 1;
1864 addr
= gen_rtx_UNSPEC (SImode
, gen_rtvec (1, addr
), 112);
1865 addr
= gen_rtx_CONST (SImode
, addr
);
1866 addr
= force_const_mem (SImode
, addr
);
1867 emit_move_insn (temp
, addr
);
1869 new = gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
, temp
);
1870 new = gen_rtx_MEM (Pmode
, new);
1871 RTX_UNCHANGING_P (new) = 1;
1872 emit_move_insn (reg
, new);
1878 if (GET_CODE (addr
) == CONST
)
1880 addr
= XEXP (addr
, 0);
1881 if (GET_CODE (addr
) == UNSPEC
)
1883 if (XVECLEN (addr
, 0) != 1)
1885 switch (XINT (addr
, 1))
1887 /* If someone moved an @GOT or lt-relative UNSPEC
1888 out of the literal pool, force them back in. */
1892 new = force_const_mem (SImode
, orig
);
1895 /* @GOTENT is OK as is. */
1899 /* @PLT is OK as is on 64-bit, must be converted to
1900 lt-relative PLT on 31-bit. */
1904 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
1906 addr
= XVECEXP (addr
, 0, 0);
1907 addr
= gen_rtx_UNSPEC (SImode
, gen_rtvec (1, addr
), 114);
1908 addr
= gen_rtx_CONST (SImode
, addr
);
1909 addr
= force_const_mem (SImode
, addr
);
1910 emit_move_insn (temp
, addr
);
1912 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
1913 base
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, base
), 101);
1914 new = gen_rtx_PLUS (Pmode
, base
, temp
);
1918 emit_move_insn (reg
, new);
1924 /* Everything else cannot happen. */
1929 else if (GET_CODE (addr
) != PLUS
)
1932 if (GET_CODE (addr
) == PLUS
)
1934 rtx op0
= XEXP (addr
, 0), op1
= XEXP (addr
, 1);
1935 /* Check first to see if this is a constant offset
1936 from a local symbol reference. */
1937 if ((GET_CODE (op0
) == LABEL_REF
1938 || (GET_CODE (op0
) == SYMBOL_REF
1939 && (SYMBOL_REF_FLAG (op0
)
1940 || CONSTANT_POOL_ADDRESS_P (op0
))))
1941 && GET_CODE (op1
) == CONST_INT
)
1945 if (INTVAL (op1
) & 1)
1947 /* LARL can't handle odd offsets, so emit a
1948 pair of LARL and LA. */
1949 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
1951 if (INTVAL (op1
) < 0 || INTVAL (op1
) >= 4096)
1953 int even
= INTVAL (op1
) - 1;
1954 op0
= gen_rtx_PLUS (Pmode
, op0
, GEN_INT (even
));
1955 op0
= gen_rtx_CONST (Pmode
, op0
);
1959 emit_move_insn (temp
, op0
);
1960 new = gen_rtx_PLUS (Pmode
, temp
, op1
);
1964 emit_move_insn (reg
, new);
1970 /* If the offset is even, we can just use LARL.
1971 This will happen automatically. */
1976 /* Access local symbols relative to the literal pool. */
1978 rtx temp
= reg
? reg
: gen_reg_rtx (Pmode
);
1980 addr
= gen_rtx_UNSPEC (SImode
, gen_rtvec (1, op0
), 100);
1981 addr
= gen_rtx_PLUS (SImode
, addr
, op1
);
1982 addr
= gen_rtx_CONST (SImode
, addr
);
1983 addr
= force_const_mem (SImode
, addr
);
1984 emit_move_insn (temp
, addr
);
1986 base
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
1987 base
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, base
), 101);
1988 new = gen_rtx_PLUS (Pmode
, base
, temp
);
1992 emit_move_insn (reg
, new);
1998 /* Now, check whether it is an LT-relative symbol plus offset
1999 that was pulled out of the literal pool. Force it back in. */
2001 else if (GET_CODE (op0
) == UNSPEC
2002 && GET_CODE (op1
) == CONST_INT
)
2004 if (XVECLEN (op0
, 0) != 1)
2006 if (XINT (op0
, 1) != 100)
2009 new = force_const_mem (SImode
, orig
);
2012 /* Otherwise, compute the sum. */
2015 base
= legitimize_pic_address (XEXP (addr
, 0), reg
);
2016 new = legitimize_pic_address (XEXP (addr
, 1),
2017 base
== reg
? NULL_RTX
: reg
);
2018 if (GET_CODE (new) == CONST_INT
)
2019 new = plus_constant (base
, INTVAL (new));
2022 if (GET_CODE (new) == PLUS
&& CONSTANT_P (XEXP (new, 1)))
2024 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (new, 0));
2025 new = XEXP (new, 1);
2027 new = gen_rtx_PLUS (Pmode
, base
, new);
2030 if (GET_CODE (new) == CONST
)
2031 new = XEXP (new, 0);
2032 new = force_operand (new, 0);
2039 /* Emit insns to move operands[1] into operands[0]. */
2042 emit_pic_move (operands
, mode
)
2044 enum machine_mode mode ATTRIBUTE_UNUSED
;
2046 rtx temp
= no_new_pseudos
? operands
[0] : gen_reg_rtx (Pmode
);
2048 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
2049 operands
[1] = force_reg (Pmode
, operands
[1]);
2051 operands
[1] = legitimize_pic_address (operands
[1], temp
);
2054 /* Try machine-dependent ways of modifying an illegitimate address X
2055 to be legitimate. If we find one, return the new, valid address.
2057 OLDX is the address as it was before break_out_memory_refs was called.
2058 In some cases it is useful to look at this to decide what needs to be done.
2060 MODE is the mode of the operand pointed to by X.
2062 When -fpic is used, special handling is needed for symbolic references.
2063 See comments by legitimize_pic_address for details. */
2066 legitimize_address (x
, oldx
, mode
)
2068 register rtx oldx ATTRIBUTE_UNUSED
;
2069 enum machine_mode mode ATTRIBUTE_UNUSED
;
2071 rtx constant_term
= const0_rtx
;
2075 if (SYMBOLIC_CONST (x
)
2076 || (GET_CODE (x
) == PLUS
2077 && (SYMBOLIC_CONST (XEXP (x
, 0))
2078 || SYMBOLIC_CONST (XEXP (x
, 1)))))
2079 x
= legitimize_pic_address (x
, 0);
2081 if (legitimate_address_p (mode
, x
, FALSE
))
2085 x
= eliminate_constant_term (x
, &constant_term
);
2087 if (GET_CODE (x
) == PLUS
)
2089 if (GET_CODE (XEXP (x
, 0)) == REG
)
2091 register rtx temp
= gen_reg_rtx (Pmode
);
2092 register rtx val
= force_operand (XEXP (x
, 1), temp
);
2094 emit_move_insn (temp
, val
);
2096 x
= gen_rtx_PLUS (Pmode
, XEXP (x
, 0), temp
);
2099 else if (GET_CODE (XEXP (x
, 1)) == REG
)
2101 register rtx temp
= gen_reg_rtx (Pmode
);
2102 register rtx val
= force_operand (XEXP (x
, 0), temp
);
2104 emit_move_insn (temp
, val
);
2106 x
= gen_rtx_PLUS (Pmode
, temp
, XEXP (x
, 1));
2110 if (constant_term
!= const0_rtx
)
2111 x
= gen_rtx_PLUS (Pmode
, x
, constant_term
);
2116 /* In the name of slightly smaller debug output, and to cater to
2117 general assembler losage, recognize various UNSPEC sequences
2118 and turn them back into a direct symbol reference. */
2121 s390_simplify_dwarf_addr (orig_x
)
2126 if (GET_CODE (x
) != MEM
)
2130 if (GET_CODE (x
) == PLUS
2131 && GET_CODE (XEXP (x
, 1)) == CONST
2132 && GET_CODE (XEXP (x
, 0)) == REG
2133 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
2135 y
= XEXP (XEXP (x
, 1), 0);
2136 if (GET_CODE (y
) == UNSPEC
2137 && XINT (y
, 1) == 110)
2138 return XVECEXP (y
, 0, 0);
2142 if (GET_CODE (x
) == CONST
)
2145 if (GET_CODE (y
) == UNSPEC
2146 && XINT (y
, 1) == 111)
2147 return XVECEXP (y
, 0, 0);
2154 /* Output symbolic constant X in assembler syntax to
2155 stdio stream FILE. */
2158 s390_output_symbolic_const (file
, x
)
2162 switch (GET_CODE (x
))
2167 s390_output_symbolic_const (file
, XEXP (x
, 0));
2171 s390_output_symbolic_const (file
, XEXP (x
, 0));
2172 fprintf (file
, "+");
2173 s390_output_symbolic_const (file
, XEXP (x
, 1));
2177 s390_output_symbolic_const (file
, XEXP (x
, 0));
2178 fprintf (file
, "-");
2179 s390_output_symbolic_const (file
, XEXP (x
, 1));
2186 output_addr_const (file
, x
);
2190 if (XVECLEN (x
, 0) != 1)
2191 output_operand_lossage ("invalid UNSPEC as operand (1)");
2192 switch (XINT (x
, 1))
2195 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
2196 fprintf (file
, "-.LT%X", s390_function_count
);
2199 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
2200 fprintf (file
, "@GOT12");
2203 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
2204 fprintf (file
, "@GOTENT");
2207 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
2208 fprintf (file
, "@GOT");
2211 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
2212 fprintf (file
, "@PLT");
2215 s390_output_symbolic_const (file
, XVECEXP (x
, 0, 0));
2216 fprintf (file
, "@PLT-.LT%X", s390_function_count
);
2219 output_operand_lossage ("invalid UNSPEC as operand (2)");
2225 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x
);
2230 /* Output address operand ADDR in assembler syntax to
2231 stdio stream FILE. */
2234 print_operand_address (file
, addr
)
2238 struct s390_address ad
;
2240 if (!s390_decompose_address (addr
, &ad
, TRUE
))
2241 output_operand_lossage ("Cannot decompose address.");
2244 s390_output_symbolic_const (file
, ad
.disp
);
2246 fprintf (file
, "0");
2248 if (ad
.base
&& ad
.indx
)
2249 fprintf (file
, "(%s,%s)", reg_names
[REGNO (ad
.indx
)],
2250 reg_names
[REGNO (ad
.base
)]);
2252 fprintf (file
, "(%s)", reg_names
[REGNO (ad
.base
)]);
2255 /* Output operand X in assembler syntax to stdio stream FILE.
2256 CODE specified the format flag. The following format flags
2259 'C': print opcode suffix for branch condition.
2260 'D': print opcode suffix for inverse branch condition.
2261 'O': print only the displacement of a memory reference.
2262 'R': print only the base register of a memory reference.
2263 'N': print the second word of a DImode operand.
2264 'M': print the second word of a TImode operand.
2266 'b': print integer X as if it's an unsigned byte.
2267 'x': print integer X as if it's an unsigned word.
2268 'h': print integer X as if it's a signed word. */
2271 print_operand (file
, x
, code
)
2279 fprintf (file
, s390_branch_condition_mnemonic (x
, FALSE
));
2283 fprintf (file
, s390_branch_condition_mnemonic (x
, TRUE
));
2288 struct s390_address ad
;
2290 if (GET_CODE (x
) != MEM
2291 || !s390_decompose_address (XEXP (x
, 0), &ad
, TRUE
)
2296 s390_output_symbolic_const (file
, ad
.disp
);
2298 fprintf (file
, "0");
2304 struct s390_address ad
;
2306 if (GET_CODE (x
) != MEM
2307 || !s390_decompose_address (XEXP (x
, 0), &ad
, TRUE
)
2312 fprintf (file
, "%s", reg_names
[REGNO (ad
.base
)]);
2314 fprintf (file
, "0");
2319 if (GET_CODE (x
) == REG
)
2320 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
2321 else if (GET_CODE (x
) == MEM
)
2322 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 4));
2328 if (GET_CODE (x
) == REG
)
2329 x
= gen_rtx_REG (GET_MODE (x
), REGNO (x
) + 1);
2330 else if (GET_CODE (x
) == MEM
)
2331 x
= change_address (x
, VOIDmode
, plus_constant (XEXP (x
, 0), 8));
2337 switch (GET_CODE (x
))
2340 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
2344 output_address (XEXP (x
, 0));
2351 s390_output_symbolic_const (file
, x
);
2356 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xff);
2357 else if (code
== 'x')
2358 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) & 0xffff);
2359 else if (code
== 'h')
2360 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((INTVAL (x
) & 0xffff) ^ 0x8000) - 0x8000);
2362 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
));
2366 if (GET_MODE (x
) != VOIDmode
)
2369 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xff);
2370 else if (code
== 'x')
2371 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, CONST_DOUBLE_LOW (x
) & 0xffff);
2372 else if (code
== 'h')
2373 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ((CONST_DOUBLE_LOW (x
) & 0xffff) ^ 0x8000) - 0x8000);
2379 fatal_insn ("UNKNOWN in print_operand !?", x
);
2384 /* Target hook for assembling integer objects. We need to define it
2385 here to work a round a bug in some versions of GAS, which couldn't
2386 handle values smaller than INT_MIN when printed in decimal. */
2389 s390_assemble_integer (x
, size
, aligned_p
)
2394 if (size
== 8 && aligned_p
2395 && GET_CODE (x
) == CONST_INT
&& INTVAL (x
) < INT_MIN
)
2397 fputs ("\t.quad\t", asm_out_file
);
2398 fprintf (asm_out_file
, HOST_WIDE_INT_PRINT_HEX
, INTVAL (x
));
2399 putc ('\n', asm_out_file
);
2402 return default_assemble_integer (x
, size
, aligned_p
);
2406 #define DEBUG_SCHED 0
2408 /* Returns true if register REGNO is used for forming
2409 a memory address in expression X. */
2412 reg_used_in_mem_p (regno
, x
)
2416 enum rtx_code code
= GET_CODE (x
);
2422 if (refers_to_regno_p (regno
, regno
+1,
2426 else if (code
== SET
2427 && GET_CODE (SET_DEST (x
)) == PC
)
2429 if (refers_to_regno_p (regno
, regno
+1,
2434 fmt
= GET_RTX_FORMAT (code
);
2435 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2438 && reg_used_in_mem_p (regno
, XEXP (x
, i
)))
2441 else if (fmt
[i
] == 'E')
2442 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2443 if (reg_used_in_mem_p (regno
, XVECEXP (x
, i
, j
)))
2449 /* Returns true if expression DEP_RTX sets an address register
2450 used by instruction INSN to address memory. */
2453 addr_generation_dependency_p (dep_rtx
, insn
)
2459 if (GET_CODE (dep_rtx
) == SET
)
2461 target
= SET_DEST (dep_rtx
);
2463 if (GET_CODE (target
) == REG
)
2465 int regno
= REGNO (target
);
2467 if (get_attr_type (insn
) == TYPE_LA
)
2469 pat
= PATTERN (insn
);
2470 if (GET_CODE (pat
) == PARALLEL
)
2472 if (XVECLEN (pat
, 0) != 2)
2474 pat
= XVECEXP (pat
, 0, 0);
2476 if (GET_CODE (pat
) == SET
)
2477 return refers_to_regno_p (regno
, regno
+1, SET_SRC (pat
), 0);
2481 else if (get_attr_atype (insn
) == ATYPE_MEM
)
2482 return reg_used_in_mem_p (regno
, PATTERN (insn
));
2489 /* Return the modified cost of the dependency of instruction INSN
2490 on instruction DEP_INSN through the link LINK. COST is the
2491 default cost of that dependency.
2493 Data dependencies are all handled without delay. However, if a
2494 register is modified and subsequently used as base or index
2495 register of a memory reference, at least 4 cycles need to pass
2496 between setting and using the register to avoid pipeline stalls.
2497 An exception is the LA instruction. An address generated by LA can
2498 be used by introducing only a one cycle stall on the pipeline. */
2501 s390_adjust_cost (insn
, link
, dep_insn
, cost
)
2510 /* If the dependence is an anti-dependence, there is no cost. For an
2511 output dependence, there is sometimes a cost, but it doesn't seem
2512 worth handling those few cases. */
2514 if (REG_NOTE_KIND (link
) != 0)
2517 /* If we can't recognize the insns, we can't really do anything. */
2518 if (recog_memoized (insn
) < 0 || recog_memoized (dep_insn
) < 0)
2521 dep_rtx
= PATTERN (dep_insn
);
2523 if (GET_CODE (dep_rtx
) == SET
)
2525 if (addr_generation_dependency_p (dep_rtx
, insn
))
2527 cost
+= (get_attr_type (dep_insn
) == TYPE_LA
) ? 1 : 4;
2530 fprintf (stderr
, "\n\nAddress dependency detected: cost %d\n",
2532 debug_rtx (dep_insn
);
2537 else if (GET_CODE (dep_rtx
) == PARALLEL
)
2539 for (i
= 0; i
< XVECLEN (dep_rtx
, 0); i
++)
2541 if (addr_generation_dependency_p (XVECEXP (dep_rtx
, 0, i
),
2544 cost
+= (get_attr_type (dep_insn
) == TYPE_LA
) ? 1 : 4;
2547 fprintf (stderr
, "\n\nAddress dependency detected: cost %d\n"
2549 debug_rtx (dep_insn
);
2560 /* A C statement (sans semicolon) to update the integer scheduling priority
2561 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2562 increase the priority to execute INSN later. Do not define this macro if
2563 you do not need to adjust the scheduling priorities of insns.
2565 A LA instruction maybe scheduled later, since the pipeline bypasses the
2566 calculated value. */
2569 s390_adjust_priority (insn
, priority
)
2570 rtx insn ATTRIBUTE_UNUSED
;
2573 if (! INSN_P (insn
))
2576 if (GET_CODE (PATTERN (insn
)) == USE
2577 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
2580 switch (get_attr_type (insn
))
2586 if (priority
>= 0 && priority
< 0x01000000)
2590 /* LM in epilogue should never be scheduled. This
2591 is due to literal access done in function body.
2592 The usage of register 13 is not mentioned explicitly,
2593 leading to scheduling 'LM' accross this instructions.
2595 priority
= 0x7fffffff;
2603 /* Split all branches that exceed the maximum distance. */
2606 s390_split_branches ()
2608 rtx temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
2609 rtx insn
, pat
, label
, target
, jump
, tmp
;
2611 /* In 64-bit mode we can jump +- 4GB. */
2616 /* Find all branches that exceed 64KB, and split them. */
2618 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
2620 if (GET_CODE (insn
) != JUMP_INSN
)
2623 pat
= PATTERN (insn
);
2624 if (GET_CODE (pat
) != SET
)
2627 if (GET_CODE (SET_SRC (pat
)) == LABEL_REF
)
2629 label
= SET_SRC (pat
);
2631 else if (GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
2633 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) == LABEL_REF
)
2634 label
= XEXP (SET_SRC (pat
), 1);
2635 else if (GET_CODE (XEXP (SET_SRC (pat
), 2)) == LABEL_REF
)
2636 label
= XEXP (SET_SRC (pat
), 2);
2643 if (get_attr_length (insn
) == 4)
2648 target
= gen_rtx_UNSPEC (SImode
, gen_rtvec (1, label
), 100);
2649 target
= gen_rtx_CONST (SImode
, target
);
2650 target
= force_const_mem (SImode
, target
);
2651 jump
= gen_rtx_REG (Pmode
, BASE_REGISTER
);
2652 jump
= gen_rtx_PLUS (Pmode
, jump
, temp_reg
);
2656 target
= force_const_mem (Pmode
, label
);
2660 if (GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
2662 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) == LABEL_REF
)
2663 jump
= gen_rtx_IF_THEN_ELSE (VOIDmode
, XEXP (SET_SRC (pat
), 0),
2666 jump
= gen_rtx_IF_THEN_ELSE (VOIDmode
, XEXP (SET_SRC (pat
), 0),
2670 tmp
= emit_insn_before (gen_rtx_SET (Pmode
, temp_reg
, target
), insn
);
2671 INSN_ADDRESSES_NEW (tmp
, -1);
2673 tmp
= emit_jump_insn_before (gen_rtx_SET (VOIDmode
, pc_rtx
, jump
), insn
);
2674 INSN_ADDRESSES_NEW (tmp
, -1);
2682 /* Find a literal pool symbol referenced in RTX X, and store
2683 it at REF. Will abort if X contains references to more than
2684 one such pool symbol; multiple references to the same symbol
2685 are allowed, however.
2687 The rtx pointed to by REF must be initialized to NULL_RTX
2688 by the caller before calling this routine. */
2691 find_constant_pool_ref (x
, ref
)
2698 if (GET_CODE (x
) == SYMBOL_REF
2699 && CONSTANT_POOL_ADDRESS_P (x
))
2701 if (*ref
== NULL_RTX
)
2707 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2708 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2712 find_constant_pool_ref (XEXP (x
, i
), ref
);
2714 else if (fmt
[i
] == 'E')
2716 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2717 find_constant_pool_ref (XVECEXP (x
, i
, j
), ref
);
2722 /* Replace every reference to the literal pool symbol REF
2723 in X by the address ADDR. Fix up MEMs as required. */
2726 replace_constant_pool_ref (x
, ref
, addr
)
2737 /* Literal pool references can only occur inside a MEM ... */
2738 if (GET_CODE (*x
) == MEM
)
2740 rtx memref
= XEXP (*x
, 0);
2744 *x
= replace_equiv_address (*x
, addr
);
2748 if (GET_CODE (memref
) == CONST
2749 && GET_CODE (XEXP (memref
, 0)) == PLUS
2750 && GET_CODE (XEXP (XEXP (memref
, 0), 1)) == CONST_INT
2751 && XEXP (XEXP (memref
, 0), 0) == ref
)
2753 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (memref
, 0), 1));
2754 *x
= replace_equiv_address (*x
, plus_constant (addr
, off
));
2759 /* ... or a load-address type pattern. */
2760 if (GET_CODE (*x
) == SET
)
2762 rtx addrref
= SET_SRC (*x
);
2766 SET_SRC (*x
) = addr
;
2770 if (GET_CODE (addrref
) == CONST
2771 && GET_CODE (XEXP (addrref
, 0)) == PLUS
2772 && GET_CODE (XEXP (XEXP (addrref
, 0), 1)) == CONST_INT
2773 && XEXP (XEXP (addrref
, 0), 0) == ref
)
2775 HOST_WIDE_INT off
= INTVAL (XEXP (XEXP (addrref
, 0), 1));
2776 SET_SRC (*x
) = plus_constant (addr
, off
);
2781 fmt
= GET_RTX_FORMAT (GET_CODE (*x
));
2782 for (i
= GET_RTX_LENGTH (GET_CODE (*x
)) - 1; i
>= 0; i
--)
2786 replace_constant_pool_ref (&XEXP (*x
, i
), ref
, addr
);
2788 else if (fmt
[i
] == 'E')
2790 for (j
= 0; j
< XVECLEN (*x
, i
); j
++)
2791 replace_constant_pool_ref (&XVECEXP (*x
, i
, j
), ref
, addr
);
/* NOTE(review): extraction-garbled chunk — original lines are split and many
   are elided (embedded numbering jumps); annotations only, code untouched.  */
2796 /* We keep a list of constants we which we have to add to internal
2797 constant tables in the middle of large functions. */
/* NOTE(review): table of machine modes for which constant-pool entries can
   be emitted; its 6 initializers are elided in this extraction.  */
2799 #define NR_C_MODES 6
2800 enum machine_mode constant_modes
[NR_C_MODES
] =
/* NOTE(review): per-mode generator functions for consttable insns; the
   df/di/sf/si entries are visible, the remaining two are elided.  */
2808 rtx (*gen_consttable
[NR_C_MODES
])(rtx
) =
2810 gen_consttable_df
, gen_consttable_di
,
2811 gen_consttable_sf
, gen_consttable_si
,
/* NOTE(review): singly linked list node for one pool constant; the value and
   label fields are elided here.  */
2818 struct constant
*next
;
/* NOTE(review): one literal-pool chunk: chained via `next`, holds per-mode
   constant lists; label/first_insn/last_insn/size fields elided.  */
2823 struct constant_pool
2825 struct constant_pool
*next
;
2829 struct constant
*constants
[NR_C_MODES
];
/* Forward declarations for the pool-chunk helpers defined below.  */
2834 static struct constant_pool
*s390_start_pool
PARAMS ((struct constant_pool
**, rtx
));
2835 static void s390_end_pool
PARAMS ((struct constant_pool
*, rtx
));
2836 static struct constant_pool
*s390_find_pool
PARAMS ((struct constant_pool
*, rtx
));
2837 static rtx s390_add_pool
PARAMS ((struct constant_pool
*, rtx
, enum machine_mode
));
2838 static rtx s390_dump_pool
PARAMS ((struct constant_pool
*));
2839 static void s390_free_pool
PARAMS ((struct constant_pool
*));
2841 /* Create new constant pool covering instructions starting at INSN
2842 and chain it to the end of POOL_LIST. */
/* NOTE(review): allocates a pool with xmalloc, clears its per-mode constant
   lists, gives it a fresh label, and records INSN as its first insn.  The
   closing of the chain-append loop and the return of the new pool are elided
   in this extraction (original lines 2863+ missing) — TODO confirm against
   the real source.  */
2844 static struct constant_pool
*
2845 s390_start_pool (pool_list
, insn
)
2846 struct constant_pool
**pool_list
;
2849 struct constant_pool
*pool
, **prev
;
2852 pool
= (struct constant_pool
*) xmalloc (sizeof *pool
);
2854 for (i
= 0; i
< NR_C_MODES
; i
++)
2855 pool
->constants
[i
] = NULL
;
2857 pool
->label
= gen_label_rtx ();
2858 pool
->first_insn
= insn
;
/* last_insn == NULL_RTX marks an open-ended pool until s390_end_pool runs. */
2859 pool
->last_insn
= NULL_RTX
;
/* Walk to the end of the list so the new pool is appended, not prepended. */
2862 for (prev
= pool_list
; *prev
; prev
= &(*prev
)->next
)
2869 /* End range of instructions covered by POOL at INSN. */
/* NOTE(review): simple setter; return type and braces elided by extraction. */
2872 s390_end_pool (pool
, insn
)
2873 struct constant_pool
*pool
;
2876 pool
->last_insn
= insn
;
2879 /* Return pool out of POOL_LIST that covers INSN. */
/* NOTE(review): uses the insn address table to locate the chunk whose
   [first_insn, last_insn) address range contains INSN; a pool with
   last_insn == NULL_RTX is treated as open-ended.  The return statement is
   elided in this extraction.  */
2881 static struct constant_pool
*
2882 s390_find_pool (pool_list
, insn
)
2883 struct constant_pool
*pool_list
;
2886 int addr
= INSN_ADDRESSES (INSN_UID (insn
));
2887 struct constant_pool
*pool
;
2892 for (pool
= pool_list
; pool
; pool
= pool
->next
)
2893 if (INSN_ADDRESSES (INSN_UID (pool
->first_insn
)) <= addr
2894 && (pool
->last_insn
== NULL_RTX
2895 || INSN_ADDRESSES (INSN_UID (pool
->last_insn
)) > addr
))
2901 /* Add constant VAL of mode MODE to the constant pool POOL.
2902 Return an RTX describing the distance from the start of
2903 the pool to the location of the new constant. */
/* NOTE(review): dedupes via rtx_equal_p before allocating; the abort on an
   unsupported mode and the final `return offset;` are elided here.  */
2906 s390_add_pool (pool
, val
, mode
)
2907 struct constant_pool
*pool
;
2909 enum machine_mode mode
;
/* Map MODE to its slot in constant_modes[].  */
2915 for (i
= 0; i
< NR_C_MODES
; i
++)
2916 if (constant_modes
[i
] == mode
)
2918 if (i
== NR_C_MODES
)
/* Reuse an existing entry if VAL is already in this pool.  */
2921 for (c
= pool
->constants
[i
]; c
!= NULL
; c
= c
->next
)
2922 if (rtx_equal_p (val
, c
->value
))
2927 c
= (struct constant
*) xmalloc (sizeof *c
);
2929 c
->label
= gen_label_rtx ();
/* Push the new constant onto the head of the per-mode list.  */
2930 c
->next
= pool
->constants
[i
];
2931 pool
->constants
[i
] = c
;
2932 pool
->size
+= GET_MODE_SIZE (mode
);
/* Offset is expressed symbolically as (const (minus c->label pool->label)). */
2935 offset
= gen_rtx_MINUS (Pmode
, gen_rtx_LABEL_REF (Pmode
, c
->label
),
2936 gen_rtx_LABEL_REF (Pmode
, pool
->label
));
2937 offset
= gen_rtx_CONST (Pmode
, offset
);
2941 /* Dump out the constants in POOL. */
/* NOTE(review): emits the pool body as insns after the pool's last insn
   (or at function end).  The TARGET_64BIT conditionals selecting the
   _64 vs _31 pool start/end insns are elided by the extraction —
   both arms appear consecutively below.  */
2944 s390_dump_pool (pool
)
2945 struct constant_pool
*pool
;
2951 /* Select location to put literal pool. */
2953 insn
= get_last_insn ();
2955 insn
= pool
->last_insn
? pool
->last_insn
: get_last_insn ();
2957 /* Pool start insn switches to proper section
2958 and guarantees necessary alignment. */
2960 insn
= emit_insn_after (gen_pool_start_64 (), insn
);
2962 insn
= emit_insn_after (gen_pool_start_31 (), insn
);
/* -1 address: newly emitted insns get addresses on the next recomputation. */
2963 INSN_ADDRESSES_NEW (insn
, -1);
2965 insn
= emit_label_after (pool
->label
, insn
);
2966 INSN_ADDRESSES_NEW (insn
, -1);
2968 /* Dump constants in descending alignment requirement order,
2969 ensuring proper alignment for every constant. */
2970 for (i
= 0; i
< NR_C_MODES
; i
++)
2971 for (c
= pool
->constants
[i
]; c
; c
= c
->next
)
2973 insn
= emit_label_after (c
->label
, insn
);
2974 INSN_ADDRESSES_NEW (insn
, -1);
2975 insn
= emit_insn_after (gen_consttable
[i
] (c
->value
), insn
);
2976 INSN_ADDRESSES_NEW (insn
, -1);
2979 /* Pool end insn switches back to previous section
2980 and guarantees necessary alignment. */
2982 insn
= emit_insn_after (gen_pool_end_64 (), insn
);
2984 insn
= emit_insn_after (gen_pool_end_31 (), insn
);
2985 INSN_ADDRESSES_NEW (insn
, -1);
/* A barrier prevents fall-through execution into the literal data.  */
2987 insn
= emit_barrier_after (insn
);
2988 INSN_ADDRESSES_NEW (insn
, -1);
2993 /* Free all memory used by POOL. */
/* NOTE(review): walks each per-mode constant list; the inner while loop,
   the free() calls, and the free of POOL itself are elided here.  */
2996 s390_free_pool (pool
)
2997 struct constant_pool
*pool
;
3001 for (i
= 0; i
< NR_C_MODES
; i
++)
3003 struct constant
*c
= pool
->constants
[i
];
/* Save the successor before the node is freed (elided below).  */
3006 struct constant
*next
= c
->next
;
3015 /* Used in s390.md for branch length calculation. */
3016 int s390_pool_overflow
= 0;
3018 /* Chunkify the literal pool if required. */
/* NOTE(review): splits an over-large literal pool into chunks placed in the
   insn stream, rewrites pool references, and inserts base-register reload
   insns.  Many original lines (braces, else-arms, declarations of insn/i/
   far_labels, chunk-size increments) are elided by the extraction — the code
   below is annotated as-is, not reconstructed.  */
3020 #define S390_POOL_CHUNK_MIN 0xc00
3021 #define S390_POOL_CHUNK_MAX 0xe00
3024 s390_chunkify_pool ()
/* On 31-bit, the return register doubles as the chunk base register.  */
3026 rtx base_reg
= gen_rtx_REG (Pmode
,
3027 TARGET_64BIT
? BASE_REGISTER
: RETURN_REGNUM
);
3029 struct constant_pool
*curr_pool
= NULL
, *pool_list
= NULL
;
3034 /* Do we need to chunkify the literal pool? */
3036 if (get_pool_size () < S390_POOL_CHUNK_MAX
)
3039 /* Scan all insns and move literals to pool chunks.
3040 Replace all occurrances of literal pool references
3041 by explicit references to pool chunk entries. */
3043 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3045 if (GET_CODE (insn
) == INSN
)
3047 rtx addr
, pool_ref
= NULL_RTX
;
3048 find_constant_pool_ref (PATTERN (insn
), &pool_ref
);
/* Lazily open a new chunk at the first pool reference seen.  */
3052 curr_pool
= s390_start_pool (&pool_list
, insn
);
3054 addr
= s390_add_pool (curr_pool
, get_pool_constant (pool_ref
),
3055 get_pool_mode (pool_ref
));
/* Rewrite the reference as base_reg + offset-within-chunk.  */
3057 addr
= gen_rtx_PLUS (Pmode
, base_reg
, addr
);
3058 replace_constant_pool_ref (&PATTERN (insn
), pool_ref
, addr
);
/* Force re-recognition since the pattern changed.  */
3059 INSN_CODE (insn
) = -1;
3064 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn
)
3065 || INSN_ADDRESSES (INSN_UID (insn
)) == -1)
3070 if (curr_pool
->size
< S390_POOL_CHUNK_MAX
)
3073 s390_end_pool (curr_pool
, insn
);
3078 int chunk_size
= INSN_ADDRESSES (INSN_UID (insn
))
3079 - INSN_ADDRESSES (INSN_UID (curr_pool
->first_insn
))
3082 /* We will later have to insert base register reload insns.
3083 Those will have an effect on code size, which we need to
3084 consider here. This calculation makes rather pessimistic
3085 worst-case assumptions. */
3086 if (GET_CODE (insn
) == CODE_LABEL
3087 || GET_CODE (insn
) == JUMP_INSN
)
3089 else if (GET_CODE (insn
) == CALL_INSN
)
3092 if (chunk_size
< S390_POOL_CHUNK_MIN
3093 && curr_pool
->size
< S390_POOL_CHUNK_MIN
)
3096 /* Pool chunks can only be inserted after BARRIERs ... */
3097 if (GET_CODE (insn
) == BARRIER
)
3099 s390_end_pool (curr_pool
, insn
);
3104 /* ... so if we don't find one in time, create one. */
3105 else if ((chunk_size
> S390_POOL_CHUNK_MAX
3106 || curr_pool
->size
> S390_POOL_CHUNK_MAX
)
3107 && (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
))
/* Synthesize jump-around + barrier so a pool can be dumped mid-stream.  */
3109 int addr
= INSN_ADDRESSES (INSN_UID (insn
));
3110 rtx label
, jump
, barrier
;
3112 label
= gen_label_rtx ();
3113 jump
= emit_jump_insn_after (gen_jump (label
), insn
);
3114 barrier
= emit_barrier_after (jump
);
3115 insn
= emit_label_after (label
, barrier
);
3116 JUMP_LABEL (jump
) = label
;
3117 LABEL_NUSES (label
) = 1;
3119 INSN_ADDRESSES_NEW (jump
, addr
+1);
3120 INSN_ADDRESSES_NEW (barrier
, addr
+1);
3121 INSN_ADDRESSES_NEW (insn
, -1);
3123 s390_end_pool (curr_pool
, barrier
);
3130 /* Dump out all literal pools. */
3132 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
3133 s390_dump_pool (curr_pool
);
3136 /* Find all labels that are branched into
3137 from an insn belonging to a different chunk. */
3139 far_labels
= BITMAP_XMALLOC ();
3141 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3143 /* Labels marked with LABEL_PRESERVE_P can be target
3144 of non-local jumps, so we have to mark them.
3145 The same holds for named labels.
3147 Don't do that, however, if it is the label before
3150 if (GET_CODE (insn
) == CODE_LABEL
3151 && (LABEL_PRESERVE_P (insn
) || LABEL_NAME (insn
)))
3153 rtx vec_insn
= next_real_insn (insn
);
3154 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
3155 PATTERN (vec_insn
) : NULL_RTX
;
3157 || !(GET_CODE (vec_pat
) == ADDR_VEC
3158 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
3159 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (insn
));
3162 /* If we have a direct jump (conditional or unconditional)
3163 or a casesi jump, check all potential targets. */
3164 else if (GET_CODE (insn
) == JUMP_INSN
)
3166 rtx pat
= PATTERN (insn
);
3167 if (GET_CODE (pat
) == SET
)
3171 if (GET_CODE (SET_SRC (pat
)) == LABEL_REF
)
3173 label
= XEXP (SET_SRC (pat
), 0);
3175 else if (GET_CODE (SET_SRC (pat
)) == IF_THEN_ELSE
)
3177 if (GET_CODE (XEXP (SET_SRC (pat
), 1)) == LABEL_REF
)
3178 label
= XEXP (XEXP (SET_SRC (pat
), 1), 0);
3179 else if (GET_CODE (XEXP (SET_SRC (pat
), 2)) == LABEL_REF
)
3180 label
= XEXP (XEXP (SET_SRC (pat
), 2), 0);
/* Mark the target label "far" when source and target lie in
   different pool chunks.  */
3185 if (s390_find_pool (pool_list
, label
)
3186 != s390_find_pool (pool_list
, insn
))
3187 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
3190 else if (GET_CODE (pat
) == PARALLEL
3191 && XVECLEN (pat
, 0) == 2
3192 && GET_CODE (XVECEXP (pat
, 0, 0)) == SET
3193 && GET_CODE (XVECEXP (pat
, 0, 1)) == USE
3194 && GET_CODE (XEXP (XVECEXP (pat
, 0, 1), 0)) == LABEL_REF
)
3196 /* Find the jump table used by this casesi jump. */
3197 rtx vec_label
= XEXP (XEXP (XVECEXP (pat
, 0, 1), 0), 0);
3198 rtx vec_insn
= next_real_insn (vec_label
);
3199 rtx vec_pat
= vec_insn
&& GET_CODE (vec_insn
) == JUMP_INSN
?
3200 PATTERN (vec_insn
) : NULL_RTX
;
3202 && (GET_CODE (vec_pat
) == ADDR_VEC
3203 || GET_CODE (vec_pat
) == ADDR_DIFF_VEC
))
/* ADDR_DIFF_VEC stores labels in operand 1, ADDR_VEC in operand 0. */
3205 int i
, diff_p
= GET_CODE (vec_pat
) == ADDR_DIFF_VEC
;
3207 for (i
= 0; i
< XVECLEN (vec_pat
, diff_p
); i
++)
3209 rtx label
= XEXP (XVECEXP (vec_pat
, diff_p
, i
), 0);
3211 if (s390_find_pool (pool_list
, label
)
3212 != s390_find_pool (pool_list
, insn
))
3213 bitmap_set_bit (far_labels
, CODE_LABEL_NUMBER (label
));
3220 /* Insert base register reload insns before every pool. */
/* NOTE(review): two emit variants appear back-to-back below; presumably an
   elided TARGET_64BIT (or similar) conditional selects between the raw SET
   and gen_reload_base — confirm against the real source.  */
3222 for (curr_pool
= pool_list
; curr_pool
; curr_pool
= curr_pool
->next
)
3225 rtx pool_ref
= gen_rtx_LABEL_REF (Pmode
, curr_pool
->label
);
3226 rtx new_insn
= gen_rtx_SET (Pmode
, base_reg
, pool_ref
);
3227 rtx insn
= curr_pool
->first_insn
;
3228 INSN_ADDRESSES_NEW (emit_insn_before (new_insn
, insn
), -1);
3232 rtx new_insn
= gen_reload_base (base_reg
, curr_pool
->label
);
3233 rtx insn
= curr_pool
->first_insn
;
3234 INSN_ADDRESSES_NEW (emit_insn_before (new_insn
, insn
), -1);
3237 /* Insert base register reload insns at every far label. */
3239 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3240 if (GET_CODE (insn
) == CODE_LABEL
3241 && bitmap_bit_p (far_labels
, CODE_LABEL_NUMBER (insn
)))
3243 struct constant_pool
*pool
= s390_find_pool (pool_list
, insn
);
3248 rtx pool_ref
= gen_rtx_LABEL_REF (Pmode
, pool
->label
);
3249 rtx new_insn
= gen_rtx_SET (Pmode
, base_reg
, pool_ref
);
3250 INSN_ADDRESSES_NEW (emit_insn_after (new_insn
, insn
), -1);
3254 rtx new_insn
= gen_reload_base (base_reg
, pool
->label
);
3255 INSN_ADDRESSES_NEW (emit_insn_after (new_insn
, insn
), -1);
3260 /* Insert base register reload insns after every call if necessary. */
/* Only needed when the call-clobbered return register serves as base.  */
3262 if (REGNO (base_reg
) == RETURN_REGNUM
)
3263 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3264 if (GET_CODE (insn
) == CALL_INSN
)
3266 struct constant_pool
*pool
= s390_find_pool (pool_list
, insn
);
3269 rtx new_insn
= gen_reload_base2 (base_reg
, pool
->label
);
3270 INSN_ADDRESSES_NEW (emit_insn_after (new_insn
, insn
), -1);
3275 /* Recompute insn addresses. */
/* s390_pool_overflow toggles branch-length computation in s390.md.  */
3277 s390_pool_overflow
= 1;
3278 init_insn_lengths ();
3279 shorten_branches (get_insns ());
3280 s390_pool_overflow
= 0;
3282 /* Insert base register reload insns after far branches. */
3285 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3286 if (GET_CODE (insn
) == JUMP_INSN
3287 && GET_CODE (PATTERN (insn
)) == SET
3288 && get_attr_length (insn
) >= 12)
3290 struct constant_pool
*pool
= s390_find_pool (pool_list
, insn
);
3293 rtx new_insn
= gen_reload_base (base_reg
, pool
->label
);
3294 INSN_ADDRESSES_NEW (emit_insn_after (new_insn
, insn
), -1);
3299 /* Free all memory. */
3303 struct constant_pool
*next
= pool_list
->next
;
3304 s390_free_pool (pool_list
);
3308 BITMAP_XFREE (far_labels
);
3312 /* Index of constant pool chunk that is currently being processed.
3313 Set to -1 before function output has started. */
3314 int s390_pool_count
= -1;
3316 /* Number of elements of current constant pool. */
3317 int s390_nr_constants
;
3319 /* Output main constant pool to stdio stream FILE. */
/* NOTE(review): emits larl (64-bit) or bras (31-bit) to address the pool;
   the conditionals selecting between the two sequences are elided by the
   extraction.  Labels .LT%X/.LTN%X are keyed by s390_function_count.  */
3322 s390_output_constant_pool (file
)
3325 /* Output constant pool. */
3326 if (s390_nr_constants
)
3330 fprintf (file
, "\tlarl\t%s,.LT%X\n", reg_names
[BASE_REGISTER
],
3331 s390_function_count
);
3332 readonly_data_section ();
/* Align the pool to an 8-byte (2^3) boundary.  */
3333 ASM_OUTPUT_ALIGN (file
, 3);
3337 fprintf (file
, "\tbras\t%s,.LTN%X\n", reg_names
[BASE_REGISTER
],
3338 s390_function_count
);
3340 fprintf (file
, ".LT%X:\n", s390_function_count
);
/* Non-negative pool count while the generic pool writer runs.  */
3342 s390_pool_count
= 0;
3343 output_constant_pool (current_function_name
, current_function_decl
);
3344 s390_pool_count
= -1;
3347 function_section (current_function_decl
);
3349 fprintf (file
, ".LTN%X:\n", s390_function_count
);
3354 /* Return true if floating point registers need to be saved. */
/* NOTE(review): the function header of this predicate (save_fprs_p, per the
   later call site at original line 3397) is elided; only its scan of
   fprs 8-15 (hard regs 24-31) remains.  */
3362 for (i
=24; i
<=31; i
++)
3364 if (regs_ever_live
[i
] == 1)
3370 /* Find first call clobbered register unsused in a function.
3371 This could be used as base register in a leaf function
3372 or for holding the return address before epilogue. */
/* NOTE(review): returns the first of gprs 0-5 not live; the return
   statements are elided by the extraction.  */
3375 find_unused_clobbered_reg ()
3378 for (i
= 0; i
< 6; i
++)
3379 if (!regs_ever_live
[i
])
3384 /* Fill FRAME with info about frame of current function. */
/* NOTE(review): computes frame size, which gprs/fprs must be saved, and
   whether a frame pointer is needed.  Loop-exit breaks and some braces are
   elided by the extraction.  */
3387 s390_frame_info (frame
)
3388 struct s390_frame
*frame
;
3391 HOST_WIDE_INT fsize
= get_frame_size ();
3393 if (fsize
> 0x7fff0000)
3394 fatal_error ("Total size of local variables exceeds architecture limit.");
3396 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
3397 frame
->save_fprs_p
= save_fprs_p ();
/* 8 fprs x 8 bytes = 64 bytes of fpr save area when needed.  */
3399 frame
->frame_size
= fsize
+ frame
->save_fprs_p
* 64;
3401 /* Does function need to setup frame and save area. */
3403 if (! current_function_is_leaf
3404 || frame
->frame_size
> 0
3405 || current_function_calls_alloca
3406 || current_function_stdarg
)
3407 frame
->frame_size
+= STARTING_FRAME_OFFSET
;
3409 /* If we need to allocate a frame, the stack pointer is changed. */
3411 if (frame
->frame_size
> 0)
3412 regs_ever_live
[STACK_POINTER_REGNUM
] = 1;
3414 /* If the literal pool might overflow, the return register might
3415 be used as temp literal pointer. */
3417 if (!TARGET_64BIT
&& get_pool_size () >= S390_POOL_CHUNK_MAX
/ 2)
3418 regs_ever_live
[RETURN_REGNUM
] = 1;
3420 /* If there is (possibly) any pool entry, we need to
3421 load base register. */
3423 if (get_pool_size ()
3424 || !CONST_OK_FOR_LETTER_P (frame
->frame_size
, 'K')
3425 || (!TARGET_64BIT
&& current_function_uses_pic_offset_table
))
3426 regs_ever_live
[BASE_REGISTER
] = 1;
3428 /* If we need the GOT pointer, remember to save/restore it. */
3430 if (current_function_uses_pic_offset_table
)
3431 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
3433 /* Frame pointer needed. */
3435 frame
->frame_pointer_p
= frame_pointer_needed
;
3437 /* Find first and last gpr to be saved. */
3439 for (i
= 6; i
< 16; i
++)
3440 if (regs_ever_live
[i
])
3443 for (j
= 15; j
> i
; j
--)
3444 if (regs_ever_live
[j
])
3449 /* Nothing to save / restore. */
3450 frame
->first_save_gpr
= -1;
3451 frame
->first_restore_gpr
= -1;
3452 frame
->last_save_gpr
= -1;
3453 frame
->return_reg_saved_p
= 0;
3457 /* Save / Restore from gpr i to j. */
3458 frame
->first_save_gpr
= i
;
3459 frame
->first_restore_gpr
= i
;
3460 frame
->last_save_gpr
= j
;
3461 frame
->return_reg_saved_p
= (j
>= RETURN_REGNUM
&& i
<= RETURN_REGNUM
);
3464 if (current_function_stdarg
)
3466 /* Varargs function need to save from gpr 2 to gpr 15. */
3467 frame
->first_save_gpr
= 2;
3471 /* Return offset between argument pointer and frame pointer
3472 initially after prologue. */
3475 s390_arg_frame_offset ()
3477 struct s390_frame frame
;
3479 /* Compute frame_info. */
3481 s390_frame_info (&frame
);
/* Arguments sit above the local frame plus the fixed register save area.  */
3483 return frame
.frame_size
+ STACK_POINTER_OFFSET
;
3486 /* Emit insn to save fpr REGNUM at offset OFFSET relative
3487 to register BASE. Return generated insn. */
/* NOTE(review): parameter declarations (K&R) are elided by the extraction. */
3490 save_fpr (base
, offset
, regnum
)
3496 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
/* Tag the save-area access with the s390 save/restore alias set.  */
3497 set_mem_alias_set (addr
, s390_sr_alias_set
);
3499 return emit_move_insn (addr
, gen_rtx_REG (DFmode
, regnum
));
3502 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
3503 to register BASE. Return generated insn. */
/* Mirror of save_fpr: moves from the save slot back into the fpr.  */
3506 restore_fpr (base
, offset
, regnum
)
3512 addr
= gen_rtx_MEM (DFmode
, plus_constant (base
, offset
));
3513 set_mem_alias_set (addr
, s390_sr_alias_set
);
3515 return emit_move_insn (gen_rtx_REG (DFmode
, regnum
), addr
);
3518 /* Output the function prologue assembly code to the
3519 stdio stream FILE. The local frame size is passed
/* NOTE(review): both parameters are unused; the hook is used as a late
   pass point to chunkify the pool and split out-of-range branches.  */
3523 s390_function_prologue (file
, lsize
)
3524 FILE *file ATTRIBUTE_UNUSED
;
3525 HOST_WIDE_INT lsize ATTRIBUTE_UNUSED
;
3527 s390_chunkify_pool ();
3528 s390_split_branches ();
3531 /* Output the function epilogue assembly code to the
3532 stdio stream FILE. The local frame size is passed
/* NOTE(review): resets per-function PIC state and bumps the per-function
   counter used to build unique .LT/.LTN pool labels.  */
3536 s390_function_epilogue (file
, lsize
)
3537 FILE *file ATTRIBUTE_UNUSED
;
3538 HOST_WIDE_INT lsize ATTRIBUTE_UNUSED
;
3540 current_function_uses_pic_offset_table
= 0;
3541 s390_function_count
++;
3544 /* Expand the prologue into a bunch of separate insns. */
/* NOTE(review): emits gpr/fpr saves, stack adjustment, backchain, frame
   pointer and GOT setup.  Several else-arms, declarations (insn, addr, i,
   temp_reg) and some REG_NOTES assignments are elided by the extraction;
   annotations below describe only what is visible.  */
3547 s390_emit_prologue ()
3549 struct s390_frame frame
;
3554 /* Compute frame_info. */
3556 s390_frame_info (&frame
);
3558 /* Choose best register to use for temp use within prologue. */
/* Prefer the return register as scratch when it is saved anyway and the
   pool will not need it as a temporary literal pointer.  */
3560 if (frame
.return_reg_saved_p
3561 && !has_hard_reg_initial_val (Pmode
, RETURN_REGNUM
)
3562 && get_pool_size () < S390_POOL_CHUNK_MAX
/ 2)
3563 temp_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
3565 temp_reg
= gen_rtx_REG (Pmode
, 1);
3567 /* Save call saved gprs. */
3569 if (frame
.first_save_gpr
!= -1)
3571 addr
= plus_constant (stack_pointer_rtx
,
3572 frame
.first_save_gpr
* UNITS_PER_WORD
);
3573 addr
= gen_rtx_MEM (Pmode
, addr
);
3574 set_mem_alias_set (addr
, s390_sr_alias_set
);
/* More than one gpr to save: use a store-multiple insn.  */
3576 if (frame
.first_save_gpr
!= frame
.last_save_gpr
)
3578 insn
= emit_insn (gen_store_multiple (addr
,
3579 gen_rtx_REG (Pmode
, frame
.first_save_gpr
),
3580 GEN_INT (frame
.last_save_gpr
3581 - frame
.first_save_gpr
+ 1)));
3583 /* We need to set the FRAME_RELATED flag on all SETs
3584 inside the store-multiple pattern.
3586 However, we must not emit DWARF records for registers 2..5
3587 if they are stored for use by variable arguments ...
3589 ??? Unfortunately, it is not enough to simply not the the
3590 FRAME_RELATED flags for those SETs, because the first SET
3591 of the PARALLEL is always treated as if it had the flag
3592 set, even if it does not. Therefore we emit a new pattern
3593 without those registers as REG_FRAME_RELATED_EXPR note. */
3595 if (frame
.first_save_gpr
>= 6)
3597 rtx pat
= PATTERN (insn
);
3599 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3600 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
3601 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, i
)) = 1;
3603 RTX_FRAME_RELATED_P (insn
) = 1;
3605 else if (frame
.last_save_gpr
>= 6)
/* Build a shadow pattern covering only gprs 6.. for the DWARF note.  */
3608 naddr
= plus_constant (stack_pointer_rtx
, 6 * UNITS_PER_WORD
);
3609 note
= gen_store_multiple (gen_rtx_MEM (Pmode
, naddr
),
3610 gen_rtx_REG (Pmode
, 6),
3611 GEN_INT (frame
.last_save_gpr
- 6 + 1));
3612 note
= PATTERN (note
);
3615 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
3616 note
, REG_NOTES (insn
));
3618 for (i
= 0; i
< XVECLEN (note
, 0); i
++)
3619 if (GET_CODE (XVECEXP (note
, 0, i
)) == SET
)
3620 RTX_FRAME_RELATED_P (XVECEXP (note
, 0, i
)) = 1;
3622 RTX_FRAME_RELATED_P (insn
) = 1;
/* Single gpr to save: plain move.  */
3627 insn
= emit_move_insn (addr
,
3628 gen_rtx_REG (Pmode
, frame
.first_save_gpr
));
3629 RTX_FRAME_RELATED_P (insn
) = 1;
3633 /* Dump constant pool and set constant pool register (13). */
3635 insn
= emit_insn (gen_lit ());
3637 /* Save fprs for variable args. */
3639 if (current_function_stdarg
)
3641 /* Save fpr 0 and 2. */
3643 save_fpr (stack_pointer_rtx
, STACK_POINTER_OFFSET
- 32, 16);
3644 save_fpr (stack_pointer_rtx
, STACK_POINTER_OFFSET
- 24, 17);
3648 /* Save fpr 4 and 6. */
3650 save_fpr (stack_pointer_rtx
, STACK_POINTER_OFFSET
- 16, 18);
3651 save_fpr (stack_pointer_rtx
, STACK_POINTER_OFFSET
- 8, 19);
3655 /* Save fprs 4 and 6 if used (31 bit ABI). */
3659 /* Save fpr 4 and 6. */
3660 if (regs_ever_live
[18])
3662 insn
= save_fpr (stack_pointer_rtx
, STACK_POINTER_OFFSET
- 16, 18);
3663 RTX_FRAME_RELATED_P (insn
) = 1;
3665 if (regs_ever_live
[19])
3667 insn
= save_fpr (stack_pointer_rtx
, STACK_POINTER_OFFSET
- 8, 19);
3668 RTX_FRAME_RELATED_P (insn
) = 1;
3672 /* Decrement stack pointer. */
3674 if (frame
.frame_size
> 0)
3676 rtx frame_off
= GEN_INT (-frame
.frame_size
);
3678 /* Save incoming stack pointer into temp reg. */
3680 if (TARGET_BACKCHAIN
|| frame
.save_fprs_p
)
3682 insn
= emit_insn (gen_move_insn (temp_reg
, stack_pointer_rtx
));
3685 /* Substract frame size from stack pointer. */
3687 frame_off
= GEN_INT (-frame
.frame_size
);
/* Too large for an immediate ('K' constraint): go through the pool.  */
3688 if (!CONST_OK_FOR_LETTER_P (-frame
.frame_size
, 'K'))
3689 frame_off
= force_const_mem (Pmode
, frame_off
);
3691 insn
= emit_insn (gen_add2_insn (stack_pointer_rtx
, frame_off
));
3692 RTX_FRAME_RELATED_P (insn
) = 1;
3694 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
3695 gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
3696 gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
3697 GEN_INT (-frame
.frame_size
))),
3700 /* Set backchain. */
3702 if (TARGET_BACKCHAIN
)
3704 addr
= gen_rtx_MEM (Pmode
, stack_pointer_rtx
);
3705 set_mem_alias_set (addr
, s390_sr_alias_set
);
3706 insn
= emit_insn (gen_move_insn (addr
, temp_reg
));
3710 /* Save fprs 8 - 15 (64 bit ABI). */
3712 if (frame
.save_fprs_p
)
/* temp_reg holds the incoming sp; back up 64 bytes for the fpr area.  */
3714 insn
= emit_insn (gen_add2_insn (temp_reg
, GEN_INT(-64)));
3716 for (i
= 24; i
< 32; i
++)
3717 if (regs_ever_live
[i
])
3719 rtx addr
= plus_constant (stack_pointer_rtx
,
3720 frame
.frame_size
- 64 + (i
-24)*8);
3722 insn
= save_fpr (temp_reg
, (i
-24)*8, i
);
3723 RTX_FRAME_RELATED_P (insn
) = 1;
3725 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
3726 gen_rtx_SET (VOIDmode
,
3727 gen_rtx_MEM (DFmode
, addr
),
3728 gen_rtx_REG (DFmode
, i
)),
3733 /* Set frame pointer, if needed. */
3735 if (frame
.frame_pointer_p
)
3737 insn
= emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
3738 RTX_FRAME_RELATED_P (insn
) = 1;
3741 /* Set up got pointer, if needed. */
3743 if (current_function_uses_pic_offset_table
)
3745 rtx got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
3746 SYMBOL_REF_FLAG (got_symbol
) = 1;
3750 insn
= emit_insn (gen_movdi (pic_offset_table_rtx
,
3753 /* It can happen that the GOT pointer isn't really needed ... */
3754 REG_NOTES(insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
/* 31-bit path: materialize the GOT address via the literal pool
   (UNSPEC 100), then add the pool base register.  */
3759 got_symbol
= gen_rtx_UNSPEC (VOIDmode
,
3760 gen_rtvec (1, got_symbol
), 100);
3761 got_symbol
= gen_rtx_CONST (VOIDmode
, got_symbol
);
3762 got_symbol
= force_const_mem (Pmode
, got_symbol
);
3763 insn
= emit_move_insn (pic_offset_table_rtx
,
3765 REG_NOTES(insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
3768 insn
= emit_insn (gen_add2_insn (pic_offset_table_rtx
,
3769 gen_rtx_REG (Pmode
, BASE_REGISTER
)));
3770 REG_NOTES(insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, NULL_RTX
,
3776 /* Expand the epilogue into a bunch of separate insns. */
/* NOTE(review): computes which slice of the register save area must be
   touched, restores fprs/gprs, and emits the return.  Some declarations,
   braces and else-arms are elided by the extraction.  */
3779 s390_emit_epilogue ()
3781 struct s390_frame frame
;
3782 rtx frame_pointer
, return_reg
;
3783 int area_bottom
, area_top
, offset
;
3786 /* Compute frame_info. */
3788 s390_frame_info (&frame
);
3790 /* Check whether to use frame or stack pointer for restore. */
3792 frame_pointer
= frame
.frame_pointer_p
?
3793 hard_frame_pointer_rtx
: stack_pointer_rtx
;
3795 /* Compute which parts of the save area we need to access. */
3797 if (frame
.first_restore_gpr
!= -1)
3799 area_bottom
= frame
.first_restore_gpr
* UNITS_PER_WORD
;
3800 area_top
= (frame
.last_save_gpr
+ 1) * UNITS_PER_WORD
;
/* INT_MAX sentinel: empty range until a save area widens it below.  */
3804 area_bottom
= INT_MAX
;
3810 if (frame
.save_fprs_p
)
3812 if (area_bottom
> -64)
3820 if (regs_ever_live
[18])
3822 if (area_bottom
> STACK_POINTER_OFFSET
- 16)
3823 area_bottom
= STACK_POINTER_OFFSET
- 16;
3824 if (area_top
< STACK_POINTER_OFFSET
- 8)
3825 area_top
= STACK_POINTER_OFFSET
- 8;
3827 if (regs_ever_live
[19])
3829 if (area_bottom
> STACK_POINTER_OFFSET
- 8)
3830 area_bottom
= STACK_POINTER_OFFSET
- 8;
3831 if (area_top
< STACK_POINTER_OFFSET
)
3832 area_top
= STACK_POINTER_OFFSET
;
3836 /* Check whether we can access the register save area.
3837 If not, increment the frame pointer as required. */
3839 if (area_top
<= area_bottom
)
3841 /* Nothing to restore. */
/* Displacements must fit the 12-bit unsigned range [0, 4096).  */
3843 else if (frame
.frame_size
+ area_bottom
>= 0
3844 && frame
.frame_size
+ area_top
<= 4096)
3846 /* Area is in range. */
3847 offset
= frame
.frame_size
;
3851 rtx insn
, frame_off
;
3853 offset
= area_bottom
< 0 ? -area_bottom
: 0;
3854 frame_off
= GEN_INT (frame
.frame_size
- offset
);
3856 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off
), 'K'))
3857 frame_off
= force_const_mem (Pmode
, frame_off
);
3859 insn
= emit_insn (gen_add2_insn (frame_pointer
, frame_off
));
3862 /* Restore call saved fprs. */
3868 if (frame
.save_fprs_p
)
3869 for (i
= 24; i
< 32; i
++)
3870 if (regs_ever_live
[i
] && !global_regs
[i
])
3871 restore_fpr (frame_pointer
,
3872 offset
- 64 + (i
-24) * 8, i
);
3876 if (regs_ever_live
[18] && !global_regs
[18])
3877 restore_fpr (frame_pointer
, offset
+ STACK_POINTER_OFFSET
- 16, 18);
3878 if (regs_ever_live
[19] && !global_regs
[19])
3879 restore_fpr (frame_pointer
, offset
+ STACK_POINTER_OFFSET
- 8, 19);
3882 /* Return register. */
3884 return_reg
= gen_rtx_REG (Pmode
, RETURN_REGNUM
);
3886 /* Restore call saved gprs. */
3888 if (frame
.first_restore_gpr
!= -1)
3893 /* Check for global register and save them
3894 to stack location from where they get restored. */
3896 for (i
= frame
.first_restore_gpr
;
3897 i
<= frame
.last_save_gpr
;
3900 /* These registers are special and need to be
3901 restored in any case. */
3902 if (i
== STACK_POINTER_REGNUM
3903 || i
== RETURN_REGNUM
3904 || i
== BASE_REGISTER
3905 || (flag_pic
&& i
== PIC_OFFSET_TABLE_REGNUM
))
/* Write current value of a global reg into its save slot so the
   following load-multiple effectively leaves it unchanged.  */
3910 addr
= plus_constant (frame_pointer
,
3911 offset
+ i
* UNITS_PER_WORD
);
3912 addr
= gen_rtx_MEM (Pmode
, addr
);
3913 set_mem_alias_set (addr
, s390_sr_alias_set
);
3914 emit_move_insn (addr
, gen_rtx_REG (Pmode
, i
));
3918 /* Fetch return address from stack before load multiple,
3919 this will do good for scheduling. */
3921 if (frame
.last_save_gpr
>= RETURN_REGNUM
3922 && frame
.first_restore_gpr
< RETURN_REGNUM
)
3924 int return_regnum
= find_unused_clobbered_reg();
3927 return_reg
= gen_rtx_REG (Pmode
, return_regnum
);
3929 addr
= plus_constant (frame_pointer
,
3930 offset
+ RETURN_REGNUM
* UNITS_PER_WORD
);
3931 addr
= gen_rtx_MEM (Pmode
, addr
);
3932 set_mem_alias_set (addr
, s390_sr_alias_set
);
3933 emit_move_insn (return_reg
, addr
);
3936 /* ??? As references to the base register are not made
3937 explicit in insn RTX code, we have to add a barrier here
3938 to prevent incorrect scheduling. */
3940 emit_insn (gen_blockage());
3942 addr
= plus_constant (frame_pointer
,
3943 offset
+ frame
.first_restore_gpr
* UNITS_PER_WORD
);
3944 addr
= gen_rtx_MEM (Pmode
, addr
);
3945 set_mem_alias_set (addr
, s390_sr_alias_set
);
3947 if (frame
.first_restore_gpr
!= frame
.last_save_gpr
)
3949 emit_insn (gen_load_multiple (
3950 gen_rtx_REG (Pmode
, frame
.first_restore_gpr
),
3952 GEN_INT (frame
.last_save_gpr
- frame
.first_restore_gpr
+ 1)));
3956 emit_move_insn (gen_rtx_REG (Pmode
, frame
.first_restore_gpr
),
3961 /* Return to caller. */
/* (parallel [(return) (use return_reg)]) keeps the return address live.  */
3963 p
= rtvec_alloc (2);
3965 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
3966 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
, return_reg
);
3967 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
3971 /* Return the size in bytes of a function argument of
3972 type TYPE and/or mode MODE. At least one of TYPE or
3973 MODE must be specified. */
/* NOTE(review): the `if (type)` guard before int_size_in_bytes and the
   trailing abort() are elided by the extraction.  */
3976 s390_function_arg_size (mode
, type
)
3977 enum machine_mode mode
;
3981 return int_size_in_bytes (type
);
3983 /* No type info available for some library calls ... */
3984 if (mode
!= BLKmode
)
3985 return GET_MODE_SIZE (mode
);
3987 /* If we have neither type nor mode, abort */
3991 /* Return 1 if a function argument of type TYPE and mode MODE
3992 is to be passed by reference. The ABI specifies that only
3993 structures of size 1, 2, 4, or 8 bytes are passed by value,
3994 all other structures (and complex numbers) are passed by
/* NOTE(review): the NULL-type guard and the `return 1;`/`return 0;`
   statements are elided by the extraction.  */
3998 s390_function_arg_pass_by_reference (mode
, type
)
3999 enum machine_mode mode
;
4002 int size
= s390_function_arg_size (mode
, type
);
/* Aggregates of any size other than 1/2/4/8 bytes go by reference.  */
4006 if (AGGREGATE_TYPE_P (type
) &&
4007 size
!= 1 && size
!= 2 && size
!= 4 && size
!= 8)
/* Complex numbers are always passed by reference under this ABI.  */
4010 if (TREE_CODE (type
) == COMPLEX_TYPE
)
4017 /* Update the data in CUM to advance over an argument of mode MODE and
4018 data type TYPE. (TYPE is null for libcalls where that information
4019 may not be available.). The boolean NAMED specifies whether the
4020 argument is a named argument (as opposed to an unnamed argument
4021 matching an ellipsis). */
/* NOTE(review): the increments for the fpr and by-reference branches
   (presumably cum->fprs++ / cum->gprs++) are elided by the extraction.  */
4024 s390_function_arg_advance (cum
, mode
, type
, named
)
4025 CUMULATIVE_ARGS
*cum
;
4026 enum machine_mode mode
;
4028 int named ATTRIBUTE_UNUSED
;
4030 if (! TARGET_SOFT_FLOAT
&& (mode
== DFmode
|| mode
== SFmode
))
4034 else if (s390_function_arg_pass_by_reference (mode
, type
))
4040 int size
= s390_function_arg_size (mode
, type
);
/* Round the byte size up to whole words when consuming gprs.  */
4041 cum
->gprs
+= ((size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
);
4045 /* Define where to put the arguments to a function.
4046 Value is zero to push the argument on the stack,
4047 or a hard register in which to store the argument.
4049 MODE is the argument's machine mode.
4050 TYPE is the data type of the argument (as a tree).
4051 This is null for libcalls where that information may
4053 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4054 the preceding args and about the function being called.
4055 NAMED is nonzero if this argument is a named parameter
4056 (otherwise it is an extra parameter matching an ellipsis).
4058 On S/390, we use general purpose registers 2 through 6 to
4059 pass integer, pointer, and certain structure arguments, and
4060 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
4061 to pass floating point arguments. All remaining arguments
4062 are pushed to the stack. */
/* NOTE(review): the `return 0;` stack-fallback statements after the
   capacity checks are elided by the extraction.  */
4065 s390_function_arg (cum
, mode
, type
, named
)
4066 CUMULATIVE_ARGS
*cum
;
4067 enum machine_mode mode
;
4069 int named ATTRIBUTE_UNUSED
;
4071 if (s390_function_arg_pass_by_reference (mode
, type
))
4074 if (! TARGET_SOFT_FLOAT
&& (mode
== DFmode
|| mode
== SFmode
))
/* 4 fp arg registers on 64-bit, 2 on 31-bit.  */
4076 if (cum
->fprs
+ 1 > (TARGET_64BIT
? 4 : 2))
/* fprs are hard regs 16..; gprs for args start at hard reg 2.  */
4079 return gen_rtx (REG
, mode
, cum
->fprs
+ 16);
4083 int size
= s390_function_arg_size (mode
, type
);
4084 int n_gprs
= (size
+ UNITS_PER_WORD
-1) / UNITS_PER_WORD
;
4086 if (cum
->gprs
+ n_gprs
> 5)
4089 return gen_rtx (REG
, mode
, cum
->gprs
+ 2);
4094 /* Create and return the va_list datatype.
4096 On S/390, va_list is an array type equivalent to
4098 typedef struct __va_list_tag
4102 void *__overflow_arg_area;
4103 void *__reg_save_area;
4107 where __gpr and __fpr hold the number of general purpose
4108 or floating point arguments used up to now, respectively,
4109 __overflow_arg_area points to the stack location of the
4110 next argument passed on the stack, and __reg_save_area
4111 always points to the start of the register area in the
4112 call frame of the current function. The function prologue
4113 saves all registers used for argument passing into this
4114 area if the function uses variable arguments. */
/* NOTE(review): the pointer-type arguments of the __overflow_arg_area and
   __reg_save_area build_decl calls are elided by the extraction, as is the
   `type_decl =` assignment receiving the TYPE_DECL.  */
4117 s390_build_va_list ()
4119 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
4121 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
4124 build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
4126 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("__gpr"),
4127 long_integer_type_node
);
4128 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("__fpr"),
4129 long_integer_type_node
);
4130 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("__overflow_arg_area"),
4132 f_sav
= build_decl (FIELD_DECL
, get_identifier ("__reg_save_area"),
/* Attach each field to the record and chain them in declaration order.  */
4135 DECL_FIELD_CONTEXT (f_gpr
) = record
;
4136 DECL_FIELD_CONTEXT (f_fpr
) = record
;
4137 DECL_FIELD_CONTEXT (f_ovf
) = record
;
4138 DECL_FIELD_CONTEXT (f_sav
) = record
;
4140 TREE_CHAIN (record
) = type_decl
;
4141 TYPE_NAME (record
) = type_decl
;
4142 TYPE_FIELDS (record
) = f_gpr
;
4143 TREE_CHAIN (f_gpr
) = f_fpr
;
4144 TREE_CHAIN (f_fpr
) = f_ovf
;
4145 TREE_CHAIN (f_ovf
) = f_sav
;
4147 layout_type (record
);
4149 /* The correct type is an array type of one element. */
4150 return build_array_type (record
, build_index_type (size_zero_node
));
4153 /* Implement va_start by filling the va_list structure VALIST.
4154 STDARG_P is always true, and ignored.
4155 NEXTARG points to the first anonymous stack argument.
4157 The following global variables are used to initialize
4158 the va_list structure:
4160 current_function_args_info:
4161 holds number of gprs and fprs used for named arguments.
4162 current_function_arg_offset_rtx:
4163 holds the offset of the first anonymous stack argument
4164 (relative to the virtual arg pointer). */
4167 s390_va_start (valist
, nextarg
)
4169 rtx nextarg ATTRIBUTE_UNUSED
;
4171 HOST_WIDE_INT n_gpr
, n_fpr
;
4173 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
4174 tree gpr
, fpr
, ovf
, sav
, t
;
4176 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4177 f_fpr
= TREE_CHAIN (f_gpr
);
4178 f_ovf
= TREE_CHAIN (f_fpr
);
4179 f_sav
= TREE_CHAIN (f_ovf
);
4181 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4182 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4183 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4184 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4185 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4187 /* Count number of gp and fp argument registers used. */
4189 n_gpr
= current_function_args_info
.gprs
;
4190 n_fpr
= current_function_args_info
.fprs
;
4192 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
4193 TREE_SIDE_EFFECTS (t
) = 1;
4194 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4196 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
4197 TREE_SIDE_EFFECTS (t
) = 1;
4198 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4200 /* Find the overflow area. */
4201 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
4203 off
= INTVAL (current_function_arg_offset_rtx
);
4204 off
= off
< 0 ? 0 : off
;
4205 if (TARGET_DEBUG_ARG
)
4206 fprintf (stderr
, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
4207 (int)n_gpr
, (int)n_fpr
, off
);
4209 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
, build_int_2 (off
, 0));
4211 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4212 TREE_SIDE_EFFECTS (t
) = 1;
4213 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4215 /* Find the register save area. */
4216 t
= make_tree (TREE_TYPE (sav
), virtual_incoming_args_rtx
);
4217 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
4218 build_int_2 (-STACK_POINTER_OFFSET
, -1));
4219 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
4220 TREE_SIDE_EFFECTS (t
) = 1;
4221 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4224 /* Implement va_arg by updating the va_list structure
4225 VALIST as required to retrieve an argument of type
4226 TYPE, and returning that argument.
4228 Generates code equivalent to:
4230 if (integral value) {
4231 if (size <= 4 && args.gpr < 5 ||
4232 size > 4 && args.gpr < 4 )
4233 ret = args.reg_save_area[args.gpr+8]
4235 ret = *args.overflow_arg_area++;
4236 } else if (float value) {
4238 ret = args.reg_save_area[args.fpr+64]
4240 ret = *args.overflow_arg_area++;
4241 } else if (aggregate value) {
4243 ret = *args.reg_save_area[args.gpr]
4245 ret = **args.overflow_arg_area++;
4249 s390_va_arg (valist
, type
)
4253 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
4254 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
4255 int indirect_p
, size
, n_reg
, sav_ofs
, sav_scale
, max_reg
;
4256 rtx lab_false
, lab_over
, addr_rtx
, r
;
4258 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4259 f_fpr
= TREE_CHAIN (f_gpr
);
4260 f_ovf
= TREE_CHAIN (f_fpr
);
4261 f_sav
= TREE_CHAIN (f_ovf
);
4263 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4264 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4265 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4266 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4267 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4269 size
= int_size_in_bytes (type
);
4271 if (s390_function_arg_pass_by_reference (TYPE_MODE (type
), type
))
4273 if (TARGET_DEBUG_ARG
)
4275 fprintf (stderr
, "va_arg: aggregate type");
4279 /* Aggregates are passed by reference. */
4283 sav_ofs
= 2 * UNITS_PER_WORD
;
4284 sav_scale
= UNITS_PER_WORD
;
4285 size
= UNITS_PER_WORD
;
4288 else if (FLOAT_TYPE_P (type
) && ! TARGET_SOFT_FLOAT
)
4290 if (TARGET_DEBUG_ARG
)
4292 fprintf (stderr
, "va_arg: float type");
4296 /* FP args go in FP registers, if present. */
4300 sav_ofs
= 16 * UNITS_PER_WORD
;
4302 /* TARGET_64BIT has up to 4 parameter in fprs */
4303 max_reg
= TARGET_64BIT
? 3 : 1;
4307 if (TARGET_DEBUG_ARG
)
4309 fprintf (stderr
, "va_arg: other type");
4313 /* Otherwise into GP registers. */
4316 n_reg
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
4317 sav_ofs
= 2 * UNITS_PER_WORD
;
4319 sav_ofs
+= TYPE_MODE (type
) == SImode
? 4 :
4320 TYPE_MODE (type
) == HImode
? 6 :
4321 TYPE_MODE (type
) == QImode
? 7 : 0;
4323 sav_ofs
+= TYPE_MODE (type
) == HImode
? 2 :
4324 TYPE_MODE (type
) == QImode
? 3 : 0;
4326 sav_scale
= UNITS_PER_WORD
;
4333 /* Pull the value out of the saved registers ... */
4335 lab_false
= gen_label_rtx ();
4336 lab_over
= gen_label_rtx ();
4337 addr_rtx
= gen_reg_rtx (Pmode
);
4339 emit_cmp_and_jump_insns (expand_expr (reg
, NULL_RTX
, Pmode
, EXPAND_NORMAL
),
4341 GT
, const1_rtx
, Pmode
, 0, lab_false
);
4344 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
4348 u
= build (MULT_EXPR
, long_integer_type_node
,
4349 reg
, build_int_2 (sav_scale
, 0));
4350 TREE_SIDE_EFFECTS (u
) = 1;
4352 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
4353 TREE_SIDE_EFFECTS (t
) = 1;
4355 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4357 emit_move_insn (addr_rtx
, r
);
4360 emit_jump_insn (gen_jump (lab_over
));
4362 emit_label (lab_false
);
4364 /* ... Otherwise out of the overflow area. */
4366 t
= save_expr (ovf
);
4369 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
4370 if (size
< UNITS_PER_WORD
)
4372 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (UNITS_PER_WORD
-size
, 0));
4373 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4374 TREE_SIDE_EFFECTS (t
) = 1;
4375 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4377 t
= save_expr (ovf
);
4380 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4382 emit_move_insn (addr_rtx
, r
);
4384 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
4385 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4386 TREE_SIDE_EFFECTS (t
) = 1;
4387 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4389 emit_label (lab_over
);
4391 /* If less than max_regs a registers are retrieved out
4392 of register save area, increment. */
4394 u
= build (PREINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
4395 build_int_2 (n_reg
, 0));
4396 TREE_SIDE_EFFECTS (u
) = 1;
4397 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4401 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
4402 set_mem_alias_set (r
, get_varargs_alias_set ());
4403 emit_move_insn (addr_rtx
, r
);
4411 /* Output assembly code for the trampoline template to
4414 On S/390, we use gpr 1 internally in the trampoline code;
4415 gpr 0 is used to hold the static chain. */
4418 s390_trampoline_template (file
)
4423 fprintf (file
, "larl\t%s,0f\n", reg_names
[1]);
4424 fprintf (file
, "lg\t%s,0(%s)\n", reg_names
[0], reg_names
[1]);
4425 fprintf (file
, "lg\t%s,8(%s)\n", reg_names
[1], reg_names
[1]);
4426 fprintf (file
, "br\t%s\n", reg_names
[1]);
4427 fprintf (file
, "0:\t.quad\t0\n");
4428 fprintf (file
, ".quad\t0\n");
4432 fprintf (file
, "basr\t%s,0\n", reg_names
[1]);
4433 fprintf (file
, "l\t%s,10(%s)\n", reg_names
[0], reg_names
[1]);
4434 fprintf (file
, "l\t%s,14(%s)\n", reg_names
[1], reg_names
[1]);
4435 fprintf (file
, "br\t%s\n", reg_names
[1]);
4436 fprintf (file
, ".long\t0\n");
4437 fprintf (file
, ".long\t0\n");
4441 /* Emit RTL insns to initialize the variable parts of a trampoline.
4442 FNADDR is an RTX for the address of the function's pure code.
4443 CXT is an RTX for the static chain value for the function. */
4446 s390_initialize_trampoline (addr
, fnaddr
, cxt
)
4451 emit_move_insn (gen_rtx
4453 memory_address (Pmode
,
4454 plus_constant (addr
, (TARGET_64BIT
? 20 : 12) ))), cxt
);
4455 emit_move_insn (gen_rtx
4457 memory_address (Pmode
,
4458 plus_constant (addr
, (TARGET_64BIT
? 28 : 16) ))), fnaddr
);
4461 /* Return rtx for 64-bit constant formed from the 32-bit subwords
4462 LOW and HIGH, independent of the host word size. */
4465 s390_gen_rtx_const_DI (high
, low
)
4469 #if HOST_BITS_PER_WIDE_INT >= 64
4471 val
= (HOST_WIDE_INT
)high
;
4473 val
|= (HOST_WIDE_INT
)low
;
4475 return GEN_INT (val
);
4477 #if HOST_BITS_PER_WIDE_INT >= 32
4478 return immed_double_const ((HOST_WIDE_INT
)low
, (HOST_WIDE_INT
)high
, DImode
);
4485 /* Output assembler code to FILE to increment profiler label # LABELNO
4486 for profiling a function entry. */
4489 s390_function_profiler (file
, labelno
)
4496 sprintf (label
, "%sP%d", LPREFIX
, labelno
);
4498 fprintf (file
, "# function profiler \n");
4500 op
[0] = gen_rtx_REG (Pmode
, RETURN_REGNUM
);
4501 op
[1] = gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
4502 op
[1] = gen_rtx_MEM (Pmode
, plus_constant (op
[1], UNITS_PER_WORD
));
4504 op
[2] = gen_rtx_REG (Pmode
, 1);
4505 op
[3] = gen_rtx_SYMBOL_REF (Pmode
, label
);
4506 SYMBOL_REF_FLAG (op
[3]) = 1;
4508 op
[4] = gen_rtx_SYMBOL_REF (Pmode
, "_mcount");
4511 op
[4] = gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, op
[4]), 113);
4512 op
[4] = gen_rtx_CONST (Pmode
, op
[4]);
4517 output_asm_insn ("stg\t%0,%1", op
);
4518 output_asm_insn ("larl\t%2,%3", op
);
4519 output_asm_insn ("brasl\t%0,%4", op
);
4520 output_asm_insn ("lg\t%0,%1", op
);
4524 op
[6] = gen_label_rtx ();
4526 output_asm_insn ("st\t%0,%1", op
);
4527 output_asm_insn ("bras\t%2,%l6", op
);
4528 output_asm_insn (".long\t%4", op
);
4529 output_asm_insn (".long\t%3", op
);
4530 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (op
[6]));
4531 output_asm_insn ("l\t%0,0(%2)", op
);
4532 output_asm_insn ("l\t%2,4(%2)", op
);
4533 output_asm_insn ("basr\t%0,%0", op
);
4534 output_asm_insn ("l\t%0,%1", op
);
4538 op
[5] = gen_label_rtx ();
4539 op
[6] = gen_label_rtx ();
4541 output_asm_insn ("st\t%0,%1", op
);
4542 output_asm_insn ("bras\t%2,%l6", op
);
4543 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (op
[5]));
4544 output_asm_insn (".long\t%4-%l5", op
);
4545 output_asm_insn (".long\t%3-%l5", op
);
4546 ASM_OUTPUT_INTERNAL_LABEL (file
, "L", CODE_LABEL_NUMBER (op
[6]));
4547 output_asm_insn ("lr\t%0,%2", op
);
4548 output_asm_insn ("a\t%0,0(%2)", op
);
4549 output_asm_insn ("a\t%2,4(%2)", op
);
4550 output_asm_insn ("basr\t%0,%0", op
);
4551 output_asm_insn ("l\t%0,%1", op
);
4555 /* Select section for constant in constant pool. In 32-bit mode,
4556 constants go in the function section; in 64-bit mode in .rodata. */
4559 s390_select_rtx_section (mode
, x
, align
)
4560 enum machine_mode mode ATTRIBUTE_UNUSED
;
4561 rtx x ATTRIBUTE_UNUSED
;
4562 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
4565 readonly_data_section ();
4567 function_section (current_function_decl
);
4570 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
4571 may access it directly in the GOT. */
4574 s390_encode_section_info (decl
, first
)
4576 int first ATTRIBUTE_UNUSED
;
4580 rtx rtl
= (TREE_CODE_CLASS (TREE_CODE (decl
)) != 'd'
4581 ? TREE_CST_RTL (decl
) : DECL_RTL (decl
));
4583 if (GET_CODE (rtl
) == MEM
)
4585 SYMBOL_REF_FLAG (XEXP (rtl
, 0))
4586 = (TREE_CODE_CLASS (TREE_CODE (decl
)) != 'd'
4587 || ! TREE_PUBLIC (decl
));