/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"

static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int s390_adjust_priority PARAMS ((rtx, int));
static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
                                             unsigned HOST_WIDE_INT));
static void s390_encode_section_info PARAMS ((tree, int));

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE s390_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE s390_function_epilogue

#undef TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST s390_adjust_cost

#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

struct gcc_target targetm = TARGET_INITIALIZER;

extern int reload_completed;

/* The alias set for prologue/epilogue register save/restore.  */
static int s390_sr_alias_set = 0;

/* Function count for creating unique internal labels in a compile unit.  */
int s390_function_count = 0;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx s390_compare_op0, s390_compare_op1;

/* Structure used to hold the components of an S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
          base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  int pointer;
};
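
/* For example, the address (plus (plus (reg %r1) (reg %r2))
   (const_int 100)) decomposes into indx = %r1, base = %r2, and
   disp = (const_int 100); a plain (reg %r2) is just a base with
   neither index nor displacement.  (Illustrative register numbers.)  */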

/* Structure containing information for prologue and epilogue.  */

struct s390_frame
{
  int frame_pointer_p;
  int return_reg_saved_p;
  int save_fprs_p;
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int arg_frame_offset;

  HOST_WIDE_INT frame_size;
};

static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
static int s390_branch_condition_mask PARAMS ((rtx));
static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
static int check_mode PARAMS ((rtx, enum machine_mode *));
static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
static int s390_decompose_address PARAMS ((rtx, struct s390_address *, int));
static int reg_used_in_mem_p PARAMS ((int, rtx));
static int addr_generation_dependency_p PARAMS ((rtx, rtx));
static void s390_split_branches PARAMS ((void));
static void find_constant_pool_ref PARAMS ((rtx, rtx *));
static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
static void s390_chunkify_pool PARAMS ((void));
static int save_fprs_p PARAMS ((void));
static int find_unused_clobbered_reg PARAMS ((void));
static void s390_frame_info PARAMS ((struct s390_frame *));
static rtx save_fpr PARAMS ((rtx, int, int));
static rtx restore_fpr PARAMS ((rtx, int, int));
static int s390_function_arg_size PARAMS ((enum machine_mode, tree));


/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static int
s390_match_ccmode_set (set, req_mode)
     rtx set;
     enum machine_mode req_mode;
{
  enum machine_mode set_mode;

  if (GET_CODE (set) != SET)
    abort ();

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    default:
      abort ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}

/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

int
s390_match_ccmode (insn, req_mode)
     rtx insn;
     enum machine_mode req_mode;
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return 0;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return 0;
      }

  return 1;
}

/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx); it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (op1, op2, mixed)
     rtx op1;
     rtx op2;
     int mixed;
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.  */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.  */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
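
/* Worked example: with OP1 = 0xc0 and OP2 = 0x40 the selected bits
   are mixed.  bit1 = exact_log2 (0x40) = 6 and
   bit0 = exact_log2 (0xc0 ^ 0x40) = 7; since bit0 > bit1, the result
   is CCT1mode.  (Illustration only.)  */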

/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (code, op0, op1)
     enum rtx_code code;
     rtx op0;
     rtx op1;
{
  switch (code)
    {
    case EQ:
    case NE:
      if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
          || GET_CODE (op1) == NEG)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      abort ();
    }
}
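
/* For instance, (eq (plus (reg) (reg)) (const_int 0)) selects
   CCLmode: after an addition the equality test has to use the
   logical CC interpretation (both CC0 and CC2 mean a zero result),
   whereas a plain (eq (reg) (const_int 0)) selects CCZmode.
   (Illustrative example.)  */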

/* Return branch condition mask to implement a branch
   specified by CODE.  */

static int
s390_branch_condition_mask (code)
     rtx code;
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;

  if (GET_CODE (XEXP (code, 0)) != REG
      || REGNO (XEXP (code, 0)) != CC_REGNUM
      || XEXP (code, 1) != const0_rtx)
    abort ();

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0;
        case NE: return CC1 | CC2 | CC3;
        default:
          abort ();
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC1;
        case NE: return CC0 | CC2 | CC3;
        default:
          abort ();
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC2;
        case NE: return CC0 | CC1 | CC3;
        default:
          abort ();
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ: return CC3;
        case NE: return CC0 | CC1 | CC2;
        default:
          abort ();
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ: return CC0 | CC2;
        case NE: return CC1 | CC3;
        default:
          abort ();
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU: return CC2 | CC3;  /* carry */
        case GEU: return CC0 | CC1;  /* no carry */
        default:
          abort ();
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU: return CC0 | CC1;  /* borrow */
        case LEU: return CC2 | CC3;  /* no borrow */
        default:
          abort ();
        }
      break;

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LTU: return CC1;
        case GTU: return CC2;
        case LEU: return CC0 | CC1;
        case GEU: return CC0 | CC2;
        default:
          abort ();
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC2 | CC1 | CC3;
        case LTU: return CC2;
        case GTU: return CC1;
        case LEU: return CC0 | CC2;
        case GEU: return CC0 | CC1;
        default:
          abort ();
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC1 | CC2 | CC3;
        case LT:  return CC1;
        case GT:  return CC2;
        case LE:  return CC0 | CC1;
        case GE:  return CC0 | CC2;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC1 | CC2;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC1 | CC3;
        case UNGT:      return CC2 | CC3;
        case UNLE:      return CC0 | CC1 | CC3;
        case UNGE:      return CC0 | CC2 | CC3;
        case LTGT:      return CC1 | CC2;
        default:
          abort ();
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:  return CC0;
        case NE:  return CC2 | CC1 | CC3;
        case LT:  return CC2;
        case GT:  return CC1;
        case LE:  return CC0 | CC2;
        case GE:  return CC0 | CC1;
        case UNORDERED: return CC3;
        case ORDERED:   return CC0 | CC2 | CC1;
        case UNEQ:      return CC0 | CC3;
        case UNLT:      return CC2 | CC3;
        case UNGT:      return CC1 | CC3;
        case UNLE:      return CC0 | CC2 | CC3;
        case UNGE:      return CC0 | CC1 | CC3;
        case LTGT:      return CC2 | CC1;
        default:
          abort ();
        }
      break;

    default:
      abort ();
    }
}
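
/* The four bits mirror the condition mask of the BRC/BC instruction,
   with CC0 tested by the most significant bit.  For example, LEU in
   CCUmode yields CC0 | CC1 == 8 + 4 == 12, i.e. "branch if CC is
   0 or 1".  (Worked example for illustration.)  */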

/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (code, inv)
     rtx code;
     int inv;
{
  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  int mask = s390_branch_condition_mask (code);

  if (inv)
    mask ^= 15;

  if (mask < 1 || mask > 14)
    abort ();

  return mnemonic[mask];
}
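
/* Worked example: an EQ branch in CCZmode has mask CC0 == 8, which
   indexes the mnemonic "e"; the inverted mask 8 ^ 15 == 7 gives
   "ne".  (Illustration only.)  */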

/* If OP is an integer constant of mode MODE with exactly one
   HImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, if all HImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */

int
s390_single_hi (op, mode, def)
     rtx op;
     enum machine_mode mode;
     int def;
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode) / 2;
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 16;

          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode) / 2;
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
          else if (i == HOST_BITS_PER_WIDE_INT / 16)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 16;

          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  return -1;
}
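
/* Subparts are numbered from the most significant halfword down,
   matching the lliXX-style instruction names.  For example,
   s390_single_hi (GEN_INT (0x12340000), SImode, 0) finds only the
   upper halfword unequal to 0 and returns part number 0;
   s390_extract_hi below then extracts 0x1234 for that part.
   (Illustrative example.)  */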

/* Extract the HImode part number PART from integer
   constant OP of mode MODE.  */

int
s390_extract_hi (op, mode, part)
     rtx op;
     enum machine_mode mode;
     int part;
{
  int n_parts = GET_MODE_SIZE (mode) / 2;
  if (part < 0 || part >= n_parts)
    abort ();
  else
    part = n_parts - 1 - part;

  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
      return ((value >> (16 * part)) & 0xffff);
    }
  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      if (part < HOST_BITS_PER_WIDE_INT / 16)
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
      else
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
        part -= HOST_BITS_PER_WIDE_INT / 16;

      return ((value >> (16 * part)) & 0xffff);
    }

  abort ();
}

/* If OP is an integer constant of mode MODE with exactly one
   QImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, if all QImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */

int
s390_single_qi (op, mode, def)
     rtx op;
     enum machine_mode mode;
     int def;
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode);
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 8;

          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      int n_parts = GET_MODE_SIZE (mode);
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
          else if (i == HOST_BITS_PER_WIDE_INT / 8)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 8;

          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  return -1;
}

/* Extract the QImode part number PART from integer
   constant OP of mode MODE.  */

int
s390_extract_qi (op, mode, part)
     rtx op;
     enum machine_mode mode;
     int part;
{
  int n_parts = GET_MODE_SIZE (mode);
  if (part < 0 || part >= n_parts)
    abort ();
  else
    part = n_parts - 1 - part;

  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
      return ((value >> (8 * part)) & 0xff);
    }
  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      if (part < HOST_BITS_PER_WIDE_INT / 8)
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
      else
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
        part -= HOST_BITS_PER_WIDE_INT / 8;

      return ((value >> (8 * part)) & 0xff);
    }

  abort ();
}
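
/* The QImode routines mirror the HImode ones at byte granularity.
   For example, s390_single_qi (GEN_INT (0xff00), HImode, 0) returns
   part number 0 (the high byte), and s390_extract_qi with that part
   number yields 0xff.  (Illustrative example.)  */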


/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is non-zero if `-Os' is specified and zero otherwise.  */

void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
#ifdef HAVE_decrement_and_branch_on_count
  /* When optimizing, enable use of BRCT instruction.  */
  if (level >= 1)
    flag_branch_on_count_reg = 1;
#endif
}

void
override_options ()
{
  /* Acquire a unique set number for our register saves and restores.  */
  s390_sr_alias_set = new_alias_set ();
}


/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    NO_REGS,   ADDR_REGS
};


/* Return true if OP is a (const_int 0) operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
const0_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return op == CONST0_RTX (mode);
}

/* Return true if OP is constant.
   OP is the current operation.
   MODE is the current operation mode.  */

int
consttable_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return CONSTANT_P (op);
}

/* Return true if the mode of operand OP matches MODE.
   If MODE is set to VOIDmode, set it to the mode of OP.  */

static int
check_mode (op, mode)
     register rtx op;
     enum machine_mode *mode;
{
  if (*mode == VOIDmode)
    *mode = GET_MODE (op);
  else
    {
      if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
        return 0;
    }
  return 1;
}

/* Return true if OP is a valid operand for the LARL instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
larl_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (! check_mode (op, &mode))
    return 0;

  /* Allow labels and local symbols.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF
      && (!flag_pic || SYMBOL_REF_FLAG (op)
          || CONSTANT_POOL_ADDRESS_P (op)))
    return 1;

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  /* Allow adding *even* constants.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
          || (INTVAL (XEXP (op, 1)) & 1) != 0)
        return 0;
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF
      && (!flag_pic || SYMBOL_REF_FLAG (op)
          || CONSTANT_POOL_ADDRESS_P (op)))
    return 1;

  /* Now we must have a @GOTENT offset or @PLT stub.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == 111)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == 113)
    return 1;

  return 0;
}
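
/* LARL forms its address as PC plus a halfword-scaled immediate, so
   only even byte offsets are representable; this is why only *even*
   added constants are accepted above.  For example,
   (const (plus (symbol_ref "x") (const_int 2))) is a valid LARL
   operand, while an offset of (const_int 1) is not and must be added
   separately.  (Illustrative example.)  */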

/* Return true if OP is a valid FP-Register.
   OP is the current operation.
   MODE is the current operation mode.  */

int
fp_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);
  if (! check_mode (op, &mode))
    return 0;
  if (code == REG && REGNO_OK_FOR_FP_P (REGNO (op)))
    return 1;
  else
    return 0;
}

/* Helper routine to implement s_operand and s_imm_operand.
   OP is the current operation.
   MODE is the current operation mode.
   ALLOW_IMMEDIATE specifies whether immediate operands should
   be accepted or not.  */

static int
general_s_operand (op, mode, allow_immediate)
     register rtx op;
     enum machine_mode mode;
     int allow_immediate;
{
  struct s390_address addr;

  /* Call general_operand first, so that we don't have to
     check for many special cases.  */
  if (!general_operand (op, mode))
    return 0;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
      /* Constants that we are sure will be forced to the
         literal pool in reload are OK as s-operand.  Note
         that we cannot call s390_preferred_reload_class here
         because it might not be known yet at this point
         whether the current function is a leaf or not.  */
      case CONST_INT:
      case CONST_DOUBLE:
        if (!allow_immediate || reload_completed)
          break;
        if (!legitimate_reload_constant_p (op))
          return 1;
        if (!TARGET_64BIT)
          return 1;
        break;

      /* Memory operands are OK unless they already use an
         index register.  */
      case MEM:
        if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
          return 1;
        if (s390_decompose_address (XEXP (op, 0), &addr, FALSE)
            && !addr.indx)
          return 1;
        break;

      default:
        break;
    }

  return 0;
}

/* Return true if OP is a valid S-type operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return general_s_operand (op, mode, 0);
}

/* Return true if OP is a valid S-type operand or an immediate
   operand that can be addressed as S-type operand by forcing
   it into the literal pool.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_imm_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return general_s_operand (op, mode, 1);
}

/* Return true if OP is a valid operand for a 'Q' constraint.
   This differs from s_operand in that only memory operands
   without index register are accepted, nothing else.  */

int
q_constraint (op)
     register rtx op;
{
  struct s390_address addr;

  if (GET_CODE (op) != MEM)
    return 0;

  if (!s390_decompose_address (XEXP (op, 0), &addr, FALSE))
    return 0;

  if (addr.indx)
    return 0;

  return 1;
}
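
/* S-type operands match the base-plus-displacement operand format of
   SS/SI-type instructions.  For example,
   (mem (plus (reg %r2) (const_int 100))) satisfies the 'Q'
   constraint, while (mem (plus (plus (reg %r1) (reg %r2))
   (const_int 100))) does not, because an index register is present.
   (Illustrative example.)  */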

/* Return true if OP is a valid operand for the BRAS instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
bras_sym_operand (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  register enum rtx_code code = GET_CODE (op);

  /* Allow SYMBOL_REFs.  */
  if (code == SYMBOL_REF)
    return 1;

  /* Allow @PLT stubs.  */
  if (code == CONST
      && GET_CODE (XEXP (op, 0)) == UNSPEC
      && XINT (XEXP (op, 0), 1) == 113)
    return 1;
  return 0;
}


/* Return true if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;


  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* Check whether the address is a plain base, or base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
           && GET_CODE (XEXP (src_addr, 0)) == REG
           && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != Pmode
          || REGNO (SET_DEST (elt)) != dest_regno + i
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != Pmode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
             != off + i * UNITS_PER_WORD)
        return 0;
    }

  return 1;
}
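
/* A matching PARALLEL for an LM loading %r6..%r8 on 31-bit, for
   example, looks like

      (parallel [(set (reg %r6) (mem (reg %r9)))
                 (set (reg %r7) (mem (plus (reg %r9) (const_int 4))))
                 (set (reg %r8) (mem (plus (reg %r9) (const_int 8))))])

   with consecutive destination registers and word-sized displacement
   steps off a common base.  (Illustrative example; modes omitted.)  */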

/* Return true if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* Check whether the address is a plain base, or base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
           && GET_CODE (XEXP (dest_addr, 0)) == REG
           && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return 0;

  if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != Pmode
          || REGNO (SET_SRC (elt)) != src_regno + i
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != Pmode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
             != off + i * UNITS_PER_WORD)
        return 0;
    }
  return 1;
}


/* Return true if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (op)
     rtx op;
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (op, i) - 1; j >= 0; j--)
            if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
              return 1;
        }

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
        return 1;
    }

  return 0;
}


/* Return true if OP is a legitimate general operand when
   generating PIC code.  It is given that flag_pic is on
   and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_pic_operand_p (op)
     register rtx op;
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Reject everything else; must be handled
     via emit_pic_move.  */
  return 0;
}

/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_constant_p (op)
     register rtx op;
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_pic_move.  */
  if (flag_pic)
    return 1;

  /* Even in the non-PIC case, we can accept immediate
     LARL operands here.  */
  if (TARGET_64BIT)
    return larl_operand (op, VOIDmode);

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return 0;
}

/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference to
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.  */

int
legitimate_reload_constant_p (op)
     register rtx op;
{
  /* Accept l(g)hi operands.  */
  if (GET_CODE (op) == CONST_INT
      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
    return 1;

  /* Accept lliXX operands.  */
  if (TARGET_64BIT
      && s390_single_hi (op, DImode, 0) >= 0)
    return 1;

  /* Accept larl operands.  */
  if (TARGET_64BIT
      && larl_operand (op, VOIDmode))
    return 1;

  /* If reload is completed, and we do not already have a
     literal pool, and OP must be forced to the literal
     pool, then something must have gone wrong earlier.
     We *cannot* force the constant any more, because the
     prolog generation already decided we don't need to
     set up the base register.  */
  if (reload_completed && !regs_ever_live[BASE_REGISTER])
    abort ();

  /* Everything else cannot be handled without reload.  */
  return 0;
}
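
/* For example, (const_int 42) is reloadable directly via LHI (the
   'K' constraint covers the 16-bit signed immediate range), and on
   64-bit targets (const_int 0x12340000) qualifies for LLILH because
   exactly one halfword is nonzero; a value like 0x12345678 fails all
   three tests and must come from the literal pool.  (Illustrative
   example.)  */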

/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
   return the class of reg to actually use.  */

enum reg_class
s390_preferred_reload_class (op, class)
     rtx op;
     enum reg_class class;
{
  /* This can happen if a floating point constant is being
     reloaded into an integer register.  Leave well alone.  */
  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
      && class != FP_REGS)
    return class;

  switch (GET_CODE (op))
    {
      /* Constants we cannot reload must be forced into the
         literal pool.  For constants we *could* handle directly,
         it might still be preferable to put them in the pool and
         use a memory-to-memory instruction.

         However, try to avoid needlessly allocating a literal
         pool in a routine that wouldn't otherwise need any.
         Heuristically, we assume that 64-bit leaf functions
         typically don't need a literal pool, all others do.  */
      case CONST_DOUBLE:
      case CONST_INT:
        if (!legitimate_reload_constant_p (op))
          return NO_REGS;

        if (TARGET_64BIT && current_function_is_leaf)
          return class;

        return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
         it is most likely being used as an address, so
         prefer ADDR_REGS.  If 'class' is not a superset
         of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case PLUS:
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
        if (reg_class_subset_p (ADDR_REGS, class))
          return ADDR_REGS;
        else
          return NO_REGS;

      default:
        break;
    }

  return class;
}

/* Return the register class of a scratch register needed to
   load IN into a register of class CLASS in MODE.

   We need a temporary when loading a PLUS expression which
   is not a legitimate operand of the LOAD ADDRESS instruction.  */

enum reg_class
s390_secondary_input_reload_class (class, mode, in)
     enum reg_class class ATTRIBUTE_UNUSED;
     enum machine_mode mode;
     rtx in;
{
  if (s390_plus_operand (in, mode))
    return ADDR_REGS;

  return NO_REGS;
}

/* Return true if OP is a PLUS that is not a legitimate
   operand for the LA instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s390_plus_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (!check_mode (op, &mode) || mode != Pmode)
    return FALSE;

  if (GET_CODE (op) != PLUS)
    return FALSE;

  if (legitimate_la_operand_p (op))
    return FALSE;

  return TRUE;
}

/* Generate code to load SRC, which is a PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (target, src, scratch_in)
     register rtx target;
     register rtx src;
     register rtx scratch_in;
{
  rtx sum1, sum2, scratch;

  /* ??? reload apparently does not ensure that the scratch register
     and the target do not overlap.  We absolutely require this to be
     the case, however.  Therefore the reload_in[sd]i patterns ask for
     a double-sized scratch register, and if one part happens to be
     equal to the target, we use the other one.  */
  scratch = gen_rtx_REG (Pmode, REGNO (scratch_in));
  if (rtx_equal_p (scratch, target))
    scratch = gen_rtx_REG (Pmode, REGNO (scratch_in) + 1);

  /* src must be a PLUS; get its two operands.  */
  if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
    abort ();

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));

  /* Accept already valid addresses.  */
  src = gen_rtx_PLUS (Pmode, sum1, sum2);
  if (s390_decompose_address (src, NULL, 1))
    {
      src = legitimize_la_operand (src);
      emit_insn (gen_rtx_SET (VOIDmode, target, src));
      return;
    }

  /* If one of the two operands is equal to the target,
     make it the first one.  If one is a constant, make
     it the second one.  */
  if (rtx_equal_p (target, sum2)
      || GET_CODE (sum1) == CONST_INT)
    {
      rtx tem = sum2;
      sum2 = sum1;
      sum1 = tem;
    }

  /* If the first operand is not an address register,
     we reload it into the target.  */
  if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
    {
      emit_move_insn (target, sum1);
      sum1 = target;
    }

  /* Likewise for the second operand.  However, take
     care not to clobber the target if we already used
     it for the first operand.  Use the scratch instead.
     Also, allow an immediate offset if it is in range.  */
  if ((true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
      && !(GET_CODE (sum2) == CONST_INT
           && INTVAL (sum2) >= 0 && INTVAL (sum2) < 4096))
    {
      if (!rtx_equal_p (target, sum1))
        {
          emit_move_insn (target, sum2);
          sum2 = target;
        }
      else
        {
          emit_move_insn (scratch, sum2);
          sum2 = scratch;
        }
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  src = gen_rtx_PLUS (Pmode, sum1, sum2);
  src = legitimize_la_operand (src);
  emit_insn (gen_rtx_SET (VOIDmode, target, src));
}


/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.  The boolean STRICT
   specifies whether strict register checking applies.
   Returns 0 if ADDR is not a valid memory address, nonzero
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (addr, out, strict)
     register rtx addr;
     struct s390_address *out;
     int strict;
{
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  int pointer = FALSE;

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
        {
          if (code1 == REG || code1 == UNSPEC)
            {
              indx = op0;	/* index + base */
              base = op1;
            }

          else
            {
              base = op0;	/* base + displacement */
              disp = op1;
            }
        }

      else if (code0 == PLUS)
        {
          indx = XEXP (op0, 0);	/* index + base + disp */
          base = XEXP (op0, 1);
          disp = op1;
        }

      else
        {
          return FALSE;
        }
    }

  else
    disp = addr;		/* displacement */


  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
        {
          if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
            return FALSE;
          base = XVECEXP (base, 0, 0);
          pointer = TRUE;
        }

      if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
        return FALSE;

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
          || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
        return FALSE;

      if (REGNO (base) == BASE_REGISTER
          || REGNO (base) == STACK_POINTER_REGNUM
          || REGNO (base) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
          || (flag_pic
              && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
        pointer = TRUE;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
        {
          if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
            return FALSE;
          indx = XVECEXP (indx, 0, 0);
          pointer = TRUE;
        }

      if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
        return FALSE;

      if ((strict && ! REG_OK_FOR_BASE_STRICT_P (indx))
          || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (indx)))
        return FALSE;

      if (REGNO (indx) == BASE_REGISTER
          || REGNO (indx) == STACK_POINTER_REGNUM
          || REGNO (indx) == FRAME_POINTER_REGNUM
          || ((reload_completed || reload_in_progress)
              && frame_pointer_needed
              && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
          || (flag_pic
              && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
        pointer = TRUE;
    }

  /* Validate displacement.  */
  if (disp)
    {
      /* Allow integer constant in range.  */
      if (GET_CODE (disp) == CONST_INT)
        {
          if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
            return FALSE;
        }

      /* In the small-PIC case, the linker converts @GOT12
         offsets to possible displacements.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == UNSPEC
               && XINT (XEXP (disp, 0), 1) == 110)
        {
          if (flag_pic != 1)
            return FALSE;

          pointer = TRUE;
        }

      /* Accept chunkified literal pool symbol references.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == MINUS
               && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
        {
          pointer = TRUE;
        }

      /* Likewise if a constant offset is present.  */
      else if (GET_CODE (disp) == CONST
               && GET_CODE (XEXP (disp, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
               && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
               && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
               && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
        {
          pointer = TRUE;
        }

      /* We can convert literal pool addresses to
         displacements by basing them off the base register.  */
      else
        {
          /* In some cases, we can accept an additional
             small constant offset.  Split these off here.  */

          unsigned int offset = 0;

          if (GET_CODE (disp) == CONST
              && GET_CODE (XEXP (disp, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
            {
              offset = INTVAL (XEXP (XEXP (disp, 0), 1));
              disp = XEXP (XEXP (disp, 0), 0);
            }

          /* Now we must have a literal pool address.  */
          if (GET_CODE (disp) != SYMBOL_REF
              || !CONSTANT_POOL_ADDRESS_P (disp))
            return FALSE;

          /* In 64-bit PIC mode we cannot accept symbolic
             constants in the constant pool.  */
          if (TARGET_64BIT && flag_pic
              && SYMBOLIC_CONST (get_pool_constant (disp)))
            return FALSE;

          /* If we have an offset, make sure it does not
             exceed the size of the constant pool entry.  */
          if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
            return FALSE;

          /* Either base or index must be free to
             hold the base register.  */
          if (base && indx)
            return FALSE;

          /* Convert the address.  */
          if (base)
            indx = gen_rtx_REG (Pmode, BASE_REGISTER);
          else
            base = gen_rtx_REG (Pmode, BASE_REGISTER);

          disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
          disp = gen_rtx_CONST (Pmode, disp);

          if (offset)
            disp = plus_constant (disp, offset);

          pointer = TRUE;
        }
    }

  if (!base && !indx)
    pointer = TRUE;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = disp;
      out->pointer = pointer;
    }

  return TRUE;
}
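
/* Note that only the canonical nesting is recognized: an address
   like (plus (plus (reg %r1) (reg %r2)) (const_int 100)) decomposes,
   whereas the equivalent (plus (reg %r2) (plus (reg %r1)
   (const_int 100))) is rejected and must first be rewritten by
   LEGITIMIZE_ADDRESS.  (Illustrative example.)  */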

/* Return nonzero if ADDR is a valid memory address.
   STRICT specifies whether strict register checking applies.  */

int
legitimate_address_p (mode, addr, strict)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     register rtx addr;
     int strict;
{
  return s390_decompose_address (addr, NULL, strict);
}

/* Return 1 if OP is a valid operand for the LA instruction.
   In 31-bit, we need to prove that the result is used as an
   address, as LA performs only a 31-bit addition.  */

int
legitimate_la_operand_p (op)
     register rtx op;
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr, FALSE))
    return FALSE;

  if (TARGET_64BIT || addr.pointer)
    return TRUE;

  return FALSE;
}

/* Return a modified variant of OP that is guaranteed to
   be accepted by legitimate_la_operand_p.  */

rtx
legitimize_la_operand (op)
     register rtx op;
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr, FALSE))
    abort ();

  if (TARGET_64BIT || addr.pointer)
    return op;

  if (!addr.base)
    abort ();

  op = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr.base), 101);
  if (addr.indx)
    op = gen_rtx_PLUS (Pmode, op, addr.indx);
  if (addr.disp)
    op = gen_rtx_PLUS (Pmode, op, addr.disp);

  return op;
}

/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      returned.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_REF_FLAG set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
   reg also appears in the address.  */

rtx
legitimize_pic_address (orig, reg)
     rtx orig;
     rtx reg;
{
  rtx addr = orig;
  rtx new = orig;
  rtx base;

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF
          && (SYMBOL_REF_FLAG (addr)
              || CONSTANT_POOL_ADDRESS_P (addr))))
    {
      /* This is a local symbol.  */
      if (TARGET_64BIT)
        {
          /* Access local symbols PC-relative via LARL.
             This is the same as in the non-PIC case, so it is
             handled automatically ...  */
        }
      else
        {
          /* Access local symbols relative to the literal pool.  */

          rtx temp = reg? reg : gen_reg_rtx (Pmode);

          addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
          addr = gen_rtx_CONST (SImode, addr);
          addr = force_const_mem (SImode, addr);
          emit_move_insn (temp, addr);

          base = gen_rtx_REG (Pmode, BASE_REGISTER);
          base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
          new = gen_rtx_PLUS (Pmode, base, temp);

          if (reg != 0)
            {
              emit_move_insn (reg, new);
              new = reg;
            }
        }
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      if (reg == 0)
        reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
        {
          /* Assume GOT offset < 4k.  This is handled the same way
             in both 31- and 64-bit code (@GOT12).  */

          current_function_uses_pic_offset_table = 1;

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
          new = gen_rtx_CONST (Pmode, new);
          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else if (TARGET_64BIT)
        {
          /* If the GOT offset might be >= 4k, we determine the position
             of the GOT entry via a PC-relative LARL (@GOTENT).  */

          rtx temp = gen_reg_rtx (Pmode);

          new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
          new = gen_rtx_CONST (Pmode, new);
          emit_move_insn (temp, new);

          new = gen_rtx_MEM (Pmode, temp);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
      else
        {
          /* If the GOT offset might be >= 4k, we have to load it
             from the literal pool (@GOT).  */

          rtx temp = gen_reg_rtx (Pmode);

          current_function_uses_pic_offset_table = 1;

          addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
          addr = gen_rtx_CONST (SImode, addr);
          addr = force_const_mem (SImode, addr);
          emit_move_insn (temp, addr);

          new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
          new = gen_rtx_MEM (Pmode, new);
          RTX_UNCHANGING_P (new) = 1;
          emit_move_insn (reg, new);
          new = reg;
        }
    }
  else
    {
      if (GET_CODE (addr) == CONST)
        {
          addr = XEXP (addr, 0);
          if (GET_CODE (addr) == UNSPEC)
            {
              if (XVECLEN (addr, 0) != 1)
                abort ();
              switch (XINT (addr, 1))
                {
                  /* If someone moved an @GOT or lt-relative UNSPEC
                     out of the literal pool, force them back in.  */
                  case 100:
                  case 112:
                  case 114:
                    new = force_const_mem (SImode, orig);
                    break;

                  /* @GOTENT is OK as is.  */
                  case 111:
                    break;

                  /* @PLT is OK as is on 64-bit, must be converted to
                     lt-relative PLT on 31-bit.  */
                  case 113:
                    if (!TARGET_64BIT)
                      {
                        rtx temp = reg? reg : gen_reg_rtx (Pmode);

                        addr = XVECEXP (addr, 0, 0);
                        addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
                        addr = gen_rtx_CONST (SImode, addr);
                        addr = force_const_mem (SImode, addr);
                        emit_move_insn (temp, addr);

                        base = gen_rtx_REG (Pmode, BASE_REGISTER);
                        base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
                        new = gen_rtx_PLUS (Pmode, base, temp);

                        if (reg != 0)
                          {
                            emit_move_insn (reg, new);
                            new = reg;
                          }
                      }
                    break;

                  /* Everything else cannot happen.  */
                  default:
                    abort ();
                }
            }
          else if (GET_CODE (addr) != PLUS)
            abort ();
        }
      if (GET_CODE (addr) == PLUS)
        {
          rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
          /* Check first to see if this is a constant offset
             from a local symbol reference.  */
          if ((GET_CODE (op0) == LABEL_REF
               || (GET_CODE (op0) == SYMBOL_REF
                   && (SYMBOL_REF_FLAG (op0)
                       || CONSTANT_POOL_ADDRESS_P (op0))))
              && GET_CODE (op1) == CONST_INT)
            {
              if (TARGET_64BIT)
                {
                  if (INTVAL (op1) & 1)
                    {
                      /* LARL can't handle odd offsets, so emit a
                         pair of LARL and LA.  */
                      rtx temp = reg? reg : gen_reg_rtx (Pmode);

                      if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
                        {
                          int even = INTVAL (op1) - 1;
                          op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
                          op0 = gen_rtx_CONST (Pmode, op0);
                          op1 = GEN_INT (1);
                        }

                      emit_move_insn (temp, op0);
                      new = gen_rtx_PLUS (Pmode, temp, op1);

                      if (reg != 0)
                        {
                          emit_move_insn (reg, new);
                          new = reg;
                        }
                    }
                  else
                    {
                      /* If the offset is even, we can just use LARL.
                         This will happen automatically.  */
                    }
                }
              else
                {
                  /* Access local symbols relative to the literal pool.  */

                  rtx temp = reg? reg : gen_reg_rtx (Pmode);

                  addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
                  addr = gen_rtx_PLUS (SImode, addr, op1);
                  addr = gen_rtx_CONST (SImode, addr);
                  addr = force_const_mem (SImode, addr);
                  emit_move_insn (temp, addr);

                  base = gen_rtx_REG (Pmode, BASE_REGISTER);
                  base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
                  new = gen_rtx_PLUS (Pmode, base, temp);

                  if (reg != 0)
                    {
                      emit_move_insn (reg, new);
                      new = reg;
                    }
                }
            }

          /* Now, check whether it is an LT-relative symbol plus offset
             that was pulled out of the literal pool.  Force it back in.  */

          else if (GET_CODE (op0) == UNSPEC
                   && GET_CODE (op1) == CONST_INT)
            {
              if (XVECLEN (op0, 0) != 1)
                abort ();
              if (XINT (op0, 1) != 100)
                abort ();

              new = force_const_mem (SImode, orig);
            }

          /* Otherwise, compute the sum.  */
          else
            {
              base = legitimize_pic_address (XEXP (addr, 0), reg);
              new = legitimize_pic_address (XEXP (addr, 1),
                                            base == reg ? NULL_RTX : reg);
              if (GET_CODE (new) == CONST_INT)
                new = plus_constant (base, INTVAL (new));
              else
                {
                  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
                    {
                      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
                      new = XEXP (new, 1);
                    }
                  new = gen_rtx_PLUS (Pmode, base, new);
                }

              if (GET_CODE (new) == CONST)
                new = XEXP (new, 0);
              new = force_operand (new, 0);
            }
        }
    }
  return new;
}
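
/* As an example of the flag_pic == 1 case above, a global symbol FOO
   is accessed through

      (mem (plus (reg %r12) (const (unspec [(symbol_ref "FOO")] 110))))

   i.e. a load from the GOT slot addressed by a @GOT12 displacement
   off the PIC register.  (Illustrative example; %r12 is the PIC
   register on s390.)  */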
2038
2039 /* Emit insns to move operands[1] into operands[0]. */
2040
2041 void
2042 emit_pic_move (operands, mode)
2043 rtx *operands;
2044 enum machine_mode mode ATTRIBUTE_UNUSED;
2045 {
2046 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2047
2048 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2049 operands[1] = force_reg (Pmode, operands[1]);
2050 else
2051 operands[1] = legitimize_pic_address (operands[1], temp);
2052 }
2053
2054 /* Try machine-dependent ways of modifying an illegitimate address X
2055 to be legitimate. If we find one, return the new, valid address.
2056
2057 OLDX is the address as it was before break_out_memory_refs was called.
2058 In some cases it is useful to look at this to decide what needs to be done.
2059
2060 MODE is the mode of the operand pointed to by X.
2061
2062 When -fpic is used, special handling is needed for symbolic references.
2063 See comments by legitimize_pic_address for details. */
2064
2065 rtx
2066 legitimize_address (x, oldx, mode)
2067 register rtx x;
2068 register rtx oldx ATTRIBUTE_UNUSED;
2069 enum machine_mode mode ATTRIBUTE_UNUSED;
2070 {
2071 rtx constant_term = const0_rtx;
2072
2073 if (flag_pic)
2074 {
2075 if (SYMBOLIC_CONST (x)
2076 || (GET_CODE (x) == PLUS
2077 && (SYMBOLIC_CONST (XEXP (x, 0))
2078 || SYMBOLIC_CONST (XEXP (x, 1)))))
2079 x = legitimize_pic_address (x, 0);
2080
2081 if (legitimate_address_p (mode, x, FALSE))
2082 return x;
2083 }
2084
2085 x = eliminate_constant_term (x, &constant_term);
2086
2087 if (GET_CODE (x) == PLUS)
2088 {
2089 if (GET_CODE (XEXP (x, 0)) == REG)
2090 {
2091 register rtx temp = gen_reg_rtx (Pmode);
2092 register rtx val = force_operand (XEXP (x, 1), temp);
2093 if (val != temp)
2094 emit_move_insn (temp, val);
2095
2096 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2097 }
2098
2099 else if (GET_CODE (XEXP (x, 1)) == REG)
2100 {
2101 register rtx temp = gen_reg_rtx (Pmode);
2102 register rtx val = force_operand (XEXP (x, 0), temp);
2103 if (val != temp)
2104 emit_move_insn (temp, val);
2105
2106 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2107 }
2108 }
2109
2110 if (constant_term != const0_rtx)
2111 x = gen_rtx_PLUS (Pmode, x, constant_term);
2112
2113 return x;
2114 }
2115
2116 /* In the name of slightly smaller debug output, and to cater to
2117 general assembler lossage, recognize various UNSPEC sequences
2118 and turn them back into a direct symbol reference. */
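/* For example, a GOT-style reference such as
     (mem (plus (reg PIC_OFFSET_TABLE_REGNUM) (const (unspec [SYM] 110))))
   or (mem (const (unspec [SYM] 111)))
   is simplified back to plain SYM by the code below.  */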
2119
2120 rtx
2121 s390_simplify_dwarf_addr (orig_x)
2122 rtx orig_x;
2123 {
2124 rtx x = orig_x, y;
2125
2126 if (GET_CODE (x) != MEM)
2127 return orig_x;
2128
2129 x = XEXP (x, 0);
2130 if (GET_CODE (x) == PLUS
2131 && GET_CODE (XEXP (x, 1)) == CONST
2132 && GET_CODE (XEXP (x, 0)) == REG
2133 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
2134 {
2135 y = XEXP (XEXP (x, 1), 0);
2136 if (GET_CODE (y) == UNSPEC
2137 && XINT (y, 1) == 110)
2138 return XVECEXP (y, 0, 0);
2139 return orig_x;
2140 }
2141
2142 if (GET_CODE (x) == CONST)
2143 {
2144 y = XEXP (x, 0);
2145 if (GET_CODE (y) == UNSPEC
2146 && XINT (y, 1) == 111)
2147 return XVECEXP (y, 0, 0);
2148 return orig_x;
2149 }
2150
2151 return orig_x;
2152 }
2153
2154 /* Output symbolic constant X in assembler syntax to
2155 stdio stream FILE. */
2156
2157 void
2158 s390_output_symbolic_const (file, x)
2159 FILE *file;
2160 rtx x;
2161 {
2162 switch (GET_CODE (x))
2163 {
2164 case CONST:
2165 case ZERO_EXTEND:
2166 case SIGN_EXTEND:
2167 s390_output_symbolic_const (file, XEXP (x, 0));
2168 break;
2169
2170 case PLUS:
2171 s390_output_symbolic_const (file, XEXP (x, 0));
2172 fprintf (file, "+");
2173 s390_output_symbolic_const (file, XEXP (x, 1));
2174 break;
2175
2176 case MINUS:
2177 s390_output_symbolic_const (file, XEXP (x, 0));
2178 fprintf (file, "-");
2179 s390_output_symbolic_const (file, XEXP (x, 1));
2180 break;
2181
2182 case CONST_INT:
2183 case LABEL_REF:
2184 case CODE_LABEL:
2185 case SYMBOL_REF:
2186 output_addr_const (file, x);
2187 break;
2188
2189 case UNSPEC:
2190 if (XVECLEN (x, 0) != 1)
2191 output_operand_lossage ("invalid UNSPEC as operand (1)");
2192 switch (XINT (x, 1))
2193 {
2194 case 100:
2195 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2196 fprintf (file, "-.LT%X", s390_function_count);
2197 break;
2198 case 110:
2199 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2200 fprintf (file, "@GOT12");
2201 break;
2202 case 111:
2203 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2204 fprintf (file, "@GOTENT");
2205 break;
2206 case 112:
2207 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2208 fprintf (file, "@GOT");
2209 break;
2210 case 113:
2211 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2212 fprintf (file, "@PLT");
2213 break;
2214 case 114:
2215 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2216 fprintf (file, "@PLT-.LT%X", s390_function_count);
2217 break;
2218 default:
2219 output_operand_lossage ("invalid UNSPEC as operand (2)");
2220 break;
2221 }
2222 break;
2223
2224 default:
2225 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
2226 break;
2227 }
2228 }
2229
2230 /* Output address operand ADDR in assembler syntax to
2231 stdio stream FILE. */
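/* The output uses the canonical S/390 D(X,B) form: e.g. "8(%r1,%r15)"
   for displacement 8, index register %r1 and base register %r15, or
   just "8(%r15)" when no index register is present.  */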
2232
2233 void
2234 print_operand_address (file, addr)
2235 FILE *file;
2236 rtx addr;
2237 {
2238 struct s390_address ad;
2239
2240 if (!s390_decompose_address (addr, &ad, TRUE))
2241 output_operand_lossage ("Cannot decompose address.");
2242
2243 if (ad.disp)
2244 s390_output_symbolic_const (file, ad.disp);
2245 else
2246 fprintf (file, "0");
2247
2248 if (ad.base && ad.indx)
2249 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
2250 reg_names[REGNO (ad.base)]);
2251 else if (ad.base)
2252 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
2253 }
2254
2255 /* Output operand X in assembler syntax to stdio stream FILE.
2256 CODE specifies the format flag. The following format flags
2257 are recognized:
2258
2259 'C': print opcode suffix for branch condition.
2260 'D': print opcode suffix for inverse branch condition.
2261 'O': print only the displacement of a memory reference.
2262 'R': print only the base register of a memory reference.
2263 'N': print the second word of a DImode operand.
2264 'M': print the second word of a TImode operand.
2265
2266 'b': print integer X as if it's an unsigned byte.
2267 'x': print integer X as if it's an unsigned word.
2268 'h': print integer X as if it's a signed word. */
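/* As an illustration of the integer flags: for X = 0xfffe, 'x' prints
   65534 while 'h' prints -2, because the 'h' case sign-extends the
   low 16 bits via ((0xfffe & 0xffff) ^ 0x8000) - 0x8000 == -2.  */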
2269
2270 void
2271 print_operand (file, x, code)
2272 FILE *file;
2273 rtx x;
2274 int code;
2275 {
2276 switch (code)
2277 {
2278 case 'C':
2279 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
2280 return;
2281
2282 case 'D':
2283 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
2284 return;
2285
2286 case 'O':
2287 {
2288 struct s390_address ad;
2289
2290 if (GET_CODE (x) != MEM
2291 || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
2292 || ad.indx)
2293 abort ();
2294
2295 if (ad.disp)
2296 s390_output_symbolic_const (file, ad.disp);
2297 else
2298 fprintf (file, "0");
2299 }
2300 return;
2301
2302 case 'R':
2303 {
2304 struct s390_address ad;
2305
2306 if (GET_CODE (x) != MEM
2307 || !s390_decompose_address (XEXP (x, 0), &ad, TRUE)
2308 || ad.indx)
2309 abort ();
2310
2311 if (ad.base)
2312 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
2313 else
2314 fprintf (file, "0");
2315 }
2316 return;
2317
2318 case 'N':
2319 if (GET_CODE (x) == REG)
2320 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2321 else if (GET_CODE (x) == MEM)
2322 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
2323 else
2324 abort ();
2325 break;
2326
2327 case 'M':
2328 if (GET_CODE (x) == REG)
2329 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2330 else if (GET_CODE (x) == MEM)
2331 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
2332 else
2333 abort ();
2334 break;
2335 }
2336
2337 switch (GET_CODE (x))
2338 {
2339 case REG:
2340 fprintf (file, "%s", reg_names[REGNO (x)]);
2341 break;
2342
2343 case MEM:
2344 output_address (XEXP (x, 0));
2345 break;
2346
2347 case CONST:
2348 case CODE_LABEL:
2349 case LABEL_REF:
2350 case SYMBOL_REF:
2351 s390_output_symbolic_const (file, x);
2352 break;
2353
2354 case CONST_INT:
2355 if (code == 'b')
2356 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
2357 else if (code == 'x')
2358 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
2359 else if (code == 'h')
2360 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
2361 else
2362 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2363 break;
2364
2365 case CONST_DOUBLE:
2366 if (GET_MODE (x) != VOIDmode)
2367 abort ();
2368 if (code == 'b')
2369 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
2370 else if (code == 'x')
2371 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
2372 else if (code == 'h')
2373 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
2374 else
2375 abort ();
2376 break;
2377
2378 default:
2379 fatal_insn ("UNKNOWN in print_operand !?", x);
2380 break;
2381 }
2382 }
2383
2384 /* Target hook for assembling integer objects. We need to define it
2385 here to work around a bug in some versions of GAS, which couldn't
2386 handle values smaller than INT_MIN when printed in decimal. */
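/* For example, the DImode constant -0x80000001 is emitted here as

       .quad   0xffffffff7fffffff

   in hexadecimal instead of going through the GAS decimal parser.  */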
2387
2388 static bool
2389 s390_assemble_integer (x, size, aligned_p)
2390 rtx x;
2391 unsigned int size;
2392 int aligned_p;
2393 {
2394 if (size == 8 && aligned_p
2395 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
2396 {
2397 fputs ("\t.quad\t", asm_out_file);
2398 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2399 putc ('\n', asm_out_file);
2400 return true;
2401 }
2402 return default_assemble_integer (x, size, aligned_p);
2403 }
2404
2405
2406 #define DEBUG_SCHED 0
2407
2408 /* Returns true if register REGNO is used for forming
2409 a memory address in expression X. */
2410
2411 static int
2412 reg_used_in_mem_p (regno, x)
2413 int regno;
2414 rtx x;
2415 {
2416 enum rtx_code code = GET_CODE (x);
2417 int i, j;
2418 const char *fmt;
2419
2420 if (code == MEM)
2421 {
2422 if (refers_to_regno_p (regno, regno+1,
2423 XEXP (x, 0), 0))
2424 return 1;
2425 }
2426 else if (code == SET
2427 && GET_CODE (SET_DEST (x)) == PC)
2428 {
2429 if (refers_to_regno_p (regno, regno+1,
2430 SET_SRC (x), 0))
2431 return 1;
2432 }
2433
2434 fmt = GET_RTX_FORMAT (code);
2435 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2436 {
2437 if (fmt[i] == 'e'
2438 && reg_used_in_mem_p (regno, XEXP (x, i)))
2439 return 1;
2440
2441 else if (fmt[i] == 'E')
2442 for (j = 0; j < XVECLEN (x, i); j++)
2443 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2444 return 1;
2445 }
2446 return 0;
2447 }
2448
2449 /* Returns true if expression DEP_RTX sets an address register
2450 used by instruction INSN to address memory. */
2451
2452 static int
2453 addr_generation_dependency_p (dep_rtx, insn)
2454 rtx dep_rtx;
2455 rtx insn;
2456 {
2457 rtx target, pat;
2458
2459 if (GET_CODE (dep_rtx) == SET)
2460 {
2461 target = SET_DEST (dep_rtx);
2462
2463 if (GET_CODE (target) == REG)
2464 {
2465 int regno = REGNO (target);
2466
2467 if (get_attr_type (insn) == TYPE_LA)
2468 {
2469 pat = PATTERN (insn);
2470 if (GET_CODE (pat) == PARALLEL)
2471 {
2472 if (XVECLEN (pat, 0) != 2)
2473 abort();
2474 pat = XVECEXP (pat, 0, 0);
2475 }
2476 if (GET_CODE (pat) == SET)
2477 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
2478 else
2479 abort();
2480 }
2481 else if (get_attr_atype (insn) == ATYPE_MEM)
2482 return reg_used_in_mem_p (regno, PATTERN (insn));
2483 }
2484 }
2485 return 0;
2486 }
2487
2488
2489 /* Return the modified cost of the dependency of instruction INSN
2490 on instruction DEP_INSN through the link LINK. COST is the
2491 default cost of that dependency.
2492
2493 Data dependencies are all handled without delay. However, if a
2494 register is modified and subsequently used as base or index
2495 register of a memory reference, at least 4 cycles need to pass
2496 between setting and using the register to avoid pipeline stalls.
2497 An exception is the LA instruction. An address generated by LA can
2498 be used with only a one-cycle stall in the pipeline. */
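/* For instance, in

       lr   %r1,%r2
       l    %r3,0(%r1)

   the load uses %r1 as base register, so the code below raises the
   dependency cost by 4 cycles; had %r1 been computed by an "la"
   instruction, only a single extra cycle would be added.  */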
2499
2500 static int
2501 s390_adjust_cost (insn, link, dep_insn, cost)
2502 rtx insn;
2503 rtx link;
2504 rtx dep_insn;
2505 int cost;
2506 {
2507 rtx dep_rtx;
2508 int i;
2509
2510 /* If the dependence is an anti-dependence, there is no cost. For an
2511 output dependence, there is sometimes a cost, but it doesn't seem
2512 worth handling those few cases. */
2513
2514 if (REG_NOTE_KIND (link) != 0)
2515 return 0;
2516
2517 /* If we can't recognize the insns, we can't really do anything. */
2518 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
2519 return cost;
2520
2521 dep_rtx = PATTERN (dep_insn);
2522
2523 if (GET_CODE (dep_rtx) == SET)
2524 {
2525 if (addr_generation_dependency_p (dep_rtx, insn))
2526 {
2527 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2528 if (DEBUG_SCHED)
2529 {
2530 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
2531 cost);
2532 debug_rtx (dep_insn);
2533 debug_rtx (insn);
2534 }
2535 }
2536 }
2537 else if (GET_CODE (dep_rtx) == PARALLEL)
2538 {
2539 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
2540 {
2541 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
2542 insn))
2543 {
2544 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2545 if (DEBUG_SCHED)
2546 {
2547 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
2548 ,cost);
2549 debug_rtx (dep_insn);
2550 debug_rtx (insn);
2551 }
2552 }
2553 }
2554 }
2555
2556 return cost;
2557 }
2558
2559
2560 /* A C statement (sans semicolon) to update the integer scheduling priority
2561 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2562 increase the priority to execute INSN later. Do not define this macro if
2563 you do not need to adjust the scheduling priorities of insns.
2564
2565 An LA instruction may be scheduled later, since the pipeline bypasses the
2566 calculated value. */
2567
2568 static int
2569 s390_adjust_priority (insn, priority)
2570 rtx insn;
2571 int priority;
2572 {
2573 if (! INSN_P (insn))
2574 return priority;
2575
2576 if (GET_CODE (PATTERN (insn)) == USE
2577 || GET_CODE (PATTERN (insn)) == CLOBBER)
2578 return priority;
2579
2580 switch (get_attr_type (insn))
2581 {
2582 default:
2583 break;
2584
2585 case TYPE_LA:
2586 if (priority >= 0 && priority < 0x01000000)
2587 priority <<= 3;
2588 break;
2589 case TYPE_LM:
2590 /* An LM in the epilogue should never be scheduled. This
2591 is due to literal accesses done in the function body:
2592 the use of register 13 is not mentioned explicitly in
2593 the RTL, which would allow the 'LM' to be scheduled
2594 across those instructions. */
2595 priority = 0x7fffffff;
2596 break;
2597 }
2598
2599 return priority;
2600 }
2601
2602
2603 /* Split all branches that exceed the maximum distance. */
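/* On 31-bit targets the relative branch instructions take a signed
   16-bit halfword offset, i.e. a range of +-64KB; a branch beyond
   that range is rewritten below into an indirect jump through a
   register loaded from the literal pool.  */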
2604
2605 static void
2606 s390_split_branches ()
2607 {
2608 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
2609 rtx insn, pat, label, target, jump, tmp;
2610
2611 /* In 64-bit mode we can jump +- 4GB. */
2612
2613 if (TARGET_64BIT)
2614 return;
2615
2616 /* Find all branches that exceed 64KB, and split them. */
2617
2618 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2619 {
2620 if (GET_CODE (insn) != JUMP_INSN)
2621 continue;
2622
2623 pat = PATTERN (insn);
2624 if (GET_CODE (pat) != SET)
2625 continue;
2626
2627 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
2628 {
2629 label = SET_SRC (pat);
2630 }
2631 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
2632 {
2633 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
2634 label = XEXP (SET_SRC (pat), 1);
2635 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
2636 label = XEXP (SET_SRC (pat), 2);
2637 else
2638 continue;
2639 }
2640 else
2641 continue;
2642
2643 if (get_attr_length (insn) == 4)
2644 continue;
2645
2646 if (flag_pic)
2647 {
2648 target = gen_rtx_UNSPEC (SImode, gen_rtvec (1, label), 100);
2649 target = gen_rtx_CONST (SImode, target);
2650 target = force_const_mem (SImode, target);
2651 jump = gen_rtx_REG (Pmode, BASE_REGISTER);
2652 jump = gen_rtx_PLUS (Pmode, jump, temp_reg);
2653 }
2654 else
2655 {
2656 target = force_const_mem (Pmode, label);
2657 jump = temp_reg;
2658 }
2659
2660 if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
2661 {
2662 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
2663 jump = gen_rtx_IF_THEN_ELSE (VOIDmode, XEXP (SET_SRC (pat), 0),
2664 jump, pc_rtx);
2665 else
2666 jump = gen_rtx_IF_THEN_ELSE (VOIDmode, XEXP (SET_SRC (pat), 0),
2667 pc_rtx, jump);
2668 }
2669
2670 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
2671 INSN_ADDRESSES_NEW (tmp, -1);
2672
2673 tmp = emit_jump_insn_before (gen_rtx_SET (VOIDmode, pc_rtx, jump), insn);
2674 INSN_ADDRESSES_NEW (tmp, -1);
2675
2676 remove_insn (insn);
2677 insn = tmp;
2678 }
2679 }
2680
2681
2682 /* Find a literal pool symbol referenced in RTX X, and store
2683 it at REF. Will abort if X contains references to more than
2684 one such pool symbol; multiple references to the same symbol
2685 are allowed, however.
2686
2687 The rtx pointed to by REF must be initialized to NULL_RTX
2688 by the caller before calling this routine. */
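/* Typical use, as in s390_chunkify_pool below:

     rtx pool_ref = NULL_RTX;
     find_constant_pool_ref (PATTERN (insn), &pool_ref);
     if (pool_ref)
       ... replace the reference ...  */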
2689
2690 static void
2691 find_constant_pool_ref (x, ref)
2692 rtx x;
2693 rtx *ref;
2694 {
2695 int i, j;
2696 const char *fmt;
2697
2698 if (GET_CODE (x) == SYMBOL_REF
2699 && CONSTANT_POOL_ADDRESS_P (x))
2700 {
2701 if (*ref == NULL_RTX)
2702 *ref = x;
2703 else if (*ref != x)
2704 abort();
2705 }
2706
2707 fmt = GET_RTX_FORMAT (GET_CODE (x));
2708 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2709 {
2710 if (fmt[i] == 'e')
2711 {
2712 find_constant_pool_ref (XEXP (x, i), ref);
2713 }
2714 else if (fmt[i] == 'E')
2715 {
2716 for (j = 0; j < XVECLEN (x, i); j++)
2717 find_constant_pool_ref (XVECEXP (x, i, j), ref);
2718 }
2719 }
2720 }
2721
2722 /* Replace every reference to the literal pool symbol REF
2723 in X by the address ADDR. Fix up MEMs as required. */
2724
2725 static void
2726 replace_constant_pool_ref (x, ref, addr)
2727 rtx *x;
2728 rtx ref;
2729 rtx addr;
2730 {
2731 int i, j;
2732 const char *fmt;
2733
2734 if (*x == ref)
2735 abort ();
2736
2737 /* Literal pool references can only occur inside a MEM ... */
2738 if (GET_CODE (*x) == MEM)
2739 {
2740 rtx memref = XEXP (*x, 0);
2741
2742 if (memref == ref)
2743 {
2744 *x = replace_equiv_address (*x, addr);
2745 return;
2746 }
2747
2748 if (GET_CODE (memref) == CONST
2749 && GET_CODE (XEXP (memref, 0)) == PLUS
2750 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
2751 && XEXP (XEXP (memref, 0), 0) == ref)
2752 {
2753 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
2754 *x = replace_equiv_address (*x, plus_constant (addr, off));
2755 return;
2756 }
2757 }
2758
2759 /* ... or a load-address type pattern. */
2760 if (GET_CODE (*x) == SET)
2761 {
2762 rtx addrref = SET_SRC (*x);
2763
2764 if (addrref == ref)
2765 {
2766 SET_SRC (*x) = addr;
2767 return;
2768 }
2769
2770 if (GET_CODE (addrref) == CONST
2771 && GET_CODE (XEXP (addrref, 0)) == PLUS
2772 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
2773 && XEXP (XEXP (addrref, 0), 0) == ref)
2774 {
2775 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
2776 SET_SRC (*x) = plus_constant (addr, off);
2777 return;
2778 }
2779 }
2780
2781 fmt = GET_RTX_FORMAT (GET_CODE (*x));
2782 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
2783 {
2784 if (fmt[i] == 'e')
2785 {
2786 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
2787 }
2788 else if (fmt[i] == 'E')
2789 {
2790 for (j = 0; j < XVECLEN (*x, i); j++)
2791 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
2792 }
2793 }
2794 }
2795
2796 /* We keep a list of constants which we have to add to internal
2797 constant tables in the middle of large functions. */
2798
2799 #define NR_C_MODES 6
2800 enum machine_mode constant_modes[NR_C_MODES] =
2801 {
2802 DFmode, DImode,
2803 SFmode, SImode,
2804 HImode,
2805 QImode
2806 };
2807
2808 rtx (*gen_consttable[NR_C_MODES])(rtx) =
2809 {
2810 gen_consttable_df, gen_consttable_di,
2811 gen_consttable_sf, gen_consttable_si,
2812 gen_consttable_hi,
2813 gen_consttable_qi
2814 };
2815
2816 struct constant
2817 {
2818 struct constant *next;
2819 rtx value;
2820 rtx label;
2821 };
2822
2823 struct constant_pool
2824 {
2825 struct constant_pool *next;
2826 rtx first_insn;
2827 rtx last_insn;
2828
2829 struct constant *constants[NR_C_MODES];
2830 rtx label;
2831 int size;
2832 };
2833
2834 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
2835 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
2836 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
2837 static rtx s390_add_pool PARAMS ((struct constant_pool *, rtx, enum machine_mode));
2838 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
2839 static void s390_free_pool PARAMS ((struct constant_pool *));
2840
2841 /* Create new constant pool covering instructions starting at INSN
2842 and chain it to the end of POOL_LIST. */
2843
2844 static struct constant_pool *
2845 s390_start_pool (pool_list, insn)
2846 struct constant_pool **pool_list;
2847 rtx insn;
2848 {
2849 struct constant_pool *pool, **prev;
2850 int i;
2851
2852 pool = (struct constant_pool *) xmalloc (sizeof *pool);
2853 pool->next = NULL;
2854 for (i = 0; i < NR_C_MODES; i++)
2855 pool->constants[i] = NULL;
2856
2857 pool->label = gen_label_rtx ();
2858 pool->first_insn = insn;
2859 pool->last_insn = NULL_RTX;
2860 pool->size = 0;
2861
2862 for (prev = pool_list; *prev; prev = &(*prev)->next)
2863 ;
2864 *prev = pool;
2865
2866 return pool;
2867 }
2868
2869 /* End range of instructions covered by POOL at INSN. */
2870
2871 static void
2872 s390_end_pool (pool, insn)
2873 struct constant_pool *pool;
2874 rtx insn;
2875 {
2876 pool->last_insn = insn;
2877 }
2878
2879 /* Return the pool from POOL_LIST that covers INSN. */
2880
2881 static struct constant_pool *
2882 s390_find_pool (pool_list, insn)
2883 struct constant_pool *pool_list;
2884 rtx insn;
2885 {
2886 int addr = INSN_ADDRESSES (INSN_UID (insn));
2887 struct constant_pool *pool;
2888
2889 if (addr == -1)
2890 return NULL;
2891
2892 for (pool = pool_list; pool; pool = pool->next)
2893 if (INSN_ADDRESSES (INSN_UID (pool->first_insn)) <= addr
2894 && (pool->last_insn == NULL_RTX
2895 || INSN_ADDRESSES (INSN_UID (pool->last_insn)) > addr))
2896 break;
2897
2898 return pool;
2899 }
2900
2901 /* Add constant VAL of mode MODE to the constant pool POOL.
2902 Return an RTX describing the distance from the start of
2903 the pool to the location of the new constant. */
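/* The returned offset is a label difference of the form
     (const (minus (label_ref C_LABEL) (label_ref POOL_LABEL)))
   which the assembler resolves once the pool chunk is dumped.  */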
2904
2905 static rtx
2906 s390_add_pool (pool, val, mode)
2907 struct constant_pool *pool;
2908 rtx val;
2909 enum machine_mode mode;
2910 {
2911 struct constant *c;
2912 rtx offset;
2913 int i;
2914
2915 for (i = 0; i < NR_C_MODES; i++)
2916 if (constant_modes[i] == mode)
2917 break;
2918 if (i == NR_C_MODES)
2919 abort ();
2920
2921 for (c = pool->constants[i]; c != NULL; c = c->next)
2922 if (rtx_equal_p (val, c->value))
2923 break;
2924
2925 if (c == NULL)
2926 {
2927 c = (struct constant *) xmalloc (sizeof *c);
2928 c->value = val;
2929 c->label = gen_label_rtx ();
2930 c->next = pool->constants[i];
2931 pool->constants[i] = c;
2932 pool->size += GET_MODE_SIZE (mode);
2933 }
2934
2935 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
2936 gen_rtx_LABEL_REF (Pmode, pool->label));
2937 offset = gen_rtx_CONST (Pmode, offset);
2938 return offset;
2939 }
2940
2941 /* Dump out the constants in POOL. */
2942
2943 static rtx
2944 s390_dump_pool (pool)
2945 struct constant_pool *pool;
2946 {
2947 struct constant *c;
2948 rtx insn;
2949 int i;
2950
2951 /* Select location to put literal pool. */
2952 if (TARGET_64BIT)
2953 insn = get_last_insn ();
2954 else
2955 insn = pool->last_insn? pool->last_insn : get_last_insn ();
2956
2957 /* Pool start insn switches to proper section
2958 and guarantees necessary alignment. */
2959 if (TARGET_64BIT)
2960 insn = emit_insn_after (gen_pool_start_64 (), insn);
2961 else
2962 insn = emit_insn_after (gen_pool_start_31 (), insn);
2963 INSN_ADDRESSES_NEW (insn, -1);
2964
2965 insn = emit_label_after (pool->label, insn);
2966 INSN_ADDRESSES_NEW (insn, -1);
2967
2968 /* Dump constants in descending alignment requirement order,
2969 ensuring proper alignment for every constant. */
2970 for (i = 0; i < NR_C_MODES; i++)
2971 for (c = pool->constants[i]; c; c = c->next)
2972 {
2973 insn = emit_label_after (c->label, insn);
2974 INSN_ADDRESSES_NEW (insn, -1);
2975 insn = emit_insn_after (gen_consttable[i] (c->value), insn);
2976 INSN_ADDRESSES_NEW (insn, -1);
2977 }
2978
2979 /* Pool end insn switches back to previous section
2980 and guarantees necessary alignment. */
2981 if (TARGET_64BIT)
2982 insn = emit_insn_after (gen_pool_end_64 (), insn);
2983 else
2984 insn = emit_insn_after (gen_pool_end_31 (), insn);
2985 INSN_ADDRESSES_NEW (insn, -1);
2986
2987 insn = emit_barrier_after (insn);
2988 INSN_ADDRESSES_NEW (insn, -1);
2989
2990 return insn;
2991 }
2992
2993 /* Free all memory used by POOL. */
2994
2995 static void
2996 s390_free_pool (pool)
2997 struct constant_pool *pool;
2998 {
2999 int i;
3000
3001 for (i = 0; i < NR_C_MODES; i++)
3002 {
3003 struct constant *c = pool->constants[i];
3004 while (c != NULL)
3005 {
3006 struct constant *next = c->next;
3007 free (c);
3008 c = next;
3009 }
3010 }
3011
3012 free (pool);
3013 }
3014
3015 /* Used in s390.md for branch length calculation. */
3016 int s390_pool_overflow = 0;
3017
3018 /* Chunkify the literal pool if required. */
3019
3020 #define S390_POOL_CHUNK_MIN 0xc00
3021 #define S390_POOL_CHUNK_MAX 0xe00
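/* These bounds keep every entry of a chunk addressable with the
   12-bit unsigned displacement (0 .. 0xfff) of the base-register
   addressing mode, leaving some slack for alignment padding and
   size estimation errors.  */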
3022
3023 static void
3024 s390_chunkify_pool ()
3025 {
3026 rtx base_reg = gen_rtx_REG (Pmode,
3027 TARGET_64BIT? BASE_REGISTER : RETURN_REGNUM);
3028
3029 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
3030 int extra_size = 0;
3031 bitmap far_labels;
3032 rtx insn;
3033
3034 /* Do we need to chunkify the literal pool? */
3035
3036 if (get_pool_size () < S390_POOL_CHUNK_MAX)
3037 return;
3038
3039 /* Scan all insns and move literals to pool chunks.
3040 Replace all occurrences of literal pool references
3041 with explicit references to pool chunk entries. */
3042
3043 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3044 {
3045 if (GET_CODE (insn) == INSN)
3046 {
3047 rtx addr, pool_ref = NULL_RTX;
3048 find_constant_pool_ref (PATTERN (insn), &pool_ref);
3049 if (pool_ref)
3050 {
3051 if (!curr_pool)
3052 curr_pool = s390_start_pool (&pool_list, insn);
3053
3054 addr = s390_add_pool (curr_pool, get_pool_constant (pool_ref),
3055 get_pool_mode (pool_ref));
3056
3057 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
3058 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
3059 INSN_CODE (insn) = -1;
3060 }
3061 }
3062
3063 if (!curr_pool
3064 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
3065 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
3066 continue;
3067
3068 if (TARGET_64BIT)
3069 {
3070 if (curr_pool->size < S390_POOL_CHUNK_MAX)
3071 continue;
3072
3073 s390_end_pool (curr_pool, insn);
3074 curr_pool = NULL;
3075 }
3076 else
3077 {
3078 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
3079 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
3080 + extra_size;
3081
3082 /* We will later have to insert base register reload insns.
3083 Those will have an effect on code size, which we need to
3084 consider here. This calculation makes rather pessimistic
3085 worst-case assumptions. */
3086 if (GET_CODE (insn) == CODE_LABEL
3087 || GET_CODE (insn) == JUMP_INSN)
3088 extra_size += 6;
3089 else if (GET_CODE (insn) == CALL_INSN)
3090 extra_size += 4;
3091
3092 if (chunk_size < S390_POOL_CHUNK_MIN
3093 && curr_pool->size < S390_POOL_CHUNK_MIN)
3094 continue;
3095
3096 /* Pool chunks can only be inserted after BARRIERs ... */
3097 if (GET_CODE (insn) == BARRIER)
3098 {
3099 s390_end_pool (curr_pool, insn);
3100 curr_pool = NULL;
3101 extra_size = 0;
3102 }
3103
3104 /* ... so if we don't find one in time, create one. */
3105 else if ((chunk_size > S390_POOL_CHUNK_MAX
3106 || curr_pool->size > S390_POOL_CHUNK_MAX)
3107 && (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN))
3108 {
3109 int addr = INSN_ADDRESSES (INSN_UID (insn));
3110 rtx label, jump, barrier;
3111
3112 label = gen_label_rtx ();
3113 jump = emit_jump_insn_after (gen_jump (label), insn);
3114 barrier = emit_barrier_after (jump);
3115 insn = emit_label_after (label, barrier);
3116 JUMP_LABEL (jump) = label;
3117 LABEL_NUSES (label) = 1;
3118
3119 INSN_ADDRESSES_NEW (jump, addr+1);
3120 INSN_ADDRESSES_NEW (barrier, addr+1);
3121 INSN_ADDRESSES_NEW (insn, -1);
3122
3123 s390_end_pool (curr_pool, barrier);
3124 curr_pool = NULL;
3125 extra_size = 0;
3126 }
3127 }
3128 }
3129
3130 /* Dump out all literal pools. */
3131
3132 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3133 s390_dump_pool (curr_pool);
3134
3135
3136 /* Find all labels that are branched into
3137 from an insn belonging to a different chunk. */
3138
3139 far_labels = BITMAP_XMALLOC ();
3140
3141 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3142 {
3143 /* Labels marked with LABEL_PRESERVE_P can be the target
3144 of non-local jumps, so we have to mark them.
3145 The same holds for named labels.
3146
3147 Don't do that, however, if it is the label before
3148 a jump table. */
3149
3150 if (GET_CODE (insn) == CODE_LABEL
3151 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
3152 {
3153 rtx vec_insn = next_real_insn (insn);
3154 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3155 PATTERN (vec_insn) : NULL_RTX;
3156 if (!vec_pat
3157 || !(GET_CODE (vec_pat) == ADDR_VEC
3158 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3159 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
3160 }
3161
3162 /* If we have a direct jump (conditional or unconditional)
3163 or a casesi jump, check all potential targets. */
3164 else if (GET_CODE (insn) == JUMP_INSN)
3165 {
3166 rtx pat = PATTERN (insn);
3167 if (GET_CODE (pat) == SET)
3168 {
3169 rtx label = 0;
3170
3171 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3172 {
3173 label = XEXP (SET_SRC (pat), 0);
3174 }
3175 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3176 {
3177 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3178 label = XEXP (XEXP (SET_SRC (pat), 1), 0);
3179 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3180 label = XEXP (XEXP (SET_SRC (pat), 2), 0);
3181 }
3182
3183 if (label)
3184 {
3185 if (s390_find_pool (pool_list, label)
3186 != s390_find_pool (pool_list, insn))
3187 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3188 }
3189 }
3190 else if (GET_CODE (pat) == PARALLEL
3191 && XVECLEN (pat, 0) == 2
3192 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
3193 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
3194 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
3195 {
3196 /* Find the jump table used by this casesi jump. */
3197 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
3198 rtx vec_insn = next_real_insn (vec_label);
3199 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3200 PATTERN (vec_insn) : NULL_RTX;
3201 if (vec_pat
3202 && (GET_CODE (vec_pat) == ADDR_VEC
3203 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3204 {
3205 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
3206
3207 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
3208 {
3209 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
3210
3211 if (s390_find_pool (pool_list, label)
3212 != s390_find_pool (pool_list, insn))
3213 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3214 }
3215 }
3216 }
3217 }
3218 }
3219
3220 /* Insert base register reload insns before every pool. */
3221
3222 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3223 if (TARGET_64BIT)
3224 {
3225 rtx pool_ref = gen_rtx_LABEL_REF (Pmode, curr_pool->label);
3226 rtx new_insn = gen_rtx_SET (Pmode, base_reg, pool_ref);
3227 rtx insn = curr_pool->first_insn;
3228 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3229 }
3230 else
3231 {
3232 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
3233 rtx insn = curr_pool->first_insn;
3234 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3235 }
3236
3237 /* Insert base register reload insns at every far label. */
3238
3239 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3240 if (GET_CODE (insn) == CODE_LABEL
3241 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
3242 {
3243 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3244 if (pool)
3245 {
3246 if (TARGET_64BIT)
3247 {
3248 rtx pool_ref = gen_rtx_LABEL_REF (Pmode, pool->label);
3249 rtx new_insn = gen_rtx_SET (Pmode, base_reg, pool_ref);
3250 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3251 }
3252 else
3253 {
3254 rtx new_insn = gen_reload_base (base_reg, pool->label);
3255 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3256 }
3257 }
3258 }
3259
3260 /* Insert base register reload insns after every call if necessary. */
3261
3262 if (REGNO (base_reg) == RETURN_REGNUM)
3263 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3264 if (GET_CODE (insn) == CALL_INSN)
3265 {
3266 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3267 if (pool)
3268 {
3269 rtx new_insn = gen_reload_base2 (base_reg, pool->label);
3270 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3271 }
3272 }
3273
3274
3275 /* Recompute insn addresses. */
3276
3277 s390_pool_overflow = 1;
3278 init_insn_lengths ();
3279 shorten_branches (get_insns ());
3280 s390_pool_overflow = 0;
3281
3282 /* Insert base register reload insns after far branches. */
3283
3284 if (!TARGET_64BIT)
3285 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3286 if (GET_CODE (insn) == JUMP_INSN
3287 && GET_CODE (PATTERN (insn)) == SET
3288 && get_attr_length (insn) >= 12)
3289 {
3290 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3291 if (pool)
3292 {
3293 rtx new_insn = gen_reload_base (base_reg, pool->label);
3294 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3295 }
3296 }
3297
3298
3299 /* Free all memory. */
3300
3301 while (pool_list)
3302 {
3303 struct constant_pool *next = pool_list->next;
3304 s390_free_pool (pool_list);
3305 pool_list = next;
3306 }
3307
3308 BITMAP_XFREE (far_labels);
3309 }
3310
3311
3312 /* Index of constant pool chunk that is currently being processed.
3313 Set to -1 before function output has started. */
3314 int s390_pool_count = -1;
3315
3316 /* Number of elements of current constant pool. */
3317 int s390_nr_constants;
3318
3319 /* Output main constant pool to stdio stream FILE. */
3320
3321 void
3322 s390_output_constant_pool (file)
3323 FILE *file;
3324 {
3325 /* Output constant pool. */
3326 if (s390_nr_constants)
3327 {
3328 if (TARGET_64BIT)
3329 {
3330 fprintf (file, "\tlarl\t%s,.LT%X\n", reg_names[BASE_REGISTER],
3331 s390_function_count);
3332 readonly_data_section ();
3333 ASM_OUTPUT_ALIGN (file, 3);
3334 }
3335 else
3336 {
3337 fprintf (file, "\tbras\t%s,.LTN%X\n", reg_names[BASE_REGISTER],
3338 s390_function_count);
3339 }
3340 fprintf (file, ".LT%X:\n", s390_function_count);
3341
3342 s390_pool_count = 0;
3343 output_constant_pool (current_function_name, current_function_decl);
3344 s390_pool_count = -1;
3345
3346 if (TARGET_64BIT)
3347 function_section (current_function_decl);
3348 else
3349 fprintf (file, ".LTN%X:\n", s390_function_count);
3350 }
3351 }
3352
3353
3354 /* Return true if floating point registers need to be saved. */
3355
3356 static int
3357 save_fprs_p ()
3358 {
3359 int i;
3360 if (!TARGET_64BIT)
3361 return 0;
3362 for (i=24; i<=31; i++)
3363 {
3364 if (regs_ever_live[i] == 1)
3365 return 1;
3366 }
3367 return 0;
3368 }
3369
3370 /* Find the first call-clobbered register unused in a function.
3371 This could be used as base register in a leaf function
3372 or for holding the return address before the epilogue. */
3373
3374 static int
3375 find_unused_clobbered_reg ()
3376 {
3377 int i;
3378 for (i = 0; i < 6; i++)
3379 if (!regs_ever_live[i])
3380 return i;
3381 return 0;
3382 }
3383
3384 /* Fill FRAME with info about frame of current function. */
3385
3386 static void
3387 s390_frame_info (frame)
3388 struct s390_frame *frame;
3389 {
3390 int i, j;
3391 HOST_WIDE_INT fsize = get_frame_size ();
3392
3393 if (fsize > 0x7fff0000)
3394 fatal_error ("Total size of local variables exceeds architecture limit.");
3395
3396 /* fprs 8 - 15 are call-saved in the 64-bit ABI. */
3397 frame->save_fprs_p = save_fprs_p ();
3398
3399 frame->frame_size = fsize + frame->save_fprs_p * 64;
3400
3401 /* Does the function need to set up a frame and save area? */
3402
3403 if (! current_function_is_leaf
3404 || frame->frame_size > 0
3405 || current_function_calls_alloca
3406 || current_function_stdarg)
3407 frame->frame_size += STARTING_FRAME_OFFSET;
3408
3409 /* If we need to allocate a frame, the stack pointer is changed. */
3410
3411 if (frame->frame_size > 0)
3412 regs_ever_live[STACK_POINTER_REGNUM] = 1;
3413
3414 /* If the literal pool might overflow, the return register might
3415 be used as a temporary literal pool pointer. */
3416
3417 if (!TARGET_64BIT && get_pool_size () >= S390_POOL_CHUNK_MAX / 2)
3418 regs_ever_live[RETURN_REGNUM] = 1;
3419
3420 /* If there is (possibly) any pool entry, we need to
3421 load the base register. */
3422
3423 if (get_pool_size ()
3424 || !CONST_OK_FOR_LETTER_P (frame->frame_size, 'K')
3425 || (!TARGET_64BIT && current_function_uses_pic_offset_table))
3426 regs_ever_live[BASE_REGISTER] = 1;
3427
3428 /* If we need the GOT pointer, remember to save/restore it. */
3429
3430 if (current_function_uses_pic_offset_table)
3431 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3432
3433 /* Frame pointer needed. */
3434
3435 frame->frame_pointer_p = frame_pointer_needed;
3436
3437 /* Find first and last gpr to be saved. */
3438
3439 for (i = 6; i < 16; i++)
3440 if (regs_ever_live[i])
3441 break;
3442
3443 for (j = 15; j > i; j--)
3444 if (regs_ever_live[j])
3445 break;
3446
3447 if (i == 16)
3448 {
3449 /* Nothing to save / restore. */
3450 frame->first_save_gpr = -1;
3451 frame->first_restore_gpr = -1;
3452 frame->last_save_gpr = -1;
3453 frame->return_reg_saved_p = 0;
3454 }
3455 else
3456 {
3457 /* Save / Restore from gpr i to j. */
3458 frame->first_save_gpr = i;
3459 frame->first_restore_gpr = i;
3460 frame->last_save_gpr = j;
3461 frame->return_reg_saved_p = (j >= RETURN_REGNUM && i <= RETURN_REGNUM);
3462 }
3463
3464 if (current_function_stdarg)
3465 {
3466 /* Varargs functions need to save from gpr 2 up to gpr 15. */
3467 frame->first_save_gpr = 2;
3468 }
3469 }
3470
3471 /* Return the offset between the argument pointer and the
3472 frame pointer immediately after the prologue. */
3473
3474 int
3475 s390_arg_frame_offset ()
3476 {
3477 struct s390_frame frame;
3478
3479 /* Compute frame_info. */
3480
3481 s390_frame_info (&frame);
3482
3483 return frame.frame_size + STACK_POINTER_OFFSET;
3484 }
3485
3486 /* Emit insn to save fpr REGNUM at offset OFFSET relative
3487 to register BASE. Return generated insn. */
3488
3489 static rtx
3490 save_fpr (base, offset, regnum)
3491 rtx base;
3492 int offset;
3493 int regnum;
3494 {
3495 rtx addr;
3496 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
3497 set_mem_alias_set (addr, s390_sr_alias_set);
3498
3499 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
3500 }
3501
3502 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
3503 to register BASE. Return generated insn. */
3504
3505 static rtx
3506 restore_fpr (base, offset, regnum)
3507 rtx base;
3508 int offset;
3509 int regnum;
3510 {
3511 rtx addr;
3512 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
3513 set_mem_alias_set (addr, s390_sr_alias_set);
3514
3515 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
3516 }
3517
3518 /* Output the function prologue assembly code to the
3519 stdio stream FILE. The local frame size is passed
3520 in LSIZE. */
3521
3522 void
3523 s390_function_prologue (file, lsize)
3524 FILE *file ATTRIBUTE_UNUSED;
3525 HOST_WIDE_INT lsize ATTRIBUTE_UNUSED;
3526 {
3527 s390_chunkify_pool ();
3528 s390_split_branches ();
3529 }
3530
3531 /* Output the function epilogue assembly code to the
3532 stdio stream FILE. The local frame size is passed
3533 in LSIZE. */
3534
3535 void
3536 s390_function_epilogue (file, lsize)
3537 FILE *file ATTRIBUTE_UNUSED;
3538 HOST_WIDE_INT lsize ATTRIBUTE_UNUSED;
3539 {
3540 current_function_uses_pic_offset_table = 0;
3541 s390_function_count++;
3542 }
3543
3544 /* Expand the prologue into a bunch of separate insns. */
3545
3546 void
3547 s390_emit_prologue ()
3548 {
3549 struct s390_frame frame;
3550 rtx insn, addr;
3551 rtx temp_reg;
3552 int i;
3553
3554 /* Compute frame_info. */
3555
3556 s390_frame_info (&frame);
3557
3558 /* Choose the best register for temporary use within the prologue. */
3559
3560 if (frame.return_reg_saved_p
3561 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
3562 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
3563 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
3564 else
3565 temp_reg = gen_rtx_REG (Pmode, 1);
3566
3567 /* Save call saved gprs. */
3568
3569 if (frame.first_save_gpr != -1)
3570 {
3571 addr = plus_constant (stack_pointer_rtx,
3572 frame.first_save_gpr * UNITS_PER_WORD);
3573 addr = gen_rtx_MEM (Pmode, addr);
3574 set_mem_alias_set (addr, s390_sr_alias_set);
3575
3576 if (frame.first_save_gpr != frame.last_save_gpr )
3577 {
3578 insn = emit_insn (gen_store_multiple (addr,
3579 gen_rtx_REG (Pmode, frame.first_save_gpr),
3580 GEN_INT (frame.last_save_gpr
3581 - frame.first_save_gpr + 1)));
3582
3583 /* We need to set the FRAME_RELATED flag on all SETs
3584 inside the store-multiple pattern.
3585
3586 However, we must not emit DWARF records for registers 2..5
3587 if they are stored for use by variable arguments ...
3588
3589 ??? Unfortunately, it is not enough to simply not set the
3590 FRAME_RELATED flags for those SETs, because the first SET
3591 of the PARALLEL is always treated as if it had the flag
3592 set, even if it does not. Therefore we emit a new pattern
3593 without those registers as a REG_FRAME_RELATED_EXPR note. */
3594
3595 if (frame.first_save_gpr >= 6)
3596 {
3597 rtx pat = PATTERN (insn);
3598
3599 for (i = 0; i < XVECLEN (pat, 0); i++)
3600 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
3601 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
3602
3603 RTX_FRAME_RELATED_P (insn) = 1;
3604 }
3605 else if (frame.last_save_gpr >= 6)
3606 {
3607 rtx note, naddr;
3608 naddr = plus_constant (stack_pointer_rtx, 6 * UNITS_PER_WORD);
3609 note = gen_store_multiple (gen_rtx_MEM (Pmode, naddr),
3610 gen_rtx_REG (Pmode, 6),
3611 GEN_INT (frame.last_save_gpr - 6 + 1));
3612 note = PATTERN (note);
3613
3614 REG_NOTES (insn) =
3615 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
3616 note, REG_NOTES (insn));
3617
3618 for (i = 0; i < XVECLEN (note, 0); i++)
3619 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
3620 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
3621
3622 RTX_FRAME_RELATED_P (insn) = 1;
3623 }
3624 }
3625 else
3626 {
3627 insn = emit_move_insn (addr,
3628 gen_rtx_REG (Pmode, frame.first_save_gpr));
3629 RTX_FRAME_RELATED_P (insn) = 1;
3630 }
3631 }
3632
3633 /* Dump constant pool and set constant pool register (13). */
3634
3635 insn = emit_insn (gen_lit ());
3636
3637 /* Save fprs for variable args. */
3638
3639 if (current_function_stdarg)
3640 {
3641 /* Save fpr 0 and 2. */
3642
3643 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
3644 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
3645
3646 if (TARGET_64BIT)
3647 {
3648 /* Save fpr 4 and 6. */
3649
3650 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
3651 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
3652 }
3653 }
3654
3655 /* Save fprs 4 and 6 if used (31 bit ABI). */
3656
3657 if (!TARGET_64BIT)
3658 {
3659 /* Save fpr 4 and 6. */
3660 if (regs_ever_live[18])
3661 {
3662 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
3663 RTX_FRAME_RELATED_P (insn) = 1;
3664 }
3665 if (regs_ever_live[19])
3666 {
3667 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
3668 RTX_FRAME_RELATED_P (insn) = 1;
3669 }
3670 }
3671
3672 /* Decrement stack pointer. */
3673
3674 if (frame.frame_size > 0)
3675 {
3676 rtx frame_off = GEN_INT (-frame.frame_size);
3677
3678 /* Save incoming stack pointer into temp reg. */
3679
3680 if (TARGET_BACKCHAIN || frame.save_fprs_p)
3681 {
3682 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
3683 }
3684
3685 /* Subtract frame size from stack pointer. */
3686
3687 frame_off = GEN_INT (-frame.frame_size);
3688 if (!CONST_OK_FOR_LETTER_P (-frame.frame_size, 'K'))
3689 frame_off = force_const_mem (Pmode, frame_off);
3690
3691 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
3692 RTX_FRAME_RELATED_P (insn) = 1;
3693 REG_NOTES (insn) =
3694 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
3695 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
3696 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3697 GEN_INT (-frame.frame_size))),
3698 REG_NOTES (insn));
3699
3700 /* Set backchain. */
3701
3702 if (TARGET_BACKCHAIN)
3703 {
3704 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
3705 set_mem_alias_set (addr, s390_sr_alias_set);
3706 insn = emit_insn (gen_move_insn (addr, temp_reg));
3707 }
3708 }
3709
3710 /* Save fprs 8 - 15 (64 bit ABI). */
3711
3712 if (frame.save_fprs_p)
3713 {
3714 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
3715
3716 for (i = 24; i < 32; i++)
3717 if (regs_ever_live[i])
3718 {
3719 rtx addr = plus_constant (stack_pointer_rtx,
3720 frame.frame_size - 64 + (i-24)*8);
3721
3722 insn = save_fpr (temp_reg, (i-24)*8, i);
3723 RTX_FRAME_RELATED_P (insn) = 1;
3724 REG_NOTES (insn) =
3725 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
3726 gen_rtx_SET (VOIDmode,
3727 gen_rtx_MEM (DFmode, addr),
3728 gen_rtx_REG (DFmode, i)),
3729 REG_NOTES (insn));
3730 }
3731 }
3732
3733 /* Set frame pointer, if needed. */
3734
3735 if (frame.frame_pointer_p)
3736 {
3737 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
3738 RTX_FRAME_RELATED_P (insn) = 1;
3739 }
3740
3741 /* Set up got pointer, if needed. */
3742
3743 if (current_function_uses_pic_offset_table)
3744 {
3745 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3746 SYMBOL_REF_FLAG (got_symbol) = 1;
3747
3748 if (TARGET_64BIT)
3749 {
3750 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
3751 got_symbol));
3752
3753 /* It can happen that the GOT pointer isn't really needed ... */
3754 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
3755 REG_NOTES (insn));
3756 }
3757 else
3758 {
3759 got_symbol = gen_rtx_UNSPEC (VOIDmode,
3760 gen_rtvec (1, got_symbol), 100);
3761 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
3762 got_symbol = force_const_mem (Pmode, got_symbol);
3763 insn = emit_move_insn (pic_offset_table_rtx,
3764 got_symbol);
3765 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
3766 REG_NOTES (insn));
3767
3768 insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
3769 gen_rtx_REG (Pmode, BASE_REGISTER)));
3770 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
3771 REG_NOTES (insn));
3772 }
3773 }
3774 }
3775
3776 /* Expand the epilogue into a bunch of separate insns. */
3777
3778 void
3779 s390_emit_epilogue ()
3780 {
3781 struct s390_frame frame;
3782 rtx frame_pointer, return_reg;
3783 int area_bottom, area_top, offset;
3784 rtvec p;
3785
3786 /* Compute frame_info. */
3787
3788 s390_frame_info (&frame);
3789
3790 /* Check whether to use frame or stack pointer for restore. */
3791
3792 frame_pointer = frame.frame_pointer_p ?
3793 hard_frame_pointer_rtx : stack_pointer_rtx;
3794
3795 /* Compute which parts of the save area we need to access. */
3796
3797 if (frame.first_restore_gpr != -1)
3798 {
3799 area_bottom = frame.first_restore_gpr * UNITS_PER_WORD;
3800 area_top = (frame.last_save_gpr + 1) * UNITS_PER_WORD;
3801 }
3802 else
3803 {
3804 area_bottom = INT_MAX;
3805 area_top = INT_MIN;
3806 }
3807
3808 if (TARGET_64BIT)
3809 {
3810 if (frame.save_fprs_p)
3811 {
3812 if (area_bottom > -64)
3813 area_bottom = -64;
3814 if (area_top < 0)
3815 area_top = 0;
3816 }
3817 }
3818 else
3819 {
3820 if (regs_ever_live[18])
3821 {
3822 if (area_bottom > STACK_POINTER_OFFSET - 16)
3823 area_bottom = STACK_POINTER_OFFSET - 16;
3824 if (area_top < STACK_POINTER_OFFSET - 8)
3825 area_top = STACK_POINTER_OFFSET - 8;
3826 }
3827 if (regs_ever_live[19])
3828 {
3829 if (area_bottom > STACK_POINTER_OFFSET - 8)
3830 area_bottom = STACK_POINTER_OFFSET - 8;
3831 if (area_top < STACK_POINTER_OFFSET)
3832 area_top = STACK_POINTER_OFFSET;
3833 }
3834 }
3835
3836 /* Check whether we can access the register save area.
3837 If not, increment the frame pointer as required. */
3838
3839 if (area_top <= area_bottom)
3840 {
3841 /* Nothing to restore. */
3842 }
3843 else if (frame.frame_size + area_bottom >= 0
3844 && frame.frame_size + area_top <= 4096)
3845 {
3846 /* Area is in range. */
3847 offset = frame.frame_size;
3848 }
3849 else
3850 {
3851 rtx insn, frame_off;
3852
3853 offset = area_bottom < 0 ? -area_bottom : 0;
3854 frame_off = GEN_INT (frame.frame_size - offset);
3855
3856 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
3857 frame_off = force_const_mem (Pmode, frame_off);
3858
3859 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
3860 }
3861
3862 /* Restore call saved fprs. */
3863
3864 if (TARGET_64BIT)
3865 {
3866 int i;
3867
3868 if (frame.save_fprs_p)
3869 for (i = 24; i < 32; i++)
3870 if (regs_ever_live[i] && !global_regs[i])
3871 restore_fpr (frame_pointer,
3872 offset - 64 + (i-24) * 8, i);
3873 }
3874 else
3875 {
3876 if (regs_ever_live[18] && !global_regs[18])
3877 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
3878 if (regs_ever_live[19] && !global_regs[19])
3879 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
3880 }
3881
3882 /* Return register. */
3883
3884 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
3885
3886 /* Restore call saved gprs. */
3887
3888 if (frame.first_restore_gpr != -1)
3889 {
3890 rtx addr;
3891 int i;
3892
3893 /* Check for global registers and save them
3894 to the stack locations from which they would be restored. */
3895
3896 for (i = frame.first_restore_gpr;
3897 i <= frame.last_save_gpr;
3898 i++)
3899 {
3900 /* These registers are special and need to be
3901 restored in any case. */
3902 if (i == STACK_POINTER_REGNUM
3903 || i == RETURN_REGNUM
3904 || i == BASE_REGISTER
3905 || (flag_pic && i == PIC_OFFSET_TABLE_REGNUM))
3906 continue;
3907
3908 if (global_regs[i])
3909 {
3910 addr = plus_constant (frame_pointer,
3911 offset + i * UNITS_PER_WORD);
3912 addr = gen_rtx_MEM (Pmode, addr);
3913 set_mem_alias_set (addr, s390_sr_alias_set);
3914 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
3915 }
3916 }
3917
3918 /* Fetch the return address from the stack before the load
3919 multiple; this helps scheduling. */
3920
3921 if (frame.last_save_gpr >= RETURN_REGNUM
3922 && frame.first_restore_gpr < RETURN_REGNUM)
3923 {
3924 int return_regnum = find_unused_clobbered_reg();
3925 if (!return_regnum)
3926 return_regnum = 4;
3927 return_reg = gen_rtx_REG (Pmode, return_regnum);
3928
3929 addr = plus_constant (frame_pointer,
3930 offset + RETURN_REGNUM * UNITS_PER_WORD);
3931 addr = gen_rtx_MEM (Pmode, addr);
3932 set_mem_alias_set (addr, s390_sr_alias_set);
3933 emit_move_insn (return_reg, addr);
3934 }
3935
3936 /* ??? As references to the base register are not made
3937 explicit in insn RTX code, we have to add a barrier here
3938 to prevent incorrect scheduling. */
3939
3940 emit_insn (gen_blockage());
3941
3942 addr = plus_constant (frame_pointer,
3943 offset + frame.first_restore_gpr * UNITS_PER_WORD);
3944 addr = gen_rtx_MEM (Pmode, addr);
3945 set_mem_alias_set (addr, s390_sr_alias_set);
3946
3947 if (frame.first_restore_gpr != frame.last_save_gpr)
3948 {
3949 emit_insn (gen_load_multiple (
3950 gen_rtx_REG (Pmode, frame.first_restore_gpr),
3951 addr,
3952 GEN_INT (frame.last_save_gpr - frame.first_restore_gpr + 1)));
3953 }
3954 else
3955 {
3956 emit_move_insn (gen_rtx_REG (Pmode, frame.first_restore_gpr),
3957 addr);
3958 }
3959 }
3960
3961 /* Return to caller. */
3962
3963 p = rtvec_alloc (2);
3964
3965 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
3966 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
3967 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
3968 }
3969
3970
3971 /* Return the size in bytes of a function argument of
3972 type TYPE and/or mode MODE. At least one of TYPE or
3973 MODE must be specified. */
3974
3975 static int
3976 s390_function_arg_size (mode, type)
3977 enum machine_mode mode;
3978 tree type;
3979 {
3980 if (type)
3981 return int_size_in_bytes (type);
3982
3983 /* No type info available for some library calls ... */
3984 if (mode != BLKmode)
3985 return GET_MODE_SIZE (mode);
3986
3987 /* If we have neither type nor mode, abort. */
3988 abort ();
3989 }
3990
3991 /* Return 1 if a function argument of type TYPE and mode MODE
3992 is to be passed by reference. The ABI specifies that only
3993 structures of size 1, 2, 4, or 8 bytes are passed by value,
3994 all other structures (and complex numbers) are passed by
3995 reference. */
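/* For example, a 6-byte structure is passed by reference, while 4-
   or 8-byte structures are passed by value like scalars of that
   size.  */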
3996
3997 int
3998 s390_function_arg_pass_by_reference (mode, type)
3999 enum machine_mode mode;
4000 tree type;
4001 {
4002 int size = s390_function_arg_size (mode, type);
4003
4004 if (type)
4005 {
4006 if (AGGREGATE_TYPE_P (type) &&
4007 size != 1 && size != 2 && size != 4 && size != 8)
4008 return 1;
4009
4010 if (TREE_CODE (type) == COMPLEX_TYPE)
4011 return 1;
4012 }
4013 return 0;
4014
4015 }
4016
4017 /* Update the data in CUM to advance over an argument of mode MODE and
4018 data type TYPE. (TYPE is null for libcalls where that information
4019 may not be available.) The boolean NAMED specifies whether the
4020 argument is a named argument (as opposed to an unnamed argument
4021 matching an ellipsis). */
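/* For example, on a 31-bit target (UNITS_PER_WORD == 4) a DImode
   argument advances cum->gprs by two, since (8 + 4 - 1) / 4 == 2.  */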
4022
4023 void
4024 s390_function_arg_advance (cum, mode, type, named)
4025 CUMULATIVE_ARGS *cum;
4026 enum machine_mode mode;
4027 tree type;
4028 int named ATTRIBUTE_UNUSED;
4029 {
4030 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
4031 {
4032 cum->fprs++;
4033 }
4034 else if (s390_function_arg_pass_by_reference (mode, type))
4035 {
4036 cum->gprs += 1;
4037 }
4038 else
4039 {
4040 int size = s390_function_arg_size (mode, type);
4041 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
4042 }
4043 }
4044
4045 /* Define where to put the arguments to a function.
4046 Value is zero to push the argument on the stack,
4047 or a hard register in which to store the argument.
4048
4049 MODE is the argument's machine mode.
4050 TYPE is the data type of the argument (as a tree).
4051 This is null for libcalls where that information may
4052 not be available.
4053 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4054 the preceding args and about the function being called.
4055 NAMED is nonzero if this argument is a named parameter
4056 (otherwise it is an extra parameter matching an ellipsis).
4057
4058 On S/390, we use general purpose registers 2 through 6 to
4059 pass integer, pointer, and certain structure arguments, and
4060 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
4061 to pass floating point arguments. All remaining arguments
4062 are pushed to the stack. */
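/* Illustration (hard float): for

     void f (int a, double b, int c);

   a is passed in gpr 2, b in fpr 0, and c in gpr 3.  */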
4063
4064 rtx
4065 s390_function_arg (cum, mode, type, named)
4066 CUMULATIVE_ARGS *cum;
4067 enum machine_mode mode;
4068 tree type;
4069 int named ATTRIBUTE_UNUSED;
4070 {
4071 if (s390_function_arg_pass_by_reference (mode, type))
4072 return 0;
4073
4074 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
4075 {
4076 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
4077 return 0;
4078 else
4079 return gen_rtx (REG, mode, cum->fprs + 16);
4080 }
4081 else
4082 {
4083 int size = s390_function_arg_size (mode, type);
4084 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
4085
4086 if (cum->gprs + n_gprs > 5)
4087 return 0;
4088 else
4089 return gen_rtx (REG, mode, cum->gprs + 2);
4090 }
4091 }
4092
4093
4094 /* Create and return the va_list datatype.
4095
4096 On S/390, va_list is an array type equivalent to
4097
4098 typedef struct __va_list_tag
4099 {
4100 long __gpr;
4101 long __fpr;
4102 void *__overflow_arg_area;
4103 void *__reg_save_area;
4104
4105 } va_list[1];
4106
4107 where __gpr and __fpr hold the number of general purpose
4108 or floating point arguments used up to now, respectively,
4109 __overflow_arg_area points to the stack location of the
4110 next argument passed on the stack, and __reg_save_area
4111 always points to the start of the register area in the
4112 call frame of the current function. The function prologue
4113 saves all registers used for argument passing into this
4114 area if the function uses variable arguments. */
4115
4116 tree
4117 s390_build_va_list ()
4118 {
4119 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
4120
4121 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4122
4123 type_decl =
4124 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4125
4126 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
4127 long_integer_type_node);
4128 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
4129 long_integer_type_node);
4130 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
4131 ptr_type_node);
4132 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
4133 ptr_type_node);
4134
4135 DECL_FIELD_CONTEXT (f_gpr) = record;
4136 DECL_FIELD_CONTEXT (f_fpr) = record;
4137 DECL_FIELD_CONTEXT (f_ovf) = record;
4138 DECL_FIELD_CONTEXT (f_sav) = record;
4139
4140 TREE_CHAIN (record) = type_decl;
4141 TYPE_NAME (record) = type_decl;
4142 TYPE_FIELDS (record) = f_gpr;
4143 TREE_CHAIN (f_gpr) = f_fpr;
4144 TREE_CHAIN (f_fpr) = f_ovf;
4145 TREE_CHAIN (f_ovf) = f_sav;
4146
4147 layout_type (record);
4148
4149 /* The correct type is an array type of one element. */
4150 return build_array_type (record, build_index_type (size_zero_node));
4151 }
4152
4153 /* Implement va_start by filling the va_list structure VALIST.
4154 STDARG_P is always true, and ignored.
4155 NEXTARG points to the first anonymous stack argument.
4156
4157 The following global variables are used to initialize
4158 the va_list structure:
4159
4160 current_function_args_info:
4161 holds number of gprs and fprs used for named arguments.
4162 current_function_arg_offset_rtx:
4163 holds the offset of the first anonymous stack argument
4164 (relative to the virtual arg pointer). */
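/* For instance, for "int f (int a, ...)" __gpr is initialized to 1
   and __fpr to 0, and __overflow_arg_area points to where the first
   anonymous stack argument would go.  */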
4165
4166 void
4167 s390_va_start (valist, nextarg)
4168 tree valist;
4169 rtx nextarg ATTRIBUTE_UNUSED;
4170 {
4171 HOST_WIDE_INT n_gpr, n_fpr;
4172 int off;
4173 tree f_gpr, f_fpr, f_ovf, f_sav;
4174 tree gpr, fpr, ovf, sav, t;
4175
4176 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4177 f_fpr = TREE_CHAIN (f_gpr);
4178 f_ovf = TREE_CHAIN (f_fpr);
4179 f_sav = TREE_CHAIN (f_ovf);
4180
4181 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4182 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4183 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4184 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4185 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4186
4187 /* Count number of gp and fp argument registers used. */
4188
4189 n_gpr = current_function_args_info.gprs;
4190 n_fpr = current_function_args_info.fprs;
4191
4192 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4193 TREE_SIDE_EFFECTS (t) = 1;
4194 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4195
4196 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4197 TREE_SIDE_EFFECTS (t) = 1;
4198 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4199
4200 /* Find the overflow area. */
4201 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4202
4203 off = INTVAL (current_function_arg_offset_rtx);
4204 off = off < 0 ? 0 : off;
4205 if (TARGET_DEBUG_ARG)
4206 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
4207 (int)n_gpr, (int)n_fpr, off);
4208
4209 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
4210
4211 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4212 TREE_SIDE_EFFECTS (t) = 1;
4213 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4214
4215 /* Find the register save area. */
4216 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
4217 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4218 build_int_2 (-STACK_POINTER_OFFSET, -1));
4219 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4220 TREE_SIDE_EFFECTS (t) = 1;
4221 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4222 }
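/* The net effect on the va_list fields, as pseudocode (a sketch of
   what the trees above expand to, not code emitted verbatim; arg_offset
   stands for INTVAL (current_function_arg_offset_rtx)):

       ap->__gpr = current_function_args_info.gprs;
       ap->__fpr = current_function_args_info.fprs;
       ap->__overflow_arg_area
         = virtual_incoming_args + MAX (arg_offset, 0);
       ap->__reg_save_area
         = virtual_incoming_args - STACK_POINTER_OFFSET;  */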
4223
4224 /* Implement va_arg by updating the va_list structure
4225 VALIST as required to retrieve an argument of type
4226 TYPE, and returning that argument.
4227
4228 Generates code equivalent to:
4229
4230 if (integral value) {
4231 if ((size <= 4 && args.gpr < 5)
4232     || (size > 4 && args.gpr < 4))
4233 ret = args.reg_save_area[args.gpr+8]
4234 else
4235 ret = *args.overflow_arg_area++;
4236 } else if (float value) {
4237 if (args.fpr < 2)
4238 ret = args.reg_save_area[args.fpr+64]
4239 else
4240 ret = *args.overflow_arg_area++;
4241 } else if (aggregate value) {
4242 if (args.gpr < 5)
4243 ret = *args.reg_save_area[args.gpr]
4244 else
4245 ret = **args.overflow_arg_area++;
4246 } */
4247
4248 rtx
4249 s390_va_arg (valist, type)
4250 tree valist;
4251 tree type;
4252 {
4253 tree f_gpr, f_fpr, f_ovf, f_sav;
4254 tree gpr, fpr, ovf, sav, reg, t, u;
4255 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
4256 rtx lab_false, lab_over, addr_rtx, r;
4257
4258 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4259 f_fpr = TREE_CHAIN (f_gpr);
4260 f_ovf = TREE_CHAIN (f_fpr);
4261 f_sav = TREE_CHAIN (f_ovf);
4262
4263 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4264 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4265 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4266 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4267 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4268
4269 size = int_size_in_bytes (type);
4270
4271 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
4272 {
4273 if (TARGET_DEBUG_ARG)
4274 {
4275 fprintf (stderr, "va_arg: aggregate type");
4276 debug_tree (type);
4277 }
4278
4279 /* Aggregates are passed by reference. */
4280 indirect_p = 1;
4281 reg = gpr;
4282 n_reg = 1;
4283 sav_ofs = 2 * UNITS_PER_WORD;
4284 sav_scale = UNITS_PER_WORD;
4285 size = UNITS_PER_WORD;
4286 max_reg = 4;
4287 }
4288 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
4289 {
4290 if (TARGET_DEBUG_ARG)
4291 {
4292 fprintf (stderr, "va_arg: float type");
4293 debug_tree (type);
4294 }
4295
4296 /* FP args go in FP registers, if present. */
4297 indirect_p = 0;
4298 reg = fpr;
4299 n_reg = 1;
4300 sav_ofs = 16 * UNITS_PER_WORD;
4301 sav_scale = 8;
4302 /* In 64-bit mode, up to 4 parameters are passed in fprs. */
4303 max_reg = TARGET_64BIT ? 3 : 1;
4304 }
4305 else
4306 {
4307 if (TARGET_DEBUG_ARG)
4308 {
4309 fprintf (stderr, "va_arg: other type");
4310 debug_tree (type);
4311 }
4312
4313 /* Otherwise into GP registers. */
4314 indirect_p = 0;
4315 reg = gpr;
4316 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4317 sav_ofs = 2 * UNITS_PER_WORD;
4318 if (TARGET_64BIT)
4319 sav_ofs += TYPE_MODE (type) == SImode ? 4 :
4320 TYPE_MODE (type) == HImode ? 6 :
4321 TYPE_MODE (type) == QImode ? 7 : 0;
4322 else
4323 sav_ofs += TYPE_MODE (type) == HImode ? 2 :
4324 TYPE_MODE (type) == QImode ? 3 : 0;
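      /* For example, in 64-bit mode an `int' argument (SImode) is
         right-aligned within its 8-byte save slot, so 4 is added to
         sav_ofs and only the low-order four bytes are read.  */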
4325
4326 sav_scale = UNITS_PER_WORD;
4327 if (n_reg > 1)
4328 max_reg = 3;
4329 else
4330 max_reg = 4;
4331 }
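  /* Note that a multi-word argument needs consecutive gprs: the five
     argument gprs have indices 0 through 4, so a two-register value
     may start no later than index 3, hence max_reg = 3 above.  */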
4332
4333 /* Pull the value out of the saved registers ... */
4334
4335 lab_false = gen_label_rtx ();
4336 lab_over = gen_label_rtx ();
4337 addr_rtx = gen_reg_rtx (Pmode);
4338
4339 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
4340 GEN_INT (max_reg),
4341 GT, const1_rtx, Pmode, 0, lab_false);
4342
4343 if (sav_ofs)
4344 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4345 else
4346 t = sav;
4347
4348 u = build (MULT_EXPR, long_integer_type_node,
4349 reg, build_int_2 (sav_scale, 0));
4350 TREE_SIDE_EFFECTS (u) = 1;
4351
4352 t = build (PLUS_EXPR, ptr_type_node, t, u);
4353 TREE_SIDE_EFFECTS (t) = 1;
4354
4355 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4356 if (r != addr_rtx)
4357 emit_move_insn (addr_rtx, r);
4358
4359
4360 emit_jump_insn (gen_jump (lab_over));
4361 emit_barrier ();
4362 emit_label (lab_false);
4363
4364 /* ... Otherwise out of the overflow area. */
4365
4366 t = save_expr (ovf);
4367
4368
4369 /* Each stack argument occupies a full word slot (a full 64-bit slot in 64-bit mode); an argument smaller than a word is right-aligned within its slot, so step over the padding bytes first. */
4370 if (size < UNITS_PER_WORD)
4371 {
4372 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
4373 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4374 TREE_SIDE_EFFECTS (t) = 1;
4375 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4376
4377 t = save_expr (ovf);
4378 }
4379
4380 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4381 if (r != addr_rtx)
4382 emit_move_insn (addr_rtx, r);
4383
4384 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4385 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4386 TREE_SIDE_EFFECTS (t) = 1;
4387 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4388
4389 emit_label (lab_over);
4390
4391 /* Advance the register count by the number of registers the
4392 argument occupied, whether it came from the save area or not. */
4393
4394 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
4395 build_int_2 (n_reg, 0));
4396 TREE_SIDE_EFFECTS (u) = 1;
4397 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4398
4399 if (indirect_p)
4400 {
4401 r = gen_rtx_MEM (Pmode, addr_rtx);
4402 set_mem_alias_set (r, get_varargs_alias_set ());
4403 emit_move_insn (addr_rtx, r);
4404 }
4405
4406
4407 return addr_rtx;
4408 }
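/* Worked example (a sketch): fetching an `int' in 31-bit mode takes
   the "other type" path with reg = __gpr, n_reg = 1, sav_ofs = 8,
   sav_scale = 4 and max_reg = 4, so for __gpr <= 4 the value is
   loaded from

       __reg_save_area + 8 + __gpr * 4

   i.e. the slot where the prologue saved gpr (2 + __gpr); afterwards
   __gpr is incremented by 1.  Otherwise the value comes from
   __overflow_arg_area, which is advanced by one word.  */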
4409
4410
4411 /* Output assembly code for the trampoline template to
4412 stdio stream FILE.
4413
4414 On S/390, we use gpr 1 internally in the trampoline code;
4415 gpr 0 is used to hold the static chain. */
4416
4417 void
4418 s390_trampoline_template (file)
4419 FILE *file;
4420 {
4421 if (TARGET_64BIT)
4422 {
4423 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
4424 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
4425 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
4426 fprintf (file, "br\t%s\n", reg_names[1]);
4427 fprintf (file, "0:\t.quad\t0\n");
4428 fprintf (file, ".quad\t0\n");
4429 }
4430 else
4431 {
4432 fprintf (file, "basr\t%s,0\n", reg_names[1]);
4433 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
4434 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
4435 fprintf (file, "br\t%s\n", reg_names[1]);
4436 fprintf (file, ".long\t0\n");
4437 fprintf (file, ".long\t0\n");
4438 }
4439 }
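/* For reference, the 31-bit template assembles to the following
   layout (basr and br are 2-byte instructions, l is 4 bytes):

       0:  basr  %r1,0         # %r1 = address of next insn (offset 2)
       2:  l     %r0,10(%r1)   # static chain   <- word at offset 12
       6:  l     %r1,14(%r1)   # target address <- word at offset 16
      10:  br    %r1
      12:  .long 0             # patched with CXT
      16:  .long 0             # patched with FNADDR

   matching the offsets 12 and 16 (20 and 28 in 64-bit mode) used by
   s390_initialize_trampoline below.  */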
4440
4441 /* Emit RTL insns to initialize the variable parts of a trampoline.
4442 FNADDR is an RTX for the address of the function's pure code.
4443 CXT is an RTX for the static chain value for the function. */
4444
4445 void
4446 s390_initialize_trampoline (addr, fnaddr, cxt)
4447 rtx addr;
4448 rtx fnaddr;
4449 rtx cxt;
4450 {
4451 emit_move_insn (gen_rtx
4452 (MEM, Pmode,
4453 memory_address (Pmode,
4454 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
4455 emit_move_insn (gen_rtx
4456 (MEM, Pmode,
4457 memory_address (Pmode,
4458 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
4459 }
4460
4461 /* Return rtx for 64-bit constant formed from the 32-bit subwords
4462 LOW and HIGH, independent of the host word size. */
4463
4464 rtx
4465 s390_gen_rtx_const_DI (high, low)
4466 int high;
4467 int low;
4468 {
4469 #if HOST_BITS_PER_WIDE_INT >= 64
4470 HOST_WIDE_INT val;
4471 val = (HOST_WIDE_INT)high;
4472 val <<= 32;
4473 val |= (HOST_WIDE_INT)low & 0xffffffff; /* Zero-extend LOW so a negative value cannot clobber HIGH. */
4474
4475 return GEN_INT (val);
4476 #else
4477 #if HOST_BITS_PER_WIDE_INT >= 32
4478 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
4479 #else
4480 abort ();
4481 #endif
4482 #endif
4483 }
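/* Usage sketch: s390_gen_rtx_const_DI (0x12345678, 0x9abcdef0)
   yields (const_int 0x123456789abcdef0) on hosts where
   HOST_WIDE_INT is 64 bits, and the equivalent CONST_DOUBLE via
   immed_double_const on 32-bit hosts; LOW is treated as an
   unsigned 32-bit subword in either case.  */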
4484
4485 /* Output assembler code to FILE to increment profiler label # LABELNO
4486 for profiling a function entry. */
4487
4488 void
4489 s390_function_profiler (file, labelno)
4490 FILE *file;
4491 int labelno;
4492 {
4493 rtx op[7];
4494
4495 char label[128];
4496 sprintf (label, "%sP%d", LPREFIX, labelno);
4497
4498 fprintf (file, "# function profiler \n");
4499
4500 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
4501 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
4502 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
4503
4504 op[2] = gen_rtx_REG (Pmode, 1);
4505 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
4506 SYMBOL_REF_FLAG (op[3]) = 1;
4507
4508 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
4509 if (flag_pic)
4510 {
4511 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
4512 op[4] = gen_rtx_CONST (Pmode, op[4]);
4513 }
4514
4515 if (TARGET_64BIT)
4516 {
4517 output_asm_insn ("stg\t%0,%1", op);
4518 output_asm_insn ("larl\t%2,%3", op);
4519 output_asm_insn ("brasl\t%0,%4", op);
4520 output_asm_insn ("lg\t%0,%1", op);
4521 }
4522 else if (!flag_pic)
4523 {
4524 op[6] = gen_label_rtx ();
4525
4526 output_asm_insn ("st\t%0,%1", op);
4527 output_asm_insn ("bras\t%2,%l6", op);
4528 output_asm_insn (".long\t%4", op);
4529 output_asm_insn (".long\t%3", op);
4530 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
4531 output_asm_insn ("l\t%0,0(%2)", op);
4532 output_asm_insn ("l\t%2,4(%2)", op);
4533 output_asm_insn ("basr\t%0,%0", op);
4534 output_asm_insn ("l\t%0,%1", op);
4535 }
4536 else
4537 {
4538 op[5] = gen_label_rtx ();
4539 op[6] = gen_label_rtx ();
4540
4541 output_asm_insn ("st\t%0,%1", op);
4542 output_asm_insn ("bras\t%2,%l6", op);
4543 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
4544 output_asm_insn (".long\t%4-%l5", op);
4545 output_asm_insn (".long\t%3-%l5", op);
4546 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
4547 output_asm_insn ("lr\t%0,%2", op);
4548 output_asm_insn ("a\t%0,0(%2)", op);
4549 output_asm_insn ("a\t%2,4(%2)", op);
4550 output_asm_insn ("basr\t%0,%0", op);
4551 output_asm_insn ("l\t%0,%1", op);
4552 }
4553 }
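/* For instance, in 64-bit mode the sequence above emits something
   like the following (assuming RETURN_REGNUM is %r14, the stack
   pointer is %r15 and LPREFIX is ".L"):

       stg    %r14,8(%r15)     # save return-address register
       larl   %r1,.LP0         # address of the profiler label
       brasl  %r14,_mcount     # call the profiling routine
       lg     %r14,8(%r15)     # restore return-address register  */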
4554
4555 /* Select section for constant in constant pool. In 32-bit mode,
4556 constants go in the function section; in 64-bit mode in .rodata. */
4557
4558 static void
4559 s390_select_rtx_section (mode, x, align)
4560 enum machine_mode mode ATTRIBUTE_UNUSED;
4561 rtx x ATTRIBUTE_UNUSED;
4562 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
4563 {
4564 if (TARGET_64BIT)
4565 readonly_data_section ();
4566 else
4567 function_section (current_function_decl);
4568 }
4569
4570 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
4571 may access it directly in the GOT. */
4572
4573 static void
4574 s390_encode_section_info (decl, first)
4575 tree decl;
4576 int first ATTRIBUTE_UNUSED;
4577 {
4578 if (flag_pic)
4579 {
4580 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
4581 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
4582
4583 if (GET_CODE (rtl) == MEM)
4584 {
4585 SYMBOL_REF_FLAG (XEXP (rtl, 0))
4586 = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
4587 || ! TREE_PUBLIC (decl));
4588 }
4589 }
4590 }
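/* For example, with -fpic a file-local `static int x;' gets
   SYMBOL_REF_FLAG set on its SYMBOL_REF, while a global `int y;'
   does not; only the former may then be addressed without going
   through a GOT slot.  */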