/* Subroutines used for code generation on Renesas RX processors.
   Copyright (C) 2008-2016 Free Software Foundation, Inc.
   Contributed by Red Hat.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* To Do:

 * Re-enable memory-to-memory copies and fix up reload.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "varasm.h"
#include "stor-layout.h"
#include "calls.h"
#include "output.h"
#include "flags.h"
#include "explow.h"
#include "expr.h"
#include "toplev.h"
#include "langhooks.h"
#include "opts.h"
#include "builtins.h"

/* This file should be included last.  */
#include "target-def.h"

static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
static unsigned int rx_num_interrupt_regs;
static unsigned int
rx_gp_base_regnum (void)
{
  if (rx_gp_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_gp_base_regnum_val;
}

static unsigned int
rx_pid_base_regnum (void)
{
  if (rx_pid_base_regnum_val == INVALID_REGNUM)
    gcc_unreachable ();
  return rx_pid_base_regnum_val;
}

/* Find a SYMBOL_REF in a "standard" MEM address and return its decl.  */

static tree
rx_decl_for_addr (rtx op)
{
  if (GET_CODE (op) == MEM)
    op = XEXP (op, 0);
  if (GET_CODE (op) == CONST)
    op = XEXP (op, 0);
  while (GET_CODE (op) == PLUS)
    op = XEXP (op, 0);
  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_DECL (op);
  return NULL_TREE;
}

static void rx_print_operand (FILE *, rtx, int);

#define CC_FLAG_S	(1 << 0)
#define CC_FLAG_Z	(1 << 1)
#define CC_FLAG_O	(1 << 2)
#define CC_FLAG_C	(1 << 3)
#define CC_FLAG_FP	(1 << 4)	/* Fake, to differentiate CC_Fmode.  */

static unsigned int flags_from_mode (machine_mode mode);
static unsigned int flags_from_code (enum rtx_code code);

/* Return true if OP is a reference to an object in a PID data area.  */

enum pid_type
{
  PID_NOT_PID = 0,	/* The object is not in the PID data area.  */
  PID_ENCODED,		/* The object is in the PID data area.  */
  PID_UNENCODED		/* The object will be placed in the PID data area, but it has not been placed there yet.  */
};

static enum pid_type
rx_pid_data_operand (rtx op)
{
  tree op_decl;

  if (!TARGET_PID)
    return PID_NOT_PID;

  if (GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == REG
      && GET_CODE (XEXP (op, 1)) == CONST
      && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
    return PID_ENCODED;

  op_decl = rx_decl_for_addr (op);

  if (op_decl)
    {
      if (TREE_READONLY (op_decl))
	return PID_UNENCODED;
    }
  else
    {
      /* Sigh, some special cases.  */
      if (GET_CODE (op) == SYMBOL_REF
	  || GET_CODE (op) == LABEL_REF)
	return PID_UNENCODED;
    }

  return PID_NOT_PID;
}

static rtx
rx_legitimize_address (rtx x,
		       rtx oldx ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED)
{
  if (rx_pid_data_operand (x) == PID_UNENCODED)
    {
      rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
      return rv;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && REG_P (XEXP (XEXP (x, 0), 0))
      && REG_P (XEXP (x, 1)))
    return force_reg (SImode, x);

  return x;
}

/* Return true if OP is a reference to an object in a small data area.  */

static bool
rx_small_data_operand (rtx op)
{
  if (rx_small_data_limit == 0)
    return false;

  if (GET_CODE (op) == SYMBOL_REF)
    return SYMBOL_REF_SMALL_P (op);

  return false;
}

static bool
rx_is_legitimate_address (machine_mode mode, rtx x,
			  bool strict ATTRIBUTE_UNUSED)
{
  if (RTX_OK_FOR_BASE (x, strict))
    /* Register Indirect.  */
    return true;

  if ((GET_MODE_SIZE (mode) == 4
       || GET_MODE_SIZE (mode) == 2
       || GET_MODE_SIZE (mode) == 1)
      && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
    /* Pre-decrement Register Indirect or
       Post-increment Register Indirect.  */
    return RTX_OK_FOR_BASE (XEXP (x, 0), strict);

  switch (rx_pid_data_operand (x))
    {
    case PID_UNENCODED:
      return false;
    case PID_ENCODED:
      return true;
    default:
      break;
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx arg1 = XEXP (x, 0);
      rtx arg2 = XEXP (x, 1);
      rtx index = NULL_RTX;

      if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, strict))
	index = arg2;
      else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, strict))
	index = arg1;
      else
	return false;

      switch (GET_CODE (index))
	{
	case CONST_INT:
	  {
	    /* Register Relative: REG + INT.
	       Only positive, mode-aligned, mode-sized
	       displacements are allowed.  */
	    HOST_WIDE_INT val = INTVAL (index);
	    int factor;

	    if (val < 0)
	      return false;

	    switch (GET_MODE_SIZE (mode))
	      {
	      default:
	      case 4: factor = 4; break;
	      case 2: factor = 2; break;
	      case 1: factor = 1; break;
	      }

	    if (val > (65535 * factor))
	      return false;
	    return (val % factor) == 0;
	  }

	case REG:
	  /* Unscaled Indexed Register Indirect: REG + REG
	     Size has to be "QI", REG has to be valid.  */
	  return GET_MODE_SIZE (mode) == 1 && RTX_OK_FOR_BASE (index, strict);

	case MULT:
	  {
	    /* Scaled Indexed Register Indirect: REG + (REG * FACTOR)
	       Factor has to equal the mode size, REG has to be valid.  */
	    rtx factor;

	    factor = XEXP (index, 1);
	    index = XEXP (index, 0);

	    return REG_P (index)
	      && RTX_OK_FOR_BASE (index, strict)
	      && CONST_INT_P (factor)
	      && GET_MODE_SIZE (mode) == INTVAL (factor);
	  }

	default:
	  return false;
	}
    }

  /* Small data area accesses turn into register relative offsets.  */
  return rx_small_data_operand (x);
}
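
/* Illustrative sketch, not used by the port: the "Register Relative" case
   above accepts REG + INT only when the byte displacement is non-negative,
   a multiple of the access size, and no larger than 65535 times that size.
   Restated as plain C, assuming an access of SIZE bytes (1, 2 or 4):  */
#if 0
static int
rx_disp_ok_sketch (long disp, int size)
{
  /* Reject negative and misaligned displacements.  */
  if (disp < 0 || disp % size != 0)
    return 0;
  /* The displacement is effectively a 16-bit count of SIZE-byte units.  */
  return disp <= 65535L * size;
}
/* rx_disp_ok_sketch (12, 4) -> 1, rx_disp_ok_sketch (2, 4) -> 0 (misaligned),
   rx_disp_ok_sketch (-4, 4) -> 0 (negative).  */
#endif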

/* Returns TRUE for simple memory addresses, i.e. ones
   that do not involve register indirect addressing
   or pre/post increment/decrement.  */

bool
rx_is_restricted_memory_address (rtx mem, machine_mode mode)
{
  if (! rx_is_legitimate_address
      (mode, mem, reload_in_progress || reload_completed))
    return false;

  switch (GET_CODE (mem))
    {
    case REG:
      /* Simple memory addresses are OK.  */
      return true;

    case PRE_DEC:
    case POST_INC:
      return false;

    case PLUS:
      {
	rtx base, index;

	/* Only allow REG+INT addressing.  */
	base = XEXP (mem, 0);
	index = XEXP (mem, 1);

	if (! RX_REG_P (base) || ! CONST_INT_P (index))
	  return false;

	return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
      }

    case SYMBOL_REF:
      /* Can happen when small data is being supported.
	 Assume that it will be resolved into GP+INT.  */
      return true;

    default:
      gcc_unreachable ();
    }
}

/* Implement TARGET_MODE_DEPENDENT_ADDRESS_P.  */

static bool
rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
{
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
      /* --REG and REG++ only work in SImode.  */
    case PRE_DEC:
    case POST_INC:
      return true;

    case MINUS:
    case PLUS:
      if (! REG_P (XEXP (addr, 0)))
	return true;

      addr = XEXP (addr, 1);

      switch (GET_CODE (addr))
	{
	case REG:
	  /* REG+REG only works in SImode.  */
	  return true;

	case CONST_INT:
	  /* REG+INT is only mode independent if INT is a
	     multiple of 4, positive and will fit into 16-bits.  */
	  if (((INTVAL (addr) & 3) == 0)
	      && IN_RANGE (INTVAL (addr), 4, 0xfffc))
	    return false;
	  return true;

	case SYMBOL_REF:
	case LABEL_REF:
	  return true;

	case MULT:
	  /* REG+REG*SCALE is always mode dependent.  */
	  return true;

	default:
	  /* Not recognized, so treat as mode dependent.  */
	  return true;
	}

    case CONST_INT:
    case SYMBOL_REF:
    case LABEL_REF:
    case REG:
      /* These are all mode independent.  */
      return false;

    default:
      /* Everything else is unrecognized,
	 so treat as mode dependent.  */
      return true;
    }
}
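
/* Illustrative sketch, not used by the port: per the CONST_INT case above,
   a REG + INT address is considered mode independent only when the
   displacement is a positive multiple of 4 that fits in 16 bits, i.e.
   4 <= INT <= 0xfffc.  A plain C restatement, with DISP the displacement
   in bytes:  */
#if 0
static int
rx_disp_mode_independent_sketch (long disp)
{
  return (disp & 3) == 0 && disp >= 4 && disp <= 0xfffc;
}
/* rx_disp_mode_independent_sketch (8) -> 1,
   rx_disp_mode_independent_sketch (2) -> 0 (not a multiple of 4),
   rx_disp_mode_independent_sketch (0x10000) -> 0 (out of range).  */
#endif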

/* A C compound statement to output to stdio stream FILE the
   assembler syntax for an instruction operand that is a memory
   reference whose address is ADDR.  */

static void
rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      fprintf (file, "[");
      rx_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;

    case PRE_DEC:
      fprintf (file, "[-");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "]");
      break;

    case POST_INC:
      fprintf (file, "[");
      rx_print_operand (file, XEXP (addr, 0), 0);
      fprintf (file, "+]");
      break;

    case PLUS:
      {
	rtx arg1 = XEXP (addr, 0);
	rtx arg2 = XEXP (addr, 1);
	rtx base, index;

	if (REG_P (arg1) && RTX_OK_FOR_BASE (arg1, true))
	  base = arg1, index = arg2;
	else if (REG_P (arg2) && RTX_OK_FOR_BASE (arg2, true))
	  base = arg2, index = arg1;
	else
	  {
	    rx_print_operand (file, arg1, 0);
	    fprintf (file, " + ");
	    rx_print_operand (file, arg2, 0);
	    break;
	  }

	if (REG_P (index) || GET_CODE (index) == MULT)
	  {
	    fprintf (file, "[");
	    rx_print_operand (file, index, 'A');
	    fprintf (file, ",");
	  }
	else /* GET_CODE (index) == CONST_INT  */
	  {
	    rx_print_operand (file, index, 'A');
	    fprintf (file, "[");
	  }
	rx_print_operand (file, base, 0);
	fprintf (file, "]");
	break;
      }

    case CONST:
      if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (XINT (addr, 1) == UNSPEC_CONST);

	  addr = XVECEXP (addr, 0, 0);
	  gcc_assert (CONST_INT_P (addr));
	  fprintf (file, "#");
	  output_addr_const (file, addr);
	  break;
	}
      fprintf (file, "#");
      output_addr_const (file, XEXP (addr, 0));
      break;

    case UNSPEC:
      addr = XVECEXP (addr, 0, 0);
      /* Fall through.  */
    case LABEL_REF:
    case SYMBOL_REF:
      fprintf (file, "#");
      /* Fall through.  */
    default:
      output_addr_const (file, addr);
      break;
    }
}

static void
rx_print_integer (FILE * file, HOST_WIDE_INT val)
{
  if (val < 64)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
  else
    fprintf (file,
	     TARGET_AS100_SYNTAX
	     ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
	     val);
}

static bool
rx_assemble_integer (rtx x, unsigned int size, int is_aligned)
{
  const char * op = integer_asm_op (size, is_aligned);

  if (! CONST_INT_P (x))
    return default_assemble_integer (x, size, is_aligned);

  if (op == NULL)
    return false;
  fputs (op, asm_out_file);

  rx_print_integer (asm_out_file, INTVAL (x));
  fputc ('\n', asm_out_file);
  return true;
}

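/* Worked example, for illustration only: with the rules above a value
   below 64 is printed in decimal, anything larger in hex using the
   selected assembler dialect.  A self-contained sketch for long values
   (stdio is already available via system.h), assuming the AS100 dialect
   wants a leading '0' and a trailing 'H':  */
#if 0
static void
print_integer_sketch (FILE * f, long val, int as100_syntax)
{
  if (val < 64)
    fprintf (f, "%ld", val);		/* e.g. 42 for 42.  */
  else if (as100_syntax)
    fprintf (f, "0%lxH", val);		/* e.g. 0640H for 1600.  */
  else
    fprintf (f, "0x%lx", val);		/* e.g. 0x640 for 1600.  */
}
#endif
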
/* Handles the insertion of a single operand into the assembler output.
   The %<letter> directives supported are:

     %A  Print an operand without a leading # character.
     %B  Print an integer comparison name.
     %C  Print a control register name.
     %F  Print a condition code flag name.
     %G  Register used for small-data-area addressing
     %H  Print high part of a DImode register, integer or address.
     %L  Print low part of a DImode register, integer or address.
     %N  Print the negation of the immediate value.
     %P  Register used for PID addressing
     %Q  If the operand is a MEM, then correctly generate
	 register indirect or register relative addressing.
     %R  Like %Q but for zero-extending loads.  */

static void
rx_print_operand (FILE * file, rtx op, int letter)
{
  bool unsigned_load = false;
  bool print_hash = true;

  if (letter == 'A'
      && ((GET_CODE (op) == CONST
	   && GET_CODE (XEXP (op, 0)) == UNSPEC)
	  || GET_CODE (op) == UNSPEC))
    {
      print_hash = false;
      letter = 0;
    }

  switch (letter)
    {
    case 'A':
      /* Print an operand without a leading #.  */
      if (MEM_P (op))
	op = XEXP (op, 0);

      switch (GET_CODE (op))
	{
	case LABEL_REF:
	case SYMBOL_REF:
	  output_addr_const (file, op);
	  break;
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (op));
	  break;
	default:
	  rx_print_operand (file, op, 0);
	  break;
	}
      break;

    case 'B':
      {
	enum rtx_code code = GET_CODE (op);
	machine_mode mode = GET_MODE (XEXP (op, 0));
	const char *ret;

	if (mode == CC_Fmode)
	  {
	    /* C flag is undefined, and O flag carries unordered.  None of the
	       branch combinations that include O use it helpfully.  */
	    switch (code)
	      {
	      case ORDERED:
		ret = "no";
		break;
	      case UNORDERED:
		ret = "o";
		break;
	      case LT:
		ret = "n";
		break;
	      case GE:
		ret = "pz";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	else
	  {
	    unsigned int flags = flags_from_mode (mode);

	    switch (code)
	      {
	      case LT:
		ret = (flags & CC_FLAG_O ? "lt" : "n");
		break;
	      case GE:
		ret = (flags & CC_FLAG_O ? "ge" : "pz");
		break;
	      case GT:
		ret = "gt";
		break;
	      case LE:
		ret = "le";
		break;
	      case GEU:
		ret = "geu";
		break;
	      case LTU:
		ret = "ltu";
		break;
	      case GTU:
		ret = "gtu";
		break;
	      case LEU:
		ret = "leu";
		break;
	      case EQ:
		ret = "eq";
		break;
	      case NE:
		ret = "ne";
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert ((flags_from_code (code) & ~flags) == 0);
	  }
	fputs (ret, file);
	break;
      }

    case 'C':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case CTRLREG_PSW:   fprintf (file, "psw"); break;
	case CTRLREG_USP:   fprintf (file, "usp"); break;
	case CTRLREG_FPSW:  fprintf (file, "fpsw"); break;
	case CTRLREG_CPEN:  fprintf (file, "cpen"); break;
	case CTRLREG_BPSW:  fprintf (file, "bpsw"); break;
	case CTRLREG_BPC:   fprintf (file, "bpc"); break;
	case CTRLREG_ISP:   fprintf (file, "isp"); break;
	case CTRLREG_FINTV: fprintf (file, "fintv"); break;
	case CTRLREG_INTB:  fprintf (file, "intb"); break;
	default:
	  warning (0, "unrecognized control register number: %d - using 'psw'",
		   (int) INTVAL (op));
	  fprintf (file, "psw");
	  break;
	}
      break;

    case 'F':
      gcc_assert (CONST_INT_P (op));
      switch (INTVAL (op))
	{
	case 0: case 'c': case 'C': fprintf (file, "C"); break;
	case 1: case 'z': case 'Z': fprintf (file, "Z"); break;
	case 2: case 's': case 'S': fprintf (file, "S"); break;
	case 3: case 'o': case 'O': fprintf (file, "O"); break;
	case 8: case 'i': case 'I': fprintf (file, "I"); break;
	case 9: case 'u': case 'U': fprintf (file, "U"); break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'G':
      fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
      break;

    case 'H':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
	  break;
	case CONST_INT:
	  {
	    HOST_WIDE_INT v = INTVAL (op);

	    fprintf (file, "#");
	    /* Trickery to avoid problems with shifting 32 bits at a time.  */
	    v = v >> 16;
	    v = v >> 16;
	    rx_print_integer (file, v);
	    break;
	  }
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_HIGH (op));
	  break;
	case MEM:
	  if (! WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'L':
      switch (GET_CODE (op))
	{
	case REG:
	  fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 1 : 0)]);
	  break;
	case CONST_INT:
	  fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op) & 0xffffffff);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, "#");
	  rx_print_integer (file, CONST_DOUBLE_LOW (op));
	  break;
	case MEM:
	  if (WORDS_BIG_ENDIAN)
	    op = adjust_address (op, SImode, 4);
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    case 'N':
      gcc_assert (CONST_INT_P (op));
      fprintf (file, "#");
      rx_print_integer (file, - INTVAL (op));
      break;

    case 'P':
      fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
      break;

    case 'R':
      gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
      unsigned_load = true;
      /* Fall through.  */
    case 'Q':
      if (MEM_P (op))
	{
	  HOST_WIDE_INT offset;
	  rtx mem = op;

	  op = XEXP (op, 0);

	  if (REG_P (op))
	    offset = 0;
	  else if (GET_CODE (op) == PLUS)
	    {
	      rtx displacement;

	      if (REG_P (XEXP (op, 0)))
		{
		  displacement = XEXP (op, 1);
		  op = XEXP (op, 0);
		}
	      else
		{
		  displacement = XEXP (op, 0);
		  op = XEXP (op, 1);
		  gcc_assert (REG_P (op));
		}

	      gcc_assert (CONST_INT_P (displacement));
	      offset = INTVAL (displacement);
	      gcc_assert (offset >= 0);

	      fprintf (file, "%ld", offset);
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "[");
	  rx_print_operand (file, op, 0);
	  fprintf (file, "].");

	  switch (GET_MODE_SIZE (GET_MODE (mem)))
	    {
	    case 1:
	      gcc_assert (offset <= 65535 * 1);
	      fprintf (file, unsigned_load ? "UB" : "B");
	      break;
	    case 2:
	      gcc_assert (offset % 2 == 0);
	      gcc_assert (offset <= 65535 * 2);
	      fprintf (file, unsigned_load ? "UW" : "W");
	      break;
	    case 4:
	      gcc_assert (offset % 4 == 0);
	      gcc_assert (offset <= 65535 * 4);
	      fprintf (file, "L");
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  break;
	}

      /* Fall through.  */

    default:
      if (GET_CODE (op) == CONST
	  && GET_CODE (XEXP (op, 0)) == UNSPEC)
	op = XEXP (op, 0);
      else if (GET_CODE (op) == CONST
	       && GET_CODE (XEXP (op, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
	       && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
	{
	  if (print_hash)
	    fprintf (file, "#");
	  fprintf (file, "(");
	  rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
	  fprintf (file, " + ");
	  output_addr_const (file, XEXP (XEXP (op, 0), 1));
	  fprintf (file, ")");
	  return;
	}

      switch (GET_CODE (op))
	{
	case MULT:
	  /* Should be the scaled part of an
	     indexed register indirect address.  */
	  {
	    rtx base = XEXP (op, 0);
	    rtx index = XEXP (op, 1);

	    /* Check for a swapped index register and scaling factor.
	       Not sure if this can happen, but be prepared to handle it.  */
	    if (CONST_INT_P (base) && REG_P (index))
	      {
		rtx tmp = base;
		base = index;
		index = tmp;
	      }

	    gcc_assert (REG_P (base));
	    gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
	    gcc_assert (CONST_INT_P (index));
	    /* Do not try to verify the value of the scalar as it is based
	       on the mode of the MEM not the mode of the MULT.  (Which
	       will always be SImode).  */
	    fprintf (file, "%s", reg_names [REGNO (base)]);
	    break;
	  }

	case MEM:
	  output_address (GET_MODE (op), XEXP (op, 0));
	  break;

	case PLUS:
	  output_address (VOIDmode, op);
	  break;

	case REG:
	  gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [REGNO (op)]);
	  break;

	case SUBREG:
	  gcc_assert (subreg_regno (op) < FIRST_PSEUDO_REGISTER);
	  fprintf (file, "%s", reg_names [subreg_regno (op)]);
	  break;

	  /* This will only be single precision....  */
	case CONST_DOUBLE:
	  {
	    unsigned long val;

	    REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
	    if (print_hash)
	      fprintf (file, "#");
	    fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
	    break;
	  }

	case CONST_INT:
	  if (print_hash)
	    fprintf (file, "#");
	  rx_print_integer (file, INTVAL (op));
	  break;

	case UNSPEC:
	  switch (XINT (op, 1))
	    {
	    case UNSPEC_PID_ADDR:
	      {
		rtx sym, add;

		if (print_hash)
		  fprintf (file, "#");
		sym = XVECEXP (op, 0, 0);
		add = NULL_RTX;
		fprintf (file, "(");
		if (GET_CODE (sym) == PLUS)
		  {
		    add = XEXP (sym, 1);
		    sym = XEXP (sym, 0);
		  }
		output_addr_const (file, sym);
		if (add != NULL_RTX)
		  {
		    fprintf (file, "+");
		    output_addr_const (file, add);
		  }
		fprintf (file, "-__pid_base");
		fprintf (file, ")");
		return;
	      }
	    }
	  /* Fall through */

	case CONST:
	case SYMBOL_REF:
	case LABEL_REF:
	case CODE_LABEL:
	  rx_print_operand_address (file, VOIDmode, op);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    }
}

/* Maybe convert an operand into its PID format.  */

rtx
rx_maybe_pidify_operand (rtx op, int copy_to_reg)
{
  if (rx_pid_data_operand (op) == PID_UNENCODED)
    {
      if (GET_CODE (op) == MEM)
	{
	  rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
	  op = replace_equiv_address (op, a);
	}
      else
	{
	  op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
	}

      if (copy_to_reg)
	op = copy_to_mode_reg (GET_MODE (op), op);
    }
  return op;
}

/* Returns an assembler template for a move instruction.  */

char *
rx_gen_move_template (rtx * operands, bool is_movu)
{
  static char out_template [64];
  const char * extension = TARGET_AS100_SYNTAX ? ".L" : "";
  const char * src_template;
  const char * dst_template;
  rtx dest = operands[0];
  rtx src = operands[1];

  /* Decide which extension, if any, should be given to the move instruction.  */
  switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
    {
    case QImode:
      /* The .B extension is not valid when
	 loading an immediate into a register.  */
      if (! REG_P (dest) || ! CONST_INT_P (src))
	extension = ".B";
      break;
    case HImode:
      if (! REG_P (dest) || ! CONST_INT_P (src))
	/* The .W extension is not valid when
	   loading an immediate into a register.  */
	extension = ".W";
      break;
    case DFmode:
    case DImode:
    case SFmode:
    case SImode:
      extension = ".L";
      break;
    case VOIDmode:
      /* This mode is used by constants.  */
      break;
    default:
      debug_rtx (src);
      gcc_unreachable ();
    }

  if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "(%A1 - __pid_base)[%P1]";
    }
  else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
    {
      gcc_assert (GET_MODE (src) != DImode);
      gcc_assert (GET_MODE (src) != DFmode);

      src_template = "%%gp(%A1)[%G1]";
    }
  else
    src_template = "%1";

  if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
    {
      gcc_assert (GET_MODE (dest) != DImode);
      gcc_assert (GET_MODE (dest) != DFmode);

      dst_template = "%%gp(%A0)[%G0]";
    }
  else
    dst_template = "%0";

  if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
    {
      gcc_assert (! is_movu);

      if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
	sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
      else
	sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
    }
  else
    sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
	     extension, src_template, dst_template);
  return out_template;
}

/* Return VALUE rounded up to the next ALIGNMENT boundary.  */

static inline unsigned int
rx_round_up (unsigned int value, unsigned int alignment)
{
  alignment -= 1;
  return (value + alignment) & (~ alignment);
}
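
/* Worked example, for illustration only: rx_round_up assumes ALIGNMENT is
   a power of two.  Adding (alignment - 1) and masking rounds up to the
   next multiple, so rx_round_up (13, 4) == 16, rx_round_up (16, 4) == 16
   and rx_round_up (1, 8) == 8.  */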

/* Return the number of bytes in the argument registers
   occupied by an argument of type TYPE and mode MODE.  */

static unsigned int
rx_function_arg_size (machine_mode mode, const_tree type)
{
  unsigned int num_bytes;

  num_bytes = (mode == BLKmode)
    ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  return rx_round_up (num_bytes, UNITS_PER_WORD);
}

#define NUM_ARG_REGS		4
#define MAX_NUM_ARG_BYTES	(NUM_ARG_REGS * UNITS_PER_WORD)

/* Return an RTL expression describing the register holding a function
   parameter of mode MODE and type TYPE or NULL_RTX if the parameter should
   be passed on the stack.  CUM describes the previous parameters to the
   function and NAMED is false if the parameter is part of a variable
   parameter list, or the last named parameter before the start of a
   variable parameter list.  */

static rtx
rx_function_arg (cumulative_args_t cum, machine_mode mode,
		 const_tree type, bool named)
{
  unsigned int next_reg;
  unsigned int bytes_so_far = *get_cumulative_args (cum);
  unsigned int size;
  unsigned int rounded_size;

  /* An exploded version of rx_function_arg_size.  */
  size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  /* If the size is not known it cannot be passed in registers.  */
  if (size < 1)
    return NULL_RTX;

  rounded_size = rx_round_up (size, UNITS_PER_WORD);

  /* Don't pass this arg via registers if there
     are insufficient registers to hold all of it.  */
  if (rounded_size + bytes_so_far > MAX_NUM_ARG_BYTES)
    return NULL_RTX;

  /* Unnamed arguments and the last named argument in a
     variadic function are always passed on the stack.  */
  if (!named)
    return NULL_RTX;

  /* Structures must occupy an exact number of registers,
     otherwise they are passed on the stack.  */
  if ((type == NULL || AGGREGATE_TYPE_P (type))
      && (size % UNITS_PER_WORD) != 0)
    return NULL_RTX;

  next_reg = (bytes_so_far / UNITS_PER_WORD) + 1;

  return gen_rtx_REG (mode, next_reg);
}
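
/* Worked example, for illustration only: with NUM_ARG_REGS == 4 and 4-byte
   words, at most the first 16 bytes of named arguments go in the argument
   registers (r1 to r4 on RX).  For f (int a, long long b, int c):
   a uses the first slot (bytes_so_far 0), b the next two (bytes_so_far 4),
   c the fourth (bytes_so_far 12); a further word no longer fits and goes
   on the stack, as do unnamed arguments and aggregates whose size is not
   a whole number of words.  */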

static void
rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
			 const_tree type, bool named ATTRIBUTE_UNUSED)
{
  *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}

static unsigned int
rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED)
{
  /* Older versions of the RX backend aligned all on-stack arguments
     to 32-bits.  The RX C ABI however says that they should be
     aligned to their natural alignment.  (See section 5.2.2 of the ABI).  */
  if (TARGET_GCC_ABI)
    return STACK_BOUNDARY;

  if (type)
    {
      if (DECL_P (type))
	return DECL_ALIGN (type);
      return TYPE_ALIGN (type);
    }

  return PARM_BOUNDARY;
}

/* Return an RTL describing where a function return value of type RET_TYPE
   is held.  */

static rtx
rx_function_value (const_tree ret_type,
		   const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
		   bool outgoing ATTRIBUTE_UNUSED)
{
  machine_mode mode = TYPE_MODE (ret_type);

  /* RX ABI specifies that small integer types are
     promoted to int when returned by a function.  */
  if (GET_MODE_SIZE (mode) > 0
      && GET_MODE_SIZE (mode) < 4
      && ! COMPLEX_MODE_P (mode)
      && ! VECTOR_TYPE_P (ret_type)
      && ! VECTOR_MODE_P (mode)
      )
    return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);

  return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
}

/* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
   regard to function returns as does TARGET_FUNCTION_VALUE.  */

static machine_mode
rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			  machine_mode mode,
			  int * punsignedp ATTRIBUTE_UNUSED,
			  const_tree funtype ATTRIBUTE_UNUSED,
			  int for_return)
{
  if (for_return != 1
      || GET_MODE_SIZE (mode) >= 4
      || COMPLEX_MODE_P (mode)
      || VECTOR_MODE_P (mode)
      || VECTOR_TYPE_P (type)
      || GET_MODE_SIZE (mode) < 1)
    return mode;

  return SImode;
}

static bool
rx_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size;

  if (TYPE_MODE (type) != BLKmode
      && ! AGGREGATE_TYPE_P (type))
    return false;

  size = int_size_in_bytes (type);
  /* Large structs and those whose size is not an
     exact multiple of 4 are returned in memory.  */
  return size < 1
    || size > 16
    || (size % UNITS_PER_WORD) != 0;
}

static rtx
rx_struct_value_rtx (tree fndecl ATTRIBUTE_UNUSED,
		     int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, STRUCT_VAL_REGNUM);
}

static bool
rx_return_in_msb (const_tree valtype)
{
  return TARGET_BIG_ENDIAN_DATA
    && (AGGREGATE_TYPE_P (valtype) || TREE_CODE (valtype) == COMPLEX_TYPE);
}

/* Returns true if the provided function has the specified attribute.  */

static inline bool
has_func_attr (const_tree decl, const char * func_attr)
{
  if (decl == NULL_TREE)
    decl = current_function_decl;

  return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
}

/* Returns true if the provided function has the "fast_interrupt" attribute.  */

bool
is_fast_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "fast_interrupt");
}

/* Returns true if the provided function has the "interrupt" attribute.  */

bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt");
}

/* Returns true if the provided function has the "naked" attribute.  */

static inline bool
is_naked_func (const_tree decl)
{
  return has_func_attr (decl, "naked");
}
static bool use_fixed_regs = false;

static void
rx_conditional_register_usage (void)
{
  static bool using_fixed_regs = false;

  if (TARGET_PID)
    {
      rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
      fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
    }

  if (rx_small_data_limit > 0)
    {
      if (TARGET_PID)
	rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
      else
	rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;

      fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
    }

  if (use_fixed_regs != using_fixed_regs)
    {
      static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
      static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];

      if (use_fixed_regs)
	{
	  unsigned int r;

	  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
	  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

	  /* This is for fast interrupt handlers.  Any register in
	     the range r10 to r13 (inclusive) that is currently
	     marked as fixed is now a viable, call-used register.  */
	  for (r = 10; r <= 13; r++)
	    if (fixed_regs[r])
	      {
		fixed_regs[r] = 0;
		call_used_regs[r] = 1;
	      }

	  /* Mark r7 as fixed.  This is just a hack to avoid
	     altering the reg_alloc_order array so that the newly
	     freed r10-r13 registers are the preferred registers.  */
	  fixed_regs[7] = call_used_regs[7] = 1;
	}
      else
	{
	  /* Restore the normal register masks.  */
	  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
	  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);
	}

      using_fixed_regs = use_fixed_regs;
    }
}

struct decl_chain
{
  tree fndecl;
  struct decl_chain * next;
};

/* Stack of decls for which we have issued warnings.  */
static struct decl_chain * warned_decls = NULL;

static void
add_warned_decl (tree fndecl)
{
  struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);

  warned->fndecl = fndecl;
  warned->next = warned_decls;
  warned_decls = warned;
}

/* Returns TRUE if FNDECL is on our list of warned about decls.  */

static bool
already_warned (tree fndecl)
{
  struct decl_chain * warned;

  for (warned = warned_decls;
       warned != NULL;
       warned = warned->next)
    if (warned->fndecl == fndecl)
      return true;

  return false;
}

/* Perform any actions necessary before starting to compile FNDECL.
   For the RX we use this to make sure that we have the correct
   set of register masks selected.  If FNDECL is NULL then we are
   compiling top level things.  */

static void
rx_set_current_function (tree fndecl)
{
  /* Remember the last target of rx_set_current_function.  */
  static tree rx_previous_fndecl;
  bool prev_was_fast_interrupt;
  bool current_is_fast_interrupt;

  /* Only change the context if the function changes.  This hook is called
     several times in the course of compiling a function, and we don't want
     to slow things down too much or call target_reinit when it isn't safe.  */
  if (fndecl == rx_previous_fndecl)
    return;

  prev_was_fast_interrupt
    = rx_previous_fndecl
    ? is_fast_interrupt_func (rx_previous_fndecl) : false;

  current_is_fast_interrupt
    = fndecl ? is_fast_interrupt_func (fndecl) : false;

  if (prev_was_fast_interrupt != current_is_fast_interrupt)
    {
      use_fixed_regs = current_is_fast_interrupt;
      target_reinit ();
    }

  if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
    {
      /* We do not warn about the first fast interrupt routine that
	 we see.  Instead we just push it onto the stack.  */
      if (warned_decls == NULL)
	add_warned_decl (fndecl);

      /* Otherwise if this fast interrupt is one for which we have
	 not already issued a warning, generate one and then push
	 it onto the stack as well.  */
      else if (! already_warned (fndecl))
	{
	  warning (0, "multiple fast interrupt routines seen: %qE and %qE",
		   fndecl, warned_decls->fndecl);
	  add_warned_decl (fndecl);
	}
    }

  rx_previous_fndecl = fndecl;
}

/* Typical stack layout should look like this after the function's prologue:

                            |    |
                              --       ^
                            |    | \   |
                            |    |   arguments saved   | Increasing
                            |    |   on the stack      |  addresses
    PARENT   arg pointer -> |    | /
  -------------------------- ---- -------------------
    CHILD                   |ret |   return address
                              --
                            |    | \
                            |    |   call saved
                            |    |   registers
                            |    | /
                              --
                            |    | \
                            |    |   local
                            |    |   variables
        frame pointer ->    |    | /
                              --
                            |    | \
                            |    |   outgoing           | Decreasing
                            |    |   arguments           |  addresses
   current stack pointer -> |    | /
  -------------------------- ---- ------------------
                            |    |                       V
 */

static unsigned int
bit_count (unsigned int x)
{
  const unsigned int m1 = 0x55555555;
  const unsigned int m2 = 0x33333333;
  const unsigned int m4 = 0x0f0f0f0f;

  x -= (x >> 1) & m1;
  x = (x & m2) + ((x >> 2) & m2);
  x = (x + (x >> 4)) & m4;
  x += x >> 8;

  return (x + (x >> 16)) & 0x3f;
}
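
/* Worked example, for illustration only: bit_count is a standard SWAR
   population count; each step folds adjacent bit fields into 2-bit,
   4-bit, 8-bit and finally 16-bit partial sums, so bit_count (0x0f) == 4
   and bit_count (0xffffffff) == 32.  A direct (slower) equivalent is:  */
#if 0
static unsigned int
bit_count_reference (unsigned int x)
{
  unsigned int n = 0;

  while (x)
    {
      n += x & 1;	/* Count the low bit...  */
      x >>= 1;		/* ...then discard it.  */
    }
  return n;
}
#endif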

#define MUST_SAVE_ACC_REGISTER			\
  (TARGET_SAVE_ACC_REGISTER			\
   && (is_interrupt_func (NULL_TREE)		\
       || is_fast_interrupt_func (NULL_TREE)))

/* Returns either the lowest numbered and highest numbered registers that
   occupy the call-saved area of the stack frame, if the registers are
   stored as a contiguous block, or else a bitmask of the individual
   registers if they are stored piecemeal.

   Also computes the size of the frame and the size of the outgoing
   arguments block (in bytes).  */

static void
rx_get_stack_layout (unsigned int * lowest,
		     unsigned int * highest,
		     unsigned int * register_mask,
		     unsigned int * frame_size,
		     unsigned int * stack_size)
{
  unsigned int reg;
  unsigned int low;
  unsigned int high;
  unsigned int fixed_reg = 0;
  unsigned int save_mask;
  unsigned int pushed_mask;
  unsigned int unneeded_pushes;

  if (is_naked_func (NULL_TREE))
    {
      /* Naked functions do not create their own stack frame.
	 Instead the programmer must do that for us.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = 0;
      * frame_size = 0;
      * stack_size = 0;
      return;
    }

  for (save_mask = high = low = 0, reg = 1; reg < CC_REGNUM; reg++)
    {
      if ((df_regs_ever_live_p (reg)
	   /* Always save all call clobbered registers inside non-leaf
	      interrupt handlers, even if they are not live - they may
	      be used in (non-interrupt aware) routines called from this one.  */
	   || (call_used_regs[reg]
	       && is_interrupt_func (NULL_TREE)
	       && ! crtl->is_leaf))
	  && (! call_used_regs[reg]
	      /* Even call clobbered registers must
		 be pushed inside interrupt handlers.  */
	      || is_interrupt_func (NULL_TREE)
	      /* Likewise for fast interrupt handlers, except registers r10 -
		 r13.  These are normally call-saved, but may have been set
		 to call-used by rx_conditional_register_usage.  If so then
		 they can be used in the fast interrupt handler without
		 saving them on the stack.  */
	      || (is_fast_interrupt_func (NULL_TREE)
		  && ! IN_RANGE (reg, 10, 13))))
	{
	  if (low == 0)
	    low = reg;
	  high = reg;

	  save_mask |= 1 << reg;
	}

      /* Remember if we see a fixed register
	 after having found the low register.  */
      if (low != 0 && fixed_reg == 0 && fixed_regs [reg])
	fixed_reg = reg;
    }

  /* If we have to save the accumulator register, make sure
     that at least two registers are pushed into the frame.  */
  if (MUST_SAVE_ACC_REGISTER
      && bit_count (save_mask) < 2)
    {
      save_mask |= (1 << 13) | (1 << 14);
      if (low == 0)
	low = 13;
      if (high == 0 || low == high)
	high = low + 1;
    }

  /* Decide if it would be faster to fill in the call-saved area of the stack
     frame using multiple PUSH instructions instead of a single PUSHM
     instruction.

     SAVE_MASK is a bitmask of the registers that must be stored in the
     call-save area.  PUSHED_MASK is a bitmask of the registers that would
     be pushed into the area if we used a PUSHM instruction.  UNNEEDED_PUSHES
     is a bitmask of those registers in pushed_mask that are not in
     save_mask.

     We use a simple heuristic that says that it is better to use
     multiple PUSH instructions if the number of unnecessary pushes is
     greater than the number of necessary pushes.

     We also use multiple PUSH instructions if there are any fixed registers
     between LOW and HIGH.  The only way that this can happen is if the user
     has specified --fixed-<reg-name> on the command line and in such
     circumstances we do not want to touch the fixed registers at all.

     Note also that the code in the prologue/epilogue handlers will
     automatically merge multiple PUSHes of adjacent registers into a single
     PUSHM.

     FIXME: Is it worth improving this heuristic ?  */
  pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
  unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;

  if ((fixed_reg && fixed_reg <= high)
      || (optimize_function_for_speed_p (cfun)
	  && bit_count (save_mask) < bit_count (unneeded_pushes)))
    {
      /* Use multiple pushes.  */
      * lowest = 0;
      * highest = 0;
      * register_mask = save_mask;
    }
  else
    {
      /* Use one push multiple instruction.  */
      * lowest = low;
      * highest = high;
      * register_mask = 0;
    }

  * frame_size = rx_round_up
    (get_frame_size (), STACK_BOUNDARY / BITS_PER_UNIT);

  if (crtl->args.size > 0)
    * frame_size += rx_round_up
      (crtl->args.size, STACK_BOUNDARY / BITS_PER_UNIT);

  * stack_size = rx_round_up
    (crtl->outgoing_args_size, STACK_BOUNDARY / BITS_PER_UNIT);
}
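
/* Worked example, for illustration only: suppose only r4 and r8 need
   saving, so save_mask has two bits set.  A single PUSHM would push the
   whole span r4-r8 (pushed_mask covers five registers) and unneeded_pushes
   would have three bits set (r5, r6, r7).  Since 2 < 3, when optimizing
   for speed the code above chooses individual PUSH instructions and
   reports the registers through *register_mask rather than as a
   LOW/HIGH range.  */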

/* Generate a PUSHM instruction that matches the given operands.  */

void
rx_emit_stack_pushm (rtx * operands)
{
  HOST_WIDE_INT last_reg;
  rtx first_push;

  gcc_assert (CONST_INT_P (operands[0]));
  last_reg = (INTVAL (operands[0]) / UNITS_PER_WORD) - 1;

  gcc_assert (GET_CODE (operands[1]) == PARALLEL);
  first_push = XVECEXP (operands[1], 0, 1);
  gcc_assert (SET_P (first_push));
  first_push = SET_SRC (first_push);
  gcc_assert (REG_P (first_push));

  asm_fprintf (asm_out_file, "\tpushm\t%s-%s\n",
	       reg_names [REGNO (first_push) - last_reg],
	       reg_names [REGNO (first_push)]);
}

/* Generate a PARALLEL that will pass the rx_store_multiple_vector predicate.  */

static rtx
gen_rx_store_vector (unsigned int low, unsigned int high)
{
  unsigned int i;
  unsigned int count = (high - low) + 2;
  rtx vector;

  vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (vector, 0, 0) =
    gen_rtx_SET (stack_pointer_rtx,
		 gen_rtx_MINUS (SImode, stack_pointer_rtx,
				GEN_INT ((count - 1) * UNITS_PER_WORD)));

  for (i = 0; i < count - 1; i++)
    XVECEXP (vector, 0, i + 1) =
      gen_rtx_SET (gen_rtx_MEM (SImode,
				gen_rtx_MINUS (SImode, stack_pointer_rtx,
					       GEN_INT ((i + 1) * UNITS_PER_WORD))),
		   gen_rtx_REG (SImode, high - i));
  return vector;
}

/* Mark INSN as being frame related.  If it is a PARALLEL
   then mark each element as being frame related as well.  */

static void
mark_frame_related (rtx insn)
{
  RTX_FRAME_RELATED_P (insn) = 1;
  insn = PATTERN (insn);

  if (GET_CODE (insn) == PARALLEL)
    {
      unsigned int i;

      for (i = 0; i < (unsigned) XVECLEN (insn, 0); i++)
	RTX_FRAME_RELATED_P (XVECEXP (insn, 0, i)) = 1;
    }
}

static bool
ok_for_max_constant (HOST_WIDE_INT val)
{
  if (rx_max_constant_size == 0 || rx_max_constant_size == 4)
    /* If there is no constraint on the size of constants
       used as operands, then any value is legitimate.  */
    return true;

  /* rx_max_constant_size specifies the maximum number
     of bytes that can be used to hold a signed value.  */
  return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
		   ( 1 << (rx_max_constant_size * 8)));
}
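
/* Worked example, for illustration only: with rx_max_constant_size == 1
   the bounds passed to IN_RANGE above are (HOST_WIDE_INT_M1U << 8) and
   (1 << 8), i.e. values from -256 to 256 are accepted, while a constant
   such as 1000 is not and will instead be wrapped in an UNSPEC by
   gen_safe_add below.  */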

/* Generate an ADD of SRC plus VAL into DEST.
   Handles the case where VAL is too big for max_constant_value.
   Sets FRAME_RELATED_P on the insn if IS_FRAME_RELATED is true.  */

static void
gen_safe_add (rtx dest, rtx src, rtx val, bool is_frame_related)
{
  rtx insn;

  if (val == NULL_RTX || INTVAL (val) == 0)
    {
      gcc_assert (dest != src);

      insn = emit_move_insn (dest, src);
    }
  else if (ok_for_max_constant (INTVAL (val)))
    insn = emit_insn (gen_addsi3 (dest, src, val));
  else
    {
      /* Wrap VAL in an UNSPEC so that rx_is_legitimate_constant
	 will not reject it.  */
      val = gen_rtx_CONST (SImode, gen_rtx_UNSPEC (SImode, gen_rtvec (1, val), UNSPEC_CONST));
      insn = emit_insn (gen_addsi3 (dest, src, val));

      if (is_frame_related)
	/* We have to provide our own frame related note here
	   as the dwarf2out code cannot be expected to grok
	   our unspec.  */
	add_reg_note (insn, REG_FRAME_RELATED_EXPR,
		      gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
      return;
    }

  if (is_frame_related)
    RTX_FRAME_RELATED_P (insn) = 1;
}

static void
push_regs (unsigned int high, unsigned int low)
{
  rtx insn;

  if (low == high)
    insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
  else
    insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
				       gen_rx_store_vector (low, high)));
  mark_frame_related (insn);
}

void
rx_expand_prologue (void)
{
  unsigned int stack_size;
  unsigned int frame_size;
  unsigned int mask;
  unsigned int low;
  unsigned int high;
  unsigned int reg;

  /* Naked functions use their own, programmer provided prologues.  */
  if (is_naked_func (NULL_TREE))
    return;

  rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);

  if (flag_stack_usage_info)
    current_function_static_stack_size = frame_size + stack_size;

  /* If we use any of the callee-saved registers, save them now.  */
  if (mask)
    {
      /* Push registers in reverse order.  */
      for (reg = CC_REGNUM; reg --;)
	if (mask & (1 << reg))
	  {
	    low = high = reg;

	    /* Look for a span of registers.
	       Note - we do not have to worry about -Os and whether
	       it is better to use a single, longer PUSHM as
	       rx_get_stack_layout has already done that for us.  */
	    while (reg-- > 0)
	      if ((mask & (1 << reg)) == 0)
		break;
	      else
		--low;

	    push_regs (high, low);
	    if (reg == (unsigned) -1)
	      break;
	  }
    }
  else if (low)
    push_regs (high, low);

  if (MUST_SAVE_ACC_REGISTER)
    {
      unsigned int acc_high, acc_low;

      /* Interrupt handlers have to preserve the accumulator
	 register if so requested by the user.  Use the first
	 two pushed registers as intermediaries.  */
      if (mask)
	{
	  acc_low = acc_high = 0;

	  for (reg = 1; reg < CC_REGNUM; reg ++)
	    if (mask & (1 << reg))
	      {
		if (acc_low == 0)
		  acc_low = reg;
		else
		  {
		    acc_high = reg;
		    break;
		  }
	      }

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high != 0);

	  /* Note - the bottom 16 bits of the accumulator are inaccessible.
	     We just assume that they are zero.  */
	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_stack_push (gen_rtx_REG (SImode, acc_high)));
	}
      else
	{
	  acc_low = low;
	  acc_high = low + 1;

	  /* We have assumed that there are at least two registers pushed... */
	  gcc_assert (acc_high <= high);

	  emit_insn (gen_mvfacmi (gen_rtx_REG (SImode, acc_low)));
	  emit_insn (gen_mvfachi (gen_rtx_REG (SImode, acc_high)));
	  emit_insn (gen_stack_pushm (GEN_INT (2 * UNITS_PER_WORD),
				      gen_rx_store_vector (acc_low, acc_high)));
	}
    }

  /* If needed, set up the frame pointer.  */
  if (frame_pointer_needed)
    gen_safe_add (frame_pointer_rtx, stack_pointer_rtx,
		  GEN_INT (- (HOST_WIDE_INT) frame_size), true);

  /* Allocate space for the outgoing args.
     If the stack frame has not already been set up then handle this as well.  */
  if (stack_size)
    {
      if (frame_size)
	{
	  if (frame_pointer_needed)
	    gen_safe_add (stack_pointer_rtx, frame_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) stack_size), true);
	  else
	    gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
			  GEN_INT (- (HOST_WIDE_INT) (frame_size + stack_size)),
			  true);
	}
      else
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) stack_size), true);
    }
  else if (frame_size)
    {
      if (! frame_pointer_needed)
	gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
		      GEN_INT (- (HOST_WIDE_INT) frame_size), true);
      else
	gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
		      false /* False because the epilogue will use the FP not the SP.  */);
    }
}
1831
69c7a374
DD
1832static void
1833add_vector_labels (FILE *file, const char *aname)
1834{
1835 tree vec_attr;
1836 tree val_attr;
1837 const char *vname = "vect";
1838 const char *s;
1839 int vnum;
1840
1841 /* This node is for the vector/interrupt tag itself */
1842 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1843 if (!vec_attr)
1844 return;
1845
1846 /* Now point it at the first argument */
1847 vec_attr = TREE_VALUE (vec_attr);
1848
1849 /* Iterate through the arguments. */
1850 while (vec_attr)
1851 {
1852 val_attr = TREE_VALUE (vec_attr);
1853 switch (TREE_CODE (val_attr))
1854 {
1855 case STRING_CST:
1856 s = TREE_STRING_POINTER (val_attr);
1857 goto string_id_common;
1858
1859 case IDENTIFIER_NODE:
1860 s = IDENTIFIER_POINTER (val_attr);
1861
1862 string_id_common:
1863 if (strcmp (s, "$default") == 0)
1864 {
1865 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1866 fprintf (file, "$tableentry$default$%s:\n", vname);
1867 }
1868 else
1869 vname = s;
1870 break;
1871
1872 case INTEGER_CST:
1873 vnum = TREE_INT_CST_LOW (val_attr);
1874
1875 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1876 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1877 break;
1878
1879 default:
1880 ;
1881 }
1882
1883 vec_attr = TREE_CHAIN (vec_attr);
1884 }
1885
1886}
1887
65a324b4
NC
1888static void
1889rx_output_function_prologue (FILE * file,
1890 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED)
1891{
69c7a374
DD
1892 add_vector_labels (file, "interrupt");
1893 add_vector_labels (file, "vector");
1894
65a324b4
NC
1895 if (is_fast_interrupt_func (NULL_TREE))
1896 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1897
9595a419
NC
1898 if (is_interrupt_func (NULL_TREE))
1899 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
65a324b4
NC
1900
1901 if (is_naked_func (NULL_TREE))
1902 asm_fprintf (file, "\t; Note: Naked Function\n");
1903
1904 if (cfun->static_chain_decl != NULL)
1905 asm_fprintf (file, "\t; Note: Nested function declared "
1906 "inside another function.\n");
1907
1908 if (crtl->calls_eh_return)
1909 asm_fprintf (file, "\t; Note: Calls __builtin_eh_return.\n");
1910}
1911
1912/* Generate a POPM or RTSD instruction that matches the given operands. */
1913
1914void
1915rx_emit_stack_popm (rtx * operands, bool is_popm)
1916{
1917 HOST_WIDE_INT stack_adjust;
1918 HOST_WIDE_INT last_reg;
1919 rtx first_push;
1920
1921 gcc_assert (CONST_INT_P (operands[0]));
1922 stack_adjust = INTVAL (operands[0]);
1923
1924 gcc_assert (GET_CODE (operands[1]) == PARALLEL);
1925 last_reg = XVECLEN (operands[1], 0) - (is_popm ? 2 : 3);
1926
1927 first_push = XVECEXP (operands[1], 0, 1);
1928 gcc_assert (SET_P (first_push));
1929 first_push = SET_DEST (first_push);
1930 gcc_assert (REG_P (first_push));
1931
1932 if (is_popm)
1933 asm_fprintf (asm_out_file, "\tpopm\t%s-%s\n",
1934 reg_names [REGNO (first_push)],
1935 reg_names [REGNO (first_push) + last_reg]);
1936 else
1937 asm_fprintf (asm_out_file, "\trtsd\t#%d, %s-%s\n",
1938 (int) stack_adjust,
1939 reg_names [REGNO (first_push)],
1940 reg_names [REGNO (first_push) + last_reg]);
1941}
1942
1943/* Generate a PARALLEL which will satisfy the rx_rtsd_vector predicate. */
1944
1945static rtx
1946gen_rx_rtsd_vector (unsigned int adjust, unsigned int low, unsigned int high)
1947{
1948 unsigned int i;
1949 unsigned int bias = 3;
1950 unsigned int count = (high - low) + bias;
1951 rtx vector;
1952
1953 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1954
1955 XVECEXP (vector, 0, 0) =
f7df4a84 1956 gen_rtx_SET (stack_pointer_rtx,
0a81f074 1957 plus_constant (Pmode, stack_pointer_rtx, adjust));
65a324b4
NC
1958
1959 for (i = 0; i < count - 2; i++)
1960 XVECEXP (vector, 0, i + 1) =
f7df4a84 1961 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
65a324b4
NC
1962 gen_rtx_MEM (SImode,
1963 i == 0 ? stack_pointer_rtx
0a81f074 1964 : plus_constant (Pmode, stack_pointer_rtx,
65a324b4
NC
1965 i * UNITS_PER_WORD)));
1966
3810076b 1967 XVECEXP (vector, 0, count - 1) = ret_rtx;
65a324b4
NC
1968
1969 return vector;
1970}
1971
1972/* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1973
1974static rtx
1975gen_rx_popm_vector (unsigned int low, unsigned int high)
1976{
1977 unsigned int i;
1978 unsigned int count = (high - low) + 2;
1979 rtx vector;
1980
1981 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1982
1983 XVECEXP (vector, 0, 0) =
f7df4a84 1984 gen_rtx_SET (stack_pointer_rtx,
0a81f074 1985 plus_constant (Pmode, stack_pointer_rtx,
65a324b4
NC
1986 (count - 1) * UNITS_PER_WORD));
1987
1988 for (i = 0; i < count - 1; i++)
1989 XVECEXP (vector, 0, i + 1) =
f7df4a84 1990 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
65a324b4
NC
1991 gen_rtx_MEM (SImode,
1992 i == 0 ? stack_pointer_rtx
0a81f074 1993 : plus_constant (Pmode, stack_pointer_rtx,
65a324b4
NC
1994 i * UNITS_PER_WORD)));
1995
1996 return vector;
1997}
be61ce52
NC
1998
1999/* Returns true if a simple return insn can be used. */
2000
2001bool
2002rx_can_use_simple_return (void)
2003{
2004 unsigned int low;
2005 unsigned int high;
2006 unsigned int frame_size;
2007 unsigned int stack_size;
2008 unsigned int register_mask;
2009
2010 if (is_naked_func (NULL_TREE)
2011 || is_fast_interrupt_func (NULL_TREE)
2012 || is_interrupt_func (NULL_TREE))
2013 return false;
2014
2015 rx_get_stack_layout (& low, & high, & register_mask,
2016 & frame_size, & stack_size);
2017
2018 return (register_mask == 0
2019 && (frame_size + stack_size) == 0
2020 && low == 0);
2021}
2022
55ffa756
NC
2023static void
2024pop_regs (unsigned int high, unsigned int low)
2025{
2026 if (high == low)
2027 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2028 else
2029 emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
2030 gen_rx_popm_vector (low, high)));
2031}
2032
65a324b4
NC
2033void
2034rx_expand_epilogue (bool is_sibcall)
2035{
2036 unsigned int low;
2037 unsigned int high;
2038 unsigned int frame_size;
2039 unsigned int stack_size;
2040 unsigned int register_mask;
2041 unsigned int regs_size;
9595a419 2042 unsigned int reg;
65a324b4
NC
2043 unsigned HOST_WIDE_INT total_size;
2044
0d8f38d3
NC
2045 /* FIXME: We do not support indirect sibcalls at the moment because we
2046 cannot guarantee that the register holding the function address is a
2047 call-used register. If it is a call-saved register then the stack
2048 pop instructions generated in the epilogue will corrupt the address
2049 before it is used.
2050
2051 Creating a new call-used-only register class works but then the
2052 reload pass gets stuck because it cannot always find a call-used
2053 register for spilling sibcalls.
2054
2055 The other possible solution is for this pass to scan forward for the
2056 sibcall instruction (if it has been generated) and work out if it
2057 is an indirect sibcall using a call-saved register. If it is then
2058 the address can be copied into a call-used register in this epilogue
2059 code and the sibcall instruction modified to use that register. */
2060
65a324b4
NC
2061 if (is_naked_func (NULL_TREE))
2062 {
0d8f38d3
NC
2063 gcc_assert (! is_sibcall);
2064
65a324b4
NC
2065 /* Naked functions use their own, programmer-provided epilogues.
2066 But, in order to keep gcc happy we have to generate some kind of
2067 epilogue RTL. */
2068 emit_jump_insn (gen_naked_return ());
2069 return;
2070 }
2071
2072 rx_get_stack_layout (& low, & high, & register_mask,
2073 & frame_size, & stack_size);
2074
2075 total_size = frame_size + stack_size;
2076 regs_size = ((high - low) + 1) * UNITS_PER_WORD;
2077
2078 /* See if we are unable to use the special stack frame deconstruct and
2079 return instructions. In most cases we can use them, but the exceptions
2080 are:
2081
2082 - Sibling calling functions deconstruct the frame but do not return to
2083 their caller. Instead they branch to their sibling and allow their
2084 return instruction to return to this function's parent.
2085
9595a419 2086 - Fast and normal interrupt handling functions have to use special
65a324b4
NC
2087 return instructions.
2088
2089 - Functions where we have pushed a fragmented set of registers into the
2090 call-save area must have the same set of registers popped. */
2091 if (is_sibcall
2092 || is_fast_interrupt_func (NULL_TREE)
9595a419 2093 || is_interrupt_func (NULL_TREE)
65a324b4
NC
2094 || register_mask)
2095 {
2096 /* Cannot use the special instructions - deconstruct by hand. */
2097 if (total_size)
15ba5696
NC
2098 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2099 GEN_INT (total_size), false);
65a324b4 2100
dafcb54e 2101 if (MUST_SAVE_ACC_REGISTER)
65a324b4 2102 {
9595a419
NC
2103 unsigned int acc_low, acc_high;
2104
2105 /* Reverse the saving of the accumulator register onto the stack.
2106 Note we must adjust the saved "low" accumulator value as it
2107 is really the middle 32 bits of the accumulator. */
2108 if (register_mask)
2109 {
2110 acc_low = acc_high = 0;
aea8fc97
NC
2111
2112 for (reg = 1; reg < CC_REGNUM; reg ++)
9595a419
NC
2113 if (register_mask & (1 << reg))
2114 {
2115 if (acc_low == 0)
2116 acc_low = reg;
2117 else
2118 {
2119 acc_high = reg;
2120 break;
2121 }
2122 }
2123 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_high)));
2124 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, acc_low)));
2125 }
2126 else
2127 {
2128 acc_low = low;
2129 acc_high = low + 1;
2130 emit_insn (gen_stack_popm (GEN_INT (2 * UNITS_PER_WORD),
2131 gen_rx_popm_vector (acc_low, acc_high)));
2132 }
2133
2134 emit_insn (gen_ashlsi3 (gen_rtx_REG (SImode, acc_low),
2135 gen_rtx_REG (SImode, acc_low),
2136 GEN_INT (16)));
2137 emit_insn (gen_mvtaclo (gen_rtx_REG (SImode, acc_low)));
2138 emit_insn (gen_mvtachi (gen_rtx_REG (SImode, acc_high)));
2139 }
65a324b4 2140
9595a419
NC
2141 if (register_mask)
2142 {
aea8fc97 2143 for (reg = 0; reg < CC_REGNUM; reg ++)
65a324b4 2144 if (register_mask & (1 << reg))
55ffa756
NC
2145 {
2146 low = high = reg;
2147 while (register_mask & (1 << high))
2148 high ++;
2149 pop_regs (high - 1, low);
2150 reg = high;
2151 }
65a324b4
NC
2152 }
2153 else if (low)
55ffa756 2154 pop_regs (high, low);
65a324b4
NC
2155
2156 if (is_fast_interrupt_func (NULL_TREE))
0d8f38d3
NC
2157 {
2158 gcc_assert (! is_sibcall);
2159 emit_jump_insn (gen_fast_interrupt_return ());
2160 }
9595a419 2161 else if (is_interrupt_func (NULL_TREE))
0d8f38d3
NC
2162 {
2163 gcc_assert (! is_sibcall);
2164 emit_jump_insn (gen_exception_return ());
2165 }
65a324b4
NC
2166 else if (! is_sibcall)
2167 emit_jump_insn (gen_simple_return ());
2168
2169 return;
2170 }
2171
2172 /* If we allocated space on the stack, free it now. */
2173 if (total_size)
2174 {
2175 unsigned HOST_WIDE_INT rtsd_size;
2176
2177 /* See if we can use the RTSD instruction. */
2178 rtsd_size = total_size + regs_size;
2179 if (rtsd_size < 1024 && (rtsd_size % 4) == 0)
2180 {
2181 if (low)
2182 emit_jump_insn (gen_pop_and_return
2183 (GEN_INT (rtsd_size),
2184 gen_rx_rtsd_vector (rtsd_size, low, high)));
2185 else
2186 emit_jump_insn (gen_deallocate_and_return (GEN_INT (total_size)));
2187
2188 return;
2189 }
2190
15ba5696
NC
2191 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
2192 GEN_INT (total_size), false);
65a324b4
NC
2193 }
2194
2195 if (low)
2196 emit_jump_insn (gen_pop_and_return (GEN_INT (regs_size),
2197 gen_rx_rtsd_vector (regs_size,
2198 low, high)));
2199 else
2200 emit_jump_insn (gen_simple_return ());
2201}
2202
2203
2204/* Compute the offset (in bytes) between FROM (arg pointer
2205 or frame pointer) and TO (frame pointer or stack pointer).
2206 See ASCII art comment at the start of rx_expand_prologue
2207 for more information. */
2208
2209int
2210rx_initial_elimination_offset (int from, int to)
2211{
2212 unsigned int low;
2213 unsigned int high;
2214 unsigned int frame_size;
2215 unsigned int stack_size;
2216 unsigned int mask;
2217
2218 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
2219
2220 if (from == ARG_POINTER_REGNUM)
2221 {
2222 /* Extend the computed size of the stack frame to
2223 include the registers pushed in the prologue. */
2224 if (low)
2225 frame_size += ((high - low) + 1) * UNITS_PER_WORD;
2226 else
2227 frame_size += bit_count (mask) * UNITS_PER_WORD;
2228
2229 /* Remember to include the return address. */
2230 frame_size += 1 * UNITS_PER_WORD;
2231
2232 if (to == FRAME_POINTER_REGNUM)
2233 return frame_size;
2234
2235 gcc_assert (to == STACK_POINTER_REGNUM);
2236 return frame_size + stack_size;
2237 }
2238
2239 gcc_assert (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM);
2240 return stack_size;
2241}
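/* Worked example (illustrative, made-up layout): if rx_get_stack_layout
   reports registers r7..r13 saved, an 8-byte local frame and 12 bytes of
   outgoing stack space, the ARG_POINTER to FRAME_POINTER offset is
   8 + 7 * UNITS_PER_WORD + UNITS_PER_WORD = 40 bytes, the ARG_POINTER to
   STACK_POINTER offset is 40 + 12 = 52, and the FRAME_POINTER to
   STACK_POINTER offset is just the 12 bytes of stack space.  */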
2242
65a324b4
NC
2243/* Decide if a variable should go into one of the small data sections. */
2244
2245static bool
2246rx_in_small_data (const_tree decl)
2247{
2248 int size;
f961457f 2249 const char * section;
65a324b4
NC
2250
2251 if (rx_small_data_limit == 0)
2252 return false;
2253
2254 if (TREE_CODE (decl) != VAR_DECL)
2255 return false;
2256
2257 /* We do not put read-only variables into a small data area because
2258 they would be placed with the other read-only sections, far away
2259 from the read-write data sections, and we only have one small
2260 data area pointer.
2261     Similarly, commons are placed in the .bss section, which might be
2262     far away from (and out of alignment with respect to) the .data section. */
2263 if (TREE_READONLY (decl) || DECL_COMMON (decl))
2264 return false;
2265
2266 section = DECL_SECTION_NAME (decl);
2267 if (section)
f961457f 2268 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
65a324b4
NC
2269
2270 size = int_size_in_bytes (TREE_TYPE (decl));
2271
2272 return (size > 0) && (size <= rx_small_data_limit);
2273}
2274
2275/* Return a section for X.
2276 The only special thing we do here is to honor small data. */
2277
2278static section *
ef4bddc2 2279rx_select_rtx_section (machine_mode mode,
65a324b4
NC
2280 rtx x,
2281 unsigned HOST_WIDE_INT align)
2282{
2283 if (rx_small_data_limit > 0
2284 && GET_MODE_SIZE (mode) <= rx_small_data_limit
2285 && align <= (unsigned HOST_WIDE_INT) rx_small_data_limit * BITS_PER_UNIT)
2286 return sdata_section;
2287
2288 return default_elf_select_rtx_section (mode, x, align);
2289}
2290
2291static section *
2292rx_select_section (tree decl,
2293 int reloc,
2294 unsigned HOST_WIDE_INT align)
2295{
2296 if (rx_small_data_limit > 0)
2297 {
2298 switch (categorize_decl_for_section (decl, reloc))
2299 {
2300 case SECCAT_SDATA: return sdata_section;
2301 case SECCAT_SBSS: return sbss_section;
2302 case SECCAT_SRODATA:
2303 /* Fall through. We do not put small, read only
2304 data into the C_2 section because we are not
2305 using the C_2 section. We do not use the C_2
2306 section because it is located with the other
2307 read-only data sections, far away from the read-write
2308 data sections and we only have one small data
2309 pointer (r13). */
2310 default:
2311 break;
2312 }
2313 }
2314
2315 /* If we are supporting the Renesas assembler
2316 we cannot use mergeable sections. */
2317 if (TARGET_AS100_SYNTAX)
2318 switch (categorize_decl_for_section (decl, reloc))
2319 {
2320 case SECCAT_RODATA_MERGE_CONST:
2321 case SECCAT_RODATA_MERGE_STR_INIT:
2322 case SECCAT_RODATA_MERGE_STR:
2323 return readonly_data_section;
2324
2325 default:
2326 break;
2327 }
2328
2329 return default_elf_select_section (decl, reloc, align);
2330}
2331\f
2332enum rx_builtin
2333{
2334 RX_BUILTIN_BRK,
2335 RX_BUILTIN_CLRPSW,
2336 RX_BUILTIN_INT,
2337 RX_BUILTIN_MACHI,
2338 RX_BUILTIN_MACLO,
2339 RX_BUILTIN_MULHI,
2340 RX_BUILTIN_MULLO,
2341 RX_BUILTIN_MVFACHI,
2342 RX_BUILTIN_MVFACMI,
2343 RX_BUILTIN_MVFC,
2344 RX_BUILTIN_MVTACHI,
2345 RX_BUILTIN_MVTACLO,
2346 RX_BUILTIN_MVTC,
9595a419 2347 RX_BUILTIN_MVTIPL,
65a324b4
NC
2348 RX_BUILTIN_RACW,
2349 RX_BUILTIN_REVW,
2350 RX_BUILTIN_RMPA,
2351 RX_BUILTIN_ROUND,
65a324b4
NC
2352 RX_BUILTIN_SETPSW,
2353 RX_BUILTIN_WAIT,
2354 RX_BUILTIN_max
2355};
2356
87e91fca
DD
2357static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2358
65a324b4
NC
2359static void
2360rx_init_builtins (void)
2361{
4bbd2ea8
DD
2362#define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2363 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2364 add_builtin_function ("__builtin_rx_" LC_NAME, \
2365 build_function_type_list (RET_TYPE##_type_node, \
2366 NULL_TREE), \
2367 RX_BUILTIN_##UC_NAME, \
2368 BUILT_IN_MD, NULL, NULL_TREE)
2369
65a324b4 2370#define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
87e91fca 2371 rx_builtins[RX_BUILTIN_##UC_NAME] = \
5f2f13fd 2372 add_builtin_function ("__builtin_rx_" LC_NAME, \
65a324b4
NC
2373 build_function_type_list (RET_TYPE##_type_node, \
2374 ARG_TYPE##_type_node, \
2375 NULL_TREE), \
2376 RX_BUILTIN_##UC_NAME, \
2377 BUILT_IN_MD, NULL, NULL_TREE)
2378
2379#define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
87e91fca 2380 rx_builtins[RX_BUILTIN_##UC_NAME] = \
65a324b4
NC
2381 add_builtin_function ("__builtin_rx_" LC_NAME, \
2382 build_function_type_list (RET_TYPE##_type_node, \
2383 ARG_TYPE1##_type_node,\
2384 ARG_TYPE2##_type_node,\
2385 NULL_TREE), \
2386 RX_BUILTIN_##UC_NAME, \
2387 BUILT_IN_MD, NULL, NULL_TREE)
2388
2389#define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
87e91fca 2390 rx_builtins[RX_BUILTIN_##UC_NAME] = \
65a324b4
NC
2391 add_builtin_function ("__builtin_rx_" LC_NAME, \
2392 build_function_type_list (RET_TYPE##_type_node, \
2393 ARG_TYPE1##_type_node,\
2394 ARG_TYPE2##_type_node,\
2395 ARG_TYPE3##_type_node,\
2396 NULL_TREE), \
2397 RX_BUILTIN_##UC_NAME, \
2398 BUILT_IN_MD, NULL, NULL_TREE)
2399
4bbd2ea8 2400 ADD_RX_BUILTIN0 (BRK, "brk", void);
65a324b4
NC
2401 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
2402 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
2403 ADD_RX_BUILTIN1 (INT, "int", void, integer);
2404 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
2405 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
2406 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
2407 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
4bbd2ea8
DD
2408 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
2409 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
65a324b4
NC
2410 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
2411 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
4bbd2ea8 2412 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
65a324b4
NC
2413 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
2414 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
9595a419 2415 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
65a324b4
NC
2416 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
2417 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
2418 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
4bbd2ea8 2419 ADD_RX_BUILTIN0 (WAIT, "wait", void);
65a324b4
NC
2420}
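/* Illustrative use of the builtins registered above, from hypothetical
   user code (not part of this file):

     __builtin_rx_maclo (a, b);         // accumulate a*b (low halves)
     int hi = __builtin_rx_mvfachi ();  // fetch the accumulator high word
     __builtin_rx_wait ();              // emit a WAIT instruction

   The variable names are invented; the builtin names are exactly the
   "__builtin_rx_" LC_NAME strings passed to add_builtin_function.  */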
2421
87e91fca
DD
2422/* Return the RX builtin for CODE. */
2423
2424static tree
2425rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2426{
2427 if (code >= RX_BUILTIN_max)
2428 return error_mark_node;
2429
2430 return rx_builtins[code];
2431}
2432
65a324b4
NC
2433static rtx
2434rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
2435{
2436 if (reg && ! REG_P (arg))
2437 arg = force_reg (SImode, arg);
2438
2439 emit_insn (gen_func (arg));
2440
2441 return NULL_RTX;
2442}
2443
2444static rtx
2445rx_expand_builtin_mvtc (tree exp)
2446{
2447 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2448 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2449
2450 if (! CONST_INT_P (arg1))
2451 return NULL_RTX;
2452
2453 if (! REG_P (arg2))
2454 arg2 = force_reg (SImode, arg2);
2455
2456 emit_insn (gen_mvtc (arg1, arg2));
2457
2458 return NULL_RTX;
2459}
2460
2461static rtx
2462rx_expand_builtin_mvfc (tree t_arg, rtx target)
2463{
2464 rtx arg = expand_normal (t_arg);
2465
2466 if (! CONST_INT_P (arg))
2467 return NULL_RTX;
2468
dafcb54e
NC
2469 if (target == NULL_RTX)
2470 return NULL_RTX;
2471
65a324b4
NC
2472 if (! REG_P (target))
2473 target = force_reg (SImode, target);
2474
2475 emit_insn (gen_mvfc (target, arg));
2476
2477 return target;
2478}
2479
9595a419
NC
2480static rtx
2481rx_expand_builtin_mvtipl (rtx arg)
2482{
2483 /* The RX610 does not support the MVTIPL instruction. */
2484 if (rx_cpu_type == RX610)
2485 return NULL_RTX;
2486
bf9afb7d 2487 if (! CONST_INT_P (arg) || ! IN_RANGE (INTVAL (arg), 0, (1 << 4) - 1))
9595a419
NC
2488 return NULL_RTX;
2489
2490 emit_insn (gen_mvtipl (arg));
2491
2492 return NULL_RTX;
2493}
2494
65a324b4
NC
2495static rtx
2496rx_expand_builtin_mac (tree exp, rtx (* gen_func)(rtx, rtx))
2497{
2498 rtx arg1 = expand_normal (CALL_EXPR_ARG (exp, 0));
2499 rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
2500
2501 if (! REG_P (arg1))
2502 arg1 = force_reg (SImode, arg1);
2503
2504 if (! REG_P (arg2))
2505 arg2 = force_reg (SImode, arg2);
2506
2507 emit_insn (gen_func (arg1, arg2));
2508
2509 return NULL_RTX;
2510}
2511
2512static rtx
2513rx_expand_int_builtin_1_arg (rtx arg,
2514 rtx target,
2515 rtx (* gen_func)(rtx, rtx),
2516 bool mem_ok)
2517{
2518 if (! REG_P (arg))
2519 if (!mem_ok || ! MEM_P (arg))
2520 arg = force_reg (SImode, arg);
2521
2522 if (target == NULL_RTX || ! REG_P (target))
2523 target = gen_reg_rtx (SImode);
2524
2525 emit_insn (gen_func (target, arg));
2526
2527 return target;
2528}
2529
2530static rtx
2531rx_expand_int_builtin_0_arg (rtx target, rtx (* gen_func)(rtx))
2532{
2533 if (target == NULL_RTX || ! REG_P (target))
2534 target = gen_reg_rtx (SImode);
2535
2536 emit_insn (gen_func (target));
2537
2538 return target;
2539}
2540
2541static rtx
2542rx_expand_builtin_round (rtx arg, rtx target)
2543{
2544 if ((! REG_P (arg) && ! MEM_P (arg))
2545 || GET_MODE (arg) != SFmode)
2546 arg = force_reg (SFmode, arg);
2547
2548 if (target == NULL_RTX || ! REG_P (target))
2549 target = gen_reg_rtx (SImode);
2550
2551 emit_insn (gen_lrintsf2 (target, arg));
2552
2553 return target;
2554}
2555
bf9afb7d 2556static int
197a830e 2557valid_psw_flag (rtx op, const char *which)
bf9afb7d
DD
2558{
2559 static int mvtc_inform_done = 0;
2560
2561 if (GET_CODE (op) == CONST_INT)
2562 switch (INTVAL (op))
2563 {
2564 case 0: case 'c': case 'C':
2565 case 1: case 'z': case 'Z':
2566 case 2: case 's': case 'S':
2567 case 3: case 'o': case 'O':
2568 case 8: case 'i': case 'I':
2569 case 9: case 'u': case 'U':
2570 return 1;
2571 }
2572
2573 error ("__builtin_rx_%s takes 'C', 'Z', 'S', 'O', 'I', or 'U'", which);
2574 if (!mvtc_inform_done)
2575 error ("use __builtin_rx_mvtc (0, ... ) to write arbitrary values to PSW");
2576 mvtc_inform_done = 1;
2577
2578 return 0;
2579}
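/* Illustrative use of the PSW builtins validated above (hypothetical user
   code): __builtin_rx_clrpsw ('I') clears the interrupt-enable bit and
   __builtin_rx_setpsw ('I') sets it again; the numeric bit positions
   accepted by the switch above (e.g. 8 for 'I') work as well.  */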
2580
65a324b4
NC
2581static rtx
2582rx_expand_builtin (tree exp,
2583 rtx target,
2584 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 2585 machine_mode mode ATTRIBUTE_UNUSED,
65a324b4
NC
2586 int ignore ATTRIBUTE_UNUSED)
2587{
2588 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
58a11859 2589 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
65a324b4
NC
2590 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2591 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2592
2593 switch (fcode)
2594 {
2595 case RX_BUILTIN_BRK: emit_insn (gen_brk ()); return NULL_RTX;
bf9afb7d
DD
2596 case RX_BUILTIN_CLRPSW:
2597 if (!valid_psw_flag (op, "clrpsw"))
2598 return NULL_RTX;
2599 return rx_expand_void_builtin_1_arg (op, gen_clrpsw, false);
2600 case RX_BUILTIN_SETPSW:
2601 if (!valid_psw_flag (op, "setpsw"))
2602 return NULL_RTX;
2603 return rx_expand_void_builtin_1_arg (op, gen_setpsw, false);
65a324b4
NC
2604 case RX_BUILTIN_INT: return rx_expand_void_builtin_1_arg
2605 (op, gen_int, false);
2606 case RX_BUILTIN_MACHI: return rx_expand_builtin_mac (exp, gen_machi);
2607 case RX_BUILTIN_MACLO: return rx_expand_builtin_mac (exp, gen_maclo);
2608 case RX_BUILTIN_MULHI: return rx_expand_builtin_mac (exp, gen_mulhi);
2609 case RX_BUILTIN_MULLO: return rx_expand_builtin_mac (exp, gen_mullo);
2610 case RX_BUILTIN_MVFACHI: return rx_expand_int_builtin_0_arg
2611 (target, gen_mvfachi);
2612 case RX_BUILTIN_MVFACMI: return rx_expand_int_builtin_0_arg
2613 (target, gen_mvfacmi);
2614 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2615 (op, gen_mvtachi, true);
2616 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2617 (op, gen_mvtaclo, true);
e4614c18
NC
2618 case RX_BUILTIN_RMPA:
2619 if (rx_allow_string_insns)
2620 emit_insn (gen_rmpa ());
2621 else
2622 error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
2623 return NULL_RTX;
65a324b4
NC
2624 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2625 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
9595a419 2626 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
65a324b4
NC
2627 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2628 (op, gen_racw, false);
2629 case RX_BUILTIN_ROUND: return rx_expand_builtin_round (op, target);
2630 case RX_BUILTIN_REVW: return rx_expand_int_builtin_1_arg
2631 (op, target, gen_revw, false);
65a324b4
NC
2632 case RX_BUILTIN_WAIT: emit_insn (gen_wait ()); return NULL_RTX;
2633
2634 default:
2635 internal_error ("bad builtin code");
2636 break;
2637 }
2638
2639 return NULL_RTX;
2640}
2641\f
2642/* Place an element into a constructor or destructor section.
2643 Like default_ctor_section_asm_out_constructor in varasm.c
2644 except that it uses .init_array (or .fini_array) and it
2645 handles constructor priorities. */
2646
2647static void
2648rx_elf_asm_cdtor (rtx symbol, int priority, bool is_ctor)
2649{
2650 section * s;
2651
2652 if (priority != DEFAULT_INIT_PRIORITY)
2653 {
2654 char buf[18];
2655
2656 sprintf (buf, "%s.%.5u",
2657 is_ctor ? ".init_array" : ".fini_array",
2658 priority);
2659 s = get_section (buf, SECTION_WRITE, NULL_TREE);
2660 }
2661 else if (is_ctor)
2662 s = ctors_section;
2663 else
2664 s = dtors_section;
2665
2666 switch_to_section (s);
2667 assemble_align (POINTER_SIZE);
2668 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
2669}
2670
2671static void
2672rx_elf_asm_constructor (rtx symbol, int priority)
2673{
2674 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */true);
2675}
2676
2677static void
2678rx_elf_asm_destructor (rtx symbol, int priority)
2679{
2680 rx_elf_asm_cdtor (symbol, priority, /* is_ctor= */false);
2681}
2682\f
9595a419 2683/* Check "fast_interrupt", "interrupt" and "naked" attributes. */
65a324b4
NC
2684
2685static tree
2686rx_handle_func_attribute (tree * node,
2687 tree name,
107bdb31 2688 tree args ATTRIBUTE_UNUSED,
65a324b4
NC
2689 int flags ATTRIBUTE_UNUSED,
2690 bool * no_add_attrs)
2691{
2692 gcc_assert (DECL_P (* node));
65a324b4
NC
2693
2694 if (TREE_CODE (* node) != FUNCTION_DECL)
2695 {
2696 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2697 name);
2698 * no_add_attrs = true;
2699 }
2700
2701 /* FIXME: We ought to check for conflicting attributes. */
2702
2703 /* FIXME: We ought to check that the interrupt and exception
2704 handler attributes have been applied to void functions. */
2705 return NULL_TREE;
2706}
2707
69c7a374
DD
2708/* Check "vector" attribute. */
2709
2710static tree
2711rx_handle_vector_attribute (tree * node,
2712 tree name,
2713 tree args,
2714 int flags ATTRIBUTE_UNUSED,
2715 bool * no_add_attrs)
2716{
2717 gcc_assert (DECL_P (* node));
2718 gcc_assert (args != NULL_TREE);
2719
2720 if (TREE_CODE (* node) != FUNCTION_DECL)
2721 {
2722 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2723 name);
2724 * no_add_attrs = true;
2725 }
2726
2727 return NULL_TREE;
2728}
2729
65a324b4
NC
2730/* Table of RX specific attributes. */
2731const struct attribute_spec rx_attribute_table[] =
2732{
62d784f7
KT
2733 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2734 affects_type_identity. */
2735 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2736 false },
69c7a374 2737 { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute,
62d784f7
KT
2738 false },
2739 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2740 false },
69c7a374
DD
2741 { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
2742 false },
62d784f7 2743 { NULL, 0, 0, false, false, false, NULL, false }
65a324b4
NC
2744};
2745
a32b99ad 2746/* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
fac0f722
JM
2747
2748static void
a32b99ad 2749rx_override_options_after_change (void)
5f75e477
NC
2750{
2751 static bool first_time = TRUE;
5f75e477
NC
2752
2753 if (first_time)
2754 {
2755 /* If this is the first time through and the user has not disabled
a32b99ad
JM
2756 the use of RX FPU hardware then enable -ffinite-math-only,
2757 since the FPU instructions do not support NaNs and infinities. */
5f75e477 2758 if (TARGET_USE_FPU)
a32b99ad 2759 flag_finite_math_only = 1;
5f75e477 2760
5f75e477
NC
2761 first_time = FALSE;
2762 }
2763 else
2764 {
2765 /* Alert the user if they are changing the optimization options
2766 to use IEEE compliant floating point arithmetic with RX FPU insns. */
2767 if (TARGET_USE_FPU
a32b99ad
JM
2768 && !flag_finite_math_only)
2769 warning (0, "RX FPU instructions do not support NaNs and infinities");
5f75e477
NC
2770 }
2771}
2772
0685e770
DD
2773static void
2774rx_option_override (void)
2775{
abd016e6
JM
2776 unsigned int i;
2777 cl_deferred_option *opt;
9771b263 2778 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
abd016e6 2779
9771b263
DN
2780 if (v)
2781 FOR_EACH_VEC_ELT (*v, i, opt)
2782 {
2783 switch (opt->opt_index)
2784 {
2785 case OPT_mint_register_:
2786 switch (opt->value)
2787 {
2788 case 4:
2789 fixed_regs[10] = call_used_regs [10] = 1;
2790 /* Fall through. */
2791 case 3:
2792 fixed_regs[11] = call_used_regs [11] = 1;
2793 /* Fall through. */
2794 case 2:
2795 fixed_regs[12] = call_used_regs [12] = 1;
2796 /* Fall through. */
2797 case 1:
2798 fixed_regs[13] = call_used_regs [13] = 1;
2799 /* Fall through. */
2800 case 0:
2801 rx_num_interrupt_regs = opt->value;
2802 break;
2803 default:
2804 rx_num_interrupt_regs = 0;
2805 /* Error message already given because rx_handle_option
2806 returned false. */
2807 break;
2808 }
2809 break;
abd016e6 2810
9771b263
DN
2811 default:
2812 gcc_unreachable ();
2813 }
2814 }
abd016e6 2815
0685e770 2816 /* This target defaults to strict volatile bitfields. */
36acc1a2 2817 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
0685e770 2818 flag_strict_volatile_bitfields = 1;
a32b99ad
JM
2819
2820 rx_override_options_after_change ();
662666e5 2821
3fad4d00 2822 /* These values are bytes, not log. */
662666e5 2823 if (align_jumps == 0 && ! optimize_size)
3fad4d00 2824 align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
662666e5 2825 if (align_loops == 0 && ! optimize_size)
3fad4d00 2826 align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
662666e5 2827 if (align_labels == 0 && ! optimize_size)
3fad4d00 2828 align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
0685e770
DD
2829}
2830
5f75e477 2831\f
65a324b4
NC
2832static bool
2833rx_allocate_stack_slots_for_args (void)
2834{
2835 /* Naked functions should not allocate stack slots for arguments. */
2836 return ! is_naked_func (NULL_TREE);
2837}
2838
2839static bool
2840rx_func_attr_inlinable (const_tree decl)
2841{
2842 return ! is_fast_interrupt_func (decl)
9595a419 2843 && ! is_interrupt_func (decl)
65a324b4
NC
2844 && ! is_naked_func (decl);
2845}
2846
d45eae79
SL
2847static bool
2848rx_warn_func_return (tree decl)
2849{
2850 /* Naked functions are implemented entirely in assembly, including the
2851 return sequence, so suppress warnings about this. */
2852 return !is_naked_func (decl);
2853}
2854
0d8f38d3
NC
2855/* Return nonzero if it is ok to make a tail-call to DECL,
2856   a function_decl, or NULL if this is an indirect call using EXP. */
2857
2858static bool
dafcb54e 2859rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
0d8f38d3 2860{
eb457a8c
DD
2861 if (TARGET_JSR)
2862 return false;
2863
0d8f38d3
NC
2864 /* Do not allow indirect tailcalls. The
2865 sibcall patterns do not support them. */
2866 if (decl == NULL)
2867 return false;
2868
2869 /* Never tailcall from inside interrupt handlers or naked functions. */
2870 if (is_fast_interrupt_func (NULL_TREE)
2871 || is_interrupt_func (NULL_TREE)
2872 || is_naked_func (NULL_TREE))
2873 return false;
2874
2875 return true;
2876}
2877
65a324b4
NC
2878static void
2879rx_file_start (void)
2880{
2881 if (! TARGET_AS100_SYNTAX)
2882 default_file_start ();
2883}
2884
2885static bool
2886rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2887{
9c582551 2888 /* The packed attribute overrides the MS behavior. */
27128fc3 2889 return ! TYPE_PACKED (record_type);
65a324b4 2890}
65a324b4
NC
2891\f
2892/* Returns true if X a legitimate constant for an immediate
2893 operand on the RX. X is already known to satisfy CONSTANT_P. */
2894
2895bool
ef4bddc2 2896rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
65a324b4 2897{
65a324b4
NC
2898 switch (GET_CODE (x))
2899 {
2900 case CONST:
2901 x = XEXP (x, 0);
2902
2903 if (GET_CODE (x) == PLUS)
2904 {
2905 if (! CONST_INT_P (XEXP (x, 1)))
2906 return false;
2907
2908 /* GCC would not pass us CONST_INT + CONST_INT so we
2909 know that we have {SYMBOL|LABEL} + CONST_INT. */
2910 x = XEXP (x, 0);
2911 gcc_assert (! CONST_INT_P (x));
2912 }
2913
2914 switch (GET_CODE (x))
2915 {
2916 case LABEL_REF:
2917 case SYMBOL_REF:
2918 return true;
2919
15ba5696 2920 case UNSPEC:
878a9174 2921 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
15ba5696 2922
65a324b4
NC
2923 default:
2924 /* FIXME: Can this ever happen ? */
e9c0470a 2925 gcc_unreachable ();
65a324b4
NC
2926 }
2927 break;
2928
2929 case LABEL_REF:
2930 case SYMBOL_REF:
2931 return true;
2932 case CONST_DOUBLE:
c9c27b72 2933 return (rx_max_constant_size == 0 || rx_max_constant_size == 4);
65a324b4
NC
2934 case CONST_VECTOR:
2935 return false;
2936 default:
2937 gcc_assert (CONST_INT_P (x));
2938 break;
2939 }
2940
15ba5696 2941 return ok_for_max_constant (INTVAL (x));
65a324b4
NC
2942}
2943
65a324b4 2944static int
ef4bddc2 2945rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
b413068c 2946 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
65a324b4
NC
2947{
2948 rtx a, b;
2949
2950 if (GET_CODE (addr) != PLUS)
2951 return COSTS_N_INSNS (1);
2952
2953 a = XEXP (addr, 0);
2954 b = XEXP (addr, 1);
2955
2956 if (REG_P (a) && REG_P (b))
2957 /* Try to discourage REG+REG addressing as it keeps two registers live. */
2958 return COSTS_N_INSNS (4);
2959
2960 if (speed)
2961 /* [REG+OFF] is just as fast as [REG]. */
2962 return COSTS_N_INSNS (1);
2963
2964 if (CONST_INT_P (b)
2965 && ((INTVAL (b) > 128) || INTVAL (b) < -127))
2966 /* Try to discourage REG + <large OFF> when optimizing for size. */
2967 return COSTS_N_INSNS (2);
2968
2969 return COSTS_N_INSNS (1);
2970}
2971
2972static bool
2973rx_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2974{
2975 /* We can always eliminate to the frame pointer.
2976 We can eliminate to the stack pointer unless a frame
2977 pointer is needed. */
2978
2979 return to == FRAME_POINTER_REGNUM
2980 || ( to == STACK_POINTER_REGNUM && ! frame_pointer_needed);
2981}
2982\f
2983
2984static void
2985rx_trampoline_template (FILE * file)
2986{
2987 /* Output assembler code for a block containing the constant
2988 part of a trampoline, leaving space for the variable parts.
2989
2990 On the RX, (where r8 is the static chain regnum) the trampoline
2991 looks like:
2992
2993 mov #<static chain value>, r8
2994 mov #<function's address>, r9
2995 jmp r9
2996
2997    In big-endian-data-mode, however, instructions are read into the CPU
2998    4 bytes at a time. These bytes are then swapped around before being
2999    passed to the decoder. So we must partition our trampoline into
3000    4-byte packets and swap these packets around so that the instruction
3001    reader will reverse the process. But, in order to avoid splitting
3002    the 32-bit constants across these packet boundaries (which would make
3003    inserting them into the constructed trampoline very difficult), we have
3004    to pad the instruction sequence with NOP insns. I.e.:
3005
3006 nop
3007 nop
3008 mov.l #<...>, r8
3009 nop
3010 nop
3011 mov.l #<...>, r9
3012 jmp r9
3013 nop
3014 nop */
3015
3016 if (! TARGET_BIG_ENDIAN_DATA)
3017 {
3018 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", STATIC_CHAIN_REGNUM);
3019 asm_fprintf (file, "\tmov.L\t#0deadbeefH, r%d\n", TRAMPOLINE_TEMP_REGNUM);
3020 asm_fprintf (file, "\tjmp\tr%d\n", TRAMPOLINE_TEMP_REGNUM);
3021 }
3022 else
3023 {
3024 char r8 = '0' + STATIC_CHAIN_REGNUM;
3025 char r9 = '0' + TRAMPOLINE_TEMP_REGNUM;
3026
3027 if (TARGET_AS100_SYNTAX)
3028 {
3029 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r8);
3030 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3031 asm_fprintf (file, "\t.BYTE 0%c2H, 0fbH, 003H, 003H\n", r9);
3032 asm_fprintf (file, "\t.BYTE 0deH, 0adH, 0beH, 0efH\n");
3033 asm_fprintf (file, "\t.BYTE 003H, 003H, 00%cH, 07fH\n", r9);
3034 }
3035 else
3036 {
3037 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r8);
3038 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3039 asm_fprintf (file, "\t.byte 0x%c2, 0xfb, 0x03, 0x03\n", r9);
3040 asm_fprintf (file, "\t.byte 0xde, 0xad, 0xbe, 0xef\n");
3041 asm_fprintf (file, "\t.byte 0x03, 0x03, 0x0%c, 0x7f\n", r9);
3042 }
3043 }
3044}
3045
3046static void
3047rx_trampoline_init (rtx tramp, tree fndecl, rtx chain)
3048{
3049 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
3050
3051 emit_block_move (tramp, assemble_trampoline_template (),
3052 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3053
3054 if (TARGET_BIG_ENDIAN_DATA)
3055 {
3056 emit_move_insn (adjust_address (tramp, SImode, 4), chain);
3057 emit_move_insn (adjust_address (tramp, SImode, 12), fnaddr);
3058 }
3059 else
3060 {
3061 emit_move_insn (adjust_address (tramp, SImode, 2), chain);
3062 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
3063 }
3064}
3065\f
e963cb1a 3066static int
ef4bddc2 3067rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
a5dfec9a
NC
3068 reg_class_t regclass ATTRIBUTE_UNUSED,
3069 bool in)
aea8fc97 3070{
a1d8754e 3071 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
aea8fc97
NC
3072}
3073
e963cb1a 3074/* Convert a CC_MODE to the set of flags that it represents. */
aea8fc97
NC
3075
3076static unsigned int
ef4bddc2 3077flags_from_mode (machine_mode mode)
aea8fc97 3078{
e963cb1a 3079 switch (mode)
aea8fc97 3080 {
e963cb1a
RH
3081 case CC_ZSmode:
3082 return CC_FLAG_S | CC_FLAG_Z;
3083 case CC_ZSOmode:
3084 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
3085 case CC_ZSCmode:
3086 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
3087 case CCmode:
3088 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
3089 case CC_Fmode:
3090 return CC_FLAG_FP;
3091 default:
3092 gcc_unreachable ();
3093 }
3094}
aea8fc97 3095
e963cb1a 3096/* Convert a set of flags to a CC_MODE that can implement it. */
aea8fc97 3097
ef4bddc2 3098static machine_mode
e963cb1a
RH
3099mode_from_flags (unsigned int f)
3100{
3101 if (f & CC_FLAG_FP)
3102 return CC_Fmode;
3103 if (f & CC_FLAG_O)
3104 {
3105 if (f & CC_FLAG_C)
3106 return CCmode;
3107 else
3108 return CC_ZSOmode;
aea8fc97 3109 }
e963cb1a
RH
3110 else if (f & CC_FLAG_C)
3111 return CC_ZSCmode;
3112 else
3113 return CC_ZSmode;
aea8fc97
NC
3114}
3115
e963cb1a
RH
3116/* Convert an RTX_CODE to the set of flags needed to implement it.
3117 This assumes an integer comparison. */
3118
aea8fc97 3119static unsigned int
e963cb1a 3120flags_from_code (enum rtx_code code)
aea8fc97 3121{
e963cb1a 3122 switch (code)
aea8fc97 3123 {
e963cb1a
RH
3124 case LT:
3125 case GE:
72602cd1 3126 return CC_FLAG_S;
e963cb1a
RH
3127 case GT:
3128 case LE:
3129 return CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z;
3130 case GEU:
3131 case LTU:
3132 return CC_FLAG_C;
3133 case GTU:
3134 case LEU:
3135 return CC_FLAG_C | CC_FLAG_Z;
3136 case EQ:
3137 case NE:
3138 return CC_FLAG_Z;
3139 default:
3140 gcc_unreachable ();
aea8fc97
NC
3141 }
3142}
3143
e963cb1a
RH
3144/* Return a CC_MODE of which both M1 and M2 are subsets. */
3145
ef4bddc2
RS
3146static machine_mode
3147rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
aea8fc97 3148{
e963cb1a
RH
3149 unsigned f;
3150
3151 /* Early out for identical modes. */
3152 if (m1 == m2)
3153 return m1;
3154
3155 /* There's no valid combination for FP vs non-FP. */
3156 f = flags_from_mode (m1) | flags_from_mode (m2);
3157 if (f & CC_FLAG_FP)
3158 return VOIDmode;
3159
3160 /* Otherwise, see what mode can implement all the flags. */
3161 return mode_from_flags (f);
aea8fc97 3162}
27bf36f3
RH
3163
3164/* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
3165
ef4bddc2 3166machine_mode
72602cd1 3167rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
27bf36f3
RH
3168{
3169 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3170 return CC_Fmode;
3171
72602cd1
RH
3172 if (y != const0_rtx)
3173 return CCmode;
3174
e963cb1a
RH
3175 return mode_from_flags (flags_from_code (cmp_code));
3176}
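/* Worked example (illustrative): for a signed "x > 0" test,
   flags_from_code (GT) yields CC_FLAG_S | CC_FLAG_O | CC_FLAG_Z, which
   mode_from_flags turns into CC_ZSOmode; a comparison against anything
   other than const0_rtx instead uses the full CCmode as above.  */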
3177
e963cb1a
RH
3178/* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
3179 CC_MODE, and use that in branches based on that compare. */
3180
3181void
ef4bddc2 3182rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
e963cb1a
RH
3183 rtx c1, rtx c2, rtx label)
3184{
3185 rtx flags, x;
3186
3187 flags = gen_rtx_REG (cc_mode, CC_REG);
3188 x = gen_rtx_COMPARE (cc_mode, c1, c2);
f7df4a84 3189 x = gen_rtx_SET (flags, x);
e963cb1a
RH
3190 emit_insn (x);
3191
3192 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
3193 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
f7df4a84 3194 x = gen_rtx_SET (pc_rtx, x);
e963cb1a 3195 emit_jump_insn (x);
27bf36f3
RH
3196}
3197
b4d83be3
RH
3198/* A helper function for matching parallels that set the flags. */
3199
3200bool
ef4bddc2 3201rx_match_ccmode (rtx insn, machine_mode cc_mode)
b4d83be3
RH
3202{
3203 rtx op1, flags;
ef4bddc2 3204 machine_mode flags_mode;
b4d83be3
RH
3205
3206 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
3207
3208 op1 = XVECEXP (PATTERN (insn), 0, 1);
3209 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
3210
3211 flags = SET_DEST (op1);
3212 flags_mode = GET_MODE (flags);
3213
3214 if (GET_MODE (SET_SRC (op1)) != flags_mode)
3215 return false;
3216 if (GET_MODE_CLASS (flags_mode) != MODE_CC)
3217 return false;
3218
3219 /* Ensure that the mode of FLAGS is compatible with CC_MODE. */
3220 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
3221 return false;
3222
3223 return true;
3224}
662666e5
NC
3225\f
3226int
34cc3c86 3227rx_align_for_label (rtx lab, int uses_threshold)
662666e5 3228{
34cc3c86
DD
3229 /* This is a simple heuristic to guess when an alignment would not be useful
3230 because the delay due to the inserted NOPs would be greater than the delay
3231 due to the misaligned branch. If uses_threshold is zero then the alignment
3232 is always useful. */
5f2f13fd 3233 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
34cc3c86
DD
3234 return 0;
3235
1cf1574d
DD
3236 if (optimize_size)
3237 return 0;
3fad4d00 3238 /* These values are log, not bytes. */
1cf1574d 3239 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3fad4d00
DD
3240 return 2; /* 4 bytes */
3241 return 3; /* 8 bytes */
662666e5
NC
3242}
3243
3244static int
9158a0d8 3245rx_max_skip_for_label (rtx_insn *lab)
662666e5
NC
3246{
3247 int opsize;
9158a0d8 3248 rtx_insn *op;
662666e5 3249
1704a72b
DD
3250 if (optimize_size)
3251 return 0;
3252
9158a0d8 3253 if (lab == NULL)
662666e5 3254 return 0;
b4d83be3 3255
662666e5
NC
3256 op = lab;
3257 do
3258 {
3259 op = next_nonnote_nondebug_insn (op);
3260 }
3261 while (op && (LABEL_P (op)
3262 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3263 if (!op)
3264 return 0;
3265
3266 opsize = get_attr_length (op);
3267 if (opsize >= 0 && opsize < 8)
3268 return opsize - 1;
3269 return 0;
3270}
e9c0470a
NC
3271
3272/* Compute the real length of the extending load-and-op instructions. */
3273
3274int
3df4ecc2 3275rx_adjust_insn_length (rtx_insn *insn, int current_length)
e9c0470a
NC
3276{
3277 rtx extend, mem, offset;
3278 bool zero;
3279 int factor;
3280
69c7a374
DD
3281 if (!INSN_P (insn))
3282 return current_length;
3283
e9c0470a
NC
3284 switch (INSN_CODE (insn))
3285 {
3286 default:
3287 return current_length;
3288
3289 case CODE_FOR_plussi3_zero_extendhi:
3290 case CODE_FOR_andsi3_zero_extendhi:
3291 case CODE_FOR_iorsi3_zero_extendhi:
3292 case CODE_FOR_xorsi3_zero_extendhi:
3293 case CODE_FOR_divsi3_zero_extendhi:
3294 case CODE_FOR_udivsi3_zero_extendhi:
3295 case CODE_FOR_minussi3_zero_extendhi:
3296 case CODE_FOR_smaxsi3_zero_extendhi:
3297 case CODE_FOR_sminsi3_zero_extendhi:
3298 case CODE_FOR_multsi3_zero_extendhi:
5f2f13fd 3299 case CODE_FOR_comparesi3_zero_extendhi:
e9c0470a
NC
3300 zero = true;
3301 factor = 2;
3302 break;
3303
3304 case CODE_FOR_plussi3_sign_extendhi:
3305 case CODE_FOR_andsi3_sign_extendhi:
3306 case CODE_FOR_iorsi3_sign_extendhi:
3307 case CODE_FOR_xorsi3_sign_extendhi:
3308 case CODE_FOR_divsi3_sign_extendhi:
3309 case CODE_FOR_udivsi3_sign_extendhi:
3310 case CODE_FOR_minussi3_sign_extendhi:
3311 case CODE_FOR_smaxsi3_sign_extendhi:
3312 case CODE_FOR_sminsi3_sign_extendhi:
3313 case CODE_FOR_multsi3_sign_extendhi:
5f2f13fd 3314 case CODE_FOR_comparesi3_sign_extendhi:
e9c0470a
NC
3315 zero = false;
3316 factor = 2;
3317 break;
3318
3319 case CODE_FOR_plussi3_zero_extendqi:
3320 case CODE_FOR_andsi3_zero_extendqi:
3321 case CODE_FOR_iorsi3_zero_extendqi:
3322 case CODE_FOR_xorsi3_zero_extendqi:
3323 case CODE_FOR_divsi3_zero_extendqi:
3324 case CODE_FOR_udivsi3_zero_extendqi:
3325 case CODE_FOR_minussi3_zero_extendqi:
3326 case CODE_FOR_smaxsi3_zero_extendqi:
3327 case CODE_FOR_sminsi3_zero_extendqi:
3328 case CODE_FOR_multsi3_zero_extendqi:
5f2f13fd 3329 case CODE_FOR_comparesi3_zero_extendqi:
e9c0470a
NC
3330 zero = true;
3331 factor = 1;
3332 break;
3333
3334 case CODE_FOR_plussi3_sign_extendqi:
3335 case CODE_FOR_andsi3_sign_extendqi:
3336 case CODE_FOR_iorsi3_sign_extendqi:
3337 case CODE_FOR_xorsi3_sign_extendqi:
3338 case CODE_FOR_divsi3_sign_extendqi:
3339 case CODE_FOR_udivsi3_sign_extendqi:
3340 case CODE_FOR_minussi3_sign_extendqi:
3341 case CODE_FOR_smaxsi3_sign_extendqi:
3342 case CODE_FOR_sminsi3_sign_extendqi:
3343 case CODE_FOR_multsi3_sign_extendqi:
5f2f13fd 3344 case CODE_FOR_comparesi3_sign_extendqi:
e9c0470a
NC
3345 zero = false;
3346 factor = 1;
3347 break;
3348 }
3349
3350 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3351 extend = single_set (insn);
3352 gcc_assert (extend != NULL_RTX);
3353
3354 extend = SET_SRC (extend);
3355 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3356 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3357 extend = XEXP (extend, 0);
3358 else
3359 extend = XEXP (extend, 1);
3360
3361 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3362 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3363
3364 mem = XEXP (extend, 0);
3365 gcc_checking_assert (MEM_P (mem));
3366 if (REG_P (XEXP (mem, 0)))
3367 return (zero && factor == 1) ? 2 : 3;
3368
3369 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3370 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3371 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3372
3373 offset = XEXP (XEXP (mem, 0), 1);
3374 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3375
3376 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3377 return (zero && factor == 1) ? 3 : 4;
3378
3379 return (zero && factor == 1) ? 4 : 5;
3380}
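/* Worked example (illustrative): for a zero-extending QImode operand whose
   memory address is a plain register the function returns 2, growing to 3
   when a byte offset in the 0..255 range is added and to 4 beyond that;
   the sign-extending and HImode variants are each one byte longer.  */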
47c9ac72
NC
3381
3382static bool
3383rx_narrow_volatile_bitfield (void)
3384{
3385 return true;
3386}
3387
3388static bool
3389rx_ok_to_inline (tree caller, tree callee)
3390{
3391 /* Do not inline functions with local variables
3392    into a naked CALLER - naked functions have no stack frame and
3393 locals need a frame in order to have somewhere to live.
3394
3395 Unfortunately we have no way to determine the presence of
3396 local variables in CALLEE, so we have to be cautious and
3397 assume that there might be some there.
3398
3399 We do allow inlining when CALLEE has the "inline" type
3400 modifier or the "always_inline" or "gnu_inline" attributes. */
3401 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3402 || DECL_DECLARED_INLINE_P (callee)
3403 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3404 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3405}
3406
69f5aa9b
SKS
3407static bool
3408rx_enable_lra (void)
3409{
da02a644 3410 return TARGET_ENABLE_LRA;
69f5aa9b
SKS
3411}
3412
927d22fa
OE
3413rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3414{
3415 if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3416 {
3417 /* If we are inside an interrupt handler, assume that interrupts are
3418 off -- which is the default hardware behavior. In this case, there
3419 is no need to disable the interrupts. */
3420 m_prev_psw_reg = NULL;
3421 }
3422 else
3423 {
3424 m_prev_psw_reg = gen_reg_rtx (SImode);
3425 emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3426 emit_insn (gen_clrpsw (GEN_INT ('I')));
3427 }
3428}
3429
3430rx_atomic_sequence::~rx_atomic_sequence (void)
3431{
3432 if (m_prev_psw_reg != NULL)
3433 emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3434}
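/* Illustrative use of the RAII helper above (hypothetical caller, e.g. an
   atomic operation expander):

     {
       rx_atomic_sequence seq (current_function_decl);
       ... emit the insns that must not be interrupted ...
     }

   Leaving the scope runs the destructor, which restores the previously
   saved PSW and hence the prior interrupt-enable state.  */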
3435
aea8fc97 3436\f
47c9ac72
NC
3437#undef TARGET_NARROW_VOLATILE_BITFIELD
3438#define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3439
3440#undef TARGET_CAN_INLINE_P
3441#define TARGET_CAN_INLINE_P rx_ok_to_inline
3442
662666e5
NC
3443#undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3444#define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3445#undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3446#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3447#undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3448#define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3449#undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3450#define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3451
65a324b4
NC
3452#undef TARGET_FUNCTION_VALUE
3453#define TARGET_FUNCTION_VALUE rx_function_value
3454
3455#undef TARGET_RETURN_IN_MSB
3456#define TARGET_RETURN_IN_MSB rx_return_in_msb
3457
3458#undef TARGET_IN_SMALL_DATA_P
3459#define TARGET_IN_SMALL_DATA_P rx_in_small_data
3460
3461#undef TARGET_RETURN_IN_MEMORY
3462#define TARGET_RETURN_IN_MEMORY rx_return_in_memory
3463
3464#undef TARGET_HAVE_SRODATA_SECTION
3465#define TARGET_HAVE_SRODATA_SECTION true
3466
3467#undef TARGET_ASM_SELECT_RTX_SECTION
3468#define TARGET_ASM_SELECT_RTX_SECTION rx_select_rtx_section
3469
3470#undef TARGET_ASM_SELECT_SECTION
3471#define TARGET_ASM_SELECT_SECTION rx_select_section
3472
3473#undef TARGET_INIT_BUILTINS
3474#define TARGET_INIT_BUILTINS rx_init_builtins
3475
87e91fca
DD
3476#undef TARGET_BUILTIN_DECL
3477#define TARGET_BUILTIN_DECL rx_builtin_decl
3478
65a324b4
NC
3479#undef TARGET_EXPAND_BUILTIN
3480#define TARGET_EXPAND_BUILTIN rx_expand_builtin
3481
3482#undef TARGET_ASM_CONSTRUCTOR
3483#define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
3484
3485#undef TARGET_ASM_DESTRUCTOR
3486#define TARGET_ASM_DESTRUCTOR rx_elf_asm_destructor
3487
3488#undef TARGET_STRUCT_VALUE_RTX
3489#define TARGET_STRUCT_VALUE_RTX rx_struct_value_rtx
3490
3491#undef TARGET_ATTRIBUTE_TABLE
3492#define TARGET_ATTRIBUTE_TABLE rx_attribute_table
3493
3494#undef TARGET_ASM_FILE_START
3495#define TARGET_ASM_FILE_START rx_file_start
3496
3497#undef TARGET_MS_BITFIELD_LAYOUT_P
3498#define TARGET_MS_BITFIELD_LAYOUT_P rx_is_ms_bitfield_layout
3499
3500#undef TARGET_LEGITIMATE_ADDRESS_P
3501#define TARGET_LEGITIMATE_ADDRESS_P rx_is_legitimate_address
3502
b09c3081
AS
3503#undef TARGET_MODE_DEPENDENT_ADDRESS_P
3504#define TARGET_MODE_DEPENDENT_ADDRESS_P rx_mode_dependent_address_p
3505
65a324b4
NC
3506#undef TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS
3507#define TARGET_ALLOCATE_STACK_SLOTS_FOR_ARGS rx_allocate_stack_slots_for_args
3508
3509#undef TARGET_ASM_FUNCTION_PROLOGUE
3510#define TARGET_ASM_FUNCTION_PROLOGUE rx_output_function_prologue
3511
3512#undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
3513#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P rx_func_attr_inlinable
3514
0d8f38d3
NC
3515#undef TARGET_FUNCTION_OK_FOR_SIBCALL
3516#define TARGET_FUNCTION_OK_FOR_SIBCALL rx_function_ok_for_sibcall
3517
3968a1c0
NF
3518#undef TARGET_FUNCTION_ARG
3519#define TARGET_FUNCTION_ARG rx_function_arg
3520
3521#undef TARGET_FUNCTION_ARG_ADVANCE
3522#define TARGET_FUNCTION_ARG_ADVANCE rx_function_arg_advance
3523
c2ed6cf8
NF
3524#undef TARGET_FUNCTION_ARG_BOUNDARY
3525#define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
3526
65a324b4
NC
3527#undef TARGET_SET_CURRENT_FUNCTION
3528#define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
3529
65a324b4
NC
3530#undef TARGET_ASM_INTEGER
3531#define TARGET_ASM_INTEGER rx_assemble_integer
3532
3533#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
3534#define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
3535
3536#undef TARGET_MAX_ANCHOR_OFFSET
3537#define TARGET_MAX_ANCHOR_OFFSET 32
3538
3539#undef TARGET_ADDRESS_COST
3540#define TARGET_ADDRESS_COST rx_address_cost
3541
3542#undef TARGET_CAN_ELIMINATE
3543#define TARGET_CAN_ELIMINATE rx_can_eliminate
3544
5efd84c5
NF
3545#undef TARGET_CONDITIONAL_REGISTER_USAGE
3546#define TARGET_CONDITIONAL_REGISTER_USAGE rx_conditional_register_usage
3547
65a324b4
NC
3548#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3549#define TARGET_ASM_TRAMPOLINE_TEMPLATE rx_trampoline_template
3550
3551#undef TARGET_TRAMPOLINE_INIT
3552#define TARGET_TRAMPOLINE_INIT rx_trampoline_init
3553
31e727b0
NC
3554#undef TARGET_PRINT_OPERAND
3555#define TARGET_PRINT_OPERAND rx_print_operand
3556
3557#undef TARGET_PRINT_OPERAND_ADDRESS
3558#define TARGET_PRINT_OPERAND_ADDRESS rx_print_operand_address
3559
aea8fc97
NC
3560#undef TARGET_CC_MODES_COMPATIBLE
3561#define TARGET_CC_MODES_COMPATIBLE rx_cc_modes_compatible
3562
3563#undef TARGET_MEMORY_MOVE_COST
3564#define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
3565
0685e770
DD
3566#undef TARGET_OPTION_OVERRIDE
3567#define TARGET_OPTION_OVERRIDE rx_option_override
3568
e2f289f3
NC
3569#undef TARGET_PROMOTE_FUNCTION_MODE
3570#define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
3571
a32b99ad
JM
3572#undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
3573#define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
fac0f722 3574
1b7ae0b7
RH
3575#undef TARGET_FLAGS_REGNUM
3576#define TARGET_FLAGS_REGNUM CC_REG
3577
1a627b35 3578#undef TARGET_LEGITIMATE_CONSTANT_P
5f2f13fd 3579#define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
1a627b35 3580
878a9174
DD
3581#undef TARGET_LEGITIMIZE_ADDRESS
3582#define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3583
47c9ac72
NC
3584#undef TARGET_WARN_FUNC_RETURN
3585#define TARGET_WARN_FUNC_RETURN rx_warn_func_return
d45eae79 3586
69f5aa9b
SKS
3587#undef TARGET_LRA_P
3588#define TARGET_LRA_P rx_enable_lra
3589
65a324b4
NC
3590struct gcc_target targetm = TARGET_INITIALIZER;
3591
87e91fca 3592#include "gt-rx.h"