/* Scraped from the gcc.gnu.org Git web viewer: gcc.git blob
   gcc/config/v850/v850.c, captured at commit "builtins.c, [...]: Avoid
   backtick as left quote, using straight quote or %q, %< and %> as
   appropriate."  */
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "recog.h"
38 #include "expr.h"
39 #include "function.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "integrate.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46
47 #ifndef streq
48 #define streq(a,b) (strcmp (a, b) == 0)
49 #endif
50
51 /* Function prototypes for stupid compilers: */
52 static void const_double_split (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *);
53 static int const_costs_int (HOST_WIDE_INT, int);
54 static int const_costs (rtx, enum rtx_code);
55 static bool v850_rtx_costs (rtx, int, int, int *);
56 static void substitute_ep_register (rtx, rtx, int, int, rtx *, rtx *);
57 static void v850_reorg (void);
58 static int ep_memory_offset (enum machine_mode, int);
59 static void v850_set_data_area (tree, v850_data_area);
60 const struct attribute_spec v850_attribute_table[];
61 static tree v850_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
62 static tree v850_handle_data_area_attribute (tree *, tree, tree, int, bool *);
63 static void v850_insert_attributes (tree, tree *);
64 static void v850_select_section (tree, int, unsigned HOST_WIDE_INT);
65 static void v850_encode_data_area (tree, rtx);
66 static void v850_encode_section_info (tree, rtx, int);
67 static bool v850_return_in_memory (tree, tree);
68 static void v850_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
69 tree, int *, int);
70 static bool v850_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
71 tree, bool);
72
73 /* Information about the various small memory areas. */
74 struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
75 {
76 /* name value max physical max */
77 { "tda", (char *)0, 0, 256 },
78 { "sda", (char *)0, 0, 65536 },
79 { "zda", (char *)0, 0, 32768 },
80 };
81
82 /* Names of the various data areas used on the v850. */
83 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
84 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
85
86 /* Track the current data area set by the data area pragma (which
87 can be nested). Tested by check_default_data_area. */
88 data_area_stack_element * data_area_stack = NULL;
89
90 /* True if we don't need to check any more if the current
91 function is an interrupt handler. */
92 static int v850_interrupt_cache_p = FALSE;
93
94 /* Whether current function is an interrupt handler. */
95 static int v850_interrupt_p = FALSE;
96 \f
97 /* Initialize the GCC target structure. */
98 #undef TARGET_ASM_ALIGNED_HI_OP
99 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
100
101 #undef TARGET_ATTRIBUTE_TABLE
102 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
103
104 #undef TARGET_INSERT_ATTRIBUTES
105 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
106
107 #undef TARGET_ASM_SELECT_SECTION
108 #define TARGET_ASM_SELECT_SECTION v850_select_section
109
110 #undef TARGET_ENCODE_SECTION_INFO
111 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
112
113 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
114 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
115
116 #undef TARGET_RTX_COSTS
117 #define TARGET_RTX_COSTS v850_rtx_costs
118
119 #undef TARGET_ADDRESS_COST
120 #define TARGET_ADDRESS_COST hook_int_rtx_0
121
122 #undef TARGET_MACHINE_DEPENDENT_REORG
123 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
124
125 #undef TARGET_PROMOTE_PROTOTYPES
126 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
127
128 #undef TARGET_RETURN_IN_MEMORY
129 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
130
131 #undef TARGET_PASS_BY_REFERENCE
132 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
133
134 #undef TARGET_CALLEE_COPIES
135 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
136
137 #undef TARGET_SETUP_INCOMING_VARARGS
138 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
139
140 struct gcc_target targetm = TARGET_INITIALIZER;
141 \f
142 /* Sometimes certain combinations of command options do not make
143 sense on a particular target machine. You can define a macro
144 `OVERRIDE_OPTIONS' to take account of this. This macro, if
145 defined, is executed once just after all the command options have
146 been parsed.
147
148 Don't use this macro to turn on various extra optimizations for
149 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
150
151 void
152 override_options (void)
153 {
154 int i;
155 extern int atoi (const char *);
156
157 /* Parse -m{s,t,z}da=nnn switches */
158 for (i = 0; i < (int)SMALL_MEMORY_max; i++)
159 {
160 if (small_memory[i].value)
161 {
162 if (!ISDIGIT (*small_memory[i].value))
163 error ("%s=%s is not numeric",
164 small_memory[i].name,
165 small_memory[i].value);
166 else
167 {
168 small_memory[i].max = atoi (small_memory[i].value);
169 if (small_memory[i].max > small_memory[i].physical_max)
170 error ("%s=%s is too large",
171 small_memory[i].name,
172 small_memory[i].value);
173 }
174 }
175 }
176
177 /* Make sure that the US_BIT_SET mask has been correctly initialized. */
178 if ((target_flags & MASK_US_MASK_SET) == 0)
179 {
180 target_flags |= MASK_US_MASK_SET;
181 target_flags &= ~MASK_US_BIT_SET;
182 }
183 }
184
185 \f
186 static bool
187 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
188 enum machine_mode mode, tree type,
189 bool named ATTRIBUTE_UNUSED)
190 {
191 unsigned HOST_WIDE_INT size;
192
193 if (type)
194 size = int_size_in_bytes (type);
195 else
196 size = GET_MODE_SIZE (mode);
197
198 return size > 8;
199 }
200
201 /* Return an RTX to represent where a value with mode MODE will be returned
202 from a function. If the result is 0, the argument is pushed. */
203
204 rtx
205 function_arg (CUMULATIVE_ARGS * cum,
206 enum machine_mode mode,
207 tree type,
208 int named)
209 {
210 rtx result = 0;
211 int size, align;
212
213 if (TARGET_GHS && !named)
214 return NULL_RTX;
215
216 if (mode == BLKmode)
217 size = int_size_in_bytes (type);
218 else
219 size = GET_MODE_SIZE (mode);
220
221 if (size < 1)
222 return 0;
223
224 if (type)
225 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
226 else
227 align = size;
228
229 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
230
231 if (cum->nbytes > 4 * UNITS_PER_WORD)
232 return 0;
233
234 if (type == NULL_TREE
235 && cum->nbytes + size > 4 * UNITS_PER_WORD)
236 return 0;
237
238 switch (cum->nbytes / UNITS_PER_WORD)
239 {
240 case 0:
241 result = gen_rtx_REG (mode, 6);
242 break;
243 case 1:
244 result = gen_rtx_REG (mode, 7);
245 break;
246 case 2:
247 result = gen_rtx_REG (mode, 8);
248 break;
249 case 3:
250 result = gen_rtx_REG (mode, 9);
251 break;
252 default:
253 result = 0;
254 }
255
256 return result;
257 }
258
259 \f
260 /* Return the number of words which must be put into registers
261 for values which are part in registers and part in memory. */
262
263 int
264 function_arg_partial_nregs (CUMULATIVE_ARGS * cum,
265 enum machine_mode mode,
266 tree type,
267 int named)
268 {
269 int size, align;
270
271 if (TARGET_GHS && !named)
272 return 0;
273
274 if (mode == BLKmode)
275 size = int_size_in_bytes (type);
276 else
277 size = GET_MODE_SIZE (mode);
278
279 if (type)
280 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
281 else
282 align = size;
283
284 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
285
286 if (cum->nbytes > 4 * UNITS_PER_WORD)
287 return 0;
288
289 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
290 return 0;
291
292 if (type == NULL_TREE
293 && cum->nbytes + size > 4 * UNITS_PER_WORD)
294 return 0;
295
296 return (4 * UNITS_PER_WORD - cum->nbytes) / UNITS_PER_WORD;
297 }
298
299 \f
300 /* Return the high and low words of a CONST_DOUBLE */
301
302 static void
303 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
304 {
305 if (GET_CODE (x) == CONST_DOUBLE)
306 {
307 long t[2];
308 REAL_VALUE_TYPE rv;
309
310 switch (GET_MODE (x))
311 {
312 case DFmode:
313 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
314 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
315 *p_high = t[1]; /* since v850 is little endian */
316 *p_low = t[0]; /* high is second word */
317 return;
318
319 case SFmode:
320 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
321 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
322 *p_low = 0;
323 return;
324
325 case VOIDmode:
326 case DImode:
327 *p_high = CONST_DOUBLE_HIGH (x);
328 *p_low = CONST_DOUBLE_LOW (x);
329 return;
330
331 default:
332 break;
333 }
334 }
335
336 fatal_insn ("const_double_split got a bad insn:", x);
337 }
338
339 \f
340 /* Return the cost of the rtx R with code CODE. */
341
342 static int
343 const_costs_int (HOST_WIDE_INT value, int zero_cost)
344 {
345 if (CONST_OK_FOR_I (value))
346 return zero_cost;
347 else if (CONST_OK_FOR_J (value))
348 return 1;
349 else if (CONST_OK_FOR_K (value))
350 return 2;
351 else
352 return 4;
353 }
354
355 static int
356 const_costs (rtx r, enum rtx_code c)
357 {
358 HOST_WIDE_INT high, low;
359
360 switch (c)
361 {
362 case CONST_INT:
363 return const_costs_int (INTVAL (r), 0);
364
365 case CONST_DOUBLE:
366 const_double_split (r, &high, &low);
367 if (GET_MODE (r) == SFmode)
368 return const_costs_int (high, 1);
369 else
370 return const_costs_int (high, 1) + const_costs_int (low, 1);
371
372 case SYMBOL_REF:
373 case LABEL_REF:
374 case CONST:
375 return 2;
376
377 case HIGH:
378 return 1;
379
380 default:
381 return 4;
382 }
383 }
384
385 static bool
386 v850_rtx_costs (rtx x,
387 int code,
388 int outer_code ATTRIBUTE_UNUSED,
389 int * total)
390 {
391 switch (code)
392 {
393 case CONST_INT:
394 case CONST_DOUBLE:
395 case CONST:
396 case SYMBOL_REF:
397 case LABEL_REF:
398 *total = COSTS_N_INSNS (const_costs (x, code));
399 return true;
400
401 case MOD:
402 case DIV:
403 case UMOD:
404 case UDIV:
405 if (TARGET_V850E && optimize_size)
406 *total = 6;
407 else
408 *total = 60;
409 return true;
410
411 case MULT:
412 if (TARGET_V850E
413 && ( GET_MODE (x) == SImode
414 || GET_MODE (x) == HImode
415 || GET_MODE (x) == QImode))
416 {
417 if (GET_CODE (XEXP (x, 1)) == REG)
418 *total = 4;
419 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
420 {
421 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
422 *total = 6;
423 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
424 *total = 10;
425 }
426 }
427 else
428 *total = 20;
429 return true;
430
431 default:
432 return false;
433 }
434 }
435 \f
436 /* Print operand X using operand code CODE to assembly language output file
437 FILE. */
438
439 void
440 print_operand (FILE * file, rtx x, int code)
441 {
442 HOST_WIDE_INT high, low;
443
444 switch (code)
445 {
446 case 'c':
447 /* We use 'c' operands with symbols for .vtinherit */
448 if (GET_CODE (x) == SYMBOL_REF)
449 {
450 output_addr_const(file, x);
451 break;
452 }
453 /* fall through */
454 case 'b':
455 case 'B':
456 case 'C':
457 switch ((code == 'B' || code == 'C')
458 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
459 {
460 case NE:
461 if (code == 'c' || code == 'C')
462 fprintf (file, "nz");
463 else
464 fprintf (file, "ne");
465 break;
466 case EQ:
467 if (code == 'c' || code == 'C')
468 fprintf (file, "z");
469 else
470 fprintf (file, "e");
471 break;
472 case GE:
473 fprintf (file, "ge");
474 break;
475 case GT:
476 fprintf (file, "gt");
477 break;
478 case LE:
479 fprintf (file, "le");
480 break;
481 case LT:
482 fprintf (file, "lt");
483 break;
484 case GEU:
485 fprintf (file, "nl");
486 break;
487 case GTU:
488 fprintf (file, "h");
489 break;
490 case LEU:
491 fprintf (file, "nh");
492 break;
493 case LTU:
494 fprintf (file, "l");
495 break;
496 default:
497 abort ();
498 }
499 break;
500 case 'F': /* high word of CONST_DOUBLE */
501 if (GET_CODE (x) == CONST_INT)
502 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
503 else if (GET_CODE (x) == CONST_DOUBLE)
504 {
505 const_double_split (x, &high, &low);
506 fprintf (file, "%ld", (long) high);
507 }
508 else
509 abort ();
510 break;
511 case 'G': /* low word of CONST_DOUBLE */
512 if (GET_CODE (x) == CONST_INT)
513 fprintf (file, "%ld", (long) INTVAL (x));
514 else if (GET_CODE (x) == CONST_DOUBLE)
515 {
516 const_double_split (x, &high, &low);
517 fprintf (file, "%ld", (long) low);
518 }
519 else
520 abort ();
521 break;
522 case 'L':
523 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
524 break;
525 case 'M':
526 fprintf (file, "%d", exact_log2 (INTVAL (x)));
527 break;
528 case 'O':
529 if (special_symbolref_operand (x, VOIDmode))
530 {
531 if (GET_CODE (x) == SYMBOL_REF)
532 ;
533 else if (GET_CODE (x) == CONST)
534 x = XEXP (XEXP (x, 0), 0);
535 else
536 abort ();
537
538 if (SYMBOL_REF_ZDA_P (x))
539 fprintf (file, "zdaoff");
540 else if (SYMBOL_REF_SDA_P (x))
541 fprintf (file, "sdaoff");
542 else if (SYMBOL_REF_TDA_P (x))
543 fprintf (file, "tdaoff");
544 else
545 abort ();
546 }
547 else
548 abort ();
549 break;
550 case 'P':
551 if (special_symbolref_operand (x, VOIDmode))
552 output_addr_const (file, x);
553 else
554 abort ();
555 break;
556 case 'Q':
557 if (special_symbolref_operand (x, VOIDmode))
558 {
559 if (GET_CODE (x) == SYMBOL_REF)
560 ;
561 else if (GET_CODE (x) == CONST)
562 x = XEXP (XEXP (x, 0), 0);
563 else
564 abort ();
565
566 if (SYMBOL_REF_ZDA_P (x))
567 fprintf (file, "r0");
568 else if (SYMBOL_REF_SDA_P (x))
569 fprintf (file, "gp");
570 else if (SYMBOL_REF_TDA_P (x))
571 fprintf (file, "ep");
572 else
573 abort ();
574 }
575 else
576 abort ();
577 break;
578 case 'R': /* 2nd word of a double. */
579 switch (GET_CODE (x))
580 {
581 case REG:
582 fprintf (file, reg_names[REGNO (x) + 1]);
583 break;
584 case MEM:
585 x = XEXP (adjust_address (x, SImode, 4), 0);
586 print_operand_address (file, x);
587 if (GET_CODE (x) == CONST_INT)
588 fprintf (file, "[r0]");
589 break;
590
591 default:
592 break;
593 }
594 break;
595 case 'S':
596 {
597 /* if it's a reference to a TDA variable, use sst/sld vs. st/ld */
598 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
599 fputs ("s", file);
600
601 break;
602 }
603 case 'T':
604 {
605 /* Like an 'S' operand above, but for unsigned loads only. */
606 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
607 fputs ("s", file);
608
609 break;
610 }
611 case 'W': /* print the instruction suffix */
612 switch (GET_MODE (x))
613 {
614 default:
615 abort ();
616
617 case QImode: fputs (".b", file); break;
618 case HImode: fputs (".h", file); break;
619 case SImode: fputs (".w", file); break;
620 case SFmode: fputs (".w", file); break;
621 }
622 break;
623 case '.': /* register r0 */
624 fputs (reg_names[0], file);
625 break;
626 case 'z': /* reg or zero */
627 if (x == const0_rtx)
628 fputs (reg_names[0], file);
629 else if (GET_CODE (x) == REG)
630 fputs (reg_names[REGNO (x)], file);
631 else
632 abort ();
633 break;
634 default:
635 switch (GET_CODE (x))
636 {
637 case MEM:
638 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
639 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
640 XEXP (x, 0)));
641 else
642 output_address (XEXP (x, 0));
643 break;
644
645 case REG:
646 fputs (reg_names[REGNO (x)], file);
647 break;
648 case SUBREG:
649 fputs (reg_names[subreg_regno (x)], file);
650 break;
651 case CONST_INT:
652 case SYMBOL_REF:
653 case CONST:
654 case LABEL_REF:
655 case CODE_LABEL:
656 print_operand_address (file, x);
657 break;
658 default:
659 abort ();
660 }
661 break;
662
663 }
664 }
665
666 \f
667 /* Output assembly language output for the address ADDR to FILE. */
668
669 void
670 print_operand_address (FILE * file, rtx addr)
671 {
672 switch (GET_CODE (addr))
673 {
674 case REG:
675 fprintf (file, "0[");
676 print_operand (file, addr, 0);
677 fprintf (file, "]");
678 break;
679 case LO_SUM:
680 if (GET_CODE (XEXP (addr, 0)) == REG)
681 {
682 /* reg,foo */
683 fprintf (file, "lo(");
684 print_operand (file, XEXP (addr, 1), 0);
685 fprintf (file, ")[");
686 print_operand (file, XEXP (addr, 0), 0);
687 fprintf (file, "]");
688 }
689 break;
690 case PLUS:
691 if (GET_CODE (XEXP (addr, 0)) == REG
692 || GET_CODE (XEXP (addr, 0)) == SUBREG)
693 {
694 /* reg,foo */
695 print_operand (file, XEXP (addr, 1), 0);
696 fprintf (file, "[");
697 print_operand (file, XEXP (addr, 0), 0);
698 fprintf (file, "]");
699 }
700 else
701 {
702 print_operand (file, XEXP (addr, 0), 0);
703 fprintf (file, "+");
704 print_operand (file, XEXP (addr, 1), 0);
705 }
706 break;
707 case SYMBOL_REF:
708 {
709 const char *off_name = NULL;
710 const char *reg_name = NULL;
711
712 if (SYMBOL_REF_ZDA_P (addr))
713 {
714 off_name = "zdaoff";
715 reg_name = "r0";
716 }
717 else if (SYMBOL_REF_SDA_P (addr))
718 {
719 off_name = "sdaoff";
720 reg_name = "gp";
721 }
722 else if (SYMBOL_REF_TDA_P (addr))
723 {
724 off_name = "tdaoff";
725 reg_name = "ep";
726 }
727
728 if (off_name)
729 fprintf (file, "%s(", off_name);
730 output_addr_const (file, addr);
731 if (reg_name)
732 fprintf (file, ")[%s]", reg_name);
733 }
734 break;
735 case CONST:
736 if (special_symbolref_operand (addr, VOIDmode))
737 {
738 rtx x = XEXP (XEXP (addr, 0), 0);
739 const char *off_name;
740 const char *reg_name;
741
742 if (SYMBOL_REF_ZDA_P (x))
743 {
744 off_name = "zdaoff";
745 reg_name = "r0";
746 }
747 else if (SYMBOL_REF_SDA_P (x))
748 {
749 off_name = "sdaoff";
750 reg_name = "gp";
751 }
752 else if (SYMBOL_REF_TDA_P (x))
753 {
754 off_name = "tdaoff";
755 reg_name = "ep";
756 }
757 else
758 abort ();
759
760 fprintf (file, "%s(", off_name);
761 output_addr_const (file, addr);
762 fprintf (file, ")[%s]", reg_name);
763 }
764 else
765 output_addr_const (file, addr);
766 break;
767 default:
768 output_addr_const (file, addr);
769 break;
770 }
771 }
772
773 /* When assemble_integer is used to emit the offsets for a switch
774 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
775 output_addr_const will normally barf at this, but it is OK to omit
776 the truncate and just emit the difference of the two labels. The
777 .hword directive will automatically handle the truncation for us.
778
779 Returns 1 if rtx was handled, 0 otherwise. */
780
781 int
782 v850_output_addr_const_extra (FILE * file, rtx x)
783 {
784 if (GET_CODE (x) != TRUNCATE)
785 return 0;
786
787 x = XEXP (x, 0);
788
789 /* We must also handle the case where the switch table was passed a
790 constant value and so has been collapsed. In this case the first
791 label will have been deleted. In such a case it is OK to emit
792 nothing, since the table will not be used.
793 (cf gcc.c-torture/compile/990801-1.c). */
794 if (GET_CODE (x) == MINUS
795 && GET_CODE (XEXP (x, 0)) == LABEL_REF
796 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
797 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
798 return 1;
799
800 output_addr_const (file, x);
801 return 1;
802 }
803 \f
804 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
805 point value. */
806
807 const char *
808 output_move_single (rtx * operands)
809 {
810 rtx dst = operands[0];
811 rtx src = operands[1];
812
813 if (REG_P (dst))
814 {
815 if (REG_P (src))
816 return "mov %1,%0";
817
818 else if (GET_CODE (src) == CONST_INT)
819 {
820 HOST_WIDE_INT value = INTVAL (src);
821
822 if (CONST_OK_FOR_J (value)) /* Signed 5 bit immediate. */
823 return "mov %1,%0";
824
825 else if (CONST_OK_FOR_K (value)) /* Signed 16 bit immediate. */
826 return "movea lo(%1),%.,%0";
827
828 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
829 return "movhi hi(%1),%.,%0";
830
831 /* A random constant. */
832 else if (TARGET_V850E)
833 return "mov %1,%0";
834 else
835 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
836 }
837
838 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
839 {
840 HOST_WIDE_INT high, low;
841
842 const_double_split (src, &high, &low);
843
844 if (CONST_OK_FOR_J (high)) /* Signed 5 bit immediate. */
845 return "mov %F1,%0";
846
847 else if (CONST_OK_FOR_K (high)) /* Signed 16 bit immediate. */
848 return "movea lo(%F1),%.,%0";
849
850 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
851 return "movhi hi(%F1),%.,%0";
852
853 /* A random constant. */
854 else if (TARGET_V850E)
855 return "mov %F1,%0";
856
857 else
858 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
859 }
860
861 else if (GET_CODE (src) == MEM)
862 return "%S1ld%W1 %1,%0";
863
864 else if (special_symbolref_operand (src, VOIDmode))
865 return "movea %O1(%P1),%Q1,%0";
866
867 else if (GET_CODE (src) == LABEL_REF
868 || GET_CODE (src) == SYMBOL_REF
869 || GET_CODE (src) == CONST)
870 {
871 if (TARGET_V850E)
872 return "mov hilo(%1),%0";
873 else
874 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
875 }
876
877 else if (GET_CODE (src) == HIGH)
878 return "movhi hi(%1),%.,%0";
879
880 else if (GET_CODE (src) == LO_SUM)
881 {
882 operands[2] = XEXP (src, 0);
883 operands[3] = XEXP (src, 1);
884 return "movea lo(%3),%2,%0";
885 }
886 }
887
888 else if (GET_CODE (dst) == MEM)
889 {
890 if (REG_P (src))
891 return "%S0st%W0 %1,%0";
892
893 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
894 return "%S0st%W0 %.,%0";
895
896 else if (GET_CODE (src) == CONST_DOUBLE
897 && CONST0_RTX (GET_MODE (dst)) == src)
898 return "%S0st%W0 %.,%0";
899 }
900
901 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
902 return "";
903 }
904
905 \f
906 /* Return appropriate code to load up an 8 byte integer or
907 floating point value */
908
909 const char *
910 output_move_double (rtx * operands)
911 {
912 enum machine_mode mode = GET_MODE (operands[0]);
913 rtx dst = operands[0];
914 rtx src = operands[1];
915
916 if (register_operand (dst, mode)
917 && register_operand (src, mode))
918 {
919 if (REGNO (src) + 1 == REGNO (dst))
920 return "mov %R1,%R0\n\tmov %1,%0";
921 else
922 return "mov %1,%0\n\tmov %R1,%R0";
923 }
924
925 /* Storing 0 */
926 if (GET_CODE (dst) == MEM
927 && ((GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
928 || (GET_CODE (src) == CONST_DOUBLE && CONST_DOUBLE_OK_FOR_G (src))))
929 return "st.w %.,%0\n\tst.w %.,%R0";
930
931 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
932 {
933 HOST_WIDE_INT high_low[2];
934 int i;
935 rtx xop[10];
936
937 if (GET_CODE (src) == CONST_DOUBLE)
938 const_double_split (src, &high_low[1], &high_low[0]);
939 else
940 {
941 high_low[0] = INTVAL (src);
942 high_low[1] = (INTVAL (src) >= 0) ? 0 : -1;
943 }
944
945 for (i = 0; i < 2; i++)
946 {
947 xop[0] = gen_rtx_REG (SImode, REGNO (dst)+i);
948 xop[1] = GEN_INT (high_low[i]);
949 output_asm_insn (output_move_single (xop), xop);
950 }
951
952 return "";
953 }
954
955 if (GET_CODE (src) == MEM)
956 {
957 int ptrreg = -1;
958 int dreg = REGNO (dst);
959 rtx inside = XEXP (src, 0);
960
961 if (GET_CODE (inside) == REG)
962 ptrreg = REGNO (inside);
963 else if (GET_CODE (inside) == SUBREG)
964 ptrreg = subreg_regno (inside);
965 else if (GET_CODE (inside) == PLUS)
966 ptrreg = REGNO (XEXP (inside, 0));
967 else if (GET_CODE (inside) == LO_SUM)
968 ptrreg = REGNO (XEXP (inside, 0));
969
970 if (dreg == ptrreg)
971 return "ld.w %R1,%R0\n\tld.w %1,%0";
972 }
973
974 if (GET_CODE (src) == MEM)
975 return "ld.w %1,%0\n\tld.w %R1,%R0";
976
977 if (GET_CODE (dst) == MEM)
978 return "st.w %1,%0\n\tst.w %R1,%R0";
979
980 return "mov %1,%0\n\tmov %R1,%R0";
981 }
982
983 \f
984 /* Return maximum offset supported for a short EP memory reference of mode
985 MODE and signedness UNSIGNEDP. */
986
987 static int
988 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
989 {
990 int max_offset = 0;
991
992 switch (mode)
993 {
994 case QImode:
995 if (TARGET_SMALL_SLD)
996 max_offset = (1 << 4);
997 else if (TARGET_V850E
998 && ( ( unsignedp && ! TARGET_US_BIT_SET)
999 || (! unsignedp && TARGET_US_BIT_SET)))
1000 max_offset = (1 << 4);
1001 else
1002 max_offset = (1 << 7);
1003 break;
1004
1005 case HImode:
1006 if (TARGET_SMALL_SLD)
1007 max_offset = (1 << 5);
1008 else if (TARGET_V850E
1009 && ( ( unsignedp && ! TARGET_US_BIT_SET)
1010 || (! unsignedp && TARGET_US_BIT_SET)))
1011 max_offset = (1 << 5);
1012 else
1013 max_offset = (1 << 8);
1014 break;
1015
1016 case SImode:
1017 case SFmode:
1018 max_offset = (1 << 8);
1019 break;
1020
1021 default:
1022 break;
1023 }
1024
1025 return max_offset;
1026 }
1027
1028 /* Return true if OP is a valid short EP memory reference */
1029
1030 int
1031 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1032 {
1033 rtx addr, op0, op1;
1034 int max_offset;
1035 int mask;
1036
1037 if (GET_CODE (op) != MEM)
1038 return FALSE;
1039
1040 max_offset = ep_memory_offset (mode, unsigned_load);
1041
1042 mask = GET_MODE_SIZE (mode) - 1;
1043
1044 addr = XEXP (op, 0);
1045 if (GET_CODE (addr) == CONST)
1046 addr = XEXP (addr, 0);
1047
1048 switch (GET_CODE (addr))
1049 {
1050 default:
1051 break;
1052
1053 case SYMBOL_REF:
1054 return SYMBOL_REF_TDA_P (addr);
1055
1056 case REG:
1057 return REGNO (addr) == EP_REGNUM;
1058
1059 case PLUS:
1060 op0 = XEXP (addr, 0);
1061 op1 = XEXP (addr, 1);
1062 if (GET_CODE (op1) == CONST_INT
1063 && INTVAL (op1) < max_offset
1064 && INTVAL (op1) >= 0
1065 && (INTVAL (op1) & mask) == 0)
1066 {
1067 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1068 return TRUE;
1069
1070 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1071 return TRUE;
1072 }
1073 break;
1074 }
1075
1076 return FALSE;
1077 }
1078
1079 /* Return true if OP is either a register or 0 */
1080
1081 int
1082 reg_or_0_operand (rtx op, enum machine_mode mode)
1083 {
1084 if (GET_CODE (op) == CONST_INT)
1085 return INTVAL (op) == 0;
1086
1087 else if (GET_CODE (op) == CONST_DOUBLE)
1088 return CONST_DOUBLE_OK_FOR_G (op);
1089
1090 else
1091 return register_operand (op, mode);
1092 }
1093
1094 /* Return true if OP is either a register or a signed five bit integer */
1095
1096 int
1097 reg_or_int5_operand (rtx op, enum machine_mode mode)
1098 {
1099 if (GET_CODE (op) == CONST_INT)
1100 return CONST_OK_FOR_J (INTVAL (op));
1101
1102 else
1103 return register_operand (op, mode);
1104 }
1105
1106 /* Return true if OP is either a register or a signed nine bit integer. */
1107
1108 int
1109 reg_or_int9_operand (rtx op, enum machine_mode mode)
1110 {
1111 if (GET_CODE (op) == CONST_INT)
1112 return CONST_OK_FOR_O (INTVAL (op));
1113
1114 return register_operand (op, mode);
1115 }
1116
1117 /* Return true if OP is either a register or a const integer. */
1118
1119 int
1120 reg_or_const_operand (rtx op, enum machine_mode mode)
1121 {
1122 if (GET_CODE (op) == CONST_INT)
1123 return TRUE;
1124
1125 return register_operand (op, mode);
1126 }
1127
1128 /* Return true if OP is a valid call operand. */
1129
1130 int
1131 call_address_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1132 {
1133 /* Only registers are valid call operands if TARGET_LONG_CALLS. */
1134 if (TARGET_LONG_CALLS)
1135 return GET_CODE (op) == REG;
1136 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == REG);
1137 }
1138
1139 int
1140 special_symbolref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1141 {
1142 if (GET_CODE (op) == CONST
1143 && GET_CODE (XEXP (op, 0)) == PLUS
1144 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
1145 && CONST_OK_FOR_K (INTVAL (XEXP (XEXP (op, 0), 1))))
1146 op = XEXP (XEXP (op, 0), 0);
1147
1148 if (GET_CODE (op) == SYMBOL_REF)
1149 return (SYMBOL_REF_FLAGS (op)
1150 & (SYMBOL_FLAG_ZDA | SYMBOL_FLAG_TDA | SYMBOL_FLAG_SDA)) != 0;
1151
1152 return FALSE;
1153 }
1154
1155 int
1156 movsi_source_operand (rtx op, enum machine_mode mode)
1157 {
1158 /* Some constants, as well as symbolic operands
1159 must be done with HIGH & LO_SUM patterns. */
1160 if (CONSTANT_P (op)
1161 && GET_CODE (op) != HIGH
1162 && !(GET_CODE (op) == CONST_INT
1163 && (CONST_OK_FOR_J (INTVAL (op))
1164 || CONST_OK_FOR_K (INTVAL (op))
1165 || CONST_OK_FOR_L (INTVAL (op)))))
1166 return special_symbolref_operand (op, mode);
1167 else
1168 return general_operand (op, mode);
1169 }
1170
1171 int
1172 power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1173 {
1174 if (GET_CODE (op) != CONST_INT)
1175 return 0;
1176
1177 if (exact_log2 (INTVAL (op)) == -1)
1178 return 0;
1179 return 1;
1180 }
1181
1182 int
1183 not_power_of_two_operand (rtx op, enum machine_mode mode)
1184 {
1185 unsigned int mask;
1186
1187 if (mode == QImode)
1188 mask = 0xff;
1189 else if (mode == HImode)
1190 mask = 0xffff;
1191 else if (mode == SImode)
1192 mask = 0xffffffff;
1193 else
1194 return 0;
1195
1196 if (GET_CODE (op) != CONST_INT)
1197 return 0;
1198
1199 if (exact_log2 (~INTVAL (op) & mask) == -1)
1200 return 0;
1201 return 1;
1202 }
1203
1204 \f
1205 /* Substitute memory references involving a pointer, to use the ep pointer,
1206 taking care to save and preserve the ep. */
1207
1208 static void
1209 substitute_ep_register (rtx first_insn,
1210 rtx last_insn,
1211 int uses,
1212 int regno,
1213 rtx * p_r1,
1214 rtx * p_ep)
1215 {
1216 rtx reg = gen_rtx_REG (Pmode, regno);
1217 rtx insn;
1218
1219 if (!*p_r1)
1220 {
1221 regs_ever_live[1] = 1;
1222 *p_r1 = gen_rtx_REG (Pmode, 1);
1223 *p_ep = gen_rtx_REG (Pmode, 30);
1224 }
1225
1226 if (TARGET_DEBUG)
1227 fprintf (stderr, "\
1228 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1229 2 * (uses - 3), uses, reg_names[regno],
1230 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1231 INSN_UID (first_insn), INSN_UID (last_insn));
1232
1233 if (GET_CODE (first_insn) == NOTE)
1234 first_insn = next_nonnote_insn (first_insn);
1235
1236 last_insn = next_nonnote_insn (last_insn);
1237 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1238 {
1239 if (GET_CODE (insn) == INSN)
1240 {
1241 rtx pattern = single_set (insn);
1242
1243 /* Replace the memory references. */
1244 if (pattern)
1245 {
1246 rtx *p_mem;
1247 /* Memory operands are signed by default. */
1248 int unsignedp = FALSE;
1249
1250 if (GET_CODE (SET_DEST (pattern)) == MEM
1251 && GET_CODE (SET_SRC (pattern)) == MEM)
1252 p_mem = (rtx *)0;
1253
1254 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1255 p_mem = &SET_DEST (pattern);
1256
1257 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1258 p_mem = &SET_SRC (pattern);
1259
1260 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1261 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1262 p_mem = &XEXP (SET_SRC (pattern), 0);
1263
1264 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1265 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1266 {
1267 p_mem = &XEXP (SET_SRC (pattern), 0);
1268 unsignedp = TRUE;
1269 }
1270 else
1271 p_mem = (rtx *)0;
1272
1273 if (p_mem)
1274 {
1275 rtx addr = XEXP (*p_mem, 0);
1276
1277 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1278 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1279
1280 else if (GET_CODE (addr) == PLUS
1281 && GET_CODE (XEXP (addr, 0)) == REG
1282 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1283 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1284 && ((INTVAL (XEXP (addr, 1)))
1285 < ep_memory_offset (GET_MODE (*p_mem),
1286 unsignedp))
1287 && ((INTVAL (XEXP (addr, 1))) >= 0))
1288 *p_mem = change_address (*p_mem, VOIDmode,
1289 gen_rtx_PLUS (Pmode,
1290 *p_ep,
1291 XEXP (addr, 1)));
1292 }
1293 }
1294 }
1295 }
1296
1297 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1298 insn = prev_nonnote_insn (first_insn);
1299 if (insn && GET_CODE (insn) == INSN
1300 && GET_CODE (PATTERN (insn)) == SET
1301 && SET_DEST (PATTERN (insn)) == *p_ep
1302 && SET_SRC (PATTERN (insn)) == *p_r1)
1303 delete_insn (insn);
1304 else
1305 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1306
1307 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1308 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1309 }
1310
1311 \f
1312 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1313 the -mep mode to copy heavily used pointers to ep to use the implicit
1314 addressing. */
1315
static void
v850_reorg (void)
{
  /* Per-hard-register bookkeeping: how many short-form memory references
     in the current basic block use this register as a base, and the span
     of insns (first to last) over which those uses occur.  */
  struct
  {
    int uses;
    rtx first_insn;
    rtx last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;		/* Nonzero once ep itself is seen in use.  */
  rtx r1 = NULL_RTX;		/* Lazily created r1/ep regs, shared across */
  rtx ep = NULL_RTX;		/* all substitute_ep_register calls.  */
  rtx insn;
  rtx pattern;

  /* If not ep mode, just return now. */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL_RTX;
      regs[i].last_insn = NULL_RTX;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	/* End of basic block */
	default:
	  if (!use_ep)
	    {
	      /* Find the register with the most short-form uses in the
		 block that just ended.  */
	      int max_uses = -1;
	      int max_regno = -1;

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      /* The ep copy costs 3 insns (save r1, load ep, restore ep),
		 each use saves 2 bytes, hence the > 3 threshold.  */
	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  /* Reset all tracking at the basic-block boundary.  */
	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL_RTX;
	      regs[i].last_insn = NULL_RTX;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default. */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler. */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src);

	      /* Pick out the single memory operand, if any.  A mem-to-mem
		 set (both sides MEM) is not a candidate.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      /* A reference that already uses ep disables substitution
		 for the rest of the block.  */
	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* (reg) and (plus (reg) (small nonneg const)) addresses
		     could become short ep-relative forms.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register */
	      if (GET_CODE (dest) == REG)
		{
		  enum machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification. */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      /* Only substitute if the best candidate is the very
			 register about to be clobbered.  */
		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers. */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL_RTX;
			      regs[i].last_insn = NULL_RTX;
			    }
			}
		    }

		  /* Forget tracking for every hard register the set
		     overwrites.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL_RTX;
		      regs[i].last_insn = NULL_RTX;
		    }
		}
	    }
	}
    }
}
1525
1526 \f
/* # of registers saved by the interrupt handler. */
#define INTERRUPT_FIXED_NUM 4

/* # of bytes for registers saved by the interrupt handler. */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of registers saved in register parameter area. */
#define INTERRUPT_REGPARM_NUM 4
/* # of words saved for other registers.
   30 general registers, minus the ones saved as "fixed" above, plus
   the register-parameter slots.  */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM)

/* # of bytes needed to save all of the above (4 bytes each).  */
#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1540
/* Return the number of bytes of stack needed for the registers this
   function must save, and, if P_REG_SAVED is non-null, store a bitmask
   of the saved registers (bit I set means register I is saved) through
   it.  May also mark the link pointer live in regs_ever_live when
   profiling requires it.  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  int call_p = regs_ever_live [LINK_POINTER_REGNUM];
  long reg_saved = 0;

  /* Count the return pointer if we need to save it. */
  if (current_function_profile && !call_p)
    regs_ever_live [LINK_POINTER_REGNUM] = call_p = 1;

  /* Count space for the register saves. */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    if (regs_ever_live[i] || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	  /* We don't save/restore r0 or the stack pointer */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	  /* For registers with fixed use, we save them, set them to the
	     appropriate value, and then restore them.
	     These registers are handled specially, so don't list them
	     on the list of registers to save in the prologue. */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved. */
      for (i = 0; i <= 31; i++)
	if (regs_ever_live[i] && ((! call_used_regs[i])
				  || i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler). */

      if (TARGET_PROLOG_FUNCTION
	  && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      /* r2 is handled, continue with the r20..r29 range the
		 helper routines cover.  */
	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not. */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (regs_ever_live [LINK_POINTER_REGNUM])
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper function: count exactly the call-saved registers
	     (plus the link pointer) that are actually live.  */
	  for (; i <= 31; i++)
	    if (regs_ever_live[i] && ((! call_used_regs[i])
				      || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1644
1645 int
1646 compute_frame_size (int size, long * p_reg_saved)
1647 {
1648 return (size
1649 + compute_register_save_size (p_reg_saved)
1650 + current_function_outgoing_args_size);
1651 }
1652
1653 \f
/* Emit RTL for the function prologue: save live registers, allocate the
   stack frame, and set up the frame pointer if needed.  Prefers a single
   out-of-line "prologue function" call when TARGET_PROLOG_FUNCTION makes
   that smaller than the one-by-one saves.  */

void
expand_prologue (void)
{
  unsigned int i;
  int offset;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  unsigned int default_stack;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  /* Save/setup global registers for interrupt functions right now. */
  if (interrupt_handler)
    {
      if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The interrupt save insns above already account for this much
	 stack, so remove it from what we still have to allocate.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Save arg registers to the stack if necessary. */
  else if (current_function_args_info.anonymous_args)
    {
      if (TARGET_PROLOG_FUNCTION && TARGET_V850E && !TARGET_DISABLE_CALLT)
	emit_insn (gen_save_r6_r9_v850e ());
      else if (TARGET_PROLOG_FUNCTION && ! TARGET_LONG_CALLS)
	emit_insn (gen_save_r6_r9 ());
      else
	{
	  /* Spill the four argument registers r6..r9 by hand.  */
	  offset = 0;
	  for (i = 6; i < 10; i++)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)),
			      gen_rtx_REG (SImode, i));
	      offset += 4;
	    }
	}
    }

  /* Identify all of the saved registers. */
  num_save = 0;
  default_stack = 0;
  for (i = 1; i < 31; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* If the return pointer is saved, the helper functions also allocate
     16 bytes of stack for arguments to be saved in. */
  if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
    {
      save_regs[num_save++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
      default_stack = 16;
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to. */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION && num_save > 0 && actual_fsize >= default_stack)
    {
      int alloc_stack = (4 * num_save) + default_stack;
      int unalloc_stack = actual_fsize - alloc_stack;
      int save_func_len = 4;	/* Size estimate of the helper call.  */
      int save_normal_len;	/* Size estimate of inline saves.  */

      if (unalloc_stack)
	save_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;

      /* see if we would have used ep to save the stack */
      if (TARGET_EP && num_save > 3 && (unsigned)actual_fsize < 255)
	save_normal_len = (3 * 2) + (2 * num_save);
      else
	save_normal_len = 4 * num_save;

      save_normal_len += CONST_OK_FOR_J (actual_fsize) ? 2 : 4;

      /* Don't bother checking if we don't actually save any space.
	 This happens for instance if one register is saved and additional
	 stack space is allocated. */
      if (save_func_len < save_normal_len)
	{
	  /* Build a PARALLEL of the sp adjustment, one store per saved
	     register, and (on plain V850) the clobbers the out-of-line
	     helper performs, then see if a matching insn pattern exists.  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_V850 ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   stack_pointer_rtx,
			   plus_constant (stack_pointer_rtx, -alloc_stack));

	  offset = - default_stack;
	  for (i = 0; i < num_save; i++)
	    {
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (VOIDmode,
			       gen_rtx_MEM (Pmode,
					    plus_constant (stack_pointer_rtx,
							   offset)),
			       save_regs[i]);
	      offset -= 4;
	    }

	  if (TARGET_V850)
	    {
	      /* The helper clobbers r10 (and r11 for long calls).  */
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	      if (TARGET_DEBUG)
		fprintf (stderr, "\
Saved %d bytes via prologue function (%d vs. %d) for function %s\n",
			 save_normal_len - save_func_len,
			 save_normal_len, save_func_len,
			 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one). */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call. */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register. */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame. */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (-init_stack_alloc)));

	  /* Save the return pointer first. */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)),
			      save_regs[--num_save]);
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)),
			      save_regs[i]);
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack. */
  if (actual_fsize > init_stack_alloc)
    {
      int diff = actual_fsize - init_stack_alloc;
      if (CONST_OK_FOR_K (diff))
	emit_insn (gen_addsi3 (stack_pointer_rtx,
			       stack_pointer_rtx,
			       GEN_INT (-diff)));
      else
	{
	  /* Offset too large for an immediate; go through r12.  */
	  rtx reg = gen_rtx_REG (Pmode, 12);
	  emit_move_insn (reg, GEN_INT (-diff));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }

  /* If we need a frame pointer, set it up now. */
  if (frame_pointer_needed)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
}
1874 \f
1875
/* Emit RTL for the function epilogue: restore saved registers, release
   the stack frame, and emit the appropriate return (reti for interrupt
   handlers).  Mirrors expand_prologue, including the out-of-line
   "epilogue function" optimization.  */

void
expand_epilogue (void)
{
  unsigned int i;
  int offset;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  unsigned int actual_fsize = compute_frame_size (size, &reg_saved);
  unsigned int init_stack_free = 0;
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  unsigned int default_stack;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions. */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created. */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers. */
  num_restore = 0;
  default_stack = 0;
  for (i = 1; i < 31; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* If the return pointer is saved, the helper functions also allocate
     16 bytes of stack for arguments to be saved in. */
  if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
    {
      restore_regs[num_restore++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
      default_stack = 16;
    }

  /* See if we have an insn that restores the particular registers we
     want to. */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && actual_fsize >= default_stack
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore) + default_stack;
      int unalloc_stack = actual_fsize - alloc_stack;
      int restore_func_len = 4;	/* Size estimate of the helper jump.  */
      int restore_normal_len;	/* Size estimate of inline restores.  */

      if (unalloc_stack)
	restore_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;

      /* See if we would have used ep to restore the registers. */
      if (TARGET_EP && num_restore > 3 && (unsigned)actual_fsize < 255)
	restore_normal_len = (3 * 2) + (2 * num_restore);
      else
	restore_normal_len = 4 * num_restore;

      restore_normal_len += (CONST_OK_FOR_J (actual_fsize) ? 2 : 4) + 2;

      /* Don't bother checking if we don't actually save any space. */
      if (restore_func_len < restore_normal_len)
	{
	  /* Build a PARALLEL of the return, the sp adjustment, and one
	     load per restored register, then see if a matching insn
	     pattern exists.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (VOIDmode,
			       restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    plus_constant (stack_pointer_rtx,
							   offset)));
	      offset -= 4;
	    }

	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release whatever stack the helper doesn't, before the
		 jump to the helper.  */
	      actual_fsize -= alloc_stack;
	      if (actual_fsize)
		{
		  if (CONST_OK_FOR_K (actual_fsize))
		    emit_insn (gen_addsi3 (stack_pointer_rtx,
					   stack_pointer_rtx,
					   GEN_INT (actual_fsize)));
		  else
		    {
		      rtx reg = gen_rtx_REG (Pmode, 12);
		      emit_move_insn (reg, GEN_INT (actual_fsize));
		      emit_insn (gen_addsi3 (stack_pointer_rtx,
					     stack_pointer_rtx,
					     reg));
		    }
		}

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;

	      if (TARGET_DEBUG)
		fprintf (stderr, "\
Saved %d bytes via epilogue function (%d vs. %d) in function %s\n",
			 restore_normal_len - restore_func_len,
			 restore_normal_len, restore_func_len,
			 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilog save function is available, restore the registers the
     old fashioned way (one by one). */
  if (!restore_all)
    {
      /* If the stack is large, we need to cut it down in 2 pieces. */
      if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K. */
      if (actual_fsize > init_stack_free)
	{
	  int diff;

	  diff = actual_fsize - ((interrupt_handler) ? 0 : init_stack_free);

	  if (CONST_OK_FOR_K (diff))
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (diff)));
	  else
	    {
	      /* Offset too large for an immediate; go through r12.  */
	      rtx reg = gen_rtx_REG (Pmode, 12);
	      emit_move_insn (reg, GEN_INT (diff));
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     reg));
	    }
	}

      /* Special case interrupt functions that save all registers
	 for a call. */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame. */
	  offset = init_stack_free - 4;

	  /* Restore the return pointer first. */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)));

	      /* The USE keeps the restore from being deleted as dead.  */
	      emit_insn (gen_rtx_USE (VOIDmode, restore_regs[i]));
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack. */
	  if (init_stack_free)
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (init_stack_free)));
	}

      /* And return or use reti for interrupt handlers. */
      if (interrupt_handler)
	{
	  if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return ());
    }

  /* Invalidate the cached interrupt-function-ness of the current
     function; see v850_interrupt_function_p.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
2099
2100 \f
2101 /* Update the condition code from the insn. */
2102
/* Update cc_status to reflect the effect INSN (with pattern BODY) has
   on the condition-code flags, based on the insn's "cc" attribute.  */

void
notice_update_cc (rtx body, rtx insn)
{
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all. */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed. */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C is in an unusable state. */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
	 C is in an unusable state. */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction. */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state. */
      CC_STATUS_INIT;
      break;
    }
}
2147 \f
2148 /* Retrieve the data area that has been chosen for the given decl. */
2149
2150 v850_data_area
2151 v850_get_data_area (tree decl)
2152 {
2153 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2154 return DATA_AREA_SDA;
2155
2156 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2157 return DATA_AREA_TDA;
2158
2159 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2160 return DATA_AREA_ZDA;
2161
2162 return DATA_AREA_NORMAL;
2163 }
2164
2165 /* Store the indicated data area in the decl's attributes. */
2166
2167 static void
2168 v850_set_data_area (tree decl, v850_data_area data_area)
2169 {
2170 tree name;
2171
2172 switch (data_area)
2173 {
2174 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2175 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2176 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2177 default:
2178 return;
2179 }
2180
2181 DECL_ATTRIBUTES (decl) = tree_cons
2182 (name, NULL, DECL_ATTRIBUTES (decl));
2183 }
2184 \f
/* Machine-specific attributes recognized by this back end.  The two
   interrupt spellings share one handler; the three data-area
   attributes share another.  All require a decl (decl_req == true)
   and take no arguments (min_len == max_len == 0).  */
const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
  { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute },
  { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute },
  { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute },
  { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2195
2196 /* Handle an "interrupt" attribute; arguments as in
2197 struct attribute_spec.handler. */
2198 static tree
2199 v850_handle_interrupt_attribute (tree * node,
2200 tree name,
2201 tree args ATTRIBUTE_UNUSED,
2202 int flags ATTRIBUTE_UNUSED,
2203 bool * no_add_attrs)
2204 {
2205 if (TREE_CODE (*node) != FUNCTION_DECL)
2206 {
2207 warning ("%qs attribute only applies to functions",
2208 IDENTIFIER_POINTER (name));
2209 *no_add_attrs = true;
2210 }
2211
2212 return NULL_TREE;
2213 }
2214
2215 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2216 struct attribute_spec.handler. */
2217 static tree
2218 v850_handle_data_area_attribute (tree* node,
2219 tree name,
2220 tree args ATTRIBUTE_UNUSED,
2221 int flags ATTRIBUTE_UNUSED,
2222 bool * no_add_attrs)
2223 {
2224 v850_data_area data_area;
2225 v850_data_area area;
2226 tree decl = *node;
2227
2228 /* Implement data area attribute. */
2229 if (is_attribute_p ("sda", name))
2230 data_area = DATA_AREA_SDA;
2231 else if (is_attribute_p ("tda", name))
2232 data_area = DATA_AREA_TDA;
2233 else if (is_attribute_p ("zda", name))
2234 data_area = DATA_AREA_ZDA;
2235 else
2236 abort ();
2237
2238 switch (TREE_CODE (decl))
2239 {
2240 case VAR_DECL:
2241 if (current_function_decl != NULL_TREE)
2242 {
2243 error ("%Jdata area attributes cannot be specified for "
2244 "local variables", decl, decl);
2245 *no_add_attrs = true;
2246 }
2247
2248 /* Drop through. */
2249
2250 case FUNCTION_DECL:
2251 area = v850_get_data_area (decl);
2252 if (area != DATA_AREA_NORMAL && data_area != area)
2253 {
2254 error ("%Jdata area of '%D' conflicts with previous declaration",
2255 decl, decl);
2256 *no_add_attrs = true;
2257 }
2258 break;
2259
2260 default:
2261 break;
2262 }
2263
2264 return NULL_TREE;
2265 }
2266
2267 \f
2268 /* Return nonzero if FUNC is an interrupt function as specified
2269 by the "interrupt" attribute. */
2270
int
v850_interrupt_function_p (tree func)
{
  tree a;
  int ret = 0;

  /* Return the cached answer when one has been recorded (the cache is
     cleared at the end of expand_epilogue).  */
  if (v850_interrupt_cache_p)
    return v850_interrupt_p;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return 0;

  /* Either spelling of the attribute marks an interrupt function.  */
  a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
  if (a != NULL_TREE)
    ret = 1;

  else
    {
      a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
      ret = a != NULL_TREE;
    }

  /* It's not safe to trust global variables until after function inlining has
     been done.  (Note: only the result is stored here; v850_interrupt_cache_p
     itself is set elsewhere — TODO confirm where the cache flag is armed.)  */
  if (reload_completed | reload_in_progress)
    v850_interrupt_p = ret;

  return ret;
}
2300
2301 \f
/* Set the SYMBOL_FLAG_{ZDA,TDA,SDA} flag on SYMBOL according to DECL's
   data area.  When DECL has no explicit data-area attribute, derive one
   from its section name (".zdata"/".zbss", ".sdata"/".sbss", ".tdata")
   or, failing that, from its size against the -m{tda,sda,zda}=n
   thresholds, recording the choice back on the decl.  */
static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  if (size <= 0)
	    ;		/* Unknown or zero size: leave in normal data.  */

	  else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still no data area: nothing to encode on the symbol.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  /* Transfer the decided data area onto the symbol's flags.  */
  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: abort ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2355
2356 static void
2357 v850_encode_section_info (tree decl, rtx rtl, int first)
2358 {
2359 default_encode_section_info (decl, rtl, first);
2360
2361 if (TREE_CODE (decl) == VAR_DECL
2362 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2363 v850_encode_data_area (decl, XEXP (rtl, 0));
2364 }
2365
2366 /* Return true if the given RTX is a register which can be restored
2367 by a function epilogue. */
2368 int
2369 register_is_ok_for_epilogue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2370 {
2371 /* The save/restore routines can only cope with registers 20 - 31. */
2372 return ((GET_CODE (op) == REG)
2373 && (((REGNO (op) >= 20) && REGNO (op) <= 31)));
2374 }
2375
2376 /* Return nonzero if the given RTX is suitable for collapsing into
2377 jump to a function epilogue. */
int
pattern_is_ok_for_epilogue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then the function epilogue
     is not suitable. */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are performing a
     function epilogue and that we are popping at least one register.  We must
     now check the remaining entries in the vector to make sure that they are
     also register pops.  There is no good reason why there should ever be
     anything else in this vector, but being paranoid always helps...

     The test below performs the C equivalent of this machine description
     pattern match:

        (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
	  (mem:SI (plus:SI (reg:SI 3) (match_operand:SI n "immediate_operand" "i"))))
     */

  /* NOTE(review): iteration starts at 3 — elements 0-2 are presumably
     the return/sp-adjust/first-pop already matched by the md pattern;
     confirm against the epilogue pattern in v850.md.  */
  for (i = 3; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Destination must be a restorable SImode register and the
	 source an SImode memory reference.  */
      if (GET_CODE (dest) != REG
	  || GET_MODE (dest) != SImode
	  || ! register_is_ok_for_epilogue (dest, SImode)
	  || GET_CODE (src) != MEM
	  || GET_MODE (src) != SImode)
	return 0;

      plus = XEXP (src, 0);

      /* The address must be stack pointer plus a constant offset.  */
      if (GET_CODE (plus) != PLUS
	  || GET_CODE (XEXP (plus, 0)) != REG
	  || GET_MODE (XEXP (plus, 0)) != SImode
	  || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;
    }

  return 1;
}
2434
2435 /* Construct a JR instruction to a routine that will perform the equivalent of
2436 the RTL passed in as an argument. This RTL is a function epilogue that
2437 pops registers off the stack and possibly releases some extra stack space
2438 as well. The code has already verified that the RTL matches these
2439 requirements. */
2440 char *
2441 construct_restore_jr (rtx op)
2442 {
2443 int count = XVECLEN (op, 0);
2444 int stack_bytes;
2445 unsigned long int mask;
2446 unsigned long int first;
2447 unsigned long int last;
2448 int i;
2449 static char buff [100]; /* XXX */
2450
2451 if (count <= 2)
2452 {
2453 error ("bogus JR construction: %d\n", count);
2454 return NULL;
2455 }
2456
2457 /* Work out how many bytes to pop off the stack before retrieving
2458 registers. */
2459 if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
2460 abort ();
2461 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
2462 abort ();
2463 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
2464 abort ();
2465
2466 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2467
2468 /* Each pop will remove 4 bytes from the stack.... */
2469 stack_bytes -= (count - 2) * 4;
2470
2471 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2472 if (stack_bytes != 0 && stack_bytes != 16)
2473 {
2474 error ("bad amount of stack space removal: %d", stack_bytes);
2475 return NULL;
2476 }
2477
2478 /* Now compute the bit mask of registers to push. */
2479 mask = 0;
2480 for (i = 2; i < count; i++)
2481 {
2482 rtx vector_element = XVECEXP (op, 0, i);
2483
2484 if (GET_CODE (vector_element) != SET)
2485 abort ();
2486 if (GET_CODE (SET_DEST (vector_element)) != REG)
2487 abort ();
2488 if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
2489 abort ();
2490
2491 mask |= 1 << REGNO (SET_DEST (vector_element));
2492 }
2493
2494 /* Scan for the first register to pop. */
2495 for (first = 0; first < 32; first++)
2496 {
2497 if (mask & (1 << first))
2498 break;
2499 }
2500
2501 if (first >= 32)
2502 abort ();
2503
2504 /* Discover the last register to pop. */
2505 if (mask & (1 << LINK_POINTER_REGNUM))
2506 {
2507 if (stack_bytes != 16)
2508 abort ();
2509
2510 last = LINK_POINTER_REGNUM;
2511 }
2512 else
2513 {
2514 if (stack_bytes != 0)
2515 abort ();
2516
2517 if ((mask & (1 << 29)) == 0)
2518 abort ();
2519
2520 last = 29;
2521 }
2522
2523 /* Note, it is possible to have gaps in the register mask.
2524 We ignore this here, and generate a JR anyway. We will
2525 be popping more registers than is strictly necessary, but
2526 it does save code space. */
2527
2528 if (TARGET_LONG_CALLS)
2529 {
2530 char name[40];
2531
2532 if (first == last)
2533 sprintf (name, "__return_%s", reg_names [first]);
2534 else
2535 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2536
2537 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2538 name, name);
2539 }
2540 else
2541 {
2542 if (first == last)
2543 sprintf (buff, "jr __return_%s", reg_names [first]);
2544 else
2545 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2546 }
2547
2548 return buff;
2549 }
2550
2551
/* Return nonzero if the given RTX is suitable for collapsing into
   a jump to a function prologue.  */
int
pattern_is_ok_for_prologue (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;
  rtx vector_element;

  /* If there are no registers to save then the function prologue
     is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are adjusting the
     stack and pushing at least one register.  We must now check that the
     remaining entries in the vector to make sure that they are also register
     pushes, except for the last entry which should be a CLOBBER of r10.

     The test below performs the C equivalent of this machine description
     pattern match:

     (set (mem:SI (plus:SI (reg:SI 3)
     (match_operand:SI 2 "immediate_operand" "i")))
     (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))

     */

  /* With -mlong-calls two trailing CLOBBERs (r10 and r11) are expected,
     otherwise only one (r10); stop the push scan before them.  */
  for (i = 2; i < count - (TARGET_LONG_CALLS ? 2: 1); i++)
    {
      rtx dest;
      rtx src;
      rtx plus;

      vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* Each push stores an SImode register (r20-r31) into SImode memory.  */
      if (GET_CODE (dest) != MEM
	  || GET_MODE (dest) != SImode
	  || GET_CODE (src) != REG
	  || GET_MODE (src) != SImode
	  || ! register_is_ok_for_epilogue (src, SImode))
	return 0;

      plus = XEXP (dest, 0);

      /* The store address must be a CONST_INT offset from the stack
	 pointer.  */
      if ( GET_CODE (plus) != PLUS
	   || GET_CODE (XEXP (plus, 0)) != REG
	   || GET_MODE (XEXP (plus, 0)) != SImode
	   || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	   || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;

      /* If the register is being pushed somewhere other than the stack
	 space just acquired by the first operand then abandon this quest.
	 Note: the test is <= because both values are negative.	*/
      if (INTVAL (XEXP (plus, 1))
	  <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
	{
	  return 0;
	}
    }

  /* Make sure that the last entries in the vector are clobbers.  */
  for (; i < count; i++)
    {
      vector_element = XVECEXP (op, 0, i);

      if (GET_CODE (vector_element) != CLOBBER
	  || GET_CODE (XEXP (vector_element, 0)) != REG
	  || !(REGNO (XEXP (vector_element, 0)) == 10
	       || (TARGET_LONG_CALLS ? (REGNO (XEXP (vector_element, 0)) == 11) : 0 )))
	return 0;
    }

  return 1;
}
2634
2635 /* Construct a JARL instruction to a routine that will perform the equivalent
2636 of the RTL passed as a parameter. This RTL is a function prologue that
2637 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2638 some stack space as well. The code has already verified that the RTL
2639 matches these requirements. */
2640 char *
2641 construct_save_jarl (rtx op)
2642 {
2643 int count = XVECLEN (op, 0);
2644 int stack_bytes;
2645 unsigned long int mask;
2646 unsigned long int first;
2647 unsigned long int last;
2648 int i;
2649 static char buff [100]; /* XXX */
2650
2651 if (count <= 2)
2652 {
2653 error ("bogus JARL construction: %d\n", count);
2654 return NULL;
2655 }
2656
2657 /* Paranoia. */
2658 if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2659 abort ();
2660 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
2661 abort ();
2662 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) != REG)
2663 abort ();
2664 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
2665 abort ();
2666
2667 /* Work out how many bytes to push onto the stack after storing the
2668 registers. */
2669 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2670
2671 /* Each push will put 4 bytes from the stack.... */
2672 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2673
2674 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2675 if (stack_bytes != 0 && stack_bytes != -16)
2676 {
2677 error ("bad amount of stack space removal: %d", stack_bytes);
2678 return NULL;
2679 }
2680
2681 /* Now compute the bit mask of registers to push. */
2682 mask = 0;
2683 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2684 {
2685 rtx vector_element = XVECEXP (op, 0, i);
2686
2687 if (GET_CODE (vector_element) != SET)
2688 abort ();
2689 if (GET_CODE (SET_SRC (vector_element)) != REG)
2690 abort ();
2691 if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
2692 abort ();
2693
2694 mask |= 1 << REGNO (SET_SRC (vector_element));
2695 }
2696
2697 /* Scan for the first register to push. */
2698 for (first = 0; first < 32; first++)
2699 {
2700 if (mask & (1 << first))
2701 break;
2702 }
2703
2704 if (first >= 32)
2705 abort ();
2706
2707 /* Discover the last register to push. */
2708 if (mask & (1 << LINK_POINTER_REGNUM))
2709 {
2710 if (stack_bytes != -16)
2711 abort ();
2712
2713 last = LINK_POINTER_REGNUM;
2714 }
2715 else
2716 {
2717 if (stack_bytes != 0)
2718 abort ();
2719 if ((mask & (1 << 29)) == 0)
2720 abort ();
2721
2722 last = 29;
2723 }
2724
2725 /* Note, it is possible to have gaps in the register mask.
2726 We ignore this here, and generate a JARL anyway. We will
2727 be pushing more registers than is strictly necessary, but
2728 it does save code space. */
2729
2730 if (TARGET_LONG_CALLS)
2731 {
2732 char name[40];
2733
2734 if (first == last)
2735 sprintf (name, "__save_%s", reg_names [first]);
2736 else
2737 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2738
2739 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2740 name, name);
2741 }
2742 else
2743 {
2744 if (first == last)
2745 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2746 else
2747 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2748 reg_names [last]);
2749 }
2750
2751 return buff;
2752 }
2753
2754 extern tree last_assemble_variable_decl;
2755 extern int size_directive_output;
2756
2757 /* A version of asm_output_aligned_bss() that copes with the special
2758 data areas of the v850. */
2759 void
2760 v850_output_aligned_bss (FILE * file,
2761 tree decl,
2762 const char * name,
2763 int size,
2764 int align)
2765 {
2766 switch (v850_get_data_area (decl))
2767 {
2768 case DATA_AREA_ZDA:
2769 zbss_section ();
2770 break;
2771
2772 case DATA_AREA_SDA:
2773 sbss_section ();
2774 break;
2775
2776 case DATA_AREA_TDA:
2777 tdata_section ();
2778
2779 default:
2780 bss_section ();
2781 break;
2782 }
2783
2784 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2785 #ifdef ASM_DECLARE_OBJECT_NAME
2786 last_assemble_variable_decl = decl;
2787 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2788 #else
2789 /* Standard thing is just output label for the object. */
2790 ASM_OUTPUT_LABEL (file, name);
2791 #endif /* ASM_DECLARE_OBJECT_NAME */
2792 ASM_OUTPUT_SKIP (file, size ? size : 1);
2793 }
2794
2795 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2796 void
2797 v850_output_common (FILE * file,
2798 tree decl,
2799 const char * name,
2800 int size,
2801 int align)
2802 {
2803 if (decl == NULL_TREE)
2804 {
2805 fprintf (file, "%s", COMMON_ASM_OP);
2806 }
2807 else
2808 {
2809 switch (v850_get_data_area (decl))
2810 {
2811 case DATA_AREA_ZDA:
2812 fprintf (file, "%s", ZCOMMON_ASM_OP);
2813 break;
2814
2815 case DATA_AREA_SDA:
2816 fprintf (file, "%s", SCOMMON_ASM_OP);
2817 break;
2818
2819 case DATA_AREA_TDA:
2820 fprintf (file, "%s", TCOMMON_ASM_OP);
2821 break;
2822
2823 default:
2824 fprintf (file, "%s", COMMON_ASM_OP);
2825 break;
2826 }
2827 }
2828
2829 assemble_name (file, name);
2830 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2831 }
2832
2833 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2834 void
2835 v850_output_local (FILE * file,
2836 tree decl,
2837 const char * name,
2838 int size,
2839 int align)
2840 {
2841 fprintf (file, "%s", LOCAL_ASM_OP);
2842 assemble_name (file, name);
2843 fprintf (file, "\n");
2844
2845 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2846 }
2847
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* If a GHS data-area pragma is active, tag file-scope variable and
     constant declarations that do not already have a data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= build_string (sizeof (".sdata")-1, ".sdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= build_string (sizeof (".rosdata")-1, ".rosdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= build_string (sizeof (".tdata")-1, ".tdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= build_string (sizeof (".zdata")-1, ".zdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= build_string (sizeof (".rozdata")-1, ".rozdata");
    }

  /* Choose a section for file-scope definitions (or externs with an
     initializer) that do not already carry an explicit section name.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      tree chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl.  */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      abort ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  DECL_SECTION_NAME (decl) = chosen_section;
	}
    }
}
2945
/* Return nonzero if the given RTX is suitable
   for collapsing into a DISPOSE instruction.  */

int
pattern_is_ok_for_dispose (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then
     the dispose instruction is not suitable.  */
  if (count <= 2)
    return 0;

  /* The pattern matching has already established that we are performing a
     function epilogue and that we are popping at least one register.  We must
     now check the remaining entries in the vector to make sure that they are
     also register pops.  There is no good reason why there should ever be
     anything else in this vector, but being paranoid always helps...

     The test below performs the C equivalent of this machine description
     pattern match:

        (set (match_operand:SI n "register_is_ok_for_epilogue" "r")
          (mem:SI (plus:SI (reg:SI 3)
                           (match_operand:SI n "immediate_operand" "i"))))
     */

  /* NOTE(review): the scan starts at element 3, so the first three vector
     elements are presumed to have been fully matched by the md pattern that
     uses this predicate -- confirm against the dispose pattern in v850.md.  */
  for (i = 3; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      /* Every remaining element must be a SET...  */
      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* ...loading an SImode memory reference into a restorable
	 (r20-r31) SImode register.  */
      if ( GET_CODE (dest) != REG
	   || GET_MODE (dest) != SImode
	   || ! register_is_ok_for_epilogue (dest, SImode)
	   || GET_CODE (src) != MEM
	   || GET_MODE (src) != SImode)
	return 0;

      plus = XEXP (src, 0);

      /* The memory address must be a CONST_INT offset from the stack
	 pointer.  */
      if ( GET_CODE (plus) != PLUS
	   || GET_CODE (XEXP (plus, 0)) != REG
	   || GET_MODE (XEXP (plus, 0)) != SImode
	   || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	   || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;
    }

  return 1;
}
3006
3007 /* Construct a DISPOSE instruction that is the equivalent of
3008 the given RTX. We have already verified that this should
3009 be possible. */
3010
3011 char *
3012 construct_dispose_instruction (rtx op)
3013 {
3014 int count = XVECLEN (op, 0);
3015 int stack_bytes;
3016 unsigned long int mask;
3017 int i;
3018 static char buff[ 100 ]; /* XXX */
3019 int use_callt = 0;
3020
3021 if (count <= 2)
3022 {
3023 error ("Bogus DISPOSE construction: %d\n", count);
3024 return NULL;
3025 }
3026
3027 /* Work out how many bytes to pop off the
3028 stack before retrieving registers. */
3029 if (GET_CODE (XVECEXP (op, 0, 1)) != SET)
3030 abort ();
3031 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) != PLUS)
3032 abort ();
3033 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) != CONST_INT)
3034 abort ();
3035
3036 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
3037
3038 /* Each pop will remove 4 bytes from the stack.... */
3039 stack_bytes -= (count - 2) * 4;
3040
3041 /* Make sure that the amount we are popping
3042 will fit into the DISPOSE instruction. */
3043 if (stack_bytes > 128)
3044 {
3045 error ("Too much stack space to dispose of: %d", stack_bytes);
3046 return NULL;
3047 }
3048
3049 /* Now compute the bit mask of registers to push. */
3050 mask = 0;
3051
3052 for (i = 2; i < count; i++)
3053 {
3054 rtx vector_element = XVECEXP (op, 0, i);
3055
3056 if (GET_CODE (vector_element) != SET)
3057 abort ();
3058 if (GET_CODE (SET_DEST (vector_element)) != REG)
3059 abort ();
3060 if (! register_is_ok_for_epilogue (SET_DEST (vector_element), SImode))
3061 abort ();
3062
3063 if (REGNO (SET_DEST (vector_element)) == 2)
3064 use_callt = 1;
3065 else
3066 mask |= 1 << REGNO (SET_DEST (vector_element));
3067 }
3068
3069 if (! TARGET_DISABLE_CALLT
3070 && (use_callt || stack_bytes == 0 || stack_bytes == 16))
3071 {
3072 if (use_callt)
3073 {
3074 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
3075 return buff;
3076 }
3077 else
3078 {
3079 for (i = 20; i < 32; i++)
3080 if (mask & (1 << i))
3081 break;
3082
3083 if (i == 31)
3084 sprintf (buff, "callt ctoff(__callt_return_r31c)");
3085 else
3086 sprintf (buff, "callt ctoff(__callt_return_r%d_r%d%s)",
3087 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3088 }
3089 }
3090 else
3091 {
3092 static char regs [100]; /* XXX */
3093 int done_one;
3094
3095 /* Generate the DISPOSE instruction. Note we could just issue the
3096 bit mask as a number as the assembler can cope with this, but for
3097 the sake of our readers we turn it into a textual description. */
3098 regs[0] = 0;
3099 done_one = 0;
3100
3101 for (i = 20; i < 32; i++)
3102 {
3103 if (mask & (1 << i))
3104 {
3105 int first;
3106
3107 if (done_one)
3108 strcat (regs, ", ");
3109 else
3110 done_one = 1;
3111
3112 first = i;
3113 strcat (regs, reg_names[ first ]);
3114
3115 for (i++; i < 32; i++)
3116 if ((mask & (1 << i)) == 0)
3117 break;
3118
3119 if (i > first + 1)
3120 {
3121 strcat (regs, " - ");
3122 strcat (regs, reg_names[ i - 1 ] );
3123 }
3124 }
3125 }
3126
3127 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
3128 }
3129
3130 return buff;
3131 }
3132
/* Return nonzero if the given RTX is suitable
   for collapsing into a PREPARE instruction.  */

int
pattern_is_ok_for_prepare (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  int i;

  /* If there are no registers to restore then the prepare instruction
     is not suitable.  */
  if (count <= 1)
    return 0;

  /* The pattern matching has already established that we are adjusting the
     stack and pushing at least one register.  We must now check that the
     remaining entries in the vector to make sure that they are also register
     pushes.

     The test below performs the C equivalent of this machine description
     pattern match:

     (set (mem:SI (plus:SI (reg:SI 3)
     (match_operand:SI 2 "immediate_operand" "i")))
     (match_operand:SI 3 "register_is_ok_for_epilogue" "r"))

     */

  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);
      rtx dest;
      rtx src;
      rtx plus;

      /* Every remaining element must be a SET...  */
      if (GET_CODE (vector_element) != SET)
	return 0;

      dest = SET_DEST (vector_element);
      src = SET_SRC (vector_element);

      /* ...storing an SImode register (r20-r31) into SImode memory.  */
      if ( GET_CODE (dest) != MEM
	   || GET_MODE (dest) != SImode
	   || GET_CODE (src) != REG
	   || GET_MODE (src) != SImode
	   || ! register_is_ok_for_epilogue (src, SImode)
	   )
	return 0;

      plus = XEXP (dest, 0);

      /* The store address must be a CONST_INT offset from the stack
	 pointer.  */
      if ( GET_CODE (plus) != PLUS
	   || GET_CODE (XEXP (plus, 0)) != REG
	   || GET_MODE (XEXP (plus, 0)) != SImode
	   || REGNO (XEXP (plus, 0)) != STACK_POINTER_REGNUM
	   || GET_CODE (XEXP (plus, 1)) != CONST_INT)
	return 0;

      /* If the register is being pushed somewhere other than the stack
	 space just acquired by the first operand then abandon this quest.
	 Note: the test is <= because both values are negative.  */
      if (INTVAL (XEXP (plus, 1))
	  <= INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)))
	return 0;
    }

  return 1;
}
3201
3202 /* Construct a PREPARE instruction that is the equivalent of
3203 the given RTL. We have already verified that this should
3204 be possible. */
3205
3206 char *
3207 construct_prepare_instruction (rtx op)
3208 {
3209 int count = XVECLEN (op, 0);
3210 int stack_bytes;
3211 unsigned long int mask;
3212 int i;
3213 static char buff[ 100 ]; /* XXX */
3214 int use_callt = 0;
3215
3216 if (count <= 1)
3217 {
3218 error ("Bogus PREPEARE construction: %d\n", count);
3219 return NULL;
3220 }
3221
3222 /* Work out how many bytes to push onto
3223 the stack after storing the registers. */
3224 if (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3225 abort ();
3226 if (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != PLUS)
3227 abort ();
3228 if (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) != CONST_INT)
3229 abort ();
3230
3231 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
3232
3233 /* Each push will put 4 bytes from the stack. */
3234 stack_bytes += (count - 1) * 4;
3235
3236 /* Make sure that the amount we are popping
3237 will fit into the DISPOSE instruction. */
3238 if (stack_bytes < -128)
3239 {
3240 error ("Too much stack space to prepare: %d", stack_bytes);
3241 return NULL;
3242 }
3243
3244 /* Now compute the bit mask of registers to push. */
3245 mask = 0;
3246 for (i = 1; i < count; i++)
3247 {
3248 rtx vector_element = XVECEXP (op, 0, i);
3249
3250 if (GET_CODE (vector_element) != SET)
3251 abort ();
3252 if (GET_CODE (SET_SRC (vector_element)) != REG)
3253 abort ();
3254 if (! register_is_ok_for_epilogue (SET_SRC (vector_element), SImode))
3255 abort ();
3256
3257 if (REGNO (SET_SRC (vector_element)) == 2)
3258 use_callt = 1;
3259 else
3260 mask |= 1 << REGNO (SET_SRC (vector_element));
3261 }
3262
3263 if ((! TARGET_DISABLE_CALLT)
3264 && (use_callt || stack_bytes == 0 || stack_bytes == -16))
3265 {
3266 if (use_callt)
3267 {
3268 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
3269 return buff;
3270 }
3271
3272 for (i = 20; i < 32; i++)
3273 if (mask & (1 << i))
3274 break;
3275
3276 if (i == 31)
3277 sprintf (buff, "callt ctoff(__callt_save_r31c)");
3278 else
3279 sprintf (buff, "callt ctoff(__callt_save_r%d_r%d%s)",
3280 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
3281 }
3282 else
3283 {
3284 static char regs [100]; /* XXX */
3285 int done_one;
3286
3287
3288 /* Generate the PREPARE instruction. Note we could just issue the
3289 bit mask as a number as the assembler can cope with this, but for
3290 the sake of our readers we turn it into a textual description. */
3291 regs[0] = 0;
3292 done_one = 0;
3293
3294 for (i = 20; i < 32; i++)
3295 {
3296 if (mask & (1 << i))
3297 {
3298 int first;
3299
3300 if (done_one)
3301 strcat (regs, ", ");
3302 else
3303 done_one = 1;
3304
3305 first = i;
3306 strcat (regs, reg_names[ first ]);
3307
3308 for (i++; i < 32; i++)
3309 if ((mask & (1 << i)) == 0)
3310 break;
3311
3312 if (i > first + 1)
3313 {
3314 strcat (regs, " - ");
3315 strcat (regs, reg_names[ i - 1 ] );
3316 }
3317 }
3318 }
3319
3320 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
3321 }
3322
3323 return buff;
3324 }
3325 \f
3326 /* Return an RTX indicating where the return address to the
3327 calling function can be found. */
3328
3329 rtx
3330 v850_return_addr (int count)
3331 {
3332 if (count != 0)
3333 return const0_rtx;
3334
3335 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
3336 }
3337 \f
3338 static void
3339 v850_select_section (tree exp,
3340 int reloc ATTRIBUTE_UNUSED,
3341 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
3342 {
3343 if (TREE_CODE (exp) == VAR_DECL)
3344 {
3345 int is_const;
3346 if (!TREE_READONLY (exp)
3347 || TREE_SIDE_EFFECTS (exp)
3348 || !DECL_INITIAL (exp)
3349 || (DECL_INITIAL (exp) != error_mark_node
3350 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3351 is_const = FALSE;
3352 else
3353 is_const = TRUE;
3354
3355 switch (v850_get_data_area (exp))
3356 {
3357 case DATA_AREA_ZDA:
3358 if (is_const)
3359 rozdata_section ();
3360 else
3361 zdata_section ();
3362 break;
3363
3364 case DATA_AREA_TDA:
3365 tdata_section ();
3366 break;
3367
3368 case DATA_AREA_SDA:
3369 if (is_const)
3370 rosdata_section ();
3371 else
3372 sdata_section ();
3373 break;
3374
3375 default:
3376 if (is_const)
3377 readonly_data_section ();
3378 else
3379 data_section ();
3380 break;
3381 }
3382 }
3383 else
3384 readonly_data_section ();
3385 }
3386 \f
3387 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3388
3389 static bool
3390 v850_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3391 {
3392 /* Return values > 8 bytes in length in memory. */
3393 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3394 }
3395 \f
3396 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
3397
3398 static void
3399 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3400 enum machine_mode mode ATTRIBUTE_UNUSED,
3401 tree type ATTRIBUTE_UNUSED,
3402 int *pretend_arg_size ATTRIBUTE_UNUSED,
3403 int second_time ATTRIBUTE_UNUSED)
3404 {
3405 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3406 }
This page took 0.192192 seconds and 5 git commands to generate.