1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "expr.h"
37 #include "function.h"
38 #include "recog.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44
45 /* Save the operands last given to a compare for use when we
46 generate a scc or bcc insn. */
47 rtx m32r_compare_op0, m32r_compare_op1;
48
49 /* Array of valid operand punctuation characters. */
50 char m32r_punct_chars[256];
51
52 /* Selected code model. */
53 const char * m32r_model_string = M32R_MODEL_DEFAULT;
54 enum m32r_model m32r_model;
55
56 /* Selected SDA support. */
57 const char * m32r_sdata_string = M32R_SDATA_DEFAULT;
58 enum m32r_sdata m32r_sdata;
59
60 /* Machine-specific symbol_ref flags. */
61 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
62 #define SYMBOL_REF_MODEL(X) \
63 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
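/* Illustrative sketch (not part of the original source) of how the code model
   is packed into and read back from a SYMBOL_REF, mirroring
   m32r_encode_section_info below:

       SYMBOL_REF_FLAGS (sym) |= M32R_MODEL_MEDIUM << SYMBOL_FLAG_MODEL_SHIFT;
       ...
       if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_MEDIUM)
         ...

   Only two bits (the mask of 3 above) are used, which is enough for the three
   code models.  */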
64
65 /* For string literals, etc. */
66 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
67
68 /* Cache-flush support.  The cache is flushed in the trampoline code.
69 The default cache-flush mechanism is "trap 12":
70 the default cache-flush function is "_flush_cache" (CACHE_FLUSH_FUNC),
71 the default cache-flush trap-interrupt number is 12 (CACHE_FLUSH_TRAP).
72 You can change how the cache-flush code is generated with the following options:
73 -flush-func=FLUSH-FUNC-NAME
74 -no-flush-func
75 -flush-trap=TRAP-NUMBER
76 -no-flush-trap. */
77 const char *m32r_cache_flush_func = CACHE_FLUSH_FUNC;
78 const char *m32r_cache_flush_trap_string = CACHE_FLUSH_TRAP;
79 int m32r_cache_flush_trap = 12;
80
81 /* Forward declaration. */
82 static void init_reg_tables (void);
83 static void block_move_call (rtx, rtx, rtx);
84 static int m32r_is_insn (rtx);
85 const struct attribute_spec m32r_attribute_table[];
86 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
87 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
88 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
89
90 static void m32r_file_start (void);
91
92 static int m32r_adjust_priority (rtx, int);
93 static int m32r_issue_rate (void);
94
95 static void m32r_encode_section_info (tree, rtx, int);
96 static bool m32r_in_small_data_p (tree);
97 static bool m32r_return_in_memory (tree, tree);
98 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
99 tree, int *, int);
100 static void init_idents (void);
101 static bool m32r_rtx_costs (rtx, int, int, int *);
102 \f
103 /* Initialize the GCC target structure. */
104 #undef TARGET_ATTRIBUTE_TABLE
105 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
106
107 #undef TARGET_ASM_ALIGNED_HI_OP
108 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
109 #undef TARGET_ASM_ALIGNED_SI_OP
110 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
111
112 #undef TARGET_ASM_FUNCTION_PROLOGUE
113 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
114 #undef TARGET_ASM_FUNCTION_EPILOGUE
115 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
116
117 #undef TARGET_ASM_FILE_START
118 #define TARGET_ASM_FILE_START m32r_file_start
119
120 #undef TARGET_SCHED_ADJUST_PRIORITY
121 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
122 #undef TARGET_SCHED_ISSUE_RATE
123 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
124 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
125 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
126
127 #undef TARGET_ENCODE_SECTION_INFO
128 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
129 #undef TARGET_IN_SMALL_DATA_P
130 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
131
132 #undef TARGET_RTX_COSTS
133 #define TARGET_RTX_COSTS m32r_rtx_costs
134 #undef TARGET_ADDRESS_COST
135 #define TARGET_ADDRESS_COST hook_int_rtx_0
136
137 #undef TARGET_PROMOTE_PROTOTYPES
138 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
139
140 #undef TARGET_RETURN_IN_MEMORY
141 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
142
143 #undef TARGET_SETUP_INCOMING_VARARGS
144 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
145
146 struct gcc_target targetm = TARGET_INITIALIZER;
147 \f
148 /* Called by OVERRIDE_OPTIONS to initialize various things. */
149
150 void
151 m32r_init (void)
152 {
153 init_reg_tables ();
154
155 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
156 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
157 m32r_punct_chars['#'] = 1;
158 m32r_punct_chars['@'] = 1; /* ??? no longer used */
159
160 /* Provide default value if not specified. */
161 if (!g_switch_set)
162 g_switch_value = SDATA_DEFAULT_SIZE;
163
164 if (strcmp (m32r_model_string, "small") == 0)
165 m32r_model = M32R_MODEL_SMALL;
166 else if (strcmp (m32r_model_string, "medium") == 0)
167 m32r_model = M32R_MODEL_MEDIUM;
168 else if (strcmp (m32r_model_string, "large") == 0)
169 m32r_model = M32R_MODEL_LARGE;
170 else
171 error ("bad value (%s) for -mmodel switch", m32r_model_string);
172
173 if (strcmp (m32r_sdata_string, "none") == 0)
174 m32r_sdata = M32R_SDATA_NONE;
175 else if (strcmp (m32r_sdata_string, "sdata") == 0)
176 m32r_sdata = M32R_SDATA_SDATA;
177 else if (strcmp (m32r_sdata_string, "use") == 0)
178 m32r_sdata = M32R_SDATA_USE;
179 else
180 error ("bad value (%s) for -msdata switch", m32r_sdata_string);
181
182 if (m32r_cache_flush_trap_string)
183 {
184 /* Change the cache-flush trap number from the default (12) to another value (0 - 15). */
185 m32r_cache_flush_trap = atoi (m32r_cache_flush_trap_string);
186 if (m32r_cache_flush_trap < 0 || m32r_cache_flush_trap > 15)
187 error ("bad value (%s) for -flush-trap=n (0=<n<=15)",
188 m32r_cache_flush_trap_string);
189 }
190 }
191
192 /* Vectors to keep interesting information about registers where it can easily
193 be accessed.  We used to use the actual mode value as the bit number, but there
194 are (or may be) more than 32 modes now.  Instead we use two tables: one
195 indexed by hard register number, and one indexed by mode. */
196
197 /* The purpose of m32r_mode_class is to shrink the range of modes so that
198 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
199 mapped into one m32r_mode_class mode. */
200
201 enum m32r_mode_class
202 {
203 C_MODE,
204 S_MODE, D_MODE, T_MODE, O_MODE,
205 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
206 };
207
208 /* Modes for condition codes. */
209 #define C_MODES (1 << (int) C_MODE)
210
211 /* Modes for single-word and smaller quantities. */
212 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
213
214 /* Modes for double-word and smaller quantities. */
215 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
216
217 /* Modes for quad-word and smaller quantities. */
218 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
219
220 /* Modes for accumulators. */
221 #define A_MODES (1 << (int) A_MODE)
222
223 /* Value is 1 if the register/mode pair is acceptable on the M32R. */
224
225 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
226 {
227 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
228 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
229 S_MODES, C_MODES, A_MODES, A_MODES
230 };
231
232 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
233
234 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
235
236 static void
237 init_reg_tables (void)
238 {
239 int i;
240
241 for (i = 0; i < NUM_MACHINE_MODES; i++)
242 {
243 switch (GET_MODE_CLASS (i))
244 {
245 case MODE_INT:
246 case MODE_PARTIAL_INT:
247 case MODE_COMPLEX_INT:
248 if (GET_MODE_SIZE (i) <= 4)
249 m32r_mode_class[i] = 1 << (int) S_MODE;
250 else if (GET_MODE_SIZE (i) == 8)
251 m32r_mode_class[i] = 1 << (int) D_MODE;
252 else if (GET_MODE_SIZE (i) == 16)
253 m32r_mode_class[i] = 1 << (int) T_MODE;
254 else if (GET_MODE_SIZE (i) == 32)
255 m32r_mode_class[i] = 1 << (int) O_MODE;
256 else
257 m32r_mode_class[i] = 0;
258 break;
259 case MODE_FLOAT:
260 case MODE_COMPLEX_FLOAT:
261 if (GET_MODE_SIZE (i) <= 4)
262 m32r_mode_class[i] = 1 << (int) SF_MODE;
263 else if (GET_MODE_SIZE (i) == 8)
264 m32r_mode_class[i] = 1 << (int) DF_MODE;
265 else if (GET_MODE_SIZE (i) == 16)
266 m32r_mode_class[i] = 1 << (int) TF_MODE;
267 else if (GET_MODE_SIZE (i) == 32)
268 m32r_mode_class[i] = 1 << (int) OF_MODE;
269 else
270 m32r_mode_class[i] = 0;
271 break;
272 case MODE_CC:
273 m32r_mode_class[i] = 1 << (int) C_MODE;
274 break;
275 default:
276 m32r_mode_class[i] = 0;
277 break;
278 }
279 }
280
281 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
282 {
283 if (GPR_P (i))
284 m32r_regno_reg_class[i] = GENERAL_REGS;
285 else if (i == ARG_POINTER_REGNUM)
286 m32r_regno_reg_class[i] = GENERAL_REGS;
287 else
288 m32r_regno_reg_class[i] = NO_REGS;
289 }
290 }
291 \f
292 /* M32R specific attribute support.
293
294 interrupt - for interrupt functions
295
296 model - select code model used to access object
297
298 small: addresses use 24 bits, use bl to make calls
299 medium: addresses use 32 bits, use bl to make calls
300 large: addresses use 32 bits, use seth/add3/jl to make calls
301
302 Grep for MODEL in m32r.h for more info. */
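/* Illustrative (hypothetical) user-level usage, not part of the original
   source:

       extern int table[64] __attribute__ ((model ("small")));
       void trap_handler (void) __attribute__ ((interrupt));

   "model" takes exactly one argument -- "small", "medium" or "large", or the
   __small__/__medium__/__large__ spellings accepted by
   m32r_handle_model_attribute -- while "interrupt" takes none, matching the
   attribute table below.  */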
303
304 static tree small_ident1;
305 static tree small_ident2;
306 static tree medium_ident1;
307 static tree medium_ident2;
308 static tree large_ident1;
309 static tree large_ident2;
310
311 static void
312 init_idents (void)
313 {
314 if (small_ident1 == 0)
315 {
316 small_ident1 = get_identifier ("small");
317 small_ident2 = get_identifier ("__small__");
318 medium_ident1 = get_identifier ("medium");
319 medium_ident2 = get_identifier ("__medium__");
320 large_ident1 = get_identifier ("large");
321 large_ident2 = get_identifier ("__large__");
322 }
323 }
324
325 const struct attribute_spec m32r_attribute_table[] =
326 {
327 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
328 { "interrupt", 0, 0, true, false, false, NULL },
329 { "model", 1, 1, true, false, false, m32r_handle_model_attribute },
330 { NULL, 0, 0, false, false, false, NULL }
331 };
332
333
334 /* Handle a "model" attribute; arguments as in
335 struct attribute_spec.handler. */
336 static tree
337 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
338 tree args, int flags ATTRIBUTE_UNUSED,
339 bool *no_add_attrs)
340 {
341 tree arg;
342
343 init_idents ();
344 arg = TREE_VALUE (args);
345
346 if (arg != small_ident1
347 && arg != small_ident2
348 && arg != medium_ident1
349 && arg != medium_ident2
350 && arg != large_ident1
351 && arg != large_ident2)
352 {
353 warning ("invalid argument of `%s' attribute",
354 IDENTIFIER_POINTER (name));
355 *no_add_attrs = true;
356 }
357
358 return NULL_TREE;
359 }
360 \f
361 /* Encode section information of DECL, which is either a VAR_DECL,
362 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
363
364 For the M32R we want to record:
365
366 - whether the object lives in .sdata/.sbss.
367 - what code model should be used to access the object
368 */
369
370 static void
371 m32r_encode_section_info (tree decl, rtx rtl, int first)
372 {
373 int extra_flags = 0;
374 tree model_attr;
375 enum m32r_model model;
376
377 default_encode_section_info (decl, rtl, first);
378
379 if (!DECL_P (decl))
380 return;
381
382 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
383 if (model_attr)
384 {
385 tree id;
386
387 init_idents ();
388
389 id = TREE_VALUE (TREE_VALUE (model_attr));
390
391 if (id == small_ident1 || id == small_ident2)
392 model = M32R_MODEL_SMALL;
393 else if (id == medium_ident1 || id == medium_ident2)
394 model = M32R_MODEL_MEDIUM;
395 else if (id == large_ident1 || id == large_ident2)
396 model = M32R_MODEL_LARGE;
397 else
398 abort (); /* shouldn't happen */
399 }
400 else
401 {
402 if (TARGET_MODEL_SMALL)
403 model = M32R_MODEL_SMALL;
404 else if (TARGET_MODEL_MEDIUM)
405 model = M32R_MODEL_MEDIUM;
406 else if (TARGET_MODEL_LARGE)
407 model = M32R_MODEL_LARGE;
408 else
409 abort (); /* shouldn't happen */
410 }
411 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
412
413 if (extra_flags)
414 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
415 }
416
417 /* Only mark the object as being small data area addressable if
418 it hasn't been explicitly marked with a code model.
419
420 The user can explicitly put an object in the small data area with the
421 section attribute. If the object is in sdata/sbss and marked with a
422 code model do both [put the object in .sdata and mark it as being
423 addressed with a specific code model - don't mark it as being addressed
424 with an SDA reloc though]. This is ok and might be useful at times. If
425 the object doesn't fit the linker will give an error. */
426
427 static bool
428 m32r_in_small_data_p (tree decl)
429 {
430 tree section;
431
432 if (TREE_CODE (decl) != VAR_DECL)
433 return false;
434
435 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
436 return false;
437
438 section = DECL_SECTION_NAME (decl);
439 if (section)
440 {
441 char *name = (char *) TREE_STRING_POINTER (section);
442 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
443 return true;
444 }
445 else
446 {
447 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
448 {
449 int size = int_size_in_bytes (TREE_TYPE (decl));
450
451 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
452 return true;
453 }
454 }
455
456 return false;
457 }
458
459 /* Do anything needed before RTL is emitted for each function. */
460
461 void
462 m32r_init_expanders (void)
463 {
464 /* ??? At one point there was code here. The function is left in
465 to make it easy to experiment. */
466 }
467 \f
468 /* Acceptable arguments to the call insn. */
469
470 int
471 call_address_operand (rtx op, enum machine_mode mode)
472 {
473 return symbolic_operand (op, mode);
474
475 /* Constants and values in registers are not OK, because
476 the m32r BL instruction can only support PC relative branching. */
477 }
478
479 int
480 call_operand (rtx op, enum machine_mode mode)
481 {
482 if (GET_CODE (op) != MEM)
483 return 0;
484 op = XEXP (op, 0);
485 return call_address_operand (op, mode);
486 }
487
488 /* Returns 1 if OP is a symbol reference. */
489
490 int
491 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
492 {
493 switch (GET_CODE (op))
494 {
495 case SYMBOL_REF:
496 case LABEL_REF:
497 case CONST :
498 return 1;
499
500 default:
501 return 0;
502 }
503 }
504
505 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
506
507 int
508 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
509 {
510 if (! TARGET_SDATA_USE)
511 return 0;
512
513 if (GET_CODE (op) == SYMBOL_REF)
514 return SYMBOL_REF_SMALL_P (op);
515
516 if (GET_CODE (op) == CONST
517 && GET_CODE (XEXP (op, 0)) == PLUS
518 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
519 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
520 && INT16_P (INTVAL (XEXP (XEXP (op, 0), 1))))
521 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
522
523 return 0;
524 }
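/* Illustrative operands (not part of the original source) accepted by this
   predicate when -msdata=use is in effect and the symbol was placed in
   .sdata/.sbss:

       (symbol_ref "small_var")
       (const (plus (symbol_ref "small_var") (const_int 8)))

   The hypothetical "small_var" must have SYMBOL_REF_SMALL_P set, and a
   constant offset outside the signed 16-bit range fails INT16_P and is
   rejected.  */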
525
526 /* Return 1 if OP is a symbol that can use 24 bit addressing. */
527
528 int
529 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
530 {
531 rtx sym;
532
533 if (flag_pic)
534 return 0;
535
536 if (GET_CODE (op) == LABEL_REF)
537 return TARGET_ADDR24;
538
539 if (GET_CODE (op) == SYMBOL_REF)
540 sym = op;
541 else if (GET_CODE (op) == CONST
542 && GET_CODE (XEXP (op, 0)) == PLUS
543 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
544 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
545 && UINT24_P (INTVAL (XEXP (XEXP (op, 0), 1))))
546 sym = XEXP (XEXP (op, 0), 0);
547 else
548 return 0;
549
550 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
551 return 1;
552
553 if (TARGET_ADDR24
554 && (CONSTANT_POOL_ADDRESS_P (sym)
555 || LIT_NAME_P (XSTR (sym, 0))))
556 return 1;
557
558 return 0;
559 }
560
561 /* Return 1 if OP is a symbol that needs 32 bit addressing. */
562
563 int
564 addr32_operand (rtx op, enum machine_mode mode)
565 {
566 rtx sym;
567
568 if (GET_CODE (op) == LABEL_REF)
569 return TARGET_ADDR32;
570
571 if (GET_CODE (op) == SYMBOL_REF)
572 sym = op;
573 else if (GET_CODE (op) == CONST
574 && GET_CODE (XEXP (op, 0)) == PLUS
575 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
576 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
577 && ! flag_pic)
578 sym = XEXP (XEXP (op, 0), 0);
579 else
580 return 0;
581
582 return (! addr24_operand (sym, mode)
583 && ! small_data_operand (sym, mode));
584 }
585
586 /* Return 1 if OP is a function that can be called with the `bl' insn. */
587
588 int
589 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
590 {
591 if (flag_pic)
592 return 1;
593
594 if (GET_CODE (op) == SYMBOL_REF)
595 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
596
597 return TARGET_CALL26;
598 }
599
600 /* Returns 1 if OP is an acceptable operand for seth/add3. */
601
602 int
603 seth_add3_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
604 {
605 if (flag_pic)
606 return 0;
607
608 if (GET_CODE (op) == SYMBOL_REF
609 || GET_CODE (op) == LABEL_REF)
610 return 1;
611
612 if (GET_CODE (op) == CONST
613 && GET_CODE (XEXP (op, 0)) == PLUS
614 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
615 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT
616 && INT16_P (INTVAL (XEXP (XEXP (op, 0), 1))))
617 return 1;
618
619 return 0;
620 }
621
622 /* Return true if OP is a signed 8 bit immediate value. */
623
624 int
625 int8_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
626 {
627 if (GET_CODE (op) != CONST_INT)
628 return 0;
629 return INT8_P (INTVAL (op));
630 }
631
632 /* Return true if OP is a signed 16 bit immediate value
633 useful in comparisons. */
634
635 int
636 cmp_int16_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
637 {
638 if (GET_CODE (op) != CONST_INT)
639 return 0;
640 return CMP_INT16_P (INTVAL (op));
641 }
642
643 /* Return true if OP is an unsigned 16 bit immediate value. */
644
645 int
646 uint16_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
647 {
648 if (GET_CODE (op) != CONST_INT)
649 return 0;
650 return UINT16_P (INTVAL (op));
651 }
652
653 /* Return true if OP is a register or signed 16 bit value. */
654
655 int
656 reg_or_int16_operand (rtx op, enum machine_mode mode)
657 {
658 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
659 return register_operand (op, mode);
660 if (GET_CODE (op) != CONST_INT)
661 return 0;
662 return INT16_P (INTVAL (op));
663 }
664
665 /* Return true if OP is a register or an unsigned 16 bit value. */
666
667 int
668 reg_or_uint16_operand (rtx op, enum machine_mode mode)
669 {
670 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
671 return register_operand (op, mode);
672 if (GET_CODE (op) != CONST_INT)
673 return 0;
674 return UINT16_P (INTVAL (op));
675 }
676
677 /* Return true if OP is a register or an integer value that can be
678 used in SEQ/SNE.  We can use either XOR of the value or ADD of
679 the negative of the value for the constant. Don't allow 0,
680 because that is special cased. */
681
682 int
683 reg_or_eq_int16_operand (rtx op, enum machine_mode mode)
684 {
685 HOST_WIDE_INT value;
686
687 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
688 return register_operand (op, mode);
689
690 if (GET_CODE (op) != CONST_INT)
691 return 0;
692
693 value = INTVAL (op);
694 return (value != 0) && (UINT16_P (value) || CMP_INT16_P (-value));
695 }
696
697 /* Return true if OP is a register or signed 16 bit value for compares. */
698
699 int
700 reg_or_cmp_int16_operand (rtx op, enum machine_mode mode)
701 {
702 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
703 return register_operand (op, mode);
704 if (GET_CODE (op) != CONST_INT)
705 return 0;
706 return CMP_INT16_P (INTVAL (op));
707 }
708
709 /* Return true if OP is a register or the constant 0. */
710
711 int
712 reg_or_zero_operand (rtx op, enum machine_mode mode)
713 {
714 if (GET_CODE (op) == REG || GET_CODE (op) == SUBREG)
715 return register_operand (op, mode);
716
717 if (GET_CODE (op) != CONST_INT)
718 return 0;
719
720 return INTVAL (op) == 0;
721 }
722
723 /* Return true if OP is a const_int requiring two instructions to load. */
724
725 int
726 two_insn_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
727 {
728 if (GET_CODE (op) != CONST_INT)
729 return 0;
730 if (INT16_P (INTVAL (op))
731 || UINT24_P (INTVAL (op))
732 || UPPER16_P (INTVAL (op)))
733 return 0;
734 return 1;
735 }
736
737 /* Return true if OP is an acceptable argument for a single word
738 move source. */
739
740 int
741 move_src_operand (rtx op, enum machine_mode mode)
742 {
743 switch (GET_CODE (op))
744 {
745 case LABEL_REF :
746 case SYMBOL_REF :
747 case CONST :
748 return addr24_operand (op, mode);
749 case CONST_INT :
750 /* ??? We allow more cse opportunities if we only allow constants
751 loadable with one insn, and split the rest into two. The instances
752 where this would help should be rare and the current way is
753 simpler. */
754 if (HOST_BITS_PER_WIDE_INT > 32)
755 {
756 HOST_WIDE_INT rest = INTVAL (op) >> 31;
757 return (rest == 0 || rest == -1);
758 }
759 else
760 return 1;
761 case CONST_DOUBLE :
762 if (mode == SFmode)
763 return 1;
764 else if (mode == SImode)
765 {
766 /* Large unsigned constants are represented as const_double's. */
767 unsigned HOST_WIDE_INT low, high;
768
769 low = CONST_DOUBLE_LOW (op);
770 high = CONST_DOUBLE_HIGH (op);
771 return high == 0 && low <= (unsigned) 0xffffffff;
772 }
773 else
774 return 0;
775 case REG :
776 return register_operand (op, mode);
777 case SUBREG :
778 /* (subreg (mem ...) ...) can occur here if the inner part was once a
779 pseudo-reg and is now a stack slot. */
780 if (GET_CODE (SUBREG_REG (op)) == MEM)
781 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
782 else
783 return register_operand (op, mode);
784 case MEM :
785 if (GET_CODE (XEXP (op, 0)) == PRE_INC
786 || GET_CODE (XEXP (op, 0)) == PRE_DEC)
787 return 0; /* loads can't do pre-{inc,dec} */
788 return address_operand (XEXP (op, 0), mode);
789 default :
790 return 0;
791 }
792 }
793
794 /* Return true if OP is an acceptable argument for a double word
795 move source. */
796
797 int
798 move_double_src_operand (rtx op, enum machine_mode mode)
799 {
800 switch (GET_CODE (op))
801 {
802 case CONST_INT :
803 case CONST_DOUBLE :
804 return 1;
805 case REG :
806 return register_operand (op, mode);
807 case SUBREG :
808 /* (subreg (mem ...) ...) can occur here if the inner part was once a
809 pseudo-reg and is now a stack slot. */
810 if (GET_CODE (SUBREG_REG (op)) == MEM)
811 return move_double_src_operand (SUBREG_REG (op), mode);
812 else
813 return register_operand (op, mode);
814 case MEM :
815 /* Disallow auto inc/dec for now. */
816 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
817 || GET_CODE (XEXP (op, 0)) == PRE_INC)
818 return 0;
819 return address_operand (XEXP (op, 0), mode);
820 default :
821 return 0;
822 }
823 }
824
825 /* Return true if OP is an acceptable argument for a move destination. */
826
827 int
828 move_dest_operand (rtx op, enum machine_mode mode)
829 {
830 switch (GET_CODE (op))
831 {
832 case REG :
833 return register_operand (op, mode);
834 case SUBREG :
835 /* (subreg (mem ...) ...) can occur here if the inner part was once a
836 pseudo-reg and is now a stack slot. */
837 if (GET_CODE (SUBREG_REG (op)) == MEM)
838 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
839 else
840 return register_operand (op, mode);
841 case MEM :
842 if (GET_CODE (XEXP (op, 0)) == POST_INC)
843 return 0; /* stores can't do post inc */
844 return address_operand (XEXP (op, 0), mode);
845 default :
846 return 0;
847 }
848 }
849
850 /* Return 1 if OP is a DImode const we want to handle inline.
851 This must match the code in the movdi pattern.
852 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
853
854 int
855 easy_di_const (rtx op)
856 {
857 rtx high_rtx, low_rtx;
858 HOST_WIDE_INT high, low;
859
860 split_double (op, &high_rtx, &low_rtx);
861 high = INTVAL (high_rtx);
862 low = INTVAL (low_rtx);
863 /* Pick constants loadable with 2 16 bit `ldi' insns. */
864 if (high >= -128 && high <= 127
865 && low >= -128 && low <= 127)
866 return 1;
867 return 0;
868 }
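/* Worked example (not part of the original source): for the DImode constant
   0x0000007b00000015, split_double yields the word values 123 and 21; both
   lie in [-128, 127], so each half can be loaded with a single `ldi' and the
   function returns 1.  A value such as 300 in either word falls outside that
   range and is left to the general movdi code.  */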
869
870 /* Return 1 if OP is a DFmode const we want to handle inline.
871 This must match the code in the movdf pattern.
872 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
873
874 int
875 easy_df_const (rtx op)
876 {
877 REAL_VALUE_TYPE r;
878 long l[2];
879
880 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
881 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
882 if (l[0] == 0 && l[1] == 0)
883 return 1;
884 if ((l[0] & 0xffff) == 0 && l[1] == 0)
885 return 1;
886 return 0;
887 }
888
889 /* Return 1 if OP is an EQ or NE comparison operator. */
890
891 int
892 eqne_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
893 {
894 enum rtx_code code = GET_CODE (op);
895
896 return (code == EQ || code == NE);
897 }
898
899 /* Return 1 if OP is a signed comparison operator. */
900
901 int
902 signed_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
903 {
904 enum rtx_code code = GET_CODE (op);
905
906 return (COMPARISON_P (op)
907 && (code == EQ || code == NE
908 || code == LT || code == LE || code == GT || code == GE));
909 }
910
911 /* Return 1 if OP is (mem (reg ...)).
912 This is used in insn length calcs. */
913
914 int
915 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
916 {
917 return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) == REG;
918 }
919
920 /* Return true if OP is an acceptable input argument for a zero/sign extend
921 operation. */
922
923 int
924 extend_operand (rtx op, enum machine_mode mode)
925 {
926 rtx addr;
927
928 switch (GET_CODE (op))
929 {
930 case REG :
931 case SUBREG :
932 return register_operand (op, mode);
933
934 case MEM :
935 addr = XEXP (op, 0);
936 if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
937 return 0; /* loads can't do pre inc/pre dec */
938
939 return address_operand (addr, mode);
940
941 default :
942 return 0;
943 }
944 }
945
946 /* Return nonzero if the operand is an insn that is a small insn.
947 Allow const_int 0 as well, which is a placeholder for NOP slots. */
948
949 int
950 small_insn_p (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
951 {
952 if (GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
953 return 1;
954
955 if (! INSN_P (op))
956 return 0;
957
958 return get_attr_length (op) == 2;
959 }
960
961 /* Return nonzero if the operand is an insn that is a large insn. */
962
963 int
964 large_insn_p (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
965 {
966 if (! INSN_P (op))
967 return 0;
968
969 return get_attr_length (op) != 2;
970 }
971
972 /* Return nonzero if TYPE must be passed or returned in memory.
973 The m32r treats both directions the same so we handle both directions
974 in this function. */
975
976 int
977 m32r_pass_by_reference (tree type)
978 {
979 int size = int_size_in_bytes (type);
980
981 if (size < 0 || size > 8)
982 return 1;
983
984 return 0;
985 }
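/* Worked example (not part of the original source): an 8-byte struct has
   int_size_in_bytes == 8 and is passed and returned in registers (result 0);
   a 12-byte struct, or a variable-sized type for which int_size_in_bytes is
   negative, goes in memory (result 1).  */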
986 \f
987 /* Comparisons. */
988
989 /* X and Y are two things to compare using CODE. Emit the compare insn and
990 return the rtx for compare [arg0 of the if_then_else].
991 If need_compare is true then the comparison insn must be generated, rather
992 than being subsumed into the following branch instruction. */
993
994 rtx
995 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
996 {
997 enum rtx_code compare_code;
998 enum rtx_code branch_code;
999 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
1000 int must_swap = 0;
1001
1002 switch (code)
1003 {
1004 case EQ: compare_code = EQ; branch_code = NE; break;
1005 case NE: compare_code = EQ; branch_code = EQ; break;
1006 case LT: compare_code = LT; branch_code = NE; break;
1007 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
1008 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
1009 case GE: compare_code = LT; branch_code = EQ; break;
1010 case LTU: compare_code = LTU; branch_code = NE; break;
1011 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
1012 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
1013 case GEU: compare_code = LTU; branch_code = EQ; break;
1014
1015 default:
1016 abort ();
1017 }
1018
1019 if (need_compare)
1020 {
1021 switch (compare_code)
1022 {
1023 case EQ:
1024 if (GET_CODE (y) == CONST_INT
1025 && CMP_INT16_P (INTVAL (y)) /* Reg equal to small const. */
1026 && y != const0_rtx)
1027 {
1028 rtx tmp = gen_reg_rtx (SImode);
1029
1030 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
1031 x = tmp;
1032 y = const0_rtx;
1033 }
1034 else if (CONSTANT_P (y)) /* Reg equal to const. */
1035 {
1036 rtx tmp = force_reg (GET_MODE (x), y);
1037 y = tmp;
1038 }
1039
1040 if (register_operand (y, SImode) /* Reg equal to reg. */
1041 || y == const0_rtx) /* Reg equal to zero. */
1042 {
1043 emit_insn (gen_cmp_eqsi_insn (x, y));
1044
1045 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
1046 }
1047 break;
1048
1049 case LT:
1050 if (register_operand (y, SImode)
1051 || (GET_CODE (y) == CONST_INT && CMP_INT16_P (INTVAL (y))))
1052 {
1053 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
1054
1055 switch (code)
1056 {
1057 case LT:
1058 emit_insn (gen_cmp_ltsi_insn (x, y));
1059 code = EQ;
1060 break;
1061 case LE:
1062 if (y == const0_rtx)
1063 tmp = const1_rtx;
1064 else
1065 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1066 emit_insn (gen_cmp_ltsi_insn (x, tmp));
1067 code = EQ;
1068 break;
1069 case GT:
1070 if (GET_CODE (y) == CONST_INT)
1071 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
1072 else
1073 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1074 emit_insn (gen_cmp_ltsi_insn (x, tmp));
1075 code = NE;
1076 break;
1077 case GE:
1078 emit_insn (gen_cmp_ltsi_insn (x, y));
1079 code = NE;
1080 break;
1081 default:
1082 abort ();
1083 }
1084
1085 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
1086 }
1087 break;
1088
1089 case LTU:
1090 if (register_operand (y, SImode)
1091 || (GET_CODE (y) == CONST_INT && CMP_INT16_P (INTVAL (y))))
1092 {
1093 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
1094
1095 switch (code)
1096 {
1097 case LTU:
1098 emit_insn (gen_cmp_ltusi_insn (x, y));
1099 code = EQ;
1100 break;
1101 case LEU:
1102 if (y == const0_rtx)
1103 tmp = const1_rtx;
1104 else
1105 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1106 emit_insn (gen_cmp_ltusi_insn (x, tmp));
1107 code = EQ;
1108 break;
1109 case GTU:
1110 if (GET_CODE (y) == CONST_INT)
1111 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
1112 else
1113 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
1114 emit_insn (gen_cmp_ltusi_insn (x, tmp));
1115 code = NE;
1116 break;
1117 case GEU:
1118 emit_insn (gen_cmp_ltusi_insn (x, y));
1119 code = NE;
1120 break;
1121 default:
1122 abort();
1123 }
1124
1125 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
1126 }
1127 break;
1128
1129 default:
1130 abort();
1131 }
1132 }
1133 else
1134 {
1135 /* Reg/reg equal comparison. */
1136 if (compare_code == EQ
1137 && register_operand (y, SImode))
1138 return gen_rtx_fmt_ee (code, CCmode, x, y);
1139
1140 /* Reg/zero signed comparison. */
1141 if ((compare_code == EQ || compare_code == LT)
1142 && y == const0_rtx)
1143 return gen_rtx_fmt_ee (code, CCmode, x, y);
1144
1145 /* Reg/smallconst equal comparison. */
1146 if (compare_code == EQ
1147 && GET_CODE (y) == CONST_INT
1148 && CMP_INT16_P (INTVAL (y)))
1149 {
1150 rtx tmp = gen_reg_rtx (SImode);
1151
1152 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
1153 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
1154 }
1155
1156 /* Reg/const equal comparison. */
1157 if (compare_code == EQ
1158 && CONSTANT_P (y))
1159 {
1160 rtx tmp = force_reg (GET_MODE (x), y);
1161
1162 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
1163 }
1164 }
1165
1166 if (CONSTANT_P (y))
1167 {
1168 if (must_swap)
1169 y = force_reg (GET_MODE (x), y);
1170 else
1171 {
1172 int ok_const =
1173 (code == LTU || code == LEU || code == GTU || code == GEU)
1174 ? uint16_operand (y, GET_MODE (y))
1175 : reg_or_cmp_int16_operand (y, GET_MODE (y));
1176
1177 if (! ok_const)
1178 y = force_reg (GET_MODE (x), y);
1179 }
1180 }
1181
1182 switch (compare_code)
1183 {
1184 case EQ :
1185 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
1186 break;
1187 case LT :
1188 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
1189 break;
1190 case LTU :
1191 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
1192 break;
1193
1194 default:
1195 abort ();
1196 }
1197
1198 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
1199 }
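/* Worked example (not part of the original source): only EQ, LT and LTU
   compare insns exist here, so the switch at the top maps (GT x y) to
   compare_code LT, branch_code NE, must_swap 1.  When y is a register and no
   separate compare is forced, the code at the bottom therefore emits a
   compare of y against x (roughly "cmp y,x", setting the condition bit when
   y < x, i.e. when x > y) and returns (ne cc_reg 0), so the following branch
   is taken exactly when x > y.  */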
1200 \f
1201 /* Split a 2 word move (DI or DF) into component parts. */
1202
1203 rtx
1204 gen_split_move_double (rtx operands[])
1205 {
1206 enum machine_mode mode = GET_MODE (operands[0]);
1207 rtx dest = operands[0];
1208 rtx src = operands[1];
1209 rtx val;
1210
1211 /* We might have (SUBREG (MEM)) here, so just get rid of the
1212 subregs to make this code simpler. It is safe to call
1213 alter_subreg any time after reload. */
1214 if (GET_CODE (dest) == SUBREG)
1215 alter_subreg (&dest);
1216 if (GET_CODE (src) == SUBREG)
1217 alter_subreg (&src);
1218
1219 start_sequence ();
1220 if (GET_CODE (dest) == REG)
1221 {
1222 int dregno = REGNO (dest);
1223
1224 /* Reg = reg. */
1225 if (GET_CODE (src) == REG)
1226 {
1227 int sregno = REGNO (src);
1228
1229 int reverse = (dregno == sregno + 1);
1230
1231 /* We normally copy the low-numbered register first. However, if
1232 the first register of operand 0 is the same as the second register of
1233 operand 1, we must copy in the opposite order. */
1234 emit_insn (gen_rtx_SET (VOIDmode,
1235 operand_subword (dest, reverse, TRUE, mode),
1236 operand_subword (src, reverse, TRUE, mode)));
1237
1238 emit_insn (gen_rtx_SET (VOIDmode,
1239 operand_subword (dest, !reverse, TRUE, mode),
1240 operand_subword (src, !reverse, TRUE, mode)));
1241 }
1242
1243 /* Reg = constant. */
1244 else if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
1245 {
1246 rtx words[2];
1247 split_double (src, &words[0], &words[1]);
1248 emit_insn (gen_rtx_SET (VOIDmode,
1249 operand_subword (dest, 0, TRUE, mode),
1250 words[0]));
1251
1252 emit_insn (gen_rtx_SET (VOIDmode,
1253 operand_subword (dest, 1, TRUE, mode),
1254 words[1]));
1255 }
1256
1257 /* Reg = mem. */
1258 else if (GET_CODE (src) == MEM)
1259 {
1260 /* If the high-address word is used in the address, we must load it
1261 last. Otherwise, load it first. */
1262 int reverse
1263 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1264
1265 /* We used to optimize loads through a single address register as
1266
1267 ld r1,r3+; ld r2,r3
1268
1269 if r3 were not used subsequently. However, the REG_NOTES aren't
1270 propagated correctly by the reload phase, and it can cause bad
1271 code to be generated. We could still try:
1272
1273 ld r1,r3+; ld r2,r3; addi r3,-4
1274
1275 which saves 2 bytes and doesn't force longword alignment. */
1276 emit_insn (gen_rtx_SET (VOIDmode,
1277 operand_subword (dest, reverse, TRUE, mode),
1278 adjust_address (src, SImode,
1279 reverse * UNITS_PER_WORD)));
1280
1281 emit_insn (gen_rtx_SET (VOIDmode,
1282 operand_subword (dest, !reverse, TRUE, mode),
1283 adjust_address (src, SImode,
1284 !reverse * UNITS_PER_WORD)));
1285 }
1286 else
1287 abort ();
1288 }
1289
1290 /* Mem = reg. */
1291 /* We used to optimize stores through a single address register as
1292
1293 st r1,r3; st r2,+r3
1294
1295 if r3 were not used subsequently. However, the REG_NOTES aren't
1296 propagated correctly by the reload phase, and it can cause bad
1297 code to be generated. We could still try:
1298
1299 st r1,r3; st r2,+r3; addi r3,-4
1300
1301 which saves 2 bytes and doesn't force longword alignment. */
1302 else if (GET_CODE (dest) == MEM && GET_CODE (src) == REG)
1303 {
1304 emit_insn (gen_rtx_SET (VOIDmode,
1305 adjust_address (dest, SImode, 0),
1306 operand_subword (src, 0, TRUE, mode)));
1307
1308 emit_insn (gen_rtx_SET (VOIDmode,
1309 adjust_address (dest, SImode, UNITS_PER_WORD),
1310 operand_subword (src, 1, TRUE, mode)));
1311 }
1312
1313 else
1314 abort ();
1315
1316 val = get_insns ();
1317 end_sequence ();
1318 return val;
1319 }
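/* Worked example (not part of the original source): splitting the DImode
   register copy r2:r3 = r1:r2 hits the "Reg = reg" case with dregno == 2 and
   sregno == 1, so reverse is 1 and the high word is copied first, roughly

       mv r3,r2
       mv r2,r1

   Copying the low word first would overwrite r2 before its old value had been
   read as the source's second word.  */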
1320
1321 \f
1322 /* Implements the FUNCTION_ARG_PARTIAL_NREGS macro. */
1323
1324 int
1325 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1326 tree type, int named ATTRIBUTE_UNUSED)
1327 {
1328 int ret;
1329 unsigned int size =
1330 (((mode == BLKmode && type)
1331 ? (unsigned int) int_size_in_bytes (type)
1332 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1333 / UNITS_PER_WORD;
1334
1335 if (*cum >= M32R_MAX_PARM_REGS)
1336 ret = 0;
1337 else if (*cum + size > M32R_MAX_PARM_REGS)
1338 ret = (*cum + size) - M32R_MAX_PARM_REGS;
1339 else
1340 ret = 0;
1341
1342 return ret;
1343 }
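/* Worked example (not part of the original source), assuming the usual
   M32R_MAX_PARM_REGS of 4: with *cum == 3 (three argument words already
   assigned to registers) and an 8-byte DImode argument (size == 2 words),
   3 + 2 > 4, so the result is 1 -- one word travels in the last argument
   register and the other is pushed on the stack.  With *cum >= 4 the result
   is 0 and the whole argument goes on the stack.  */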
1344
1345 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1346
1347 static bool
1348 m32r_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
1349 {
1350 return m32r_pass_by_reference (type);
1351 }
1352
1353 /* Do any needed setup for a variadic function. For the M32R, we must
1354 create a register parameter block, and then copy any anonymous arguments
1355 in registers to memory.
1356
1357 CUM has not been updated for the last named argument which has type TYPE
1358 and mode MODE, and we rely on this fact. */
1359
1360 static void
1361 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1362 tree type, int *pretend_size, int no_rtl)
1363 {
1364 int first_anon_arg;
1365
1366 if (no_rtl)
1367 return;
1368
1369 /* All BLKmode values are passed by reference. */
1370 if (mode == BLKmode)
1371 abort ();
1372
1373 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1374 + ROUND_ADVANCE_ARG (mode, type));
1375
1376 if (first_anon_arg < M32R_MAX_PARM_REGS)
1377 {
1378 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1379 int first_reg_offset = first_anon_arg;
1380 /* Size in words to "pretend" allocate. */
1381 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1382 rtx regblock;
1383
1384 regblock = gen_rtx_MEM (BLKmode,
1385 plus_constant (arg_pointer_rtx,
1386 FIRST_PARM_OFFSET (0)));
1387 set_mem_alias_set (regblock, get_varargs_alias_set ());
1388 move_block_from_reg (first_reg_offset, regblock, size);
1389
1390 *pretend_size = (size * UNITS_PER_WORD);
1391 }
1392 }
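/* Worked example (not part of the original source), again assuming
   M32R_MAX_PARM_REGS is 4: for a variadic function whose only named argument
   is an int, first_anon_arg is 1, so the three remaining argument registers
   (r1-r3 if arguments start in r0) are dumped into the register parameter
   block and *pretend_size becomes 3 * UNITS_PER_WORD == 12 bytes.  */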
1393
1394 \f
1395 /* Implement `va_arg'. */
1396
1397 rtx
1398 m32r_va_arg (tree valist, tree type)
1399 {
1400 HOST_WIDE_INT size, rsize;
1401 tree t;
1402 rtx addr_rtx;
1403
1404 size = int_size_in_bytes (type);
1405 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
1406
1407 if (m32r_pass_by_reference (type))
1408 {
1409 tree type_ptr, type_ptr_ptr;
1410
1411 /* Pass by reference. */
1412 type_ptr = build_pointer_type (type);
1413 type_ptr_ptr = build_pointer_type (type_ptr);
1414
1415 t = build (POSTINCREMENT_EXPR, va_list_type_node, valist,
1416 build_int_2 (UNITS_PER_WORD, 0));
1417 TREE_SIDE_EFFECTS (t) = 1;
1418 t = build1 (NOP_EXPR, type_ptr_ptr, t);
1419 TREE_SIDE_EFFECTS (t) = 1;
1420 t = build1 (INDIRECT_REF, type_ptr, t);
1421
1422 addr_rtx = expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
1423 }
1424 else
1425 {
1426 /* Pass by value. */
1427 if (size < UNITS_PER_WORD)
1428 {
1429 /* Care for bigendian correction on the aligned address. */
1430 t = build (PLUS_EXPR, ptr_type_node, valist,
1431 build_int_2 (rsize - size, 0));
1432 addr_rtx = expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
1433 addr_rtx = copy_to_reg (addr_rtx);
1434
1435 /* Increment AP. */
1436 t = build (PLUS_EXPR, va_list_type_node, valist,
1437 build_int_2 (rsize, 0));
1438 t = build (MODIFY_EXPR, va_list_type_node, valist, t);
1439 TREE_SIDE_EFFECTS (t) = 1;
1440 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1441 }
1442 else
1443 {
1444 t = build (POSTINCREMENT_EXPR, va_list_type_node, valist,
1445 build_int_2 (rsize, 0));
1446 TREE_SIDE_EFFECTS (t) = 1;
1447 addr_rtx = expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
1448 }
1449 }
1450
1451 return addr_rtx;
1452 }
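/* Worked example (not part of the original source): fetching a 2-byte struct
   gives size == 2 and rsize == 4; on this big-endian port the value is read
   from valist + 2 and the pointer is then advanced by the full 4 bytes.  A
   12-byte struct is passed by reference instead, so a pointer is loaded from
   the list and the list is advanced by UNITS_PER_WORD.  */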
1453 \f
1454 /* Return true if INSN is an insn that carries a real instruction. */
1455
1456 static int
1457 m32r_is_insn (rtx insn)
1458 {
1459 return (INSN_P (insn)
1460 && GET_CODE (PATTERN (insn)) != USE
1461 && GET_CODE (PATTERN (insn)) != CLOBBER
1462 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1463 }
1464
1465 /* Increase the priority of long instructions so that the
1466 short instructions are scheduled ahead of the long ones. */
1467
1468 static int
1469 m32r_adjust_priority (rtx insn, int priority)
1470 {
1471 if (m32r_is_insn (insn)
1472 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1473 priority <<= 3;
1474
1475 return priority;
1476 }
1477
1478 \f
1479 /* Indicate how many instructions can be issued at the same time.
1480 This is sort of a lie. The m32r can issue only 1 long insn at
1481 once, but it can issue 2 short insns. The default therefore is
1482 set at 2, but this can be overridden by the command line option
1483 -missue-rate=1. */
1484
1485 static int
1486 m32r_issue_rate (void)
1487 {
1488 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1489 }
1490 \f
1491 /* Cost functions. */
1492
1493 static bool
1494 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
1495 {
1496 switch (code)
1497 {
1498 /* Small integers are as cheap as registers. 4 byte values can be
1499 fetched as immediate constants - let's give that the cost of an
1500 extra insn. */
1501 case CONST_INT:
1502 if (INT16_P (INTVAL (x)))
1503 {
1504 *total = 0;
1505 return true;
1506 }
1507 /* FALLTHRU */
1508
1509 case CONST:
1510 case LABEL_REF:
1511 case SYMBOL_REF:
1512 *total = COSTS_N_INSNS (1);
1513 return true;
1514
1515 case CONST_DOUBLE:
1516 {
1517 rtx high, low;
1518
1519 split_double (x, &high, &low);
1520 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1521 + !INT16_P (INTVAL (low)));
1522 return true;
1523 }
1524
1525 case MULT:
1526 *total = COSTS_N_INSNS (3);
1527 return true;
1528
1529 case DIV:
1530 case UDIV:
1531 case MOD:
1532 case UMOD:
1533 *total = COSTS_N_INSNS (10);
1534 return true;
1535
1536 default:
1537 return false;
1538 }
1539 }
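/* Illustrative cost values (not part of the original source): a constant such
   as 100 fits in 16 bits and is free; 0x12345 does not, so it costs one insn,
   as do symbol and label references; a CONST_DOUBLE whose two words both need
   full loads costs two insns; a multiply costs three, a divide or modulus
   ten.  */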
1540 \f
1541 /* Type of function DECL.
1542
1543 The result is cached. To reset the cache at the end of a function,
1544 call with DECL = NULL_TREE. */
1545
1546 enum m32r_function_type
1547 m32r_compute_function_type (tree decl)
1548 {
1549 /* Cached value. */
1550 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1551 /* Last function we were called for. */
1552 static tree last_fn = NULL_TREE;
1553
1554 /* Resetting the cached value? */
1555 if (decl == NULL_TREE)
1556 {
1557 fn_type = M32R_FUNCTION_UNKNOWN;
1558 last_fn = NULL_TREE;
1559 return fn_type;
1560 }
1561
1562 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1563 return fn_type;
1564
1565 /* Compute function type. */
1566 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1567 ? M32R_FUNCTION_INTERRUPT
1568 : M32R_FUNCTION_NORMAL);
1569
1570 last_fn = decl;
1571 return fn_type;
1572 }
1573 \f/* Function prologue/epilogue handlers. */
1574
1575 /* M32R stack frames look like:
1576
1577 Before call After call
1578 +-----------------------+ +-----------------------+
1579 | | | |
1580 high | local variables, | | local variables, |
1581 mem | reg save area, etc. | | reg save area, etc. |
1582 | | | |
1583 +-----------------------+ +-----------------------+
1584 | | | |
1585 | arguments on stack. | | arguments on stack. |
1586 | | | |
1587 SP+0->+-----------------------+ +-----------------------+
1588 | reg parm save area, |
1589 | only created for |
1590 | variable argument |
1591 | functions |
1592 +-----------------------+
1593 | previous frame ptr |
1594 +-----------------------+
1595 | |
1596 | register save area |
1597 | |
1598 +-----------------------+
1599 | return address |
1600 +-----------------------+
1601 | |
1602 | local variables |
1603 | |
1604 +-----------------------+
1605 | |
1606 | alloca allocations |
1607 | |
1608 +-----------------------+
1609 | |
1610 low | arguments on stack |
1611 memory | |
1612 SP+0->+-----------------------+
1613
1614 Notes:
1615 1) The "reg parm save area" does not exist for non variable argument fns.
1616 2) The "reg parm save area" can be eliminated completely if we saved regs
1617 containing anonymous args separately but that complicates things too
1618 much (so it's not done).
1619 3) The return address is saved after the register save area so as to have as
1620 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1621
1622 /* Structure to be filled in by m32r_compute_frame_size with register
1623 save masks, and offsets for the current function. */
1624 struct m32r_frame_info
1625 {
1626 unsigned int total_size; /* # bytes that the entire frame takes up. */
1627 unsigned int extra_size; /* # bytes of extra stuff. */
1628 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1629 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1630 unsigned int reg_size; /* # bytes needed to store regs. */
1631 unsigned int var_size; /* # bytes that variables take up. */
1632 unsigned int gmask; /* Mask of saved gp registers. */
1633 unsigned int save_fp; /* Nonzero if fp must be saved. */
1634 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1635 int initialized; /* Nonzero if frame size already calculated. */
1636 };
1637
1638 /* Current frame information calculated by m32r_compute_frame_size. */
1639 static struct m32r_frame_info current_frame_info;
1640
1641 /* Zero structure to initialize current_frame_info. */
1642 static struct m32r_frame_info zero_frame_info;
1643
1644 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1645 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1646
1647 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1648 The return address and frame pointer are treated separately.
1649 Don't consider them here. */
1650 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1651 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1652 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1653
1654 #define MUST_SAVE_FRAME_POINTER (regs_ever_live[FRAME_POINTER_REGNUM])
1655 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM] || current_function_profile)
1656
1657 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1658 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1659
1660 /* Return the bytes needed to compute the frame pointer from the current
1661 stack pointer.
1662
1663 SIZE is the size needed for local variables. */
1664
1665 unsigned int
1666 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1667 {
1668 int regno;
1669 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1670 unsigned int reg_size, frame_size;
1671 unsigned int gmask;
1672 enum m32r_function_type fn_type;
1673 int interrupt_p;
1674 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table);
1675
1676 var_size = M32R_STACK_ALIGN (size);
1677 args_size = M32R_STACK_ALIGN (current_function_outgoing_args_size);
1678 pretend_size = current_function_pretend_args_size;
1679 extra_size = FIRST_PARM_OFFSET (0);
1680 total_size = extra_size + pretend_size + args_size + var_size;
1681 reg_size = 0;
1682 gmask = 0;
1683
1684 /* See if this is an interrupt handler. Call used registers must be saved
1685 for them too. */
1686 fn_type = m32r_compute_function_type (current_function_decl);
1687 interrupt_p = M32R_INTERRUPT_P (fn_type);
1688
1689 /* Calculate space needed for registers. */
1690 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1691 {
1692 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1693 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1694 {
1695 reg_size += UNITS_PER_WORD;
1696 gmask |= 1 << regno;
1697 }
1698 }
1699
1700 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1701 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1702
1703 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1704 * UNITS_PER_WORD);
1705 total_size += reg_size;
1706
1707 /* ??? Not sure this is necessary, and I don't think the epilogue
1708 handler will do the right thing if this changes total_size. */
1709 total_size = M32R_STACK_ALIGN (total_size);
1710
1711 frame_size = total_size - (pretend_size + reg_size);
1712
1713 /* Save computed information. */
1714 current_frame_info.total_size = total_size;
1715 current_frame_info.extra_size = extra_size;
1716 current_frame_info.pretend_size = pretend_size;
1717 current_frame_info.var_size = var_size;
1718 current_frame_info.args_size = args_size;
1719 current_frame_info.reg_size = reg_size;
1720 current_frame_info.gmask = gmask;
1721 current_frame_info.initialized = reload_completed;
1722
1723 /* Ok, we're done. */
1724 return total_size;
1725 }
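/* Worked example (not part of the original source), assuming
   FIRST_PARM_OFFSET (0) is 0 and no outgoing arguments are passed on the
   stack: a non-varargs, non-PIC function with 20 bytes of locals that needs
   `lr' and two other call-saved registers gets var_size == 20,
   reg_size == 3 * UNITS_PER_WORD == 12 and total_size == 32; the stack
   adjustment emitted after the register pushes is then
   total_size - (pretend_size + reg_size) == 20 bytes.  */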
1726 \f
1727 /* The table we use to reference PIC data. */
1728 static rtx global_offset_table;
1729
1730 static void
1731 m32r_reload_lr (rtx sp, int size)
1732 {
1733 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1734
1735 if (size == 0)
1736 emit_insn (gen_movsi (lr, gen_rtx_MEM (Pmode, sp)));
1737 else if (size <= 32768)
1738 emit_insn (gen_movsi (lr, gen_rtx_MEM (Pmode,
1739 gen_rtx_PLUS (Pmode, sp,
1740 GEN_INT (size)))));
1741 else
1742 {
1743 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1744
1745 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1746 emit_insn (gen_addsi3 (tmp, tmp, sp));
1747 emit_insn (gen_movsi (lr, gen_rtx_MEM (Pmode, tmp)));
1748 }
1749
1750 emit_insn (gen_rtx_USE (VOIDmode, lr));
1751 }
1752
1753 void
1754 m32r_load_pic_register (void)
1755 {
1756 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1757 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1758 GEN_INT (TARGET_MODEL_SMALL)));
1759
1760 /* Need to emit this whether or not we obey regdecls,
1761 since setjmp/longjmp can cause life info to screw up. */
1762 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
1763 }
1764
1765 /* Expand the m32r prologue as a series of insns. */
1766
1767 void
1768 m32r_expand_prologue (void)
1769 {
1770 int regno;
1771 int frame_size;
1772 unsigned int gmask;
1773 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table);
1774
1775 if (! current_frame_info.initialized)
1776 m32r_compute_frame_size (get_frame_size ());
1777
1778 gmask = current_frame_info.gmask;
1779
1780 /* These cases shouldn't happen. Catch them now. */
1781 if (current_frame_info.total_size == 0 && gmask)
1782 abort ();
1783
1784 /* Allocate space for register arguments if this is a variadic function. */
1785 if (current_frame_info.pretend_size != 0)
1786 {
1787 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1788 the wrong result on a 64-bit host. */
1789 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1790 emit_insn (gen_addsi3 (stack_pointer_rtx,
1791 stack_pointer_rtx,
1792 GEN_INT (-pretend_size)));
1793 }
1794
1795 /* Save any registers we need to and set up fp. */
1796 if (current_frame_info.save_fp)
1797 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1798
1799 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1800
1801 /* Save any needed call-saved regs (and call-used if this is an
1802 interrupt handler). */
1803 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1804 {
1805 if ((gmask & (1 << regno)) != 0)
1806 emit_insn (gen_movsi_push (stack_pointer_rtx,
1807 gen_rtx_REG (Pmode, regno)));
1808 }
1809
1810 if (current_frame_info.save_lr)
1811 emit_insn (gen_movsi_push (stack_pointer_rtx,
1812 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1813
1814 /* Allocate the stack frame. */
1815 frame_size = (current_frame_info.total_size
1816 - (current_frame_info.pretend_size
1817 + current_frame_info.reg_size));
1818
1819 if (frame_size == 0)
1820 ; /* Nothing to do. */
1821 else if (frame_size <= 32768)
1822 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1823 GEN_INT (-frame_size)));
1824 else
1825 {
1826 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1827
1828 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1829 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1830 }
1831
1832 if (frame_pointer_needed)
1833 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1834
1835 if (current_function_profile)
1836 /* Push lr for mcount (from_pc, x). */
1837 emit_insn (gen_movsi_push (stack_pointer_rtx,
1838 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1839
1840 if (pic_reg_used)
1841 {
1842 m32r_load_pic_register ();
1843 m32r_reload_lr (stack_pointer_rtx,
1844 (current_function_profile ? 0 : frame_size));
1845 }
1846
1847 if (current_function_profile && !pic_reg_used)
1848 emit_insn (gen_blockage ());
1849 }
1850
1851 \f
1852 /* Set up the stack and frame pointer (if desired) for the function.
1853 Note, if this is changed, you need to mirror the changes in
1854 m32r_compute_frame_size which calculates the prolog size. */
1855
1856 static void
1857 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1858 {
1859 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1860
1861 /* If this is an interrupt handler, mark it as such. */
1862 if (M32R_INTERRUPT_P (fn_type))
1863 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1864
1865 if (! current_frame_info.initialized)
1866 m32r_compute_frame_size (size);
1867
1868 /* This is only for the human reader. */
1869 fprintf (file,
1870 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1871 ASM_COMMENT_START,
1872 current_frame_info.var_size,
1873 current_frame_info.reg_size / 4,
1874 current_frame_info.args_size,
1875 current_frame_info.extra_size);
1876 }
1877 \f
1878 /* Do any necessary cleanup after a function to restore stack, frame,
1879 and regs. */
1880
1881 static void
1882 m32r_output_function_epilogue (FILE * file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1883 {
1884 int regno;
1885 int noepilogue = FALSE;
1886 int total_size;
1887 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1888
1889 /* This is only for the human reader. */
1890 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1891
1892 if (!current_frame_info.initialized)
1893 abort ();
1894 total_size = current_frame_info.total_size;
1895
1896 if (total_size == 0)
1897 {
1898 rtx insn = get_last_insn ();
1899
1900 /* If the last insn was a BARRIER, we don't have to write any code
1901 because a jump (aka return) was put there. */
1902 if (GET_CODE (insn) == NOTE)
1903 insn = prev_nonnote_insn (insn);
1904 if (insn && GET_CODE (insn) == BARRIER)
1905 noepilogue = TRUE;
1906 }
1907
1908 if (!noepilogue)
1909 {
1910 unsigned int var_size = current_frame_info.var_size;
1911 unsigned int args_size = current_frame_info.args_size;
1912 unsigned int gmask = current_frame_info.gmask;
1913 int can_trust_sp_p = !current_function_calls_alloca;
1914 const char * sp_str = reg_names[STACK_POINTER_REGNUM];
1915 const char * fp_str = reg_names[FRAME_POINTER_REGNUM];
1916
1917 /* The first thing to do is point the sp at the bottom of the register
1918 save area. */
1919 if (can_trust_sp_p)
1920 {
1921 unsigned int reg_offset = var_size + args_size;
1922 if (reg_offset == 0)
1923 ; /* Nothing to do. */
1924 else if (reg_offset < 128)
1925 fprintf (file, "\taddi %s,%s%d\n",
1926 sp_str, IMMEDIATE_PREFIX, reg_offset);
1927 else if (reg_offset < 32768)
1928 fprintf (file, "\tadd3 %s,%s,%s%d\n",
1929 sp_str, sp_str, IMMEDIATE_PREFIX, reg_offset);
1930 else
1931 fprintf (file, "\tld24 %s,%s%d\n\tadd %s,%s\n",
1932 reg_names[PROLOGUE_TMP_REGNUM],
1933 IMMEDIATE_PREFIX, reg_offset,
1934 sp_str, reg_names[PROLOGUE_TMP_REGNUM]);
1935 }
1936 else if (frame_pointer_needed)
1937 {
1938 unsigned int reg_offset = var_size + args_size;
1939
1940 if (reg_offset == 0)
1941 fprintf (file, "\tmv %s,%s\n", sp_str, fp_str);
1942 else if (reg_offset < 32768)
1943 fprintf (file, "\tadd3 %s,%s,%s%d\n",
1944 sp_str, fp_str, IMMEDIATE_PREFIX, reg_offset);
1945 else
1946 fprintf (file, "\tld24 %s,%s%d\n\tadd %s,%s\n",
1947 reg_names[PROLOGUE_TMP_REGNUM],
1948 IMMEDIATE_PREFIX, reg_offset,
1949 sp_str, reg_names[PROLOGUE_TMP_REGNUM]);
1950 }
1951 else
1952 abort ();
1953
1954 if (current_frame_info.save_lr)
1955 fprintf (file, "\tpop %s\n", reg_names[RETURN_ADDR_REGNUM]);
1956
1957 /* Restore any saved registers, in reverse order of course. */
1958 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1959 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1960 {
1961 if ((gmask & (1L << regno)) != 0)
1962 fprintf (file, "\tpop %s\n", reg_names[regno]);
1963 }
1964
1965 if (current_frame_info.save_fp)
1966 fprintf (file, "\tpop %s\n", fp_str);
1967
1968 /* Remove varargs area if present. */
1969 if (current_frame_info.pretend_size != 0)
1970 fprintf (file, "\taddi %s,%s%d\n",
1971 sp_str, IMMEDIATE_PREFIX, current_frame_info.pretend_size);
1972
1973 /* Emit the return instruction. */
1974 if (M32R_INTERRUPT_P (fn_type))
1975 fprintf (file, "\trte\n");
1976 else
1977 fprintf (file, "\tjmp %s\n", reg_names[RETURN_ADDR_REGNUM]);
1978 }
1979
1980 /* Reset state info for each function. */
1981 current_frame_info = zero_frame_info;
1982 m32r_compute_function_type (NULL_TREE);
1983 }
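
/* As a sketch, for a function with 16 bytes of locals that saved lr and
   fp and does not call alloca, the epilogue printed above would be
   roughly:

        addi sp,#16
        pop lr
        pop fp
        jmp lr

   An interrupt handler ends with "rte" instead of the "jmp lr".  */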
1984 \f
1985 /* Return nonzero if this function is known to have a null or 1 instruction
1986 epilogue. */
1987
1988 int
1989 direct_return (void)
1990 {
1991 if (!reload_completed)
1992 return FALSE;
1993
1994 if (! current_frame_info.initialized)
1995 m32r_compute_frame_size (get_frame_size ());
1996
1997 return current_frame_info.total_size == 0;
1998 }
1999
2000 \f
2001 /* PIC. */
2002
2003 int
2004 m32r_legitimate_pic_operand_p (rtx x)
2005 {
2006 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
2007 return 0;
2008
2009 if (GET_CODE (x) == CONST
2010 && GET_CODE (XEXP (x, 0)) == PLUS
2011 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2012 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2013 && (GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2014 return 0;
2015
2016 return 1;
2017 }
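
/* In other words, when generating PIC a bare symbol such as `foo', a
   label, or a constant of the form `foo+4' is not a legitimate operand
   as-is (it must first go through m32r_legitimize_pic_address below),
   while registers and plain integer constants are accepted.  */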
2018
2019 rtx
2020 m32r_legitimize_pic_address (rtx orig, rtx reg)
2021 {
2022 #ifdef DEBUG_PIC
2023 printf("m32r_legitimize_pic_address()\n");
2024 #endif
2025
2026 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
2027 {
2028 rtx pic_ref, address;
2029 rtx insn;
2030 int subregs = 0;
2031
2032 if (reg == 0)
2033 {
2034 if (reload_in_progress || reload_completed)
2035 abort ();
2036 else
2037 reg = gen_reg_rtx (Pmode);
2038
2039 subregs = 1;
2040 }
2041
2042 if (subregs)
2043 address = gen_reg_rtx (Pmode);
2044 else
2045 address = reg;
2046
2047 emit_insn (gen_pic_load_addr (address, orig));
2048
2049 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
2050 pic_ref = gen_rtx_MEM (Pmode, address);
2051
2052 RTX_UNCHANGING_P (pic_ref) = 1;
2053 insn = emit_move_insn (reg, pic_ref);
2054 current_function_uses_pic_offset_table = 1;
2055 #if 0
2056 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2057 by loop. */
2058 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2059 REG_NOTES (insn));
2060 #endif
2061 return reg;
2062 }
2063 else if (GET_CODE (orig) == CONST)
2064 {
2065 rtx base, offset;
2066
2067 if (GET_CODE (XEXP (orig, 0)) == PLUS
2068 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
2069 return orig;
2070
2071 if (reg == 0)
2072 {
2073 if (reload_in_progress || reload_completed)
2074 abort ();
2075 else
2076 reg = gen_reg_rtx (Pmode);
2077 }
2078
2079 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2080 {
2081 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
2082 if (base == reg)
2083 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
2084 else
2085 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2086 }
2087 else
2088 return orig;
2089
2090 if (GET_CODE (offset) == CONST_INT)
2091 {
2092 if (INT16_P (INTVAL (offset)))
2093 return plus_constant (base, INTVAL (offset));
2094 else if (! reload_in_progress && ! reload_completed)
2095 offset = force_reg (Pmode, offset);
2096 else
2097 /* If we reach here, then something is seriously wrong. */
2098 abort ();
2099 }
2100
2101 return gen_rtx_PLUS (Pmode, base, offset);
2102 }
2103
2104 return orig;
2105 }
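
/* A sketch of what the SYMBOL_REF/LABEL_REF case above generates, in
   pseudo RTL:

        (set (reg tmp) <GOT offset of "foo">)        ; gen_pic_load_addr
        (set (reg tmp) (plus (reg tmp) pic_reg))     ; gen_addsi3
        (set (reg dst) (mem (reg tmp)))              ; load from the GOT slot

   i.e. the symbol's address is fetched from its GOT entry rather than
   being encoded directly in the instruction stream.  */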
2106
2107 /* Emit special PIC prologues and epilogues. */
2108
2109 void
2110 m32r_finalize_pic (void)
2111 {
2112 current_function_uses_pic_offset_table |= current_function_profile;
2113 }
2114 \f
2115 /* Nested function support. */
2116
2117 /* Emit RTL insns to initialize the variable parts of a trampoline.
2118 FNADDR is an RTX for the address of the function's pure code.
2119 CXT is an RTX for the static chain value for the function. */
2120
2121 void
2122 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2123 rtx fnaddr ATTRIBUTE_UNUSED,
2124 rtx cxt ATTRIBUTE_UNUSED)
2125 {
2126 }
2127 \f
2128 static void
2129 m32r_file_start (void)
2130 {
2131 default_file_start ();
2132
2133 if (flag_verbose_asm)
2134 fprintf (asm_out_file,
2135 "%s M32R/D special options: -G " HOST_WIDE_INT_PRINT_UNSIGNED "\n",
2136 ASM_COMMENT_START, g_switch_value);
2137
2138 if (TARGET_LITTLE_ENDIAN)
2139 fprintf (asm_out_file, "\t.little\n");
2140 }
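
/* With -fverbose-asm on a little-endian target the start of the assembly
   file would therefore contain roughly (assuming ASM_COMMENT_START is ";"
   and -G 8 was given):

        ; M32R/D special options: -G 8
        	.little
   */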
2141 \f
2142 /* Print operand X (an rtx) in assembler syntax to file FILE.
2143 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2144 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2145
2146 void
2147 m32r_print_operand (FILE * file, rtx x, int code)
2148 {
2149 rtx addr;
2150
2151 switch (code)
2152 {
2153 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2154 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2155 case 's':
2156 if (GET_CODE (x) == REG)
2157 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2158 else
2159 output_operand_lossage ("invalid operand to %%s code");
2160 return;
2161
2162 case 'p':
2163 if (GET_CODE (x) == REG)
2164 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2165 else
2166 output_operand_lossage ("invalid operand to %%p code");
2167 return;
2168
2169 case 'R' :
2170 /* Write second word of DImode or DFmode reference,
2171 register or memory. */
2172 if (GET_CODE (x) == REG)
2173 fputs (reg_names[REGNO (x)+1], file);
2174 else if (GET_CODE (x) == MEM)
2175 {
2176 fprintf (file, "@(");
2177 /* Handle possible auto-increment. Since it is pre-increment and
2178 we have already done it, we can just use an offset of four. */
2179 /* ??? This is taken from rs6000.c I think. I don't think it is
2180 currently necessary, but keep it around. */
2181 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2182 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2183 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2184 else
2185 output_address (plus_constant (XEXP (x, 0), 4));
2186 fputc (')', file);
2187 }
2188 else
2189 output_operand_lossage ("invalid operand to %%R code");
2190 return;
2191
2192 case 'H' : /* High word. */
2193 case 'L' : /* Low word. */
2194 if (GET_CODE (x) == REG)
2195 {
2196 /* L = least significant word, H = most significant word. */
2197 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2198 fputs (reg_names[REGNO (x)], file);
2199 else
2200 fputs (reg_names[REGNO (x)+1], file);
2201 }
2202 else if (GET_CODE (x) == CONST_INT
2203 || GET_CODE (x) == CONST_DOUBLE)
2204 {
2205 rtx first, second;
2206
2207 split_double (x, &first, &second);
2208 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2209 code == 'L' ? INTVAL (first) : INTVAL (second));
2210 }
2211 else
2212 output_operand_lossage ("invalid operand to %%H/%%L code");
2213 return;
2214
2215 case 'A' :
2216 {
2217 char str[30];
2218
2219 if (GET_CODE (x) != CONST_DOUBLE
2220 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2221 fatal_insn ("bad insn for 'A'", x);
2222
2223 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2224 fprintf (file, "%s", str);
2225 return;
2226 }
2227
2228 case 'B' : /* Bottom half. */
2229 case 'T' : /* Top half. */
2230 /* Output the argument to a `seth' insn (sets the Top half-word).
2231 For constants output arguments to a seth/or3 pair to set Top and
2232 Bottom halves. For symbols output arguments to a seth/add3 pair to
2233 set Top and Bottom halves. The difference exists because for
2234 constants seth/or3 is more readable but for symbols we need to use
2235 the same scheme as `ld' and `st' insns (16 bit addend is signed). */
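       /* For example, loading the constant 0x12345678 uses

              seth  rX,#0x1234        ; %T
              or3   rX,rX,#0x5678     ; %B

          whereas loading the address of `sym' uses

              seth  rX,#shigh(sym)    ; %T
              add3  rX,rX,#low(sym)   ; %B

          (a small-data symbol instead prints "sda(sym)" for %B).  */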
2236 switch (GET_CODE (x))
2237 {
2238 case CONST_INT :
2239 case CONST_DOUBLE :
2240 {
2241 rtx first, second;
2242
2243 split_double (x, &first, &second);
2244 x = WORDS_BIG_ENDIAN ? second : first;
2245 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2246 (code == 'B'
2247 ? INTVAL (x) & 0xffff
2248 : (INTVAL (x) >> 16) & 0xffff));
2249 }
2250 return;
2251 case CONST :
2252 case SYMBOL_REF :
2253 if (code == 'B'
2254 && small_data_operand (x, VOIDmode))
2255 {
2256 fputs ("sda(", file);
2257 output_addr_const (file, x);
2258 fputc (')', file);
2259 return;
2260 }
2261 /* fall through */
2262 case LABEL_REF :
2263 fputs (code == 'T' ? "shigh(" : "low(", file);
2264 output_addr_const (file, x);
2265 fputc (')', file);
2266 return;
2267 default :
2268 output_operand_lossage ("invalid operand to %%T/%%B code");
2269 return;
2270 }
2271 break;
2272
2273 case 'U' :
2274 /* ??? wip */
2275 /* Output a load/store with update indicator if appropriate. */
2276 if (GET_CODE (x) == MEM)
2277 {
2278 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2279 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2280 fputs (".a", file);
2281 }
2282 else
2283 output_operand_lossage ("invalid operand to %%U code");
2284 return;
2285
2286 case 'N' :
2287 /* Print a constant value negated. */
2288 if (GET_CODE (x) == CONST_INT)
2289 output_addr_const (file, GEN_INT (- INTVAL (x)));
2290 else
2291 output_operand_lossage ("invalid operand to %%N code");
2292 return;
2293
2294 case 'X' :
2295 /* Print a const_int in hex. Used in comments. */
2296 if (GET_CODE (x) == CONST_INT)
2297 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2298 return;
2299
2300 case '#' :
2301 fputs (IMMEDIATE_PREFIX, file);
2302 return;
2303
2304 case 0 :
2305 /* Do nothing special. */
2306 break;
2307
2308 default :
2309 /* Unknown flag. */
2310 output_operand_lossage ("invalid operand output code");
2311 }
2312
2313 switch (GET_CODE (x))
2314 {
2315 case REG :
2316 fputs (reg_names[REGNO (x)], file);
2317 break;
2318
2319 case MEM :
2320 addr = XEXP (x, 0);
2321 if (GET_CODE (addr) == PRE_INC)
2322 {
2323 if (GET_CODE (XEXP (addr, 0)) != REG)
2324 fatal_insn ("pre-increment address is not a register", x);
2325
2326 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2327 }
2328 else if (GET_CODE (addr) == PRE_DEC)
2329 {
2330 if (GET_CODE (XEXP (addr, 0)) != REG)
2331 fatal_insn ("pre-decrement address is not a register", x);
2332
2333 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2334 }
2335 else if (GET_CODE (addr) == POST_INC)
2336 {
2337 if (GET_CODE (XEXP (addr, 0)) != REG)
2338 fatal_insn ("post-increment address is not a register", x);
2339
2340 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2341 }
2342 else
2343 {
2344 fputs ("@(", file);
2345 output_address (XEXP (x, 0));
2346 fputc (')', file);
2347 }
2348 break;
2349
2350 case CONST_DOUBLE :
2351 /* We handle SFmode constants here as output_addr_const doesn't. */
2352 if (GET_MODE (x) == SFmode)
2353 {
2354 REAL_VALUE_TYPE d;
2355 long l;
2356
2357 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2358 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2359 fprintf (file, "0x%08lx", l);
2360 break;
2361 }
2362
2363 /* Fall through. Let output_addr_const deal with it. */
2364
2365 default :
2366 output_addr_const (file, x);
2367 break;
2368 }
2369 }
2370
2371 /* Print a memory address as an operand to reference that memory location. */
2372
2373 void
2374 m32r_print_operand_address (FILE * file, rtx addr)
2375 {
2376 rtx base;
2377 rtx index = 0;
2378 int offset = 0;
2379
2380 switch (GET_CODE (addr))
2381 {
2382 case REG :
2383 fputs (reg_names[REGNO (addr)], file);
2384 break;
2385
2386 case PLUS :
2387 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
2388 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2389 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2390 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2391 else
2392 base = XEXP (addr, 0), index = XEXP (addr, 1);
2393 if (GET_CODE (base) == REG)
2394 {
2395 /* Print the offset first (if present) to conform to the manual. */
2396 if (index == 0)
2397 {
2398 if (offset != 0)
2399 fprintf (file, "%d,", offset);
2400 fputs (reg_names[REGNO (base)], file);
2401 }
2402 /* The chip doesn't support this, but left in for generality. */
2403 else if (GET_CODE (index) == REG)
2404 fprintf (file, "%s,%s",
2405 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2406 /* Not sure this can happen, but leave in for now. */
2407 else if (GET_CODE (index) == SYMBOL_REF)
2408 {
2409 output_addr_const (file, index);
2410 fputc (',', file);
2411 fputs (reg_names[REGNO (base)], file);
2412 }
2413 else
2414 fatal_insn ("bad address", addr);
2415 }
2416 else if (GET_CODE (base) == LO_SUM)
2417 {
2418 if (index != 0
2419 || GET_CODE (XEXP (base, 0)) != REG)
2420 abort ();
2421 if (small_data_operand (XEXP (base, 1), VOIDmode))
2422 fputs ("sda(", file);
2423 else
2424 fputs ("low(", file);
2425 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2426 fputs ("),", file);
2427 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2428 }
2429 else
2430 fatal_insn ("bad address", addr);
2431 break;
2432
2433 case LO_SUM :
2434 if (GET_CODE (XEXP (addr, 0)) != REG)
2435 fatal_insn ("lo_sum not of register", addr);
2436 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2437 fputs ("sda(", file);
2438 else
2439 fputs ("low(", file);
2440 output_addr_const (file, XEXP (addr, 1));
2441 fputs ("),", file);
2442 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2443 break;
2444
2445 case PRE_INC : /* Assume SImode. */
2446 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2447 break;
2448
2449 case PRE_DEC : /* Assume SImode. */
2450 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2451 break;
2452
2453 case POST_INC : /* Assume SImode. */
2454 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2455 break;
2456
2457 default :
2458 output_addr_const (file, addr);
2459 break;
2460 }
2461 }
2462
2463 /* Return true if the operands are the constants 0 and 1. */
2464
2465 int
2466 zero_and_one (rtx operand1, rtx operand2)
2467 {
2468 return
2469 GET_CODE (operand1) == CONST_INT
2470 && GET_CODE (operand2) == CONST_INT
2471 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2472 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2473 }
2474
2475 /* Return nonzero if the operand is suitable for use in a conditional move sequence. */
2476
2477 int
2478 conditional_move_operand (rtx operand, enum machine_mode mode)
2479 {
2480 /* Only defined for simple integers so far... */
2481 if (mode != SImode && mode != HImode && mode != QImode)
2482 return FALSE;
2483
2484 /* At the moment we can handle moving registers and loading constants. */
2485 /* To be added: Addition/subtraction/bitops/multiplication of registers. */
2486
2487 switch (GET_CODE (operand))
2488 {
2489 case REG:
2490 return 1;
2491
2492 case CONST_INT:
2493 return INT8_P (INTVAL (operand));
2494
2495 default:
2496 #if 0
2497 fprintf (stderr, "Test for cond move op of type: %s\n",
2498 GET_RTX_NAME (GET_CODE (operand)));
2499 #endif
2500 return 0;
2501 }
2502 }
2503
2504 /* Return true if the code is a test of the carry bit. */
2505
2506 int
2507 carry_compare_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2508 {
2509 rtx x;
2510
2511 if (GET_MODE (op) != CCmode && GET_MODE (op) != VOIDmode)
2512 return FALSE;
2513
2514 if (GET_CODE (op) != NE && GET_CODE (op) != EQ)
2515 return FALSE;
2516
2517 x = XEXP (op, 0);
2518 if (GET_CODE (x) != REG || REGNO (x) != CARRY_REGNUM)
2519 return FALSE;
2520
2521 x = XEXP (op, 1);
2522 if (GET_CODE (x) != CONST_INT || INTVAL (x) != 0)
2523 return FALSE;
2524
2525 return TRUE;
2526 }
2527
2528 /* Generate the correct assembler code to handle the conditional loading of a
2529 value into a register. It is known that the operands satisfy the
2530 conditional_move_operand() function above. The destination is operand[0].
2531 The condition is operand[1]. The 'true' value is operand[2] and the
2532 'false' value is operand[3]. */
2533
2534 char *
2535 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2536 {
2537 static char buffer [100];
2538 const char * dest = reg_names [REGNO (operands [0])];
2539
2540 buffer [0] = 0;
2541
2542 /* Destination must be a register. */
2543 if (GET_CODE (operands [0]) != REG)
2544 abort();
2545 if (! conditional_move_operand (operands [2], SImode))
2546 abort();
2547 if (! conditional_move_operand (operands [3], SImode))
2548 abort();
2549
2550 /* Check to see if the test is reversed. */
2551 if (GET_CODE (operands [1]) == NE)
2552 {
2553 rtx tmp = operands [2];
2554 operands [2] = operands [3];
2555 operands [3] = tmp;
2556 }
2557
2558 sprintf (buffer, "mvfc %s, cbr", dest);
2559
2560 /* If the true value was '0' then we need to invert the results of the move. */
2561 if (INTVAL (operands [2]) == 0)
2562 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2563 dest, dest);
2564
2565 return buffer;
2566 }
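
/* A sketch of the two possible outputs: when the 'true' value (after
   accounting for a reversed NE test) is 1 the result is simply
   "mvfc rD, cbr"; when it is 0 an "xor3 rD, rD, #1" is appended to
   invert the copied condition bit.  */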
2567
2568 /* Returns true if the registers contained in the two
2569 rtl expressions are different. */
2570
2571 int
2572 m32r_not_same_reg (rtx a, rtx b)
2573 {
2574 int reg_a = -1;
2575 int reg_b = -2;
2576
2577 while (GET_CODE (a) == SUBREG)
2578 a = SUBREG_REG (a);
2579
2580 if (GET_CODE (a) == REG)
2581 reg_a = REGNO (a);
2582
2583 while (GET_CODE (b) == SUBREG)
2584 b = SUBREG_REG (b);
2585
2586 if (GET_CODE (b) == REG)
2587 reg_b = REGNO (b);
2588
2589 return reg_a != reg_b;
2590 }
2591
2592 \f
2593 /* Use a library function to move some bytes. */
2594
2595 static void
2596 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2597 {
2598 /* We want to pass the size as Pmode, which will normally be SImode
2599 but will be DImode if we are using 64 bit longs and pointers. */
2600 if (GET_MODE (bytes_rtx) != VOIDmode
2601 && GET_MODE (bytes_rtx) != Pmode)
2602 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2603
2604 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "memcpy"), 0,
2605 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2606 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2607 TYPE_UNSIGNED (sizetype)),
2608 TYPE_MODE (sizetype));
2609 }
2610
2611 /* The maximum number of bytes to copy using pairs of load/store instructions.
2612 If a block is larger than this then a loop will be generated to copy
2613 MAX_MOVE_BYTES chunks at a time. The value of 32 is a semi-arbitrary choice.
2614 A customer uses Dhrystone as their benchmark, and Dhrystone has a 31 byte
2615 string copy in it. */
2616 #define MAX_MOVE_BYTES 32
2617
2618 /* Expand string/block move operations.
2619
2620 operands[0] is the pointer to the destination.
2621 operands[1] is the pointer to the source.
2622 operands[2] is the number of bytes to move.
2623 operands[3] is the alignment. */
2624
2625 void
2626 m32r_expand_block_move (rtx operands[])
2627 {
2628 rtx orig_dst = operands[0];
2629 rtx orig_src = operands[1];
2630 rtx bytes_rtx = operands[2];
2631 rtx align_rtx = operands[3];
2632 int constp = GET_CODE (bytes_rtx) == CONST_INT;
2633 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2634 int align = INTVAL (align_rtx);
2635 int leftover;
2636 rtx src_reg;
2637 rtx dst_reg;
2638
2639 if (constp && bytes <= 0)
2640 return;
2641
2642 /* Move the address into scratch registers. */
2643 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2644 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2645
2646 if (align > UNITS_PER_WORD)
2647 align = UNITS_PER_WORD;
2648
2649 /* If we prefer size over speed, always use a function call.
2650 If we do not know the size, use a function call.
2651 If the blocks are not word aligned, use a function call. */
2652 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2653 {
2654 block_move_call (dst_reg, src_reg, bytes_rtx);
2655 return;
2656 }
2657
2658 leftover = bytes % MAX_MOVE_BYTES;
2659 bytes -= leftover;
2660
2661 /* If necessary, generate a loop to handle the bulk of the copy. */
2662 if (bytes)
2663 {
2664 rtx label = NULL_RTX;
2665 rtx final_src = NULL_RTX;
2666 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2667 rtx rounded_total = GEN_INT (bytes);
2668 rtx new_dst_reg = gen_reg_rtx (SImode);
2669 rtx new_src_reg = gen_reg_rtx (SImode);
2670
2671 /* If we are going to have to perform this loop more than
2672 once, then generate a label and compute the address the
2673 source register will contain upon completion of the final
2674 iteration. */
2675 if (bytes > MAX_MOVE_BYTES)
2676 {
2677 final_src = gen_reg_rtx (Pmode);
2678
2679 if (INT16_P(bytes))
2680 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2681 else
2682 {
2683 emit_insn (gen_movsi (final_src, rounded_total));
2684 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2685 }
2686
2687 label = gen_label_rtx ();
2688 emit_label (label);
2689 }
2690
2691 /* It is known that m32r_output_block_move() will update src_reg to point
2692 to the word after the end of the source block, and dst_reg to point
2693 to the last word of the destination block, provided that the block
2694 is MAX_MOVE_BYTES long. */
2695 emit_insn (gen_movstrsi_internal (dst_reg, src_reg, at_a_time,
2696 new_dst_reg, new_src_reg));
2697 emit_move_insn (dst_reg, new_dst_reg);
2698 emit_move_insn (src_reg, new_src_reg);
2699 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2700
2701 if (bytes > MAX_MOVE_BYTES)
2702 {
2703 emit_insn (gen_cmpsi (src_reg, final_src));
2704 emit_jump_insn (gen_bne (label));
2705 }
2706 }
2707
2708 if (leftover)
2709 emit_insn (gen_movstrsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2710 gen_reg_rtx (SImode),
2711 gen_reg_rtx (SImode)));
2712 }
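
/* As an example of the expansion above: a word-aligned copy of 100 bytes
   with a known size (and not optimizing for space) becomes a three-pass
   loop moving MAX_MOVE_BYTES (32) bytes per iteration, followed by a
   final 4-byte movstrsi_internal for the leftover, whereas an unaligned
   or unknown-size copy falls back to the memcpy library call.  */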
2713
2714 \f
2715 /* Emit load/stores for a small constant word aligned block_move.
2716
2717 operands[0] is the memory address of the destination.
2718 operands[1] is the memory address of the source.
2719 operands[2] is the number of bytes to move.
2720 operands[3] is a temp register.
2721 operands[4] is a temp register. */
2722
2723 void
2724 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2725 {
2726 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2727 int first_time;
2728 int got_extra = 0;
2729
2730 if (bytes < 1 || bytes > MAX_MOVE_BYTES)
2731 abort ();
2732
2733 /* We do not have a post-increment store available, so the first set of
2734 stores is done without any increment, then the remaining ones can use
2735 the pre-increment addressing mode.
2736
2737 Note: m32r_expand_block_move() also relies upon this behavior when building
2738 loops to copy large blocks. */
2739 first_time = 1;
2740
2741 while (bytes > 0)
2742 {
2743 if (bytes >= 8)
2744 {
2745 if (first_time)
2746 {
2747 output_asm_insn ("ld\t%5, %p1", operands);
2748 output_asm_insn ("ld\t%6, %p1", operands);
2749 output_asm_insn ("st\t%5, @%0", operands);
2750 output_asm_insn ("st\t%6, %s0", operands);
2751 }
2752 else
2753 {
2754 output_asm_insn ("ld\t%5, %p1", operands);
2755 output_asm_insn ("ld\t%6, %p1", operands);
2756 output_asm_insn ("st\t%5, %s0", operands);
2757 output_asm_insn ("st\t%6, %s0", operands);
2758 }
2759
2760 bytes -= 8;
2761 }
2762 else if (bytes >= 4)
2763 {
2764 if (bytes > 4)
2765 got_extra = 1;
2766
2767 output_asm_insn ("ld\t%5, %p1", operands);
2768
2769 if (got_extra)
2770 output_asm_insn ("ld\t%6, %p1", operands);
2771
2772 if (first_time)
2773 output_asm_insn ("st\t%5, @%0", operands);
2774 else
2775 output_asm_insn ("st\t%5, %s0", operands);
2776
2777 bytes -= 4;
2778 }
2779 else
2780 {
2781 /* Get the entire next word, even though we do not want all of it.
2782 This saves us from doing several smaller loads, and we assume that
2783 we cannot cause a page fault when at least part of the word is in
2784 valid memory [since we don't get called if things aren't properly
2785 aligned]. */
2786 int dst_offset = first_time ? 0 : 4;
2787 /* The amount of increment we have to make to the
2788 destination pointer. */
2789 int dst_inc_amount = dst_offset + bytes - 4;
2790 /* The same for the source pointer. */
2791 int src_inc_amount = bytes;
2792 int last_shift;
2793 rtx my_operands[3];
2794
2795 /* If got_extra is true then we have already loaded
2796 the next word as part of loading and storing the previous word. */
2797 if (! got_extra)
2798 output_asm_insn ("ld\t%6, @%1", operands);
2799
2800 if (bytes >= 2)
2801 {
2802 bytes -= 2;
2803
2804 output_asm_insn ("sra3\t%5, %6, #16", operands);
2805 my_operands[0] = operands[5];
2806 my_operands[1] = GEN_INT (dst_offset);
2807 my_operands[2] = operands[0];
2808 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2809
2810 /* If there is a byte left to store then increment the
2811 destination address and shift the contents of the source
2812 register down by 8 bits. We could not do the address
2813 increment in the store half word instruction, because it does
2814 not have an auto increment mode. */
2815 if (bytes > 0) /* assert (bytes == 1) */
2816 {
2817 dst_offset += 2;
2818 last_shift = 8;
2819 }
2820 }
2821 else
2822 last_shift = 24;
2823
2824 if (bytes > 0)
2825 {
2826 my_operands[0] = operands[6];
2827 my_operands[1] = GEN_INT (last_shift);
2828 output_asm_insn ("srai\t%0, #%1", my_operands);
2829 my_operands[0] = operands[6];
2830 my_operands[1] = GEN_INT (dst_offset);
2831 my_operands[2] = operands[0];
2832 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2833 }
2834
2835 /* Update the destination pointer if needed. We have to do
2836 this so that the pattern matches what we output in this
2837 function. */
2838 if (dst_inc_amount
2839 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2840 {
2841 my_operands[0] = operands[0];
2842 my_operands[1] = GEN_INT (dst_inc_amount);
2843 output_asm_insn ("addi\t%0, #%1", my_operands);
2844 }
2845
2846 /* Update the source pointer if needed. We have to do this
2847 so that the pattern matches what we output in this
2848 function. */
2849 if (src_inc_amount
2850 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2851 {
2852 my_operands[0] = operands[1];
2853 my_operands[1] = GEN_INT (src_inc_amount);
2854 output_asm_insn ("addi\t%0, #%1", my_operands);
2855 }
2856
2857 bytes = 0;
2858 }
2859
2860 first_time = 0;
2861 }
2862 }
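
/* For instance, an 8-byte aligned copy (bytes == 8, first_time) emits
   roughly:

        ld	r5, @r1+
        ld	r6, @r1+
        st	r5, @r0
        st	r6, @+r0

   i.e. post-increment loads, a plain first store, and pre-increment
   stores thereafter (register numbers here are illustrative only).  */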
2863
2864 /* Return true if OP is a positive integer constant that is less than or
2865 equal to MAX_MOVE_BYTES. */
2866
2867 int
2868 m32r_block_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2869 {
2870 if (GET_CODE (op) != CONST_INT
2871 || INTVAL (op) > MAX_MOVE_BYTES
2872 || INTVAL (op) <= 0)
2873 return 0;
2874
2875 return 1;
2876 }
2877
2878 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2879
2880 int
2881 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2882 unsigned int new_reg)
2883 {
2884 /* Interrupt routines can't clobber any register that isn't already used. */
2885 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2886 && !regs_ever_live[new_reg])
2887 return 0;
2888
2889 /* We currently emit epilogues as text, not rtl, so the liveness
2890 of the return address register isn't visible. */
2891 if (current_function_is_leaf && new_reg == RETURN_ADDR_REGNUM)
2892 return 0;
2893
2894 return 1;
2895 }
2896
2897 rtx
2898 m32r_return_addr (int count)
2899 {
2900 if (count != 0)
2901 return const0_rtx;
2902
2903 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2904 }