gcc.gnu.org Git - gcc.git/blob - gcc/config/m32r/m32r.c
commit f03df26b5710dacc3ad19373f4de67aa3515600d
1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "toplev.h"
39 #include "ggc.h"
40 #include "integrate.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "tm-constrs.h"
46
47 /* Array of valid operand punctuation characters. */
48 static char m32r_punct_chars[256];
49
50 /* Selected code model. */
51 enum m32r_model m32r_model = M32R_MODEL_DEFAULT;
52
53 /* Selected SDA support. */
54 enum m32r_sdata m32r_sdata = M32R_SDATA_DEFAULT;
55
56 /* Machine-specific symbol_ref flags. */
57 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
58 #define SYMBOL_REF_MODEL(X) \
59 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
60
61 /* For string literals, etc. */
62 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
63
64 /* Forward declarations. */
65 static bool m32r_handle_option (size_t, const char *, int);
66 static void m32r_option_override (void);
67 static void init_reg_tables (void);
68 static void block_move_call (rtx, rtx, rtx);
69 static int m32r_is_insn (rtx);
70 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
71 static bool m32r_mode_dependent_address_p (const_rtx);
72 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
73 static void m32r_print_operand (FILE *, rtx, int);
74 static void m32r_print_operand_address (FILE *, rtx);
75 static bool m32r_print_operand_punct_valid_p (unsigned char code);
76 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
77 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
78
79 static void m32r_file_start (void);
80
81 static int m32r_adjust_priority (rtx, int);
82 static int m32r_issue_rate (void);
83
84 static void m32r_encode_section_info (tree, rtx, int);
85 static bool m32r_in_small_data_p (const_tree);
86 static bool m32r_return_in_memory (const_tree, const_tree);
87 static rtx m32r_function_value (const_tree, const_tree, bool);
88 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
89 static bool m32r_function_value_regno_p (const unsigned int);
90 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
91 tree, int *, int);
92 static void init_idents (void);
93 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
94 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
96 const_tree, bool);
97 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
98 tree, bool);
99 static rtx m32r_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
100 const_tree, bool);
101 static void m32r_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
102 const_tree, bool);
103 static bool m32r_can_eliminate (const int, const int);
104 static void m32r_trampoline_init (rtx, tree, rtx);
105 \f
106 /* M32R specific attributes. */
107
108 static const struct attribute_spec m32r_attribute_table[] =
109 {
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
111 { "interrupt", 0, 0, true, false, false, NULL },
112 { "model", 1, 1, true, false, false, m32r_handle_model_attribute },
113 { NULL, 0, 0, false, false, false, NULL }
114 };
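/* Illustration only (hypothetical declarations, not part of this file):
   with the table above, the back end accepts an argument-less "interrupt"
   attribute on a declaration and a one-argument "model" attribute, e.g.

     void timer_handler (void) __attribute__ ((interrupt));
     extern int lookup_table[256] __attribute__ ((model ("large")));

   Only "model" gets extra checking, via m32r_handle_model_attribute
   defined below.  */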
115
116 static const struct default_options m32r_option_optimization_table[] =
117 {
118 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
119 { OPT_LEVELS_1_PLUS, OPT_fregmove, NULL, 1 },
120 { OPT_LEVELS_NONE, 0, NULL, 0 }
121 };
122 \f
123 /* Initialize the GCC target structure. */
124 #undef TARGET_ATTRIBUTE_TABLE
125 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
126
127 #undef TARGET_LEGITIMIZE_ADDRESS
128 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
129 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
130 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
131
132 #undef TARGET_ASM_ALIGNED_HI_OP
133 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
134 #undef TARGET_ASM_ALIGNED_SI_OP
135 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
136
137 #undef TARGET_PRINT_OPERAND
138 #define TARGET_PRINT_OPERAND m32r_print_operand
139 #undef TARGET_PRINT_OPERAND_ADDRESS
140 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
141 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
142 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
143
144 #undef TARGET_ASM_FUNCTION_PROLOGUE
145 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
146 #undef TARGET_ASM_FUNCTION_EPILOGUE
147 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
148
149 #undef TARGET_ASM_FILE_START
150 #define TARGET_ASM_FILE_START m32r_file_start
151
152 #undef TARGET_SCHED_ADJUST_PRIORITY
153 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
154 #undef TARGET_SCHED_ISSUE_RATE
155 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
156
157 #undef TARGET_DEFAULT_TARGET_FLAGS
158 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
159 #undef TARGET_HANDLE_OPTION
160 #define TARGET_HANDLE_OPTION m32r_handle_option
161 #undef TARGET_OPTION_OVERRIDE
162 #define TARGET_OPTION_OVERRIDE m32r_option_override
163 #undef TARGET_OPTION_OPTIMIZATION_TABLE
164 #define TARGET_OPTION_OPTIMIZATION_TABLE m32r_option_optimization_table
165
166 #undef TARGET_ENCODE_SECTION_INFO
167 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
168 #undef TARGET_IN_SMALL_DATA_P
169 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
170
171
172 #undef TARGET_MEMORY_MOVE_COST
173 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
174 #undef TARGET_RTX_COSTS
175 #define TARGET_RTX_COSTS m32r_rtx_costs
176 #undef TARGET_ADDRESS_COST
177 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
178
179 #undef TARGET_PROMOTE_PROTOTYPES
180 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
181 #undef TARGET_RETURN_IN_MEMORY
182 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
183
184 #undef TARGET_FUNCTION_VALUE
185 #define TARGET_FUNCTION_VALUE m32r_function_value
186 #undef TARGET_LIBCALL_VALUE
187 #define TARGET_LIBCALL_VALUE m32r_libcall_value
188 #undef TARGET_FUNCTION_VALUE_REGNO_P
189 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
190
191 #undef TARGET_SETUP_INCOMING_VARARGS
192 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
193 #undef TARGET_MUST_PASS_IN_STACK
194 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
195 #undef TARGET_PASS_BY_REFERENCE
196 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
197 #undef TARGET_ARG_PARTIAL_BYTES
198 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
199 #undef TARGET_FUNCTION_ARG
200 #define TARGET_FUNCTION_ARG m32r_function_arg
201 #undef TARGET_FUNCTION_ARG_ADVANCE
202 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
203
204 #undef TARGET_CAN_ELIMINATE
205 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
206
207 #undef TARGET_TRAMPOLINE_INIT
208 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
209
210 struct gcc_target targetm = TARGET_INITIALIZER;
211 \f
212 /* Implement TARGET_HANDLE_OPTION. */
213
214 static bool
215 m32r_handle_option (size_t code, const char *arg, int value)
216 {
217 switch (code)
218 {
219 case OPT_m32r:
220 target_flags &= ~(MASK_M32R2 | MASK_M32RX);
221 return true;
222
223 case OPT_mmodel_:
224 if (strcmp (arg, "small") == 0)
225 m32r_model = M32R_MODEL_SMALL;
226 else if (strcmp (arg, "medium") == 0)
227 m32r_model = M32R_MODEL_MEDIUM;
228 else if (strcmp (arg, "large") == 0)
229 m32r_model = M32R_MODEL_LARGE;
230 else
231 return false;
232 return true;
233
234 case OPT_msdata_:
235 if (strcmp (arg, "none") == 0)
236 m32r_sdata = M32R_SDATA_NONE;
237 else if (strcmp (arg, "sdata") == 0)
238 m32r_sdata = M32R_SDATA_SDATA;
239 else if (strcmp (arg, "use") == 0)
240 m32r_sdata = M32R_SDATA_USE;
241 else
242 return false;
243 return true;
244
245 case OPT_mno_flush_func:
246 m32r_cache_flush_func = NULL;
247 return true;
248
249 case OPT_mflush_trap_:
250 return value <= 15;
251
252 case OPT_mno_flush_trap:
253 m32r_cache_flush_trap = -1;
254 return true;
255
256 default:
257 return true;
258 }
259 }
260
261 /* Called by m32r_option_override to initialize various things. */
262
263 void
264 m32r_init (void)
265 {
266 init_reg_tables ();
267
268 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
269 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
270 m32r_punct_chars['#'] = 1;
271 m32r_punct_chars['@'] = 1; /* ??? no longer used */
272
273 /* Provide default value if not specified. */
274 if (!global_options_set.x_g_switch_value)
275 g_switch_value = SDATA_DEFAULT_SIZE;
276 }
277
278 static void
279 m32r_option_override (void)
280 {
281 /* These need to be done at start up.
282 It's convenient to do them here. */
283 m32r_init ();
284 SUBTARGET_OVERRIDE_OPTIONS;
285 }
286
287 /* Vectors to keep interesting information about registers where it can easily
288 be obtained. We used to use the actual mode value as the bit number, but
289 there are (or may be) more than 32 modes now. Instead we use two tables:
290 one indexed by hard register number, and one indexed by mode. */
291
292 /* The purpose of m32r_mode_class is to shrink the range of modes so that
293 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
294 mapped into one m32r_mode_class mode. */
295
296 enum m32r_mode_class
297 {
298 C_MODE,
299 S_MODE, D_MODE, T_MODE, O_MODE,
300 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
301 };
302
303 /* Modes for condition codes. */
304 #define C_MODES (1 << (int) C_MODE)
305
306 /* Modes for single-word and smaller quantities. */
307 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
308
309 /* Modes for double-word and smaller quantities. */
310 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
311
312 /* Modes for quad-word and smaller quantities. */
313 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
314
315 /* Modes for accumulators. */
316 #define A_MODES (1 << (int) A_MODE)
317
318 /* Value is 1 if the register/mode pair is acceptable on the M32R. */
319
320 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
321 {
322 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
323 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
324 S_MODES, C_MODES, A_MODES, A_MODES
325 };
326
327 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
328
329 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
330
331 static void
332 init_reg_tables (void)
333 {
334 int i;
335
336 for (i = 0; i < NUM_MACHINE_MODES; i++)
337 {
338 switch (GET_MODE_CLASS (i))
339 {
340 case MODE_INT:
341 case MODE_PARTIAL_INT:
342 case MODE_COMPLEX_INT:
343 if (GET_MODE_SIZE (i) <= 4)
344 m32r_mode_class[i] = 1 << (int) S_MODE;
345 else if (GET_MODE_SIZE (i) == 8)
346 m32r_mode_class[i] = 1 << (int) D_MODE;
347 else if (GET_MODE_SIZE (i) == 16)
348 m32r_mode_class[i] = 1 << (int) T_MODE;
349 else if (GET_MODE_SIZE (i) == 32)
350 m32r_mode_class[i] = 1 << (int) O_MODE;
351 else
352 m32r_mode_class[i] = 0;
353 break;
354 case MODE_FLOAT:
355 case MODE_COMPLEX_FLOAT:
356 if (GET_MODE_SIZE (i) <= 4)
357 m32r_mode_class[i] = 1 << (int) SF_MODE;
358 else if (GET_MODE_SIZE (i) == 8)
359 m32r_mode_class[i] = 1 << (int) DF_MODE;
360 else if (GET_MODE_SIZE (i) == 16)
361 m32r_mode_class[i] = 1 << (int) TF_MODE;
362 else if (GET_MODE_SIZE (i) == 32)
363 m32r_mode_class[i] = 1 << (int) OF_MODE;
364 else
365 m32r_mode_class[i] = 0;
366 break;
367 case MODE_CC:
368 m32r_mode_class[i] = 1 << (int) C_MODE;
369 break;
370 default:
371 m32r_mode_class[i] = 0;
372 break;
373 }
374 }
375
376 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
377 {
378 if (GPR_P (i))
379 m32r_regno_reg_class[i] = GENERAL_REGS;
380 else if (i == ARG_POINTER_REGNUM)
381 m32r_regno_reg_class[i] = GENERAL_REGS;
382 else
383 m32r_regno_reg_class[i] = NO_REGS;
384 }
385 }
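/* Sketch of how the two tables are meant to be combined (the real
   HARD_REGNO_MODE_OK macro lives in m32r.h; this is only the intended
   shape of the test):

     m32r_hard_regno_mode_ok[regno] & m32r_mode_class[mode]

   For instance, the entry initialized with C_MODES above accepts only
   modes whose class is C_MODE, i.e. the condition-code modes.  */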
386 \f
387 /* M32R specific attribute support.
388
389 interrupt - for interrupt functions
390
391 model - select code model used to access object
392
393 small: addresses use 24 bits, use bl to make calls
394 medium: addresses use 32 bits, use bl to make calls
395 large: addresses use 32 bits, use seth/add3/jl to make calls
396
397 Grep for MODEL in m32r.h for more info. */
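/* Illustration (hypothetical declarations, not part of this file):

     int counter __attribute__ ((model ("small")));         -- 24-bit addresses
     extern char buf[] __attribute__ ((model ("medium")));  -- 32-bit addresses, `bl' calls
     void helper (void) __attribute__ ((model ("large")));  -- 32-bit addresses, seth/add3/jl calls

   As init_idents below shows, the double-underscore spellings
   "__small__", "__medium__" and "__large__" are accepted as well.  */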
398
399 static tree small_ident1;
400 static tree small_ident2;
401 static tree medium_ident1;
402 static tree medium_ident2;
403 static tree large_ident1;
404 static tree large_ident2;
405
406 static void
407 init_idents (void)
408 {
409 if (small_ident1 == 0)
410 {
411 small_ident1 = get_identifier ("small");
412 small_ident2 = get_identifier ("__small__");
413 medium_ident1 = get_identifier ("medium");
414 medium_ident2 = get_identifier ("__medium__");
415 large_ident1 = get_identifier ("large");
416 large_ident2 = get_identifier ("__large__");
417 }
418 }
419
420 /* Handle a "model" attribute; arguments as in
421 struct attribute_spec.handler. */
422 static tree
423 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
424 tree args, int flags ATTRIBUTE_UNUSED,
425 bool *no_add_attrs)
426 {
427 tree arg;
428
429 init_idents ();
430 arg = TREE_VALUE (args);
431
432 if (arg != small_ident1
433 && arg != small_ident2
434 && arg != medium_ident1
435 && arg != medium_ident2
436 && arg != large_ident1
437 && arg != large_ident2)
438 {
439 warning (OPT_Wattributes, "invalid argument of %qs attribute",
440 IDENTIFIER_POINTER (name));
441 *no_add_attrs = true;
442 }
443
444 return NULL_TREE;
445 }
446 \f
447 /* Encode section information of DECL, which is either a VAR_DECL,
448 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
449
450 For the M32R we want to record:
451
452 - whether the object lives in .sdata/.sbss.
453 - what code model should be used to access the object
454 */
455
456 static void
457 m32r_encode_section_info (tree decl, rtx rtl, int first)
458 {
459 int extra_flags = 0;
460 tree model_attr;
461 enum m32r_model model;
462
463 default_encode_section_info (decl, rtl, first);
464
465 if (!DECL_P (decl))
466 return;
467
468 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
469 if (model_attr)
470 {
471 tree id;
472
473 init_idents ();
474
475 id = TREE_VALUE (TREE_VALUE (model_attr));
476
477 if (id == small_ident1 || id == small_ident2)
478 model = M32R_MODEL_SMALL;
479 else if (id == medium_ident1 || id == medium_ident2)
480 model = M32R_MODEL_MEDIUM;
481 else if (id == large_ident1 || id == large_ident2)
482 model = M32R_MODEL_LARGE;
483 else
484 gcc_unreachable (); /* shouldn't happen */
485 }
486 else
487 {
488 if (TARGET_MODEL_SMALL)
489 model = M32R_MODEL_SMALL;
490 else if (TARGET_MODEL_MEDIUM)
491 model = M32R_MODEL_MEDIUM;
492 else if (TARGET_MODEL_LARGE)
493 model = M32R_MODEL_LARGE;
494 else
495 gcc_unreachable (); /* shouldn't happen */
496 }
497 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
498
499 if (extra_flags)
500 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
501 }
502
503 /* Only mark the object as being small data area addressable if
504 it hasn't been explicitly marked with a code model.
505
506 The user can explicitly put an object in the small data area with the
507 section attribute. If the object is in sdata/sbss and marked with a
508 code model do both [put the object in .sdata and mark it as being
509 addressed with a specific code model - don't mark it as being addressed
510 with an SDA reloc though]. This is ok and might be useful at times. If
511 the object doesn't fit the linker will give an error. */
512
513 static bool
514 m32r_in_small_data_p (const_tree decl)
515 {
516 const_tree section;
517
518 if (TREE_CODE (decl) != VAR_DECL)
519 return false;
520
521 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
522 return false;
523
524 section = DECL_SECTION_NAME (decl);
525 if (section)
526 {
527 const char *const name = TREE_STRING_POINTER (section);
528 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
529 return true;
530 }
531 else
532 {
533 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
534 {
535 int size = int_size_in_bytes (TREE_TYPE (decl));
536
537 if (size > 0 && size <= g_switch_value)
538 return true;
539 }
540 }
541
542 return false;
543 }
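/* Illustration (hypothetical variables, not from this file):

     static int hits __attribute__ ((section (".sdata")));

   is accepted because of its explicit section name, independent of
   -msdata;

     static short level;

   is accepted by the size test above when -msdata is not "none", the
   object is writable, and its size is no larger than the -G threshold
   (which defaults to SDATA_DEFAULT_SIZE, see m32r_init).  */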
544
545 /* Do anything needed before RTL is emitted for each function. */
546
547 void
548 m32r_init_expanders (void)
549 {
550 /* ??? At one point there was code here. The function is left in
551 to make it easy to experiment. */
552 }
553 \f
554 int
555 call_operand (rtx op, enum machine_mode mode)
556 {
557 if (!MEM_P (op))
558 return 0;
559 op = XEXP (op, 0);
560 return call_address_operand (op, mode);
561 }
562
563 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
564
565 int
566 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
567 {
568 if (! TARGET_SDATA_USE)
569 return 0;
570
571 if (GET_CODE (op) == SYMBOL_REF)
572 return SYMBOL_REF_SMALL_P (op);
573
574 if (GET_CODE (op) == CONST
575 && GET_CODE (XEXP (op, 0)) == PLUS
576 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
577 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
578 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
579
580 return 0;
581 }
582
583 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
584
585 int
586 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
587 {
588 rtx sym;
589
590 if (flag_pic)
591 return 0;
592
593 if (GET_CODE (op) == LABEL_REF)
594 return TARGET_ADDR24;
595
596 if (GET_CODE (op) == SYMBOL_REF)
597 sym = op;
598 else if (GET_CODE (op) == CONST
599 && GET_CODE (XEXP (op, 0)) == PLUS
600 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
601 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
602 sym = XEXP (XEXP (op, 0), 0);
603 else
604 return 0;
605
606 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
607 return 1;
608
609 if (TARGET_ADDR24
610 && (CONSTANT_POOL_ADDRESS_P (sym)
611 || LIT_NAME_P (XSTR (sym, 0))))
612 return 1;
613
614 return 0;
615 }
616
617 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
618
619 int
620 addr32_operand (rtx op, enum machine_mode mode)
621 {
622 rtx sym;
623
624 if (GET_CODE (op) == LABEL_REF)
625 return TARGET_ADDR32;
626
627 if (GET_CODE (op) == SYMBOL_REF)
628 sym = op;
629 else if (GET_CODE (op) == CONST
630 && GET_CODE (XEXP (op, 0)) == PLUS
631 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
632 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
633 && ! flag_pic)
634 sym = XEXP (XEXP (op, 0), 0);
635 else
636 return 0;
637
638 return (! addr24_operand (sym, mode)
639 && ! small_data_operand (sym, mode));
640 }
641
642 /* Return 1 if OP is a function that can be called with the `bl' insn. */
643
644 int
645 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
646 {
647 if (flag_pic)
648 return 1;
649
650 if (GET_CODE (op) == SYMBOL_REF)
651 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
652
653 return TARGET_CALL26;
654 }
655
656 /* Return 1 if OP is a DImode const we want to handle inline.
657 This must match the code in the movdi pattern.
658 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
659
660 int
661 easy_di_const (rtx op)
662 {
663 rtx high_rtx, low_rtx;
664 HOST_WIDE_INT high, low;
665
666 split_double (op, &high_rtx, &low_rtx);
667 high = INTVAL (high_rtx);
668 low = INTVAL (low_rtx);
669 /* Pick constants loadable with 2 16-bit `ldi' insns. */
670 if (high >= -128 && high <= 127
671 && low >= -128 && low <= 127)
672 return 1;
673 return 0;
674 }
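/* Worked example: the DImode constant 0x0000000300000004 splits into
   high = 3 and low = 4, both within [-128, 127], so it is accepted and
   can be built with two 16-bit `ldi' insns.  0x0000000100000200 is
   rejected because its low word (512) is outside that range.  */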
675
676 /* Return 1 if OP is a DFmode const we want to handle inline.
677 This must match the code in the movdf pattern.
678 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
679
680 int
681 easy_df_const (rtx op)
682 {
683 REAL_VALUE_TYPE r;
684 long l[2];
685
686 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
687 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
688 if (l[0] == 0 && l[1] == 0)
689 return 1;
690 if ((l[0] & 0xffff) == 0 && l[1] == 0)
691 return 1;
692 return 0;
693 }
694
695 /* Return 1 if OP is (mem (reg ...)).
696 This is used in insn length calcs. */
697
698 int
699 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
700 {
701 return MEM_P (op) && REG_P (XEXP (op, 0));
702 }
703
704 /* Return nonzero if TYPE must be passed by indirect reference. */
705
706 static bool
707 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
708 enum machine_mode mode, const_tree type,
709 bool named ATTRIBUTE_UNUSED)
710 {
711 int size;
712
713 if (type)
714 size = int_size_in_bytes (type);
715 else
716 size = GET_MODE_SIZE (mode);
717
718 return (size < 0 || size > 8);
719 }
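/* Illustration (hypothetical types, not from this file): by the size
   test above, aggregates of at most 8 bytes travel by value and anything
   larger -- or of variable size -- by invisible reference:

     struct pair   { int a, b; };       -- 8 bytes,  passed by value
     struct triple { int a, b, c; };    -- 12 bytes, passed by reference  */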
720 \f
721 /* Comparisons. */
722
723 /* X and Y are two things to compare using CODE. Emit the compare insn and
724 return the rtx for compare [arg0 of the if_then_else].
725 If need_compare is true then the comparison insn must be generated, rather
726 than being subsumed into the following branch instruction. */
727
728 rtx
729 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
730 {
731 enum rtx_code compare_code;
732 enum rtx_code branch_code;
733 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
734 int must_swap = 0;
735
736 switch (code)
737 {
738 case EQ: compare_code = EQ; branch_code = NE; break;
739 case NE: compare_code = EQ; branch_code = EQ; break;
740 case LT: compare_code = LT; branch_code = NE; break;
741 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
742 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
743 case GE: compare_code = LT; branch_code = EQ; break;
744 case LTU: compare_code = LTU; branch_code = NE; break;
745 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
746 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
747 case GEU: compare_code = LTU; branch_code = EQ; break;
748
749 default:
750 gcc_unreachable ();
751 }
752
753 if (need_compare)
754 {
755 switch (compare_code)
756 {
757 case EQ:
758 if (satisfies_constraint_P (y) /* Reg equal to small const. */
759 && y != const0_rtx)
760 {
761 rtx tmp = gen_reg_rtx (SImode);
762
763 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
764 x = tmp;
765 y = const0_rtx;
766 }
767 else if (CONSTANT_P (y)) /* Reg equal to const. */
768 {
769 rtx tmp = force_reg (GET_MODE (x), y);
770 y = tmp;
771 }
772
773 if (register_operand (y, SImode) /* Reg equal to reg. */
774 || y == const0_rtx) /* Reg equal to zero. */
775 {
776 emit_insn (gen_cmp_eqsi_insn (x, y));
777
778 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
779 }
780 break;
781
782 case LT:
783 if (register_operand (y, SImode)
784 || satisfies_constraint_P (y))
785 {
786 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
787
788 switch (code)
789 {
790 case LT:
791 emit_insn (gen_cmp_ltsi_insn (x, y));
792 code = EQ;
793 break;
794 case LE:
795 if (y == const0_rtx)
796 tmp = const1_rtx;
797 else
798 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
799 emit_insn (gen_cmp_ltsi_insn (x, tmp));
800 code = EQ;
801 break;
802 case GT:
803 if (CONST_INT_P (y))
804 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
805 else
806 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
807 emit_insn (gen_cmp_ltsi_insn (x, tmp));
808 code = NE;
809 break;
810 case GE:
811 emit_insn (gen_cmp_ltsi_insn (x, y));
812 code = NE;
813 break;
814 default:
815 gcc_unreachable ();
816 }
817
818 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
819 }
820 break;
821
822 case LTU:
823 if (register_operand (y, SImode)
824 || satisfies_constraint_P (y))
825 {
826 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
827
828 switch (code)
829 {
830 case LTU:
831 emit_insn (gen_cmp_ltusi_insn (x, y));
832 code = EQ;
833 break;
834 case LEU:
835 if (y == const0_rtx)
836 tmp = const1_rtx;
837 else
838 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
839 emit_insn (gen_cmp_ltusi_insn (x, tmp));
840 code = EQ;
841 break;
842 case GTU:
843 if (CONST_INT_P (y))
844 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
845 else
846 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
847 emit_insn (gen_cmp_ltusi_insn (x, tmp));
848 code = NE;
849 break;
850 case GEU:
851 emit_insn (gen_cmp_ltusi_insn (x, y));
852 code = NE;
853 break;
854 default:
855 gcc_unreachable ();
856 }
857
858 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
859 }
860 break;
861
862 default:
863 gcc_unreachable ();
864 }
865 }
866 else
867 {
868 /* Reg/reg equal comparison. */
869 if (compare_code == EQ
870 && register_operand (y, SImode))
871 return gen_rtx_fmt_ee (code, CCmode, x, y);
872
873 /* Reg/zero signed comparison. */
874 if ((compare_code == EQ || compare_code == LT)
875 && y == const0_rtx)
876 return gen_rtx_fmt_ee (code, CCmode, x, y);
877
878 /* Reg/smallconst equal comparison. */
879 if (compare_code == EQ
880 && satisfies_constraint_P (y))
881 {
882 rtx tmp = gen_reg_rtx (SImode);
883
884 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
885 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
886 }
887
888 /* Reg/const equal comparison. */
889 if (compare_code == EQ
890 && CONSTANT_P (y))
891 {
892 rtx tmp = force_reg (GET_MODE (x), y);
893
894 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
895 }
896 }
897
898 if (CONSTANT_P (y))
899 {
900 if (must_swap)
901 y = force_reg (GET_MODE (x), y);
902 else
903 {
904 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
905
906 if (! ok_const)
907 y = force_reg (GET_MODE (x), y);
908 }
909 }
910
911 switch (compare_code)
912 {
913 case EQ :
914 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
915 break;
916 case LT :
917 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
918 break;
919 case LTU :
920 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
921 break;
922
923 default:
924 gcc_unreachable ();
925 }
926
927 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
928 }
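/* Worked example: for a GT comparison of two registers with
   need_compare == 0, the table at the top of this function selects
   compare_code = LT, branch_code = NE and must_swap = 1.  The code above
   therefore emits gen_cmp_ltsi_insn (y, x), setting the condition bit to
   (y < x), and returns (ne cc_reg 0) for the following branch to test.  */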
929
930 bool
931 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
932 {
933 enum machine_mode mode = GET_MODE (op0);
934
935 gcc_assert (mode == SImode);
936 switch (code)
937 {
938 case EQ:
939 if (!register_operand (op1, mode))
940 op1 = force_reg (mode, op1);
941
942 if (TARGET_M32RX || TARGET_M32R2)
943 {
944 if (!reg_or_zero_operand (op2, mode))
945 op2 = force_reg (mode, op2);
946
947 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
948 return true;
949 }
950 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
951 {
952 emit_insn (gen_seq_zero_insn (op0, op1));
953 return true;
954 }
955
956 if (!reg_or_eq_int16_operand (op2, mode))
957 op2 = force_reg (mode, op2);
958
959 emit_insn (gen_seq_insn (op0, op1, op2));
960 return true;
961
962 case NE:
963 if (!CONST_INT_P (op2)
964 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
965 {
966 rtx reg;
967
968 if (reload_completed || reload_in_progress)
969 return false;
970
971 reg = gen_reg_rtx (SImode);
972 emit_insn (gen_xorsi3 (reg, op1, op2));
973 op1 = reg;
974
975 if (!register_operand (op1, mode))
976 op1 = force_reg (mode, op1);
977
978 emit_insn (gen_sne_zero_insn (op0, op1));
979 return true;
980 }
981 return false;
982
983 case LT:
984 case GT:
985 if (code == GT)
986 {
987 rtx tmp = op2;
988 op2 = op1;
989 op1 = tmp;
990 code = LT;
991 }
992
993 if (!register_operand (op1, mode))
994 op1 = force_reg (mode, op1);
995
996 if (!reg_or_int16_operand (op2, mode))
997 op2 = force_reg (mode, op2);
998
999 emit_insn (gen_slt_insn (op0, op1, op2));
1000 return true;
1001
1002 case LTU:
1003 case GTU:
1004 if (code == GTU)
1005 {
1006 rtx tmp = op2;
1007 op2 = op1;
1008 op1 = tmp;
1009 code = LTU;
1010 }
1011
1012 if (!register_operand (op1, mode))
1013 op1 = force_reg (mode, op1);
1014
1015 if (!reg_or_int16_operand (op2, mode))
1016 op2 = force_reg (mode, op2);
1017
1018 emit_insn (gen_sltu_insn (op0, op1, op2));
1019 return true;
1020
1021 case GE:
1022 case GEU:
1023 if (!register_operand (op1, mode))
1024 op1 = force_reg (mode, op1);
1025
1026 if (!reg_or_int16_operand (op2, mode))
1027 op2 = force_reg (mode, op2);
1028
1029 if (code == GE)
1030 emit_insn (gen_sge_insn (op0, op1, op2));
1031 else
1032 emit_insn (gen_sgeu_insn (op0, op1, op2));
1033 return true;
1034
1035 case LE:
1036 case LEU:
1037 if (!register_operand (op1, mode))
1038 op1 = force_reg (mode, op1);
1039
1040 if (CONST_INT_P (op2))
1041 {
1042 HOST_WIDE_INT value = INTVAL (op2);
1043 if (value >= 2147483647)
1044 {
1045 emit_move_insn (op0, const1_rtx);
1046 return true;
1047 }
1048
1049 op2 = GEN_INT (value + 1);
1050 if (value < -32768 || value >= 32767)
1051 op2 = force_reg (mode, op2);
1052
1053 if (code == LEU)
1054 emit_insn (gen_sltu_insn (op0, op1, op2));
1055 else
1056 emit_insn (gen_slt_insn (op0, op1, op2));
1057 return true;
1058 }
1059
1060 if (!register_operand (op2, mode))
1061 op2 = force_reg (mode, op2);
1062
1063 if (code == LEU)
1064 emit_insn (gen_sleu_insn (op0, op1, op2));
1065 else
1066 emit_insn (gen_sle_insn (op0, op1, op2));
1067 return true;
1068
1069 default:
1070 gcc_unreachable ();
1071 }
1072 }
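/* Worked example: a store-condition for (le:SI r1 (const_int 5)) takes
   the LE/LEU arm above: the constant is bumped to 6 and gen_slt_insn is
   emitted, so the destination receives (r1 < 6), the same predicate.
   Constants outside the 16-bit range are forced into a register instead,
   and for signed LE an op2 of INT_MAX short-circuits to storing 1.  */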
1073
1074 \f
1075 /* Split a 2 word move (DI or DF) into component parts. */
1076
1077 rtx
1078 gen_split_move_double (rtx operands[])
1079 {
1080 enum machine_mode mode = GET_MODE (operands[0]);
1081 rtx dest = operands[0];
1082 rtx src = operands[1];
1083 rtx val;
1084
1085 /* We might have (SUBREG (MEM)) here, so just get rid of the
1086 subregs to make this code simpler. It is safe to call
1087 alter_subreg any time after reload. */
1088 if (GET_CODE (dest) == SUBREG)
1089 alter_subreg (&dest);
1090 if (GET_CODE (src) == SUBREG)
1091 alter_subreg (&src);
1092
1093 start_sequence ();
1094 if (REG_P (dest))
1095 {
1096 int dregno = REGNO (dest);
1097
1098 /* Reg = reg. */
1099 if (REG_P (src))
1100 {
1101 int sregno = REGNO (src);
1102
1103 int reverse = (dregno == sregno + 1);
1104
1105 /* We normally copy the low-numbered register first. However, if
1106 the first register of operand 0 is the same as the second register
1107 of operand 1, we must copy in the opposite order. */
1108 emit_insn (gen_rtx_SET (VOIDmode,
1109 operand_subword (dest, reverse, TRUE, mode),
1110 operand_subword (src, reverse, TRUE, mode)));
1111
1112 emit_insn (gen_rtx_SET (VOIDmode,
1113 operand_subword (dest, !reverse, TRUE, mode),
1114 operand_subword (src, !reverse, TRUE, mode)));
1115 }
1116
1117 /* Reg = constant. */
1118 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1119 {
1120 rtx words[2];
1121 split_double (src, &words[0], &words[1]);
1122 emit_insn (gen_rtx_SET (VOIDmode,
1123 operand_subword (dest, 0, TRUE, mode),
1124 words[0]));
1125
1126 emit_insn (gen_rtx_SET (VOIDmode,
1127 operand_subword (dest, 1, TRUE, mode),
1128 words[1]));
1129 }
1130
1131 /* Reg = mem. */
1132 else if (MEM_P (src))
1133 {
1134 /* If the high-address word is used in the address, we must load it
1135 last. Otherwise, load it first. */
1136 int reverse
1137 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1138
1139 /* We used to optimize loads from single registers as
1140
1141 ld r1,r3+; ld r2,r3
1142
1143 if r3 were not used subsequently. However, the REG_NOTES aren't
1144 propagated correctly by the reload phase, and it can cause bad
1145 code to be generated. We could still try:
1146
1147 ld r1,r3+; ld r2,r3; addi r3,-4
1148
1149 which saves 2 bytes and doesn't force longword alignment. */
1150 emit_insn (gen_rtx_SET (VOIDmode,
1151 operand_subword (dest, reverse, TRUE, mode),
1152 adjust_address (src, SImode,
1153 reverse * UNITS_PER_WORD)));
1154
1155 emit_insn (gen_rtx_SET (VOIDmode,
1156 operand_subword (dest, !reverse, TRUE, mode),
1157 adjust_address (src, SImode,
1158 !reverse * UNITS_PER_WORD)));
1159 }
1160 else
1161 gcc_unreachable ();
1162 }
1163
1164 /* Mem = reg. */
1165 /* We used to optimize stores through single registers as
1166
1167 st r1,r3; st r2,+r3
1168
1169 if r3 were not used subsequently. However, the REG_NOTES aren't
1170 propagated correctly by the reload phase, and it can cause bad
1171 code to be generated. We could still try:
1172
1173 st r1,r3; st r2,+r3; addi r3,-4
1174
1175 which saves 2 bytes and doesn't force longword alignment. */
1176 else if (MEM_P (dest) && REG_P (src))
1177 {
1178 emit_insn (gen_rtx_SET (VOIDmode,
1179 adjust_address (dest, SImode, 0),
1180 operand_subword (src, 0, TRUE, mode)));
1181
1182 emit_insn (gen_rtx_SET (VOIDmode,
1183 adjust_address (dest, SImode, UNITS_PER_WORD),
1184 operand_subword (src, 1, TRUE, mode)));
1185 }
1186
1187 else
1188 gcc_unreachable ();
1189
1190 val = get_insns ();
1191 end_sequence ();
1192 return val;
1193 }
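/* Worked example (register numbers are only illustrative): splitting a
   DImode copy from the pair (r6,r7) into the pair (r4,r5) emits the two
   SImode sets r4 := r6 and r5 := r7, lower-numbered register first.  If
   the destination pair were (r7,r8), `reverse' would be set because
   dregno == sregno + 1, and the order becomes r8 := r7 then r7 := r6,
   so the overlapping source word is read before it is overwritten.
   For a load, the destination register that also appears in the source
   address is filled last for the same reason.  */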
1194
1195 \f
1196 static int
1197 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1198 tree type, bool named ATTRIBUTE_UNUSED)
1199 {
1200 int words;
1201 unsigned int size =
1202 (((mode == BLKmode && type)
1203 ? (unsigned int) int_size_in_bytes (type)
1204 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1205 / UNITS_PER_WORD;
1206
1207 if (*cum >= M32R_MAX_PARM_REGS)
1208 words = 0;
1209 else if (*cum + size > M32R_MAX_PARM_REGS)
1210 words = (*cum + size) - M32R_MAX_PARM_REGS;
1211 else
1212 words = 0;
1213
1214 return words * UNITS_PER_WORD;
1215 }
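/* Worked example (assuming M32R_MAX_PARM_REGS is 4): with three argument
   registers already used (*cum == 3), a DImode argument occupies 2 words,
   so words = (3 + 2) - 4 = 1 and the function reports 4 bytes -- one word
   travels in the last register, the remainder on the stack.  Once
   *cum >= 4 the argument is entirely on the stack and 0 is reported.  */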
1216
1217 /* The ROUND_ADVANCE* macros are local to this file. */
1218 /* Round SIZE up to a word boundary. */
1219 #define ROUND_ADVANCE(SIZE) \
1220 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1221
1222 /* Round arg MODE/TYPE up to the next word boundary. */
1223 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1224 ((MODE) == BLKmode \
1225 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1226 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1227
1228 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1229 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1230
1231 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1232 a reg. This includes arguments that have to be passed by reference as the
1233 pointer to them is passed in a reg if one is available (and that is what
1234 we're given).
1235 This macro is only used in this file. */
1236 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1237 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1238
1239 /* Determine where to put an argument to a function.
1240 Value is zero to push the argument on the stack,
1241 or a hard register in which to store the argument.
1242
1243 MODE is the argument's machine mode.
1244 TYPE is the data type of the argument (as a tree).
1245 This is null for libcalls where that information may
1246 not be available.
1247 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1248 the preceding args and about the function being called.
1249 NAMED is nonzero if this argument is a named parameter
1250 (otherwise it is an extra parameter matching an ellipsis). */
1251 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1252 and the rest are pushed. */
1253
1254 static rtx
1255 m32r_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1256 const_tree type ATTRIBUTE_UNUSED,
1257 bool named ATTRIBUTE_UNUSED)
1258 {
1259 return (PASS_IN_REG_P (*cum, mode, type)
1260 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1261 : NULL_RTX);
1262 }
1263
1264 /* Update the data in CUM to advance over an argument
1265 of mode MODE and data type TYPE.
1266 (TYPE is null for libcalls where that information may not be available.) */
1267
1268 static void
1269 m32r_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1270 const_tree type, bool named ATTRIBUTE_UNUSED)
1271 {
1272 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1273 + ROUND_ADVANCE_ARG (mode, type));
1274 }
1275
1276 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1277
1278 static bool
1279 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1280 {
1281 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1282 }
1283
1284 /* Worker function for TARGET_FUNCTION_VALUE. */
1285
1286 static rtx
1287 m32r_function_value (const_tree valtype,
1288 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1289 bool outgoing ATTRIBUTE_UNUSED)
1290 {
1291 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1292 }
1293
1294 /* Worker function for TARGET_LIBCALL_VALUE. */
1295
1296 static rtx
1297 m32r_libcall_value (enum machine_mode mode,
1298 const_rtx fun ATTRIBUTE_UNUSED)
1299 {
1300 return gen_rtx_REG (mode, 0);
1301 }
1302
1303 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1304
1305 ??? What about r1 in DI/DF values. */
1306
1307 static bool
1308 m32r_function_value_regno_p (const unsigned int regno)
1309 {
1310 return (regno == 0);
1311 }
1312
1313 /* Do any needed setup for a variadic function. For the M32R, we must
1314 create a register parameter block, and then copy any anonymous arguments
1315 in registers to memory.
1316
1317 CUM has not been updated for the last named argument which has type TYPE
1318 and mode MODE, and we rely on this fact. */
1319
1320 static void
1321 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1322 tree type, int *pretend_size, int no_rtl)
1323 {
1324 int first_anon_arg;
1325
1326 if (no_rtl)
1327 return;
1328
1329 /* All BLKmode values are passed by reference. */
1330 gcc_assert (mode != BLKmode);
1331
1332 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1333 + ROUND_ADVANCE_ARG (mode, type));
1334
1335 if (first_anon_arg < M32R_MAX_PARM_REGS)
1336 {
1337 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1338 int first_reg_offset = first_anon_arg;
1339 /* Size in words to "pretend" allocate. */
1340 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1341 rtx regblock;
1342
1343 regblock = gen_frame_mem (BLKmode,
1344 plus_constant (arg_pointer_rtx,
1345 FIRST_PARM_OFFSET (0)));
1346 set_mem_alias_set (regblock, get_varargs_alias_set ());
1347 move_block_from_reg (first_reg_offset, regblock, size);
1348
1349 *pretend_size = (size * UNITS_PER_WORD);
1350 }
1351 }
1352
1353 \f
1354 /* Return true if INSN is a real instruction-bearing insn. */
1355
1356 static int
1357 m32r_is_insn (rtx insn)
1358 {
1359 return (NONDEBUG_INSN_P (insn)
1360 && GET_CODE (PATTERN (insn)) != USE
1361 && GET_CODE (PATTERN (insn)) != CLOBBER
1362 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1363 }
1364
1365 /* Increase the priority of long instructions so that the
1366 short instructions are scheduled ahead of the long ones. */
1367
1368 static int
1369 m32r_adjust_priority (rtx insn, int priority)
1370 {
1371 if (m32r_is_insn (insn)
1372 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1373 priority <<= 3;
1374
1375 return priority;
1376 }
1377
1378 \f
1379 /* Indicate how many instructions can be issued at the same time.
1380 This is sort of a lie. The m32r can issue only 1 long insn at
1381 once, but it can issue 2 short insns. The default therefore is
1382 set at 2, but this can be overridden by the command line option
1383 -missue-rate=1. */
1384
1385 static int
1386 m32r_issue_rate (void)
1387 {
1388 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1389 }
1390 \f
1391 /* Cost functions. */
1392
1393 /* Implement TARGET_MEMORY_MOVE_COST.
1394
1395 Memory is 3 times as expensive as registers.
1396 ??? Is that the right way to look at it? */
1397
1398 static int
1399 m32r_memory_move_cost (enum machine_mode mode,
1400 reg_class_t rclass ATTRIBUTE_UNUSED,
1401 bool in ATTRIBUTE_UNUSED)
1402 {
1403 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1404 return 6;
1405 else
1406 return 12;
1407 }
1408
1409 static bool
1410 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1411 bool speed ATTRIBUTE_UNUSED)
1412 {
1413 switch (code)
1414 {
1415 /* Small integers are as cheap as registers. 4 byte values can be
1416 fetched as immediate constants - let's give that the cost of an
1417 extra insn. */
1418 case CONST_INT:
1419 if (INT16_P (INTVAL (x)))
1420 {
1421 *total = 0;
1422 return true;
1423 }
1424 /* FALLTHRU */
1425
1426 case CONST:
1427 case LABEL_REF:
1428 case SYMBOL_REF:
1429 *total = COSTS_N_INSNS (1);
1430 return true;
1431
1432 case CONST_DOUBLE:
1433 {
1434 rtx high, low;
1435
1436 split_double (x, &high, &low);
1437 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1438 + !INT16_P (INTVAL (low)));
1439 return true;
1440 }
1441
1442 case MULT:
1443 *total = COSTS_N_INSNS (3);
1444 return true;
1445
1446 case DIV:
1447 case UDIV:
1448 case MOD:
1449 case UMOD:
1450 *total = COSTS_N_INSNS (10);
1451 return true;
1452
1453 default:
1454 return false;
1455 }
1456 }
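/* Worked example: a CONST_INT such as 100 satisfies INT16_P and costs 0;
   the SImode constant 0x12345678 falls through to the generic constant
   case and costs one insn; a DImode CONST_DOUBLE such as
   0x0001000000020000 splits into two words that each fail INT16_P and so
   costs two insns.  Multiplies are priced at 3 insns and divisions and
   modulos at 10, matching the cases above.  */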
1457 \f
1458 /* Type of function DECL.
1459
1460 The result is cached. To reset the cache at the end of a function,
1461 call with DECL = NULL_TREE. */
1462
1463 enum m32r_function_type
1464 m32r_compute_function_type (tree decl)
1465 {
1466 /* Cached value. */
1467 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1468 /* Last function we were called for. */
1469 static tree last_fn = NULL_TREE;
1470
1471 /* Resetting the cached value? */
1472 if (decl == NULL_TREE)
1473 {
1474 fn_type = M32R_FUNCTION_UNKNOWN;
1475 last_fn = NULL_TREE;
1476 return fn_type;
1477 }
1478
1479 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1480 return fn_type;
1481
1482 /* Compute function type. */
1483 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1484 ? M32R_FUNCTION_INTERRUPT
1485 : M32R_FUNCTION_NORMAL);
1486
1487 last_fn = decl;
1488 return fn_type;
1489 }
1490 \f/* Function prologue/epilogue handlers. */
1491
1492 /* M32R stack frames look like:
1493
1494 Before call After call
1495 +-----------------------+ +-----------------------+
1496 | | | |
1497 high | local variables, | | local variables, |
1498 mem | reg save area, etc. | | reg save area, etc. |
1499 | | | |
1500 +-----------------------+ +-----------------------+
1501 | | | |
1502 | arguments on stack. | | arguments on stack. |
1503 | | | |
1504 SP+0->+-----------------------+ +-----------------------+
1505 | reg parm save area, |
1506 | only created for |
1507 | variable argument |
1508 | functions |
1509 +-----------------------+
1510 | previous frame ptr |
1511 +-----------------------+
1512 | |
1513 | register save area |
1514 | |
1515 +-----------------------+
1516 | return address |
1517 +-----------------------+
1518 | |
1519 | local variables |
1520 | |
1521 +-----------------------+
1522 | |
1523 | alloca allocations |
1524 | |
1525 +-----------------------+
1526 | |
1527 low | arguments on stack |
1528 memory | |
1529 SP+0->+-----------------------+
1530
1531 Notes:
1532 1) The "reg parm save area" does not exist for non-variable-argument fns.
1533 2) The "reg parm save area" can be eliminated completely if we saved regs
1534 containing anonymous args separately but that complicates things too
1535 much (so it's not done).
1536 3) The return address is saved after the register save area so as to have as
1537 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1538
1539 /* Structure to be filled in by m32r_compute_frame_size with register
1540 save masks, and offsets for the current function. */
1541 struct m32r_frame_info
1542 {
1543 unsigned int total_size; /* # bytes that the entire frame takes up. */
1544 unsigned int extra_size; /* # bytes of extra stuff. */
1545 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1546 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1547 unsigned int reg_size; /* # bytes needed to store regs. */
1548 unsigned int var_size; /* # bytes that variables take up. */
1549 unsigned int gmask; /* Mask of saved gp registers. */
1550 unsigned int save_fp; /* Nonzero if fp must be saved. */
1551 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1552 int initialized; /* Nonzero if frame size already calculated. */
1553 };
1554
1555 /* Current frame information calculated by m32r_compute_frame_size. */
1556 static struct m32r_frame_info current_frame_info;
1557
1558 /* Zero structure to initialize current_frame_info. */
1559 static struct m32r_frame_info zero_frame_info;
1560
1561 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1562 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1563
1564 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1565 The return address and frame pointer are treated separately.
1566 Don't consider them here. */
1567 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1568 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1569 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1570
1571 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1572 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1573
1574 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1575 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1576
1577 /* Return the bytes needed to compute the frame pointer from the current
1578 stack pointer.
1579
1580 SIZE is the size needed for local variables. */
1581
1582 unsigned int
1583 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1584 {
1585 unsigned int regno;
1586 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1587 unsigned int reg_size, frame_size;
1588 unsigned int gmask;
1589 enum m32r_function_type fn_type;
1590 int interrupt_p;
1591 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1592 | crtl->profile);
1593
1594 var_size = M32R_STACK_ALIGN (size);
1595 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1596 pretend_size = crtl->args.pretend_args_size;
1597 extra_size = FIRST_PARM_OFFSET (0);
1598 total_size = extra_size + pretend_size + args_size + var_size;
1599 reg_size = 0;
1600 gmask = 0;
1601
1602 /* See if this is an interrupt handler. Call-used registers must be saved
1603 for them too. */
1604 fn_type = m32r_compute_function_type (current_function_decl);
1605 interrupt_p = M32R_INTERRUPT_P (fn_type);
1606
1607 /* Calculate space needed for registers. */
1608 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1609 {
1610 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1611 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1612 {
1613 reg_size += UNITS_PER_WORD;
1614 gmask |= 1 << regno;
1615 }
1616 }
1617
1618 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1619 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1620
1621 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1622 * UNITS_PER_WORD);
1623 total_size += reg_size;
1624
1625 /* ??? Not sure this is necessary, and I don't think the epilogue
1626 handler will do the right thing if this changes total_size. */
1627 total_size = M32R_STACK_ALIGN (total_size);
1628
1629 frame_size = total_size - (pretend_size + reg_size);
1630
1631 /* Save computed information. */
1632 current_frame_info.total_size = total_size;
1633 current_frame_info.extra_size = extra_size;
1634 current_frame_info.pretend_size = pretend_size;
1635 current_frame_info.var_size = var_size;
1636 current_frame_info.args_size = args_size;
1637 current_frame_info.reg_size = reg_size;
1638 current_frame_info.gmask = gmask;
1639 current_frame_info.initialized = reload_completed;
1640
1641 /* Ok, we're done. */
1642 return total_size;
1643 }
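/* Worked arithmetic example (assuming FIRST_PARM_OFFSET (0) is 0 and
   4-byte stack alignment): a non-varargs function with 20 bytes of
   locals, 8 bytes of outgoing arguments, one call-saved register to
   preserve, plus fp and lr gives var_size = 20, args_size = 8 and
   reg_size = 4 + 2 * 4 = 12, so total_size = 0 + 0 + 8 + 20 + 12 = 40
   and the prologue's frame_size is 40 - (0 + 12) = 28.  */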
1644
1645 /* Worker function for TARGET_CAN_ELIMINATE. */
1646
1647 bool
1648 m32r_can_eliminate (const int from, const int to)
1649 {
1650 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1651 ? ! frame_pointer_needed
1652 : true);
1653 }
1654
1655 \f
1656 /* The table we use to reference PIC data. */
1657 static rtx global_offset_table;
1658
1659 static void
1660 m32r_reload_lr (rtx sp, int size)
1661 {
1662 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1663
1664 if (size == 0)
1665 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1666 else if (size < 32768)
1667 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1668 gen_rtx_PLUS (Pmode, sp,
1669 GEN_INT (size)))));
1670 else
1671 {
1672 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1673
1674 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1675 emit_insn (gen_addsi3 (tmp, tmp, sp));
1676 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1677 }
1678
1679 emit_use (lr);
1680 }
1681
1682 void
1683 m32r_load_pic_register (void)
1684 {
1685 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1686 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1687 GEN_INT (TARGET_MODEL_SMALL)));
1688
1689 /* Need to emit this whether or not we obey regdecls,
1690 since setjmp/longjmp can cause life info to screw up. */
1691 emit_use (pic_offset_table_rtx);
1692 }
1693
1694 /* Expand the m32r prologue as a series of insns. */
1695
1696 void
1697 m32r_expand_prologue (void)
1698 {
1699 int regno;
1700 int frame_size;
1701 unsigned int gmask;
1702 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1703 | crtl->profile);
1704
1705 if (! current_frame_info.initialized)
1706 m32r_compute_frame_size (get_frame_size ());
1707
1708 gmask = current_frame_info.gmask;
1709
1710 /* These cases shouldn't happen. Catch them now. */
1711 gcc_assert (current_frame_info.total_size || !gmask);
1712
1713 /* Allocate space for register arguments if this is a variadic function. */
1714 if (current_frame_info.pretend_size != 0)
1715 {
1716 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1717 the wrong result on a 64-bit host. */
1718 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1719 emit_insn (gen_addsi3 (stack_pointer_rtx,
1720 stack_pointer_rtx,
1721 GEN_INT (-pretend_size)));
1722 }
1723
1724 /* Save any registers we need to and set up fp. */
1725 if (current_frame_info.save_fp)
1726 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1727
1728 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1729
1730 /* Save any needed call-saved regs (and call-used if this is an
1731 interrupt handler). */
1732 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1733 {
1734 if ((gmask & (1 << regno)) != 0)
1735 emit_insn (gen_movsi_push (stack_pointer_rtx,
1736 gen_rtx_REG (Pmode, regno)));
1737 }
1738
1739 if (current_frame_info.save_lr)
1740 emit_insn (gen_movsi_push (stack_pointer_rtx,
1741 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1742
1743 /* Allocate the stack frame. */
1744 frame_size = (current_frame_info.total_size
1745 - (current_frame_info.pretend_size
1746 + current_frame_info.reg_size));
1747
1748 if (frame_size == 0)
1749 ; /* Nothing to do. */
1750 else if (frame_size <= 32768)
1751 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1752 GEN_INT (-frame_size)));
1753 else
1754 {
1755 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1756
1757 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1758 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1759 }
1760
1761 if (frame_pointer_needed)
1762 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1763
1764 if (crtl->profile)
1765 /* Push lr for mcount (form_pc, x). */
1766 emit_insn (gen_movsi_push (stack_pointer_rtx,
1767 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1768
1769 if (pic_reg_used)
1770 {
1771 m32r_load_pic_register ();
1772 m32r_reload_lr (stack_pointer_rtx,
1773 (crtl->profile ? 0 : frame_size));
1774 }
1775
1776 if (crtl->profile && !pic_reg_used)
1777 emit_insn (gen_blockage ());
1778 }
1779
1780 \f
1781 /* Set up the stack and frame pointer (if desired) for the function.
1782 Note, if this is changed, you need to mirror the changes in
1783 m32r_compute_frame_size which calculates the prolog size. */
1784
1785 static void
1786 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1787 {
1788 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1789
1790 /* If this is an interrupt handler, mark it as such. */
1791 if (M32R_INTERRUPT_P (fn_type))
1792 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1793
1794 if (! current_frame_info.initialized)
1795 m32r_compute_frame_size (size);
1796
1797 /* This is only for the human reader. */
1798 fprintf (file,
1799 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1800 ASM_COMMENT_START,
1801 current_frame_info.var_size,
1802 current_frame_info.reg_size / 4,
1803 current_frame_info.args_size,
1804 current_frame_info.extra_size);
1805 }
1806 \f
1807 /* Output RTL to pop register REGNO from the stack. */
1808
1809 static void
1810 pop (int regno)
1811 {
1812 rtx x;
1813
1814 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1815 stack_pointer_rtx));
1816 add_reg_note (x, REG_INC, stack_pointer_rtx);
1817 }
1818
1819 /* Expand the m32r epilogue as a series of insns. */
1820
1821 void
1822 m32r_expand_epilogue (void)
1823 {
1824 int regno;
1825 int noepilogue = FALSE;
1826 int total_size;
1827
1828 gcc_assert (current_frame_info.initialized);
1829 total_size = current_frame_info.total_size;
1830
1831 if (total_size == 0)
1832 {
1833 rtx insn = get_last_insn ();
1834
1835 /* If the last insn was a BARRIER, we don't have to write any code
1836 because a jump (aka return) was put there. */
1837 if (insn && NOTE_P (insn))
1838 insn = prev_nonnote_insn (insn);
1839 if (insn && BARRIER_P (insn))
1840 noepilogue = TRUE;
1841 }
1842
1843 if (!noepilogue)
1844 {
1845 unsigned int var_size = current_frame_info.var_size;
1846 unsigned int args_size = current_frame_info.args_size;
1847 unsigned int gmask = current_frame_info.gmask;
1848 int can_trust_sp_p = !cfun->calls_alloca;
1849
1850 if (flag_exceptions)
1851 emit_insn (gen_blockage ());
1852
1853 /* The first thing to do is point the sp at the bottom of the register
1854 save area. */
1855 if (can_trust_sp_p)
1856 {
1857 unsigned int reg_offset = var_size + args_size;
1858
1859 if (reg_offset == 0)
1860 ; /* Nothing to do. */
1861 else if (reg_offset < 32768)
1862 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1863 GEN_INT (reg_offset)));
1864 else
1865 {
1866 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1867
1868 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1869 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1870 tmp));
1871 }
1872 }
1873 else if (frame_pointer_needed)
1874 {
1875 unsigned int reg_offset = var_size + args_size;
1876
1877 if (reg_offset == 0)
1878 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1879 else if (reg_offset < 32768)
1880 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1881 GEN_INT (reg_offset)));
1882 else
1883 {
1884 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1885
1886 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1887 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1888 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1889 tmp));
1890 }
1891 }
1892 else
1893 gcc_unreachable ();
1894
1895 if (current_frame_info.save_lr)
1896 pop (RETURN_ADDR_REGNUM);
1897
1898 /* Restore any saved registers, in reverse order of course. */
1899 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1900 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1901 {
1902 if ((gmask & (1L << regno)) != 0)
1903 pop (regno);
1904 }
1905
1906 if (current_frame_info.save_fp)
1907 pop (FRAME_POINTER_REGNUM);
1908
1909 /* Remove varargs area if present. */
1910 if (current_frame_info.pretend_size != 0)
1911 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1912 GEN_INT (current_frame_info.pretend_size)));
1913
1914 emit_insn (gen_blockage ());
1915 }
1916 }
1917
1918 /* Do any necessary cleanup after a function to restore stack, frame,
1919 and regs. */
1920
1921 static void
1922 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1923 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1924 {
1925 /* Reset state info for each function. */
1926 current_frame_info = zero_frame_info;
1927 m32r_compute_function_type (NULL_TREE);
1928 }
1929 \f
1930 /* Return nonzero if this function is known to have a null epilogue or an
1931    epilogue consisting of a single instruction.  */
1932
1933 int
1934 direct_return (void)
1935 {
1936 if (!reload_completed)
1937 return FALSE;
1938
1939 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1940 return FALSE;
1941
1942 if (! current_frame_info.initialized)
1943 m32r_compute_frame_size (get_frame_size ());
1944
1945 return current_frame_info.total_size == 0;
1946 }
1947
1948 \f
1949 /* PIC. */
1950
1951 int
1952 m32r_legitimate_pic_operand_p (rtx x)
1953 {
1954 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1955 return 0;
1956
1957 if (GET_CODE (x) == CONST
1958 && GET_CODE (XEXP (x, 0)) == PLUS
1959 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1960 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1961 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1962 return 0;
1963
1964 return 1;
1965 }
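
/* For example, under -fpic a bare SYMBOL_REF such as `foo', a LABEL_REF,
   or a CONST of the form `foo+4' is rejected here and has to be
   legitimized by m32r_legitimize_pic_address below, while registers and
   CONST_INTs remain legitimate as they are.  */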
1966
1967 rtx
1968 m32r_legitimize_pic_address (rtx orig, rtx reg)
1969 {
1970 #ifdef DEBUG_PIC
1971 printf ("m32r_legitimize_pic_address()\n");
1972 #endif
1973
1974 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1975 {
1976 rtx pic_ref, address;
1977 rtx insn;
1978 int subregs = 0;
1979
1980 if (reg == 0)
1981 {
1982 gcc_assert (!reload_in_progress && !reload_completed);
1983 reg = gen_reg_rtx (Pmode);
1984
1985 subregs = 1;
1986 }
1987
1988 if (subregs)
1989 address = gen_reg_rtx (Pmode);
1990 else
1991 address = reg;
1992
1993 crtl->uses_pic_offset_table = 1;
1994
1995 if (GET_CODE (orig) == LABEL_REF
1996 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1997 {
1998 emit_insn (gen_gotoff_load_addr (reg, orig));
1999 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
2000 return reg;
2001 }
2002
2003 emit_insn (gen_pic_load_addr (address, orig));
2004
2005 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
2006 pic_ref = gen_const_mem (Pmode, address);
2007 insn = emit_move_insn (reg, pic_ref);
2008 #if 0
2009 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2010    by the loop optimizer.  */
2011 set_unique_reg_note (insn, REG_EQUAL, orig);
2012 #endif
2013 return reg;
2014 }
2015 else if (GET_CODE (orig) == CONST)
2016 {
2017 rtx base, offset;
2018
2019 if (GET_CODE (XEXP (orig, 0)) == PLUS
2020 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
2021 return orig;
2022
2023 if (reg == 0)
2024 {
2025 gcc_assert (!reload_in_progress && !reload_completed);
2026 reg = gen_reg_rtx (Pmode);
2027 }
2028
2029 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2030 {
2031 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
2032 if (base == reg)
2033 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
2034 else
2035 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2036 }
2037 else
2038 return orig;
2039
2040 if (CONST_INT_P (offset))
2041 {
2042 if (INT16_P (INTVAL (offset)))
2043 return plus_constant (base, INTVAL (offset));
2044 else
2045 {
2046 gcc_assert (! reload_in_progress && ! reload_completed);
2047 offset = force_reg (Pmode, offset);
2048 }
2049 }
2050
2051 return gen_rtx_PLUS (Pmode, base, offset);
2052 }
2053
2054 return orig;
2055 }
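
/* A rough summary of the paths above: for a LABEL_REF or a local
   SYMBOL_REF the address is formed GOT-pointer-relative
   (gotoff_load_addr followed by an add of pic_offset_table_rtx) and
   used directly, whereas for a global symbol the GOT slot address is
   built the same way and the real address is then loaded from that
   slot through a const_mem.  CONST expressions are split into a
   legitimized base plus an offset, forcing the offset into a register
   when it does not fit in 16 bits.  */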
2056
2057 static rtx
2058 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2059 enum machine_mode mode ATTRIBUTE_UNUSED)
2060 {
2061 if (flag_pic)
2062 return m32r_legitimize_pic_address (x, NULL_RTX);
2063 else
2064 return x;
2065 }
2066
2067 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2068
2069 static bool
2070 m32r_mode_dependent_address_p (const_rtx addr)
2071 {
2072 if (GET_CODE (addr) == LO_SUM)
2073 return true;
2074
2075 return false;
2076 }
2077 \f
2078 /* Nested function support. */
2079
2080 /* Emit RTL insns to initialize the variable parts of a trampoline.
2081 FNADDR is an RTX for the address of the function's pure code.
2082 CXT is an RTX for the static chain value for the function. */
2083
2084 void
2085 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2086 rtx fnaddr ATTRIBUTE_UNUSED,
2087 rtx cxt ATTRIBUTE_UNUSED)
2088 {
2089 }
2090 \f
2091 static void
2092 m32r_file_start (void)
2093 {
2094 default_file_start ();
2095
2096 if (flag_verbose_asm)
2097 fprintf (asm_out_file,
2098 "%s M32R/D special options: -G %d\n",
2099 ASM_COMMENT_START, g_switch_value);
2100
2101 if (TARGET_LITTLE_ENDIAN)
2102 fprintf (asm_out_file, "\t.little\n");
2103 }
2104 \f
2105 /* Print operand X (an rtx) in assembler syntax to file FILE.
2106 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2107 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2108
2109 static void
2110 m32r_print_operand (FILE * file, rtx x, int code)
2111 {
2112 rtx addr;
2113
2114 switch (code)
2115 {
2116 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2117    indicate pre-increment 's'tores and 'p'ost-increment loads.  */
2118 case 's':
2119 if (REG_P (x))
2120 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2121 else
2122 output_operand_lossage ("invalid operand to %%s code");
2123 return;
2124
2125 case 'p':
2126 if (REG_P (x))
2127 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2128 else
2129 output_operand_lossage ("invalid operand to %%p code");
2130 return;
2131
2132 case 'R' :
2133 /* Write second word of DImode or DFmode reference,
2134 register or memory. */
2135 if (REG_P (x))
2136 fputs (reg_names[REGNO (x)+1], file);
2137 else if (MEM_P (x))
2138 {
2139 fprintf (file, "@(");
2140 /* Handle possible auto-increment. Since it is pre-increment and
2141 we have already done it, we can just use an offset of four. */
2142 /* ??? This is taken from rs6000.c I think. I don't think it is
2143 currently necessary, but keep it around. */
2144 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2145 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2146 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2147 else
2148 output_address (plus_constant (XEXP (x, 0), 4));
2149 fputc (')', file);
2150 }
2151 else
2152 output_operand_lossage ("invalid operand to %%R code");
2153 return;
2154
2155 case 'H' : /* High word. */
2156 case 'L' : /* Low word. */
2157 if (REG_P (x))
2158 {
2159 /* L = least significant word, H = most significant word. */
2160 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2161 fputs (reg_names[REGNO (x)], file);
2162 else
2163 fputs (reg_names[REGNO (x)+1], file);
2164 }
2165 else if (CONST_INT_P (x)
2166 || GET_CODE (x) == CONST_DOUBLE)
2167 {
2168 rtx first, second;
2169
2170 split_double (x, &first, &second);
2171 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2172 code == 'L' ? INTVAL (first) : INTVAL (second));
2173 }
2174 else
2175 output_operand_lossage ("invalid operand to %%H/%%L code");
2176 return;
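
      /* For instance, if operand 0 is a DImode value held in the register
	 pair r4/r5, %H0 prints the register holding the most significant
	 word and %L0 the one holding the least significant word (r4 or r5
	 depending on WORDS_BIG_ENDIAN); for a CONST_INT or CONST_DOUBLE the
	 corresponding 32-bit half is printed in hex.  The register pair is
	 only an example.  */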
2177
2178 case 'A' :
2179 {
2180 char str[30];
2181
2182 if (GET_CODE (x) != CONST_DOUBLE
2183 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2184 fatal_insn ("bad insn for 'A'", x);
2185
2186 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2187 fprintf (file, "%s", str);
2188 return;
2189 }
2190
2191 case 'B' : /* Bottom half. */
2192 case 'T' : /* Top half. */
2193 /* Output the argument to a `seth' insn (sets the Top half-word).
2194 For constants output arguments to a seth/or3 pair to set Top and
2195 Bottom halves. For symbols output arguments to a seth/add3 pair to
2196 set Top and Bottom halves. The difference exists because for
2197 constants seth/or3 is more readable but for symbols we need to use
2198 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
2199 switch (GET_CODE (x))
2200 {
2201 case CONST_INT :
2202 case CONST_DOUBLE :
2203 {
2204 rtx first, second;
2205
2206 split_double (x, &first, &second);
2207 x = WORDS_BIG_ENDIAN ? second : first;
2208 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2209 (code == 'B'
2210 ? INTVAL (x) & 0xffff
2211 : (INTVAL (x) >> 16) & 0xffff));
2212 }
2213 return;
2214 case CONST :
2215 case SYMBOL_REF :
2216 if (code == 'B'
2217 && small_data_operand (x, VOIDmode))
2218 {
2219 fputs ("sda(", file);
2220 output_addr_const (file, x);
2221 fputc (')', file);
2222 return;
2223 }
2224 /* fall through */
2225 case LABEL_REF :
2226 fputs (code == 'T' ? "shigh(" : "low(", file);
2227 output_addr_const (file, x);
2228 fputc (')', file);
2229 return;
2230 default :
2231 output_operand_lossage ("invalid operand to %%T/%%B code");
2232 return;
2233 }
2234 break;
2235
2236 case 'U' :
2237 /* ??? wip */
2238 /* Output a load/store with update indicator if appropriate. */
2239 if (MEM_P (x))
2240 {
2241 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2242 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2243 fputs (".a", file);
2244 }
2245 else
2246 output_operand_lossage ("invalid operand to %%U code");
2247 return;
2248
2249 case 'N' :
2250 /* Print a constant value negated. */
2251 if (CONST_INT_P (x))
2252 output_addr_const (file, GEN_INT (- INTVAL (x)));
2253 else
2254 output_operand_lossage ("invalid operand to %%N code");
2255 return;
2256
2257 case 'X' :
2258 /* Print a const_int in hex. Used in comments. */
2259 if (CONST_INT_P (x))
2260 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2261 return;
2262
2263 case '#' :
2264 fputs (IMMEDIATE_PREFIX, file);
2265 return;
2266
2267 case 0 :
2268 /* Do nothing special. */
2269 break;
2270
2271 default :
2272 /* Unknown flag. */
2273 output_operand_lossage ("invalid operand output code");
2274 }
2275
2276 switch (GET_CODE (x))
2277 {
2278 case REG :
2279 fputs (reg_names[REGNO (x)], file);
2280 break;
2281
2282 case MEM :
2283 addr = XEXP (x, 0);
2284 if (GET_CODE (addr) == PRE_INC)
2285 {
2286 if (!REG_P (XEXP (addr, 0)))
2287 fatal_insn ("pre-increment address is not a register", x);
2288
2289 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2290 }
2291 else if (GET_CODE (addr) == PRE_DEC)
2292 {
2293 if (!REG_P (XEXP (addr, 0)))
2294 fatal_insn ("pre-decrement address is not a register", x);
2295
2296 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2297 }
2298 else if (GET_CODE (addr) == POST_INC)
2299 {
2300 if (!REG_P (XEXP (addr, 0)))
2301 fatal_insn ("post-increment address is not a register", x);
2302
2303 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2304 }
2305 else
2306 {
2307 fputs ("@(", file);
2308 output_address (XEXP (x, 0));
2309 fputc (')', file);
2310 }
2311 break;
2312
2313 case CONST_DOUBLE :
2314 /* We handle SFmode constants here as output_addr_const doesn't. */
2315 if (GET_MODE (x) == SFmode)
2316 {
2317 REAL_VALUE_TYPE d;
2318 long l;
2319
2320 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2321 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2322 fprintf (file, "0x%08lx", l);
2323 break;
2324 }
2325
2326 /* Fall through. Let output_addr_const deal with it. */
2327
2328 default :
2329 output_addr_const (file, x);
2330 break;
2331 }
2332 }
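
/* A few illustrative expansions of the codes handled above, assuming
   operand 0 is the register r4 and operand 1 is the constant 10:

       %s0  ->  @+r4         (pre-increment store address)
       %p0  ->  @r4+         (post-increment load address)
       %N1  ->  -10
       %X1  ->  0xa
       %#   ->  the IMMEDIATE_PREFIX character

   The register and constant used here are only an example.  */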
2333
2334 /* Print a memory address as an operand to reference that memory location. */
2335
2336 static void
2337 m32r_print_operand_address (FILE * file, rtx addr)
2338 {
2339 rtx base;
2340 rtx index = 0;
2341 int offset = 0;
2342
2343 switch (GET_CODE (addr))
2344 {
2345 case REG :
2346 fputs (reg_names[REGNO (addr)], file);
2347 break;
2348
2349 case PLUS :
2350 if (CONST_INT_P (XEXP (addr, 0)))
2351 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2352 else if (CONST_INT_P (XEXP (addr, 1)))
2353 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2354 else
2355 base = XEXP (addr, 0), index = XEXP (addr, 1);
2356 if (REG_P (base))
2357 {
2358 /* Print the offset first (if present) to conform to the manual. */
2359 if (index == 0)
2360 {
2361 if (offset != 0)
2362 fprintf (file, "%d,", offset);
2363 fputs (reg_names[REGNO (base)], file);
2364 }
2365 /* The chip doesn't support this, but left in for generality. */
2366 else if (REG_P (index))
2367 fprintf (file, "%s,%s",
2368 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2369 /* Not sure this can happen, but leave in for now. */
2370 else if (GET_CODE (index) == SYMBOL_REF)
2371 {
2372 output_addr_const (file, index);
2373 fputc (',', file);
2374 fputs (reg_names[REGNO (base)], file);
2375 }
2376 else
2377 fatal_insn ("bad address", addr);
2378 }
2379 else if (GET_CODE (base) == LO_SUM)
2380 {
2381 gcc_assert (!index && REG_P (XEXP (base, 0)));
2382 if (small_data_operand (XEXP (base, 1), VOIDmode))
2383 fputs ("sda(", file);
2384 else
2385 fputs ("low(", file);
2386 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2387 fputs ("),", file);
2388 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2389 }
2390 else
2391 fatal_insn ("bad address", addr);
2392 break;
2393
2394 case LO_SUM :
2395 if (!REG_P (XEXP (addr, 0)))
2396 fatal_insn ("lo_sum not of register", addr);
2397 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2398 fputs ("sda(", file);
2399 else
2400 fputs ("low(", file);
2401 output_addr_const (file, XEXP (addr, 1));
2402 fputs ("),", file);
2403 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2404 break;
2405
2406 case PRE_INC : /* Assume SImode. */
2407 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2408 break;
2409
2410 case PRE_DEC : /* Assume SImode. */
2411 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2412 break;
2413
2414 case POST_INC : /* Assume SImode. */
2415 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2416 break;
2417
2418 default :
2419 output_addr_const (file, addr);
2420 break;
2421 }
2422 }
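
/* Some example renderings of the cases above, assuming the base
   register is r4:

       (reg r4)                      ->  r4
       (plus (reg r4) (const_int 8)) ->  8,r4
       (lo_sum (reg r4) sym)         ->  low(sym),r4  (or sda(sym),r4 for
                                          small-data symbols)
       (post_inc (reg r4))           ->  r4+

   The surrounding "@(" ... ")" for register-plus-offset memory operands
   is emitted by the MEM case of m32r_print_operand, not here.  */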
2423
2424 static bool
2425 m32r_print_operand_punct_valid_p (unsigned char code)
2426 {
2427 return m32r_punct_chars[code];
2428 }
2429
2430 /* Return true if one operand is the constant 0 and the other is the constant 1.  */
2431
2432 int
2433 zero_and_one (rtx operand1, rtx operand2)
2434 {
2435 return
2436 CONST_INT_P (operand1)
2437 && CONST_INT_P (operand2)
2438 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2439 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2440 }
2441
2442 /* Generate the correct assembler code to handle the conditional loading of a
2443    value into a register.  It is known that the operands satisfy the
2444    conditional_move_operand() predicate.  The destination is operands[0],
2445    the condition is operands[1], the 'true' value is operands[2] and the
2446    'false' value is operands[3].  */
2447
2448 char *
2449 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2450 {
2451 static char buffer [100];
2452 const char * dest = reg_names [REGNO (operands [0])];
2453
2454 buffer [0] = 0;
2455
2456 /* Destination must be a register. */
2457 gcc_assert (REG_P (operands [0]));
2458 gcc_assert (conditional_move_operand (operands [2], SImode));
2459 gcc_assert (conditional_move_operand (operands [3], SImode));
2460
2461 /* Check to see if the test is reversed. */
2462 if (GET_CODE (operands [1]) == NE)
2463 {
2464 rtx tmp = operands [2];
2465 operands [2] = operands [3];
2466 operands [3] = tmp;
2467 }
2468
2469 sprintf (buffer, "mvfc %s, cbr", dest);
2470
2471 /* If the true value was '0' then we need to invert the result of the move.  */
2472 if (INTVAL (operands [2]) == 0)
2473 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2474 dest, dest);
2475
2476 return buffer;
2477 }
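
/* Illustration only: if operands[0] is r4, the condition is EQ and the
   'true'/'false' values are 1 and 0, this returns just "mvfc r4, cbr";
   if the value left in operands[2] after the NE swap is 0, an
   "xor3 r4, r4, #1" is appended to invert the condition bit copied from
   cbr.  The register name is only an example.  */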
2478
2479 /* Returns true if the registers contained in the two
2480 rtl expressions are different. */
2481
2482 int
2483 m32r_not_same_reg (rtx a, rtx b)
2484 {
2485 int reg_a = -1;
2486 int reg_b = -2;
2487
2488 while (GET_CODE (a) == SUBREG)
2489 a = SUBREG_REG (a);
2490
2491 if (REG_P (a))
2492 reg_a = REGNO (a);
2493
2494 while (GET_CODE (b) == SUBREG)
2495 b = SUBREG_REG (b);
2496
2497 if (REG_P (b))
2498 reg_b = REGNO (b);
2499
2500 return reg_a != reg_b;
2501 }
2502
2503 \f
2504 rtx
2505 m32r_function_symbol (const char *name)
2506 {
2507 int extra_flags = 0;
2508 enum m32r_model model;
2509 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2510
2511 if (TARGET_MODEL_SMALL)
2512 model = M32R_MODEL_SMALL;
2513 else if (TARGET_MODEL_MEDIUM)
2514 model = M32R_MODEL_MEDIUM;
2515 else if (TARGET_MODEL_LARGE)
2516 model = M32R_MODEL_LARGE;
2517 else
2518 gcc_unreachable (); /* Shouldn't happen. */
2519 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2520
2521 if (extra_flags)
2522 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2523
2524 return sym;
2525 }
2526
2527 /* Use a library function to move some bytes. */
2528
2529 static void
2530 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2531 {
2532 /* We want to pass the size as Pmode, which will normally be SImode
2533 but will be DImode if we are using 64-bit longs and pointers. */
2534 if (GET_MODE (bytes_rtx) != VOIDmode
2535 && GET_MODE (bytes_rtx) != Pmode)
2536 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2537
2538 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2539 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2540 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2541 TYPE_UNSIGNED (sizetype)),
2542 TYPE_MODE (sizetype));
2543 }
2544
2545 /* Expand string/block move operations.
2546
2547 operands[0] is the pointer to the destination.
2548 operands[1] is the pointer to the source.
2549 operands[2] is the number of bytes to move.
2550 operands[3] is the alignment.
2551
2552 Returns 1 upon success, 0 otherwise. */
2553
2554 int
2555 m32r_expand_block_move (rtx operands[])
2556 {
2557 rtx orig_dst = operands[0];
2558 rtx orig_src = operands[1];
2559 rtx bytes_rtx = operands[2];
2560 rtx align_rtx = operands[3];
2561 int constp = CONST_INT_P (bytes_rtx);
2562 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2563 int align = INTVAL (align_rtx);
2564 int leftover;
2565 rtx src_reg;
2566 rtx dst_reg;
2567
2568 if (constp && bytes <= 0)
2569 return 1;
2570
2571 /* Move the address into scratch registers. */
2572 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2573 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2574
2575 if (align > UNITS_PER_WORD)
2576 align = UNITS_PER_WORD;
2577
2578 /* If we prefer size over speed, always use a function call.
2579 If we do not know the size, use a function call.
2580 If the blocks are not word aligned, use a function call. */
2581 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2582 {
2583 block_move_call (dst_reg, src_reg, bytes_rtx);
2584 return 0;
2585 }
2586
2587 leftover = bytes % MAX_MOVE_BYTES;
2588 bytes -= leftover;
2589
2590 /* If necessary, generate a loop to handle the bulk of the copy. */
2591 if (bytes)
2592 {
2593 rtx label = NULL_RTX;
2594 rtx final_src = NULL_RTX;
2595 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2596 rtx rounded_total = GEN_INT (bytes);
2597 rtx new_dst_reg = gen_reg_rtx (SImode);
2598 rtx new_src_reg = gen_reg_rtx (SImode);
2599
2600 /* If we are going to have to perform this loop more than
2601 once, then generate a label and compute the address the
2602 source register will contain upon completion of the final
2603 iteration. */
2604 if (bytes > MAX_MOVE_BYTES)
2605 {
2606 final_src = gen_reg_rtx (Pmode);
2607
2608 if (INT16_P (bytes))
2609 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2610 else
2611 {
2612 emit_insn (gen_movsi (final_src, rounded_total));
2613 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2614 }
2615
2616 label = gen_label_rtx ();
2617 emit_label (label);
2618 }
2619
2620 /* It is known that output_block_move() will update src_reg to point
2621 to the word after the end of the source block, and dst_reg to point
2622 to the last word of the destination block, provided that the block
2623 is MAX_MOVE_BYTES long. */
2624 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2625 new_dst_reg, new_src_reg));
2626 emit_move_insn (dst_reg, new_dst_reg);
2627 emit_move_insn (src_reg, new_src_reg);
2628 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2629
2630 if (bytes > MAX_MOVE_BYTES)
2631 {
2632 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2633 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2634 }
2635 }
2636
2637 if (leftover)
2638 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2639 gen_reg_rtx (SImode),
2640 gen_reg_rtx (SImode)));
2641 return 1;
2642 }
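
/* A rough picture of what the expansion above produces for a constant,
   word-aligned copy larger than MAX_MOVE_BYTES: a movmemsi_internal insn
   copying MAX_MOVE_BYTES per iteration inside a small loop that compares
   src_reg against its precomputed final value, followed by one more
   movmemsi_internal for the leftover bytes.  Unaligned, variable-sized
   or -Os copies fall back to the memcpy call in block_move_call.  */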
2643
2644 \f
2645 /* Emit loads/stores for a small constant word-aligned block_move.
2646
2647    operands[0] is the memory address of the destination.
2648    operands[1] is the memory address of the source.
2649    operands[2] is the number of bytes to move.
2650    operands[3] and operands[4] receive the updated destination/source addresses.
2651    operands[5] and operands[6] are the temp registers used for the data below.  */
2652
2653 void
2654 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2655 {
2656 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2657 int first_time;
2658 int got_extra = 0;
2659
2660 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2661
2662 /* We do not have a post-increment store available, so the first set of
2663    stores is done without any increment, then the remaining ones can use
2664    the pre-increment addressing mode.
2665
2666    Note: m32r_expand_block_move() also relies upon this behavior when
2667    building loops to copy large blocks.  */
2668 first_time = 1;
2669
2670 while (bytes > 0)
2671 {
2672 if (bytes >= 8)
2673 {
2674 if (first_time)
2675 {
2676 output_asm_insn ("ld\t%5, %p1", operands);
2677 output_asm_insn ("ld\t%6, %p1", operands);
2678 output_asm_insn ("st\t%5, @%0", operands);
2679 output_asm_insn ("st\t%6, %s0", operands);
2680 }
2681 else
2682 {
2683 output_asm_insn ("ld\t%5, %p1", operands);
2684 output_asm_insn ("ld\t%6, %p1", operands);
2685 output_asm_insn ("st\t%5, %s0", operands);
2686 output_asm_insn ("st\t%6, %s0", operands);
2687 }
2688
2689 bytes -= 8;
2690 }
2691 else if (bytes >= 4)
2692 {
2693 if (bytes > 4)
2694 got_extra = 1;
2695
2696 output_asm_insn ("ld\t%5, %p1", operands);
2697
2698 if (got_extra)
2699 output_asm_insn ("ld\t%6, %p1", operands);
2700
2701 if (first_time)
2702 output_asm_insn ("st\t%5, @%0", operands);
2703 else
2704 output_asm_insn ("st\t%5, %s0", operands);
2705
2706 bytes -= 4;
2707 }
2708 else
2709 {
2710 /* Get the entire next word, even though we do not want all of it.
2711    This saves us from doing several smaller loads, and we assume that
2712    we cannot cause a page fault when at least part of the word is in
2713    valid memory [since we don't get called if things aren't properly
2714    aligned].  */
2715 int dst_offset = first_time ? 0 : 4;
2716 /* The amount of increment we have to make to the
2717 destination pointer. */
2718 int dst_inc_amount = dst_offset + bytes - 4;
2719 /* The same for the source pointer. */
2720 int src_inc_amount = bytes;
2721 int last_shift;
2722 rtx my_operands[3];
2723
2724 /* If got_extra is true then we have already loaded
2725 the next word as part of loading and storing the previous word. */
2726 if (! got_extra)
2727 output_asm_insn ("ld\t%6, @%1", operands);
2728
2729 if (bytes >= 2)
2730 {
2731 bytes -= 2;
2732
2733 output_asm_insn ("sra3\t%5, %6, #16", operands);
2734 my_operands[0] = operands[5];
2735 my_operands[1] = GEN_INT (dst_offset);
2736 my_operands[2] = operands[0];
2737 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2738
2739 /* If there is a byte left to store then increment the
2740    destination address and shift the contents of the source
2741    register down by 8 bits.  We cannot do the address
2742    increment in the store-halfword instruction, because it does
2743    not have an auto-increment addressing mode.  */
2744 if (bytes > 0) /* assert (bytes == 1) */
2745 {
2746 dst_offset += 2;
2747 last_shift = 8;
2748 }
2749 }
2750 else
2751 last_shift = 24;
2752
2753 if (bytes > 0)
2754 {
2755 my_operands[0] = operands[6];
2756 my_operands[1] = GEN_INT (last_shift);
2757 output_asm_insn ("srai\t%0, #%1", my_operands);
2758 my_operands[0] = operands[6];
2759 my_operands[1] = GEN_INT (dst_offset);
2760 my_operands[2] = operands[0];
2761 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2762 }
2763
2764 /* Update the destination pointer if needed.  We have to do
2765    this so that the pattern matches what we output in this
2766    function.  */
2767 if (dst_inc_amount
2768 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2769 {
2770 my_operands[0] = operands[0];
2771 my_operands[1] = GEN_INT (dst_inc_amount);
2772 output_asm_insn ("addi\t%0, #%1", my_operands);
2773 }
2774
2775 /* Update the source pointer if needed.  We have to do this
2776    so that the pattern matches what we output in this
2777    function.  */
2778 if (src_inc_amount
2779 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2780 {
2781 my_operands[0] = operands[1];
2782 my_operands[1] = GEN_INT (src_inc_amount);
2783 output_asm_insn ("addi\t%0, #%1", my_operands);
2784 }
2785
2786 bytes = 0;
2787 }
2788
2789 first_time = 0;
2790 }
2791 }
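
/* Illustration only: for an 8 byte copy with operands[0] in r4,
   operands[1] in r5 and the temporaries in r6/r7, the templates above
   expand to roughly

       ld r6, @r5+
       ld r7, @r5+
       st r6, @r4
       st r7, @+r4

   i.e. post-increment loads via %p1 and, after the first plain store,
   pre-increment stores via %s0.  The register assignment is only an
   example.  */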
2792
2793 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2794
2795 int
2796 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2797 unsigned int new_reg)
2798 {
2799 /* Interrupt routines can't clobber any register that isn't already used. */
2800 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2801 && !df_regs_ever_live_p (new_reg))
2802 return 0;
2803
2804 return 1;
2805 }
2806
2807 rtx
2808 m32r_return_addr (int count)
2809 {
2810 if (count != 0)
2811 return const0_rtx;
2812
2813 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2814 }
2815
2816 static void
2817 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2818 {
2819 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2820 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2821 0x017e8e17 : 0x178e7e01, SImode));
2822 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2823 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2824 0x0c00ae86 : 0x86ae000c, SImode));
2825 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2826 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2827 0xe627871e : 0x1e8727e6, SImode));
2828 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2829 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2830 0xc616c626 : 0x26c61fc6, SImode));
2831 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2832 chain_value);
2833 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2834 XEXP (DECL_RTL (fndecl), 0));
2835
2836 if (m32r_cache_flush_trap >= 0)
2837 emit_insn (gen_flush_icache
2838 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2839 gen_int_mode (m32r_cache_flush_trap, SImode)));
2840 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2841 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2842 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2843 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2844 GEN_INT (3), SImode);
2845 }
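
/* The trampoline built above is laid out as four 32-bit instruction
   words (the byte order of each word depending on TARGET_LITTLE_ENDIAN),
   followed by the static chain value at offset 16 and the address of the
   target function at offset 20.  The instruction cache is then flushed
   either through the trap number given by m32r_cache_flush_trap or,
   failing that, by calling the m32r_cache_flush_func library routine on
   the TRAMPOLINE_SIZE bytes just written.  */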