1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "insn-config.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "alias.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "tm-constrs.h"
47 #include "builtins.h"
48
49 /* This file should be included last. */
50 #include "target-def.h"
51
52 /* Array of valid operand punctuation characters. */
53 static char m32r_punct_chars[256];
54
55 /* Machine-specific symbol_ref flags. */
56 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
57 #define SYMBOL_REF_MODEL(X) \
58 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
59
60 /* For string literals, etc. */
61 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
62
63 /* Forward declarations. */
64 static void m32r_option_override (void);
65 static void init_reg_tables (void);
66 static void block_move_call (rtx, rtx, rtx);
67 static int m32r_is_insn (rtx);
68 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
69 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
70 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
71 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
72 static void m32r_print_operand (FILE *, rtx, int);
73 static void m32r_print_operand_address (FILE *, machine_mode, rtx);
74 static bool m32r_print_operand_punct_valid_p (unsigned char code);
75 static void m32r_output_function_prologue (FILE *);
76 static void m32r_output_function_epilogue (FILE *);
77
78 static void m32r_file_start (void);
79
80 static int m32r_adjust_priority (rtx_insn *, int);
81 static int m32r_issue_rate (void);
82
83 static void m32r_encode_section_info (tree, rtx, int);
84 static bool m32r_in_small_data_p (const_tree);
85 static bool m32r_return_in_memory (const_tree, const_tree);
86 static rtx m32r_function_value (const_tree, const_tree, bool);
87 static rtx m32r_libcall_value (machine_mode, const_rtx);
88 static bool m32r_function_value_regno_p (const unsigned int);
89 static void m32r_setup_incoming_varargs (cumulative_args_t,
90 const function_arg_info &,
91 int *, int);
92 static void init_idents (void);
93 static bool m32r_rtx_costs (rtx, machine_mode, int, int, int *, bool speed);
94 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
95 static bool m32r_pass_by_reference (cumulative_args_t,
96 const function_arg_info &arg);
97 static int m32r_arg_partial_bytes (cumulative_args_t,
98 const function_arg_info &);
99 static rtx m32r_function_arg (cumulative_args_t, const function_arg_info &);
100 static void m32r_function_arg_advance (cumulative_args_t,
101 const function_arg_info &);
102 static bool m32r_can_eliminate (const int, const int);
103 static void m32r_conditional_register_usage (void);
104 static void m32r_trampoline_init (rtx, tree, rtx);
105 static bool m32r_legitimate_constant_p (machine_mode, rtx);
106 static bool m32r_attribute_identifier (const_tree);
107 static bool m32r_hard_regno_mode_ok (unsigned int, machine_mode);
108 static bool m32r_modes_tieable_p (machine_mode, machine_mode);
109 static HOST_WIDE_INT m32r_starting_frame_offset (void);
110 \f
111 /* M32R specific attributes. */
112
113 static const struct attribute_spec m32r_attribute_table[] =
114 {
115 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
116 affects_type_identity, handler, exclude } */
117 { "interrupt", 0, 0, true, false, false, false, NULL, NULL },
118 { "model", 1, 1, true, false, false, false, m32r_handle_model_attribute,
119 NULL },
120 { NULL, 0, 0, false, false, false, false, NULL, NULL }
121 };
122 \f
123 /* Initialize the GCC target structure. */
124 #undef TARGET_ATTRIBUTE_TABLE
125 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
126 #undef TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P
127 #define TARGET_ATTRIBUTE_TAKES_IDENTIFIER_P m32r_attribute_identifier
128
129 #undef TARGET_LRA_P
130 #define TARGET_LRA_P hook_bool_void_false
131
132 #undef TARGET_LEGITIMATE_ADDRESS_P
133 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
134 #undef TARGET_LEGITIMIZE_ADDRESS
135 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
136 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
137 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
138
139 #undef TARGET_ASM_ALIGNED_HI_OP
140 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
141 #undef TARGET_ASM_ALIGNED_SI_OP
142 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
143
144 #undef TARGET_PRINT_OPERAND
145 #define TARGET_PRINT_OPERAND m32r_print_operand
146 #undef TARGET_PRINT_OPERAND_ADDRESS
147 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
148 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
149 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
150
151 #undef TARGET_ASM_FUNCTION_PROLOGUE
152 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
153 #undef TARGET_ASM_FUNCTION_EPILOGUE
154 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
155
156 #undef TARGET_ASM_FILE_START
157 #define TARGET_ASM_FILE_START m32r_file_start
158
159 #undef TARGET_SCHED_ADJUST_PRIORITY
160 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
161 #undef TARGET_SCHED_ISSUE_RATE
162 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
163
164 #undef TARGET_OPTION_OVERRIDE
165 #define TARGET_OPTION_OVERRIDE m32r_option_override
166
167 #undef TARGET_ENCODE_SECTION_INFO
168 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
169 #undef TARGET_IN_SMALL_DATA_P
170 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
171
172
173 #undef TARGET_MEMORY_MOVE_COST
174 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
175 #undef TARGET_RTX_COSTS
176 #define TARGET_RTX_COSTS m32r_rtx_costs
177 #undef TARGET_ADDRESS_COST
178 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
179
180 #undef TARGET_PROMOTE_PROTOTYPES
181 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
182 #undef TARGET_RETURN_IN_MEMORY
183 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
184
185 #undef TARGET_FUNCTION_VALUE
186 #define TARGET_FUNCTION_VALUE m32r_function_value
187 #undef TARGET_LIBCALL_VALUE
188 #define TARGET_LIBCALL_VALUE m32r_libcall_value
189 #undef TARGET_FUNCTION_VALUE_REGNO_P
190 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
191
192 #undef TARGET_SETUP_INCOMING_VARARGS
193 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
194 #undef TARGET_MUST_PASS_IN_STACK
195 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
196 #undef TARGET_PASS_BY_REFERENCE
197 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
198 #undef TARGET_ARG_PARTIAL_BYTES
199 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
200 #undef TARGET_FUNCTION_ARG
201 #define TARGET_FUNCTION_ARG m32r_function_arg
202 #undef TARGET_FUNCTION_ARG_ADVANCE
203 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
204
205 #undef TARGET_CAN_ELIMINATE
206 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
207
208 #undef TARGET_CONDITIONAL_REGISTER_USAGE
209 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
210
211 #undef TARGET_TRAMPOLINE_INIT
212 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
213
214 #undef TARGET_LEGITIMATE_CONSTANT_P
215 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
216
217 #undef TARGET_HARD_REGNO_MODE_OK
218 #define TARGET_HARD_REGNO_MODE_OK m32r_hard_regno_mode_ok
219
220 #undef TARGET_MODES_TIEABLE_P
221 #define TARGET_MODES_TIEABLE_P m32r_modes_tieable_p
222
223 #undef TARGET_CONSTANT_ALIGNMENT
224 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
225
226 #undef TARGET_STARTING_FRAME_OFFSET
227 #define TARGET_STARTING_FRAME_OFFSET m32r_starting_frame_offset
228
229 #undef TARGET_HAVE_SPECULATION_SAFE_VALUE
230 #define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed
231
232 struct gcc_target targetm = TARGET_INITIALIZER;
233 \f
234 /* Called by m32r_option_override to initialize various things. */
235
236 void
237 m32r_init (void)
238 {
239 init_reg_tables ();
240
241 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
242 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
243 m32r_punct_chars['#'] = 1;
244 m32r_punct_chars['@'] = 1; /* ??? no longer used */
245
246 /* Provide default value if not specified. */
247 if (!global_options_set.x_g_switch_value)
248 g_switch_value = SDATA_DEFAULT_SIZE;
249 }
250
251 static void
252 m32r_option_override (void)
253 {
254 /* These need to be done at start up.
255 It's convenient to do them here. */
256 m32r_init ();
257 SUBTARGET_OVERRIDE_OPTIONS;
258 }
259
260 /* Vectors to keep interesting information about registers where it can easily
261 be found. We used to use the actual mode value as the bit number, but there
262 are (or may be) more than 32 modes now. Instead we use two tables: one
263 indexed by hard register number, and one indexed by mode. */
264
265 /* The purpose of m32r_mode_class is to shrink the range of modes so that
266 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
267 mapped into one m32r_mode_class mode. */
268
269 enum m32r_mode_class
270 {
271 C_MODE,
272 S_MODE, D_MODE, T_MODE, O_MODE,
273 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
274 };
275
276 /* Modes for condition codes. */
277 #define C_MODES (1 << (int) C_MODE)
278
279 /* Modes for single-word and smaller quantities. */
280 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
281
282 /* Modes for double-word and smaller quantities. */
283 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
284
285 /* Modes for quad-word and smaller quantities. */
286 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
287
288 /* Modes for accumulators. */
289 #define A_MODES (1 << (int) A_MODE)
290
291 /* Value is 1 if the register/mode pair is acceptable on the M32R. */
292
293 static const unsigned int m32r_hard_regno_modes[FIRST_PSEUDO_REGISTER] =
294 {
295 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
296 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
297 S_MODES, C_MODES, A_MODES, A_MODES
298 };
299
300 static unsigned int m32r_mode_class [NUM_MACHINE_MODES];
301
302 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
303
304 static void
305 init_reg_tables (void)
306 {
307 int i;
308
309 for (i = 0; i < NUM_MACHINE_MODES; i++)
310 {
311 machine_mode m = (machine_mode) i;
312
313 switch (GET_MODE_CLASS (m))
314 {
315 case MODE_INT:
316 case MODE_PARTIAL_INT:
317 case MODE_COMPLEX_INT:
318 if (GET_MODE_SIZE (m) <= 4)
319 m32r_mode_class[i] = 1 << (int) S_MODE;
320 else if (GET_MODE_SIZE (m) == 8)
321 m32r_mode_class[i] = 1 << (int) D_MODE;
322 else if (GET_MODE_SIZE (m) == 16)
323 m32r_mode_class[i] = 1 << (int) T_MODE;
324 else if (GET_MODE_SIZE (m) == 32)
325 m32r_mode_class[i] = 1 << (int) O_MODE;
326 else
327 m32r_mode_class[i] = 0;
328 break;
329 case MODE_FLOAT:
330 case MODE_COMPLEX_FLOAT:
331 if (GET_MODE_SIZE (m) <= 4)
332 m32r_mode_class[i] = 1 << (int) SF_MODE;
333 else if (GET_MODE_SIZE (m) == 8)
334 m32r_mode_class[i] = 1 << (int) DF_MODE;
335 else if (GET_MODE_SIZE (m) == 16)
336 m32r_mode_class[i] = 1 << (int) TF_MODE;
337 else if (GET_MODE_SIZE (m) == 32)
338 m32r_mode_class[i] = 1 << (int) OF_MODE;
339 else
340 m32r_mode_class[i] = 0;
341 break;
342 case MODE_CC:
343 m32r_mode_class[i] = 1 << (int) C_MODE;
344 break;
345 default:
346 m32r_mode_class[i] = 0;
347 break;
348 }
349 }
350
351 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
352 {
353 if (GPR_P (i))
354 m32r_regno_reg_class[i] = GENERAL_REGS;
355 else if (i == ARG_POINTER_REGNUM)
356 m32r_regno_reg_class[i] = GENERAL_REGS;
357 else
358 m32r_regno_reg_class[i] = NO_REGS;
359 }
360 }
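/* A hedged worked example of how the tables above combine (the consumer is
   m32r_hard_regno_mode_ok, defined later in this file):

       SImode is 4 bytes, so init_reg_tables sets
         m32r_mode_class[SImode] == (1 << S_MODE);
       r0 is a general register, so
         m32r_hard_regno_modes[0] == T_MODES, which includes the S_MODE bit;
       therefore (m32r_hard_regno_modes[0] & m32r_mode_class[SImode]) != 0
       and SImode is allowed in r0.

   Registers 13-16 carry only S_MODES, so an 8-byte DImode value (class
   D_MODE) would be rejected there under the same test.  */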
361 \f
362 /* M32R specific attribute support.
363
364 interrupt - for interrupt functions
365
366 model - select code model used to access object
367
368 small: addresses use 24 bits, use bl to make calls
369 medium: addresses use 32 bits, use bl to make calls
370 large: addresses use 32 bits, use seth/add3/jl to make calls
371
372 Grep for MODEL in m32r.h for more info. */
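/* Illustrative sketch (not part of the original source): the attribute is
   applied in user code roughly like this, assuming the usual GNU attribute
   syntax:

       extern int table[256] __attribute__ ((model ("large")));
       void handler (void) __attribute__ ((model ("small")));

   An object tagged "small" is assumed reachable with 24-bit addresses and
   callable with `bl', while "medium"/"large" objects get full 32-bit
   addressing as described above.  */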
373
374 static tree small_ident1;
375 static tree small_ident2;
376 static tree medium_ident1;
377 static tree medium_ident2;
378 static tree large_ident1;
379 static tree large_ident2;
380
381 static void
382 init_idents (void)
383 {
384 if (small_ident1 == 0)
385 {
386 small_ident1 = get_identifier ("small");
387 small_ident2 = get_identifier ("__small__");
388 medium_ident1 = get_identifier ("medium");
389 medium_ident2 = get_identifier ("__medium__");
390 large_ident1 = get_identifier ("large");
391 large_ident2 = get_identifier ("__large__");
392 }
393 }
394
395 /* Handle a "model" attribute; arguments as in
396 struct attribute_spec.handler. */
397 static tree
398 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
399 tree args, int flags ATTRIBUTE_UNUSED,
400 bool *no_add_attrs)
401 {
402 tree arg;
403
404 init_idents ();
405 arg = TREE_VALUE (args);
406
407 if (arg != small_ident1
408 && arg != small_ident2
409 && arg != medium_ident1
410 && arg != medium_ident2
411 && arg != large_ident1
412 && arg != large_ident2)
413 {
414 warning (OPT_Wattributes, "invalid argument of %qs attribute",
415 IDENTIFIER_POINTER (name));
416 *no_add_attrs = true;
417 }
418
419 return NULL_TREE;
420 }
421
422 static bool
423 m32r_attribute_identifier (const_tree name)
424 {
425 return strcmp (IDENTIFIER_POINTER (name), "model") == 0
426 || strcmp (IDENTIFIER_POINTER (name), "__model__") == 0;
427 }
428 \f
429 /* Encode section information of DECL, which is either a VAR_DECL,
430 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
431
432 For the M32R we want to record:
433
434 - whether the object lives in .sdata/.sbss.
435 - what code model should be used to access the object
436 */
437
438 static void
439 m32r_encode_section_info (tree decl, rtx rtl, int first)
440 {
441 int extra_flags = 0;
442 tree model_attr;
443 enum m32r_model model;
444
445 default_encode_section_info (decl, rtl, first);
446
447 if (!DECL_P (decl))
448 return;
449
450 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
451 if (model_attr)
452 {
453 tree id;
454
455 init_idents ();
456
457 id = TREE_VALUE (TREE_VALUE (model_attr));
458
459 if (id == small_ident1 || id == small_ident2)
460 model = M32R_MODEL_SMALL;
461 else if (id == medium_ident1 || id == medium_ident2)
462 model = M32R_MODEL_MEDIUM;
463 else if (id == large_ident1 || id == large_ident2)
464 model = M32R_MODEL_LARGE;
465 else
466 gcc_unreachable (); /* shouldn't happen */
467 }
468 else
469 {
470 if (TARGET_MODEL_SMALL)
471 model = M32R_MODEL_SMALL;
472 else if (TARGET_MODEL_MEDIUM)
473 model = M32R_MODEL_MEDIUM;
474 else if (TARGET_MODEL_LARGE)
475 model = M32R_MODEL_LARGE;
476 else
477 gcc_unreachable (); /* shouldn't happen */
478 }
479 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
480
481 if (extra_flags)
482 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
483 }
484
485 /* Only mark the object as being small data area addressable if
486 it hasn't been explicitly marked with a code model.
487
488 The user can explicitly put an object in the small data area with the
489 section attribute. If the object is in sdata/sbss and marked with a
490 code model, do both: put the object in .sdata and mark it as being
491 addressed with a specific code model (but don't mark it as being
492 addressed with an SDA reloc). This is OK and might be useful at times.
493 If the object doesn't fit, the linker will give an error. */
494
495 static bool
496 m32r_in_small_data_p (const_tree decl)
497 {
498 const char *section;
499
500 if (TREE_CODE (decl) != VAR_DECL)
501 return false;
502
503 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
504 return false;
505
506 section = DECL_SECTION_NAME (decl);
507 if (section)
508 {
509 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
510 return true;
511 }
512 else
513 {
514 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
515 {
516 int size = int_size_in_bytes (TREE_TYPE (decl));
517
518 if (size > 0 && size <= g_switch_value)
519 return true;
520 }
521 }
522
523 return false;
524 }
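/* For illustration only (assuming small-data support is enabled and the
   default -G / g_switch_value setting), the predicate above classifies
   objects roughly like this:

       int counter;                                     // small: size <= g_switch_value
       int big[1024];                                   // too large: ordinary data
       int fixed __attribute__ ((section (".sdata")));  // explicit section: small data
       const int ro = 42;                               // read-only: not small data
 */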
525
526 /* Do anything needed before RTL is emitted for each function. */
527
528 void
529 m32r_init_expanders (void)
530 {
531 /* ??? At one point there was code here. The function is left in
532 to make it easy to experiment. */
533 }
534 \f
535 int
536 call_operand (rtx op, machine_mode mode)
537 {
538 if (!MEM_P (op))
539 return 0;
540 op = XEXP (op, 0);
541 return call_address_operand (op, mode);
542 }
543
544 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
545
546 int
547 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
548 {
549 if (! TARGET_SDATA_USE)
550 return 0;
551
552 if (GET_CODE (op) == SYMBOL_REF)
553 return SYMBOL_REF_SMALL_P (op);
554
555 if (GET_CODE (op) == CONST
556 && GET_CODE (XEXP (op, 0)) == PLUS
557 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
558 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
559 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
560
561 return 0;
562 }
563
564 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
565
566 int
567 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
568 {
569 rtx sym;
570
571 if (flag_pic)
572 return 0;
573
574 if (GET_CODE (op) == LABEL_REF)
575 return TARGET_ADDR24;
576
577 if (GET_CODE (op) == SYMBOL_REF)
578 sym = op;
579 else if (GET_CODE (op) == CONST
580 && GET_CODE (XEXP (op, 0)) == PLUS
581 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
582 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
583 sym = XEXP (XEXP (op, 0), 0);
584 else
585 return 0;
586
587 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
588 return 1;
589
590 if (TARGET_ADDR24
591 && (CONSTANT_POOL_ADDRESS_P (sym)
592 || LIT_NAME_P (XSTR (sym, 0))))
593 return 1;
594
595 return 0;
596 }
597
598 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
599
600 int
601 addr32_operand (rtx op, machine_mode mode)
602 {
603 rtx sym;
604
605 if (GET_CODE (op) == LABEL_REF)
606 return TARGET_ADDR32;
607
608 if (GET_CODE (op) == SYMBOL_REF)
609 sym = op;
610 else if (GET_CODE (op) == CONST
611 && GET_CODE (XEXP (op, 0)) == PLUS
612 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
613 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
614 && ! flag_pic)
615 sym = XEXP (XEXP (op, 0), 0);
616 else
617 return 0;
618
619 return (! addr24_operand (sym, mode)
620 && ! small_data_operand (sym, mode));
621 }
622
623 /* Return 1 if OP is a function that can be called with the `bl' insn. */
624
625 int
626 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
627 {
628 if (flag_pic)
629 return 1;
630
631 if (GET_CODE (op) == SYMBOL_REF)
632 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
633
634 return TARGET_CALL26;
635 }
636
637 /* Return 1 if OP is a DImode const we want to handle inline.
638 This must match the code in the movdi pattern.
639 It is used by the 'G' constraint. */
640
641 int
642 easy_di_const (rtx op)
643 {
644 rtx high_rtx, low_rtx;
645 HOST_WIDE_INT high, low;
646
647 split_double (op, &high_rtx, &low_rtx);
648 high = INTVAL (high_rtx);
649 low = INTVAL (low_rtx);
650 /* Pick constants loadable with 2 16-bit `ldi' insns. */
651 if (high >= -128 && high <= 127
652 && low >= -128 && low <= 127)
653 return 1;
654 return 0;
655 }
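/* Example (illustrative): split_double of the DImode constant
   0x00000003fffffffb gives high == 3 and low == -5; both fit in the signed
   8-bit immediate range [-128,127] of the short `ldi', so the constant is
   handled inline with two `ldi' insns.  A value such as 0x12345678deadbeef
   fails the test and is loaded from memory instead.  */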
656
657 /* Return 1 if OP is a DFmode const we want to handle inline.
658 This must match the code in the movdf pattern.
659 It is used by the 'H' constraint. */
660
661 int
662 easy_df_const (rtx op)
663 {
664 long l[2];
665
666 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (op), l);
667 if (l[0] == 0 && l[1] == 0)
668 return 1;
669 if ((l[0] & 0xffff) == 0 && l[1] == 0)
670 return 1;
671 return 0;
672 }
673
674 /* Return 1 if OP is (mem (reg ...)).
675 This is used in insn length calcs. */
676
677 int
678 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
679 {
680 return MEM_P (op) && REG_P (XEXP (op, 0));
681 }
682
683 /* Return nonzero if ARG must be passed by indirect reference. */
684
685 static bool
686 m32r_pass_by_reference (cumulative_args_t, const function_arg_info &arg)
687 {
688 int size = arg.type_size_in_bytes ();
689 return (size < 0 || size > 8);
690 }
691 \f
692 /* Comparisons. */
693
694 /* X and Y are two things to compare using CODE. Emit the compare insn and
695 return the rtx for compare [arg0 of the if_then_else].
696 If need_compare is true then the comparison insn must be generated, rather
697 than being subsumed into the following branch instruction. */
698
699 rtx
700 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
701 {
702 enum rtx_code compare_code;
703 enum rtx_code branch_code;
704 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
705 int must_swap = 0;
706
707 switch (code)
708 {
709 case EQ: compare_code = EQ; branch_code = NE; break;
710 case NE: compare_code = EQ; branch_code = EQ; break;
711 case LT: compare_code = LT; branch_code = NE; break;
712 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
713 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
714 case GE: compare_code = LT; branch_code = EQ; break;
715 case LTU: compare_code = LTU; branch_code = NE; break;
716 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
717 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
718 case GEU: compare_code = LTU; branch_code = EQ; break;
719
720 default:
721 gcc_unreachable ();
722 }
723
724 if (need_compare)
725 {
726 switch (compare_code)
727 {
728 case EQ:
729 if (satisfies_constraint_P (y) /* Reg equal to small const. */
730 && y != const0_rtx)
731 {
732 rtx tmp = gen_reg_rtx (SImode);
733
734 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
735 x = tmp;
736 y = const0_rtx;
737 }
738 else if (CONSTANT_P (y)) /* Reg equal to const. */
739 {
740 rtx tmp = force_reg (GET_MODE (x), y);
741 y = tmp;
742 }
743
744 if (register_operand (y, SImode) /* Reg equal to reg. */
745 || y == const0_rtx) /* Reg equal to zero. */
746 {
747 emit_insn (gen_cmp_eqsi_insn (x, y));
748
749 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
750 }
751 break;
752
753 case LT:
754 if (register_operand (y, SImode)
755 || satisfies_constraint_P (y))
756 {
757 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
758
759 switch (code)
760 {
761 case LT:
762 emit_insn (gen_cmp_ltsi_insn (x, y));
763 code = EQ;
764 break;
765 case LE:
766 if (y == const0_rtx)
767 tmp = const1_rtx;
768 else
769 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
770 emit_insn (gen_cmp_ltsi_insn (x, tmp));
771 code = EQ;
772 break;
773 case GT:
774 if (CONST_INT_P (y))
775 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
776 else
777 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
778 emit_insn (gen_cmp_ltsi_insn (x, tmp));
779 code = NE;
780 break;
781 case GE:
782 emit_insn (gen_cmp_ltsi_insn (x, y));
783 code = NE;
784 break;
785 default:
786 gcc_unreachable ();
787 }
788
789 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
790 }
791 break;
792
793 case LTU:
794 if (register_operand (y, SImode)
795 || satisfies_constraint_P (y))
796 {
797 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
798
799 switch (code)
800 {
801 case LTU:
802 emit_insn (gen_cmp_ltusi_insn (x, y));
803 code = EQ;
804 break;
805 case LEU:
806 if (y == const0_rtx)
807 tmp = const1_rtx;
808 else
809 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
810 emit_insn (gen_cmp_ltusi_insn (x, tmp));
811 code = EQ;
812 break;
813 case GTU:
814 if (CONST_INT_P (y))
815 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
816 else
817 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
818 emit_insn (gen_cmp_ltusi_insn (x, tmp));
819 code = NE;
820 break;
821 case GEU:
822 emit_insn (gen_cmp_ltusi_insn (x, y));
823 code = NE;
824 break;
825 default:
826 gcc_unreachable ();
827 }
828
829 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
830 }
831 break;
832
833 default:
834 gcc_unreachable ();
835 }
836 }
837 else
838 {
839 /* Reg/reg equal comparison. */
840 if (compare_code == EQ
841 && register_operand (y, SImode))
842 return gen_rtx_fmt_ee (code, CCmode, x, y);
843
844 /* Reg/zero signed comparison. */
845 if ((compare_code == EQ || compare_code == LT)
846 && y == const0_rtx)
847 return gen_rtx_fmt_ee (code, CCmode, x, y);
848
849 /* Reg/smallconst equal comparison. */
850 if (compare_code == EQ
851 && satisfies_constraint_P (y))
852 {
853 rtx tmp = gen_reg_rtx (SImode);
854
855 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
856 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
857 }
858
859 /* Reg/const equal comparison. */
860 if (compare_code == EQ
861 && CONSTANT_P (y))
862 {
863 rtx tmp = force_reg (GET_MODE (x), y);
864
865 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
866 }
867 }
868
869 if (CONSTANT_P (y))
870 {
871 if (must_swap)
872 y = force_reg (GET_MODE (x), y);
873 else
874 {
875 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
876
877 if (! ok_const)
878 y = force_reg (GET_MODE (x), y);
879 }
880 }
881
882 switch (compare_code)
883 {
884 case EQ :
885 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
886 break;
887 case LT :
888 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
889 break;
890 case LTU :
891 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
892 break;
893
894 default:
895 gcc_unreachable ();
896 }
897
898 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
899 }
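/* For example (illustrative), gen_compare (LE, x, y, 0) with Y in a register
   swaps the operands and emits cmp_ltsi_insn (y, x), which sets the
   condition bit to y < x; the returned rtx tests that bit with EQ, so the
   branch is taken when !(y < x), which is exactly x <= y.  */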
900
901 bool
902 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
903 {
904 machine_mode mode = GET_MODE (op0);
905
906 gcc_assert (mode == SImode);
907 switch (code)
908 {
909 case EQ:
910 if (!register_operand (op1, mode))
911 op1 = force_reg (mode, op1);
912
913 if (TARGET_M32RX || TARGET_M32R2)
914 {
915 if (!reg_or_zero_operand (op2, mode))
916 op2 = force_reg (mode, op2);
917
918 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
919 return true;
920 }
921 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
922 {
923 emit_insn (gen_seq_zero_insn (op0, op1));
924 return true;
925 }
926
927 if (!reg_or_eq_int16_operand (op2, mode))
928 op2 = force_reg (mode, op2);
929
930 emit_insn (gen_seq_insn (op0, op1, op2));
931 return true;
932
933 case NE:
934 if (!CONST_INT_P (op2)
935 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
936 {
937 rtx reg;
938
939 if (reload_completed || reload_in_progress)
940 return false;
941
942 reg = gen_reg_rtx (SImode);
943 emit_insn (gen_xorsi3 (reg, op1, op2));
944 op1 = reg;
945
946 if (!register_operand (op1, mode))
947 op1 = force_reg (mode, op1);
948
949 emit_insn (gen_sne_zero_insn (op0, op1));
950 return true;
951 }
952 return false;
953
954 case LT:
955 case GT:
956 if (code == GT)
957 {
958 rtx tmp = op2;
959 op2 = op1;
960 op1 = tmp;
961 code = LT;
962 }
963
964 if (!register_operand (op1, mode))
965 op1 = force_reg (mode, op1);
966
967 if (!reg_or_int16_operand (op2, mode))
968 op2 = force_reg (mode, op2);
969
970 emit_insn (gen_slt_insn (op0, op1, op2));
971 return true;
972
973 case LTU:
974 case GTU:
975 if (code == GTU)
976 {
977 rtx tmp = op2;
978 op2 = op1;
979 op1 = tmp;
980 code = LTU;
981 }
982
983 if (!register_operand (op1, mode))
984 op1 = force_reg (mode, op1);
985
986 if (!reg_or_int16_operand (op2, mode))
987 op2 = force_reg (mode, op2);
988
989 emit_insn (gen_sltu_insn (op0, op1, op2));
990 return true;
991
992 case GE:
993 case GEU:
994 if (!register_operand (op1, mode))
995 op1 = force_reg (mode, op1);
996
997 if (!reg_or_int16_operand (op2, mode))
998 op2 = force_reg (mode, op2);
999
1000 if (code == GE)
1001 emit_insn (gen_sge_insn (op0, op1, op2));
1002 else
1003 emit_insn (gen_sgeu_insn (op0, op1, op2));
1004 return true;
1005
1006 case LE:
1007 case LEU:
1008 if (!register_operand (op1, mode))
1009 op1 = force_reg (mode, op1);
1010
1011 if (CONST_INT_P (op2))
1012 {
1013 HOST_WIDE_INT value = INTVAL (op2);
1014 if (value >= 2147483647)
1015 {
1016 emit_move_insn (op0, const1_rtx);
1017 return true;
1018 }
1019
1020 op2 = GEN_INT (value + 1);
1021 if (value < -32768 || value >= 32767)
1022 op2 = force_reg (mode, op2);
1023
1024 if (code == LEU)
1025 emit_insn (gen_sltu_insn (op0, op1, op2));
1026 else
1027 emit_insn (gen_slt_insn (op0, op1, op2));
1028 return true;
1029 }
1030
1031 if (!register_operand (op2, mode))
1032 op2 = force_reg (mode, op2);
1033
1034 if (code == LEU)
1035 emit_insn (gen_sleu_insn (op0, op1, op2));
1036 else
1037 emit_insn (gen_sle_insn (op0, op1, op2));
1038 return true;
1039
1040 default:
1041 gcc_unreachable ();
1042 }
1043 }
1044
1045 \f
1046 /* Split a 2 word move (DI or DF) into component parts. */
1047
1048 rtx
1049 gen_split_move_double (rtx operands[])
1050 {
1051 machine_mode mode = GET_MODE (operands[0]);
1052 rtx dest = operands[0];
1053 rtx src = operands[1];
1054 rtx val;
1055
1056 /* We might have (SUBREG (MEM)) here, so just get rid of the
1057 subregs to make this code simpler. It is safe to call
1058 alter_subreg any time after reload. */
1059 if (GET_CODE (dest) == SUBREG)
1060 alter_subreg (&dest, true);
1061 if (GET_CODE (src) == SUBREG)
1062 alter_subreg (&src, true);
1063
1064 start_sequence ();
1065 if (REG_P (dest))
1066 {
1067 int dregno = REGNO (dest);
1068
1069 /* Reg = reg. */
1070 if (REG_P (src))
1071 {
1072 int sregno = REGNO (src);
1073
1074 int reverse = (dregno == sregno + 1);
1075
1076 /* We normally copy the low-numbered register first. However, if
1077 the first register of operand 0 is the same as the second register of
1078 operand 1, we must copy in the opposite order. */
1079 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1080 operand_subword (src, reverse, TRUE, mode)));
1081
1082 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1083 operand_subword (src, !reverse, TRUE, mode)));
1084 }
1085
1086 /* Reg = constant. */
1087 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1088 {
1089 rtx words[2];
1090 split_double (src, &words[0], &words[1]);
1091 emit_insn (gen_rtx_SET (operand_subword (dest, 0, TRUE, mode),
1092 words[0]));
1093
1094 emit_insn (gen_rtx_SET (operand_subword (dest, 1, TRUE, mode),
1095 words[1]));
1096 }
1097
1098 /* Reg = mem. */
1099 else if (MEM_P (src))
1100 {
1101 /* If the high-address word is used in the address, we must load it
1102 last. Otherwise, load it first. */
1103 int reverse = refers_to_regno_p (dregno, XEXP (src, 0));
1104
1105 /* We used to optimize loads from single registers as
1106
1107 ld r1,r3+; ld r2,r3
1108
1109 if r3 were not used subsequently. However, the REG_NOTES aren't
1110 propagated correctly by the reload phase, and it can cause bad
1111 code to be generated. We could still try:
1112
1113 ld r1,r3+; ld r2,r3; addi r3,-4
1114
1115 which saves 2 bytes and doesn't force longword alignment. */
1116 emit_insn (gen_rtx_SET (operand_subword (dest, reverse, TRUE, mode),
1117 adjust_address (src, SImode,
1118 reverse * UNITS_PER_WORD)));
1119
1120 emit_insn (gen_rtx_SET (operand_subword (dest, !reverse, TRUE, mode),
1121 adjust_address (src, SImode,
1122 !reverse * UNITS_PER_WORD)));
1123 }
1124 else
1125 gcc_unreachable ();
1126 }
1127
1128 /* Mem = reg. */
1129 /* We used to optimize stores through single registers as
1130
1131 st r1,r3; st r2,+r3
1132
1133 if r3 were not used subsequently. However, the REG_NOTES aren't
1134 propagated correctly by the reload phase, and it can cause bad
1135 code to be generated. We could still try:
1136
1137 st r1,r3; st r2,+r3; addi r3,-4
1138
1139 which saves 2 bytes and doesn't force longword alignment. */
1140 else if (MEM_P (dest) && REG_P (src))
1141 {
1142 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, 0),
1143 operand_subword (src, 0, TRUE, mode)));
1144
1145 emit_insn (gen_rtx_SET (adjust_address (dest, SImode, UNITS_PER_WORD),
1146 operand_subword (src, 1, TRUE, mode)));
1147 }
1148
1149 else
1150 gcc_unreachable ();
1151
1152 val = get_insns ();
1153 end_sequence ();
1154 return val;
1155 }
1156
1157 \f
1158 static int
1159 m32r_arg_partial_bytes (cumulative_args_t cum_v, const function_arg_info &arg)
1160 {
1161 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1162
1163 int words;
1164 unsigned int size =
1165 (arg.promoted_size_in_bytes () + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1166
1167 if (*cum >= M32R_MAX_PARM_REGS)
1168 words = 0;
1169 else if (*cum + size > M32R_MAX_PARM_REGS)
1170 words = (*cum + size) - M32R_MAX_PARM_REGS;
1171 else
1172 words = 0;
1173
1174 return words * UNITS_PER_WORD;
1175 }
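/* Worked example (assuming M32R_MAX_PARM_REGS is 4, i.e. r0-r3 carry
   arguments): with *cum == 3 and a DImode argument (size == 2 words),
   *cum + size == 5 > 4, so words == 1 and 4 bytes are passed partially --
   the first word goes in the last parameter register and the remaining
   word is pushed on the stack.  */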
1176
1177 /* The ROUND_ADVANCE* macros are local to this file. */
1178 /* Round SIZE up to a word boundary. */
1179 #define ROUND_ADVANCE(SIZE) \
1180 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1181
1182 /* Round arg MODE/TYPE up to the next word boundary. */
1183 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1184 ((MODE) == BLKmode \
1185 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1186 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1187
1188 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1189 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1190
1191 /* Return a boolean indicating whether an arg of type TYPE and mode MODE will
1192 be passed in a reg. This includes arguments that have to be passed by reference, as the
1193 pointer to them is passed in a reg if one is available (and that is what
1194 we're given).
1195 This macro is only used in this file. */
1196 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1197 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
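/* A small worked example, assuming UNITS_PER_WORD is 4 on this target:
   ROUND_ADVANCE (1) == 1, ROUND_ADVANCE (5) == 2, and
   ROUND_ADVANCE_ARG (DImode, type) == 2, so a DImode argument occupies two
   argument registers (or two stack words).  ROUND_ADVANCE_CUM is an identity
   here because no extra alignment of the register cursor is required.  */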
1198
1199 /* Determine where to put an argument to a function.
1200 Value is zero to push the argument on the stack,
1201 or a hard register in which to store the argument.
1202
1203 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1204 the preceding args and about the function being called.
1205 ARG is a description of the argument. */
1206 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1207 and the rest are pushed. */
1208
1209 static rtx
1210 m32r_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
1211 {
1212 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1213
1214 return (PASS_IN_REG_P (*cum, arg.mode, arg.type)
1215 ? gen_rtx_REG (arg.mode,
1216 ROUND_ADVANCE_CUM (*cum, arg.mode, arg.type))
1217 : NULL_RTX);
1218 }
1219
1220 /* Update the data in CUM to advance over argument ARG. */
1221
1222 static void
1223 m32r_function_arg_advance (cumulative_args_t cum_v,
1224 const function_arg_info &arg)
1225 {
1226 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1227
1228 *cum = (ROUND_ADVANCE_CUM (*cum, arg.mode, arg.type)
1229 + ROUND_ADVANCE_ARG (arg.mode, arg.type));
1230 }
1231
1232 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1233
1234 static bool
1235 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1236 {
1237 cumulative_args_t dummy = pack_cumulative_args (NULL);
1238 function_arg_info arg (const_cast<tree> (type), /*named=*/false);
1239 return m32r_pass_by_reference (dummy, arg);
1240 }
1241
1242 /* Worker function for TARGET_FUNCTION_VALUE. */
1243
1244 static rtx
1245 m32r_function_value (const_tree valtype,
1246 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1247 bool outgoing ATTRIBUTE_UNUSED)
1248 {
1249 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1250 }
1251
1252 /* Worker function for TARGET_LIBCALL_VALUE. */
1253
1254 static rtx
1255 m32r_libcall_value (machine_mode mode,
1256 const_rtx fun ATTRIBUTE_UNUSED)
1257 {
1258 return gen_rtx_REG (mode, 0);
1259 }
1260
1261 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1262
1263 ??? What about r1 in DI/DF values. */
1264
1265 static bool
1266 m32r_function_value_regno_p (const unsigned int regno)
1267 {
1268 return (regno == 0);
1269 }
1270
1271 /* Do any needed setup for a variadic function. For the M32R, we must
1272 create a register parameter block, and then copy any anonymous arguments
1273 in registers to memory.
1274
1275 CUM has not been updated for the last named argument (which is given
1276 by ARG), and we rely on this fact. */
1277
1278 static void
1279 m32r_setup_incoming_varargs (cumulative_args_t cum,
1280 const function_arg_info &arg,
1281 int *pretend_size, int no_rtl)
1282 {
1283 int first_anon_arg;
1284
1285 if (no_rtl)
1286 return;
1287
1288 /* All BLKmode values are passed by reference. */
1289 gcc_assert (arg.mode != BLKmode);
1290
1291 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum),
1292 arg.mode, arg.type)
1293 + ROUND_ADVANCE_ARG (arg.mode, arg.type));
1294
1295 if (first_anon_arg < M32R_MAX_PARM_REGS)
1296 {
1297 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1298 int first_reg_offset = first_anon_arg;
1299 /* Size in words to "pretend" allocate. */
1300 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1301 rtx regblock;
1302
1303 regblock = gen_frame_mem (BLKmode,
1304 plus_constant (Pmode, arg_pointer_rtx,
1305 FIRST_PARM_OFFSET (0)));
1306 set_mem_alias_set (regblock, get_varargs_alias_set ());
1307 move_block_from_reg (first_reg_offset, regblock, size);
1308
1309 *pretend_size = (size * UNITS_PER_WORD);
1310 }
1311 }
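/* Illustrative example (assuming M32R_MAX_PARM_REGS is 4): for a variadic
   function such as

       int f (int fmt, ...);

   the last named argument occupies register slot 0, so first_anon_arg == 1.
   The code above then dumps r1-r3 into the register parameter block created
   at the start of the incoming argument area and reports
   *pretend_size == 3 * UNITS_PER_WORD, so va_arg can walk the anonymous
   arguments in memory.  */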
1312
1313 \f
1314 /* Return true if INSN is a real instruction, i.e. not a note, debug insn, USE, or CLOBBER. */
1315
1316 static int
1317 m32r_is_insn (rtx insn)
1318 {
1319 return (NONDEBUG_INSN_P (insn)
1320 && GET_CODE (PATTERN (insn)) != USE
1321 && GET_CODE (PATTERN (insn)) != CLOBBER);
1322 }
1323
1324 /* Increase the priority of long instructions so that the
1325 short instructions are scheduled ahead of the long ones. */
1326
1327 static int
1328 m32r_adjust_priority (rtx_insn *insn, int priority)
1329 {
1330 if (m32r_is_insn (insn)
1331 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1332 priority <<= 3;
1333
1334 return priority;
1335 }
1336
1337 \f
1338 /* Indicate how many instructions can be issued at the same time.
1339 This is sort of a lie. The m32r can issue only 1 long insn at
1340 once, but it can issue 2 short insns. The default therefore is
1341 set at 2, but this can be overridden by the command line option
1342 -missue-rate=1. */
1343
1344 static int
1345 m32r_issue_rate (void)
1346 {
1347 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1348 }
1349 \f
1350 /* Cost functions. */
1351 /* Memory is 3 times as expensive as registers.
1352 ??? Is that the right way to look at it? */
1353
1354 static int
1355 m32r_memory_move_cost (machine_mode mode,
1356 reg_class_t rclass ATTRIBUTE_UNUSED,
1357 bool in ATTRIBUTE_UNUSED)
1358 {
1359 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1360 return 6;
1361 else
1362 return 12;
1363 }
1364
1365 static bool
1366 m32r_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED,
1367 int outer_code ATTRIBUTE_UNUSED,
1368 int opno ATTRIBUTE_UNUSED, int *total,
1369 bool speed ATTRIBUTE_UNUSED)
1370 {
1371 int code = GET_CODE (x);
1372
1373 switch (code)
1374 {
1375 /* Small integers are as cheap as registers. 4 byte values can be
1376 fetched as immediate constants - let's give that the cost of an
1377 extra insn. */
1378 case CONST_INT:
1379 if (INT16_P (INTVAL (x)))
1380 {
1381 *total = 0;
1382 return true;
1383 }
1384 /* FALLTHRU */
1385
1386 case CONST:
1387 case LABEL_REF:
1388 case SYMBOL_REF:
1389 *total = COSTS_N_INSNS (1);
1390 return true;
1391
1392 case CONST_DOUBLE:
1393 {
1394 rtx high, low;
1395
1396 split_double (x, &high, &low);
1397 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1398 + !INT16_P (INTVAL (low)));
1399 return true;
1400 }
1401
1402 case MULT:
1403 *total = COSTS_N_INSNS (3);
1404 return true;
1405
1406 case DIV:
1407 case UDIV:
1408 case MOD:
1409 case UMOD:
1410 *total = COSTS_N_INSNS (10);
1411 return true;
1412
1413 default:
1414 return false;
1415 }
1416 }
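/* Illustrative costs under the hook above: the constant 100 fits in 16 bits
   and is free; 0x12345 does not, so it costs one insn (typically a seth/or3
   pair or a literal-pool load); a DImode constant costs one insn per 32-bit
   half outside the 16-bit range; a MULT costs 3 insns and DIV/MOD cost 10.  */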
1417 \f
1418 /* Type of function DECL.
1419
1420 The result is cached. To reset the cache at the end of a function,
1421 call with DECL = NULL_TREE. */
1422
1423 enum m32r_function_type
1424 m32r_compute_function_type (tree decl)
1425 {
1426 /* Cached value. */
1427 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1428 /* Last function we were called for. */
1429 static tree last_fn = NULL_TREE;
1430
1431 /* Resetting the cached value? */
1432 if (decl == NULL_TREE)
1433 {
1434 fn_type = M32R_FUNCTION_UNKNOWN;
1435 last_fn = NULL_TREE;
1436 return fn_type;
1437 }
1438
1439 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1440 return fn_type;
1441
1442 /* Compute function type. */
1443 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1444 ? M32R_FUNCTION_INTERRUPT
1445 : M32R_FUNCTION_NORMAL);
1446
1447 last_fn = decl;
1448 return fn_type;
1449 }
1450 \f/* Function prologue/epilogue handlers. */
1451
1452 /* M32R stack frames look like:
1453
1454 Before call After call
1455 +-----------------------+ +-----------------------+
1456 | | | |
1457 high | local variables, | | local variables, |
1458 mem | reg save area, etc. | | reg save area, etc. |
1459 | | | |
1460 +-----------------------+ +-----------------------+
1461 | | | |
1462 | arguments on stack. | | arguments on stack. |
1463 | | | |
1464 SP+0->+-----------------------+ +-----------------------+
1465 | reg parm save area, |
1466 | only created for |
1467 | variable argument |
1468 | functions |
1469 +-----------------------+
1470 | previous frame ptr |
1471 +-----------------------+
1472 | |
1473 | register save area |
1474 | |
1475 +-----------------------+
1476 | return address |
1477 +-----------------------+
1478 | |
1479 | local variables |
1480 | |
1481 +-----------------------+
1482 | |
1483 | alloca allocations |
1484 | |
1485 +-----------------------+
1486 | |
1487 low | arguments on stack |
1488 memory | |
1489 SP+0->+-----------------------+
1490
1491 Notes:
1492 1) The "reg parm save area" does not exist for non variable argument fns.
1493 2) The "reg parm save area" can be eliminated completely if we saved regs
1494 containing anonymous args separately but that complicates things too
1495 much (so it's not done).
1496 3) The return address is saved after the register save area so as to have as
1497 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1498
1499 /* Structure to be filled in by m32r_compute_frame_size with register
1500 save masks, and offsets for the current function. */
1501 struct m32r_frame_info
1502 {
1503 unsigned int total_size; /* # bytes that the entire frame takes up. */
1504 unsigned int extra_size; /* # bytes of extra stuff. */
1505 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1506 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1507 unsigned int reg_size; /* # bytes needed to store regs. */
1508 unsigned int var_size; /* # bytes that variables take up. */
1509 unsigned int gmask; /* Mask of saved gp registers. */
1510 unsigned int save_fp; /* Nonzero if fp must be saved. */
1511 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1512 int initialized; /* Nonzero if frame size already calculated. */
1513 };
1514
1515 /* Current frame information calculated by m32r_compute_frame_size. */
1516 static struct m32r_frame_info current_frame_info;
1517
1518 /* Zero structure to initialize current_frame_info. */
1519 static struct m32r_frame_info zero_frame_info;
1520
1521 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1522 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1523
1524 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1525 The return address and frame pointer are treated separately.
1526 Don't consider them here. */
1527 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1528 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1529 && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))
1530
1531 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1532 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1533
1534 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1535 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1536
1537 /* Return the bytes needed to compute the frame pointer from the current
1538 stack pointer.
1539
1540 SIZE is the size needed for local variables. */
1541
1542 unsigned int
1543 m32r_compute_frame_size (poly_int64 size) /* # of var. bytes allocated. */
1544 {
1545 unsigned int regno;
1546 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1547 unsigned int reg_size;
1548 unsigned int gmask;
1549 enum m32r_function_type fn_type;
1550 int interrupt_p;
1551 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1552 | crtl->profile);
1553
1554 var_size = M32R_STACK_ALIGN (size);
1555 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1556 pretend_size = crtl->args.pretend_args_size;
1557 extra_size = FIRST_PARM_OFFSET (0);
1558 total_size = extra_size + pretend_size + args_size + var_size;
1559 reg_size = 0;
1560 gmask = 0;
1561
1562 /* See if this is an interrupt handler. Call used registers must be saved
1563 for them too. */
1564 fn_type = m32r_compute_function_type (current_function_decl);
1565 interrupt_p = M32R_INTERRUPT_P (fn_type);
1566
1567 /* Calculate space needed for registers. */
1568 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1569 {
1570 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1571 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1572 {
1573 reg_size += UNITS_PER_WORD;
1574 gmask |= 1 << regno;
1575 }
1576 }
1577
1578 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1579 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1580
1581 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1582 * UNITS_PER_WORD);
1583 total_size += reg_size;
1584
1585 /* ??? Not sure this is necessary, and I don't think the epilogue
1586 handler will do the right thing if this changes total_size. */
1587 total_size = M32R_STACK_ALIGN (total_size);
1588
1589 /* frame_size = total_size - (pretend_size + reg_size); */
1590
1591 /* Save computed information. */
1592 current_frame_info.total_size = total_size;
1593 current_frame_info.extra_size = extra_size;
1594 current_frame_info.pretend_size = pretend_size;
1595 current_frame_info.var_size = var_size;
1596 current_frame_info.args_size = args_size;
1597 current_frame_info.reg_size = reg_size;
1598 current_frame_info.gmask = gmask;
1599 current_frame_info.initialized = reload_completed;
1600
1601 /* Ok, we're done. */
1602 return total_size;
1603 }
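/* A hedged example of the arithmetic above (assuming FIRST_PARM_OFFSET (0)
   is 0 and 4-byte stack alignment): a non-variadic function with 20 bytes of
   locals, no outgoing arguments, two call-saved registers live across calls,
   plus fp and lr to save, gets

       var_size = 20, args_size = 0, pretend_size = 0, extra_size = 0
       reg_size = (2 + 1 + 1) * UNITS_PER_WORD = 16
       total_size = 36

   which is already a multiple of the stack alignment, so no further rounding
   is applied.  */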
1604
1605 /* Worker function for TARGET_CAN_ELIMINATE. */
1606
1607 bool
1608 m32r_can_eliminate (const int from, const int to)
1609 {
1610 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1611 ? ! frame_pointer_needed
1612 : true);
1613 }
1614
1615 \f
1616 /* The table we use to reference PIC data. */
1617 static rtx global_offset_table;
1618
1619 static void
1620 m32r_reload_lr (rtx sp, int size)
1621 {
1622 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1623
1624 if (size == 0)
1625 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1626 else if (size < 32768)
1627 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1628 gen_rtx_PLUS (Pmode, sp,
1629 GEN_INT (size)))));
1630 else
1631 {
1632 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1633
1634 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1635 emit_insn (gen_addsi3 (tmp, tmp, sp));
1636 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1637 }
1638
1639 emit_use (lr);
1640 }
1641
1642 void
1643 m32r_load_pic_register (void)
1644 {
1645 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1646 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1647 GEN_INT (TARGET_MODEL_SMALL)));
1648
1649 /* Need to emit this whether or not we obey regdecls,
1650 since setjmp/longjmp can cause life info to screw up. */
1651 emit_use (pic_offset_table_rtx);
1652 }
1653
1654 /* Expand the m32r prologue as a series of insns. */
1655
1656 void
1657 m32r_expand_prologue (void)
1658 {
1659 int regno;
1660 int frame_size;
1661 unsigned int gmask;
1662 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1663 | crtl->profile);
1664
1665 if (! current_frame_info.initialized)
1666 m32r_compute_frame_size (get_frame_size ());
1667
1668 if (flag_stack_usage_info)
1669 current_function_static_stack_size = current_frame_info.total_size;
1670
1671 gmask = current_frame_info.gmask;
1672
1673 /* These cases shouldn't happen. Catch them now. */
1674 gcc_assert (current_frame_info.total_size || !gmask);
1675
1676 /* Allocate space for register arguments if this is a variadic function. */
1677 if (current_frame_info.pretend_size != 0)
1678 {
1679 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1680 the wrong result on a 64-bit host. */
1681 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1682 emit_insn (gen_addsi3 (stack_pointer_rtx,
1683 stack_pointer_rtx,
1684 GEN_INT (-pretend_size)));
1685 }
1686
1687 /* Save any registers we need to and set up fp. */
1688 if (current_frame_info.save_fp)
1689 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1690
1691 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1692
1693 /* Save any needed call-saved regs (and call-used if this is an
1694 interrupt handler). */
1695 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1696 {
1697 if ((gmask & (1 << regno)) != 0)
1698 emit_insn (gen_movsi_push (stack_pointer_rtx,
1699 gen_rtx_REG (Pmode, regno)));
1700 }
1701
1702 if (current_frame_info.save_lr)
1703 emit_insn (gen_movsi_push (stack_pointer_rtx,
1704 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1705
1706 /* Allocate the stack frame. */
1707 frame_size = (current_frame_info.total_size
1708 - (current_frame_info.pretend_size
1709 + current_frame_info.reg_size));
1710
1711 if (frame_size == 0)
1712 ; /* Nothing to do. */
1713 else if (frame_size <= 32768)
1714 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1715 GEN_INT (-frame_size)));
1716 else
1717 {
1718 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1719
1720 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1721 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1722 }
1723
1724 if (frame_pointer_needed)
1725 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1726
1727 if (crtl->profile)
1728 /* Push lr for mcount (form_pc, x). */
1729 emit_insn (gen_movsi_push (stack_pointer_rtx,
1730 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1731
1732 if (pic_reg_used)
1733 {
1734 m32r_load_pic_register ();
1735 m32r_reload_lr (stack_pointer_rtx,
1736 (crtl->profile ? 0 : frame_size));
1737 }
1738
1739 if (crtl->profile && !pic_reg_used)
1740 emit_insn (gen_blockage ());
1741 }
1742
1743 \f
1744 /* Set up the stack and frame pointer (if desired) for the function.
1745 Note, if this is changed, you need to mirror the changes in
1746 m32r_compute_frame_size which calculates the prolog size. */
1747
1748 static void
1749 m32r_output_function_prologue (FILE * file)
1750 {
1751 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1752
1753 /* If this is an interrupt handler, mark it as such. */
1754 if (M32R_INTERRUPT_P (fn_type))
1755 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1756
1757 if (! current_frame_info.initialized)
1758 m32r_compute_frame_size (get_frame_size ());
1759
1760 /* This is only for the human reader. */
1761 fprintf (file,
1762 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1763 ASM_COMMENT_START,
1764 current_frame_info.var_size,
1765 current_frame_info.reg_size / 4,
1766 current_frame_info.args_size,
1767 current_frame_info.extra_size);
1768 }
1769 \f
1770 /* Output RTL to pop register REGNO from the stack. */
1771
1772 static void
1773 pop (int regno)
1774 {
1775 rtx x;
1776
1777 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1778 stack_pointer_rtx));
1779 add_reg_note (x, REG_INC, stack_pointer_rtx);
1780 }
1781
1782 /* Expand the m32r epilogue as a series of insns. */
1783
1784 void
1785 m32r_expand_epilogue (void)
1786 {
1787 int regno;
1788 int noepilogue = FALSE;
1789 int total_size;
1790
1791 gcc_assert (current_frame_info.initialized);
1792 total_size = current_frame_info.total_size;
1793
1794 if (total_size == 0)
1795 {
1796 rtx_insn *insn = get_last_insn ();
1797
1798 /* If the last insn was a BARRIER, we don't have to write any code
1799 because a jump (aka return) was put there. */
1800 if (insn && NOTE_P (insn))
1801 insn = prev_nonnote_insn (insn);
1802 if (insn && BARRIER_P (insn))
1803 noepilogue = TRUE;
1804 }
1805
1806 if (!noepilogue)
1807 {
1808 unsigned int var_size = current_frame_info.var_size;
1809 unsigned int args_size = current_frame_info.args_size;
1810 unsigned int gmask = current_frame_info.gmask;
1811 int can_trust_sp_p = !cfun->calls_alloca;
1812
1813 if (flag_exceptions)
1814 emit_insn (gen_blockage ());
1815
1816 /* The first thing to do is point the sp at the bottom of the register
1817 save area. */
1818 if (can_trust_sp_p)
1819 {
1820 unsigned int reg_offset = var_size + args_size;
1821
1822 if (reg_offset == 0)
1823 ; /* Nothing to do. */
1824 else if (reg_offset < 32768)
1825 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1826 GEN_INT (reg_offset)));
1827 else
1828 {
1829 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1830
1831 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1832 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1833 tmp));
1834 }
1835 }
1836 else if (frame_pointer_needed)
1837 {
1838 unsigned int reg_offset = var_size + args_size;
1839
1840 if (reg_offset == 0)
1841 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1842 else if (reg_offset < 32768)
1843 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1844 GEN_INT (reg_offset)));
1845 else
1846 {
1847 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1848
1849 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1850 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1851 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1852 tmp));
1853 }
1854 }
1855 else
1856 gcc_unreachable ();
1857
1858 if (current_frame_info.save_lr)
1859 pop (RETURN_ADDR_REGNUM);
1860
1861 /* Restore any saved registers, in reverse order of course. */
1862 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1863 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1864 {
1865 if ((gmask & (1L << regno)) != 0)
1866 pop (regno);
1867 }
1868
1869 if (current_frame_info.save_fp)
1870 pop (FRAME_POINTER_REGNUM);
1871
1872 /* Remove varargs area if present. */
1873 if (current_frame_info.pretend_size != 0)
1874 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1875 GEN_INT (current_frame_info.pretend_size)));
1876
1877 emit_insn (gen_blockage ());
1878 }
1879 }
1880
1881 /* Do any necessary cleanup after a function to restore stack, frame,
1882 and regs. */
1883
1884 static void
1885 m32r_output_function_epilogue (FILE *)
1886 {
1887 /* Reset state info for each function. */
1888 current_frame_info = zero_frame_info;
1889 m32r_compute_function_type (NULL_TREE);
1890 }
1891 \f
1892 /* Return nonzero if this function is known to have a null or 1 instruction
1893 epilogue. */
1894
1895 int
1896 direct_return (void)
1897 {
1898 if (!reload_completed)
1899 return FALSE;
1900
1901 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1902 return FALSE;
1903
1904 if (! current_frame_info.initialized)
1905 m32r_compute_frame_size (get_frame_size ());
1906
1907 return current_frame_info.total_size == 0;
1908 }
1909
1910 \f
1911 /* PIC. */
1912
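/* Return 1 if X needs no PIC-specific transformation, i.e. it is not a
   symbolic address (a SYMBOL_REF, a LABEL_REF, or a symbol plus a constant
   offset); such addresses must go through m32r_legitimize_pic_address.  */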
1913 int
1914 m32r_legitimate_pic_operand_p (rtx x)
1915 {
1916 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1917 return 0;
1918
1919 if (GET_CODE (x) == CONST
1920 && GET_CODE (XEXP (x, 0)) == PLUS
1921 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1922 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1923 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1924 return 0;
1925
1926 return 1;
1927 }
1928
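/* Convert the symbolic address ORIG into a form that is legitimate under
   -fPIC, using REG (or a fresh pseudo if REG is null, which requires that
   reload has not started) as the destination.  Roughly: labels and local
   symbols are addressed GOT-relative, other symbols are loaded indirectly
   through their GOT slot, and symbol-plus-constant is split and handled
   recursively.  */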
1929 rtx
1930 m32r_legitimize_pic_address (rtx orig, rtx reg)
1931 {
1932 #ifdef DEBUG_PIC
1933 printf ("m32r_legitimize_pic_address()\n");
1934 #endif
1935
1936 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1937 {
1938 rtx pic_ref, address;
1939 int subregs = 0;
1940
1941 if (reg == 0)
1942 {
1943 gcc_assert (!reload_in_progress && !reload_completed);
1944 reg = gen_reg_rtx (Pmode);
1945
1946 subregs = 1;
1947 }
1948
1949 if (subregs)
1950 address = gen_reg_rtx (Pmode);
1951 else
1952 address = reg;
1953
1954 crtl->uses_pic_offset_table = 1;
1955
1956 if (GET_CODE (orig) == LABEL_REF
1957 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1958 {
1959 emit_insn (gen_gotoff_load_addr (reg, orig));
1960 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1961 return reg;
1962 }
1963
1964 emit_insn (gen_pic_load_addr (address, orig));
1965
1966 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1967 pic_ref = gen_const_mem (Pmode, address);
1968 emit_move_insn (reg, pic_ref);
1969 return reg;
1970 }
1971 else if (GET_CODE (orig) == CONST)
1972 {
1973 rtx base, offset;
1974
1975 if (GET_CODE (XEXP (orig, 0)) == PLUS
1976 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1977 return orig;
1978
1979 if (reg == 0)
1980 {
1981 gcc_assert (!reload_in_progress && !reload_completed);
1982 reg = gen_reg_rtx (Pmode);
1983 }
1984
1985 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1986 {
1987 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1988 if (base == reg)
1989 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1990 else
1991 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1992 }
1993 else
1994 return orig;
1995
1996 if (CONST_INT_P (offset))
1997 {
1998 if (INT16_P (INTVAL (offset)))
1999 return plus_constant (Pmode, base, INTVAL (offset));
2000 else
2001 {
2002 gcc_assert (! reload_in_progress && ! reload_completed);
2003 offset = force_reg (Pmode, offset);
2004 }
2005 }
2006
2007 return gen_rtx_PLUS (Pmode, base, offset);
2008 }
2009
2010 return orig;
2011 }
2012
2013 static rtx
2014 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2015 machine_mode mode ATTRIBUTE_UNUSED)
2016 {
2017 if (flag_pic)
2018 return m32r_legitimize_pic_address (x, NULL_RTX);
2019 else
2020 return x;
2021 }
2022
2023 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2024
2025 static bool
2026 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2027 {
2028 if (GET_CODE (addr) == LO_SUM)
2029 return true;
2030
2031 return false;
2032 }
2033 \f
2034 /* Nested function support. */
2035
2036 /* Emit RTL insns to initialize the variable parts of a trampoline.
2037 FNADDR is an RTX for the address of the function's pure code.
2038 CXT is an RTX for the static chain value for the function. */
2039
2040 void
2041 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2042 rtx fnaddr ATTRIBUTE_UNUSED,
2043 rtx cxt ATTRIBUTE_UNUSED)
2044 {
2045 }
2046 \f
2047 static void
2048 m32r_file_start (void)
2049 {
2050 default_file_start ();
2051
2052 if (flag_verbose_asm)
2053 fprintf (asm_out_file,
2054 "%s M32R/D special options: -G %d\n",
2055 ASM_COMMENT_START, g_switch_value);
2056
2057 if (TARGET_LITTLE_ENDIAN)
2058 fprintf (asm_out_file, "\t.little\n");
2059 }
2060 \f
2061 /* Print operand X (an rtx) in assembler syntax to file FILE.
2062 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2063 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2064
2065 static void
2066 m32r_print_operand (FILE * file, rtx x, int code)
2067 {
2068 rtx addr;
2069
2070 switch (code)
2071 {
2072 /* The 's' and 'p' codes are used by m32r_output_block_move () to
2073 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2074 case 's':
2075 if (REG_P (x))
2076 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2077 else
2078 output_operand_lossage ("invalid operand to %%s code");
2079 return;
2080
2081 case 'p':
2082 if (REG_P (x))
2083 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2084 else
2085 output_operand_lossage ("invalid operand to %%p code");
2086 return;
2087
2088 case 'R' :
2089 /* Write second word of DImode or DFmode reference,
2090 register or memory. */
2091 if (REG_P (x))
2092 fputs (reg_names[REGNO (x)+1], file);
2093 else if (MEM_P (x))
2094 {
2095 machine_mode mode = GET_MODE (x);
2096
2097 fprintf (file, "@(");
2098 /* Handle possible auto-increment. Since it is pre-increment and
2099 we have already done it, we can just use an offset of four. */
2100 /* ??? This is taken from rs6000.c I think. I don't think it is
2101 currently necessary, but keep it around. */
2102 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2103 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2104 output_address (mode, plus_constant (Pmode,
2105 XEXP (XEXP (x, 0), 0), 4));
2106 else
2107 output_address (mode, plus_constant (Pmode, XEXP (x, 0), 4));
2108 fputc (')', file);
2109 }
2110 else
2111 output_operand_lossage ("invalid operand to %%R code");
2112 return;
2113
2114 case 'H' : /* High word. */
2115 case 'L' : /* Low word. */
2116 if (REG_P (x))
2117 {
2118 /* L = least significant word, H = most significant word. */
2119 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2120 fputs (reg_names[REGNO (x)], file);
2121 else
2122 fputs (reg_names[REGNO (x)+1], file);
2123 }
2124 else if (CONST_INT_P (x)
2125 || GET_CODE (x) == CONST_DOUBLE)
2126 {
2127 rtx first, second;
2128
2129 split_double (x, &first, &second);
2130 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2131 code == 'L' ? INTVAL (first) : INTVAL (second));
2132 }
2133 else
2134 output_operand_lossage ("invalid operand to %%H/%%L code");
2135 return;
2136
2137 case 'A' :
2138 {
2139 char str[30];
2140
2141 if (GET_CODE (x) != CONST_DOUBLE
2142 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2143 fatal_insn ("bad insn for 'A'", x);
2144
2145 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2146 fprintf (file, "%s", str);
2147 return;
2148 }
2149
2150 case 'B' : /* Bottom half. */
2151 case 'T' : /* Top half. */
2152 /* Output the argument to a `seth' insn (sets the Top half-word).
2153 For constants output arguments to a seth/or3 pair to set Top and
2154 Bottom halves. For symbols output arguments to a seth/add3 pair to
2155 set Top and Bottom halves. The difference exists because for
2156 constants seth/or3 is more readable but for symbols we need to use
2157 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
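      /* For instance (illustrative only; the real insn patterns live in
	 m32r.md), a symbolic address is typically materialized as
	   seth rN, #shigh(sym)
	   add3 rN, rN, #low(sym)
	 while a 32-bit constant uses a seth/or3 pair with the %T and %B
	 halves printed as hex.  */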
2158 switch (GET_CODE (x))
2159 {
2160 case CONST_INT :
2161 case CONST_DOUBLE :
2162 {
2163 rtx first, second;
2164
2165 split_double (x, &first, &second);
2166 x = WORDS_BIG_ENDIAN ? second : first;
2167 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2168 (code == 'B'
2169 ? INTVAL (x) & 0xffff
2170 : (INTVAL (x) >> 16) & 0xffff));
2171 }
2172 return;
2173 case CONST :
2174 case SYMBOL_REF :
2175 if (code == 'B'
2176 && small_data_operand (x, VOIDmode))
2177 {
2178 fputs ("sda(", file);
2179 output_addr_const (file, x);
2180 fputc (')', file);
2181 return;
2182 }
2183 /* fall through */
2184 case LABEL_REF :
2185 fputs (code == 'T' ? "shigh(" : "low(", file);
2186 output_addr_const (file, x);
2187 fputc (')', file);
2188 return;
2189 default :
2190 output_operand_lossage ("invalid operand to %%T/%%B code");
2191 return;
2192 }
2193 break;
2194
2195 case 'U' :
2196 /* ??? wip */
2197 /* Output a load/store with update indicator if appropriate. */
2198 if (MEM_P (x))
2199 {
2200 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2201 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2202 fputs (".a", file);
2203 }
2204 else
2205 output_operand_lossage ("invalid operand to %%U code");
2206 return;
2207
2208 case 'N' :
2209 /* Print a constant value negated. */
2210 if (CONST_INT_P (x))
2211 output_addr_const (file, GEN_INT (- INTVAL (x)));
2212 else
2213 output_operand_lossage ("invalid operand to %%N code");
2214 return;
2215
2216 case 'X' :
2217 /* Print a const_int in hex. Used in comments. */
2218 if (CONST_INT_P (x))
2219 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2220 return;
2221
2222 case '#' :
2223 fputs (IMMEDIATE_PREFIX, file);
2224 return;
2225
2226 case 0 :
2227 /* Do nothing special. */
2228 break;
2229
2230 default :
2231 /* Unknown flag. */
2232 output_operand_lossage ("invalid operand output code");
2233 }
2234
2235 switch (GET_CODE (x))
2236 {
2237 case REG :
2238 fputs (reg_names[REGNO (x)], file);
2239 break;
2240
2241 case MEM :
2242 addr = XEXP (x, 0);
2243 if (GET_CODE (addr) == PRE_INC)
2244 {
2245 if (!REG_P (XEXP (addr, 0)))
2246 fatal_insn ("pre-increment address is not a register", x);
2247
2248 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2249 }
2250 else if (GET_CODE (addr) == PRE_DEC)
2251 {
2252 if (!REG_P (XEXP (addr, 0)))
2253 fatal_insn ("pre-decrement address is not a register", x);
2254
2255 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2256 }
2257 else if (GET_CODE (addr) == POST_INC)
2258 {
2259 if (!REG_P (XEXP (addr, 0)))
2260 fatal_insn ("post-increment address is not a register", x);
2261
2262 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2263 }
2264 else
2265 {
2266 fputs ("@(", file);
2267 output_address (GET_MODE (x), addr);
2268 fputc (')', file);
2269 }
2270 break;
2271
2272 case CONST_DOUBLE :
2273 /* We handle SFmode constants here as output_addr_const doesn't. */
2274 if (GET_MODE (x) == SFmode)
2275 {
2276 long l;
2277
2278 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2279 fprintf (file, "0x%08lx", l);
2280 break;
2281 }
2282
2283 /* FALLTHRU */
2284 /* Let output_addr_const deal with it. */
2285
2286 default :
2287 output_addr_const (file, x);
2288 break;
2289 }
2290 }
2291
2292 /* Print a memory address as an operand to reference that memory location. */
2293
2294 static void
2295 m32r_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
2296 {
2297 rtx base;
2298 rtx index = 0;
2299 int offset = 0;
2300
2301 switch (GET_CODE (addr))
2302 {
2303 case REG :
2304 fputs (reg_names[REGNO (addr)], file);
2305 break;
2306
2307 case PLUS :
2308 if (CONST_INT_P (XEXP (addr, 0)))
2309 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2310 else if (CONST_INT_P (XEXP (addr, 1)))
2311 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2312 else
2313 base = XEXP (addr, 0), index = XEXP (addr, 1);
2314 if (REG_P (base))
2315 {
2316 /* Print the offset first (if present) to conform to the manual. */
2317 if (index == 0)
2318 {
2319 if (offset != 0)
2320 fprintf (file, "%d,", offset);
2321 fputs (reg_names[REGNO (base)], file);
2322 }
2323 /* The chip doesn't support this, but it is left in for generality. */
2324 else if (REG_P (index))
2325 fprintf (file, "%s,%s",
2326 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2327 /* Not sure this can happen, but leave in for now. */
2328 else if (GET_CODE (index) == SYMBOL_REF)
2329 {
2330 output_addr_const (file, index);
2331 fputc (',', file);
2332 fputs (reg_names[REGNO (base)], file);
2333 }
2334 else
2335 fatal_insn ("bad address", addr);
2336 }
2337 else if (GET_CODE (base) == LO_SUM)
2338 {
2339 gcc_assert (!index && REG_P (XEXP (base, 0)));
2340 if (small_data_operand (XEXP (base, 1), VOIDmode))
2341 fputs ("sda(", file);
2342 else
2343 fputs ("low(", file);
2344 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2345 offset));
2346 fputs ("),", file);
2347 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2348 }
2349 else
2350 fatal_insn ("bad address", addr);
2351 break;
2352
2353 case LO_SUM :
2354 if (!REG_P (XEXP (addr, 0)))
2355 fatal_insn ("lo_sum not of register", addr);
2356 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2357 fputs ("sda(", file);
2358 else
2359 fputs ("low(", file);
2360 output_addr_const (file, XEXP (addr, 1));
2361 fputs ("),", file);
2362 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2363 break;
2364
2365 case PRE_INC : /* Assume SImode. */
2366 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2367 break;
2368
2369 case PRE_DEC : /* Assume SImode. */
2370 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2371 break;
2372
2373 case POST_INC : /* Assume SImode. */
2374 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2375 break;
2376
2377 default :
2378 output_addr_const (file, addr);
2379 break;
2380 }
2381 }
2382
2383 static bool
2384 m32r_print_operand_punct_valid_p (unsigned char code)
2385 {
2386 return m32r_punct_chars[code];
2387 }
2388
2389 /* Return true if the operands are the constants 0 and 1. */
2390
2391 int
2392 zero_and_one (rtx operand1, rtx operand2)
2393 {
2394 return
2395 CONST_INT_P (operand1)
2396 && CONST_INT_P (operand2)
2397 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2398 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2399 }
2400
2401 /* Generate the correct assembler code to handle the conditional loading of a
2402 value into a register. It is known that the operands satisfy the
2403 conditional_move_operand() function above. The destination is operand[0].
2404 The condition is operand [1]. The 'true' value is operand [2] and the
2405 'false' value is operand [3]. */
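/* For example (illustrative): a single "mvfc dest, cbr" copies the condition
   bit into the destination; an extra "xor3 dest, dest, #1" is appended when
   the loaded value needs to be the inverse of the condition bit.  */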
2406
2407 char *
2408 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2409 {
2410 static char buffer [100];
2411 const char * dest = reg_names [REGNO (operands [0])];
2412
2413 buffer [0] = 0;
2414
2415 /* Destination must be a register. */
2416 gcc_assert (REG_P (operands [0]));
2417 gcc_assert (conditional_move_operand (operands [2], SImode));
2418 gcc_assert (conditional_move_operand (operands [3], SImode));
2419
2420 /* Check to see if the test is reversed. */
2421 if (GET_CODE (operands [1]) == NE)
2422 {
2423 rtx tmp = operands [2];
2424 operands [2] = operands [3];
2425 operands [3] = tmp;
2426 }
2427
2428 sprintf (buffer, "mvfc %s, cbr", dest);
2429
2430 /* If the true value was '0' then we need to invert the result of the move. */
2431 if (INTVAL (operands [2]) == 0)
2432 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2433 dest, dest);
2434
2435 return buffer;
2436 }
2437
2438 /* Returns true if the registers contained in the two
2439 rtl expressions are different. */
2440
2441 int
2442 m32r_not_same_reg (rtx a, rtx b)
2443 {
2444 int reg_a = -1;
2445 int reg_b = -2;
2446
2447 while (GET_CODE (a) == SUBREG)
2448 a = SUBREG_REG (a);
2449
2450 if (REG_P (a))
2451 reg_a = REGNO (a);
2452
2453 while (GET_CODE (b) == SUBREG)
2454 b = SUBREG_REG (b);
2455
2456 if (REG_P (b))
2457 reg_b = REGNO (b);
2458
2459 return reg_a != reg_b;
2460 }
2461
2462 \f
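/* Return a SYMBOL_REF for the function NAME, with the code model selected
   by -mmodel recorded in its SYMBOL_REF flags.  Used for library calls
   such as memcpy and the cache-flush helper.  */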
2463 rtx
2464 m32r_function_symbol (const char *name)
2465 {
2466 int extra_flags = 0;
2467 enum m32r_model model;
2468 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2469
2470 if (TARGET_MODEL_SMALL)
2471 model = M32R_MODEL_SMALL;
2472 else if (TARGET_MODEL_MEDIUM)
2473 model = M32R_MODEL_MEDIUM;
2474 else if (TARGET_MODEL_LARGE)
2475 model = M32R_MODEL_LARGE;
2476 else
2477 gcc_unreachable (); /* Shouldn't happen. */
2478 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2479
2480 if (extra_flags)
2481 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2482
2483 return sym;
2484 }
2485
2486 /* Use a library function to move some bytes. */
2487
2488 static void
2489 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2490 {
2491 /* We want to pass the size as Pmode, which will normally be SImode
2492 but will be DImode if we are using 64-bit longs and pointers. */
2493 if (GET_MODE (bytes_rtx) != VOIDmode
2494 && GET_MODE (bytes_rtx) != Pmode)
2495 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2496
2497 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2498 VOIDmode, dest_reg, Pmode, src_reg, Pmode,
2499 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2500 TYPE_UNSIGNED (sizetype)),
2501 TYPE_MODE (sizetype));
2502 }
2503
2504 /* Expand string/block move operations.
2505
2506 operands[0] is the pointer to the destination.
2507 operands[1] is the pointer to the source.
2508 operands[2] is the number of bytes to move.
2509 operands[3] is the alignment.
2510
2511 Returns 1 if the move was expanded inline (or was a no-op), 0 if a library call was emitted instead. */
2512
2513 int
2514 m32r_expand_block_move (rtx operands[])
2515 {
2516 rtx orig_dst = operands[0];
2517 rtx orig_src = operands[1];
2518 rtx bytes_rtx = operands[2];
2519 rtx align_rtx = operands[3];
2520 int constp = CONST_INT_P (bytes_rtx);
2521 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2522 int align = INTVAL (align_rtx);
2523 int leftover;
2524 rtx src_reg;
2525 rtx dst_reg;
2526
2527 if (constp && bytes <= 0)
2528 return 1;
2529
2530 /* Move the address into scratch registers. */
2531 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2532 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2533
2534 if (align > UNITS_PER_WORD)
2535 align = UNITS_PER_WORD;
2536
2537 /* If we prefer size over speed, always use a function call.
2538 If we do not know the size, use a function call.
2539 If the blocks are not word aligned, use a function call. */
2540 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2541 {
2542 block_move_call (dst_reg, src_reg, bytes_rtx);
2543 return 0;
2544 }
2545
2546 leftover = bytes % MAX_MOVE_BYTES;
2547 bytes -= leftover;
2548
2549 /* If necessary, generate a loop to handle the bulk of the copy. */
2550 if (bytes)
2551 {
2552 rtx_code_label *label = NULL;
2553 rtx final_src = NULL_RTX;
2554 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2555 rtx rounded_total = GEN_INT (bytes);
2556 rtx new_dst_reg = gen_reg_rtx (SImode);
2557 rtx new_src_reg = gen_reg_rtx (SImode);
2558
2559 /* If we are going to have to perform this loop more than
2560 once, then generate a label and compute the address the
2561 source register will contain upon completion of the final
2562 iteration. */
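      /* A sketch of the generated copy loop (illustrative):

	     final_src = src_reg + rounded_total;
	  L: copy MAX_MOVE_BYTES from src_reg to dst_reg (both updated);
	     if (src_reg != final_src) goto L;

	 followed, if needed, by one more partial copy for the leftover
	 bytes.  */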
2563 if (bytes > MAX_MOVE_BYTES)
2564 {
2565 final_src = gen_reg_rtx (Pmode);
2566
2567 if (INT16_P(bytes))
2568 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2569 else
2570 {
2571 emit_insn (gen_movsi (final_src, rounded_total));
2572 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2573 }
2574
2575 label = gen_label_rtx ();
2576 emit_label (label);
2577 }
2578
2579 /* It is known that m32r_output_block_move () will update src_reg to point
2580 to the word after the end of the source block, and dst_reg to point
2581 to the last word of the destination block, provided that the block
2582 is MAX_MOVE_BYTES long. */
2583 emit_insn (gen_cpymemsi_internal (dst_reg, src_reg, at_a_time,
2584 new_dst_reg, new_src_reg));
2585 emit_move_insn (dst_reg, new_dst_reg);
2586 emit_move_insn (src_reg, new_src_reg);
2587 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2588
2589 if (bytes > MAX_MOVE_BYTES)
2590 {
2591 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2592 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2593 }
2594 }
2595
2596 if (leftover)
2597 emit_insn (gen_cpymemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2598 gen_reg_rtx (SImode),
2599 gen_reg_rtx (SImode)));
2600 return 1;
2601 }
2602
2603 \f
2604 /* Emit load/stores for a small constant word aligned block_move.
2605
2606 operands[0] is the memory address of the destination.
2607 operands[1] is the memory address of the source.
2608 operands[2] is the number of bytes to move.
2609 operands[3] and operands[4] are the updated destination and source pointers.
2610 operands[5] and operands[6] are temp registers. */
2611
2612 void
2613 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2614 {
2615 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2616 int first_time;
2617 int got_extra = 0;
2618
2619 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2620
2621 /* We do not have a post-increment store available, so the first set of
2622 stores are done without any increment, then the remaining ones can use
2623 the pre-increment addressing mode.
2624
2625 Note: m32r_expand_block_move () also relies upon this behavior when building
2626 loops to copy large blocks. */
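  /* For example (illustrative), a first 8-byte chunk comes out roughly as
	ld	%5, @%1+	; two loads with post-increment
	ld	%6, @%1+
	st	%5, @%0		; first store: no increment
	st	%6, @+%0	; later stores: pre-increment
     where %5/%6 are the temp registers and %0/%1 the destination and source
     pointers.  */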
2627 first_time = 1;
2628
2629 while (bytes > 0)
2630 {
2631 if (bytes >= 8)
2632 {
2633 if (first_time)
2634 {
2635 output_asm_insn ("ld\t%5, %p1", operands);
2636 output_asm_insn ("ld\t%6, %p1", operands);
2637 output_asm_insn ("st\t%5, @%0", operands);
2638 output_asm_insn ("st\t%6, %s0", operands);
2639 }
2640 else
2641 {
2642 output_asm_insn ("ld\t%5, %p1", operands);
2643 output_asm_insn ("ld\t%6, %p1", operands);
2644 output_asm_insn ("st\t%5, %s0", operands);
2645 output_asm_insn ("st\t%6, %s0", operands);
2646 }
2647
2648 bytes -= 8;
2649 }
2650 else if (bytes >= 4)
2651 {
2652 if (bytes > 4)
2653 got_extra = 1;
2654
2655 output_asm_insn ("ld\t%5, %p1", operands);
2656
2657 if (got_extra)
2658 output_asm_insn ("ld\t%6, %p1", operands);
2659
2660 if (first_time)
2661 output_asm_insn ("st\t%5, @%0", operands);
2662 else
2663 output_asm_insn ("st\t%5, %s0", operands);
2664
2665 bytes -= 4;
2666 }
2667 else
2668 {
2669 /* Get the entire next word, even though we do not want all of it.
2670 This saves us from doing several smaller loads, and we assume that
2671 we cannot cause a page fault when at least part of the word is in
2672 valid memory [since we don't get called if things aren't properly
2673 aligned]. */
2674 int dst_offset = first_time ? 0 : 4;
2675 /* The amount of increment we have to make to the
2676 destination pointer. */
2677 int dst_inc_amount = dst_offset + bytes - 4;
2678 /* The same for the source pointer. */
2679 int src_inc_amount = bytes;
2680 int last_shift;
2681 rtx my_operands[3];
2682
2683 /* If got_extra is true then we have already loaded
2684 the next word as part of loading and storing the previous word. */
2685 if (! got_extra)
2686 output_asm_insn ("ld\t%6, @%1", operands);
2687
2688 if (bytes >= 2)
2689 {
2690 bytes -= 2;
2691
2692 output_asm_insn ("sra3\t%5, %6, #16", operands);
2693 my_operands[0] = operands[5];
2694 my_operands[1] = GEN_INT (dst_offset);
2695 my_operands[2] = operands[0];
2696 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2697
2698 /* If there is a byte left to store then increment the
2699 destination address and shift the contents of the source
2700 register down by 8 bits.  We cannot do the address
2701 increment in the store halfword instruction because it does
2702 not have an auto-increment mode. */
2703 if (bytes > 0) /* assert (bytes == 1) */
2704 {
2705 dst_offset += 2;
2706 last_shift = 8;
2707 }
2708 }
2709 else
2710 last_shift = 24;
2711
2712 if (bytes > 0)
2713 {
2714 my_operands[0] = operands[6];
2715 my_operands[1] = GEN_INT (last_shift);
2716 output_asm_insn ("srai\t%0, #%1", my_operands);
2717 my_operands[0] = operands[6];
2718 my_operands[1] = GEN_INT (dst_offset);
2719 my_operands[2] = operands[0];
2720 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2721 }
2722
2723 /* Update the destination pointer if needed. We have to do
2724 this so that the pattern matches what we output in this
2725 function. */
2726 if (dst_inc_amount
2727 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2728 {
2729 my_operands[0] = operands[0];
2730 my_operands[1] = GEN_INT (dst_inc_amount);
2731 output_asm_insn ("addi\t%0, #%1", my_operands);
2732 }
2733
2734 /* Update the source pointer if needed. We have to do this
2735 so that the pattern matches what we output in this
2736 function. */
2737 if (src_inc_amount
2738 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2739 {
2740 my_operands[0] = operands[1];
2741 my_operands[1] = GEN_INT (src_inc_amount);
2742 output_asm_insn ("addi\t%0, #%1", my_operands);
2743 }
2744
2745 bytes = 0;
2746 }
2747
2748 first_time = 0;
2749 }
2750 }
2751
2752 /* Implement TARGET_HARD_REGNO_MODE_OK. */
2753
2754 static bool
2755 m32r_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
2756 {
2757 return (m32r_hard_regno_modes[regno] & m32r_mode_class[mode]) != 0;
2758 }
2759
2760 /* Implement TARGET_MODES_TIEABLE_P. Tie QI/HI/SI modes together. */
2761
2762 static bool
2763 m32r_modes_tieable_p (machine_mode mode1, machine_mode mode2)
2764 {
2765 return (GET_MODE_CLASS (mode1) == MODE_INT
2766 && GET_MODE_CLASS (mode2) == MODE_INT
2767 && GET_MODE_SIZE (mode1) <= UNITS_PER_WORD
2768 && GET_MODE_SIZE (mode2) <= UNITS_PER_WORD);
2769 }
2770
2771 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2772
2773 int
2774 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2775 unsigned int new_reg)
2776 {
2777 /* Interrupt routines can't clobber any register that isn't already used. */
2778 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2779 && !df_regs_ever_live_p (new_reg))
2780 return 0;
2781
2782 return 1;
2783 }
2784
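/* Return the RTL expression for the return address of the frame COUNT frames
   up.  Only the current frame (COUNT == 0) is supported; its return address
   is taken from the saved value of the return-address register.  */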
2785 rtx
2786 m32r_return_addr (int count)
2787 {
2788 if (count != 0)
2789 return const0_rtx;
2790
2791 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2792 }
2793
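/* Worker for TARGET_TRAMPOLINE_INIT.  The first 16 bytes of the trampoline
   are a fixed code sequence (stored byte-swapped on little-endian targets);
   the static chain value and the address of the nested function are written
   at offsets 16 and 20 for that code to load.  The instruction cache is
   then flushed, via a trap or a library call, depending on configuration.  */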
2794 static void
2795 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2796 {
2797 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2798 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2799 0x017e8e17 : 0x178e7e01, SImode));
2800 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2801 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2802 0x0c00ae86 : 0x86ae000c, SImode));
2803 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2804 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2805 0xe627871e : 0x1e8727e6, SImode));
2806 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2807 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2808 0xc616c626 : 0x26c61fc6, SImode));
2809 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2810 chain_value);
2811 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2812 XEXP (DECL_RTL (fndecl), 0));
2813
2814 if (m32r_cache_flush_trap >= 0)
2815 emit_insn (gen_flush_icache
2816 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2817 gen_int_mode (m32r_cache_flush_trap, SImode)));
2818 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2819 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2820 LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode,
2821 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2822 GEN_INT (3), SImode);
2823 }
2824
2825 /* True if X is a reg that can be used as a base reg. */
2826
2827 static bool
2828 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2829 {
2830 if (! REG_P (x))
2831 return false;
2832
2833 if (strict)
2834 {
2835 if (GPR_P (REGNO (x)))
2836 return true;
2837 }
2838 else
2839 {
2840 if (GPR_P (REGNO (x))
2841 || REGNO (x) == ARG_POINTER_REGNUM
2842 || ! HARD_REGISTER_P (x))
2843 return true;
2844 }
2845
2846 return false;
2847 }
2848
2849 static inline bool
2850 m32r_rtx_ok_for_offset_p (const_rtx x)
2851 {
2852 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2853 }
2854
2855 static inline bool
2856 m32r_legitimate_offset_address_p (machine_mode mode ATTRIBUTE_UNUSED,
2857 const_rtx x, bool strict)
2858 {
2859 if (GET_CODE (x) == PLUS
2860 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2861 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2862 return true;
2863
2864 return false;
2865 }
2866
2867 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2868 since more than one instruction will be required. */
2869
2870 static inline bool
2871 m32r_legitimate_lo_sum_address_p (machine_mode mode, const_rtx x,
2872 bool strict)
2873 {
2874 if (GET_CODE (x) == LO_SUM
2875 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2876 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2877 && CONSTANT_P (XEXP (x, 1)))
2878 return true;
2879
2880 return false;
2881 }
2882
2883 /* Is this a load with post-increment addressing?  */
2884
2885 static inline bool
2886 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2887 {
2888 if ((mode == SImode || mode == SFmode)
2889 && GET_CODE (x) == POST_INC
2890 && REG_P (XEXP (x, 0))
2891 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2892 return true;
2893
2894 return false;
2895 }
2896
2897 /* Is this a store with pre-increment or pre-decrement addressing?  */
2898
2899 static inline bool
2900 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2901 {
2902 if ((mode == SImode || mode == SFmode)
2903 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2904 && REG_P (XEXP (x, 0))
2905 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2906 return true;
2907
2908 return false;
2909 }
2910
2911 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2912
2913 static bool
2914 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2915 {
2916 if (m32r_rtx_ok_for_base_p (x, strict)
2917 || m32r_legitimate_offset_address_p (mode, x, strict)
2918 || m32r_legitimate_lo_sum_address_p (mode, x, strict)
2919 || m32r_load_postinc_p (mode, x, strict)
2920 || m32r_store_preinc_predec_p (mode, x, strict))
2921 return true;
2922
2923 return false;
2924 }
2925
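/* Worker for TARGET_CONDITIONAL_REGISTER_USAGE: when generating PIC code
   the PIC base register must be treated as fixed.  */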
2926 static void
2927 m32r_conditional_register_usage (void)
2928 {
2929 if (flag_pic)
2930 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2931 }
2932
2933 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2934
2935 We don't allow (plus symbol large-constant) as the relocations can't
2936 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2937 We allow all CONST_DOUBLE's as the md file patterns will force the
2938 constant to memory if they can't handle them. */
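/* For example (illustrative): (const (plus (symbol_ref "x") (const_int 4)))
   is legitimate, whereas the same expression with an addend of 100000 is
   rejected because no relocation can describe it.  */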
2939
2940 static bool
2941 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2942 {
2943 return !(GET_CODE (x) == CONST
2944 && GET_CODE (XEXP (x, 0)) == PLUS
2945 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2946 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2947 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2948 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2949 }
2950
2951 /* Implement TARGET_STARTING_FRAME_OFFSET. The frame pointer points at
2952 the same place as the stack pointer, except if alloca has been called. */
2953
2954 static HOST_WIDE_INT
2955 m32r_starting_frame_offset (void)
2956 {
2957 return M32R_STACK_ALIGN (crtl->outgoing_args_size);
2958 }