1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "stringpool.h"
28 #include "calls.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "dbxout.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "expr.h"
39 #include "hashtab.h"
40 #include "hash-set.h"
41 #include "vec.h"
42 #include "machmode.h"
43 #include "input.h"
44 #include "function.h"
45 #include "recog.h"
46 #include "diagnostic-core.h"
47 #include "ggc.h"
48 #include "dominance.h"
49 #include "cfg.h"
50 #include "cfgrtl.h"
51 #include "cfganal.h"
52 #include "lcm.h"
53 #include "cfgbuild.h"
54 #include "cfgcleanup.h"
55 #include "predict.h"
56 #include "basic-block.h"
57 #include "df.h"
58 #include "tm_p.h"
59 #include "target.h"
60 #include "target-def.h"
61 #include "tm-constrs.h"
62 #include "opts.h"
63 #include "builtins.h"
64
65 /* Array of valid operand punctuation characters. */
66 static char m32r_punct_chars[256];
67
68 /* Machine-specific symbol_ref flags. */
69 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
70 #define SYMBOL_REF_MODEL(X) \
71 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
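/* Editor's note: an illustrative sketch, not part of the original source.
   The code model occupies the two machine-dependent symbol_ref flag bits
   starting at SYMBOL_FLAG_MACH_DEP_SHIFT.  m32r_encode_section_info below
   stores it with

       extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
       SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;

   and SYMBOL_REF_MODEL recovers it by shifting back down and masking with 3,
   assuming the enum m32r_model values (small/medium/large) fit in two bits.  */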
72
73 /* For string literals, etc. */
74 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
75
76 /* Forward declarations. */
77 static void m32r_option_override (void);
78 static void init_reg_tables (void);
79 static void block_move_call (rtx, rtx, rtx);
80 static int m32r_is_insn (rtx);
81 static bool m32r_legitimate_address_p (machine_mode, rtx, bool);
82 static rtx m32r_legitimize_address (rtx, rtx, machine_mode);
83 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
84 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
85 static void m32r_print_operand (FILE *, rtx, int);
86 static void m32r_print_operand_address (FILE *, rtx);
87 static bool m32r_print_operand_punct_valid_p (unsigned char code);
88 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
89 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
90
91 static void m32r_file_start (void);
92
93 static int m32r_adjust_priority (rtx_insn *, int);
94 static int m32r_issue_rate (void);
95
96 static void m32r_encode_section_info (tree, rtx, int);
97 static bool m32r_in_small_data_p (const_tree);
98 static bool m32r_return_in_memory (const_tree, const_tree);
99 static rtx m32r_function_value (const_tree, const_tree, bool);
100 static rtx m32r_libcall_value (machine_mode, const_rtx);
101 static bool m32r_function_value_regno_p (const unsigned int);
102 static void m32r_setup_incoming_varargs (cumulative_args_t, machine_mode,
103 tree, int *, int);
104 static void init_idents (void);
105 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
106 static int m32r_memory_move_cost (machine_mode, reg_class_t, bool);
107 static bool m32r_pass_by_reference (cumulative_args_t, machine_mode,
108 const_tree, bool);
109 static int m32r_arg_partial_bytes (cumulative_args_t, machine_mode,
110 tree, bool);
111 static rtx m32r_function_arg (cumulative_args_t, machine_mode,
112 const_tree, bool);
113 static void m32r_function_arg_advance (cumulative_args_t, machine_mode,
114 const_tree, bool);
115 static bool m32r_can_eliminate (const int, const int);
116 static void m32r_conditional_register_usage (void);
117 static void m32r_trampoline_init (rtx, tree, rtx);
118 static bool m32r_legitimate_constant_p (machine_mode, rtx);
119 \f
120 /* M32R specific attributes. */
121
122 static const struct attribute_spec m32r_attribute_table[] =
123 {
124 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
125 affects_type_identity } */
126 { "interrupt", 0, 0, true, false, false, NULL, false },
127 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
128 false },
129 { NULL, 0, 0, false, false, false, NULL, false }
130 };
131 \f
132 /* Initialize the GCC target structure. */
133 #undef TARGET_ATTRIBUTE_TABLE
134 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
135
136 #undef TARGET_LEGITIMATE_ADDRESS_P
137 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
138 #undef TARGET_LEGITIMIZE_ADDRESS
139 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
140 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
141 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
142
143 #undef TARGET_ASM_ALIGNED_HI_OP
144 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
145 #undef TARGET_ASM_ALIGNED_SI_OP
146 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
147
148 #undef TARGET_PRINT_OPERAND
149 #define TARGET_PRINT_OPERAND m32r_print_operand
150 #undef TARGET_PRINT_OPERAND_ADDRESS
151 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
152 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
153 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
154
155 #undef TARGET_ASM_FUNCTION_PROLOGUE
156 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
157 #undef TARGET_ASM_FUNCTION_EPILOGUE
158 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
159
160 #undef TARGET_ASM_FILE_START
161 #define TARGET_ASM_FILE_START m32r_file_start
162
163 #undef TARGET_SCHED_ADJUST_PRIORITY
164 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
165 #undef TARGET_SCHED_ISSUE_RATE
166 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
167
168 #undef TARGET_OPTION_OVERRIDE
169 #define TARGET_OPTION_OVERRIDE m32r_option_override
170
171 #undef TARGET_ENCODE_SECTION_INFO
172 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
173 #undef TARGET_IN_SMALL_DATA_P
174 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
175
176
177 #undef TARGET_MEMORY_MOVE_COST
178 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
179 #undef TARGET_RTX_COSTS
180 #define TARGET_RTX_COSTS m32r_rtx_costs
181 #undef TARGET_ADDRESS_COST
182 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
183
184 #undef TARGET_PROMOTE_PROTOTYPES
185 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
186 #undef TARGET_RETURN_IN_MEMORY
187 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
188
189 #undef TARGET_FUNCTION_VALUE
190 #define TARGET_FUNCTION_VALUE m32r_function_value
191 #undef TARGET_LIBCALL_VALUE
192 #define TARGET_LIBCALL_VALUE m32r_libcall_value
193 #undef TARGET_FUNCTION_VALUE_REGNO_P
194 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
195
196 #undef TARGET_SETUP_INCOMING_VARARGS
197 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
198 #undef TARGET_MUST_PASS_IN_STACK
199 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
200 #undef TARGET_PASS_BY_REFERENCE
201 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
202 #undef TARGET_ARG_PARTIAL_BYTES
203 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
204 #undef TARGET_FUNCTION_ARG
205 #define TARGET_FUNCTION_ARG m32r_function_arg
206 #undef TARGET_FUNCTION_ARG_ADVANCE
207 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
208
209 #undef TARGET_CAN_ELIMINATE
210 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
211
212 #undef TARGET_CONDITIONAL_REGISTER_USAGE
213 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
214
215 #undef TARGET_TRAMPOLINE_INIT
216 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
217
218 #undef TARGET_LEGITIMATE_CONSTANT_P
219 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
220
221 struct gcc_target targetm = TARGET_INITIALIZER;
222 \f
223 /* Called by m32r_option_override to initialize various things. */
224
225 void
226 m32r_init (void)
227 {
228 init_reg_tables ();
229
230 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
231 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
232 m32r_punct_chars['#'] = 1;
233 m32r_punct_chars['@'] = 1; /* ??? no longer used */
234
235 /* Provide default value if not specified. */
236 if (!global_options_set.x_g_switch_value)
237 g_switch_value = SDATA_DEFAULT_SIZE;
238 }
239
240 static void
241 m32r_option_override (void)
242 {
243 /* These need to be done at start up.
244 It's convenient to do them here. */
245 m32r_init ();
246 SUBTARGET_OVERRIDE_OPTIONS;
247 }
248
249 /* Vectors to keep interesting information about registers where it can easily
250 be got. We use to use the actual mode value as the bit number, but there
251 is (or may be) more than 32 modes now. Instead we use two tables: one
252 indexed by hard register number, and one indexed by mode. */
253
254 /* The purpose of m32r_mode_class is to shrink the range of modes so that
255 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
256 mapped into one m32r_mode_class mode. */
257
258 enum m32r_mode_class
259 {
260 C_MODE,
261 S_MODE, D_MODE, T_MODE, O_MODE,
262 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
263 };
264
265 /* Modes for condition codes. */
266 #define C_MODES (1 << (int) C_MODE)
267
268 /* Modes for single-word and smaller quantities. */
269 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
270
271 /* Modes for double-word and smaller quantities. */
272 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
273
274 /* Modes for quad-word and smaller quantities. */
275 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
276
277 /* Modes for accumulators. */
278 #define A_MODES (1 << (int) A_MODE)
279
280 /* Value is 1 if register/mode pair is acceptable on the M32R. */
281
282 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
283 {
284 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
285 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
286 S_MODES, C_MODES, A_MODES, A_MODES
287 };
288
289 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
290
291 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
292
293 static void
294 init_reg_tables (void)
295 {
296 int i;
297
298 for (i = 0; i < NUM_MACHINE_MODES; i++)
299 {
300 machine_mode m = (machine_mode) i;
301
302 switch (GET_MODE_CLASS (m))
303 {
304 case MODE_INT:
305 case MODE_PARTIAL_INT:
306 case MODE_COMPLEX_INT:
307 if (GET_MODE_SIZE (m) <= 4)
308 m32r_mode_class[i] = 1 << (int) S_MODE;
309 else if (GET_MODE_SIZE (m) == 8)
310 m32r_mode_class[i] = 1 << (int) D_MODE;
311 else if (GET_MODE_SIZE (m) == 16)
312 m32r_mode_class[i] = 1 << (int) T_MODE;
313 else if (GET_MODE_SIZE (m) == 32)
314 m32r_mode_class[i] = 1 << (int) O_MODE;
315 else
316 m32r_mode_class[i] = 0;
317 break;
318 case MODE_FLOAT:
319 case MODE_COMPLEX_FLOAT:
320 if (GET_MODE_SIZE (m) <= 4)
321 m32r_mode_class[i] = 1 << (int) SF_MODE;
322 else if (GET_MODE_SIZE (m) == 8)
323 m32r_mode_class[i] = 1 << (int) DF_MODE;
324 else if (GET_MODE_SIZE (m) == 16)
325 m32r_mode_class[i] = 1 << (int) TF_MODE;
326 else if (GET_MODE_SIZE (m) == 32)
327 m32r_mode_class[i] = 1 << (int) OF_MODE;
328 else
329 m32r_mode_class[i] = 0;
330 break;
331 case MODE_CC:
332 m32r_mode_class[i] = 1 << (int) C_MODE;
333 break;
334 default:
335 m32r_mode_class[i] = 0;
336 break;
337 }
338 }
339
340 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
341 {
342 if (GPR_P (i))
343 m32r_regno_reg_class[i] = GENERAL_REGS;
344 else if (i == ARG_POINTER_REGNUM)
345 m32r_regno_reg_class[i] = GENERAL_REGS;
346 else
347 m32r_regno_reg_class[i] = NO_REGS;
348 }
349 }
350 \f
351 /* M32R specific attribute support.
352
353 interrupt - for interrupt functions
354
355 model - select code model used to access object
356
357 small: addresses use 24 bits, use bl to make calls
358 medium: addresses use 32 bits, use bl to make calls
359 large: addresses use 32 bits, use seth/add3/jl to make calls
360
361 Grep for MODEL in m32r.h for more info. */
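/* Editor's note: an illustrative sketch, not part of the original source.
   Typical user-level spellings of these attributes would look roughly like

       void handler (void) __attribute__ ((interrupt));
       extern int huge_table[] __attribute__ ((model (large)));

   The model argument is matched as an identifier: the handler below accepts
   small, medium or large, as well as the __small__/__medium__/__large__
   forms registered by init_idents.  */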
362
363 static tree small_ident1;
364 static tree small_ident2;
365 static tree medium_ident1;
366 static tree medium_ident2;
367 static tree large_ident1;
368 static tree large_ident2;
369
370 static void
371 init_idents (void)
372 {
373 if (small_ident1 == 0)
374 {
375 small_ident1 = get_identifier ("small");
376 small_ident2 = get_identifier ("__small__");
377 medium_ident1 = get_identifier ("medium");
378 medium_ident2 = get_identifier ("__medium__");
379 large_ident1 = get_identifier ("large");
380 large_ident2 = get_identifier ("__large__");
381 }
382 }
383
384 /* Handle an "model" attribute; arguments as in
385 struct attribute_spec.handler. */
386 static tree
387 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
388 tree args, int flags ATTRIBUTE_UNUSED,
389 bool *no_add_attrs)
390 {
391 tree arg;
392
393 init_idents ();
394 arg = TREE_VALUE (args);
395
396 if (arg != small_ident1
397 && arg != small_ident2
398 && arg != medium_ident1
399 && arg != medium_ident2
400 && arg != large_ident1
401 && arg != large_ident2)
402 {
403 warning (OPT_Wattributes, "invalid argument of %qs attribute",
404 IDENTIFIER_POINTER (name));
405 *no_add_attrs = true;
406 }
407
408 return NULL_TREE;
409 }
410 \f
411 /* Encode section information of DECL, which is either a VAR_DECL,
412 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
413
414 For the M32R we want to record:
415
416 - whether the object lives in .sdata/.sbss.
417 - what code model should be used to access the object
418 */
419
420 static void
421 m32r_encode_section_info (tree decl, rtx rtl, int first)
422 {
423 int extra_flags = 0;
424 tree model_attr;
425 enum m32r_model model;
426
427 default_encode_section_info (decl, rtl, first);
428
429 if (!DECL_P (decl))
430 return;
431
432 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
433 if (model_attr)
434 {
435 tree id;
436
437 init_idents ();
438
439 id = TREE_VALUE (TREE_VALUE (model_attr));
440
441 if (id == small_ident1 || id == small_ident2)
442 model = M32R_MODEL_SMALL;
443 else if (id == medium_ident1 || id == medium_ident2)
444 model = M32R_MODEL_MEDIUM;
445 else if (id == large_ident1 || id == large_ident2)
446 model = M32R_MODEL_LARGE;
447 else
448 gcc_unreachable (); /* shouldn't happen */
449 }
450 else
451 {
452 if (TARGET_MODEL_SMALL)
453 model = M32R_MODEL_SMALL;
454 else if (TARGET_MODEL_MEDIUM)
455 model = M32R_MODEL_MEDIUM;
456 else if (TARGET_MODEL_LARGE)
457 model = M32R_MODEL_LARGE;
458 else
459 gcc_unreachable (); /* shouldn't happen */
460 }
461 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
462
463 if (extra_flags)
464 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
465 }
466
467 /* Only mark the object as being small data area addressable if
468 it hasn't been explicitly marked with a code model.
469
470 The user can explicitly put an object in the small data area with the
471 section attribute. If the object is in sdata/sbss and marked with a
472 code model do both [put the object in .sdata and mark it as being
473 addressed with a specific code model - don't mark it as being addressed
474 with an SDA reloc though]. This is ok and might be useful at times. If
475 the object doesn't fit the linker will give an error. */
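/* Editor's note: an illustrative sketch, not part of the original source.
   Per the rules above, an object becomes small-data addressable either
   explicitly, via the section attribute:

       int counter __attribute__ ((section (".sdata")));

   or implicitly, when it is a writable variable with no model attribute
   whose size is at most g_switch_value (SDATA_DEFAULT_SIZE unless -G was
   given; see m32r_init above) and small-data use has not been disabled
   (TARGET_SDATA_NONE).  */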
476
477 static bool
478 m32r_in_small_data_p (const_tree decl)
479 {
480 const char *section;
481
482 if (TREE_CODE (decl) != VAR_DECL)
483 return false;
484
485 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
486 return false;
487
488 section = DECL_SECTION_NAME (decl);
489 if (section)
490 {
491 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
492 return true;
493 }
494 else
495 {
496 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
497 {
498 int size = int_size_in_bytes (TREE_TYPE (decl));
499
500 if (size > 0 && size <= g_switch_value)
501 return true;
502 }
503 }
504
505 return false;
506 }
507
508 /* Do anything needed before RTL is emitted for each function. */
509
510 void
511 m32r_init_expanders (void)
512 {
513 /* ??? At one point there was code here. The function is left in
514 to make it easy to experiment. */
515 }
516 \f
517 int
518 call_operand (rtx op, machine_mode mode)
519 {
520 if (!MEM_P (op))
521 return 0;
522 op = XEXP (op, 0);
523 return call_address_operand (op, mode);
524 }
525
526 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
527
528 int
529 small_data_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
530 {
531 if (! TARGET_SDATA_USE)
532 return 0;
533
534 if (GET_CODE (op) == SYMBOL_REF)
535 return SYMBOL_REF_SMALL_P (op);
536
537 if (GET_CODE (op) == CONST
538 && GET_CODE (XEXP (op, 0)) == PLUS
539 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
540 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
541 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
542
543 return 0;
544 }
545
546 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
547
548 int
549 addr24_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
550 {
551 rtx sym;
552
553 if (flag_pic)
554 return 0;
555
556 if (GET_CODE (op) == LABEL_REF)
557 return TARGET_ADDR24;
558
559 if (GET_CODE (op) == SYMBOL_REF)
560 sym = op;
561 else if (GET_CODE (op) == CONST
562 && GET_CODE (XEXP (op, 0)) == PLUS
563 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
564 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
565 sym = XEXP (XEXP (op, 0), 0);
566 else
567 return 0;
568
569 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
570 return 1;
571
572 if (TARGET_ADDR24
573 && (CONSTANT_POOL_ADDRESS_P (sym)
574 || LIT_NAME_P (XSTR (sym, 0))))
575 return 1;
576
577 return 0;
578 }
579
580 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
581
582 int
583 addr32_operand (rtx op, machine_mode mode)
584 {
585 rtx sym;
586
587 if (GET_CODE (op) == LABEL_REF)
588 return TARGET_ADDR32;
589
590 if (GET_CODE (op) == SYMBOL_REF)
591 sym = op;
592 else if (GET_CODE (op) == CONST
593 && GET_CODE (XEXP (op, 0)) == PLUS
594 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
595 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
596 && ! flag_pic)
597 sym = XEXP (XEXP (op, 0), 0);
598 else
599 return 0;
600
601 return (! addr24_operand (sym, mode)
602 && ! small_data_operand (sym, mode));
603 }
604
605 /* Return 1 if OP is a function that can be called with the `bl' insn. */
606
607 int
608 call26_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
609 {
610 if (flag_pic)
611 return 1;
612
613 if (GET_CODE (op) == SYMBOL_REF)
614 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
615
616 return TARGET_CALL26;
617 }
618
619 /* Return 1 if OP is a DImode const we want to handle inline.
620 This must match the code in the movdi pattern.
621 It is used by the 'G' constraint. */
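/* Editor's note: a worked example, not part of the original source.
   The DImode constant 0x0000002AFFFFFFD6 splits into high = 42 and
   low = -42, both inside the [-128, 127] window checked below, so it
   satisfies 'G' and can be built with two of the short `ldi' insns
   mentioned in the comment.  0x0000000000001000 fails the test because
   its low word, 4096, is out of range.  */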
622
623 int
624 easy_di_const (rtx op)
625 {
626 rtx high_rtx, low_rtx;
627 HOST_WIDE_INT high, low;
628
629 split_double (op, &high_rtx, &low_rtx);
630 high = INTVAL (high_rtx);
631 low = INTVAL (low_rtx);
632 /* Pick constants loadable with 2 16-bit `ldi' insns. */
633 if (high >= -128 && high <= 127
634 && low >= -128 && low <= 127)
635 return 1;
636 return 0;
637 }
638
639 /* Return 1 if OP is a DFmode const we want to handle inline.
640 This must match the code in the movdf pattern.
641 It is used by the 'H' constraint. */
642
643 int
644 easy_df_const (rtx op)
645 {
646 REAL_VALUE_TYPE r;
647 long l[2];
648
649 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
650 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
651 if (l[0] == 0 && l[1] == 0)
652 return 1;
653 if ((l[0] & 0xffff) == 0 && l[1] == 0)
654 return 1;
655 return 0;
656 }
657
658 /* Return 1 if OP is (mem (reg ...)).
659 This is used in insn length calcs. */
660
661 int
662 memreg_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
663 {
664 return MEM_P (op) && REG_P (XEXP (op, 0));
665 }
666
667 /* Return nonzero if TYPE must be passed by indirect reference. */
668
669 static bool
670 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
671 machine_mode mode, const_tree type,
672 bool named ATTRIBUTE_UNUSED)
673 {
674 int size;
675
676 if (type)
677 size = int_size_in_bytes (type);
678 else
679 size = GET_MODE_SIZE (mode);
680
681 return (size < 0 || size > 8);
682 }
683 \f
684 /* Comparisons. */
685
686 /* X and Y are two things to compare using CODE. Emit the compare insn and
687 return the rtx for compare [arg0 of the if_then_else].
688 If need_compare is true then the comparison insn must be generated, rather
689 than being subsumed into the following branch instruction. */
690
691 rtx
692 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
693 {
694 enum rtx_code compare_code;
695 enum rtx_code branch_code;
696 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
697 int must_swap = 0;
698
699 switch (code)
700 {
701 case EQ: compare_code = EQ; branch_code = NE; break;
702 case NE: compare_code = EQ; branch_code = EQ; break;
703 case LT: compare_code = LT; branch_code = NE; break;
704 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
705 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
706 case GE: compare_code = LT; branch_code = EQ; break;
707 case LTU: compare_code = LTU; branch_code = NE; break;
708 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
709 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
710 case GEU: compare_code = LTU; branch_code = EQ; break;
711
712 default:
713 gcc_unreachable ();
714 }
715
716 if (need_compare)
717 {
718 switch (compare_code)
719 {
720 case EQ:
721 if (satisfies_constraint_P (y) /* Reg equal to small const. */
722 && y != const0_rtx)
723 {
724 rtx tmp = gen_reg_rtx (SImode);
725
726 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
727 x = tmp;
728 y = const0_rtx;
729 }
730 else if (CONSTANT_P (y)) /* Reg equal to const. */
731 {
732 rtx tmp = force_reg (GET_MODE (x), y);
733 y = tmp;
734 }
735
736 if (register_operand (y, SImode) /* Reg equal to reg. */
737 || y == const0_rtx) /* Reg equal to zero. */
738 {
739 emit_insn (gen_cmp_eqsi_insn (x, y));
740
741 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
742 }
743 break;
744
745 case LT:
746 if (register_operand (y, SImode)
747 || satisfies_constraint_P (y))
748 {
749 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
750
751 switch (code)
752 {
753 case LT:
754 emit_insn (gen_cmp_ltsi_insn (x, y));
755 code = EQ;
756 break;
757 case LE:
758 if (y == const0_rtx)
759 tmp = const1_rtx;
760 else
761 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
762 emit_insn (gen_cmp_ltsi_insn (x, tmp));
763 code = EQ;
764 break;
765 case GT:
766 if (CONST_INT_P (y))
767 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
768 else
769 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
770 emit_insn (gen_cmp_ltsi_insn (x, tmp));
771 code = NE;
772 break;
773 case GE:
774 emit_insn (gen_cmp_ltsi_insn (x, y));
775 code = NE;
776 break;
777 default:
778 gcc_unreachable ();
779 }
780
781 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
782 }
783 break;
784
785 case LTU:
786 if (register_operand (y, SImode)
787 || satisfies_constraint_P (y))
788 {
789 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
790
791 switch (code)
792 {
793 case LTU:
794 emit_insn (gen_cmp_ltusi_insn (x, y));
795 code = EQ;
796 break;
797 case LEU:
798 if (y == const0_rtx)
799 tmp = const1_rtx;
800 else
801 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
802 emit_insn (gen_cmp_ltusi_insn (x, tmp));
803 code = EQ;
804 break;
805 case GTU:
806 if (CONST_INT_P (y))
807 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
808 else
809 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
810 emit_insn (gen_cmp_ltusi_insn (x, tmp));
811 code = NE;
812 break;
813 case GEU:
814 emit_insn (gen_cmp_ltusi_insn (x, y));
815 code = NE;
816 break;
817 default:
818 gcc_unreachable ();
819 }
820
821 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
822 }
823 break;
824
825 default:
826 gcc_unreachable ();
827 }
828 }
829 else
830 {
831 /* Reg/reg equal comparison. */
832 if (compare_code == EQ
833 && register_operand (y, SImode))
834 return gen_rtx_fmt_ee (code, CCmode, x, y);
835
836 /* Reg/zero signed comparison. */
837 if ((compare_code == EQ || compare_code == LT)
838 && y == const0_rtx)
839 return gen_rtx_fmt_ee (code, CCmode, x, y);
840
841 /* Reg/smallconst equal comparison. */
842 if (compare_code == EQ
843 && satisfies_constraint_P (y))
844 {
845 rtx tmp = gen_reg_rtx (SImode);
846
847 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
848 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
849 }
850
851 /* Reg/const equal comparison. */
852 if (compare_code == EQ
853 && CONSTANT_P (y))
854 {
855 rtx tmp = force_reg (GET_MODE (x), y);
856
857 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
858 }
859 }
860
861 if (CONSTANT_P (y))
862 {
863 if (must_swap)
864 y = force_reg (GET_MODE (x), y);
865 else
866 {
867 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
868
869 if (! ok_const)
870 y = force_reg (GET_MODE (x), y);
871 }
872 }
873
874 switch (compare_code)
875 {
876 case EQ :
877 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
878 break;
879 case LT :
880 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
881 break;
882 case LTU :
883 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
884 break;
885
886 default:
887 gcc_unreachable ();
888 }
889
890 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
891 }
892
893 bool
894 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
895 {
896 machine_mode mode = GET_MODE (op0);
897
898 gcc_assert (mode == SImode);
899 switch (code)
900 {
901 case EQ:
902 if (!register_operand (op1, mode))
903 op1 = force_reg (mode, op1);
904
905 if (TARGET_M32RX || TARGET_M32R2)
906 {
907 if (!reg_or_zero_operand (op2, mode))
908 op2 = force_reg (mode, op2);
909
910 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
911 return true;
912 }
913 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
914 {
915 emit_insn (gen_seq_zero_insn (op0, op1));
916 return true;
917 }
918
919 if (!reg_or_eq_int16_operand (op2, mode))
920 op2 = force_reg (mode, op2);
921
922 emit_insn (gen_seq_insn (op0, op1, op2));
923 return true;
924
925 case NE:
926 if (!CONST_INT_P (op2)
927 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
928 {
929 rtx reg;
930
931 if (reload_completed || reload_in_progress)
932 return false;
933
934 reg = gen_reg_rtx (SImode);
935 emit_insn (gen_xorsi3 (reg, op1, op2));
936 op1 = reg;
937
938 if (!register_operand (op1, mode))
939 op1 = force_reg (mode, op1);
940
941 emit_insn (gen_sne_zero_insn (op0, op1));
942 return true;
943 }
944 return false;
945
946 case LT:
947 case GT:
948 if (code == GT)
949 {
950 rtx tmp = op2;
951 op2 = op1;
952 op1 = tmp;
953 code = LT;
954 }
955
956 if (!register_operand (op1, mode))
957 op1 = force_reg (mode, op1);
958
959 if (!reg_or_int16_operand (op2, mode))
960 op2 = force_reg (mode, op2);
961
962 emit_insn (gen_slt_insn (op0, op1, op2));
963 return true;
964
965 case LTU:
966 case GTU:
967 if (code == GTU)
968 {
969 rtx tmp = op2;
970 op2 = op1;
971 op1 = tmp;
972 code = LTU;
973 }
974
975 if (!register_operand (op1, mode))
976 op1 = force_reg (mode, op1);
977
978 if (!reg_or_int16_operand (op2, mode))
979 op2 = force_reg (mode, op2);
980
981 emit_insn (gen_sltu_insn (op0, op1, op2));
982 return true;
983
984 case GE:
985 case GEU:
986 if (!register_operand (op1, mode))
987 op1 = force_reg (mode, op1);
988
989 if (!reg_or_int16_operand (op2, mode))
990 op2 = force_reg (mode, op2);
991
992 if (code == GE)
993 emit_insn (gen_sge_insn (op0, op1, op2));
994 else
995 emit_insn (gen_sgeu_insn (op0, op1, op2));
996 return true;
997
998 case LE:
999 case LEU:
1000 if (!register_operand (op1, mode))
1001 op1 = force_reg (mode, op1);
1002
1003 if (CONST_INT_P (op2))
1004 {
1005 HOST_WIDE_INT value = INTVAL (op2);
1006 if (value >= 2147483647)
1007 {
1008 emit_move_insn (op0, const1_rtx);
1009 return true;
1010 }
1011
1012 op2 = GEN_INT (value + 1);
1013 if (value < -32768 || value >= 32767)
1014 op2 = force_reg (mode, op2);
1015
1016 if (code == LEU)
1017 emit_insn (gen_sltu_insn (op0, op1, op2));
1018 else
1019 emit_insn (gen_slt_insn (op0, op1, op2));
1020 return true;
1021 }
1022
1023 if (!register_operand (op2, mode))
1024 op2 = force_reg (mode, op2);
1025
1026 if (code == LEU)
1027 emit_insn (gen_sleu_insn (op0, op1, op2));
1028 else
1029 emit_insn (gen_sle_insn (op0, op1, op2));
1030 return true;
1031
1032 default:
1033 gcc_unreachable ();
1034 }
1035 }
1036
1037 \f
1038 /* Split a 2 word move (DI or DF) into component parts. */
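/* Editor's note: an illustrative sketch, not part of the original source.
   A register-to-register DImode copy such as (set (reg:DI 4) (reg:DI 6))
   becomes two SImode sets, r4 = r6 then r5 = r7.  When the destination
   overlaps the source's second word, e.g. (set (reg:DI 5) (reg:DI 4))
   where dregno == sregno + 1, the order is reversed (r6 = r5 first, then
   r5 = r4) so the overlapping word is read before it is overwritten.  */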
1039
1040 rtx
1041 gen_split_move_double (rtx operands[])
1042 {
1043 machine_mode mode = GET_MODE (operands[0]);
1044 rtx dest = operands[0];
1045 rtx src = operands[1];
1046 rtx val;
1047
1048 /* We might have (SUBREG (MEM)) here, so just get rid of the
1049 subregs to make this code simpler. It is safe to call
1050 alter_subreg any time after reload. */
1051 if (GET_CODE (dest) == SUBREG)
1052 alter_subreg (&dest, true);
1053 if (GET_CODE (src) == SUBREG)
1054 alter_subreg (&src, true);
1055
1056 start_sequence ();
1057 if (REG_P (dest))
1058 {
1059 int dregno = REGNO (dest);
1060
1061 /* Reg = reg. */
1062 if (REG_P (src))
1063 {
1064 int sregno = REGNO (src);
1065
1066 int reverse = (dregno == sregno + 1);
1067
1068 /* We normally copy the low-numbered register first. However, if
1069 the first register of operand 0 is the same as the second register of
1070 operand 1, we must copy in the opposite order. */
1071 emit_insn (gen_rtx_SET (VOIDmode,
1072 operand_subword (dest, reverse, TRUE, mode),
1073 operand_subword (src, reverse, TRUE, mode)));
1074
1075 emit_insn (gen_rtx_SET (VOIDmode,
1076 operand_subword (dest, !reverse, TRUE, mode),
1077 operand_subword (src, !reverse, TRUE, mode)));
1078 }
1079
1080 /* Reg = constant. */
1081 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1082 {
1083 rtx words[2];
1084 split_double (src, &words[0], &words[1]);
1085 emit_insn (gen_rtx_SET (VOIDmode,
1086 operand_subword (dest, 0, TRUE, mode),
1087 words[0]));
1088
1089 emit_insn (gen_rtx_SET (VOIDmode,
1090 operand_subword (dest, 1, TRUE, mode),
1091 words[1]));
1092 }
1093
1094 /* Reg = mem. */
1095 else if (MEM_P (src))
1096 {
1097 /* If the high-address word is used in the address, we must load it
1098 last. Otherwise, load it first. */
1099 int reverse
1100 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1101
1102 /* We used to optimize loads from single registers as
1103
1104 ld r1,r3+; ld r2,r3
1105
1106 if r3 were not used subsequently. However, the REG_NOTES aren't
1107 propagated correctly by the reload phase, and it can cause bad
1108 code to be generated. We could still try:
1109
1110 ld r1,r3+; ld r2,r3; addi r3,-4
1111
1112 which saves 2 bytes and doesn't force longword alignment. */
1113 emit_insn (gen_rtx_SET (VOIDmode,
1114 operand_subword (dest, reverse, TRUE, mode),
1115 adjust_address (src, SImode,
1116 reverse * UNITS_PER_WORD)));
1117
1118 emit_insn (gen_rtx_SET (VOIDmode,
1119 operand_subword (dest, !reverse, TRUE, mode),
1120 adjust_address (src, SImode,
1121 !reverse * UNITS_PER_WORD)));
1122 }
1123 else
1124 gcc_unreachable ();
1125 }
1126
1127 /* Mem = reg. */
1128 /* We used to optimize stores through single registers as
1129
1130 st r1,r3; st r2,+r3
1131
1132 if r3 were not used subsequently. However, the REG_NOTES aren't
1133 propagated correctly by the reload phase, and it can cause bad
1134 code to be generated. We could still try:
1135
1136 st r1,r3; st r2,+r3; addi r3,-4
1137
1138 which saves 2 bytes and doesn't force longword alignment. */
1139 else if (MEM_P (dest) && REG_P (src))
1140 {
1141 emit_insn (gen_rtx_SET (VOIDmode,
1142 adjust_address (dest, SImode, 0),
1143 operand_subword (src, 0, TRUE, mode)));
1144
1145 emit_insn (gen_rtx_SET (VOIDmode,
1146 adjust_address (dest, SImode, UNITS_PER_WORD),
1147 operand_subword (src, 1, TRUE, mode)));
1148 }
1149
1150 else
1151 gcc_unreachable ();
1152
1153 val = get_insns ();
1154 end_sequence ();
1155 return val;
1156 }
1157
1158 \f
1159 static int
1160 m32r_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
1161 tree type, bool named ATTRIBUTE_UNUSED)
1162 {
1163 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1164
1165 int words;
1166 unsigned int size =
1167 (((mode == BLKmode && type)
1168 ? (unsigned int) int_size_in_bytes (type)
1169 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1170 / UNITS_PER_WORD;
1171
1172 if (*cum >= M32R_MAX_PARM_REGS)
1173 words = 0;
1174 else if (*cum + size > M32R_MAX_PARM_REGS)
1175 words = (*cum + size) - M32R_MAX_PARM_REGS;
1176 else
1177 words = 0;
1178
1179 return words * UNITS_PER_WORD;
1180 }
1181
1182 /* The ROUND_ADVANCE* macros are local to this file. */
1183 /* Round SIZE up to a word boundary. */
1184 #define ROUND_ADVANCE(SIZE) \
1185 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1186
1187 /* Round arg MODE/TYPE up to the next word boundary. */
1188 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1189 ((MODE) == BLKmode \
1190 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1191 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1192
1193 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1194 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1195
1196 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1197 a reg. This includes arguments that have to be passed by reference as the
1198 pointer to them is passed in a reg if one is available (and that is what
1199 we're given).
1200 This macro is only used in this file. */
1201 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1202 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
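/* Editor's note: a worked example, not part of the original source,
   assuming UNITS_PER_WORD == 4 on this port.  ROUND_ADVANCE (6) == 2, so
   a 6-byte BLKmode structure advances CUM by two words, while an SImode
   or HImode argument advances it by one.  ROUND_ADVANCE_CUM is the
   identity here, i.e. no extra alignment of the register cursor is
   required before any argument.  */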
1203
1204 /* Determine where to put an argument to a function.
1205 Value is zero to push the argument on the stack,
1206 or a hard register in which to store the argument.
1207
1208 MODE is the argument's machine mode.
1209 TYPE is the data type of the argument (as a tree).
1210 This is null for libcalls where that information may
1211 not be available.
1212 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1213 the preceding args and about the function being called.
1214 NAMED is nonzero if this argument is a named parameter
1215 (otherwise it is an extra parameter matching an ellipsis). */
1216 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1217 and the rest are pushed. */
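/* Editor's note: an illustrative sketch, not part of the original source,
   assuming the usual M32R_MAX_PARM_REGS of 4 from m32r.h.  For
   f (int a, int b, long long c, int d): a and b take the first two
   argument registers, c occupies the next two as one DImode register,
   and d no longer satisfies PASS_IN_REG_P, so it is passed on the stack.  */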
1218
1219 static rtx
1220 m32r_function_arg (cumulative_args_t cum_v, machine_mode mode,
1221 const_tree type ATTRIBUTE_UNUSED,
1222 bool named ATTRIBUTE_UNUSED)
1223 {
1224 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1225
1226 return (PASS_IN_REG_P (*cum, mode, type)
1227 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1228 : NULL_RTX);
1229 }
1230
1231 /* Update the data in CUM to advance over an argument
1232 of mode MODE and data type TYPE.
1233 (TYPE is null for libcalls where that information may not be available.) */
1234
1235 static void
1236 m32r_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
1237 const_tree type, bool named ATTRIBUTE_UNUSED)
1238 {
1239 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1240
1241 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1242 + ROUND_ADVANCE_ARG (mode, type));
1243 }
1244
1245 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1246
1247 static bool
1248 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1249 {
1250 cumulative_args_t dummy = pack_cumulative_args (NULL);
1251
1252 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1253 }
1254
1255 /* Worker function for TARGET_FUNCTION_VALUE. */
1256
1257 static rtx
1258 m32r_function_value (const_tree valtype,
1259 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1260 bool outgoing ATTRIBUTE_UNUSED)
1261 {
1262 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1263 }
1264
1265 /* Worker function for TARGET_LIBCALL_VALUE. */
1266
1267 static rtx
1268 m32r_libcall_value (machine_mode mode,
1269 const_rtx fun ATTRIBUTE_UNUSED)
1270 {
1271 return gen_rtx_REG (mode, 0);
1272 }
1273
1274 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1275
1276 ??? What about r1 in DI/DF values. */
1277
1278 static bool
1279 m32r_function_value_regno_p (const unsigned int regno)
1280 {
1281 return (regno == 0);
1282 }
1283
1284 /* Do any needed setup for a variadic function. For the M32R, we must
1285 create a register parameter block, and then copy any anonymous arguments
1286 in registers to memory.
1287
1288 CUM has not been updated for the last named argument which has type TYPE
1289 and mode MODE, and we rely on this fact. */
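/* Editor's note: an illustrative sketch, not part of the original source,
   again assuming M32R_MAX_PARM_REGS == 4.  For
   int log_msg (const char *fmt, ...), the named FMT argument uses one
   register, so first_anon_arg == 1; the remaining three argument
   registers are dumped into the register parameter block and
   *pretend_size becomes 3 * UNITS_PER_WORD.  */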
1290
1291 static void
1292 m32r_setup_incoming_varargs (cumulative_args_t cum, machine_mode mode,
1293 tree type, int *pretend_size, int no_rtl)
1294 {
1295 int first_anon_arg;
1296
1297 if (no_rtl)
1298 return;
1299
1300 /* All BLKmode values are passed by reference. */
1301 gcc_assert (mode != BLKmode);
1302
1303 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1304 + ROUND_ADVANCE_ARG (mode, type));
1305
1306 if (first_anon_arg < M32R_MAX_PARM_REGS)
1307 {
1308 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1309 int first_reg_offset = first_anon_arg;
1310 /* Size in words to "pretend" allocate. */
1311 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1312 rtx regblock;
1313
1314 regblock = gen_frame_mem (BLKmode,
1315 plus_constant (Pmode, arg_pointer_rtx,
1316 FIRST_PARM_OFFSET (0)));
1317 set_mem_alias_set (regblock, get_varargs_alias_set ());
1318 move_block_from_reg (first_reg_offset, regblock, size);
1319
1320 *pretend_size = (size * UNITS_PER_WORD);
1321 }
1322 }
1323
1324 \f
1325 /* Return true if INSN is a real instruction-bearing insn. */
1326
1327 static int
1328 m32r_is_insn (rtx insn)
1329 {
1330 return (NONDEBUG_INSN_P (insn)
1331 && GET_CODE (PATTERN (insn)) != USE
1332 && GET_CODE (PATTERN (insn)) != CLOBBER);
1333 }
1334
1335 /* Increase the priority of long instructions so that the
1336 short instructions are scheduled ahead of the long ones. */
1337
1338 static int
1339 m32r_adjust_priority (rtx_insn *insn, int priority)
1340 {
1341 if (m32r_is_insn (insn)
1342 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1343 priority <<= 3;
1344
1345 return priority;
1346 }
1347
1348 \f
1349 /* Indicate how many instructions can be issued at the same time.
1350 This is sort of a lie. The m32r can issue only 1 long insn at
1351 once, but it can issue 2 short insns. The default therefore is
1352 set at 2, but this can be overridden by the command line option
1353 -missue-rate=1. */
1354
1355 static int
1356 m32r_issue_rate (void)
1357 {
1358 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1359 }
1360 \f
1361 /* Cost functions. */
1362 /* Memory is 3 times as expensive as registers.
1363 ??? Is that the right way to look at it? */
1364
1365 static int
1366 m32r_memory_move_cost (machine_mode mode,
1367 reg_class_t rclass ATTRIBUTE_UNUSED,
1368 bool in ATTRIBUTE_UNUSED)
1369 {
1370 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1371 return 6;
1372 else
1373 return 12;
1374 }
1375
1376 static bool
1377 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1378 int opno ATTRIBUTE_UNUSED, int *total,
1379 bool speed ATTRIBUTE_UNUSED)
1380 {
1381 switch (code)
1382 {
1383 /* Small integers are as cheap as registers. 4 byte values can be
1384 fetched as immediate constants - let's give that the cost of an
1385 extra insn. */
1386 case CONST_INT:
1387 if (INT16_P (INTVAL (x)))
1388 {
1389 *total = 0;
1390 return true;
1391 }
1392 /* FALLTHRU */
1393
1394 case CONST:
1395 case LABEL_REF:
1396 case SYMBOL_REF:
1397 *total = COSTS_N_INSNS (1);
1398 return true;
1399
1400 case CONST_DOUBLE:
1401 {
1402 rtx high, low;
1403
1404 split_double (x, &high, &low);
1405 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1406 + !INT16_P (INTVAL (low)));
1407 return true;
1408 }
1409
1410 case MULT:
1411 *total = COSTS_N_INSNS (3);
1412 return true;
1413
1414 case DIV:
1415 case UDIV:
1416 case MOD:
1417 case UMOD:
1418 *total = COSTS_N_INSNS (10);
1419 return true;
1420
1421 default:
1422 return false;
1423 }
1424 }
1425 \f
1426 /* Type of function DECL.
1427
1428 The result is cached. To reset the cache at the end of a function,
1429 call with DECL = NULL_TREE. */
1430
1431 enum m32r_function_type
1432 m32r_compute_function_type (tree decl)
1433 {
1434 /* Cached value. */
1435 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1436 /* Last function we were called for. */
1437 static tree last_fn = NULL_TREE;
1438
1439 /* Resetting the cached value? */
1440 if (decl == NULL_TREE)
1441 {
1442 fn_type = M32R_FUNCTION_UNKNOWN;
1443 last_fn = NULL_TREE;
1444 return fn_type;
1445 }
1446
1447 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1448 return fn_type;
1449
1450 /* Compute function type. */
1451 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1452 ? M32R_FUNCTION_INTERRUPT
1453 : M32R_FUNCTION_NORMAL);
1454
1455 last_fn = decl;
1456 return fn_type;
1457 }
1458 \f/* Function prologue/epilogue handlers. */
1459
1460 /* M32R stack frames look like:
1461
1462              Before call                       After call
1463         +-----------------------+       +-----------------------+
1464         |                       |       |                       |
1465    high |  local variables,     |       |  local variables,     |
1466    mem  |  reg save area, etc.  |       |  reg save area, etc.  |
1467         |                       |       |                       |
1468         +-----------------------+       +-----------------------+
1469         |                       |       |                       |
1470         |  arguments on stack.  |       |  arguments on stack.  |
1471         |                       |       |                       |
1472   SP+0->+-----------------------+       +-----------------------+
1473                                         |  reg parm save area,  |
1474                                         |  only created for     |
1475                                         |  variable argument    |
1476                                         |  functions            |
1477                                         +-----------------------+
1478                                         |  previous frame ptr   |
1479                                         +-----------------------+
1480                                         |                       |
1481                                         |  register save area   |
1482                                         |                       |
1483                                         +-----------------------+
1484                                         |    return address     |
1485                                         +-----------------------+
1486                                         |                       |
1487                                         |  local variables      |
1488                                         |                       |
1489                                         +-----------------------+
1490                                         |                       |
1491                                         |  alloca allocations   |
1492                                         |                       |
1493                                         +-----------------------+
1494                                         |                       |
1495    low                                  |  arguments on stack   |
1496  memory                                 |                       |
1497                                   SP+0->+-----------------------+
1498
1499 Notes:
1500 1) The "reg parm save area" does not exist for non variable argument fns.
1501 2) The "reg parm save area" can be eliminated completely if we saved regs
1502 containing anonymous args separately but that complicates things too
1503 much (so it's not done).
1504 3) The return address is saved after the register save area so as to have as
1505 many insns as possible between the restoration of `lr' and the `jmp lr'. */
1506
1507 /* Structure to be filled in by m32r_compute_frame_size with register
1508 save masks, and offsets for the current function. */
1509 struct m32r_frame_info
1510 {
1511 unsigned int total_size; /* # bytes that the entire frame takes up. */
1512 unsigned int extra_size; /* # bytes of extra stuff. */
1513 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1514 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1515 unsigned int reg_size; /* # bytes needed to store regs. */
1516 unsigned int var_size; /* # bytes that variables take up. */
1517 unsigned int gmask; /* Mask of saved gp registers. */
1518 unsigned int save_fp; /* Nonzero if fp must be saved. */
1519 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1520 int initialized; /* Nonzero if frame size already calculated. */
1521 };
1522
1523 /* Current frame information calculated by m32r_compute_frame_size. */
1524 static struct m32r_frame_info current_frame_info;
1525
1526 /* Zero structure to initialize current_frame_info. */
1527 static struct m32r_frame_info zero_frame_info;
1528
1529 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1530 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1531
1532 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1533 The return address and frame pointer are treated separately.
1534 Don't consider them here. */
1535 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1536 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1537 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1538
1539 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1540 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1541
1542 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1543 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1544
1545 /* Return the bytes needed to compute the frame pointer from the current
1546 stack pointer.
1547
1548 SIZE is the size needed for local variables. */
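/* Editor's note: a worked example, not part of the original source,
   assuming FIRST_PARM_OFFSET (0) == 0 and 4-byte stack alignment.  For a
   function with 20 bytes of locals, two call-saved registers plus lr to
   save, no frame pointer and no outgoing or pretend arguments:
   var_size = 20, reg_size = 3 * 4 = 12, total_size = 32.  The prologue
   then allocates total_size - (pretend_size + reg_size) = 20 bytes after
   pushing the three registers.  */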
1549
1550 unsigned int
1551 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1552 {
1553 unsigned int regno;
1554 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1555 unsigned int reg_size;
1556 unsigned int gmask;
1557 enum m32r_function_type fn_type;
1558 int interrupt_p;
1559 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1560 | crtl->profile);
1561
1562 var_size = M32R_STACK_ALIGN (size);
1563 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1564 pretend_size = crtl->args.pretend_args_size;
1565 extra_size = FIRST_PARM_OFFSET (0);
1566 total_size = extra_size + pretend_size + args_size + var_size;
1567 reg_size = 0;
1568 gmask = 0;
1569
1570 /* See if this is an interrupt handler. Call used registers must be saved
1571 for them too. */
1572 fn_type = m32r_compute_function_type (current_function_decl);
1573 interrupt_p = M32R_INTERRUPT_P (fn_type);
1574
1575 /* Calculate space needed for registers. */
1576 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1577 {
1578 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1579 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1580 {
1581 reg_size += UNITS_PER_WORD;
1582 gmask |= 1 << regno;
1583 }
1584 }
1585
1586 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1587 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1588
1589 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1590 * UNITS_PER_WORD);
1591 total_size += reg_size;
1592
1593 /* ??? Not sure this is necessary, and I don't think the epilogue
1594 handler will do the right thing if this changes total_size. */
1595 total_size = M32R_STACK_ALIGN (total_size);
1596
1597 /* frame_size = total_size - (pretend_size + reg_size); */
1598
1599 /* Save computed information. */
1600 current_frame_info.total_size = total_size;
1601 current_frame_info.extra_size = extra_size;
1602 current_frame_info.pretend_size = pretend_size;
1603 current_frame_info.var_size = var_size;
1604 current_frame_info.args_size = args_size;
1605 current_frame_info.reg_size = reg_size;
1606 current_frame_info.gmask = gmask;
1607 current_frame_info.initialized = reload_completed;
1608
1609 /* Ok, we're done. */
1610 return total_size;
1611 }
1612
1613 /* Worker function for TARGET_CAN_ELIMINATE. */
1614
1615 bool
1616 m32r_can_eliminate (const int from, const int to)
1617 {
1618 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1619 ? ! frame_pointer_needed
1620 : true);
1621 }
1622
1623 \f
1624 /* The table we use to reference PIC data. */
1625 static rtx global_offset_table;
1626
1627 static void
1628 m32r_reload_lr (rtx sp, int size)
1629 {
1630 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1631
1632 if (size == 0)
1633 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1634 else if (size < 32768)
1635 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1636 gen_rtx_PLUS (Pmode, sp,
1637 GEN_INT (size)))));
1638 else
1639 {
1640 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1641
1642 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1643 emit_insn (gen_addsi3 (tmp, tmp, sp));
1644 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1645 }
1646
1647 emit_use (lr);
1648 }
1649
1650 void
1651 m32r_load_pic_register (void)
1652 {
1653 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1654 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1655 GEN_INT (TARGET_MODEL_SMALL)));
1656
1657 /* Need to emit this whether or not we obey regdecls,
1658 since setjmp/longjmp can cause life info to screw up. */
1659 emit_use (pic_offset_table_rtx);
1660 }
1661
1662 /* Expand the m32r prologue as a series of insns. */
1663
1664 void
1665 m32r_expand_prologue (void)
1666 {
1667 int regno;
1668 int frame_size;
1669 unsigned int gmask;
1670 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1671 | crtl->profile);
1672
1673 if (! current_frame_info.initialized)
1674 m32r_compute_frame_size (get_frame_size ());
1675
1676 gmask = current_frame_info.gmask;
1677
1678 /* These cases shouldn't happen. Catch them now. */
1679 gcc_assert (current_frame_info.total_size || !gmask);
1680
1681 /* Allocate space for register arguments if this is a variadic function. */
1682 if (current_frame_info.pretend_size != 0)
1683 {
1684 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1685 the wrong result on a 64-bit host. */
1686 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1687 emit_insn (gen_addsi3 (stack_pointer_rtx,
1688 stack_pointer_rtx,
1689 GEN_INT (-pretend_size)));
1690 }
1691
1692 /* Save any registers we need to and set up fp. */
1693 if (current_frame_info.save_fp)
1694 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1695
1696 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1697
1698 /* Save any needed call-saved regs (and call-used if this is an
1699 interrupt handler). */
1700 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1701 {
1702 if ((gmask & (1 << regno)) != 0)
1703 emit_insn (gen_movsi_push (stack_pointer_rtx,
1704 gen_rtx_REG (Pmode, regno)));
1705 }
1706
1707 if (current_frame_info.save_lr)
1708 emit_insn (gen_movsi_push (stack_pointer_rtx,
1709 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1710
1711 /* Allocate the stack frame. */
1712 frame_size = (current_frame_info.total_size
1713 - (current_frame_info.pretend_size
1714 + current_frame_info.reg_size));
1715
1716 if (frame_size == 0)
1717 ; /* Nothing to do. */
1718 else if (frame_size <= 32768)
1719 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1720 GEN_INT (-frame_size)));
1721 else
1722 {
1723 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1724
1725 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1726 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1727 }
1728
1729 if (frame_pointer_needed)
1730 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1731
1732 if (crtl->profile)
1733 /* Push lr for mcount (form_pc, x). */
1734 emit_insn (gen_movsi_push (stack_pointer_rtx,
1735 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1736
1737 if (pic_reg_used)
1738 {
1739 m32r_load_pic_register ();
1740 m32r_reload_lr (stack_pointer_rtx,
1741 (crtl->profile ? 0 : frame_size));
1742 }
1743
1744 if (crtl->profile && !pic_reg_used)
1745 emit_insn (gen_blockage ());
1746 }
1747
1748 \f
1749 /* Set up the stack and frame pointer (if desired) for the function.
1750 Note, if this is changed, you need to mirror the changes in
1751 m32r_compute_frame_size which calculates the prolog size. */
1752
1753 static void
1754 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1755 {
1756 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1757
1758 /* If this is an interrupt handler, mark it as such. */
1759 if (M32R_INTERRUPT_P (fn_type))
1760 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1761
1762 if (! current_frame_info.initialized)
1763 m32r_compute_frame_size (size);
1764
1765 /* This is only for the human reader. */
1766 fprintf (file,
1767 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1768 ASM_COMMENT_START,
1769 current_frame_info.var_size,
1770 current_frame_info.reg_size / 4,
1771 current_frame_info.args_size,
1772 current_frame_info.extra_size);
1773 }
1774 \f
1775 /* Output RTL to pop register REGNO from the stack. */
1776
1777 static void
1778 pop (int regno)
1779 {
1780 rtx x;
1781
1782 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1783 stack_pointer_rtx));
1784 add_reg_note (x, REG_INC, stack_pointer_rtx);
1785 }
1786
1787 /* Expand the m32r epilogue as a series of insns. */
1788
1789 void
1790 m32r_expand_epilogue (void)
1791 {
1792 int regno;
1793 int noepilogue = FALSE;
1794 int total_size;
1795
1796 gcc_assert (current_frame_info.initialized);
1797 total_size = current_frame_info.total_size;
1798
1799 if (total_size == 0)
1800 {
1801 rtx insn = get_last_insn ();
1802
1803 /* If the last insn was a BARRIER, we don't have to write any code
1804 because a jump (aka return) was put there. */
1805 if (insn && NOTE_P (insn))
1806 insn = prev_nonnote_insn (insn);
1807 if (insn && BARRIER_P (insn))
1808 noepilogue = TRUE;
1809 }
1810
1811 if (!noepilogue)
1812 {
1813 unsigned int var_size = current_frame_info.var_size;
1814 unsigned int args_size = current_frame_info.args_size;
1815 unsigned int gmask = current_frame_info.gmask;
1816 int can_trust_sp_p = !cfun->calls_alloca;
1817
1818 if (flag_exceptions)
1819 emit_insn (gen_blockage ());
1820
1821 /* The first thing to do is point the sp at the bottom of the register
1822 save area. */
1823 if (can_trust_sp_p)
1824 {
1825 unsigned int reg_offset = var_size + args_size;
1826
1827 if (reg_offset == 0)
1828 ; /* Nothing to do. */
1829 else if (reg_offset < 32768)
1830 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1831 GEN_INT (reg_offset)));
1832 else
1833 {
1834 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1835
1836 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1837 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1838 tmp));
1839 }
1840 }
1841 else if (frame_pointer_needed)
1842 {
1843 unsigned int reg_offset = var_size + args_size;
1844
1845 if (reg_offset == 0)
1846 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1847 else if (reg_offset < 32768)
1848 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1849 GEN_INT (reg_offset)));
1850 else
1851 {
1852 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1853
1854 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1855 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1856 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1857 tmp));
1858 }
1859 }
1860 else
1861 gcc_unreachable ();
1862
1863 if (current_frame_info.save_lr)
1864 pop (RETURN_ADDR_REGNUM);
1865
1866 /* Restore any saved registers, in reverse order of course. */
1867 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1868 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1869 {
1870 if ((gmask & (1L << regno)) != 0)
1871 pop (regno);
1872 }
1873
1874 if (current_frame_info.save_fp)
1875 pop (FRAME_POINTER_REGNUM);
1876
1877 /* Remove varargs area if present. */
1878 if (current_frame_info.pretend_size != 0)
1879 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1880 GEN_INT (current_frame_info.pretend_size)));
1881
1882 emit_insn (gen_blockage ());
1883 }
1884 }
1885
1886 /* Do any necessary cleanup after a function to restore stack, frame,
1887 and regs. */
1888
1889 static void
1890 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1891 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1892 {
1893 /* Reset state info for each function. */
1894 current_frame_info = zero_frame_info;
1895 m32r_compute_function_type (NULL_TREE);
1896 }
1897 \f
1898 /* Return nonzero if this function is known to have a null or 1 instruction
1899 epilogue. */
1900
1901 int
1902 direct_return (void)
1903 {
1904 if (!reload_completed)
1905 return FALSE;
1906
1907 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1908 return FALSE;
1909
1910 if (! current_frame_info.initialized)
1911 m32r_compute_frame_size (get_frame_size ());
1912
1913 return current_frame_info.total_size == 0;
1914 }
1915
1916 \f
1917 /* PIC. */
1918
1919 int
1920 m32r_legitimate_pic_operand_p (rtx x)
1921 {
1922 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1923 return 0;
1924
1925 if (GET_CODE (x) == CONST
1926 && GET_CODE (XEXP (x, 0)) == PLUS
1927 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1928 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1929 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1930 return 0;
1931
1932 return 1;
1933 }
1934
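/* Legitimize the symbolic address ORIG for PIC. Local symbols and labels
are computed as a GOT-relative offset added to the PIC register; other
symbols are loaded from their GOT slot. If REG is non-null it is used as
the target register, otherwise a new pseudo is allocated. The legitimized
address is returned. */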
1935 rtx
1936 m32r_legitimize_pic_address (rtx orig, rtx reg)
1937 {
1938 #ifdef DEBUG_PIC
1939 printf("m32r_legitimize_pic_address()\n");
1940 #endif
1941
1942 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1943 {
1944 rtx pic_ref, address;
1945 int subregs = 0;
1946
1947 if (reg == 0)
1948 {
1949 gcc_assert (!reload_in_progress && !reload_completed);
1950 reg = gen_reg_rtx (Pmode);
1951
1952 subregs = 1;
1953 }
1954
1955 if (subregs)
1956 address = gen_reg_rtx (Pmode);
1957 else
1958 address = reg;
1959
1960 crtl->uses_pic_offset_table = 1;
1961
1962 if (GET_CODE (orig) == LABEL_REF
1963 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1964 {
1965 emit_insn (gen_gotoff_load_addr (reg, orig));
1966 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1967 return reg;
1968 }
1969
1970 emit_insn (gen_pic_load_addr (address, orig));
1971
1972 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1973 pic_ref = gen_const_mem (Pmode, address);
1974 emit_move_insn (reg, pic_ref);
1975 return reg;
1976 }
1977 else if (GET_CODE (orig) == CONST)
1978 {
1979 rtx base, offset;
1980
1981 if (GET_CODE (XEXP (orig, 0)) == PLUS
1982 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1983 return orig;
1984
1985 if (reg == 0)
1986 {
1987 gcc_assert (!reload_in_progress && !reload_completed);
1988 reg = gen_reg_rtx (Pmode);
1989 }
1990
1991 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1992 {
1993 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1994 if (base == reg)
1995 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1996 else
1997 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1998 }
1999 else
2000 return orig;
2001
2002 if (CONST_INT_P (offset))
2003 {
2004 if (INT16_P (INTVAL (offset)))
2005 return plus_constant (Pmode, base, INTVAL (offset));
2006 else
2007 {
2008 gcc_assert (! reload_in_progress && ! reload_completed);
2009 offset = force_reg (Pmode, offset);
2010 }
2011 }
2012
2013 return gen_rtx_PLUS (Pmode, base, offset);
2014 }
2015
2016 return orig;
2017 }
2018
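/* Worker function for TARGET_LEGITIMIZE_ADDRESS. */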
2019 static rtx
2020 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2021 machine_mode mode ATTRIBUTE_UNUSED)
2022 {
2023 if (flag_pic)
2024 return m32r_legitimize_pic_address (x, NULL_RTX);
2025 else
2026 return x;
2027 }
2028
2029 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2030
2031 static bool
2032 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2033 {
2034 if (GET_CODE (addr) == LO_SUM)
2035 return true;
2036
2037 return false;
2038 }
2039 \f
2040 /* Nested function support. */
2041
2042 /* Emit RTL insns to initialize the variable parts of a trampoline.
2043 FNADDR is an RTX for the address of the function's pure code.
2044 CXT is an RTX for the static chain value for the function. */
2045
2046 void
2047 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2048 rtx fnaddr ATTRIBUTE_UNUSED,
2049 rtx cxt ATTRIBUTE_UNUSED)
2050 {
2051 }
2052 \f
2053 static void
2054 m32r_file_start (void)
2055 {
2056 default_file_start ();
2057
2058 if (flag_verbose_asm)
2059 fprintf (asm_out_file,
2060 "%s M32R/D special options: -G %d\n",
2061 ASM_COMMENT_START, g_switch_value);
2062
2063 if (TARGET_LITTLE_ENDIAN)
2064 fprintf (asm_out_file, "\t.little\n");
2065 }
2066 \f
2067 /* Print operand X (an rtx) in assembler syntax to file FILE.
2068 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2069 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2070
2071 static void
2072 m32r_print_operand (FILE * file, rtx x, int code)
2073 {
2074 rtx addr;
2075
2076 switch (code)
2077 {
2078 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2079 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2080 case 's':
2081 if (REG_P (x))
2082 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2083 else
2084 output_operand_lossage ("invalid operand to %%s code");
2085 return;
2086
2087 case 'p':
2088 if (REG_P (x))
2089 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2090 else
2091 output_operand_lossage ("invalid operand to %%p code");
2092 return;
2093
2094 case 'R' :
2095 /* Write second word of DImode or DFmode reference,
2096 register or memory. */
2097 if (REG_P (x))
2098 fputs (reg_names[REGNO (x)+1], file);
2099 else if (MEM_P (x))
2100 {
2101 fprintf (file, "@(");
2102 /* Handle possible auto-increment. Since it is pre-increment and
2103 we have already done it, we can just use an offset of four. */
2104 /* ??? This is taken from rs6000.c I think. I don't think it is
2105 currently necessary, but keep it around. */
2106 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2107 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2108 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2109 else
2110 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2111 fputc (')', file);
2112 }
2113 else
2114 output_operand_lossage ("invalid operand to %%R code");
2115 return;
2116
2117 case 'H' : /* High word. */
2118 case 'L' : /* Low word. */
2119 if (REG_P (x))
2120 {
2121 /* L = least significant word, H = most significant word. */
2122 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2123 fputs (reg_names[REGNO (x)], file);
2124 else
2125 fputs (reg_names[REGNO (x)+1], file);
2126 }
2127 else if (CONST_INT_P (x)
2128 || GET_CODE (x) == CONST_DOUBLE)
2129 {
2130 rtx first, second;
2131
2132 split_double (x, &first, &second);
2133 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2134 code == 'L' ? INTVAL (first) : INTVAL (second));
2135 }
2136 else
2137 output_operand_lossage ("invalid operand to %%H/%%L code");
2138 return;
2139
2140 case 'A' :
2141 {
2142 char str[30];
2143
2144 if (GET_CODE (x) != CONST_DOUBLE
2145 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2146 fatal_insn ("bad insn for 'A'", x);
2147
2148 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2149 fprintf (file, "%s", str);
2150 return;
2151 }
2152
2153 case 'B' : /* Bottom half. */
2154 case 'T' : /* Top half. */
2155 /* Output the argument to a `seth' insn (sets the Top half-word).
2156 For constants output arguments to a seth/or3 pair to set Top and
2157 Bottom halves. For symbols output arguments to a seth/add3 pair to
2158 set Top and Bottom halves. The difference exists because for
2159 constants seth/or3 is more readable but for symbols we need to use
2160 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
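/* For example, for the SImode constant 0x12345678, %T prints 0x1234 and
%B prints 0x5678, so a seth/or3 pair can materialize the full word. */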
2161 switch (GET_CODE (x))
2162 {
2163 case CONST_INT :
2164 case CONST_DOUBLE :
2165 {
2166 rtx first, second;
2167
2168 split_double (x, &first, &second);
2169 x = WORDS_BIG_ENDIAN ? second : first;
2170 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2171 (code == 'B'
2172 ? INTVAL (x) & 0xffff
2173 : (INTVAL (x) >> 16) & 0xffff));
2174 }
2175 return;
2176 case CONST :
2177 case SYMBOL_REF :
2178 if (code == 'B'
2179 && small_data_operand (x, VOIDmode))
2180 {
2181 fputs ("sda(", file);
2182 output_addr_const (file, x);
2183 fputc (')', file);
2184 return;
2185 }
2186 /* fall through */
2187 case LABEL_REF :
2188 fputs (code == 'T' ? "shigh(" : "low(", file);
2189 output_addr_const (file, x);
2190 fputc (')', file);
2191 return;
2192 default :
2193 output_operand_lossage ("invalid operand to %%T/%%B code");
2194 return;
2195 }
2196 break;
2197
2198 case 'U' :
2199 /* ??? wip */
2200 /* Output a load/store with update indicator if appropriate. */
2201 if (MEM_P (x))
2202 {
2203 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2204 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2205 fputs (".a", file);
2206 }
2207 else
2208 output_operand_lossage ("invalid operand to %%U code");
2209 return;
2210
2211 case 'N' :
2212 /* Print a constant value negated. */
2213 if (CONST_INT_P (x))
2214 output_addr_const (file, GEN_INT (- INTVAL (x)));
2215 else
2216 output_operand_lossage ("invalid operand to %%N code");
2217 return;
2218
2219 case 'X' :
2220 /* Print a const_int in hex. Used in comments. */
2221 if (CONST_INT_P (x))
2222 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2223 return;
2224
2225 case '#' :
2226 fputs (IMMEDIATE_PREFIX, file);
2227 return;
2228
2229 case 0 :
2230 /* Do nothing special. */
2231 break;
2232
2233 default :
2234 /* Unknown flag. */
2235 output_operand_lossage ("invalid operand output code");
2236 }
2237
2238 switch (GET_CODE (x))
2239 {
2240 case REG :
2241 fputs (reg_names[REGNO (x)], file);
2242 break;
2243
2244 case MEM :
2245 addr = XEXP (x, 0);
2246 if (GET_CODE (addr) == PRE_INC)
2247 {
2248 if (!REG_P (XEXP (addr, 0)))
2249 fatal_insn ("pre-increment address is not a register", x);
2250
2251 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2252 }
2253 else if (GET_CODE (addr) == PRE_DEC)
2254 {
2255 if (!REG_P (XEXP (addr, 0)))
2256 fatal_insn ("pre-decrement address is not a register", x);
2257
2258 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2259 }
2260 else if (GET_CODE (addr) == POST_INC)
2261 {
2262 if (!REG_P (XEXP (addr, 0)))
2263 fatal_insn ("post-increment address is not a register", x);
2264
2265 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2266 }
2267 else
2268 {
2269 fputs ("@(", file);
2270 output_address (XEXP (x, 0));
2271 fputc (')', file);
2272 }
2273 break;
2274
2275 case CONST_DOUBLE :
2276 /* We handle SFmode constants here as output_addr_const doesn't. */
2277 if (GET_MODE (x) == SFmode)
2278 {
2279 REAL_VALUE_TYPE d;
2280 long l;
2281
2282 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2283 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2284 fprintf (file, "0x%08lx", l);
2285 break;
2286 }
2287
2288 /* Fall through. Let output_addr_const deal with it. */
2289
2290 default :
2291 output_addr_const (file, x);
2292 break;
2293 }
2294 }
2295
2296 /* Print a memory address as an operand to reference that memory location. */
2297
2298 static void
2299 m32r_print_operand_address (FILE * file, rtx addr)
2300 {
2301 rtx base;
2302 rtx index = 0;
2303 int offset = 0;
2304
2305 switch (GET_CODE (addr))
2306 {
2307 case REG :
2308 fputs (reg_names[REGNO (addr)], file);
2309 break;
2310
2311 case PLUS :
2312 if (CONST_INT_P (XEXP (addr, 0)))
2313 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2314 else if (CONST_INT_P (XEXP (addr, 1)))
2315 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2316 else
2317 base = XEXP (addr, 0), index = XEXP (addr, 1);
2318 if (REG_P (base))
2319 {
2320 /* Print the offset first (if present) to conform to the manual. */
2321 if (index == 0)
2322 {
2323 if (offset != 0)
2324 fprintf (file, "%d,", offset);
2325 fputs (reg_names[REGNO (base)], file);
2326 }
2327 /* The chip doesn't support this, but it is left in for generality. */
2328 else if (REG_P (index))
2329 fprintf (file, "%s,%s",
2330 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2331 /* Not sure this can happen, but leave in for now. */
2332 else if (GET_CODE (index) == SYMBOL_REF)
2333 {
2334 output_addr_const (file, index);
2335 fputc (',', file);
2336 fputs (reg_names[REGNO (base)], file);
2337 }
2338 else
2339 fatal_insn ("bad address", addr);
2340 }
2341 else if (GET_CODE (base) == LO_SUM)
2342 {
2343 gcc_assert (!index && REG_P (XEXP (base, 0)));
2344 if (small_data_operand (XEXP (base, 1), VOIDmode))
2345 fputs ("sda(", file);
2346 else
2347 fputs ("low(", file);
2348 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2349 offset));
2350 fputs ("),", file);
2351 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2352 }
2353 else
2354 fatal_insn ("bad address", addr);
2355 break;
2356
2357 case LO_SUM :
2358 if (!REG_P (XEXP (addr, 0)))
2359 fatal_insn ("lo_sum not of register", addr);
2360 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2361 fputs ("sda(", file);
2362 else
2363 fputs ("low(", file);
2364 output_addr_const (file, XEXP (addr, 1));
2365 fputs ("),", file);
2366 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2367 break;
2368
2369 case PRE_INC : /* Assume SImode. */
2370 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2371 break;
2372
2373 case PRE_DEC : /* Assume SImode. */
2374 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2375 break;
2376
2377 case POST_INC : /* Assume SImode. */
2378 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2379 break;
2380
2381 default :
2382 output_addr_const (file, addr);
2383 break;
2384 }
2385 }
2386
2387 static bool
2388 m32r_print_operand_punct_valid_p (unsigned char code)
2389 {
2390 return m32r_punct_chars[code];
2391 }
2392
2393 /* Return true if the operands are the constants 0 and 1. */
2394
2395 int
2396 zero_and_one (rtx operand1, rtx operand2)
2397 {
2398 return
2399 CONST_INT_P (operand1)
2400 && CONST_INT_P (operand2)
2401 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2402 ||((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2403 }
2404
2405 /* Generate the correct assembler code to handle the conditional loading of a
2406 value into a register. It is known that the operands satisfy the
2407 conditional_move_operand() function above. The destination is operand[0].
2408 The condition is operand [1]. The 'true' value is operand [2] and the
2409 'false' value is operand [3]. */
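/* For example, for an EQ condition with a 'true' value of 1 and a 'false'
value of 0 this emits just "mvfc dest, cbr"; when the comparison is NE
(or the 'true' value is 0) a trailing "xor3 dest, dest, #1" inverts the
copied condition bit. */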
2410
2411 char *
2412 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2413 {
2414 static char buffer [100];
2415 const char * dest = reg_names [REGNO (operands [0])];
2416
2417 buffer [0] = 0;
2418
2419 /* Destination must be a register. */
2420 gcc_assert (REG_P (operands [0]));
2421 gcc_assert (conditional_move_operand (operands [2], SImode));
2422 gcc_assert (conditional_move_operand (operands [3], SImode));
2423
2424 /* Check to see if the test is reversed. */
2425 if (GET_CODE (operands [1]) == NE)
2426 {
2427 rtx tmp = operands [2];
2428 operands [2] = operands [3];
2429 operands [3] = tmp;
2430 }
2431
2432 sprintf (buffer, "mvfc %s, cbr", dest);
2433
2434 /* If the true value was '0' then we need to invert the result of the move. */
2435 if (INTVAL (operands [2]) == 0)
2436 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2437 dest, dest);
2438
2439 return buffer;
2440 }
2441
2442 /* Returns true if the registers contained in the two
2443 rtl expressions are different. */
2444
2445 int
2446 m32r_not_same_reg (rtx a, rtx b)
2447 {
2448 int reg_a = -1;
2449 int reg_b = -2;
2450
2451 while (GET_CODE (a) == SUBREG)
2452 a = SUBREG_REG (a);
2453
2454 if (REG_P (a))
2455 reg_a = REGNO (a);
2456
2457 while (GET_CODE (b) == SUBREG)
2458 b = SUBREG_REG (b);
2459
2460 if (REG_P (b))
2461 reg_b = REGNO (b);
2462
2463 return reg_a != reg_b;
2464 }
2465
2466 \f
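/* Return a SYMBOL_REF for NAME, with SYMBOL_REF_FLAGS recording the code
model currently selected by -mmodel=. */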
2467 rtx
2468 m32r_function_symbol (const char *name)
2469 {
2470 int extra_flags = 0;
2471 enum m32r_model model;
2472 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2473
2474 if (TARGET_MODEL_SMALL)
2475 model = M32R_MODEL_SMALL;
2476 else if (TARGET_MODEL_MEDIUM)
2477 model = M32R_MODEL_MEDIUM;
2478 else if (TARGET_MODEL_LARGE)
2479 model = M32R_MODEL_LARGE;
2480 else
2481 gcc_unreachable (); /* Shouldn't happen. */
2482 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2483
2484 if (extra_flags)
2485 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2486
2487 return sym;
2488 }
2489
2490 /* Use a library function to move some bytes. */
2491
2492 static void
2493 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2494 {
2495 /* We want to pass the size as Pmode, which will normally be SImode
2496 but will be DImode if we are using 64-bit longs and pointers. */
2497 if (GET_MODE (bytes_rtx) != VOIDmode
2498 && GET_MODE (bytes_rtx) != Pmode)
2499 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2500
2501 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2502 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2503 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2504 TYPE_UNSIGNED (sizetype)),
2505 TYPE_MODE (sizetype));
2506 }
2507
2508 /* Expand string/block move operations.
2509
2510 operands[0] is the pointer to the destination.
2511 operands[1] is the pointer to the source.
2512 operands[2] is the number of bytes to move.
2513 operands[3] is the alignment.
2514
2515 Returns 1 upon success, 0 otherwise. */
2516
2517 int
2518 m32r_expand_block_move (rtx operands[])
2519 {
2520 rtx orig_dst = operands[0];
2521 rtx orig_src = operands[1];
2522 rtx bytes_rtx = operands[2];
2523 rtx align_rtx = operands[3];
2524 int constp = CONST_INT_P (bytes_rtx);
2525 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2526 int align = INTVAL (align_rtx);
2527 int leftover;
2528 rtx src_reg;
2529 rtx dst_reg;
2530
2531 if (constp && bytes <= 0)
2532 return 1;
2533
2534 /* Move the address into scratch registers. */
2535 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2536 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2537
2538 if (align > UNITS_PER_WORD)
2539 align = UNITS_PER_WORD;
2540
2541 /* If we prefer size over speed, always use a function call.
2542 If we do not know the size, use a function call.
2543 If the blocks are not word aligned, use a function call. */
2544 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2545 {
2546 block_move_call (dst_reg, src_reg, bytes_rtx);
2547 return 0;
2548 }
2549
2550 leftover = bytes % MAX_MOVE_BYTES;
2551 bytes -= leftover;
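/* LEFTOVER now holds the sub-MAX_MOVE_BYTES remainder, which is copied by
the single movmemsi_internal emitted at the end of this function. */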
2552
2553 /* If necessary, generate a loop to handle the bulk of the copy. */
2554 if (bytes)
2555 {
2556 rtx_code_label *label = NULL;
2557 rtx final_src = NULL_RTX;
2558 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2559 rtx rounded_total = GEN_INT (bytes);
2560 rtx new_dst_reg = gen_reg_rtx (SImode);
2561 rtx new_src_reg = gen_reg_rtx (SImode);
2562
2563 /* If we are going to have to perform this loop more than
2564 once, then generate a label and compute the address the
2565 source register will contain upon completion of the final
2566 iteration. */
2567 if (bytes > MAX_MOVE_BYTES)
2568 {
2569 final_src = gen_reg_rtx (Pmode);
2570
2571 if (INT16_P (bytes))
2572 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2573 else
2574 {
2575 emit_insn (gen_movsi (final_src, rounded_total));
2576 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2577 }
2578
2579 label = gen_label_rtx ();
2580 emit_label (label);
2581 }
2582
2583 /* It is known that m32r_output_block_move() will update src_reg to point
2584 to the word after the end of the source block, and dst_reg to point
2585 to the last word of the destination block, provided that the block
2586 is MAX_MOVE_BYTES long. */
2587 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2588 new_dst_reg, new_src_reg));
2589 emit_move_insn (dst_reg, new_dst_reg);
2590 emit_move_insn (src_reg, new_src_reg);
2591 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2592
2593 if (bytes > MAX_MOVE_BYTES)
2594 {
2595 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2596 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2597 }
2598 }
2599
2600 if (leftover)
2601 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2602 gen_reg_rtx (SImode),
2603 gen_reg_rtx (SImode)));
2604 return 1;
2605 }
2606
2607 \f
2608 /* Emit load/stores for a small constant word aligned block_move.
2609
2610 operands[0] is the memory address of the destination.
2611 operands[1] is the memory address of the source.
2612 operands[2] is the number of bytes to move.
2613 operands[3] and operands[4] receive the updated dest/source addresses.
2614 operands[5] and operands[6] are temp registers for the data being copied. */
2615
2616 void
2617 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2618 {
2619 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2620 int first_time;
2621 int got_extra = 0;
2622
2623 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2624
2625 /* We do not have a post-increment store available, so the first set of
2626 stores is done without any increment, then the remaining ones can use
2627 the pre-increment addressing mode.
2628
2629 Note: m32r_expand_block_move() also relies upon this behavior when building
2630 loops to copy large blocks. */
2631 first_time = 1;
2632
2633 while (bytes > 0)
2634 {
2635 if (bytes >= 8)
2636 {
2637 if (first_time)
2638 {
2639 output_asm_insn ("ld\t%5, %p1", operands);
2640 output_asm_insn ("ld\t%6, %p1", operands);
2641 output_asm_insn ("st\t%5, @%0", operands);
2642 output_asm_insn ("st\t%6, %s0", operands);
2643 }
2644 else
2645 {
2646 output_asm_insn ("ld\t%5, %p1", operands);
2647 output_asm_insn ("ld\t%6, %p1", operands);
2648 output_asm_insn ("st\t%5, %s0", operands);
2649 output_asm_insn ("st\t%6, %s0", operands);
2650 }
2651
2652 bytes -= 8;
2653 }
2654 else if (bytes >= 4)
2655 {
2656 if (bytes > 4)
2657 got_extra = 1;
2658
2659 output_asm_insn ("ld\t%5, %p1", operands);
2660
2661 if (got_extra)
2662 output_asm_insn ("ld\t%6, %p1", operands);
2663
2664 if (first_time)
2665 output_asm_insn ("st\t%5, @%0", operands);
2666 else
2667 output_asm_insn ("st\t%5, %s0", operands);
2668
2669 bytes -= 4;
2670 }
2671 else
2672 {
2673 /* Get the entire next word, even though we do not want all of it.
2674 This saves us from doing several smaller loads, and we assume that
2675 we cannot cause a page fault when at least part of the word is in
2676 valid memory [since we don't get called if things aren't properly
2677 aligned]. */
2678 int dst_offset = first_time ? 0 : 4;
2679 /* The amount of increment we have to make to the
2680 destination pointer. */
2681 int dst_inc_amount = dst_offset + bytes - 4;
2682 /* The same for the source pointer. */
2683 int src_inc_amount = bytes;
2684 int last_shift;
2685 rtx my_operands[3];
2686
2687 /* If got_extra is true then we have already loaded
2688 the next word as part of loading and storing the previous word. */
2689 if (! got_extra)
2690 output_asm_insn ("ld\t%6, @%1", operands);
2691
2692 if (bytes >= 2)
2693 {
2694 bytes -= 2;
2695
2696 output_asm_insn ("sra3\t%5, %6, #16", operands);
2697 my_operands[0] = operands[5];
2698 my_operands[1] = GEN_INT (dst_offset);
2699 my_operands[2] = operands[0];
2700 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2701
2702 /* If there is a byte left to store then increment the
2703 destination address and shift the contents of the source
2704 register down by 8 bits. We could not do the address
2705 increment in the store half word instruction, because it does
2706 not have an auto increment mode. */
2707 if (bytes > 0) /* assert (bytes == 1) */
2708 {
2709 dst_offset += 2;
2710 last_shift = 8;
2711 }
2712 }
2713 else
2714 last_shift = 24;
2715
2716 if (bytes > 0)
2717 {
2718 my_operands[0] = operands[6];
2719 my_operands[1] = GEN_INT (last_shift);
2720 output_asm_insn ("srai\t%0, #%1", my_operands);
2721 my_operands[0] = operands[6];
2722 my_operands[1] = GEN_INT (dst_offset);
2723 my_operands[2] = operands[0];
2724 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2725 }
2726
2727 /* Update the destination pointer if needed. We have to do
2728 this so that the pattern matches what we output in this
2729 function. */
2730 if (dst_inc_amount
2731 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2732 {
2733 my_operands[0] = operands[0];
2734 my_operands[1] = GEN_INT (dst_inc_amount);
2735 output_asm_insn ("addi\t%0, #%1", my_operands);
2736 }
2737
2738 /* Update the source pointer if needed. We have to do this
2739 so that the pattern matches what we output in this
2740 function. */
2741 if (src_inc_amount
2742 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2743 {
2744 my_operands[0] = operands[1];
2745 my_operands[1] = GEN_INT (src_inc_amount);
2746 output_asm_insn ("addi\t%0, #%1", my_operands);
2747 }
2748
2749 bytes = 0;
2750 }
2751
2752 first_time = 0;
2753 }
2754 }
2755
2756 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2757
2758 int
2759 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2760 unsigned int new_reg)
2761 {
2762 /* Interrupt routines can't clobber any register that isn't already used. */
2763 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2764 && !df_regs_ever_live_p (new_reg))
2765 return 0;
2766
2767 return 1;
2768 }
2769
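/* Return the rtx for the return address COUNT frames up. Only the current
frame (COUNT == 0) is supported; its return address is the entry value of
lr (RETURN_ADDR_REGNUM). */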
2770 rtx
2771 m32r_return_addr (int count)
2772 {
2773 if (count != 0)
2774 return const0_rtx;
2775
2776 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2777 }
2778
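/* Worker for TARGET_TRAMPOLINE_INIT. Copy the four fixed instruction words
of the trampoline template into M_TRAMP (in the appropriate byte order),
store the static chain value at offset 16 and the target function's address
at offset 20, then flush the instruction cache either via a trap or via the
configured cache-flush library function. */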
2779 static void
2780 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2781 {
2782 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2783 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2784 0x017e8e17 : 0x178e7e01, SImode));
2785 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2786 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2787 0x0c00ae86 : 0x86ae000c, SImode));
2788 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2789 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2790 0xe627871e : 0x1e8727e6, SImode));
2791 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2792 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2793 0xc616c626 : 0x26c616c6, SImode));
2794 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2795 chain_value);
2796 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2797 XEXP (DECL_RTL (fndecl), 0));
2798
2799 if (m32r_cache_flush_trap >= 0)
2800 emit_insn (gen_flush_icache
2801 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2802 gen_int_mode (m32r_cache_flush_trap, SImode)));
2803 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2804 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2805 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2806 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2807 GEN_INT (3), SImode);
2808 }
2809
2810 /* True if X is a reg that can be used as a base reg. */
2811
2812 static bool
2813 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2814 {
2815 if (! REG_P (x))
2816 return false;
2817
2818 if (strict)
2819 {
2820 if (GPR_P (REGNO (x)))
2821 return true;
2822 }
2823 else
2824 {
2825 if (GPR_P (REGNO (x))
2826 || REGNO (x) == ARG_POINTER_REGNUM
2827 || ! HARD_REGISTER_P (x))
2828 return true;
2829 }
2830
2831 return false;
2832 }
2833
2834 static inline bool
2835 m32r_rtx_ok_for_offset_p (const_rtx x)
2836 {
2837 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2838 }
2839
2840 static inline bool
2841 m32r_legitimate_offset_address_p (machine_mode mode ATTRIBUTE_UNUSED,
2842 const_rtx x, bool strict)
2843 {
2844 if (GET_CODE (x) == PLUS
2845 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2846 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2847 return true;
2848
2849 return false;
2850 }
2851
2852 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2853 since more than one instruction will be required. */
2854
2855 static inline bool
2856 m32r_legitimate_lo_sum_address_p (machine_mode mode, const_rtx x,
2857 bool strict)
2858 {
2859 if (GET_CODE (x) == LO_SUM
2860 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2861 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2862 && CONSTANT_P (XEXP (x, 1)))
2863 return true;
2864
2865 return false;
2866 }
2867
2868 /* Is this a load and increment operation? */
2869
2870 static inline bool
2871 m32r_load_postinc_p (machine_mode mode, const_rtx x, bool strict)
2872 {
2873 if ((mode == SImode || mode == SFmode)
2874 && GET_CODE (x) == POST_INC
2875 && REG_P (XEXP (x, 0))
2876 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2877 return true;
2878
2879 return false;
2880 }
2881
2882 /* Is this an increment/decrement and store operation? */
2883
2884 static inline bool
2885 m32r_store_preinc_predec_p (machine_mode mode, const_rtx x, bool strict)
2886 {
2887 if ((mode == SImode || mode == SFmode)
2888 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2889 && REG_P (XEXP (x, 0))
2890 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2891 return true;
2892
2893 return false;
2894 }
2895
2896 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
2897
2898 static bool
2899 m32r_legitimate_address_p (machine_mode mode, rtx x, bool strict)
2900 {
2901 if (m32r_rtx_ok_for_base_p (x, strict)
2902 || m32r_legitimate_offset_address_p (mode, x, strict)
2903 || m32r_legitimate_lo_sum_address_p (mode, x, strict)
2904 || m32r_load_postinc_p (mode, x, strict)
2905 || m32r_store_preinc_predec_p (mode, x, strict))
2906 return true;
2907
2908 return false;
2909 }
2910
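/* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE: when generating
position-independent code, the PIC offset table register must be treated
as fixed. */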
2911 static void
2912 m32r_conditional_register_usage (void)
2913 {
2914 if (flag_pic)
2915 {
2916 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2917 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2918 }
2919 }
2920
2921 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2922
2923 We don't allow (plus symbol large-constant) as the relocations can't
2924 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2925 We allow all CONST_DOUBLE's as the md file patterns will force the
2926 constant to memory if they can't handle them. */
2927
2928 static bool
2929 m32r_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2930 {
2931 return !(GET_CODE (x) == CONST
2932 && GET_CODE (XEXP (x, 0)) == PLUS
2933 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2934 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2935 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2936 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2937 }