]> gcc.gnu.org Git - gcc.git/blame - gcc/config/mep/mep.c
LANGUAGES: Fix typos.
[gcc.git] / gcc / config / mep / mep.c
CommitLineData
7acf4da6 1/* Definitions for Toshiba Media Processor
96e45421
JM
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011
7acf4da6
DD
4 Free Software Foundation, Inc.
5 Contributed by Red Hat, Inc.
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
27#include "rtl.h"
28#include "tree.h"
29#include "regs.h"
30#include "hard-reg-set.h"
7acf4da6
DD
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "flags.h"
37#include "recog.h"
38#include "obstack.h"
39#include "tree.h"
40#include "expr.h"
41#include "except.h"
42#include "function.h"
43#include "optabs.h"
44#include "reload.h"
45#include "tm_p.h"
46#include "ggc.h"
718f9c0f 47#include "diagnostic-core.h"
7acf4da6
DD
48#include "integrate.h"
49#include "target.h"
50#include "target-def.h"
51#include "langhooks.h"
52#include "df.h"
12a54f54 53#include "gimple.h"
96e45421 54#include "opts.h"
7acf4da6
DD
55
56/* Structure of this file:
57
58 + Command Line Option Support
59 + Pattern support - constraints, predicates, expanders
60 + Reload Support
61 + Costs
62 + Functions to save and restore machine-specific function data.
63 + Frame/Epilog/Prolog Related
64 + Operand Printing
65 + Function args in registers
66 + Handle pipeline hazards
67 + Handle attributes
68 + Trampolines
69 + Machine-dependent Reorg
70 + Builtins. */
71
72/* Symbol encodings:
73
74 Symbols are encoded as @ <char> . <name> where <char> is one of these:
75
76 b - based
77 t - tiny
78 n - near
79 f - far
80 i - io, near
81 I - io, far
82 c - cb (control bus) */
83
84struct GTY(()) machine_function
85{
86 int mep_frame_pointer_needed;
87
88 /* For varargs. */
89 int arg_regs_to_save;
90 int regsave_filler;
91 int frame_filler;
e756464b 92 int frame_locked;
7acf4da6
DD
93
94 /* Records __builtin_return address. */
95 rtx eh_stack_adjust;
96
97 int reg_save_size;
98 int reg_save_slot[FIRST_PSEUDO_REGISTER];
99 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
100
101 /* 2 if the current function has an interrupt attribute, 1 if not, 0
102 if unknown. This is here because resource.c uses EPILOGUE_USES
103 which needs it. */
104 int interrupt_handler;
105
106 /* Likewise, for disinterrupt attribute. */
107 int disable_interrupts;
108
109 /* Number of doloop tags used so far. */
110 int doloop_tags;
111
112 /* True if the last tag was allocated to a doloop_end. */
113 bool doloop_tag_from_end;
114
115 /* True if reload changes $TP. */
116 bool reload_changes_tp;
117
118 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
119 We only set this if the function is an interrupt handler. */
120 int asms_without_operands;
121};
122
123#define MEP_CONTROL_REG(x) \
124 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
125
7acf4da6
DD
126static GTY(()) section * based_section;
127static GTY(()) section * tinybss_section;
128static GTY(()) section * far_section;
129static GTY(()) section * farbss_section;
130static GTY(()) section * frodata_section;
131static GTY(()) section * srodata_section;
132
820ca276
DD
133static GTY(()) section * vtext_section;
134static GTY(()) section * vftext_section;
135static GTY(()) section * ftext_section;
136
7acf4da6
DD
137static void mep_set_leaf_registers (int);
138static bool symbol_p (rtx);
139static bool symbolref_p (rtx);
140static void encode_pattern_1 (rtx);
141static void encode_pattern (rtx);
142static bool const_in_range (rtx, int, int);
143static void mep_rewrite_mult (rtx, rtx);
144static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
145static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
146static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
147static bool move_needs_splitting (rtx, rtx, enum machine_mode);
148static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
149static bool mep_nongeneral_reg (rtx);
150static bool mep_general_copro_reg (rtx);
151static bool mep_nonregister (rtx);
152static struct machine_function* mep_init_machine_status (void);
153static rtx mep_tp_rtx (void);
154static rtx mep_gp_rtx (void);
155static bool mep_interrupt_p (void);
156static bool mep_disinterrupt_p (void);
157static bool mep_reg_set_p (rtx, rtx);
158static bool mep_reg_set_in_function (int);
159static bool mep_interrupt_saved_reg (int);
160static bool mep_call_saves_register (int);
161static rtx F (rtx);
162static void add_constant (int, int, int, int);
7acf4da6
DD
163static rtx maybe_dead_move (rtx, rtx, bool);
164static void mep_reload_pointer (int, const char *);
165static void mep_start_function (FILE *, HOST_WIDE_INT);
166static bool mep_function_ok_for_sibcall (tree, tree);
167static int unique_bit_in (HOST_WIDE_INT);
168static int bit_size_for_clip (HOST_WIDE_INT);
169static int bytesize (const_tree, enum machine_mode);
170static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176static bool mep_function_attribute_inlinable_p (const_tree);
5cec9f59 177static bool mep_can_inline_p (tree, tree);
7acf4da6
DD
178static bool mep_lookup_pragma_disinterrupt (const char *);
179static int mep_multiple_address_regions (tree, bool);
180static int mep_attrlist_to_encoding (tree, tree);
181static void mep_insert_attributes (tree, tree *);
182static void mep_encode_section_info (tree, rtx, int);
183static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184static void mep_unique_section (tree, int);
185static unsigned int mep_section_type_flags (tree, const char *, int);
186static void mep_asm_named_section (const char *, unsigned int, tree);
187static bool mep_mentioned_p (rtx, rtx, int);
188static void mep_reorg_regmove (rtx);
189static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
190static void mep_reorg_repeat (rtx);
191static bool mep_invertable_branch_p (rtx);
192static void mep_invert_branch (rtx, rtx);
193static void mep_reorg_erepeat (rtx);
194static void mep_jmp_return_reorg (rtx);
195static void mep_reorg_addcombine (rtx);
196static void mep_reorg (void);
197static void mep_init_intrinsics (void);
198static void mep_init_builtins (void);
199static void mep_intrinsic_unavailable (int);
200static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
201static bool mep_get_move_insn (int, const struct cgen_insn **);
202static rtx mep_convert_arg (enum machine_mode, rtx);
203static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
204static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
205static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
206static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
207static int mep_adjust_cost (rtx, rtx, rtx, int);
208static int mep_issue_rate (void);
209static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
210static void mep_move_ready_insn (rtx *, int, rtx);
211static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
212static rtx mep_make_bundle (rtx, rtx);
213static void mep_bundle_insns (rtx);
68f932c4 214static bool mep_rtx_cost (rtx, int, int, int, int *, bool);
7acf4da6 215static int mep_address_cost (rtx, bool);
d5cc9181 216static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
7acf4da6 217 tree, int *, int);
d5cc9181 218static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
7acf4da6 219 const_tree, bool);
d5cc9181 220static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
0851c6e3 221 const_tree, bool);
d5cc9181 222static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
0851c6e3 223 const_tree, bool);
7acf4da6 224static bool mep_vector_mode_supported_p (enum machine_mode);
7acf4da6
DD
225static rtx mep_allocate_initial_value (rtx);
226static void mep_asm_init_sections (void);
227static int mep_comp_type_attributes (const_tree, const_tree);
228static bool mep_narrow_volatile_bitfield (void);
229static rtx mep_expand_builtin_saveregs (void);
230static tree mep_build_builtin_va_list (void);
231static void mep_expand_va_start (tree, rtx);
12a54f54 232static tree mep_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
7b5cbb57 233static bool mep_can_eliminate (const int, const int);
5efd84c5 234static void mep_conditional_register_usage (void);
87138d8d 235static void mep_trampoline_init (rtx, tree, rtx);
7acf4da6 236\f
7acf4da6
DD
237#define WANT_GCC_DEFINITIONS
238#include "mep-intrin.h"
239#undef WANT_GCC_DEFINITIONS
240
241\f
242/* Command Line Option Support. */
243
244char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
245
246/* True if we can use cmov instructions to move values back and forth
247 between core and coprocessor registers. */
248bool mep_have_core_copro_moves_p;
249
250/* True if we can use cmov instructions (or a work-alike) to move
251 values between coprocessor registers. */
252bool mep_have_copro_copro_moves_p;
253
254/* A table of all coprocessor instructions that can act like
255 a coprocessor-to-coprocessor cmov. */
256static const int mep_cmov_insns[] = {
257 mep_cmov,
258 mep_cpmov,
259 mep_fmovs,
260 mep_caddi3,
261 mep_csubi3,
262 mep_candi3,
263 mep_cori3,
264 mep_cxori3,
265 mep_cand3,
266 mep_cor3
267};
268
7acf4da6
DD
269\f
270static void
271mep_set_leaf_registers (int enable)
272{
273 int i;
274
275 if (mep_leaf_registers[0] != enable)
276 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
277 mep_leaf_registers[i] = enable;
278}
279
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  Adjusts register
   availability based on the selected target options.  */
static void
mep_conditional_register_usage (void)
{
  int i;

  if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
    {
      /* Without multiply/divide support, $hi and $lo are never
	 written by generated code, so take them out of play.  */
      fixed_regs[HI_REGNO] = 1;
      fixed_regs[LO_REGNO] = 1;
      call_used_regs[HI_REGNO] = 1;
      call_used_regs[LO_REGNO] = 1;
    }

  /* Shadow registers are always off-limits to the allocator.  */
  for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
    global_regs[i] = 1;
}
296
c5387660
JM
/* Implement TARGET_OPTION_OVERRIDE.  Processes deferred command-line
   options (currently only -mivc2), diagnoses incompatible option
   combinations, and derives defaults such as mep_tiny_cutoff.  */
static void
mep_option_override (void)
{
  unsigned int i;
  int j;
  cl_deferred_option *opt;
  VEC(cl_deferred_option,heap) *vec
    = (VEC(cl_deferred_option,heap) *) mep_deferred_options;

  FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
    {
      switch (opt->opt_index)
	{
	case OPT_mivc2:
	  /* Enable registers 48..79: un-fix them, mark them
	     call-used, except for 54..55 (j = 6, 7) which stay
	     call-saved.  */
	  for (j = 0; j < 32; j++)
	    fixed_regs[j + 48] = 0;
	  for (j = 0; j < 32; j++)
	    call_used_regs[j + 48] = 1;
	  for (j = 6; j < 8; j++)
	    call_used_regs[j + 48] = 0;

	  /* Give the coprocessor control registers their IVC2 names.  */
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
	  RN (0, "$csar0");
	  RN (1, "$cc");
	  RN (4, "$cofr0");
	  RN (5, "$cofr1");
	  RN (6, "$cofa0");
	  RN (7, "$cofa1");
	  RN (15, "$csar1");

	  RN (16, "$acc0_0");
	  RN (17, "$acc0_1");
	  RN (18, "$acc0_2");
	  RN (19, "$acc0_3");
	  RN (20, "$acc0_4");
	  RN (21, "$acc0_5");
	  RN (22, "$acc0_6");
	  RN (23, "$acc0_7");

	  RN (24, "$acc1_0");
	  RN (25, "$acc1_1");
	  RN (26, "$acc1_2");
	  RN (27, "$acc1_3");
	  RN (28, "$acc1_4");
	  RN (29, "$acc1_5");
	  RN (30, "$acc1_6");
	  RN (31, "$acc1_7");
#undef RN
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Diagnose unsupported or mutually exclusive options.  */
  if (flag_pic == 1)
    warning (OPT_fpic, "-fpic is not supported");
  if (flag_pic == 2)
    warning (OPT_fPIC, "-fPIC is not supported");
  if (TARGET_S && TARGET_M)
    error ("only one of -ms and -mm may be given");
  if (TARGET_S && TARGET_L)
    error ("only one of -ms and -ml may be given");
  if (TARGET_M && TARGET_L)
    error ("only one of -mm and -ml may be given");
  if (TARGET_S && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -ms and -mtiny= may be given");
  if (TARGET_M && global_options_set.x_mep_tiny_cutoff)
    error ("only one of -mm and -mtiny= may be given");
  if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
    warning (0, "-mclip currently has no effect without -mminmax");

  if (mep_const_section)
    {
      if (strcmp (mep_const_section, "tiny") != 0
	  && strcmp (mep_const_section, "near") != 0
	  && strcmp (mep_const_section, "far") != 0)
	error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
    }

  /* Memory-model options imply a tiny-section cutoff unless the user
     gave one explicitly.  */
  if (TARGET_S)
    mep_tiny_cutoff = 65536;
  if (TARGET_M)
    mep_tiny_cutoff = 0;
  if (TARGET_L && ! global_options_set.x_mep_tiny_cutoff)
    mep_tiny_cutoff = 0;

  if (TARGET_64BIT_CR_REGS)
    flag_split_wide_types = 0;

  init_machine_status = mep_init_machine_status;
  mep_init_intrinsics ();
}
390
391/* Pattern Support - constraints, predicates, expanders. */
392
393/* MEP has very few instructions that can refer to the span of
394 addresses used by symbols, so it's common to check for them. */
395
396static bool
397symbol_p (rtx x)
398{
399 int c = GET_CODE (x);
400
401 return (c == CONST_INT
402 || c == CONST
403 || c == SYMBOL_REF);
404}
405
406static bool
407symbolref_p (rtx x)
408{
409 int c;
410
411 if (GET_CODE (x) != MEM)
412 return false;
413
414 c = GET_CODE (XEXP (x, 0));
415 return (c == CONST_INT
416 || c == CONST
417 || c == SYMBOL_REF);
418}
419
420/* static const char *reg_class_names[] = REG_CLASS_NAMES; */
421
422#define GEN_REG(R, STRICT) \
423 (GR_REGNO_P (R) \
424 || (!STRICT \
425 && ((R) == ARG_POINTER_REGNUM \
426 || (R) >= FIRST_PSEUDO_REGISTER)))
427
428static char pattern[12], *patternp;
429static GTY(()) rtx patternr[12];
430#define RTX_IS(x) (strcmp (pattern, x) == 0)
431
/* Append a one-character code for X (and, recursively, its operands)
   to the static `pattern' buffer; RTX_IS later matches the resulting
   string.  Each character's source rtx is recorded in patternr[].  */
static void
encode_pattern_1 (rtx x)
{
  int i;

  /* Leave room for the terminating NUL; flag truncation with '?'.  */
  if (patternp == pattern + sizeof (pattern) - 2)
    {
      patternp[-1] = '?';
      return;
    }

  patternr[patternp-pattern] = x;

  switch (GET_CODE (x))
    {
    case REG:
      *patternp++ = 'r';
      break;
    case MEM:
      *patternp++ = 'm';
      /* fall through - a MEM's address is encoded like a CONST body.  */
    case CONST:
      encode_pattern_1 (XEXP(x, 0));
      break;
    case PLUS:
      *patternp++ = '+';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case LO_SUM:
      *patternp++ = 'L';
      encode_pattern_1 (XEXP(x, 0));
      encode_pattern_1 (XEXP(x, 1));
      break;
    case HIGH:
      *patternp++ = 'H';
      encode_pattern_1 (XEXP(x, 0));
      break;
    case SYMBOL_REF:
      *patternp++ = 's';
      break;
    case LABEL_REF:
      *patternp++ = 'l';
      break;
    case CONST_INT:
    case CONST_DOUBLE:
      *patternp++ = 'i';
      break;
    case UNSPEC:
      /* Encode the unspec number as a digit, then its operands.  */
      *patternp++ = 'u';
      *patternp++ = '0' + XCINT(x, 1, UNSPEC);
      for (i=0; i<XVECLEN (x, 0); i++)
	encode_pattern_1 (XVECEXP (x, 0, i));
      break;
    case USE:
      *patternp++ = 'U';
      break;
    default:
      *patternp++ = '?';
#if 0
      fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
      debug_rtx (x);
      gcc_unreachable ();
#endif
      break;
    }
}
498
/* Encode X into the static `pattern' buffer and NUL-terminate it.  */
static void
encode_pattern (rtx x)
{
  patternp = pattern;
  encode_pattern_1 (x);
  *patternp = 0;
}
506
/* Return the section tag encoded in the name of SYMBOL_REF X (see the
   "Symbol encodings" table at the top of this file), or 0 if there is
   none.  Looks through MEM, CONST, UNSPEC and symbol+constant PLUS
   wrappers to find the underlying SYMBOL_REF.  */
int
mep_section_tag (rtx x)
{
  const char *name;

  while (1)
    {
      switch (GET_CODE (x))
	{
	case MEM:
	case CONST:
	  x = XEXP (x, 0);
	  break;
	case UNSPEC:
	  x = XVECEXP (x, 0, 0);
	  break;
	case PLUS:
	  /* Only a constant offset preserves the symbol's tag.  */
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    return 0;
	  x = XEXP (x, 0);
	  break;
	default:
	  goto done;
	}
    }
 done:
  if (GET_CODE (x) != SYMBOL_REF)
    return 0;
  name = XSTR (x, 0);
  if (name[0] == '@' && name[2] == '.')
    {
      /* The io tags map onto the plain far/near tags: 'I' is
	 "io, far" and 'i' is "io, near" per the encoding table.  */
      if (name[1] == 'i' || name[1] == 'I')
	{
	  if (name[1] == 'I')
	    return 'f'; /* far */
	  return 'n'; /* near */
	}
      return name[1];
    }
  return 0;
}
548
/* Return the smallest register class containing hard register REGNO.
   Implements REGNO_REG_CLASS.  */
int
mep_regno_reg_class (int regno)
{
  /* Single-register classes come first.  */
  switch (regno)
    {
    case SP_REGNO: return SP_REGS;
    case TP_REGNO: return TP_REGS;
    case GP_REGNO: return GP_REGS;
    case 0: return R0_REGS;
    case HI_REGNO: return HI_REGS;
    case LO_REGNO: return LO_REGS;
    case ARG_POINTER_REGNUM: return GENERAL_REGS;
    }

  /* The first eight general registers are addressable via $tp.  */
  if (GR_REGNO_P (regno))
    return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
  if (CONTROL_REGNO_P (regno))
    return CONTROL_REGS;

  if (CR_REGNO_P (regno))
    {
      int i, j;

      /* Search for the register amongst user-defined subclasses of
	 the coprocessor registers.  */
      for (i = USER0_REGS; i <= USER3_REGS; ++i)
	{
	  if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
	    continue;
	  for (j = 0; j < N_REG_CLASSES; ++j)
	    {
	      enum reg_class sub = reg_class_subclasses[i][j];

	      /* No smaller subclass also contains REGNO: USERi_REGS
		 is the smallest class.  */
	      if (sub == LIM_REG_CLASSES)
		return i;
	      if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
		break;
	    }
	}

      return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
    }

  if (CCR_REGNO_P (regno))
    return CCR_REGS;

  /* Shadow registers belong to no allocatable class.  */
  gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
  return NO_REGS;
}
598
599#if 0
/* (Dead code: this file keeps it under #if 0.)  Map register
   constraint letter C, with the rest of the constraint string in STR,
   to a register class.  */
int
mep_reg_class_from_constraint (int c, const char *str)
{
  switch (c)
    {
    case 'a':
      return SP_REGS;
    case 'b':
      return TP_REGS;
    case 'c':
      return CONTROL_REGS;
    case 'd':
      return HILO_REGS;
    case 'e':
      {
	/* Two-letter 'e' constraints select coprocessor subsets,
	   some conditional on which cmov forms are available.  */
	switch (str[1])
	  {
	  case 'm':
	    return LOADABLE_CR_REGS;
	  case 'x':
	    return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
	  case 'r':
	    return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
	  default:
	    return NO_REGS;
	  }
      }
    case 'h':
      return HI_REGS;
    case 'j':
      return RPC_REGS;
    case 'l':
      return LO_REGS;
    case 't':
      return TPREL_REGS;
    case 'v':
      return GP_REGS;
    case 'x':
      return CR_REGS;
    case 'y':
      return CCR_REGS;
    case 'z':
      return R0_REGS;

    case 'A':
    case 'B':
    case 'C':
    case 'D':
      {
	/* User-defined coprocessor subclasses; usable only when
	   non-empty.  */
	enum reg_class which = c - 'A' + USER0_REGS;
	return (reg_class_size[which] > 0 ? which : NO_REGS);
      }

    default:
      return NO_REGS;
    }
}
657
/* (Dead code: this file keeps it under #if 0.)  Return true if VALUE
   satisfies integer constraint letter C.  */
bool
mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
{
  switch (c)
    {
    case 'I': return value >= -32768 && value < 32768;
    case 'J': return value >= 0 && value < 65536;
    case 'K': return value >= 0 && value < 0x01000000;
    case 'L': return value >= -32 && value < 32;
    case 'M': return value >= 0 && value < 32;
    case 'N': return value >= 0 && value < 16;
    case 'O':
      /* 32-bit values whose low half-word is zero.  */
      if (value & 0xffff)
	return false;
      return value >= -2147483647-1 && value <= 2147483647;
    default:
      gcc_unreachable ();
    }
}
677
/* (Dead code: this file keeps it under #if 0.)  Return true if VALUE
   satisfies extra constraint letter C.  Uses the encode_pattern /
   RTX_IS machinery to match rtx shapes.  */
bool
mep_extra_constraint (rtx value, int c)
{
  encode_pattern (value);

  switch (c)
    {
    case 'R':
      /* For near symbols, like what call uses.  */
      if (GET_CODE (value) == REG)
	return 0;
      return mep_call_address_operand (value, GET_MODE (value));

    case 'S':
      /* For signed 8-bit immediates.  */
      return (GET_CODE (value) == CONST_INT
	      && INTVAL (value) >= -128
	      && INTVAL (value) <= 127);

    case 'T':
      /* For tp/gp relative symbol values.  */
      return (RTX_IS ("u3s") || RTX_IS ("u2s")
	      || RTX_IS ("+u3si") || RTX_IS ("+u2si"));

    case 'U':
      /* Non-absolute memories.  */
      return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));

    case 'W':
      /* %hi(sym) */
      return RTX_IS ("Hs");

    case 'Y':
      /* Register indirect.  */
      return RTX_IS ("mr");

    case 'Z':
      /* Control-bus symbol memory references.  */
      return mep_section_tag (value) == 'c' && RTX_IS ("ms");
    }

  return false;
}
720#endif
721
722#undef PASS
723#undef FAIL
724
725static bool
726const_in_range (rtx x, int minv, int maxv)
727{
728 return (GET_CODE (x) == CONST_INT
729 && INTVAL (x) >= minv
730 && INTVAL (x) <= maxv);
731}
732
733/* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
734 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
735 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
736 at the end of the insn stream. */
737
rtx
mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
{
  /* If DEST already aliases one operand, the other one is the mulr
     source and no extra move is needed.  */
  if (rtx_equal_p (dest, src1))
    return src2;
  else if (rtx_equal_p (dest, src2))
    return src1;
  else
    {
      /* Otherwise copy SRC1 into DEST first, then multiply by SRC2.  */
      if (insn == 0)
	emit_insn (gen_movsi (copy_rtx (dest), src1));
      else
	emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
      return src2;
    }
}
754
755/* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
756 Change the last element of PATTERN from (clobber (scratch:SI))
757 to (clobber (reg:SI HI_REGNO)). */
758
static void
mep_rewrite_mult (rtx insn, rtx pattern)
{
  rtx hi_clobber;

  /* The scratch clobber is the last element of the PARALLEL; pin it
     to $hi so the pattern explicitly clobbers HI_REGNO.  */
  hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
  XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
  PATTERN (insn) = pattern;
  /* Force re-recognition of the rewritten insn.  */
  INSN_CODE (insn) = -1;
}
769
770/* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
771 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
772 store the result in DEST if nonnull. */
773
static void
mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
{
  rtx lo, pattern;

  lo = gen_rtx_REG (SImode, LO_REGNO);
  if (dest)
    /* Keep the GPR result as well as $lo (mulr form).  */
    pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
			   mep_mulr_source (insn, dest, src1, src2));
  else
    /* Result is only needed in $lo (mul form).  */
    pattern = gen_mulsi3_lo (lo, src1, src2);
  mep_rewrite_mult (insn, pattern);
}
787
788/* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
789 SRC3 into $lo, then use either madd or maddr. The move into $lo will
790 be deleted by a peephole2 if SRC3 is already in $lo. */
791
static void
mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
{
  rtx lo, pattern;

  /* Seed $lo with the addend; peephole2 deletes this move when SRC3
     is already in $lo (see comment above).  */
  lo = gen_rtx_REG (SImode, LO_REGNO);
  emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
  if (dest)
    /* Keep the GPR result as well as $lo (maddr form).  */
    pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
			    mep_mulr_source (insn, dest, src1, src2),
			    copy_rtx (lo));
  else
    /* Result is only needed in $lo (madd form).  */
    pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
  mep_rewrite_mult (insn, pattern);
}
807
808/* Return true if $lo has the same value as integer register GPR when
809 instruction INSN is reached. If necessary, rewrite the instruction
810 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
811 rtx for (reg:SI LO_REGNO).
812
813 This function is intended to be used by the peephole2 pass. Since
814 that pass goes from the end of a basic block to the beginning, and
815 propagates liveness information on the way, there is no need to
816 update register notes here.
817
818 If GPR_DEAD_P is true on entry, and this function returns true,
819 then the caller will replace _every_ use of GPR in and after INSN
820 with LO. This means that if the instruction that sets $lo is a
821 mulr- or maddr-type instruction, we can rewrite it to use mul or
822 madd instead. In combination with the copy progagation pass,
823 this allows us to replace sequences like:
824
825 mov GPR,R1
826 mulr GPR,R2
827
828 with:
829
830 mul R1,R2
831
832 if GPR is no longer used. */
833
static bool
mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
{
  /* Walk backwards from INSN to the start of the basic block looking
     for the instruction that set $lo.  */
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	switch (recog_memoized (insn))
	  {
	  case CODE_FOR_mulsi3_1:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		/* GPR = SRC1 * SRC2; rewrite so $lo gets the product
		   too (and drop the GPR result if GPR is dead).  */
		mep_rewrite_mulsi3 (insn,
				    gpr_dead_p ? NULL : recog_data.operand[0],
				    recog_data.operand[1],
				    recog_data.operand[2]);
		return true;
	      }
	    return false;

	  case CODE_FOR_maddsi3:
	    extract_insn (insn);
	    if (rtx_equal_p (recog_data.operand[0], gpr))
	      {
		mep_rewrite_maddsi3 (insn,
				     gpr_dead_p ? NULL : recog_data.operand[0],
				     recog_data.operand[1],
				     recog_data.operand[2],
				     recog_data.operand[3]);
		return true;
	      }
	    return false;

	  case CODE_FOR_mulsi3r:
	  case CODE_FOR_maddsi3r:
	    /* These already store into $lo; GPR matches if it is the
	       insn's GPR destination (operand 1).  */
	    extract_insn (insn);
	    return rtx_equal_p (recog_data.operand[1], gpr);

	  default:
	    /* Any other write to $lo or GPR, or a volatile insn,
	       kills the equivalence.  */
	    if (reg_set_p (lo, insn)
		|| reg_set_p (gpr, insn)
		|| volatile_insn_p (PATTERN (insn)))
	      return false;

	    /* A use of GPR before INSN means GPR is not dead over the
	       whole range after all.  */
	    if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
	      gpr_dead_p = false;
	    break;
	  }
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
887
888/* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
889
890bool
891mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
892{
893 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
894 extract_insn (insn);
895 return result;
896}
897
898/* Return true if SET can be turned into a post-modify load or store
899 that adds OFFSET to GPR. In other words, return true if SET can be
900 changed into:
901
902 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
903
904 It's OK to change SET to an equivalent operation in order to
905 make it match. */
906
static bool
mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
{
  rtx *reg, *mem;
  unsigned int reg_bytes, mem_bytes;
  enum machine_mode reg_mode, mem_mode;

  /* Only simple SETs can be converted.  */
  if (GET_CODE (set) != SET)
    return false;

  /* Point REG to what we hope will be the register side of the set and
     MEM to what we hope will be the memory side.  */
  if (GET_CODE (SET_DEST (set)) == MEM)
    {
      mem = &SET_DEST (set);
      reg = &SET_SRC (set);
    }
  else
    {
      reg = &SET_DEST (set);
      mem = &SET_SRC (set);
      /* Look through a sign-extending load to the MEM itself.  */
      if (GET_CODE (*mem) == SIGN_EXTEND)
	mem = &XEXP (*mem, 0);
    }

  /* Check that *REG is a suitable coprocessor register.  */
  if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
    return false;

  /* Check that *MEM is a suitable memory reference: a plain
     dereference of GPR, the register being post-modified.  */
  if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
    return false;

  /* Get the number of bytes in each operand.  */
  mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
  reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));

  /* Check that OFFSET is suitably aligned.  */
  if (INTVAL (offset) & (mem_bytes - 1))
    return false;

  /* From here on we modify SET in place; all checks must be done
     above this point.  Convert *MEM to a normal integer mode.  */
  mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
  *mem = change_address (*mem, mem_mode, NULL);

  /* Adjust *REG as well.  */
  *reg = shallow_copy_rtx (*reg);
  if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
    {
      /* SET is a subword load.  Convert it to an explicit extension.  */
      PUT_MODE (*reg, SImode);
      *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
    }
  else
    {
      reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
      PUT_MODE (*reg, reg_mode);
    }
  return true;
}
968
969/* Return the effect of frame-related instruction INSN. */
970
971static rtx
972mep_frame_expr (rtx insn)
973{
974 rtx note, expr;
975
976 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
977 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
978 RTX_FRAME_RELATED_P (expr) = 1;
979 return expr;
980}
981
982/* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
983 new pattern in INSN1; INSN2 will be deleted by the caller. */
984
static void
mep_make_parallel (rtx insn1, rtx insn2)
{
  rtx expr;

  /* Preserve INSN2's frame-related effect on the merged insn,
     combining it with INSN1's effect in a SEQUENCE if both are
     frame-related.  */
  if (RTX_FRAME_RELATED_P (insn2))
    {
      expr = mep_frame_expr (insn2);
      if (RTX_FRAME_RELATED_P (insn1))
	expr = gen_rtx_SEQUENCE (VOIDmode,
				 gen_rtvec (2, mep_frame_expr (insn1), expr));
      set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
      RTX_FRAME_RELATED_P (insn1) = 1;
    }

  PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (2, PATTERN (insn1),
						 PATTERN (insn2)));
  /* Force re-recognition of the combined insn.  */
  INSN_CODE (insn1) = -1;
}
1005
1006/* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1007 the basic block to see if any previous load or store instruction can
1008 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1009
static bool
mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
{
  rtx insn;

  /* Scan backwards through the basic block for a load/store through
     REG that can absorb the add as a post-modify side effect.  */
  insn = set_insn;
  do
    {
      insn = PREV_INSN (insn);
      if (INSN_P (insn))
	{
	  if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
	    {
	      mep_make_parallel (insn, set_insn);
	      return true;
	    }

	  /* Any other write to or use of REG, or a volatile insn,
	     blocks the transformation.  */
	  if (reg_set_p (reg, insn)
	      || reg_referenced_p (reg, PATTERN (insn))
	      || volatile_insn_p (PATTERN (insn)))
	    return false;
	}
    }
  while (!NOTE_INSN_BASIC_BLOCK_P (insn));
  return false;
}
1036
1037/* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1038
1039bool
1040mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1041{
1042 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1043 extract_insn (insn);
1044 return result;
1045}
1046
1047bool
1048mep_allow_clip (rtx ux, rtx lx, int s)
1049{
1050 HOST_WIDE_INT u = INTVAL (ux);
1051 HOST_WIDE_INT l = INTVAL (lx);
1052 int i;
1053
1054 if (!TARGET_OPT_CLIP)
1055 return false;
1056
1057 if (s)
1058 {
1059 for (i = 0; i < 30; i ++)
1060 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1061 && (l == - ((HOST_WIDE_INT) 1 << i)))
1062 return true;
1063 }
1064 else
1065 {
1066 if (l != 0)
1067 return false;
1068
1069 for (i = 0; i < 30; i ++)
1070 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1071 return true;
1072 }
1073 return false;
1074}
1075
1076bool
1077mep_bit_position_p (rtx x, bool looking_for)
1078{
1079 if (GET_CODE (x) != CONST_INT)
1080 return false;
1081 switch ((int) INTVAL(x) & 0xff)
1082 {
1083 case 0x01: case 0x02: case 0x04: case 0x08:
1084 case 0x10: case 0x20: case 0x40: case 0x80:
1085 return looking_for;
1086 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1087 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1088 return !looking_for;
1089 }
1090 return false;
1091}
1092
/* Return true if a move of SRC into DEST cannot be done in a single
   instruction and must be split.  */
static bool
move_needs_splitting (rtx dest, rtx src,
		      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int s = mep_section_tag (src);

  /* Peel wrappers until we reach something symbolic; bail out early
     for non-symbolic sources.  */
  while (1)
    {
      if (GET_CODE (src) == CONST
	  || GET_CODE (src) == MEM)
	src = XEXP (src, 0);
      else if (GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == PLUS)
	break;
      else
	return false;
    }
  /* Split when: the source is in the far section; the symbol+offset
     has an offset outside the reachable range; or the destination is
     a hard register above $7.  */
  if (s == 'f'
      || (GET_CODE (src) == PLUS
	  && GET_CODE (XEXP (src, 1)) == CONST_INT
	  && (INTVAL (XEXP (src, 1)) < -65536
	      || INTVAL (XEXP (src, 1)) > 0xffffff))
      || (GET_CODE (dest) == REG
	  && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
    return true;
  return false;
}
1121
/* Return true if the move described by OPERANDS (dest, src) must be
   split into multiple instructions.  SYMBOLIC is nonzero when the
   source is a symbolic expression rather than an integer constant.  */
bool
mep_split_mov (rtx *operands, int symbolic)
{
  if (symbolic)
    {
      if (move_needs_splitting (operands[0], operands[1], SImode))
	return true;
      return false;
    }

  if (GET_CODE (operands[1]) != CONST_INT)
    return false;

  /* Constants matching I, J or O fit in a single instruction.  */
  if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
      || constraint_satisfied_p (operands[1], CONSTRAINT_J)
      || constraint_satisfied_p (operands[1], CONSTRAINT_O))
    return false;

  /* K constants only work in a single insn when the destination is
     one of the low registers (or we're not in reload yet).  */
  if (((!reload_completed && !reload_in_progress)
       || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
      && constraint_satisfied_p (operands[1], CONSTRAINT_K))
    return false;

  return true;
}
1147
1148/* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1149 it to one specific value. So the insn chosen depends on whether
1150 the source and destination modes match. */
1151
1152bool
1153mep_vliw_mode_match (rtx tgt)
1154{
1155 bool src_vliw = mep_vliw_function_p (cfun->decl);
1156 bool tgt_vliw = INTVAL (tgt);
1157
1158 return src_vliw == tgt_vliw;
1159}
1160
a9d1723f
DD
1161/* Like the above, but also test for near/far mismatches. */
1162
1163bool
1164mep_vliw_jmp_match (rtx tgt)
1165{
1166 bool src_vliw = mep_vliw_function_p (cfun->decl);
1167 bool tgt_vliw = INTVAL (tgt);
1168
1169 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1170 return false;
1171
1172 return src_vliw == tgt_vliw;
1173}
1174
7acf4da6
DD
/* Return true if insn X is attributed SLOT_MULTI, i.e. it occupies
   the multi slot rather than a core or coprocessor slot.  */

bool
mep_multi_slot (rtx x)
{
  return get_attr_slot (x) == SLOT_MULTI;
}
1180
1a627b35 1181/* Implement TARGET_LEGITIMATE_CONSTANT_P. */
7acf4da6 1182
1a627b35
RS
1183static bool
1184mep_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
5ba863d7
DD
1185{
1186 /* We can't convert symbol values to gp- or tp-rel values after
1187 reload, as reload might have used $gp or $tp for other
1188 purposes. */
1189 if (GET_CODE (x) == SYMBOL_REF && (reload_in_progress || reload_completed))
1190 {
1191 char e = mep_section_tag (x);
1192 return (e != 't' && e != 'b');
1193 }
1194 return 1;
1195}
1196
7acf4da6
DD
/* Return true if X is a legitimate address for an access of mode
   MODE; STRICT is nonzero once only hard registers are acceptable.
   Be careful not to use macros that need to be compiled one way for
   strict, and another way for not-strict, like REG_OK_FOR_BASE_P.  */

bool
mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
{
  int the_tag;

#define DEBUG_LEGIT 0
#if DEBUG_LEGIT
  fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
  debug_rtx (x);
#endif

  /* %lo(sym)[reg] -- accepted only for word-or-smaller accesses.  */
  if (GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && CONSTANT_P (XEXP (x, 1)))
    {
      if (GET_MODE_SIZE (mode) > 4)
	{
	  /* We will end up splitting this, and lo_sums are not
	     offsettable for us.  */
#if DEBUG_LEGIT
	  fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
#endif
	  return false;
	}
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
#endif
      return true;
    }

  /* A bare general register.  */
  if (GET_CODE (x) == REG
      && GEN_REG (REGNO (x), strict))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg]\n");
#endif
      return true;
    }

  /* Register plus a 16-bit signed displacement.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && const_in_range (XEXP (x, 1), -32768, 32767))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+const]\n");
#endif
      return true;
    }

  /* Register plus a (possibly offset) UNSPEC wrapped in a CONST, as
     produced for tp-/gp-relative references.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GEN_REG (REGNO (XEXP (x, 0)), strict)
      && GET_CODE (XEXP (x, 1)) == CONST
      && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
	  || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
	      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, [reg+unspec]\n");
#endif
      return true;
    }

  the_tag = mep_section_tag (x);

  /* Far-section references are never directly addressable.  */
  if (the_tag == 'f')
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - nope, [far]\n");
#endif
      return false;
    }

  /* Call addresses are queried with VOIDmode; a plain symbol is OK.  */
  if (mode == VOIDmode
      && GET_CODE (x) == SYMBOL_REF)
    {
#if DEBUG_LEGIT
      fprintf (stderr, " - yup, call [symbol]\n");
#endif
      return true;
    }

  /* Word-sized accesses at absolute addresses, excluding tp-/gp-rel
     symbols; literal integer addresses must additionally be small,
     non-negative and word-aligned.  */
  if ((mode == SImode || mode == SFmode)
      && CONSTANT_P (x)
      && mep_legitimate_constant_p (mode, x)
      && the_tag != 't' && the_tag != 'b')
    {
      if (GET_CODE (x) != CONST_INT
	  || (INTVAL (x) <= 0xfffff
	      && INTVAL (x) >= 0
	      && (INTVAL (x) % 4) == 0))
	{
#if DEBUG_LEGIT
	  fprintf (stderr, " - yup, [const]\n");
#endif
	  return true;
	}
    }

#if DEBUG_LEGIT
  fprintf (stderr, " - nope.\n");
#endif
  return false;
}
1307
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Return 1 when a reload has
   been pushed for *X, 0 to let the generic machinery handle it.
   TYPE_I is the reload_type passed as a plain int.  */

int
mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
			       int type_i,
			       int ind_levels ATTRIBUTE_UNUSED)
{
  enum reload_type type = (enum reload_type) type_i;

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == MEM
      && GET_CODE (XEXP (*x, 1)) == REG)
    {
      /* GCC will by default copy the MEM into a REG, which results in
	 an invalid address.  For us, the best thing to do is move the
	 whole expression to a REG.  */
      push_reload (*x, NULL_RTX, x, NULL,
		   GENERAL_REGS, mode, VOIDmode,
		   0, 0, opnum, type);
      return 1;
    }

  if (GET_CODE (*x) == PLUS
      && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
      && GET_CODE (XEXP (*x, 1)) == CONST_INT)
    {
      char e = mep_section_tag (XEXP (*x, 0));

      /* tp-/gp-relative symbols ('t'/'b') are left alone.  */
      if (e != 't' && e != 'b')
	{
	  /* GCC thinks that (sym+const) is a valid address.  Well,
	     sometimes it is, this time it isn't.  The best thing to
	     do is reload the symbol to a register, since reg+int
	     tends to work, and we can't just add the symbol and
	     constant anyway.  */
	  push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
		       GENERAL_REGS, mode, VOIDmode,
		       0, 0, opnum, type);
	  return 1;
	}
    }
  return 0;
}
1349
/* Return the encoded length in bytes (2 or 4) of the core load/store
   INSN whose memory operand is operand OPN of its single SET.  */

int
mep_core_address_length (rtx insn, int opn)
{
  rtx set = single_set (insn);
  rtx mem = XEXP (set, opn);
  rtx other = XEXP (set, 1-opn);	/* The non-memory operand.  */
  rtx addr = XEXP (mem, 0);

  /* Plain register addressing has a short form.  */
  if (register_operand (addr, Pmode))
    return 2;
  if (GET_CODE (addr) == PLUS)
    {
      rtx addend = XEXP (addr, 1);

      gcc_assert (REG_P (XEXP (addr, 0)));

      switch (REGNO (XEXP (addr, 0)))
	{
	case STACK_POINTER_REGNUM:
	  /* SP-relative word accesses with an imm7a4 offset are short.  */
	  if (GET_MODE_SIZE (GET_MODE (mem)) == 4
	      && mep_imm7a4_operand (addend, VOIDmode))
	    return 2;
	  break;

	case 13: /* TP */
	  gcc_assert (REG_P (other));

	  /* The short TP-relative forms only reach registers below $8.  */
	  if (REGNO (other) >= 8)
	    break;

	  /* %tprel(sym) addends have a short encoding...  */
	  if (GET_CODE (addend) == CONST
	      && GET_CODE (XEXP (addend, 0)) == UNSPEC
	      && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
	    return 2;

	  /* ... as do small, suitably aligned literal offsets.  */
	  if (GET_CODE (addend) == CONST_INT
	      && INTVAL (addend) >= 0
	      && INTVAL (addend) <= 127
	      && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
	    return 2;
	  break;
	}
    }

  return 4;
}
1396
1397int
1398mep_cop_address_length (rtx insn, int opn)
1399{
1400 rtx set = single_set (insn);
1401 rtx mem = XEXP (set, opn);
1402 rtx addr = XEXP (mem, 0);
1403
1404 if (GET_CODE (mem) != MEM)
1405 return 2;
1406 if (register_operand (addr, Pmode))
1407 return 2;
1408 if (GET_CODE (addr) == POST_INC)
1409 return 2;
1410
1411 return 4;
1412}
1413
#define DEBUG_EXPAND_MOV 0

/* Expander for the mov patterns.  Rewrites tp-/gp-relative symbol
   references (section tags 'b' and 't') as $tp/$gp plus an UNSPEC
   offset, forces awkward operands into registers, and emits the
   two-insn sequence needed for far ('f') symbols.  Returns true when
   the whole move has been emitted here, false to let the caller emit
   the ordinary insn.  */
bool
mep_expand_mov (rtx *operands, enum machine_mode mode)
{
  int i, t;
  int tag[2];
  rtx tpsym, tpoffs;
  int post_reload = 0;

  tag[0] = mep_section_tag (operands[0]);
  tag[1] = mep_section_tag (operands[1]);

  /* Before reload, avoid mem-to-mem (or other non-register-to-
     non-register) moves by copying the source into a register.  */
  if (!reload_in_progress
      && !reload_completed
      && GET_CODE (operands[0]) != REG
      && GET_CODE (operands[0]) != SUBREG
      && GET_CODE (operands[1]) != REG
      && GET_CODE (operands[1]) != SUBREG)
    operands[1] = copy_to_mode_reg (mode, operands[1]);

#if DEBUG_EXPAND_MOV
  fprintf(stderr, "expand move %s %d\n", mode_name[mode],
	  reload_in_progress || reload_completed);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* Doubleword moves get no special treatment here.  */
  if (mode == DImode || mode == DFmode)
    return false;

  /* Once reload has started we may no longer be able to rely on the
     $tp/$gp initial-value pseudos; detect that and fall back to the
     symbolic (post_reload) path below.  */
  if (reload_in_progress || reload_completed)
    {
      rtx r;

      if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
	cfun->machine->reload_changes_tp = true;

      if (tag[0] == 't' || tag[1] == 't')
	{
	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
	    post_reload = 1;
	}
      if (tag[0] == 'b' || tag[1] == 'b')
	{
	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
	    post_reload = 1;
	}
      if (cfun->machine->reload_changes_tp == true)
	post_reload = 1;
    }

  if (!post_reload)
    {
      rtx n;
      /* A 'b' (tiny) or 't' (near) symbol source becomes
	 $tp/$gp + %tprel/%gprel(sym), emitted as a single add.  */
      if (symbol_p (operands[1]))
	{
	  t = mep_section_tag (operands[1]);
	  if (t == 'b' || t == 't')
	    {

	      if (GET_CODE (operands[1]) == SYMBOL_REF)
		{
		  /* NOTE(review): tpsym is written but never read.  */
		  tpsym = operands[1];
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, operands[1]),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == PLUS
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
		       && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
		{
		  /* (sym + offset): wrap only the symbol in the
		     UNSPEC and re-add the offset outside it.  */
		  tpsym = XEXP (XEXP (operands[1], 0), 0);
		  tpoffs = XEXP (XEXP (operands[1], 0), 1);
		  n = gen_rtx_UNSPEC (mode,
				      gen_rtvec (1, tpsym),
				      t == 'b' ? UNS_TPREL : UNS_GPREL);
		  n = gen_rtx_PLUS (mode, n, tpoffs);
		  n = gen_rtx_CONST (mode, n);
		}
	      else if (GET_CODE (operands[1]) == CONST
		       && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
		/* Already rewritten.  */
		return false;
	      else
		{
		  error ("unusual TP-relative address");
		  return false;
		}

	      n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
				       : mep_gp_rtx ()), n);
	      n = emit_insn (gen_rtx_SET (mode, operands[0], n));
#if DEBUG_EXPAND_MOV
	      fprintf(stderr, "mep_expand_mov emitting ");
	      debug_rtx(n);
#endif
	      return true;
	    }
	}

      /* Rewrite 'b'/'t'-tagged MEM operands so their addresses are
	 explicit $tp/$gp + UNSPEC sums.  */
      for (i=0; i < 2; i++)
	{
	  t = mep_section_tag (operands[i]);
	  if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
	    {
	      rtx sym, n, r;
	      int u;

	      sym = XEXP (operands[i], 0);
	      if (GET_CODE (sym) == CONST
		  && GET_CODE (XEXP (sym, 0)) == UNSPEC)
		sym = XVECEXP (XEXP (sym, 0), 0, 0);

	      if (t == 'b')
		{
		  r = mep_tp_rtx ();
		  u = UNS_TPREL;
		}
	      else
		{
		  r = mep_gp_rtx ();
		  u = UNS_GPREL;
		}

	      n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
	      n = gen_rtx_CONST (Pmode, n);
	      n = gen_rtx_PLUS (Pmode, r, n);
	      operands[i] = replace_equiv_address (operands[i], n);
	    }
	}
    }

  /* Moves between memory and a control register go through a
     general-register temporary.  */
  if ((GET_CODE (operands[1]) != REG
       && MEP_CONTROL_REG (operands[0]))
      || (GET_CODE (operands[0]) != REG
	  && MEP_CONTROL_REG (operands[1])))
    {
      rtx temp;
#if DEBUG_EXPAND_MOV
      fprintf (stderr, "cr-mem, forcing op1 to reg\n");
#endif
      temp = gen_reg_rtx (mode);
      emit_move_insn (temp, operands[1]);
      operands[1] = temp;
    }

  /* Stores to far or non-word symbols need the address in a register
     first.  */
  if (symbolref_p (operands[0])
      && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
	  || (GET_MODE_SIZE (mode) != 4)))
    {
      rtx temp;

      gcc_assert (!reload_in_progress && !reload_completed);

      temp = force_reg (Pmode, XEXP (operands[0], 0));
      operands[0] = replace_equiv_address (operands[0], temp);
      emit_move_insn (operands[0], operands[1]);
      return true;
    }

  if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
    tag[1] = 0;

  /* Symbolic sources that could not be made tp-/gp-relative are
     built with a top/bottom halfword pair.  */
  if (symbol_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
      emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
      return true;
    }

  /* Likewise for loads through such symbols; compute the address into
     a temporary, then load through it.  */
  if (symbolref_p (operands[1])
      && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
    {
      rtx temp;

      if (reload_in_progress || reload_completed)
	temp = operands[0];
      else
	temp = gen_reg_rtx (Pmode);

      emit_insn (gen_movsi_topsym_s (temp, operands[1]));
      emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
      emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
      return true;
    }

  return false;
}
1606
/* Cases where the mov pattern can't be used at all; such moves must
   be handled by mep_expand_mov instead.  */

bool
mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int i;

#define DEBUG_MOV_OK 0
#if DEBUG_MOV_OK
  fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
	   mep_section_tag (operands[1]));
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* We want the movh patterns to get these.  */
  if (GET_CODE (operands[1]) == HIGH)
    return false;

  /* We can't store a register to a far variable without using a
     scratch register to hold the address.  Using far variables should
     be split by mep_emit_mov anyway.  */
  if (mep_section_tag (operands[0]) == 'f'
      || mep_section_tag (operands[1]) == 'f')
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, f\n");
#endif
      return false;
    }
  i = mep_section_tag (operands[1]);
  if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
    /* These are supposed to be generated with adds of the appropriate
       register.  During and after reload, however, we allow them to
       be accessed as normal symbols because adding a dependency on
       the base register now might cause problems.  */
    {
#if DEBUG_MOV_OK
      fprintf (stderr, " - no, bt\n");
#endif
      return false;
    }

  /* The only moves we can allow involve at least one general
     register, so require it.  */
  for (i = 0; i < 2; i ++)
    {
      /* Allow subregs too, before reload.  */
      rtx x = operands[i];

      if (GET_CODE (x) == SUBREG)
	x = XEXP (x, 0);
      if (GET_CODE (x) == REG
	  && ! MEP_CONTROL_REG (x))
	{
#if DEBUG_MOV_OK
	  fprintf (stderr, " - ok\n");
#endif
	  return true;
	}
    }
#if DEBUG_MOV_OK
  fprintf (stderr, " - no, no gen reg\n");
#endif
  return false;
}
1673
#define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a doubleword move into word-sized halves.  On entry
   operands[0]/operands[1] are destination and source; on exit
   operands[2]/operands[3] hold the high subwords and
   operands[4]/operands[5] the low subwords, possibly swapped to avoid
   early-clobbering overlapping register pairs.  */
void
mep_split_wide_move (rtx *operands, enum machine_mode mode)
{
  int i;

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  for (i = 0; i <= 1; i++)
    {
      rtx op = operands[i], hi, lo;

      switch (GET_CODE (op))
	{
	case REG:
	  {
	    unsigned int regno = REGNO (op);

	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
	      {
		rtx i32;

		/* 64-bit coprocessor register: the low half is the
		   register itself, the high half a zero_extract of
		   its upper 32 bits.  */
		lo = gen_rtx_REG (SImode, regno);
		i32 = GEN_INT (32);
		hi = gen_rtx_ZERO_EXTRACT (SImode,
					   gen_rtx_REG (DImode, regno),
					   i32, i32);
	      }
	    else
	      {
		/* Adjacent register pair; endianness selects which
		   register number is the high half.  */
		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
	      }
	  }
	  break;

	case CONST_INT:
	case CONST_DOUBLE:
	case MEM:
	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* The high part of CR <- GPR moves must be done after the low part. */
      operands [i + 4] = lo;
      operands [i + 2] = hi;
    }

  if (reg_mentioned_p (operands[2], operands[5])
      || GET_CODE (operands[2]) == ZERO_EXTRACT
      || GET_CODE (operands[4]) == ZERO_EXTRACT)
    {
      rtx tmp;

      /* Overlapping register pairs -- make sure we don't
	 early-clobber ourselves.  */
      tmp = operands[2];
      operands[2] = operands[4];
      operands[4] = tmp;
      tmp = operands[3];
      operands[3] = operands[5];
      operands[5] = tmp;
    }

#if DEBUG_SPLIT_WIDE_MOVE
  fprintf(stderr, "\033[34m");
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  debug_rtx (operands[4]);
  debug_rtx (operands[5]);
  fprintf(stderr, "\033[0m");
#endif
}
1755
1756/* Emit a setcc instruction in its entirity. */
1757
1758static bool
1759mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1760{
1761 rtx tmp;
1762
1763 switch (code)
1764 {
1765 case GT:
1766 case GTU:
1767 tmp = op1, op1 = op2, op2 = tmp;
1768 code = swap_condition (code);
1769 /* FALLTHRU */
1770
1771 case LT:
1772 case LTU:
1773 op1 = force_reg (SImode, op1);
1774 emit_insn (gen_rtx_SET (VOIDmode, dest,
1775 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1776 return true;
1777
1778 case EQ:
1779 if (op2 != const0_rtx)
1780 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1781 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1782 return true;
1783
1784 case NE:
1785 /* Branchful sequence:
1786 mov dest, 0 16-bit
1787 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1788 mov dest, 1 16-bit
1789
1790 Branchless sequence:
1791 add3 tmp, op1, -op2 32-bit (or mov + sub)
1792 sltu3 tmp, tmp, 1 16-bit
1793 xor3 dest, tmp, 1 32-bit
1794 */
1795 if (optimize_size && op2 != const0_rtx)
1796 return false;
1797
1798 if (op2 != const0_rtx)
1799 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1800
1801 op2 = gen_reg_rtx (SImode);
1802 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1803
1804 emit_insn (gen_rtx_SET (VOIDmode, dest,
1805 gen_rtx_XOR (SImode, op2, const1_rtx)));
1806 return true;
1807
1808 case LE:
1809 if (GET_CODE (op2) != CONST_INT
1810 || INTVAL (op2) == 0x7ffffff)
1811 return false;
1812 op2 = GEN_INT (INTVAL (op2) + 1);
1813 return mep_expand_setcc_1 (LT, dest, op1, op2);
1814
1815 case LEU:
1816 if (GET_CODE (op2) != CONST_INT
1817 || INTVAL (op2) == -1)
1818 return false;
1819 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1820 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1821
1822 case GE:
1823 if (GET_CODE (op2) != CONST_INT
1824 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1825 return false;
1826 op2 = GEN_INT (INTVAL (op2) - 1);
1827 return mep_expand_setcc_1 (GT, dest, op1, op2);
1828
1829 case GEU:
1830 if (GET_CODE (op2) != CONST_INT
1831 || op2 == const0_rtx)
1832 return false;
1833 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1834 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1835
1836 default:
1837 gcc_unreachable ();
1838 }
1839}
1840
1841bool
1842mep_expand_setcc (rtx *operands)
1843{
1844 rtx dest = operands[0];
1845 enum rtx_code code = GET_CODE (operands[1]);
1846 rtx op0 = operands[2];
1847 rtx op1 = operands[3];
1848
1849 return mep_expand_setcc_1 (code, dest, op0, op1);
1850}
1851
/* Expand a conditional-branch comparison.  operands[0] is the
   comparison code, operands[1]/operands[2] its arguments.  Rewrites
   the comparison into one the hardware can branch on (eq/ne against
   reg-or-imm4, or lt/ltu materialized into a temporary via
   mep_expand_setcc_1) and returns the comparison rtx.  */

rtx
mep_expand_cbranch (rtx *operands)
{
  enum rtx_code code = GET_CODE (operands[0]);
  rtx op0 = operands[1];
  rtx op1 = operands[2];
  rtx tmp;

 restart:
  switch (code)
    {
    case LT:
      /* Small immediates are handled directly...  */
      if (mep_imm4_operand (op1, SImode))
	break;

      /* ...otherwise compute the setcc and branch on (tmp != 0).  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GE:
      /* (a >= b) is the complement of (a < b).  */
      if (mep_imm4_operand (op1, SImode))
	break;

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));

      code = EQ;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case EQ:
    case NE:
      if (! mep_reg_or_imm4_operand (op1, SImode))
	op1 = force_reg (SImode, op1);
      break;

    case LE:
    case GT:
      /* Bump the constant and retry as LT/GE, guarding against
	 overflow at INT_MAX.  */
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) != 0x7fffffff)
	{
	  op1 = GEN_INT (INTVAL (op1) + 1);
	  code = (code == LE ? LT : GE);
	  goto restart;
	}

      /* Otherwise branch on (b < a): LE is its complement.  */
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));

      code = (code == LE ? EQ : NE);
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LTU:
      /* (a <u 1) is just (a == 0).  */
      if (op1 == const1_rtx)
	{
	  code = EQ;
	  op1 = const0_rtx;
	  break;
	}

      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case LEU:
      /* Try LEU directly, else the complement of (b <u a).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GTU:
      tmp = gen_reg_rtx (SImode);
      gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
      code = NE;
      op0 = tmp;
      op1 = const0_rtx;
      break;

    case GEU:
      /* Try GEU directly, else the complement of (a <u b).  */
      tmp = gen_reg_rtx (SImode);
      if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
	code = NE;
      else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
	code = EQ;
      else
	gcc_unreachable ();
      op0 = tmp;
      op1 = const0_rtx;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
1964
1965const char *
1966mep_emit_cbranch (rtx *operands, int ne)
1967{
1968 if (GET_CODE (operands[1]) == REG)
1969 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
d839f1eb 1970 else if (INTVAL (operands[1]) == 0 && !mep_vliw_function_p(cfun->decl))
7acf4da6
DD
1971 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1972 else
1973 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1974}
1975
/* Expand a call pattern.  RETURNS_VALUE is 1 for call_value (operand
   0 is the result register), 0 for plain calls.  $tp and $gp are
   passed to the call patterns (presumably so they are treated as live
   across the call -- see the call patterns in mep.md).  */

void
mep_expand_call (rtx *operands, int returns_value)
{
  rtx addr = operands[returns_value];
  rtx tp = mep_tp_rtx ();
  rtx gp = mep_gp_rtx ();

  gcc_assert (GET_CODE (addr) == MEM);

  addr = XEXP (addr, 0);

  /* Force unsupported call addresses into a register.  */
  if (! mep_call_address_operand (addr, VOIDmode))
    addr = force_reg (SImode, addr);

  if (! operands[returns_value+2])
    operands[returns_value+2] = const0_rtx;

  if (returns_value)
    emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
					     operands[3], tp, gp));
  else
    emit_call_insn (gen_call_internal (addr, operands[1],
				       operands[2], tp, gp));
}
2000\f
2001/* Aliasing Support. */
2002
/* If X is a machine specific address (i.e. a symbol or label being
   referenced as a displacement from the GOT implemented using an
   UNSPEC), then return the base term.  Otherwise return X.  */

rtx
mep_find_base_term (rtx x)
{
  rtx base, term;
  int unspec;

  if (GET_CODE (x) != PLUS)
    return x;
  base = XEXP (x, 0);
  term = XEXP (x, 1);

  /* Recognize ($tp + %tprel(sym)) and ($gp + %gprel(sym)).  */
  if (has_hard_reg_initial_val(Pmode, TP_REGNO)
      && base == mep_tp_rtx ())
    unspec = UNS_TPREL;
  else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
	   && base == mep_gp_rtx ())
    unspec = UNS_GPREL;
  else
    return x;

  if (GET_CODE (term) != CONST)
    return x;
  term = XEXP (term, 0);

  if (GET_CODE (term) != UNSPEC
      || XINT (term, 1) != unspec)
    return x;

  /* The symbol wrapped by the UNSPEC is the base term.  */
  return XVECEXP (term, 0, 0);
}
2037\f
2038/* Reload Support. */
2039
2040/* Return true if the registers in CLASS cannot represent the change from
2041 modes FROM to TO. */
2042
2043bool
2044mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2045 enum reg_class regclass)
2046{
2047 if (from == to)
2048 return false;
2049
2050 /* 64-bit COP regs must remain 64-bit COP regs. */
2051 if (TARGET_64BIT_CR_REGS
2052 && (regclass == CR_REGS
2053 || regclass == LOADABLE_CR_REGS)
2054 && (GET_MODE_SIZE (to) < 8
2055 || GET_MODE_SIZE (from) < 8))
2056 return true;
2057
2058 return false;
2059}
2060
2061#define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2062
2063static bool
2064mep_general_reg (rtx x)
2065{
2066 while (GET_CODE (x) == SUBREG)
2067 x = XEXP (x, 0);
2068 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2069}
2070
2071static bool
2072mep_nongeneral_reg (rtx x)
2073{
2074 while (GET_CODE (x) == SUBREG)
2075 x = XEXP (x, 0);
2076 return (GET_CODE (x) == REG
2077 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2078}
2079
2080static bool
2081mep_general_copro_reg (rtx x)
2082{
2083 while (GET_CODE (x) == SUBREG)
2084 x = XEXP (x, 0);
2085 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2086}
2087
2088static bool
2089mep_nonregister (rtx x)
2090{
2091 while (GET_CODE (x) == SUBREG)
2092 x = XEXP (x, 0);
2093 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2094}
2095
#define DEBUG_RELOAD 0

/* Return the secondary reload class needed for moving value X to or
   from a register in coprocessor register class CLASS.  */

static enum reg_class
mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
{
  if (mep_general_reg (x))
    /* We can do the move directly if mep_have_core_copro_moves_p,
       otherwise we need to go through memory.  Either way, no secondary
       register is needed.  */
    return NO_REGS;

  if (mep_general_copro_reg (x))
    {
      /* We can do the move directly if mep_have_copro_copro_moves_p.  */
      if (mep_have_copro_copro_moves_p)
	return NO_REGS;

      /* Otherwise we can use a temporary if mep_have_core_copro_moves_p.  */
      if (mep_have_core_copro_moves_p)
	return GENERAL_REGS;

      /* Otherwise we need to do it through memory.  No secondary
	 register is needed.  */
      return NO_REGS;
    }

  if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
      && constraint_satisfied_p (x, CONSTRAINT_U))
    /* X is a memory value that we can access directly.  */
    return NO_REGS;

  /* We have to move X into a GPR first and then copy it to
     the coprocessor register.  The move from the GPR to the
     coprocessor might be done directly or through memory,
     depending on mep_have_core_copro_moves_p. */
  return GENERAL_REGS;
}
2136
/* Copying X to register in RCLASS.  Return the class of an
   intermediate register needed for the copy, or NO_REGS when it can
   be done directly.  */

enum reg_class
mep_secondary_input_reload_class (enum reg_class rclass,
				  enum machine_mode mode ATTRIBUTE_UNUSED,
				  rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Coprocessor destinations have their own rules.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  /* Other non-general destinations can only be loaded via a GPR.  */
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif
  return (enum reg_class) rv;
}
2162
/* Copying register in RCLASS to X.  Return the class of an
   intermediate register needed for the copy, or NO_REGS when it can
   be done directly.  Mirrors mep_secondary_input_reload_class.  */

enum reg_class
mep_secondary_output_reload_class (enum reg_class rclass,
				   enum machine_mode mode ATTRIBUTE_UNUSED,
				   rtx x)
{
  int rv = NO_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
  debug_rtx (x);
#endif

  /* Coprocessor sources have their own rules.  */
  if (reg_class_subset_p (rclass, CR_REGS))
    rv = mep_secondary_copro_reload_class (rclass, x);
  /* Other non-general sources can only be stored via a GPR.  */
  else if (MEP_NONGENERAL_CLASS (rclass)
	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
    rv = GENERAL_REGS;

#if DEBUG_RELOAD
  fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
#endif

  return (enum reg_class) rv;
}
2189
2190/* Implement SECONDARY_MEMORY_NEEDED. */
2191
2192bool
2193mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2194 enum machine_mode mode ATTRIBUTE_UNUSED)
2195{
2196 if (!mep_have_core_copro_moves_p)
2197 {
2198 if (reg_classes_intersect_p (rclass1, CR_REGS)
2199 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2200 return true;
2201 if (reg_classes_intersect_p (rclass2, CR_REGS)
2202 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2203 return true;
2204 if (!mep_have_copro_copro_moves_p
2205 && reg_classes_intersect_p (rclass1, CR_REGS)
2206 && reg_classes_intersect_p (rclass2, CR_REGS))
2207 return true;
2208 }
2209 return false;
2210}
2211
/* Expand a secondary-reload pattern; operands[2] is the scratch
   register supplied by reload.  WHICH encodes the operands one
   decimal digit each (tens digit = operand 0, units = operand 1);
   within a digit, a far symbol contributes 2 and a control
   (non-general) register contributes 1.  */

void
mep_expand_reload (rtx *operands, enum machine_mode mode)
{
  /* There are three cases for each direction:
     register, farsym
     control, farsym
     control, nearsym */

  int s0 = mep_section_tag (operands[0]) == 'f';
  int s1 = mep_section_tag (operands[1]) == 'f';
  int c0 = mep_nongeneral_reg (operands[0]);
  int c1 = mep_nongeneral_reg (operands[1]);
  int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);

#if DEBUG_RELOAD
  fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
#endif

  /* NOTE: the labels written with a leading zero (00, 01, 02) are
     octal constants, but every digit is below 8 so they equal the
     intended decimal values.  */
  switch (which)
    {
    case 00: /* Don't know why this gets here. */
    case 02: /* general = far */
      emit_move_insn (operands[0], operands[1]);
      return;

    case 10: /* cr = mem */
    case 11: /* cr = cr */
    case 01: /* mem = cr */
    case 12: /* cr = far */
      /* Route the value through the general-register scratch.  */
      emit_move_insn (operands[2], operands[1]);
      emit_move_insn (operands[0], operands[2]);
      return;

    case 20: /* far = general */
      /* Materialize the far address in the scratch, then store.  */
      emit_move_insn (operands[2], XEXP (operands[1], 0));
      emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
      return;

    case 21: /* far = cr */
    case 22: /* far = far */
    default:
      fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
	       which, mode_name[mode]);
      debug_rtx (operands[0]);
      debug_rtx (operands[1]);
      gcc_unreachable ();
    }
}
2262
2263/* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2264 can be moved directly into registers 0 to 7, but not into the rest.
2265 If so, and if the required class includes registers 0 to 7, restrict
2266 it to those registers. */
2267
2268enum reg_class
2269mep_preferred_reload_class (rtx x, enum reg_class rclass)
2270{
2271 switch (GET_CODE (x))
2272 {
2273 case CONST_INT:
2274 if (INTVAL (x) >= 0x10000
2275 && INTVAL (x) < 0x01000000
2276 && (INTVAL (x) & 0xffff) != 0
2277 && reg_class_subset_p (TPREL_REGS, rclass))
2278 rclass = TPREL_REGS;
2279 break;
2280
2281 case CONST:
2282 case SYMBOL_REF:
2283 case LABEL_REF:
2284 if (mep_section_tag (x) != 'f'
2285 && reg_class_subset_p (TPREL_REGS, rclass))
2286 rclass = TPREL_REGS;
2287 break;
2288
2289 default:
2290 break;
2291 }
2292 return rclass;
2293}
2294\f
/* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
   moves, 4 for direct double-register moves, and 1000 for anything
   that requires a temporary register or temporary stack slot.  */

int
mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
{
  /* Copro-to-copro with direct moves available.  */
  if (mep_have_copro_copro_moves_p
      && reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Copro-to-copro without direct moves: twice the cost.  */
  if (reg_class_subset_p (from, CR_REGS)
      && reg_class_subset_p (to, CR_REGS))
    {
      if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 8;
      return 4;
    }
  /* Exactly one end of the move is a coprocessor register.  */
  if (reg_class_subset_p (from, CR_REGS)
      || reg_class_subset_p (to, CR_REGS))
    {
      if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	return 4;
      return 2;
    }
  /* Moves needing memory, or between two non-general classes, are
     prohibitively expensive.  */
  if (mep_secondary_memory_needed (from, to, mode))
    return 1000;
  if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
    return 1000;

  if (GET_MODE_SIZE (mode) > 4)
    return 4;

  return 2;
}
2334
2335\f
2336/* Functions to save and restore machine-specific function data. */
2337
/* Allocate a fresh, zero-initialized, garbage-collected
   machine_function for the current function.  */

static struct machine_function *
mep_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
2343
2344static rtx
2345mep_allocate_initial_value (rtx reg)
2346{
2347 int rss;
2348
2349 if (GET_CODE (reg) != REG)
2350 return NULL_RTX;
2351
2352 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2353 return NULL_RTX;
2354
2355 /* In interrupt functions, the "initial" values of $gp and $tp are
2356 provided by the prologue. They are not necessarily the same as
2357 the values that the caller was using. */
2358 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2359 if (mep_interrupt_p ())
2360 return NULL_RTX;
2361
2362 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2363 {
2364 cfun->machine->reg_save_size += 4;
2365 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2366 }
2367
2368 rss = cfun->machine->reg_save_slot[REGNO(reg)];
0a81f074 2369 return gen_rtx_MEM (SImode, plus_constant (Pmode, arg_pointer_rtx, -rss));
7acf4da6
DD
2370}
2371
2372rtx
2373mep_return_addr_rtx (int count)
2374{
2375 if (count != 0)
2376 return const0_rtx;
2377
2378 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2379}
2380
2381static rtx
2382mep_tp_rtx (void)
2383{
2384 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2385}
2386
2387static rtx
2388mep_gp_rtx (void)
2389{
2390 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2391}
2392
2393static bool
2394mep_interrupt_p (void)
2395{
2396 if (cfun->machine->interrupt_handler == 0)
2397 {
2398 int interrupt_handler
2399 = (lookup_attribute ("interrupt",
2400 DECL_ATTRIBUTES (current_function_decl))
2401 != NULL_TREE);
2402 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2403 }
2404 return cfun->machine->interrupt_handler == 2;
2405}
2406
2407static bool
2408mep_disinterrupt_p (void)
2409{
2410 if (cfun->machine->disable_interrupts == 0)
2411 {
2412 int disable_interrupts
2413 = (lookup_attribute ("disinterrupt",
2414 DECL_ATTRIBUTES (current_function_decl))
2415 != NULL_TREE);
2416 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2417 }
2418 return cfun->machine->disable_interrupts == 2;
2419}
2420
2421\f
2422/* Frame/Epilog/Prolog Related. */
2423
2424static bool
2425mep_reg_set_p (rtx reg, rtx insn)
2426{
2427 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2428 if (INSN_P (insn))
2429 {
2430 if (FIND_REG_INC_NOTE (insn, reg))
2431 return true;
2432 insn = PATTERN (insn);
2433 }
2434
2435 if (GET_CODE (insn) == SET
2436 && GET_CODE (XEXP (insn, 0)) == REG
2437 && GET_CODE (XEXP (insn, 1)) == REG
2438 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2439 return false;
2440
2441 return set_of (reg, insn) != NULL_RTX;
2442}
2443
2444
/* Cache states for cfun->machine->reg_saved[] (see
   mep_call_saves_register).  */
#define MEP_SAVES_UNKNOWN 0
#define MEP_SAVES_YES 1
#define MEP_SAVES_MAYBE 2
#define MEP_SAVES_NO 3
2449
2450static bool
2451mep_reg_set_in_function (int regno)
2452{
2453 rtx reg, insn;
2454
2455 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2456 return true;
2457
2458 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2459 return true;
2460
2461 push_topmost_sequence ();
2462 insn = get_insns ();
2463 pop_topmost_sequence ();
2464
2465 if (!insn)
2466 return false;
2467
2468 reg = gen_rtx_REG (SImode, regno);
2469
2470 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2471 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2472 return true;
2473 return false;
2474}
2475
2476static bool
2477mep_asm_without_operands_p (void)
2478{
2479 if (cfun->machine->asms_without_operands == 0)
2480 {
2481 rtx insn;
2482
2483 push_topmost_sequence ();
2484 insn = get_insns ();
2485 pop_topmost_sequence ();
2486
2487 cfun->machine->asms_without_operands = 1;
2488 while (insn)
2489 {
2490 if (INSN_P (insn)
2491 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2492 {
2493 cfun->machine->asms_without_operands = 2;
2494 break;
2495 }
2496 insn = NEXT_INSN (insn);
2497 }
2498
2499 }
2500 return cfun->machine->asms_without_operands == 2;
2501}
2502
2503/* Interrupt functions save/restore every call-preserved register, and
2504 any call-used register it uses (or all if it calls any function,
2505 since they may get clobbered there too). Here we check to see
2506 which call-used registers need saving. */
2507
d1b5afd5
DD
/* Coprocessor control registers that interrupt handlers must also
   save when targeting IVC2.  */
#define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
			    && (r == FIRST_CCR_REGNO + 1 \
				|| (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
				|| (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2512
7acf4da6
DD
2513static bool
2514mep_interrupt_saved_reg (int r)
2515{
2516 if (!mep_interrupt_p ())
2517 return false;
2518 if (r == REGSAVE_CONTROL_TEMP
2519 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2520 return true;
2521 if (mep_asm_without_operands_p ()
2522 && (!fixed_regs[r]
d1b5afd5
DD
2523 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2524 || IVC2_ISAVED_REG (r)))
7acf4da6
DD
2525 return true;
2526 if (!current_function_is_leaf)
2527 /* Function calls mean we need to save $lp. */
d1b5afd5 2528 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
7acf4da6
DD
2529 return true;
2530 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2531 /* The interrupt handler might use these registers for repeat blocks,
2532 or it might call a function that does so. */
2533 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2534 return true;
2535 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2536 return false;
2537 /* Functions we call might clobber these. */
2538 if (call_used_regs[r] && !fixed_regs[r])
2539 return true;
f2082f90 2540 /* Additional registers that need to be saved for IVC2. */
d1b5afd5 2541 if (IVC2_ISAVED_REG (r))
f2082f90
DD
2542 return true;
2543
7acf4da6
DD
2544 return false;
2545}
2546
2547static bool
2548mep_call_saves_register (int r)
2549{
e756464b 2550 if (! cfun->machine->frame_locked)
7acf4da6
DD
2551 {
2552 int rv = MEP_SAVES_NO;
2553
2554 if (cfun->machine->reg_save_slot[r])
2555 rv = MEP_SAVES_YES;
2556 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2557 rv = MEP_SAVES_YES;
2558 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2559 rv = MEP_SAVES_YES;
2560 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2561 rv = MEP_SAVES_YES;
2562 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2563 /* We need these to have stack slots so that they can be set during
2564 unwinding. */
2565 rv = MEP_SAVES_YES;
2566 else if (mep_interrupt_saved_reg (r))
2567 rv = MEP_SAVES_YES;
2568 cfun->machine->reg_saved[r] = rv;
2569 }
2570 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2571}
2572
2573/* Return true if epilogue uses register REGNO. */
2574
2575bool
2576mep_epilogue_uses (int regno)
2577{
2578 /* Since $lp is a call-saved register, the generic code will normally
2579 mark it used in the epilogue if it needs to be saved and restored.
2580 However, when profiling is enabled, the profiling code will implicitly
2581 clobber $11. This case has to be handled specially both here and in
2582 mep_call_saves_register. */
2583 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2584 return true;
2585 /* Interrupt functions save/restore pretty much everything. */
2586 return (reload_completed && mep_interrupt_saved_reg (regno));
2587}
2588
2589static int
2590mep_reg_size (int regno)
2591{
2592 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2593 return 8;
2594 return 4;
2595}
2596
7b5cbb57
AS
2597/* Worker function for TARGET_CAN_ELIMINATE. */
2598
2599bool
2600mep_can_eliminate (const int from, const int to)
2601{
2602 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2603 ? ! frame_pointer_needed
2604 : true);
2605}
2606
7acf4da6
DD
2607int
2608mep_elimination_offset (int from, int to)
2609{
2610 int reg_save_size;
2611 int i;
2612 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2613 int total_size;
2614
e756464b
DD
2615 if (!cfun->machine->frame_locked)
2616 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
7acf4da6
DD
2617
2618 /* We don't count arg_regs_to_save in the arg pointer offset, because
2619 gcc thinks the arg pointer has moved along with the saved regs.
2620 However, we do count it when we adjust $sp in the prologue. */
2621 reg_save_size = 0;
2622 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2623 if (mep_call_saves_register (i))
2624 reg_save_size += mep_reg_size (i);
2625
2626 if (reg_save_size % 8)
2627 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2628 else
2629 cfun->machine->regsave_filler = 0;
2630
2631 /* This is what our total stack adjustment looks like. */
2632 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2633
2634 if (total_size % 8)
2635 cfun->machine->frame_filler = 8 - (total_size % 8);
2636 else
2637 cfun->machine->frame_filler = 0;
2638
2639
2640 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2641 return reg_save_size + cfun->machine->regsave_filler;
2642
2643 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2644 return cfun->machine->frame_filler + frame_size;
2645
2646 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2647 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2648
2649 gcc_unreachable ();
2650}
2651
2652static rtx
2653F (rtx x)
2654{
2655 RTX_FRAME_RELATED_P (x) = 1;
2656 return x;
2657}
2658
2659/* Since the prologue/epilogue code is generated after optimization,
2660 we can't rely on gcc to split constants for us. So, this code
2661 captures all the ways to add a constant to a register in one logic
2662 chunk, including optimizing away insns we just don't need. This
2663 makes the prolog/epilog code easier to follow. */
2664static void
2665add_constant (int dest, int src, int value, int mark_frame)
2666{
2667 rtx insn;
2668 int hi, lo;
2669
2670 if (src == dest && value == 0)
2671 return;
2672
2673 if (value == 0)
2674 {
2675 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2676 gen_rtx_REG (SImode, src));
2677 if (mark_frame)
2678 RTX_FRAME_RELATED_P(insn) = 1;
2679 return;
2680 }
2681
2682 if (value >= -32768 && value <= 32767)
2683 {
2684 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2685 gen_rtx_REG (SImode, src),
2686 GEN_INT (value)));
2687 if (mark_frame)
2688 RTX_FRAME_RELATED_P(insn) = 1;
2689 return;
2690 }
2691
2692 /* Big constant, need to use a temp register. We use
2693 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2694 area is always small enough to directly add to). */
2695
2696 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2697 lo = value & 0xffff;
2698
2699 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2700 GEN_INT (hi));
2701
2702 if (lo)
2703 {
2704 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2705 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2706 GEN_INT (lo)));
2707 }
2708
2709 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2710 gen_rtx_REG (SImode, src),
2711 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2712 if (mark_frame)
2713 {
2714 RTX_FRAME_RELATED_P(insn) = 1;
2715 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2716 gen_rtx_SET (SImode,
2717 gen_rtx_REG (SImode, dest),
2718 gen_rtx_PLUS (SImode,
2719 gen_rtx_REG (SImode, dest),
2720 GEN_INT (value))));
2721 }
2722}
2723
7acf4da6
DD
2724/* Move SRC to DEST. Mark the move as being potentially dead if
2725 MAYBE_DEAD_P. */
2726
2727static rtx
2728maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2729{
2730 rtx insn = emit_move_insn (dest, src);
2731#if 0
2732 if (maybe_dead_p)
2733 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2734#endif
2735 return insn;
2736}
2737
2738/* Used for interrupt functions, which can't assume that $tp and $gp
2739 contain the correct pointers. */
2740
2741static void
2742mep_reload_pointer (int regno, const char *symbol)
2743{
2744 rtx reg, sym;
2745
2746 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2747 return;
2748
2749 reg = gen_rtx_REG (SImode, regno);
2750 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2751 emit_insn (gen_movsi_topsym_s (reg, sym));
2752 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2753}
2754
e756464b
DD
2755/* Assign save slots for any register not already saved. DImode
2756 registers go at the end of the reg save area; the rest go at the
2757 beginning. This is for alignment purposes. Returns true if a frame
2758 is really needed. */
2759static bool
2760mep_assign_save_slots (int reg_save_size)
7acf4da6 2761{
e756464b 2762 bool really_need_stack_frame = false;
7acf4da6 2763 int di_ofs = 0;
e756464b 2764 int i;
7acf4da6 2765
7acf4da6
DD
2766 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2767 if (mep_call_saves_register(i))
2768 {
2769 int regsize = mep_reg_size (i);
2770
2771 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2772 || mep_reg_set_in_function (i))
e756464b 2773 really_need_stack_frame = true;
7acf4da6
DD
2774
2775 if (cfun->machine->reg_save_slot[i])
2776 continue;
2777
2778 if (regsize < 8)
2779 {
2780 cfun->machine->reg_save_size += regsize;
2781 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2782 }
2783 else
2784 {
2785 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2786 di_ofs += 8;
2787 }
2788 }
e756464b
DD
2789 cfun->machine->frame_locked = 1;
2790 return really_need_stack_frame;
2791}
2792
2793void
2794mep_expand_prologue (void)
2795{
2796 int i, rss, sp_offset = 0;
2797 int reg_save_size;
2798 int frame_size;
d2e1a4c2 2799 int really_need_stack_frame;
e756464b
DD
2800
2801 /* We must not allow register renaming in interrupt functions,
2802 because that invalidates the correctness of the set of call-used
2803 registers we're going to save/restore. */
2804 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2805
2806 if (mep_disinterrupt_p ())
2807 emit_insn (gen_mep_disable_int ());
2808
2809 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2810
2811 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2812 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
d2e1a4c2 2813 really_need_stack_frame = frame_size;
e756464b
DD
2814
2815 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
7acf4da6
DD
2816
2817 sp_offset = reg_save_size;
2818 if (sp_offset + frame_size < 128)
2819 sp_offset += frame_size ;
2820
2821 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2822
2823 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2824 if (mep_call_saves_register(i))
2825 {
2826 rtx mem;
2827 bool maybe_dead_p;
2828 enum machine_mode rmode;
2829
2830 rss = cfun->machine->reg_save_slot[i];
2831
2832 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2833 && (!mep_reg_set_in_function (i)
2834 && !mep_interrupt_p ()))
2835 continue;
2836
2837 if (mep_reg_size (i) == 8)
2838 rmode = DImode;
2839 else
2840 rmode = SImode;
2841
2842 /* If there is a pseudo associated with this register's initial value,
2843 reload might have already spilt it to the stack slot suggested by
2844 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2845 deleted as dead. */
2846 mem = gen_rtx_MEM (rmode,
0a81f074
RS
2847 plus_constant (Pmode, stack_pointer_rtx,
2848 sp_offset - rss));
7acf4da6
DD
2849 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2850
2851 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2852 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2853 else if (rmode == DImode)
2854 {
2855 rtx insn;
2856 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2857
2858 mem = gen_rtx_MEM (SImode,
0a81f074
RS
2859 plus_constant (Pmode, stack_pointer_rtx,
2860 sp_offset - rss + be));
7acf4da6
DD
2861
2862 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2863 gen_rtx_REG (SImode, i),
2864 maybe_dead_p);
2865 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2866 gen_rtx_ZERO_EXTRACT (SImode,
2867 gen_rtx_REG (DImode, i),
2868 GEN_INT (32),
2869 GEN_INT (32)),
2870 maybe_dead_p);
2871 insn = maybe_dead_move (mem,
2872 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2873 maybe_dead_p);
2874 RTX_FRAME_RELATED_P (insn) = 1;
2875
2876 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2877 gen_rtx_SET (VOIDmode,
2878 copy_rtx (mem),
2879 gen_rtx_REG (rmode, i)));
2880 mem = gen_rtx_MEM (SImode,
0a81f074
RS
2881 plus_constant (Pmode, stack_pointer_rtx,
2882 sp_offset - rss + (4-be)));
7acf4da6
DD
2883 insn = maybe_dead_move (mem,
2884 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2885 maybe_dead_p);
2886 }
2887 else
2888 {
2889 rtx insn;
2890 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2891 gen_rtx_REG (rmode, i),
2892 maybe_dead_p);
2893 insn = maybe_dead_move (mem,
2894 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2895 maybe_dead_p);
2896 RTX_FRAME_RELATED_P (insn) = 1;
2897
2898 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2899 gen_rtx_SET (VOIDmode,
2900 copy_rtx (mem),
2901 gen_rtx_REG (rmode, i)));
2902 }
2903 }
2904
2905 if (frame_pointer_needed)
a46f0964
DD
2906 {
2907 /* We've already adjusted down by sp_offset. Total $sp change
2908 is reg_save_size + frame_size. We want a net change here of
2909 just reg_save_size. */
2910 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2911 }
7acf4da6
DD
2912
2913 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2914
2915 if (mep_interrupt_p ())
2916 {
2917 mep_reload_pointer(GP_REGNO, "__sdabase");
2918 mep_reload_pointer(TP_REGNO, "__tpbase");
2919 }
2920}
2921
2922static void
2923mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2924{
2925 int local = hwi_local;
2926 int frame_size = local + crtl->outgoing_args_size;
2927 int reg_save_size;
2928 int ffill;
2929 int i, sp, skip;
2930 int sp_offset;
2931 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2932
2933 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2934 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2935 sp_offset = reg_save_size + frame_size;
2936
2937 ffill = cfun->machine->frame_filler;
2938
2939 if (cfun->machine->mep_frame_pointer_needed)
2940 reg_names[FP_REGNO] = "$fp";
2941 else
2942 reg_names[FP_REGNO] = "$8";
2943
2944 if (sp_offset == 0)
2945 return;
2946
2947 if (debug_info_level == DINFO_LEVEL_NONE)
2948 {
2949 fprintf (file, "\t# frame: %d", sp_offset);
2950 if (reg_save_size)
2951 fprintf (file, " %d regs", reg_save_size);
2952 if (local)
2953 fprintf (file, " %d locals", local);
2954 if (crtl->outgoing_args_size)
2955 fprintf (file, " %d args", crtl->outgoing_args_size);
2956 fprintf (file, "\n");
2957 return;
2958 }
2959
2960 fprintf (file, "\t#\n");
2961 fprintf (file, "\t# Initial Frame Information:\n");
2962 if (sp_offset || !frame_pointer_needed)
2963 fprintf (file, "\t# Entry ---------- 0\n");
2964
2965 /* Sort registers by save slots, so they're printed in the order
2966 they appear in memory, not the order they're saved in. */
2967 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2968 slot_map[si] = si;
2969 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2970 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2971 if (cfun->machine->reg_save_slot[slot_map[si]]
2972 > cfun->machine->reg_save_slot[slot_map[sj]])
2973 {
2974 int t = slot_map[si];
2975 slot_map[si] = slot_map[sj];
2976 slot_map[sj] = t;
2977 }
2978
2979 sp = 0;
2980 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2981 {
2982 int rsize;
2983 int r = slot_map[i];
2984 int rss = cfun->machine->reg_save_slot[r];
2985
e756464b
DD
2986 if (!mep_call_saves_register (r))
2987 continue;
2988
2989 if ((r == TP_REGNO || r == GP_REGNO || r == LP_REGNO)
2990 && (!mep_reg_set_in_function (r)
2991 && !mep_interrupt_p ()))
7acf4da6
DD
2992 continue;
2993
2994 rsize = mep_reg_size(r);
2995 skip = rss - (sp+rsize);
2996 if (skip)
2997 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2998 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2999 rsize, reg_names[r], sp_offset - rss);
3000 sp = rss;
3001 }
3002
3003 skip = reg_save_size - sp;
3004 if (skip)
3005 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3006
3007 if (frame_pointer_needed)
3008 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3009 if (local)
3010 fprintf (file, "\t# %3d bytes for local vars\n", local);
3011 if (ffill)
3012 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
3013 if (crtl->outgoing_args_size)
3014 fprintf (file, "\t# %3d bytes for outgoing args\n",
3015 crtl->outgoing_args_size);
3016 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3017 fprintf (file, "\t#\n");
3018}
3019
3020
/* Mode flags consulted by mep_expand_epilogue: set while expanding an
   EH-return epilogue (which must leave $lp alone) and a sibcall
   epilogue, respectively.  */
static int mep_prevent_lp_restore = 0;
static int mep_sibcall_epilogue = 0;
3023
3024void
3025mep_expand_epilogue (void)
3026{
3027 int i, sp_offset = 0;
3028 int reg_save_size = 0;
3029 int frame_size;
3030 int lp_temp = LP_REGNO, lp_slot = -1;
3031 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3032 int interrupt_handler = mep_interrupt_p ();
3033
3034 if (profile_arc_flag == 2)
3035 emit_insn (gen_mep_bb_trace_ret ());
3036
3037 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3038 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3039
e756464b 3040 really_need_stack_frame |= mep_assign_save_slots (reg_save_size);
7acf4da6
DD
3041
3042 if (frame_pointer_needed)
3043 {
3044 /* If we have a frame pointer, we won't have a reliable stack
3045 pointer (alloca, you know), so rebase SP from FP */
3046 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3047 gen_rtx_REG (SImode, FP_REGNO));
3048 sp_offset = reg_save_size;
3049 }
3050 else
3051 {
3052 /* SP is right under our local variable space. Adjust it if
3053 needed. */
3054 sp_offset = reg_save_size + frame_size;
3055 if (sp_offset >= 128)
3056 {
3057 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3058 sp_offset -= frame_size;
3059 }
3060 }
3061
3062 /* This is backwards so that we restore the control and coprocessor
3063 registers before the temporary registers we use to restore
3064 them. */
3065 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3066 if (mep_call_saves_register (i))
3067 {
3068 enum machine_mode rmode;
3069 int rss = cfun->machine->reg_save_slot[i];
3070
3071 if (mep_reg_size (i) == 8)
3072 rmode = DImode;
3073 else
3074 rmode = SImode;
3075
3076 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3077 && !(mep_reg_set_in_function (i) || interrupt_handler))
3078 continue;
3079 if (mep_prevent_lp_restore && i == LP_REGNO)
3080 continue;
3081 if (!mep_prevent_lp_restore
3082 && !interrupt_handler
3083 && (i == 10 || i == 11))
3084 continue;
3085
3086 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3087 emit_move_insn (gen_rtx_REG (rmode, i),
3088 gen_rtx_MEM (rmode,
0a81f074
RS
3089 plus_constant (Pmode, stack_pointer_rtx,
3090 sp_offset - rss)));
7acf4da6
DD
3091 else
3092 {
3093 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3094 /* Defer this one so we can jump indirect rather than
3095 copying the RA to $lp and "ret". EH epilogues
3096 automatically skip this anyway. */
3097 lp_slot = sp_offset-rss;
3098 else
3099 {
3100 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3101 gen_rtx_MEM (rmode,
0a81f074
RS
3102 plus_constant (Pmode,
3103 stack_pointer_rtx,
7acf4da6
DD
3104 sp_offset-rss)));
3105 emit_move_insn (gen_rtx_REG (rmode, i),
3106 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3107 }
3108 }
3109 }
3110 if (lp_slot != -1)
3111 {
3112 /* Restore this one last so we know it will be in the temp
3113 register when we return by jumping indirectly via the temp. */
3114 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3115 gen_rtx_MEM (SImode,
0a81f074 3116 plus_constant (Pmode, stack_pointer_rtx,
7acf4da6
DD
3117 lp_slot)));
3118 lp_temp = REGSAVE_CONTROL_TEMP;
3119 }
3120
3121
3122 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3123
3124 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3125 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3126 gen_rtx_REG (SImode, SP_REGNO),
3127 cfun->machine->eh_stack_adjust));
3128
3129 if (mep_sibcall_epilogue)
3130 return;
3131
3132 if (mep_disinterrupt_p ())
3133 emit_insn (gen_mep_enable_int ());
3134
3135 if (mep_prevent_lp_restore)
3136 {
3137 emit_jump_insn (gen_eh_return_internal ());
3138 emit_barrier ();
3139 }
3140 else if (interrupt_handler)
3141 emit_jump_insn (gen_mep_reti ());
3142 else
3143 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3144}
3145
3146void
3147mep_expand_eh_return (rtx *operands)
3148{
3149 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3150 {
3151 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3152 emit_move_insn (ra, operands[0]);
3153 operands[0] = ra;
3154 }
3155
3156 emit_insn (gen_eh_epilogue (operands[0]));
3157}
3158
3159void
3160mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3161{
3162 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3163 mep_prevent_lp_restore = 1;
3164 mep_expand_epilogue ();
3165 mep_prevent_lp_restore = 0;
3166}
3167
3168void
3169mep_expand_sibcall_epilogue (void)
3170{
3171 mep_sibcall_epilogue = 1;
3172 mep_expand_epilogue ();
3173 mep_sibcall_epilogue = 0;
3174}
3175
3176static bool
3177mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3178{
3179 if (decl == NULL)
3180 return false;
3181
3182 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3183 return false;
3184
3185 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3186 if (mep_interrupt_p () || mep_disinterrupt_p ())
3187 return false;
3188
3189 return true;
3190}
3191
3192rtx
3193mep_return_stackadj_rtx (void)
3194{
3195 return gen_rtx_REG (SImode, 10);
3196}
3197
3198rtx
3199mep_return_handler_rtx (void)
3200{
3201 return gen_rtx_REG (SImode, LP_REGNO);
3202}
3203
/* Emit the profiling call sequence: preserve $0 and $lp around a
   call to __mep_mcount.  */

void
mep_function_profiler (FILE *file)
{
  /* Always right at the beginning of the function.  */
  fprintf (file, "\t# mep function profiler\n");
  fprintf (file, "\tadd\t$sp, -8\n");
  fprintf (file, "\tsw\t$0, ($sp)\n");
  fprintf (file, "\tldc\t$0, $lp\n");
  fprintf (file, "\tsw\t$0, 4($sp)\n");
  fprintf (file, "\tbsr\t__mep_mcount\n");
  fprintf (file, "\tlw\t$0, 4($sp)\n");
  fprintf (file, "\tstc\t$0, $lp\n");
  fprintf (file, "\tlw\t$0, ($sp)\n");
  fprintf (file, "\tadd\t$sp, 8\n\n");
}
3219
3220const char *
3221mep_emit_bb_trace_ret (void)
3222{
3223 fprintf (asm_out_file, "\t# end of block profiling\n");
3224 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3225 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3226 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3227 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3228 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3229 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3230 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3231 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3232 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3233 return "";
3234}
3235
3236#undef SAVE
3237#undef RESTORE
3238\f
3239/* Operand Printing. */
3240
3241void
3242mep_print_operand_address (FILE *stream, rtx address)
3243{
3244 if (GET_CODE (address) == MEM)
3245 address = XEXP (address, 0);
3246 else
3247 /* cf: gcc.dg/asm-4.c. */
3248 gcc_assert (GET_CODE (address) == REG);
3249
3250 mep_print_operand (stream, address, 0);
3251}
3252
/* Conversion table for mep_print_operand.  CODE is the operand
   letter (0 for none), PATTERN the operand shape produced by
   encode_pattern, and FORMAT the output template: digits index into
   patternr[], '\\' escapes a literal character.  */
static struct
{
  char code;
  const char *pattern;
  const char *format;
}
const conversions[] =
{
  { 0, "r", "0" },
  { 0, "m+ri", "3(2)" },
  { 0, "mr", "(1)" },
  { 0, "ms", "(1)" },
  { 0, "ml", "(1)" },
  { 0, "mLrs", "%lo(3)(2)" },
  { 0, "mLr+si", "%lo(4+5)(2)" },
  { 0, "m+ru2s", "%tpoff(5)(2)" },
  { 0, "m+ru3s", "%sdaoff(5)(2)" },
  { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
  { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
  { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
  { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
  { 0, "mi", "(1)" },
  { 0, "m+si", "(2+3)" },
  { 0, "m+li", "(2+3)" },
  { 0, "i", "0" },
  { 0, "s", "0" },
  { 0, "+si", "1+2" },
  { 0, "+u2si", "%tpoff(3+4)" },
  { 0, "+u3si", "%sdaoff(3+4)" },
  { 0, "l", "0" },
  { 'b', "i", "0" },
  { 'B', "i", "0" },
  { 'U', "i", "0" },
  { 'h', "i", "0" },
  { 'h', "Hs", "%hi(1)" },
  { 'I', "i", "0" },
  { 'I', "u2s", "%tpoff(2)" },
  { 'I', "u3s", "%sdaoff(2)" },
  { 'I', "+u2si", "%tpoff(3+4)" },
  { 'I', "+u3si", "%sdaoff(3+4)" },
  { 'J', "i", "0" },
  { 'P', "mr", "(1\\+),\\0" },
  { 'x', "i", "0" },
  { 0, 0, 0 }
};
3298
3299static int
3300unique_bit_in (HOST_WIDE_INT i)
3301{
3302 switch (i & 0xff)
3303 {
3304 case 0x01: case 0xfe: return 0;
3305 case 0x02: case 0xfd: return 1;
3306 case 0x04: case 0xfb: return 2;
3307 case 0x08: case 0xf7: return 3;
3308 case 0x10: case 0x7f: return 4;
3309 case 0x20: case 0xbf: return 5;
3310 case 0x40: case 0xdf: return 6;
3311 case 0x80: case 0xef: return 7;
3312 default:
3313 gcc_unreachable ();
3314 }
3315}
3316
3317static int
3318bit_size_for_clip (HOST_WIDE_INT i)
3319{
3320 int rv;
3321
3322 for (rv = 0; rv < 31; rv ++)
3323 if (((HOST_WIDE_INT) 1 << rv) > i)
3324 return rv + 1;
3325 gcc_unreachable ();
3326}
3327
3328/* Print an operand to a assembler instruction. */
3329
3330void
3331mep_print_operand (FILE *file, rtx x, int code)
3332{
3333 int i, j;
3334 const char *real_name;
3335
3336 if (code == '<')
3337 {
3338 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3339 we're using, then skip over the "mep_" part of its name. */
3340 const struct cgen_insn *insn;
3341
3342 if (mep_get_move_insn (mep_cmov, &insn))
3343 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3344 else
3345 mep_intrinsic_unavailable (mep_cmov);
3346 return;
3347 }
3348 if (code == 'L')
3349 {
3350 switch (GET_CODE (x))
3351 {
3352 case AND:
3353 fputs ("clr", file);
3354 return;
3355 case IOR:
3356 fputs ("set", file);
3357 return;
3358 case XOR:
3359 fputs ("not", file);
3360 return;
3361 default:
3362 output_operand_lossage ("invalid %%L code");
3363 }
3364 }
3365 if (code == 'M')
3366 {
3367 /* Print the second operand of a CR <- CR move. If we're using
3368 a two-operand instruction (i.e., a real cmov), then just print
3369 the operand normally. If we're using a "reg, reg, immediate"
3370 instruction such as caddi3, print the operand followed by a
3371 zero field. If we're using a three-register instruction,
3372 print the operand twice. */
3373 const struct cgen_insn *insn;
3374
3375 mep_print_operand (file, x, 0);
3376 if (mep_get_move_insn (mep_cmov, &insn)
3377 && insn_data[insn->icode].n_operands == 3)
3378 {
3379 fputs (", ", file);
3380 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3381 mep_print_operand (file, x, 0);
3382 else
3383 mep_print_operand (file, const0_rtx, 0);
3384 }
3385 return;
3386 }
3387
3388 encode_pattern (x);
3389 for (i = 0; conversions[i].pattern; i++)
3390 if (conversions[i].code == code
3391 && strcmp(conversions[i].pattern, pattern) == 0)
3392 {
3393 for (j = 0; conversions[i].format[j]; j++)
3394 if (conversions[i].format[j] == '\\')
3395 {
3396 fputc (conversions[i].format[j+1], file);
3397 j++;
3398 }
3399 else if (ISDIGIT(conversions[i].format[j]))
3400 {
3401 rtx r = patternr[conversions[i].format[j] - '0'];
3402 switch (GET_CODE (r))
3403 {
3404 case REG:
3405 fprintf (file, "%s", reg_names [REGNO (r)]);
3406 break;
3407 case CONST_INT:
3408 switch (code)
3409 {
3410 case 'b':
3411 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3412 break;
3413 case 'B':
3414 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3415 break;
3416 case 'h':
3417 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3418 break;
3419 case 'U':
3420 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3421 break;
3422 case 'J':
3423 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3424 break;
3425 case 'x':
3426 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3427 && !(INTVAL (r) & 0xff))
3428 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3429 else
3430 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3431 break;
3432 case 'I':
3433 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3434 && conversions[i].format[j+1] == 0)
3435 {
3436 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3437 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3438 }
3439 else
3440 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3441 break;
3442 default:
3443 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3444 break;
3445 }
3446 break;
3447 case CONST_DOUBLE:
3448 fprintf(file, "[const_double 0x%lx]",
3449 (unsigned long) CONST_DOUBLE_HIGH(r));
3450 break;
3451 case SYMBOL_REF:
9018b19c 3452 real_name = targetm.strip_name_encoding (XSTR (r, 0));
7acf4da6
DD
3453 assemble_name (file, real_name);
3454 break;
3455 case LABEL_REF:
3456 output_asm_label (r);
3457 break;
3458 default:
3459 fprintf (stderr, "don't know how to print this operand:");
3460 debug_rtx (r);
3461 gcc_unreachable ();
3462 }
3463 }
3464 else
3465 {
3466 if (conversions[i].format[j] == '+'
3467 && (!code || code == 'I')
3468 && ISDIGIT (conversions[i].format[j+1])
3469 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3470 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3471 continue;
3472 fputc(conversions[i].format[j], file);
3473 }
3474 break;
3475 }
3476 if (!conversions[i].pattern)
3477 {
3478 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3479 debug_rtx(x);
3480 }
3481
3482 return;
3483}
3484
3485void
3486mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3487 int noperands ATTRIBUTE_UNUSED)
3488{
3489 /* Despite the fact that MeP is perfectly capable of branching and
3490 doing something else in the same bundle, gcc does jump
3491 optimization *after* scheduling, so we cannot trust the bundling
3492 flags on jump instructions. */
3493 if (GET_MODE (insn) == BImode
3494 && get_attr_slots (insn) != SLOTS_CORE)
3495 fputc ('+', asm_out_file);
3496}
3497
3498/* Function args in registers. */
3499
3500static void
d5cc9181 3501mep_setup_incoming_varargs (cumulative_args_t cum,
7acf4da6
DD
3502 enum machine_mode mode ATTRIBUTE_UNUSED,
3503 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3504 int second_time ATTRIBUTE_UNUSED)
3505{
d5cc9181 3506 int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
7acf4da6
DD
3507
3508 if (nsave > 0)
3509 cfun->machine->arg_regs_to_save = nsave;
3510 *pretend_size = nsave * 4;
3511}
3512
3513static int
3514bytesize (const_tree type, enum machine_mode mode)
3515{
3516 if (mode == BLKmode)
3517 return int_size_in_bytes (type);
3518 return GET_MODE_SIZE (mode);
3519}
3520
3521static rtx
3522mep_expand_builtin_saveregs (void)
3523{
3524 int bufsize, i, ns;
3525 rtx regbuf;
3526
3527 ns = cfun->machine->arg_regs_to_save;
683a1be6
DD
3528 if (TARGET_IVC2)
3529 {
3530 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3531 regbuf = assign_stack_local (SImode, bufsize, 64);
3532 }
3533 else
3534 {
3535 bufsize = ns * 4;
3536 regbuf = assign_stack_local (SImode, bufsize, 32);
3537 }
7acf4da6
DD
3538
3539 move_block_from_reg (5-ns, regbuf, ns);
3540
3541 if (TARGET_IVC2)
3542 {
3543 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
683a1be6 3544 int ofs = 8 * ((ns+1)/2);
7acf4da6
DD
3545
3546 for (i=0; i<ns; i++)
3547 {
3548 int rn = (4-ns) + i + 49;
3549 rtx ptr;
3550
3551 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3552 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3553 ofs += 8;
3554 }
3555 }
3556 return XEXP (regbuf, 0);
3557}
3558
3559#define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3560
3561static tree
3562mep_build_builtin_va_list (void)
3563{
3564 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3565 tree record;
3566
3567
3568 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3569
3570 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3571 get_identifier ("__va_next_gp"), ptr_type_node);
3572 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3573 get_identifier ("__va_next_gp_limit"),
3574 ptr_type_node);
3575 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3576 ptr_type_node);
3577 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3578 ptr_type_node);
3579
3580 DECL_FIELD_CONTEXT (f_next_gp) = record;
3581 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3582 DECL_FIELD_CONTEXT (f_next_cop) = record;
3583 DECL_FIELD_CONTEXT (f_next_stack) = record;
3584
3585 TYPE_FIELDS (record) = f_next_gp;
910ad8de
NF
3586 DECL_CHAIN (f_next_gp) = f_next_gp_limit;
3587 DECL_CHAIN (f_next_gp_limit) = f_next_cop;
3588 DECL_CHAIN (f_next_cop) = f_next_stack;
7acf4da6
DD
3589
3590 layout_type (record);
3591
3592 return record;
3593}
3594
3595static void
3596mep_expand_va_start (tree valist, rtx nextarg)
3597{
3598 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3599 tree next_gp, next_gp_limit, next_cop, next_stack;
3600 tree t, u;
3601 int ns;
3602
3603 ns = cfun->machine->arg_regs_to_save;
3604
3605 f_next_gp = TYPE_FIELDS (va_list_type_node);
910ad8de
NF
3606 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3607 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3608 f_next_stack = DECL_CHAIN (f_next_cop);
7acf4da6
DD
3609
3610 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3611 NULL_TREE);
3612 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3613 valist, f_next_gp_limit, NULL_TREE);
3614 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3615 NULL_TREE);
3616 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3617 valist, f_next_stack, NULL_TREE);
3618
3619 /* va_list.next_gp = expand_builtin_saveregs (); */
3620 u = make_tree (sizetype, expand_builtin_saveregs ());
3621 u = fold_convert (ptr_type_node, u);
3622 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3623 TREE_SIDE_EFFECTS (t) = 1;
3624 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3625
3626 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
5d49b6a7 3627 u = fold_build_pointer_plus_hwi (u, 4 * ns);
7acf4da6
DD
3628 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3629 TREE_SIDE_EFFECTS (t) = 1;
3630 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3631
5d49b6a7 3632 u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
683a1be6 3633 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
7acf4da6
DD
3634 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3635 TREE_SIDE_EFFECTS (t) = 1;
3636 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3637
3638 /* va_list.next_stack = nextarg; */
3639 u = make_tree (ptr_type_node, nextarg);
3640 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3641 TREE_SIDE_EFFECTS (t) = 1;
3642 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3643}
3644
3645static tree
3646mep_gimplify_va_arg_expr (tree valist, tree type,
12a54f54
NC
3647 gimple_seq *pre_p,
3648 gimple_seq *post_p ATTRIBUTE_UNUSED)
7acf4da6
DD
3649{
3650 HOST_WIDE_INT size, rsize;
3651 bool by_reference, ivc2_vec;
3652 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3653 tree next_gp, next_gp_limit, next_cop, next_stack;
3654 tree label_sover, label_selse;
3655 tree tmp, res_addr;
3656
3657 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3658
3659 size = int_size_in_bytes (type);
3660 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3661
3662 if (by_reference)
3663 {
3664 type = build_pointer_type (type);
3665 size = 4;
3666 }
3667 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3668
3669 f_next_gp = TYPE_FIELDS (va_list_type_node);
910ad8de
NF
3670 f_next_gp_limit = DECL_CHAIN (f_next_gp);
3671 f_next_cop = DECL_CHAIN (f_next_gp_limit);
3672 f_next_stack = DECL_CHAIN (f_next_cop);
7acf4da6
DD
3673
3674 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3675 NULL_TREE);
3676 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3677 valist, f_next_gp_limit, NULL_TREE);
3678 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3679 NULL_TREE);
3680 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3681 valist, f_next_stack, NULL_TREE);
3682
3683 /* if f_next_gp < f_next_gp_limit
3684 IF (VECTOR_P && IVC2)
3685 val = *f_next_cop;
3686 ELSE
3687 val = *f_next_gp;
3688 f_next_gp += 4;
3689 f_next_cop += 8;
3690 else
3691 label_selse:
3692 val = *f_next_stack;
3693 f_next_stack += rsize;
3694 label_sover:
3695 */
3696
3697 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3698 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3699 res_addr = create_tmp_var (ptr_type_node, NULL);
3700
3701 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3702 unshare_expr (next_gp_limit));
3703 tmp = build3 (COND_EXPR, void_type_node, tmp,
3704 build1 (GOTO_EXPR, void_type_node,
3705 unshare_expr (label_selse)),
3706 NULL_TREE);
3707 gimplify_and_add (tmp, pre_p);
3708
3709 if (ivc2_vec)
3710 {
3711 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3712 gimplify_and_add (tmp, pre_p);
3713 }
3714 else
3715 {
3716 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3717 gimplify_and_add (tmp, pre_p);
3718 }
3719
5d49b6a7 3720 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
7acf4da6
DD
3721 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3722
5d49b6a7 3723 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
7acf4da6
DD
3724 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3725
3726 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3727 gimplify_and_add (tmp, pre_p);
3728
3729 /* - - */
3730
3731 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3732 gimplify_and_add (tmp, pre_p);
3733
3734 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3735 gimplify_and_add (tmp, pre_p);
3736
5d49b6a7 3737 tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
7acf4da6
DD
3738 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3739
3740 /* - - */
3741
3742 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3743 gimplify_and_add (tmp, pre_p);
3744
3745 res_addr = fold_convert (build_pointer_type (type), res_addr);
3746
3747 if (by_reference)
3748 res_addr = build_va_arg_indirect_ref (res_addr);
3749
3750 return build_va_arg_indirect_ref (res_addr);
3751}
3752
3753void
3754mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3755 rtx libname ATTRIBUTE_UNUSED,
3756 tree fndecl ATTRIBUTE_UNUSED)
3757{
3758 pcum->nregs = 0;
3759
3760 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3761 pcum->vliw = 1;
3762 else
3763 pcum->vliw = 0;
3764}
3765
0851c6e3
NF
3766/* The ABI is thus: Arguments are in $1, $2, $3, $4, stack. Arguments
3767 larger than 4 bytes are passed indirectly. Return value in 0,
3768 unless bigger than 4 bytes, then the caller passes a pointer as the
3769 first arg. For varargs, we copy $1..$4 to the stack. */
3770
3771static rtx
d5cc9181 3772mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
0851c6e3
NF
3773 const_tree type ATTRIBUTE_UNUSED,
3774 bool named ATTRIBUTE_UNUSED)
7acf4da6 3775{
d5cc9181
JR
3776 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3777
7acf4da6
DD
3778 /* VOIDmode is a signal for the backend to pass data to the call
3779 expander via the second operand to the call pattern. We use
3780 this to determine whether to use "jsr" or "jsrv". */
3781 if (mode == VOIDmode)
0851c6e3 3782 return GEN_INT (cum->vliw);
7acf4da6
DD
3783
3784 /* If we havn't run out of argument registers, return the next. */
0851c6e3 3785 if (cum->nregs < 4)
7acf4da6
DD
3786 {
3787 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
0851c6e3 3788 return gen_rtx_REG (mode, cum->nregs + 49);
7acf4da6 3789 else
0851c6e3 3790 return gen_rtx_REG (mode, cum->nregs + 1);
7acf4da6
DD
3791 }
3792
3793 /* Otherwise the argument goes on the stack. */
3794 return NULL_RTX;
3795}
3796
3797static bool
d5cc9181 3798mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
7acf4da6
DD
3799 enum machine_mode mode,
3800 const_tree type,
3801 bool named ATTRIBUTE_UNUSED)
3802{
3803 int size = bytesize (type, mode);
e756464b
DD
3804
3805 /* This is non-obvious, but yes, large values passed after we've run
3806 out of registers are *still* passed by reference - we put the
3807 address of the parameter on the stack, as well as putting the
3808 parameter itself elsewhere on the stack. */
3809
3810 if (size <= 0 || size > 8)
3811 return true;
3812 if (size <= 4)
3813 return false;
d5cc9181
JR
3814 if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
3815 && type != NULL_TREE && VECTOR_TYPE_P (type))
e756464b
DD
3816 return false;
3817 return true;
7acf4da6
DD
3818}
3819
0851c6e3 3820static void
d5cc9181 3821mep_function_arg_advance (cumulative_args_t pcum,
0851c6e3
NF
3822 enum machine_mode mode ATTRIBUTE_UNUSED,
3823 const_tree type ATTRIBUTE_UNUSED,
3824 bool named ATTRIBUTE_UNUSED)
7acf4da6 3825{
d5cc9181 3826 get_cumulative_args (pcum)->nregs += 1;
7acf4da6
DD
3827}
3828
3829bool
3830mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3831{
3832 int size = bytesize (type, BLKmode);
3833 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
e756464b
DD
3834 return size > 0 && size <= 8 ? 0 : 1;
3835 return size > 0 && size <= 4 ? 0 : 1;
7acf4da6
DD
3836}
3837
3838static bool
3839mep_narrow_volatile_bitfield (void)
3840{
3841 return true;
3842 return false;
3843}
3844
3845/* Implement FUNCTION_VALUE. All values are returned in $0. */
3846
3847rtx
77b0efff 3848mep_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
7acf4da6
DD
3849{
3850 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3851 return gen_rtx_REG (TYPE_MODE (type), 48);
3852 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3853}
3854
3855/* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3856
3857rtx
3858mep_libcall_value (enum machine_mode mode)
3859{
3860 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3861}
3862
3863/* Handle pipeline hazards. */
3864
3865typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3866static const char *opnames[] = { "", "stc", "fsft", "ret" };
3867
3868static int prev_opcode = 0;
3869
3870/* This isn't as optimal as it could be, because we don't know what
3871 control register the STC opcode is storing in. We only need to add
073a8998 3872 the nop if it's the relevant register, but we add it for irrelevant
7acf4da6
DD
3873 registers also. */
3874
3875void
3876mep_asm_output_opcode (FILE *file, const char *ptr)
3877{
3878 int this_opcode = op_none;
3879 const char *hazard = 0;
3880
3881 switch (*ptr)
3882 {
3883 case 'f':
3884 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3885 this_opcode = op_fsft;
3886 break;
3887 case 'r':
3888 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3889 this_opcode = op_ret;
3890 break;
3891 case 's':
3892 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3893 this_opcode = op_stc;
3894 break;
3895 }
3896
3897 if (prev_opcode == op_stc && this_opcode == op_fsft)
3898 hazard = "nop";
3899 if (prev_opcode == op_stc && this_opcode == op_ret)
3900 hazard = "nop";
3901
3902 if (hazard)
3903 fprintf(file, "%s\t# %s-%s hazard\n\t",
3904 hazard, opnames[prev_opcode], opnames[this_opcode]);
3905
3906 prev_opcode = this_opcode;
3907}
3908
3909/* Handle attributes. */
3910
3911static tree
3912mep_validate_based_tiny (tree *node, tree name, tree args,
3913 int flags ATTRIBUTE_UNUSED, bool *no_add)
3914{
3915 if (TREE_CODE (*node) != VAR_DECL
3916 && TREE_CODE (*node) != POINTER_TYPE
3917 && TREE_CODE (*node) != TYPE_DECL)
3918 {
3919 warning (0, "%qE attribute only applies to variables", name);
3920 *no_add = true;
3921 }
3922 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3923 {
3924 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3925 {
3926 warning (0, "address region attributes not allowed with auto storage class");
3927 *no_add = true;
3928 }
3929 /* Ignore storage attribute of pointed to variable: char __far * x; */
3930 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3931 {
3932 warning (0, "address region attributes on pointed-to types ignored");
3933 *no_add = true;
3934 }
3935 }
3936
3937 return NULL_TREE;
3938}
3939
3940static int
3941mep_multiple_address_regions (tree list, bool check_section_attr)
3942{
3943 tree a;
3944 int count_sections = 0;
3945 int section_attr_count = 0;
3946
3947 for (a = list; a; a = TREE_CHAIN (a))
3948 {
3949 if (is_attribute_p ("based", TREE_PURPOSE (a))
3950 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3951 || is_attribute_p ("near", TREE_PURPOSE (a))
3952 || is_attribute_p ("far", TREE_PURPOSE (a))
3953 || is_attribute_p ("io", TREE_PURPOSE (a)))
3954 count_sections ++;
3955 if (check_section_attr)
3956 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3957 }
3958
3959 if (check_section_attr)
3960 return section_attr_count;
3961 else
3962 return count_sections;
3963}
3964
3965#define MEP_ATTRIBUTES(decl) \
3966 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3967 : DECL_ATTRIBUTES (decl) \
3968 ? (DECL_ATTRIBUTES (decl)) \
3969 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3970
3971static tree
3972mep_validate_near_far (tree *node, tree name, tree args,
3973 int flags ATTRIBUTE_UNUSED, bool *no_add)
3974{
3975 if (TREE_CODE (*node) != VAR_DECL
3976 && TREE_CODE (*node) != FUNCTION_DECL
3977 && TREE_CODE (*node) != METHOD_TYPE
3978 && TREE_CODE (*node) != POINTER_TYPE
3979 && TREE_CODE (*node) != TYPE_DECL)
3980 {
3981 warning (0, "%qE attribute only applies to variables and functions",
3982 name);
3983 *no_add = true;
3984 }
3985 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3986 {
3987 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3988 {
3989 warning (0, "address region attributes not allowed with auto storage class");
3990 *no_add = true;
3991 }
3992 /* Ignore storage attribute of pointed to variable: char __far * x; */
3993 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3994 {
3995 warning (0, "address region attributes on pointed-to types ignored");
3996 *no_add = true;
3997 }
3998 }
3999 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
4000 {
4001 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4002 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
4003 DECL_ATTRIBUTES (*node) = NULL_TREE;
4004 }
4005 return NULL_TREE;
4006}
4007
4008static tree
4009mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4010 int flags ATTRIBUTE_UNUSED, bool *no_add)
4011{
4012 if (TREE_CODE (*node) != FUNCTION_DECL
4013 && TREE_CODE (*node) != METHOD_TYPE)
4014 {
4015 warning (0, "%qE attribute only applies to functions", name);
4016 *no_add = true;
4017 }
4018 return NULL_TREE;
4019}
4020
4021static tree
4022mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4023 int flags ATTRIBUTE_UNUSED, bool *no_add)
4024{
4025 tree function_type;
4026
4027 if (TREE_CODE (*node) != FUNCTION_DECL)
4028 {
4029 warning (0, "%qE attribute only applies to functions", name);
4030 *no_add = true;
4031 return NULL_TREE;
4032 }
4033
4034 if (DECL_DECLARED_INLINE_P (*node))
4035 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
4036 DECL_UNINLINABLE (*node) = 1;
4037
4038 function_type = TREE_TYPE (*node);
4039
4040 if (TREE_TYPE (function_type) != void_type_node)
4041 error ("interrupt function must have return type of void");
4042
f4da8dce 4043 if (prototype_p (function_type)
7acf4da6
DD
4044 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4045 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4046 error ("interrupt function must have no arguments");
4047
4048 return NULL_TREE;
4049}
4050
4051static tree
4052mep_validate_io_cb (tree *node, tree name, tree args,
4053 int flags ATTRIBUTE_UNUSED, bool *no_add)
4054{
4055 if (TREE_CODE (*node) != VAR_DECL)
4056 {
4057 warning (0, "%qE attribute only applies to variables", name);
4058 *no_add = true;
4059 }
4060
4061 if (args != NULL_TREE)
4062 {
4063 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4064 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4065 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4066 {
4067 warning (0, "%qE attribute allows only an integer constant argument",
4068 name);
4069 *no_add = true;
4070 }
4071 }
4072
4073 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4074 TREE_THIS_VOLATILE (*node) = 1;
4075
4076 return NULL_TREE;
4077}
4078
4079static tree
4080mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4081 int flags ATTRIBUTE_UNUSED, bool *no_add)
4082{
4083 if (TREE_CODE (*node) != FUNCTION_TYPE
4084 && TREE_CODE (*node) != FUNCTION_DECL
4085 && TREE_CODE (*node) != METHOD_TYPE
4086 && TREE_CODE (*node) != FIELD_DECL
4087 && TREE_CODE (*node) != TYPE_DECL)
4088 {
4089 static int gave_pointer_note = 0;
4090 static int gave_array_note = 0;
4091 static const char * given_type = NULL;
4092
4093 given_type = tree_code_name[TREE_CODE (*node)];
4094 if (TREE_CODE (*node) == POINTER_TYPE)
4095 given_type = "pointers";
4096 if (TREE_CODE (*node) == ARRAY_TYPE)
4097 given_type = "arrays";
4098
4099 if (given_type)
4100 warning (0, "%qE attribute only applies to functions, not %s",
4101 name, given_type);
4102 else
4103 warning (0, "%qE attribute only applies to functions",
4104 name);
4105 *no_add = true;
4106
4107 if (TREE_CODE (*node) == POINTER_TYPE
4108 && !gave_pointer_note)
4109 {
6d9e7c41
PT
4110 inform (input_location,
4111 "to describe a pointer to a VLIW function, use syntax like this:\n%s",
4112 " typedef int (__vliw *vfuncptr) ();");
7acf4da6
DD
4113 gave_pointer_note = 1;
4114 }
4115
4116 if (TREE_CODE (*node) == ARRAY_TYPE
4117 && !gave_array_note)
4118 {
6d9e7c41
PT
4119 inform (input_location,
4120 "to describe an array of VLIW function pointers, use syntax like this:\n%s",
4121 " typedef int (__vliw *vfuncptr[]) ();");
7acf4da6
DD
4122 gave_array_note = 1;
4123 }
4124 }
4125 if (!TARGET_VLIW)
4126 error ("VLIW functions are not allowed without a VLIW configuration");
4127 return NULL_TREE;
4128}
4129
4130static const struct attribute_spec mep_attribute_table[11] =
4131{
62d784f7
KT
4132 /* name min max decl type func handler
4133 affects_type_identity */
4134 { "based", 0, 0, false, false, false, mep_validate_based_tiny, false },
4135 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny, false },
4136 { "near", 0, 0, false, false, false, mep_validate_near_far, false },
4137 { "far", 0, 0, false, false, false, mep_validate_near_far, false },
4138 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt,
4139 false },
4140 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt, false },
4141 { "io", 0, 1, false, false, false, mep_validate_io_cb, false },
4142 { "cb", 0, 1, false, false, false, mep_validate_io_cb, false },
4143 { "vliw", 0, 0, false, true, false, mep_validate_vliw, false },
4144 { NULL, 0, 0, false, false, false, NULL, false }
7acf4da6
DD
4145};
4146
4147static bool
4148mep_function_attribute_inlinable_p (const_tree callee)
4149{
4150 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4151 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4152 return (lookup_attribute ("disinterrupt", attrs) == 0
4153 && lookup_attribute ("interrupt", attrs) == 0);
4154}
4155
ae30c1fa 4156static bool
5cec9f59 4157mep_can_inline_p (tree caller, tree callee)
ae30c1fa
DD
4158{
4159 if (TREE_CODE (callee) == ADDR_EXPR)
4160 callee = TREE_OPERAND (callee, 0);
4161
82e45095 4162 if (!mep_vliw_function_p (caller)
ae30c1fa
DD
4163 && mep_vliw_function_p (callee))
4164 {
82e45095 4165 return false;
ae30c1fa 4166 }
82e45095 4167 return true;
ae30c1fa
DD
4168}
4169
7acf4da6
DD
4170#define FUNC_CALL 1
4171#define FUNC_DISINTERRUPT 2
4172
4173
4174struct GTY(()) pragma_entry {
4175 int used;
4176 int flag;
4177 const char *funcname;
4178};
4179typedef struct pragma_entry pragma_entry;
4180
4181/* Hash table of farcall-tagged sections. */
4182static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4183
4184static int
4185pragma_entry_eq (const void *p1, const void *p2)
4186{
4187 const pragma_entry *old = (const pragma_entry *) p1;
4188 const char *new_name = (const char *) p2;
4189
4190 return strcmp (old->funcname, new_name) == 0;
4191}
4192
4193static hashval_t
4194pragma_entry_hash (const void *p)
4195{
4196 const pragma_entry *old = (const pragma_entry *) p;
4197 return htab_hash_string (old->funcname);
4198}
4199
4200static void
4201mep_note_pragma_flag (const char *funcname, int flag)
4202{
4203 pragma_entry **slot;
4204
4205 if (!pragma_htab)
4206 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4207 pragma_entry_eq, NULL);
4208
4209 slot = (pragma_entry **)
4210 htab_find_slot_with_hash (pragma_htab, funcname,
4211 htab_hash_string (funcname), INSERT);
4212
4213 if (!*slot)
4214 {
a9429e29 4215 *slot = ggc_alloc_pragma_entry ();
7acf4da6
DD
4216 (*slot)->flag = 0;
4217 (*slot)->used = 0;
4218 (*slot)->funcname = ggc_strdup (funcname);
4219 }
4220 (*slot)->flag |= flag;
4221}
4222
4223static bool
4224mep_lookup_pragma_flag (const char *funcname, int flag)
4225{
4226 pragma_entry **slot;
4227
4228 if (!pragma_htab)
4229 return false;
4230
4231 if (funcname[0] == '@' && funcname[2] == '.')
4232 funcname += 3;
4233
4234 slot = (pragma_entry **)
4235 htab_find_slot_with_hash (pragma_htab, funcname,
4236 htab_hash_string (funcname), NO_INSERT);
4237 if (slot && *slot && ((*slot)->flag & flag))
4238 {
4239 (*slot)->used |= flag;
4240 return true;
4241 }
4242 return false;
4243}
4244
4245bool
4246mep_lookup_pragma_call (const char *funcname)
4247{
4248 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4249}
4250
4251void
4252mep_note_pragma_call (const char *funcname)
4253{
4254 mep_note_pragma_flag (funcname, FUNC_CALL);
4255}
4256
4257bool
4258mep_lookup_pragma_disinterrupt (const char *funcname)
4259{
4260 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4261}
4262
4263void
4264mep_note_pragma_disinterrupt (const char *funcname)
4265{
4266 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4267}
4268
4269static int
4270note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4271{
4272 const pragma_entry *d = (const pragma_entry *)(*slot);
4273
4274 if ((d->flag & FUNC_DISINTERRUPT)
4275 && !(d->used & FUNC_DISINTERRUPT))
4276 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4277 return 1;
4278}
4279
4280void
4281mep_file_cleanups (void)
4282{
4283 if (pragma_htab)
4284 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4285}
c28883e6
DD
4286
/* These three functions provide a bridge between the pragmas that
   affect register classes, and the functions that maintain them.  We
   can't call those functions directly as pragma handling is part of
   the front end and doesn't have direct access to them.  */
4291
/* Pragma-handler bridge to save_register_info.  */

void
mep_save_register_info (void)
{
  save_register_info ();
}
4297
/* Pragma-handler bridge to reinit_regs.  */

void
mep_reinit_regs (void)
{
  reinit_regs ();
}
4303
/* Pragma-handler bridge to init_regs.  */

void
mep_init_regs (void)
{
  init_regs ();
}
4309
7acf4da6
DD
4310
4311
4312static int
4313mep_attrlist_to_encoding (tree list, tree decl)
4314{
4315 if (mep_multiple_address_regions (list, false) > 1)
4316 {
4317 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4318 TREE_PURPOSE (TREE_CHAIN (list)),
4319 DECL_NAME (decl),
4320 DECL_SOURCE_LINE (decl));
4321 TREE_CHAIN (list) = NULL_TREE;
4322 }
4323
4324 while (list)
4325 {
4326 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4327 return 'b';
4328 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4329 return 't';
4330 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4331 return 'n';
4332 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4333 return 'f';
4334 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4335 {
4336 if (TREE_VALUE (list)
4337 && TREE_VALUE (TREE_VALUE (list))
4338 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4339 {
4340 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4341 if (location >= 0
4342 && location <= 0x1000000)
4343 return 'i';
4344 }
4345 return 'I';
4346 }
4347 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4348 return 'c';
4349 list = TREE_CHAIN (list);
4350 }
4351 if (TARGET_TF
4352 && TREE_CODE (decl) == FUNCTION_DECL
4353 && DECL_SECTION_NAME (decl) == 0)
4354 return 'f';
4355 return 0;
4356}
4357
4358static int
4359mep_comp_type_attributes (const_tree t1, const_tree t2)
4360{
4361 int vliw1, vliw2;
4362
4363 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4364 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4365
4366 if (vliw1 != vliw2)
4367 return 0;
4368
4369 return 1;
4370}
4371
4372static void
4373mep_insert_attributes (tree decl, tree *attributes)
4374{
4375 int size;
4376 const char *secname = 0;
4377 tree attrib, attrlist;
4378 char encoding;
4379
4380 if (TREE_CODE (decl) == FUNCTION_DECL)
4381 {
4382 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4383
4384 if (mep_lookup_pragma_disinterrupt (funcname))
4385 {
4386 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4387 *attributes = chainon (*attributes, attrib);
4388 }
4389 }
4390
4391 if (TREE_CODE (decl) != VAR_DECL
4392 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4393 return;
4394
4395 if (TREE_READONLY (decl) && TARGET_DC)
4396 /* -mdc means that const variables default to the near section,
4397 regardless of the size cutoff. */
4398 return;
4399
4400 /* User specified an attribute, so override the default.
4401 Ignore storage attribute of pointed to variable. char __far * x; */
4402 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4403 {
4404 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4405 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4406 else if (DECL_ATTRIBUTES (decl) && *attributes)
4407 DECL_ATTRIBUTES (decl) = NULL_TREE;
4408 }
4409
4410 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4411 encoding = mep_attrlist_to_encoding (attrlist, decl);
4412 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4413 {
4414 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4415 encoding = mep_attrlist_to_encoding (attrlist, decl);
4416 }
4417 if (encoding)
4418 {
4419 /* This means that the declaration has a specific section
4420 attribute, so we should not apply the default rules. */
4421
4422 if (encoding == 'i' || encoding == 'I')
4423 {
4424 tree attr = lookup_attribute ("io", attrlist);
4425 if (attr
4426 && TREE_VALUE (attr)
4427 && TREE_VALUE (TREE_VALUE(attr)))
4428 {
4429 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4430 static tree previous_value = 0;
4431 static int previous_location = 0;
4432 static tree previous_name = 0;
4433
4434 /* We take advantage of the fact that gcc will reuse the
4435 same tree pointer when applying an attribute to a
4436 list of decls, but produce a new tree for attributes
4437 on separate source lines, even when they're textually
4438 identical. This is the behavior we want. */
4439 if (TREE_VALUE (attr) == previous_value
4440 && location == previous_location)
4441 {
4442 warning(0, "__io address 0x%x is the same for %qE and %qE",
4443 location, previous_name, DECL_NAME (decl));
4444 }
4445 previous_name = DECL_NAME (decl);
4446 previous_location = location;
4447 previous_value = TREE_VALUE (attr);
4448 }
4449 }
4450 return;
4451 }
4452
4453
4454 /* Declarations of arrays can change size. Don't trust them. */
4455 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4456 size = 0;
4457 else
4458 size = int_size_in_bytes (TREE_TYPE (decl));
4459
4460 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4461 {
4462 if (TREE_PUBLIC (decl)
4463 || DECL_EXTERNAL (decl)
4464 || TREE_STATIC (decl))
4465 {
4466 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4467 int key = 0;
4468
4469 while (*name)
4470 key += *name++;
4471
4472 switch (key & 3)
4473 {
4474 case 0:
4475 secname = "based";
4476 break;
4477 case 1:
4478 secname = "tiny";
4479 break;
4480 case 2:
4481 secname = "far";
4482 break;
4483 default:
4484 ;
4485 }
4486 }
4487 }
4488 else
4489 {
4490 if (size <= mep_based_cutoff && size > 0)
4491 secname = "based";
4492 else if (size <= mep_tiny_cutoff && size > 0)
4493 secname = "tiny";
4494 else if (TARGET_L)
4495 secname = "far";
4496 }
4497
4498 if (mep_const_section && TREE_READONLY (decl))
4499 {
4500 if (strcmp (mep_const_section, "tiny") == 0)
4501 secname = "tiny";
4502 else if (strcmp (mep_const_section, "near") == 0)
4503 return;
4504 else if (strcmp (mep_const_section, "far") == 0)
4505 secname = "far";
4506 }
4507
4508 if (!secname)
4509 return;
4510
4511 if (!mep_multiple_address_regions (*attributes, true)
4512 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4513 {
4514 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4515
4516 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4517 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4518 and mep_validate_based_tiny. */
4519 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4520 }
4521}
4522
4523static void
4524mep_encode_section_info (tree decl, rtx rtl, int first)
4525{
4526 rtx rtlname;
4527 const char *oldname;
4528 const char *secname;
4529 char encoding;
4530 char *newname;
4531 tree idp;
4532 int maxsize;
4533 tree type;
4534 tree mep_attributes;
4535
4536 if (! first)
4537 return;
4538
4539 if (TREE_CODE (decl) != VAR_DECL
4540 && TREE_CODE (decl) != FUNCTION_DECL)
4541 return;
4542
4543 rtlname = XEXP (rtl, 0);
4544 if (GET_CODE (rtlname) == SYMBOL_REF)
4545 oldname = XSTR (rtlname, 0);
4546 else if (GET_CODE (rtlname) == MEM
4547 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4548 oldname = XSTR (XEXP (rtlname, 0), 0);
4549 else
4550 gcc_unreachable ();
4551
4552 type = TREE_TYPE (decl);
4553 if (type == error_mark_node)
4554 return;
4555 mep_attributes = MEP_ATTRIBUTES (decl);
4556
4557 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4558
4559 if (encoding)
4560 {
4561 newname = (char *) alloca (strlen (oldname) + 4);
4562 sprintf (newname, "@%c.%s", encoding, oldname);
4563 idp = get_identifier (newname);
4564 XEXP (rtl, 0) =
4565 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
1c6679e2
NC
4566 SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
4567 SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
7acf4da6
DD
4568
4569 switch (encoding)
4570 {
4571 case 'b':
4572 maxsize = 128;
4573 secname = "based";
4574 break;
4575 case 't':
4576 maxsize = 65536;
4577 secname = "tiny";
4578 break;
4579 case 'n':
4580 maxsize = 0x1000000;
4581 secname = "near";
4582 break;
4583 default:
4584 maxsize = 0;
4585 secname = 0;
4586 break;
4587 }
4588 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4589 {
4590 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4591 oldname,
4592 (long) int_size_in_bytes (TREE_TYPE (decl)),
4593 secname,
4594 maxsize);
4595 }
4596 }
7acf4da6
DD
4597}
4598
/* Implement TARGET_STRIP_NAME_ENCODING.  Remove any number of leading
   '*' user-label prefixes and "@<c>." MeP section-encoding prefixes
   (added by mep_encode_section_info) from SYM and return a pointer to
   the bare name.

   Fix: test sym[1] before reading sym[2], so a string that ends with
   a bare '@' no longer triggers a one-byte out-of-bounds read past
   its terminating NUL.  */
const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      else if (*sym == '@' && sym[1] != '\0' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4612
/* Implement TARGET_ASM_SELECT_SECTION.  Pick the output section for
   DECL based on the "@<c>." encoding prefixed onto its assembler name
   by mep_encode_section_info, falling back to the conventional
   text/rodata/data choice for unencoded symbols.  */
static section *
mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
		    unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  int readonly = 1;
  int encoding;

  /* Work out whether DECL is eligible for a read-only section.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Extract the MeP encoding letter, if present.  */
      if (name[0] == '@' && name[2] == '.')
	encoding = name[1];
      else
	encoding = 0;

      if (flag_function_sections || DECL_ONE_ONLY (decl))
	mep_unique_section (decl, 0);
      else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	{
	  /* VLIW functions get dedicated sections so the assembler can
	     be switched into VLIW mode for them.  */
	  if (encoding == 'f')
	    return vftext_section;
	  else
	    return vtext_section;
	}
      else if (encoding == 'f')
	return ftext_section;
      else
	return text_section;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 'b':
	    return based_section;

	  case 't':
	    if (readonly)
	      return srodata_section;
	    if (DECL_INITIAL (decl))
	      return sdata_section;
	    return tinybss_section;

	  case 'f':
	    if (readonly)
	      return frodata_section;
	    return far_section;

	  case 'i':
	  case 'I':
	    /* io/cb variables are emitted as absolute symbols (see
	       mep_output_aligned_common), never as initialized data.  */
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<io%> must be uninitialized", decl);
	    return data_section;

	  case 'c':
	    error_at (DECL_SOURCE_LOCATION (decl),
		      "variable %D of type %<cb%> must be uninitialized", decl);
	    return data_section;
	  }
    }

  if (readonly)
    return readonly_data_section;

  return data_section;
}
4703
/* Implement TARGET_ASM_UNIQUE_SECTION.  Build a per-decl section name,
   honoring any MeP "@<c>." symbol encoding as well as one-only
   (linkonce/comdat) linkage.  */
static void
mep_unique_section (tree decl, int reloc)
{
  /* Indexed by [section-kind][DECL_ONE_ONLY]; the first index must
     stay in sync with the `sec' values assigned below.  */
  static const char *prefixes[][2] =
  {
    { ".text.", ".gnu.linkonce.t." },
    { ".rodata.", ".gnu.linkonce.r." },
    { ".data.", ".gnu.linkonce.d." },
    { ".based.", ".gnu.linkonce.based." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".far.", ".gnu.linkonce.far." },
    { ".ftext.", ".gnu.linkonce.ft." },
    { ".frodata.", ".gnu.linkonce.frd." },
    { ".srodata.", ".gnu.linkonce.srd." },
    { ".vtext.", ".gnu.linkonce.v." },
    { ".vftext.", ".gnu.linkonce.vf." }
  };
  int sec = 2; /* .data */
  int len;
  const char *name, *prefix;
  char *string;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  /* Prefer the (possibly encoded) name already attached to the rtl.  */
  if (DECL_RTL (decl))
    name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
	sec = 9; /* .vtext */
      else
	sec = 0; /* .text */
    }
  else if (decl_readonly_section (decl, reloc))
    sec = 1; /* .rodata */

  /* Refine the base choice using the encoding letter, then strip the
     "@<c>." prefix from the symbol name.  */
  if (name[0] == '@' && name[2] == '.')
    {
      switch (name[1])
	{
	case 'b':
	  sec = 3; /* .based */
	  break;
	case 't':
	  if (sec == 1)
	    sec = 8; /* .srodata */
	  else
	    sec = 4; /* .sdata */
	  break;
	case 'f':
	  if (sec == 0)
	    sec = 6; /* .ftext */
	  else if (sec == 9)
	    sec = 10; /* .vftext */
	  else if (sec == 1)
	    sec = 7; /* .frodata */
	  else
	    sec = 5; /* .far. */
	  break;
	}
      name += 3;
    }

  prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
  len = strlen (name) + strlen (prefix);
  string = (char *) alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
4775
4776/* Given a decl, a section name, and whether the decl initializer
4777 has relocs, choose attributes for the section. */
4778
4779#define SECTION_MEP_VLIW SECTION_MACH_DEP
4780
4781static unsigned int
4782mep_section_type_flags (tree decl, const char *name, int reloc)
4783{
4784 unsigned int flags = default_section_type_flags (decl, name, reloc);
4785
4786 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4787 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4788 flags |= SECTION_MEP_VLIW;
4789
4790 return flags;
4791}
4792
4793/* Switch to an arbitrary section NAME with attributes as specified
4794 by FLAGS. ALIGN specifies any known alignment requirements for
4795 the section; 0 if the default should be used.
4796
4797 Differs from the standard ELF version only in support of VLIW mode. */
4798
4799static void
4800mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4801{
4802 char flagchars[8], *f = flagchars;
4803 const char *type;
4804
4805 if (!(flags & SECTION_DEBUG))
4806 *f++ = 'a';
4807 if (flags & SECTION_WRITE)
4808 *f++ = 'w';
4809 if (flags & SECTION_CODE)
4810 *f++ = 'x';
4811 if (flags & SECTION_SMALL)
4812 *f++ = 's';
4813 if (flags & SECTION_MEP_VLIW)
4814 *f++ = 'v';
4815 *f = '\0';
4816
4817 if (flags & SECTION_BSS)
4818 type = "nobits";
4819 else
4820 type = "progbits";
4821
4822 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4823 name, flagchars, type);
4824
4825 if (flags & SECTION_CODE)
4826 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4827 asm_out_file);
4828}
4829
4830void
4831mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4832 int size, int align, int global)
4833{
4834 /* We intentionally don't use mep_section_tag() here. */
4835 if (name[0] == '@'
4836 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4837 && name[2] == '.')
4838 {
4839 int location = -1;
4840 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4841 DECL_ATTRIBUTES (decl));
4842 if (attr
4843 && TREE_VALUE (attr)
4844 && TREE_VALUE (TREE_VALUE(attr)))
4845 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4846 if (location == -1)
4847 return;
4848 if (global)
4849 {
4850 fprintf (stream, "\t.globl\t");
4851 assemble_name (stream, name);
4852 fprintf (stream, "\n");
4853 }
4854 assemble_name (stream, name);
4855 fprintf (stream, " = %d\n", location);
4856 return;
4857 }
4858 if (name[0] == '@' && name[2] == '.')
4859 {
4860 const char *sec = 0;
4861 switch (name[1])
4862 {
4863 case 'b':
4864 switch_to_section (based_section);
4865 sec = ".based";
4866 break;
4867 case 't':
4868 switch_to_section (tinybss_section);
4869 sec = ".sbss";
4870 break;
4871 case 'f':
4872 switch_to_section (farbss_section);
4873 sec = ".farbss";
4874 break;
4875 }
4876 if (sec)
4877 {
4878 const char *name2;
4879 int p2align = 0;
4880
4881 while (align > BITS_PER_UNIT)
4882 {
4883 align /= 2;
4884 p2align ++;
4885 }
9018b19c 4886 name2 = targetm.strip_name_encoding (name);
7acf4da6
DD
4887 if (global)
4888 fprintf (stream, "\t.globl\t%s\n", name2);
4889 fprintf (stream, "\t.p2align %d\n", p2align);
4890 fprintf (stream, "\t.type\t%s,@object\n", name2);
4891 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4892 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4893 return;
4894 }
4895 }
4896
4897 if (!global)
4898 {
4899 fprintf (stream, "\t.local\t");
4900 assemble_name (stream, name);
4901 fprintf (stream, "\n");
4902 }
4903 fprintf (stream, "\t.comm\t");
4904 assemble_name (stream, name);
4905 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4906}
4907
4908/* Trampolines. */
4909
87138d8d
RH
4910static void
4911mep_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
7acf4da6 4912{
87138d8d
RH
4913 rtx addr = XEXP (m_tramp, 0);
4914 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
4915
7acf4da6
DD
4916 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4917 LCT_NORMAL, VOIDmode, 3,
4918 addr, Pmode,
4919 fnaddr, Pmode,
4920 static_chain, Pmode);
4921}
4922
/* Experimental Reorg.  */

/* Return true if REG (a hard register rtx, or NULL to search for any
   MEM instead) is mentioned anywhere within the rtl expression IN.
   If MODES_TOO is nonzero, a register reference only counts when its
   mode matches REG's mode.  */
static bool
mep_mentioned_p (rtx in,
		 rtx reg, /* NULL for mem */
		 int modes_too) /* if nonzero, modes must match also.  */
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return false;
  if (reg && GET_CODE (reg) != REG)
    return false;

  if (GET_CODE (in) == LABEL_REF)
    return (reg == 0);

  code = GET_CODE (in);

  switch (code)
    {
    case MEM:
      /* Searching for a register: look inside the address.
	 Searching for a MEM: this is a hit.  */
      if (reg)
	return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
      return true;

    case REG:
      if (!reg)
	return false;
      if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
	return false;
      return (REGNO (in) == REGNO (reg));

    /* Leaf codes that can never mention REG or a MEM.  */
    case SCRATCH:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
      return false;

    default:
      break;
    }

  /* Set's source should be read-only.  */
  if (code == SET && !reg)
    return mep_mentioned_p (SET_DEST (in), reg, modes_too);

  /* Recurse over the operands of any other rtx.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	    if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
	      return true;
	}
      else if (fmt[i] == 'e'
	       && mep_mentioned_p (XEXP (in, i), reg, modes_too))
	return true;
    }
  return false;
}
4990
4991#define EXPERIMENTAL_REGMOVE_REORG 1
4992
4993#if EXPERIMENTAL_REGMOVE_REORG
4994
4995static int
4996mep_compatible_reg_class (int r1, int r2)
4997{
4998 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4999 return 1;
5000 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
5001 return 1;
5002 return 0;
5003}
5004
5005static void
5006mep_reorg_regmove (rtx insns)
5007{
5008 rtx insn, next, pat, follow, *where;
5009 int count = 0, done = 0, replace, before = 0;
5010
5011 if (dump_file)
5012 for (insn = insns; insn; insn = NEXT_INSN (insn))
5013 if (GET_CODE (insn) == INSN)
5014 before++;
5015
5016 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
5017 set that uses the r2 and r2 dies there. We replace r2 with r1
5018 and see if it's still a valid insn. If so, delete the first set.
5019 Copied from reorg.c. */
5020
5021 while (!done)
5022 {
5023 done = 1;
5024 for (insn = insns; insn; insn = next)
5025 {
5026 next = NEXT_INSN (insn);
5027 if (GET_CODE (insn) != INSN)
5028 continue;
5029 pat = PATTERN (insn);
5030
5031 replace = 0;
5032
5033 if (GET_CODE (pat) == SET
5034 && GET_CODE (SET_SRC (pat)) == REG
5035 && GET_CODE (SET_DEST (pat)) == REG
5036 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
5037 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
5038 {
5039 follow = next_nonnote_insn (insn);
5040 if (dump_file)
5041 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
5042
5043 while (follow && GET_CODE (follow) == INSN
5044 && GET_CODE (PATTERN (follow)) == SET
5045 && !dead_or_set_p (follow, SET_SRC (pat))
5046 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5047 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5048 {
5049 if (dump_file)
5050 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5051 follow = next_nonnote_insn (follow);
5052 }
5053
5054 if (dump_file)
5055 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5056 if (follow && GET_CODE (follow) == INSN
5057 && GET_CODE (PATTERN (follow)) == SET
5058 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5059 {
5060 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5061 {
5062 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5063 {
5064 replace = 1;
5065 where = & SET_SRC (PATTERN (follow));
5066 }
5067 }
5068 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5069 {
5070 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5071 {
5072 replace = 1;
5073 where = & PATTERN (follow);
5074 }
5075 }
5076 }
5077 }
5078
5079 /* If so, follow is the corresponding insn */
5080 if (replace)
5081 {
5082 if (dump_file)
5083 {
5084 rtx x;
5085
5086 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5087 for (x = insn; x ;x = NEXT_INSN (x))
5088 {
5089 print_rtl_single (dump_file, x);
5090 if (x == follow)
5091 break;
5092 fprintf (dump_file, "\n");
5093 }
5094 }
5095
5096 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5097 follow, where))
5098 {
5099 count ++;
5100 next = delete_insn (insn);
5101 if (dump_file)
5102 {
5103 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5104 print_rtl_single (dump_file, follow);
5105 }
5106 done = 0;
5107 }
5108 }
5109 }
5110 }
5111
5112 if (dump_file)
5113 {
5114 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5115 fprintf (dump_file, "=====\n");
5116 }
5117}
5118#endif
5119
5120
/* Figure out where to put LABEL, which is the label for a repeat loop.
   If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
   the loop ends just before LAST_INSN.  If SHARED, insns other than the
   "repeat" might use LABEL to jump to the loop's continuation point.

   The MeP repeat instruction needs exactly two insns after the repeat
   label (the loop "epilogue"); this routine scans backwards for up to
   two eligible insns, inserts the label before them, and pads with
   nops if fewer than two were found.

   Return the last instruction in the adjusted loop.  */

static rtx
mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
			      bool shared)
{
  rtx next, prev;
  int count = 0, code, icode;

  if (dump_file)
    fprintf (dump_file, "considering end of repeat loop at insn %d\n",
	     INSN_UID (last_insn));

  /* Set PREV to the last insn in the loop.  */
  prev = last_insn;
  if (!including)
    prev = PREV_INSN (prev);

  /* Set NEXT to the next insn after the repeat label.  */
  next = last_insn;
  if (!shared)
    /* Scan backwards for up to two insns that may be moved into the
       repeat epilogue (i.e. placed after the label).  */
    while (prev != 0)
      {
	code = GET_CODE (prev);
	if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
	  break;

	if (INSN_P (prev))
	  {
	    if (GET_CODE (PATTERN (prev)) == SEQUENCE)
	      prev = XVECEXP (PATTERN (prev), 0, 1);

	    /* Other insns that should not be in the last two opcodes.  */
	    icode = recog_memoized (prev);
	    if (icode < 0
		|| icode == CODE_FOR_repeat
		|| icode == CODE_FOR_erepeat
		|| get_attr_may_trap (prev) == MAY_TRAP_YES)
	      break;

	    /* That leaves JUMP_INSN and INSN.  It will have BImode if it
	       is the second instruction in a VLIW bundle.  In that case,
	       loop again: if the first instruction also satisfies the
	       conditions above then we will reach here again and put
	       both of them into the repeat epilogue.  Otherwise both
	       should remain outside.  */
	    if (GET_MODE (prev) != BImode)
	      {
		count++;
		next = prev;
		if (dump_file)
		  print_rtl_single (dump_file, next);
		if (count == 2)
		  break;
	      }
	  }
	prev = PREV_INSN (prev);
      }

  /* See if we're adding the label immediately after the repeat insn.
     If so, we need to separate them with a nop.  */
  prev = prev_real_insn (next);
  if (prev)
    switch (recog_memoized (prev))
      {
      case CODE_FOR_repeat:
      case CODE_FOR_erepeat:
	if (dump_file)
	  fprintf (dump_file, "Adding nop inside loop\n");
	emit_insn_before (gen_nop (), next);
	break;

      default:
	break;
      }

  /* Insert the label.  */
  emit_label_before (label, next);

  /* Insert the nops.  */
  if (dump_file && count < 2)
    fprintf (dump_file, "Adding %d nop%s\n\n",
	     2 - count, count == 1 ? "" : "s");

  for (; count < 2; count++)
    if (including)
      last_insn = emit_insn_after (gen_nop (), last_insn);
    else
      emit_insn_before (gen_nop (), last_insn);

  return last_insn;
}
5218
5219
5220void
5221mep_emit_doloop (rtx *operands, int is_end)
5222{
5223 rtx tag;
5224
5225 if (cfun->machine->doloop_tags == 0
5226 || cfun->machine->doloop_tag_from_end == is_end)
5227 {
5228 cfun->machine->doloop_tags++;
5229 cfun->machine->doloop_tag_from_end = is_end;
5230 }
5231
5232 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5233 if (is_end)
5234 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5235 else
5236 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5237}
5238
5239
5240/* Code for converting doloop_begins and doloop_ends into valid
5241 MeP instructions. A doloop_begin is just a placeholder:
5242
5243 $count = unspec ($count)
5244
5245 where $count is initially the number of iterations - 1.
5246 doloop_end has the form:
5247
5248 if ($count-- == 0) goto label
5249
5250 The counter variable is private to the doloop insns, nothing else
5251 relies on its value.
5252
5253 There are three cases, in decreasing order of preference:
5254
5255 1. A loop has exactly one doloop_begin and one doloop_end.
5256 The doloop_end branches to the first instruction after
5257 the doloop_begin.
5258
5259 In this case we can replace the doloop_begin with a repeat
5260 instruction and remove the doloop_end. I.e.:
5261
5262 $count1 = unspec ($count1)
5263 label:
5264 ...
5265 insn1
5266 insn2
5267 if ($count2-- == 0) goto label
5268
5269 becomes:
5270
5271 repeat $count1,repeat_label
5272 label:
5273 ...
5274 repeat_label:
5275 insn1
5276 insn2
5277 # end repeat
5278
5279 2. As for (1), except there are several doloop_ends. One of them
5280 (call it X) falls through to a label L. All the others fall
5281 through to branches to L.
5282
5283 In this case, we remove X and replace the other doloop_ends
5284 with branches to the repeat label. For example:
5285
5286 $count1 = unspec ($count1)
5287 start:
5288 ...
5289 if ($count2-- == 0) goto label
5290 end:
5291 ...
5292 if ($count3-- == 0) goto label
5293 goto end
5294
5295 becomes:
5296
5297 repeat $count1,repeat_label
5298 start:
5299 ...
5300 repeat_label:
5301 nop
5302 nop
5303 # end repeat
5304 end:
5305 ...
5306 goto repeat_label
5307
5308 3. The fallback case. Replace doloop_begins with:
5309
5310 $count = $count + 1
5311
5312 Replace doloop_ends with the equivalent of:
5313
5314 $count = $count - 1
5315 if ($count == 0) goto label
5316
5317 Note that this might need a scratch register if $count
5318 is stored in memory. */
5319
/* A structure describing one doloop_begin.  */
struct mep_doloop_begin {
  /* The next doloop_begin with the same tag.  */
  struct mep_doloop_begin *next;

  /* The instruction itself.  */
  rtx insn;

  /* The initial counter value.  This is known to be a general register.  */
  rtx counter;
};

/* A structure describing a doloop_end.  */
struct mep_doloop_end {
  /* The next doloop_end with the same loop tag.  */
  struct mep_doloop_end *next;

  /* The instruction itself.  */
  rtx insn;

  /* The first instruction after INSN when the branch isn't taken.  */
  rtx fallthrough;

  /* The location of the counter value.  Since doloop_end_internal is a
     jump instruction, it has to allow the counter to be stored anywhere
     (any non-fixed register or memory location).  */
  rtx counter;

  /* The target label (the place where the insn branches when the counter
     isn't zero).  */
  rtx label;

  /* A scratch register.  Only available when COUNTER isn't stored
     in a general register.  */
  rtx scratch;
};


/* One do-while loop, as collected by mep_reorg_repeat from the insns
   tagged by mep_emit_doloop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5367
5368
5369/* Return true if LOOP can be converted into repeat/repeat_end form
5370 (that is, if it matches cases (1) or (2) above). */
5371
5372static bool
5373mep_repeat_loop_p (struct mep_doloop *loop)
5374{
5375 struct mep_doloop_end *end;
5376 rtx fallthrough;
5377
5378 /* There must be exactly one doloop_begin and at least one doloop_end. */
5379 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5380 return false;
5381
5382 /* The first doloop_end (X) must branch back to the insn after
5383 the doloop_begin. */
5384 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5385 return false;
5386
5387 /* All the other doloop_ends must branch to the same place as X.
5388 When the branch isn't taken, they must jump to the instruction
5389 after X. */
5390 fallthrough = loop->end->fallthrough;
5391 for (end = loop->end->next; end != 0; end = end->next)
5392 if (end->label != loop->end->label
5393 || !simplejump_p (end->fallthrough)
5394 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5395 return false;
5396
5397 return true;
5398}
5399
5400
/* The main repeat reorg function.  See comment above for details.
   Groups the tagged doloop_begin/doloop_end placeholders by loop,
   converts each loop either to a hardware repeat (cases 1 and 2) or
   to an explicit decrement-and-branch sequence (case 3).  */

static void
mep_reorg_repeat (rtx insns)
{
  rtx insn;
  struct mep_doloop *loops, *loop;
  struct mep_doloop_begin *begin;
  struct mep_doloop_end *end;

  /* Quick exit if we haven't created any loops.  */
  if (cfun->machine->doloop_tags == 0)
    return;

  /* Create an array of mep_doloop structures.  */
  loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
  memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);

  /* Search the function for do-while insns and group them by loop tag.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      switch (recog_memoized (insn))
	{
	case CODE_FOR_doloop_begin_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
	  begin->next = loop->begin;
	  begin->insn = insn;
	  begin->counter = recog_data.operand[0];

	  loop->begin = begin;
	  break;

	case CODE_FOR_doloop_end_internal:
	  insn_extract (insn);
	  loop = &loops[INTVAL (recog_data.operand[2])];

	  end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
	  end->insn = insn;
	  end->fallthrough = next_real_insn (insn);
	  end->counter = recog_data.operand[0];
	  end->label = recog_data.operand[1];
	  end->scratch = recog_data.operand[3];

	  /* If this insn falls through to an unconditional jump,
	     give it a lower priority than the others.  */
	  if (loop->end != 0 && simplejump_p (end->fallthrough))
	    {
	      end->next = loop->end->next;
	      loop->end->next = end;
	    }
	  else
	    {
	      end->next = loop->end;
	      loop->end = end;
	    }
	  break;
	}

  /* Convert the insns for each loop in turn.  */
  for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
    if (mep_repeat_loop_p (loop))
      {
	/* Case (1) or (2).  */
	rtx repeat_label, label_ref;

	/* Create a new label for the repeat insn.  */
	repeat_label = gen_label_rtx ();

	/* Replace the doloop_begin with a repeat.  */
	label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
	emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
			  loop->begin->insn);
	delete_insn (loop->begin->insn);

	/* Insert the repeat label before the first doloop_end.
	   Fill the gap with nops if there are other doloop_ends.  */
	mep_insert_repeat_label_last (loop->end->insn, repeat_label,
				      false, loop->end->next != 0);

	/* Emit a repeat_end (to improve the readability of the output).  */
	emit_insn_before (gen_repeat_end (), loop->end->insn);

	/* Delete the first doloop_end.  */
	delete_insn (loop->end->insn);

	/* Replace the others with branches to REPEAT_LABEL.  */
	for (end = loop->end->next; end != 0; end = end->next)
	  {
	    emit_jump_insn_before (gen_jump (repeat_label), end->insn);
	    delete_insn (end->insn);
	    delete_insn (end->fallthrough);
	  }
      }
    else
      {
	/* Case (3).  First replace all the doloop_begins with increment
	   instructions.  */
	for (begin = loop->begin; begin != 0; begin = begin->next)
	  {
	    emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
					     begin->counter, const1_rtx),
			      begin->insn);
	    delete_insn (begin->insn);
	  }

	/* Replace all the doloop_ends with decrement-and-branch sequences.  */
	for (end = loop->end; end != 0; end = end->next)
	  {
	    rtx reg;

	    start_sequence ();

	    /* Load the counter value into a general register.  */
	    reg = end->counter;
	    if (!REG_P (reg) || REGNO (reg) > 15)
	      {
		reg = end->scratch;
		emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
	      }

	    /* Decrement the counter.  */
	    emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
				      constm1_rtx));

	    /* Copy it back to its original location.  */
	    if (reg != end->counter)
	      emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));

	    /* Jump back to the start label.  */
	    insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
						     end->label));
	    JUMP_LABEL (insn) = end->label;
	    LABEL_NUSES (end->label)++;

	    /* Emit the whole sequence before the doloop_end.  */
	    insn = get_insns ();
	    end_sequence ();
	    emit_insn_before (insn, end->insn);

	    /* Delete the doloop_end.  */
	    delete_insn (end->insn);
	  }
      }
}
5548
5549
/* Return true if INSN is a conditional branch whose EQ/NE/LT/GE
   condition can be inverted and still match an insn pattern.  The
   condition is flipped in place, re-recognized, and then restored,
   so INSN is left exactly as it was on return.  */
static bool
mep_invertable_branch_p (rtx insn)
{
  rtx cond, set;
  enum rtx_code old_code;
  int i;

  set = PATTERN (insn);
  if (GET_CODE (set) != SET)
    return false;
  if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
    return false;
  cond = XEXP (XEXP (set, 1), 0);
  old_code = GET_CODE (cond);
  /* Temporarily replace the condition with its inverse.  */
  switch (old_code)
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      return false;
    }
  /* Force re-recognition of the modified insn, then restore it.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  PUT_CODE (cond, old_code);
  INSN_CODE (insn) = -1;
  return i >= 0;
}
5587
/* Invert the condition of conditional branch INSN (which must satisfy
   mep_invertable_branch_p) and retarget it at a fresh label emitted
   just after AFTER.  The old target label is deleted when INSN was
   its only user.  */
static void
mep_invert_branch (rtx insn, rtx after)
{
  rtx cond, set, label;
  int i;

  set = PATTERN (insn);

  gcc_assert (GET_CODE (set) == SET);
  gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);

  cond = XEXP (XEXP (set, 1), 0);
  switch (GET_CODE (cond))
    {
    case EQ:
      PUT_CODE (cond, NE);
      break;
    case NE:
      PUT_CODE (cond, EQ);
      break;
    case LT:
      PUT_CODE (cond, GE);
      break;
    case GE:
      PUT_CODE (cond, LT);
      break;
    default:
      gcc_unreachable ();
    }
  label = gen_label_rtx ();
  emit_label_after (label, after);
  /* Redirect whichever arm of the IF_THEN_ELSE holds the target label
     to the new label.  */
  for (i=1; i<=2; i++)
    if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
      {
	rtx ref = XEXP (XEXP (set, 1), i);
	if (LABEL_NUSES (XEXP (ref, 0)) == 1)
	  delete_insn (XEXP (ref, 0));
	XEXP (ref, 0) = label;
	LABEL_NUSES (label) ++;
	JUMP_LABEL (insn) = label;
      }
  /* Re-recognize the inverted insn; it must still match a pattern.  */
  INSN_CODE (insn) = -1;
  i = recog_memoized (insn);
  gcc_assert (i >= 0);
}
5633
/* Look for invertible-branch loops whose top can only be reached from
   within the loop, and convert them to use the erepeat instruction:
   an erepeat is inserted after the loop-top label and the closing
   branch is either deleted (simple jump) or inverted so it exits past
   the erepeat_end.  */

static void
mep_reorg_erepeat (rtx insns)
{
  rtx insn, prev, l, x;
  int count;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
	&& ! JUMP_TABLE_DATA_P (insn)
	&& mep_invertable_branch_p (insn))
      {
	if (dump_file)
	  {
	    fprintf (dump_file, "\n------------------------------\n");
	    fprintf (dump_file, "erepeat: considering this jump:\n");
	    print_rtl_single (dump_file, insn);
	  }
	/* A conditional branch itself occupies one slot in the loop.  */
	count = simplejump_p (insn) ? 0 : 1;
	/* Walk backwards from the branch looking for its target label.  */
	for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
	  {
	    /* A call or barrier before the target means this is not a
	       straight-line backward loop: give up on this jump.  */
	    if (GET_CODE (prev) == CALL_INSN
		|| BARRIER_P (prev))
	      break;

	    if (prev == JUMP_LABEL (insn))
	      {
		rtx newlast;
		if (dump_file)
		  fprintf (dump_file, "found loop top, %d insns\n", count);

		if (LABEL_NUSES (prev) == 1)
		  /* We're the only user, always safe */ ;
		else if (LABEL_NUSES (prev) == 2)
		  {
		    /* See if there's a barrier before this label.  If
		       so, we know nobody inside the loop uses it.
		       But we must be careful to put the erepeat
		       *after* the label.  */
		    rtx barrier;
		    for (barrier = PREV_INSN (prev);
			 barrier && GET_CODE (barrier) == NOTE;
			 barrier = PREV_INSN (barrier))
		      ;
		    if (barrier && GET_CODE (barrier) != BARRIER)
		      break;
		  }
		else
		  {
		    /* We don't know who else, within or without our
		       loop, uses this label.  */
		    if (dump_file)
		      fprintf (dump_file, "... but there are multiple users, too risky.\n");
		    break;
		  }

		/* Generate a label to be used by the erepeat insn.  */
		l = gen_label_rtx ();

		/* Insert the erepeat after INSN's target label.  */
		x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
		LABEL_NUSES (l)++;
		emit_insn_after (x, prev);

		/* Insert the erepeat label.  */
		newlast = (mep_insert_repeat_label_last
			   (insn, l, !simplejump_p (insn), false));
		if (simplejump_p (insn))
		  {
		    /* Unconditional loop branch: the erepeat now does
		       the looping, so the jump can go entirely.  */
		    emit_insn_before (gen_erepeat_end (), insn);
		    delete_insn (insn);
		  }
		else
		  {
		    /* Conditional branch: invert it so it jumps out of
		       the loop, past the erepeat_end.  */
		    mep_invert_branch (insn, newlast);
		    emit_insn_after (gen_erepeat_end (), newlast);
		  }
		break;
	      }

	    if (LABEL_P (prev))
	      {
		/* A label is OK if there is exactly one user, and we
		   can find that user before the next label.  */
		rtx user = 0;
		int safe = 0;
		if (LABEL_NUSES (prev) == 1)
		  {
		    for (user = PREV_INSN (prev);
			 user && (INSN_P (user) || GET_CODE (user) == NOTE);
			 user = PREV_INSN (user))
		      if (GET_CODE (user) == JUMP_INSN
			  && JUMP_LABEL (user) == prev)
			{
			  safe = INSN_UID (user);
			  break;
			}
		  }
		if (!safe)
		  break;
		if (dump_file)
		  fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
			   safe, INSN_UID (prev));
	      }

	    if (INSN_P (prev))
	      {
		count ++;
	      }
	  }
      }
  if (dump_file)
    fprintf (dump_file, "\n==============================\n");
}
5746
/* Replace a jump to a return, with a copy of the return.  GCC doesn't
   always do this on its own.  */

static void
mep_jmp_return_reorg (rtx insns)
{
  rtx insn, label, ret;
  int ret_code;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (simplejump_p (insn))
      {
	/* Find the first real insn the jump jumps to.  */
	label = ret = JUMP_LABEL (insn);
	while (ret
	       && (GET_CODE (ret) == NOTE
		   || GET_CODE (ret) == CODE_LABEL
		   || GET_CODE (PATTERN (ret)) == USE))
	  ret = NEXT_INSN (ret);

	if (ret)
	  {
	    /* Is it a return?  */
	    ret_code = recog_memoized (ret);
	    if (ret_code == CODE_FOR_return_internal
		|| ret_code == CODE_FOR_eh_return_internal)
	      {
		/* It is.  Replace the jump with a return, dropping the
		   target label if it is now unused.  */
		LABEL_NUSES (label) --;
		if (LABEL_NUSES (label) == 0)
		  delete_insn (label);
		PATTERN (insn) = copy_rtx (PATTERN (ret));
		INSN_CODE (insn) = -1;
	      }
	  }
      }
}
5784
5785
5786static void
5787mep_reorg_addcombine (rtx insns)
5788{
5789 rtx i, n;
5790
5791 for (i = insns; i; i = NEXT_INSN (i))
5792 if (INSN_P (i)
5793 && INSN_CODE (i) == CODE_FOR_addsi3
5794 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5795 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5796 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5797 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5798 {
5799 n = NEXT_INSN (i);
5800 if (INSN_P (n)
5801 && INSN_CODE (n) == CODE_FOR_addsi3
5802 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5803 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5804 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5805 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5806 {
5807 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5808 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5809 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5810 && ic + nc < 32767
5811 && ic + nc > -32768)
5812 {
5813 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5814 NEXT_INSN (i) = NEXT_INSN (n);
5815 if (NEXT_INSN (i))
5816 PREV_INSN (NEXT_INSN (i)) = i;
5817 }
5818 }
5819 }
5820}
5821
5822/* If this insn adjusts the stack, return the adjustment, else return
5823 zero. */
5824static int
5825add_sp_insn_p (rtx insn)
5826{
5827 rtx pat;
5828
5829 if (! single_set (insn))
5830 return 0;
5831 pat = PATTERN (insn);
5832 if (GET_CODE (SET_DEST (pat)) != REG)
5833 return 0;
5834 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5835 return 0;
5836 if (GET_CODE (SET_SRC (pat)) != PLUS)
5837 return 0;
5838 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5839 return 0;
5840 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5841 return 0;
5842 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5843 return 0;
5844 return INTVAL (XEXP (SET_SRC (pat), 1));
5845}
5846
/* Check for trivial functions that set up an unneeded stack
   frame.  If the function consists of exactly one $sp -= N at the
   start and one $sp += N at the end, with no other use of $sp and no
   calls in between, delete both adjustments.  */
static void
mep_reorg_noframe (rtx insns)
{
  rtx start_frame_insn;
  rtx end_frame_insn = 0;
  int sp_adjust, sp2;
  rtx sp;

  /* The first insn should be $sp = $sp + N */
  while (insns && ! INSN_P (insns))
    insns = NEXT_INSN (insns);
  if (!insns)
    return;

  sp_adjust = add_sp_insn_p (insns);
  if (sp_adjust == 0)
    return;

  start_frame_insn = insns;
  sp = SET_DEST (PATTERN (start_frame_insn));

  insns = next_real_insn (insns);

  /* Scan the body: bail out on a second unmatched adjustment, any
     other mention of $sp, or a call.  The final real insn (the
     return) is deliberately not examined.  */
  while (insns)
    {
      rtx next = next_real_insn (insns);
      if (!next)
	break;

      sp2 = add_sp_insn_p (insns);
      if (sp2)
	{
	  if (end_frame_insn)
	    return;
	  end_frame_insn = insns;
	  if (sp2 != -sp_adjust)
	    return;
	}
      else if (mep_mentioned_p (insns, sp, 0))
	return;
      else if (CALL_P (insns))
	return;

      insns = next;
    }

  if (end_frame_insn)
    {
      /* The frame is provably unused; drop both adjustments.  */
      delete_insn (start_frame_insn);
      delete_insn (end_frame_insn);
    }
}
5901
/* Machine-dependent reorg pass: runs the MeP-specific insn rewrites
   (add combining, jump-to-return replacement, VLIW bundling, repeat
   and erepeat conversion, frame elimination) over the whole insn
   chain.  */
static void
mep_reorg (void)
{
  rtx insns = get_insns ();

  /* We require accurate REG_DEAD notes.  */
  compute_bb_for_insn ();
  df_note_add_problem ();
  df_analyze ();

  mep_reorg_addcombine (insns);
#if EXPERIMENTAL_REGMOVE_REORG
  /* VLIW packing has been done already, so we can't just delete things.  */
  if (!mep_vliw_function_p (cfun->decl))
    mep_reorg_regmove (insns);
#endif
  mep_jmp_return_reorg (insns);
  mep_bundle_insns (insns);
  mep_reorg_repeat (insns);
  /* erepeat conversion is skipped when profiling is on, and in
     interrupt handlers unless the repeat-begin register is saved.  */
  if (optimize
      && !profile_flag
      && !profile_arc_flag
      && TARGET_OPT_REPEAT
      && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
    mep_reorg_erepeat (insns);

  /* This may delete *insns so make sure it's last.  */
  mep_reorg_noframe (insns);

  df_finish_pass (false);
}
5933
5934\f
5935
5936/*----------------------------------------------------------------------*/
5937/* Builtins */
5938/*----------------------------------------------------------------------*/
5939
/* Element X gives the index into cgen_insns[] of the most general
   implementation of intrinsic X.  Unimplemented intrinsics are
   mapped to -1.  */
int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];

/* Element X gives the index of another instruction that is mapped to
   the same intrinsic as cgen_insns[X].  It is -1 when there is no other
   instruction.

   Things are set up so that mep_intrinsic_chain[X] < X.  */
static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];

/* The bitmask for the current ISA.  The ISA masks are declared
   in mep-intrin.h.  */
unsigned int mep_selected_isa;

/* Associates a -mconfig= name with its ISA bitmask.  */
struct mep_config {
  const char *config_name;
  unsigned int isa;
};

/* Table of known configurations, terminated by a null entry.  The
   first entry supplies the default ISA.  */
static struct mep_config mep_configs[] = {
#ifdef COPROC_SELECTION_TABLE
  COPROC_SELECTION_TABLE,
#endif
  { 0, 0 }
};
5967
/* Initialize the global intrinsics variables above.  */

static void
mep_init_intrinsics (void)
{
  size_t i;

  /* Set MEP_SELECTED_ISA to the ISA flag for this configuration.  */
  mep_selected_isa = mep_configs[0].isa;
  if (mep_config_string != 0)
    for (i = 0; mep_configs[i].config_name; i++)
      if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
	{
	  mep_selected_isa = mep_configs[i].isa;
	  break;
	}

  /* Assume all intrinsics are unavailable.  */
  for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
    mep_intrinsic_insn[i] = -1;

  /* Build up the global intrinsic tables.  Later table entries for
     the same intrinsic are chained behind the most general one.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].isas & mep_selected_isa) != 0)
      {
	mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
	mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
      }
  /* See whether we can directly move values between one coprocessor
     register and another.  */
  for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
    if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
      mep_have_copro_copro_moves_p = true;

  /* See whether we can directly move values between core and
     coprocessor registers.  */
  mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
				 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));

  /* NOTE(review): this unconditionally overrides the computed value
     above — presumably deliberate, but worth confirming.  */
  mep_have_core_copro_moves_p = 1;
}
6009
/* Declare all available intrinsic functions.  Called once only.  */

/* Tree type nodes for the coprocessor builtin types; initialized by
   mep_init_builtins and consumed by mep_cgen_regnum_to_type.  */
static tree cp_data_bus_int_type_node;
static tree opaque_vector_type_node;
static tree v8qi_type_node;
static tree v4hi_type_node;
static tree v2si_type_node;
static tree v8uqi_type_node;
static tree v4uhi_type_node;
static tree v2usi_type_node;
6020
/* Map a cgen regnum operand type onto the tree type used for the
   corresponding builtin argument or return value.  Unrecognized
   operand types map to void.  */

static tree
mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
{
  switch (cr)
    {
    case cgen_regnum_operand_type_POINTER:	return ptr_type_node;
    case cgen_regnum_operand_type_LONG:		return long_integer_type_node;
    case cgen_regnum_operand_type_ULONG:	return long_unsigned_type_node;
    case cgen_regnum_operand_type_SHORT:	return short_integer_type_node;
    case cgen_regnum_operand_type_USHORT:	return short_unsigned_type_node;
    case cgen_regnum_operand_type_CHAR:		return char_type_node;
    case cgen_regnum_operand_type_UCHAR:	return unsigned_char_type_node;
    case cgen_regnum_operand_type_SI:		return intSI_type_node;
    case cgen_regnum_operand_type_DI:		return intDI_type_node;
    case cgen_regnum_operand_type_VECTOR:	return opaque_vector_type_node;
    case cgen_regnum_operand_type_V8QI:		return v8qi_type_node;
    case cgen_regnum_operand_type_V4HI:		return v4hi_type_node;
    case cgen_regnum_operand_type_V2SI:		return v2si_type_node;
    case cgen_regnum_operand_type_V8UQI:	return v8uqi_type_node;
    case cgen_regnum_operand_type_V4UHI:	return v4uhi_type_node;
    case cgen_regnum_operand_type_V2USI:	return v2usi_type_node;
    case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
    default:
      return void_type_node;
    }
}
6047
/* Register the MeP coprocessor builtin types and the builtin
   functions for all intrinsics available in the selected ISA.  */
static void
mep_init_builtins (void)
{
  size_t i;

  /* The width of the coprocessor data bus type depends on the
     configured coprocessor register size.  */
  if (TARGET_64BIT_CR_REGS)
    cp_data_bus_int_type_node = long_long_integer_type_node;
  else
    cp_data_bus_int_type_node = long_integer_type_node;

  opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
  v8qi_type_node = build_vector_type (intQI_type_node, 8);
  v4hi_type_node = build_vector_type (intHI_type_node, 4);
  v2si_type_node = build_vector_type (intSI_type_node, 2);
  v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
  v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
  v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);

  add_builtin_type ("cp_data_bus_int", cp_data_bus_int_type_node);

  add_builtin_type ("cp_vector", opaque_vector_type_node);

  add_builtin_type ("cp_v8qi", v8qi_type_node);
  add_builtin_type ("cp_v4hi", v4hi_type_node);
  add_builtin_type ("cp_v2si", v2si_type_node);

  add_builtin_type ("cp_v8uqi", v8uqi_type_node);
  add_builtin_type ("cp_v4uhi", v4uhi_type_node);
  add_builtin_type ("cp_v2usi", v2usi_type_node);

  /* Intrinsics like mep_cadd3 are implemented with two groups of
     instructions, one which uses UNSPECs and one which uses a specific
     rtl code such as PLUS.  Instructions in the latter group belong
     to GROUP_KNOWN_CODE.

     In such cases, the intrinsic will have two entries in the global
     tables above.  The unspec form is accessed using builtin functions
     while the specific form is accessed using the mep_* enum in
     mep-intrin.h.

     The idea is that __cop arithmetic and builtin functions have
     different optimization requirements.  If mep_cadd3() appears in
     the source code, the user will surely expect gcc to use cadd3
     rather than a work-alike such as add3.  However, if the user
     just writes "a + b", where a or b are __cop variables, it is
     reasonable for gcc to choose a core instruction rather than
     cadd3 if it believes that is more optimal.  */
  for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
    if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
	&& mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
      {
	tree ret_type = void_type_node;
	tree bi_type;

	/* Skip duplicate table entries for the same intrinsic.  */
	if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
	  continue;

	if (cgen_insns[i].cret_p)
	  ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);

	bi_type = build_function_type_list (ret_type, NULL_TREE);
	add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
			      bi_type,
			      cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
      }
}
6114
/* Report the unavailability of the given intrinsic.  */

#if 1
static void
mep_intrinsic_unavailable (int intrinsic)
{
  /* Each intrinsic is diagnosed at most once per compilation.  */
  static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];

  if (already_reported_p[intrinsic])
    return;

  if (mep_intrinsic_insn[intrinsic] < 0)
    error ("coprocessor intrinsic %qs is not available in this configuration",
	   cgen_intrinsics[intrinsic]);
  else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
    error ("%qs is not available in VLIW functions",
	   cgen_intrinsics[intrinsic]);
  else
    error ("%qs is not available in non-VLIW functions",
	   cgen_intrinsics[intrinsic]);

  already_reported_p[intrinsic] = 1;
}
#endif
6139
6140
6141/* See if any implementation of INTRINSIC is available to the
6142 current function. If so, store the most general implementation
6143 in *INSN_PTR and return true. Return false otherwise. */
6144
6145static bool
6146mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6147{
6148 int i;
6149
6150 i = mep_intrinsic_insn[intrinsic];
6151 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6152 i = mep_intrinsic_chain[i];
6153
6154 if (i >= 0)
6155 {
6156 *insn_ptr = &cgen_insns[i];
6157 return true;
6158 }
6159 return false;
6160}
6161
6162
6163/* Like mep_get_intrinsic_insn, but with extra handling for moves.
6164 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6165 try using a work-alike instead. In this case, the returned insn
6166 may have three operands rather than two. */
6167
6168static bool
6169mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6170{
6171 size_t i;
6172
6173 if (intrinsic == mep_cmov)
6174 {
6175 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6176 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6177 return true;
6178 return false;
6179 }
6180 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6181}
6182
6183
6184/* If ARG is a register operand that is the same size as MODE, convert it
6185 to MODE using a subreg. Otherwise return ARG as-is. */
6186
6187static rtx
6188mep_convert_arg (enum machine_mode mode, rtx arg)
6189{
6190 if (GET_MODE (arg) != mode
6191 && register_operand (arg, VOIDmode)
6192 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6193 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6194 return arg;
6195}
6196
6197
6198/* Apply regnum conversions to ARG using the description given by REGNUM.
6199 Return the new argument on success and null on failure. */
6200
6201static rtx
6202mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6203{
6204 if (regnum->count == 0)
6205 return arg;
6206
6207 if (GET_CODE (arg) != CONST_INT
6208 || INTVAL (arg) < 0
6209 || INTVAL (arg) >= regnum->count)
6210 return 0;
6211
6212 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6213}
6214
6215
/* Try to make intrinsic argument ARG match the given operand.
   UNSIGNED_P is true if the argument has an unsigned type.
   Returns the legitimized rtx, or 0 on failure.  */

static rtx
mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
		    int unsigned_p)
{
  if (GET_CODE (arg) == CONST_INT)
    {
      /* CONST_INTs can only be bound to integer operands.  */
      if (GET_MODE_CLASS (operand->mode) != MODE_INT)
	return 0;
    }
  else if (GET_CODE (arg) == CONST_DOUBLE)
    /* These hold vector constants.  */;
  else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
    {
      /* If the argument is a different size from what's expected, we must
	 have a value in the right mode class in order to convert it.  */
      if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
	return 0;

      /* If the operand is an rvalue, promote or demote it to match the
	 operand's size.  This might not need extra instructions when
	 ARG is a register value.  */
      if (operand->constraint[0] != '=')
	arg = convert_to_mode (operand->mode, arg, unsigned_p);
    }

  /* If the operand is an lvalue, bind the operand to a new register.
     The caller will copy this value into ARG after the main
     instruction.  By doing this always, we produce slightly more
     optimal code.  */
  /* But not for control registers.  */
  if (operand->constraint[0] == '='
      && (! REG_P (arg)
	  || ! (CONTROL_REGNO_P (REGNO (arg))
		|| CCR_REGNO_P (REGNO (arg))
		|| CR_REGNO_P (REGNO (arg)))
	  ))
    return gen_reg_rtx (operand->mode);

  /* Try simple mode punning.  */
  arg = mep_convert_arg (operand->mode, arg);
  if (operand->predicate (arg, operand->mode))
    return arg;

  /* See if forcing the argument into a register will make it match.  */
  if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
    arg = force_reg (operand->mode, arg);
  else
    arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
  if (operand->predicate (arg, operand->mode))
    return arg;

  return 0;
}
6273
6274
/* Report that ARG cannot be passed to argument ARGNUM of intrinsic
   function FNNAME.  OPERAND describes the operand to which ARGNUM
   is mapped.  Emits a range/alignment diagnostic for out-of-range
   constants, and a generic type diagnostic otherwise.  */

static void
mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
		      int argnum, tree fnname)
{
  size_t i;

  /* For a constant argument, look up the matching immediate predicate
     so the diagnostic can state the exact range and alignment.  */
  if (GET_CODE (arg) == CONST_INT)
    for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
      if (operand->predicate == cgen_immediate_predicates[i].predicate)
	{
	  const struct cgen_immediate_predicate *predicate;
	  HOST_WIDE_INT argval;

	  predicate = &cgen_immediate_predicates[i];
	  argval = INTVAL (arg);
	  if (argval < predicate->lower || argval >= predicate->upper)
	    error ("argument %d of %qE must be in the range %d...%d",
		   argnum, fnname, predicate->lower, predicate->upper - 1);
	  else
	    error ("argument %d of %qE must be a multiple of %d",
		   argnum, fnname, predicate->align);
	  return;
	}

  error ("incompatible type for argument %d of %qE", argnum, fnname);
}
6305
/* Expand a call EXP to a MeP coprocessor builtin.  Checks that the
   intrinsic is available, evaluates and legitimizes the arguments,
   emits the underlying instruction, and copies output operands back
   to their destinations.  Returns TARGET, or NULL_RTX on error.
   Note: TARGET is marked ATTRIBUTE_UNUSED but is in fact used.  */

static rtx
mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
		    rtx subtarget ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED,
		    int ignore ATTRIBUTE_UNUSED)
{
  rtx pat, op[10], arg[10];
  unsigned int a;
  int opindex, unsigned_p[10];
  tree fndecl, args;
  unsigned int n_args;
  tree fnname;
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  /* FIRST_ARG is 1 when slot 0 of arg[]/op[] holds the return value.  */
  unsigned int first_arg = 0;
  unsigned int builtin_n_args;

  fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  fnname = DECL_NAME (fndecl);

  /* Find out which instruction we should emit.  Note that some coprocessor
     intrinsics may only be available in VLIW mode, or only in normal mode.  */
  if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
    {
      mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
      return NULL_RTX;
    }
  idata = &insn_data[cgen_insn->icode];

  builtin_n_args = cgen_insn->num_args;

  /* When the insn produces a result, operand 0 is the return value
     rather than a user-supplied argument.  */
  if (cgen_insn->cret_p)
    {
      if (cgen_insn->cret_p > 1)
	builtin_n_args ++;
      first_arg = 1;
      mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
      builtin_n_args --;
    }

  /* Evaluate each argument.  */
  n_args = call_expr_nargs (exp);

  if (n_args < builtin_n_args)
    {
      error ("too few arguments to %qE", fnname);
      return NULL_RTX;
    }
  if (n_args > builtin_n_args)
    {
      error ("too many arguments to %qE", fnname);
      return NULL_RTX;
    }

  for (a = first_arg; a < builtin_n_args + first_arg; a++)
    {
      tree value;

      args = CALL_EXPR_ARG (exp, a - first_arg);

      value = args;

#if 0
      if (cgen_insn->regnums[a].reference_p)
	{
	  if (TREE_CODE (value) != ADDR_EXPR)
	    {
	      debug_tree(value);
	      error ("argument %d of %qE must be an address", a+1, fnname);
	      return NULL_RTX;
	    }
	  value = TREE_OPERAND (value, 0);
	}
#endif

      /* If the argument has been promoted to int, get the unpromoted
	 value.  This is necessary when sub-int memory values are bound
	 to reference parameters.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && TREE_TYPE (value) == integer_type_node
	  && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
	      < TYPE_PRECISION (TREE_TYPE (value))))
	value = TREE_OPERAND (value, 0);

      /* If the argument has been promoted to double, get the unpromoted
	 SFmode value.  This is necessary for FMAX support, for example.  */
      if (TREE_CODE (value) == NOP_EXPR
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
	  && TYPE_MODE (TREE_TYPE (value)) == DFmode
	  && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
	value = TREE_OPERAND (value, 0);

      unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
      arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
      arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
      if (cgen_insn->regnums[a].reference_p)
	{
	  tree pointed_to = TREE_TYPE (TREE_TYPE (value));
	  enum machine_mode pointed_mode = TYPE_MODE (pointed_to);

	  arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
	}
      if (arg[a] == 0)
	{
	  error ("argument %d of %qE must be in the range %d...%d",
		 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
	  return NULL_RTX;
	}
    }

  /* Allocate the return-value operand, reusing TARGET when its mode
     already matches.  */
  for (a = 0; a < first_arg; a++)
    {
      if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
	arg[a] = target;
      else
	arg[a] = gen_reg_rtx (idata->operand[0].mode);
    }

  /* Convert the arguments into a form suitable for the intrinsic.
     Report an error if this isn't possible.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    {
      a = cgen_insn->op_mapping[opindex];
      op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
					arg[a], unsigned_p[a]);
      if (op[opindex] == 0)
	{
	  mep_incompatible_arg (&idata->operand[opindex],
				arg[a], a + 1 - first_arg, fnname);
	  return NULL_RTX;
	}
    }

  /* Emit the instruction.  */
  pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
		       op[5], op[6], op[7], op[8], op[9]);

  if (GET_CODE (pat) == SET
      && GET_CODE (SET_DEST (pat)) == PC
      && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
    emit_jump_insn (pat);
  else
    emit_insn (pat);

  /* Copy lvalues back to their final locations.  */
  for (opindex = 0; opindex < idata->n_operands; opindex++)
    if (idata->operand[opindex].constraint[0] == '=')
      {
	a = cgen_insn->op_mapping[opindex];
	if (a >= first_arg)
	  {
	    if (GET_MODE_CLASS (GET_MODE (arg[a]))
		!= GET_MODE_CLASS (GET_MODE (op[opindex])))
	      emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
						   op[opindex]));
	    else
	      {
		/* First convert the operand to the right mode, then copy it
		   into the destination.  Doing the conversion as a separate
		   step (rather than using convert_move) means that we can
		   avoid creating no-op moves when ARG[A] and OP[OPINDEX]
		   refer to the same register.  */
		op[opindex] = convert_to_mode (GET_MODE (arg[a]),
					       op[opindex], unsigned_p[a]);
		if (!rtx_equal_p (arg[a], op[opindex]))
		  emit_move_insn (arg[a], op[opindex]);
	      }
	  }
      }

  /* If the caller supplied a TARGET we could not bind directly,
     move the result into it now.  */
  if (first_arg > 0 && target && target != op[0])
    {
      emit_move_insn (target, op[0]);
    }

  return target;
}
6485
/* Implement TARGET_VECTOR_MODE_SUPPORTED_P: no generic vector mode is
   ever supported — this hook always returns false.  */
static bool
mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
6491\f
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  for_each_rtx callback; DATA is unused.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
	{
	  /* Only hard registers can be marked global.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])
	    return 1;
	  return 0;
	}
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
	return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      /* These can never mention a register.  */
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
6541
6542/* Returns nonzero if X mentions a global register. */
6543
6544static int
6545global_reg_mentioned_p (rtx x)
6546{
6547 if (INSN_P (x))
6548 {
6549 if (CALL_P (x))
6550 {
6551 if (! RTL_CONST_OR_PURE_CALL_P (x))
6552 return 1;
6553 x = CALL_INSN_FUNCTION_USAGE (x);
6554 if (x == 0)
6555 return 0;
6556 }
6557 else
6558 x = PATTERN (x);
6559 }
6560
6561 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6562}
/* Scheduling hooks for VLIW mode.

   Conceptually this is very simple: we have a two-pack architecture
   that takes one core insn and one coprocessor insn to make up either
   a 32- or 64-bit instruction word (depending on the option bit set in
   the chip).  I.e. in VL32 mode, we can pack one 16-bit core insn and
   one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
   and one 48-bit cop insn or two 32-bit core/cop insns.

   In practice, instruction selection will be a bear.  Consider in
   VL64 mode the following insns

	add $1, 1
	cmov $cr0, $0

   these cannot pack, since the add is a 16-bit core insn and cmov
   is a 32-bit cop insn.  However,

	add3 $1, $1, 1
	cmov $cr0, $0

   packs just fine.  For good VLIW code generation in VL64 mode, we
   will have to have 32-bit alternatives for many of the common core
   insns.  Not implemented.  */

/* Implement TARGET_SCHED_ADJUST_COST: tweak the scheduler's idea of
   the latency of the dependence LINK between DEP_INSN and INSN.  */
static int
mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  int cost_specified;

  if (REG_NOTE_KIND (link) != 0)
    {
      /* See whether INSN and DEP_INSN are intrinsics that set the same
	 hard register.  If so, it is more important to free up DEP_INSN
	 than it is to free up INSN.

	 Note that intrinsics like mep_mulr are handled differently from
	 the equivalent mep.md patterns.  In mep.md, if we don't care
	 about the value of $lo and $hi, the pattern will just clobber
	 the registers, not set them.  Since clobbers don't count as
	 output dependencies, it is often possible to reorder two mulrs,
	 even after reload.

	 In contrast, mep_mulr() sets both $lo and $hi to specific values,
	 so any pair of mep_mulr()s will be inter-dependent.   We should
	 therefore give the first mep_mulr() a higher priority.  */
      if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
	  && global_reg_mentioned_p (PATTERN (insn))
	  && global_reg_mentioned_p (PATTERN (dep_insn)))
	return 1;

      /* If the dependence is an anti or output dependence, assume it
	 has no cost.  */
      return 0;
    }

  /* If we can't recognize the insns, we can't really do anything.  */
  if (recog_memoized (dep_insn) < 0)
    return cost;

  /* The latency attribute doesn't apply to MeP-h1: we use the stall
     attribute instead.  */
  if (!TARGET_H1)
    {
      cost_specified = get_attr_latency (dep_insn);
      if (cost_specified != 0)
	return cost_specified;
    }

  return cost;
}
6634
6635/* ??? We don't properly compute the length of a load/store insn,
6636 taking into account the addressing mode. */
6637
6638static int
6639mep_issue_rate (void)
6640{
6641 return TARGET_IVC2 ? 3 : 2;
6642}
6643
6644/* Return true if function DECL was declared with the vliw attribute. */
6645
6646bool
6647mep_vliw_function_p (tree decl)
6648{
6649 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6650}
6651
6652static rtx
6653mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6654{
6655 int i;
6656
6657 for (i = nready - 1; i >= 0; --i)
6658 {
6659 rtx insn = ready[i];
6660 if (recog_memoized (insn) >= 0
6661 && get_attr_slot (insn) == slot
6662 && get_attr_length (insn) == length)
6663 return insn;
6664 }
6665
6666 return NULL_RTX;
6667}
6668
6669static void
6670mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6671{
6672 int i;
6673
6674 for (i = 0; i < nready; ++i)
6675 if (ready[i] == insn)
6676 {
6677 for (; i < nready - 1; ++i)
6678 ready[i] = ready[i + 1];
6679 ready[i] = insn;
6680 return;
6681 }
6682
6683 gcc_unreachable ();
6684}
6685
/* Write a one-line description of INSN (insn code, UID, pattern name
   and slot class) to DUMP, for scheduler debugging output.  */

static void
mep_print_sched_insn (FILE *dump, rtx insn)
{
  const char *slots = "none";
  const char *name = NULL;
  int code;
  char buf[30];

  if (GET_CODE (PATTERN (insn)) == SET
      || GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      switch (get_attr_slots (insn))
	{
	case SLOTS_CORE: slots = "core"; break;
	case SLOTS_C3: slots = "c3"; break;
	case SLOTS_P0: slots = "p0"; break;
	case SLOTS_P0_P0S: slots = "p0,p0s"; break;
	case SLOTS_P0_P1: slots = "p0,p1"; break;
	case SLOTS_P0S: slots = "p0s"; break;
	case SLOTS_P0S_P1: slots = "p0s,p1"; break;
	case SLOTS_P1: slots = "p1"; break;
	default:
	  /* Unnamed slot combination: show the raw attribute value.  */
	  sprintf(buf, "%d", get_attr_slots (insn));
	  slots = buf;
	  break;
	}
    }
  if (GET_CODE (PATTERN (insn)) == USE)
    slots = "use";

  code = INSN_CODE (insn);
  if (code >= 0)
    name = get_insn_name (code);
  if (!name)
    name = "{unknown}";

  fprintf (dump,
	   "insn %4d %4d %8s %s\n",
	   code,
	   INSN_UID (insn),
	   name,
	   slots);
}
6729
6730static int
6731mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6732 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6733 int *pnready, int clock ATTRIBUTE_UNUSED)
6734{
6735 int nready = *pnready;
6736 rtx core_insn, cop_insn;
6737 int i;
6738
6739 if (dump && sched_verbose > 1)
6740 {
6741 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6742 for (i=0; i<nready; i++)
6743 mep_print_sched_insn (dump, ready[i]);
6744 fprintf (dump, "\n");
6745 }
6746
6747 if (!mep_vliw_function_p (cfun->decl))
6748 return 1;
6749 if (nready < 2)
6750 return 1;
6751
6752 /* IVC2 uses a DFA to determine what's ready and what's not. */
6753 if (TARGET_IVC2)
6754 return nready;
6755
6756 /* We can issue either a core or coprocessor instruction.
6757 Look for a matched pair of insns to reorder. If we don't
6758 find any, don't second-guess the scheduler's priorities. */
6759
6760 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6761 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6762 TARGET_OPT_VL64 ? 6 : 2)))
6763 ;
6764 else if (TARGET_OPT_VL64
6765 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6766 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6767 ;
6768 else
6769 /* We didn't find a pair. Issue the single insn at the head
6770 of the ready list. */
6771 return 1;
6772
6773 /* Reorder the two insns first. */
6774 mep_move_ready_insn (ready, nready, core_insn);
6775 mep_move_ready_insn (ready, nready - 1, cop_insn);
6776 return 2;
6777}
6778
6779/* A for_each_rtx callback. Return true if *X is a register that is
6780 set by insn PREV. */
6781
6782static int
6783mep_store_find_set (rtx *x, void *prev)
6784{
6785 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6786}
6787
6788/* Like mep_store_bypass_p, but takes a pattern as the second argument,
6789 not the containing insn. */
6790
6791static bool
6792mep_store_data_bypass_1 (rtx prev, rtx pat)
6793{
6794 /* Cope with intrinsics like swcpa. */
6795 if (GET_CODE (pat) == PARALLEL)
6796 {
6797 int i;
6798
6799 for (i = 0; i < XVECLEN (pat, 0); i++)
6800 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6801 return true;
6802
6803 return false;
6804 }
6805
6806 /* Check for some sort of store. */
6807 if (GET_CODE (pat) != SET
6808 || GET_CODE (SET_DEST (pat)) != MEM)
6809 return false;
6810
6811 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6812 The first operand to the unspec is the store data and the other operands
6813 are used to calculate the address. */
6814 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6815 {
6816 rtx src;
6817 int i;
6818
6819 src = SET_SRC (pat);
6820 for (i = 1; i < XVECLEN (src, 0); i++)
6821 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6822 return false;
6823
6824 return true;
6825 }
6826
6827 /* Otherwise just check that PREV doesn't modify any register mentioned
6828 in the memory destination. */
6829 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6830}
6831
6832/* Return true if INSN is a store instruction and if the store address
6833 has no true dependence on PREV. */
6834
6835bool
6836mep_store_data_bypass_p (rtx prev, rtx insn)
6837{
6838 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6839}
6840
6841/* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6842 is a register other than LO or HI and if PREV sets *X. */
6843
6844static int
6845mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6846{
6847 return (REG_P (*x)
6848 && REGNO (*x) != LO_REGNO
6849 && REGNO (*x) != HI_REGNO
6850 && reg_set_p (*x, (const_rtx) prev));
6851}
6852
6853/* Return true if, apart from HI/LO, there are no true dependencies
6854 between multiplication instructions PREV and INSN. */
6855
6856bool
6857mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6858{
6859 rtx pat;
6860
6861 pat = PATTERN (insn);
6862 if (GET_CODE (pat) == PARALLEL)
6863 pat = XVECEXP (pat, 0, 0);
6864 return (GET_CODE (pat) == SET
6865 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6866}
6867
/* Return true if INSN is an ldc instruction that issues to the
   MeP-h1 integer pipeline.  This is true for instructions that
   read from PSW, LP, SAR, HI and LO.  */

bool
mep_ipipe_ldc_p (rtx insn)
{
  rtx pat, src;

  pat = PATTERN (insn);

  /* Cope with intrinsics that set both a hard register and its shadow.
     The set of the hard register comes first.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == SET)
    {
      src = SET_SRC (pat);

      /* Cope with intrinsics.  The first operand to the unspec is
	 the source register.  */
      if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
	src = XVECEXP (src, 0, 0);

      /* Only reads of these five control registers go down the
	 integer pipe; anything else returns false.  */
      if (REG_P (src))
	switch (REGNO (src))
	  {
	  case PSW_REGNO:
	  case LP_REGNO:
	  case SAR_REGNO:
	  case HI_REGNO:
	  case LO_REGNO:
	    return true;
	  }
    }
  return false;
}
6906
/* Create a VLIW bundle from core instruction CORE and coprocessor
   instruction COP.  COP always satisfies INSN_P, but CORE can be
   either a new pattern or an existing instruction.

   Emit the bundle in place of COP and return it.  */

static rtx
mep_make_bundle (rtx core, rtx cop)
{
  rtx insn;

  /* If CORE is an existing instruction, remove it, otherwise put
     the new pattern in an INSN harness.  */
  if (INSN_P (core))
    remove_insn (core);
  else
    core = make_insn_raw (core);

  /* Generate the bundle sequence and replace COP with it.  */
  insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
  insn = emit_insn_after (insn, cop);
  remove_insn (cop);

  /* Set up the links of the insns inside the SEQUENCE: the SEQUENCE
     insn carries the outer links, CORE and COP are chained within.  */
  PREV_INSN (core) = PREV_INSN (insn);
  NEXT_INSN (core) = cop;
  PREV_INSN (cop) = core;
  NEXT_INSN (cop) = NEXT_INSN (insn);

  /* Set the VLIW flag for the coprocessor instruction.  */
  PUT_MODE (core, VOIDmode);
  PUT_MODE (cop, BImode);

  /* Derive a location for the bundle.  Individual instructions cannot
     have their own location because there can be no assembler labels
     between CORE and COP.  Prefer CORE's locator when it has one.  */
  INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
  INSN_LOCATOR (core) = 0;
  INSN_LOCATOR (cop) = 0;

  return insn;
}
6949
/* A helper routine for mep_insn_dependent_p called through note_stores.
   X is a destination being stored to; DATA points at the pattern of the
   other insn.  Clear *DATA when X is mentioned in that pattern, which
   signals a dependency to the caller.  */

static void
mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}
6960
6961/* Return true if anything in insn X is (anti,output,true) dependent on
6962 anything in insn Y. */
6963
6964static int
6965mep_insn_dependent_p (rtx x, rtx y)
6966{
6967 rtx tmp;
6968
6969 gcc_assert (INSN_P (x));
6970 gcc_assert (INSN_P (y));
6971
6972 tmp = PATTERN (y);
6973 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6974 if (tmp == NULL_RTX)
6975 return 1;
6976
6977 tmp = PATTERN (x);
6978 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6979 if (tmp == NULL_RTX)
6980 return 1;
6981
6982 return 0;
6983}
6984
6985static int
6986core_insn_p (rtx insn)
6987{
6988 if (GET_CODE (PATTERN (insn)) == USE)
6989 return 0;
6990 if (get_attr_slot (insn) == SLOT_CORE)
6991 return 1;
6992 return 0;
6993}
6994
/* Mark coprocessor instructions that can be bundled together with
   the immediately preceding core instruction.  This is later used
   to emit the "+" that tells the assembler to create a VLIW insn.

   For unbundled insns, the assembler will automatically add coprocessor
   nops, and 16-bit core nops.  Due to an apparent oversight in the
   spec, the assembler will _not_ automatically add 32-bit core nops,
   so we have to emit those here.

   Called from mep_insn_reorg.  */

static void
mep_bundle_insns (rtx insns)
{
  rtx insn, last = NULL_RTX, first = NULL_RTX;
  int saw_scheduling = 0;

  /* Only do bundling if we're in vliw mode.  */
  if (!mep_vliw_function_p (cfun->decl))
    return;

  /* The first insn in a bundle are TImode, the remainder are
     VOIDmode.  After this function, the first has VOIDmode and the
     rest have BImode.  */

  /* Note: this doesn't appear to be true for JUMP_INSNs.  */

  /* First, move any NOTEs that are within a bundle, to the beginning
     of the bundle.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn) && first)
	/* Don't clear FIRST.  */;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
	first = insn;

      else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
	{
	  rtx note, prev;

	  /* INSN is part of a bundle; FIRST is the first insn in that
	     bundle.  Move all intervening notes out of the bundle.
	     In addition, since the debug pass may insert a label
	     whenever the current line changes, set the location info
	     for INSN to match FIRST.  */

	  INSN_LOCATOR (insn) = INSN_LOCATOR (first);

	  /* Walk backwards from INSN to FIRST, splicing each NOTE out
	     of the chain and re-linking it just before FIRST.  */
	  note = PREV_INSN (insn);
	  while (note && note != first)
	    {
	      prev = PREV_INSN (note);

	      if (NOTE_P (note))
		{
		  /* Remove NOTE from here...  */
		  PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
		  NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
		  /* ...and put it in here.  */
		  NEXT_INSN (note) = first;
		  PREV_INSN (note) = PREV_INSN (first);
		  NEXT_INSN (PREV_INSN (note)) = note;
		  PREV_INSN (NEXT_INSN (note)) = note;
		}

	      note = prev;
	    }
	}

      else if (!NONJUMP_INSN_P (insn))
	first = 0;
    }

  /* Now fix up the bundles.  */
  for (insn = insns; insn ; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	continue;

      if (!NONJUMP_INSN_P (insn))
	{
	  last = 0;
	  continue;
	}

      /* If we're not optimizing enough, there won't be scheduling
	 info.  We detect that here.  */
      if (GET_MODE (insn) == TImode)
	saw_scheduling = 1;
      if (!saw_scheduling)
	continue;

      if (TARGET_IVC2)
	{
	  rtx core_insn = NULL_RTX;

	  /* IVC2 slots are scheduled by DFA, so we just accept
	     whatever the scheduler gives us.  However, we must make
	     sure the core insn (if any) is the first in the bundle.
	     The IVC2 assembler can insert whatever NOPs are needed,
	     and allows a COP insn to be first.  */

	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_MODE (insn) == TImode)
	    {
	      /* Find the last insn of this bundle, remembering the
		 last core insn seen along the way.  */
	      for (last = insn;
		   NEXT_INSN (last)
		     && GET_MODE (NEXT_INSN (last)) == VOIDmode
		     && NONJUMP_INSN_P (NEXT_INSN (last));
		   last = NEXT_INSN (last))
		{
		  if (core_insn_p (last))
		    core_insn = last;
		}
	      if (core_insn_p (last))
		core_insn = last;

	      if (core_insn && core_insn != insn)
		{
		  /* Swap core insn to first in the bundle.  */

		  /* Remove core insn.  */
		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
		  if (NEXT_INSN (core_insn))
		    PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);

		  /* Re-insert core insn.  */
		  PREV_INSN (core_insn) = PREV_INSN (insn);
		  NEXT_INSN (core_insn) = insn;

		  if (PREV_INSN (core_insn))
		    NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
		  PREV_INSN (insn) = core_insn;

		  /* CORE_INSN is now the bundle head.  */
		  PUT_MODE (core_insn, TImode);
		  PUT_MODE (insn, VOIDmode);
		}
	    }

	  /* The first insn has TImode, the rest have VOIDmode */
	  if (GET_MODE (insn) == TImode)
	    PUT_MODE (insn, VOIDmode);
	  else
	    PUT_MODE (insn, BImode);
	  continue;
	}

      PUT_MODE (insn, VOIDmode);
      if (recog_memoized (insn) >= 0
	  && get_attr_slot (insn) == SLOT_COP)
	{
	  /* A COP insn can only be bundled with the preceding insn
	     when that insn is a core insn of complementary length and
	     there is no dependency between the two; otherwise pad
	     with a nop bundle as needed.  */
	  if (GET_CODE (insn) == JUMP_INSN
	      || ! last
	      || recog_memoized (last) < 0
	      || get_attr_slot (last) != SLOT_CORE
	      || (get_attr_length (insn)
		  != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
	      || mep_insn_dependent_p (insn, last))
	    {
	      switch (get_attr_length (insn))
		{
		case 8:
		  break;
		case 6:
		  insn = mep_make_bundle (gen_nop (), insn);
		  break;
		case 4:
		  if (TARGET_OPT_VL64)
		    insn = mep_make_bundle (gen_nop32 (), insn);
		  break;
		case 2:
		  if (TARGET_OPT_VL64)
		    error ("2 byte cop instructions are"
			   " not allowed in 64-bit VLIW mode");
		  else
		    insn = mep_make_bundle (gen_nop (), insn);
		  break;
		default:
		  error ("unexpected %d byte cop instruction",
			 get_attr_length (insn));
		  break;
		}
	    }
	  else
	    insn = mep_make_bundle (last, insn);
	}

      last = insn;
    }
}
7188
7189
/* Try to instantiate INTRINSIC with the operands given in OPERANDS.
   Return true on success.  This function can fail if the intrinsic
   is unavailable or if the operands don't satisfy their predicates.  */

bool
mep_emit_intrinsic (int intrinsic, const rtx *operands)
{
  const struct cgen_insn *cgen_insn;
  const struct insn_data_d *idata;
  rtx newop[10];
  int i;

  /* Map the intrinsic number onto an insn; fails when the intrinsic
     is not available in the current configuration.  */
  if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
    return false;

  idata = &insn_data[cgen_insn->icode];
  for (i = 0; i < idata->n_operands; i++)
    {
      /* Coerce each operand to the mode the insn expects, then check
	 it against the operand's predicate.  */
      newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
      if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
	return false;
    }

  /* NOTE(review): genfun is always called with nine operands
     regardless of n_operands; presumably the generator ignores the
     trailing unused ones — confirm against the generated code.  */
  emit_insn (idata->genfun (newop[0], newop[1], newop[2],
			    newop[3], newop[4], newop[5],
			    newop[6], newop[7], newop[8]));

  return true;
}
7219

/* Apply the given unary intrinsic to OPERANDS[1] and store it on
   OPERANDS[0].  Report an error if the instruction could not
   be synthesized.  OPERANDS[1] is a register_operand.  For sign
   and zero extensions, it may be smaller than SImode.

   Currently a stub: no unary intrinsic is expanded this way, so the
   function always reports failure.  */

bool
mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
			    rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7232
7233
/* Likewise, but apply a binary operation to OPERANDS[1] and
   OPERANDS[2].  OPERANDS[1] is a register_operand, OPERANDS[2]
   can be a general_operand.

   IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
   third operand.  REG and REG3 take register operands only.

   Currently a stub: no binary intrinsic is expanded this way, so the
   function always reports failure.  */

bool
mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
			     int ATTRIBUTE_UNUSED immediate3,
			     int ATTRIBUTE_UNUSED reg,
			     int ATTRIBUTE_UNUSED reg3,
			     rtx * operands ATTRIBUTE_UNUSED)
{
  return false;
}
7250
7251static bool
68f932c4
RS
7252mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
7253 int opno ATTRIBUTE_UNUSED, int *total,
7254 bool ATTRIBUTE_UNUSED speed_t)
7acf4da6
DD
7255{
7256 switch (code)
7257 {
7258 case CONST_INT:
7259 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7260 *total = 0;
7261 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7262 *total = 1;
7263 else
7264 *total = 3;
7265 return true;
7266
7267 case SYMBOL_REF:
7268 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7269 return true;
7270
7271 case MULT:
7272 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7273 ? COSTS_N_INSNS (3)
7274 : COSTS_N_INSNS (2));
7275 return true;
7276 }
7277 return false;
7278}
7279
/* Implement TARGET_ADDRESS_COST.  All addressing modes are given the
   same unit cost.  */

static int
mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
{
  return 1;
}
7285
7acf4da6
DD
/* Implement TARGET_ASM_INIT_SECTIONS: create the MeP-specific output
   sections — .based, small-data (.sbss/.sdata), far-data
   (.far/.farbss), read-only variants (.frodata/.srodata), and the
   VLIW/core text sections.  The .vliw and .core directives emitted
   after the section switches select the assembler's instruction
   mode for those sections.  */

static void
mep_asm_init_sections (void)
{
  based_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .based,\"aw\"");

  tinybss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .sbss,\"aw\"");

  sdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .sdata,\"aw\",@progbits");

  far_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .far,\"aw\"");

  farbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
			   "\t.section .farbss,\"aw\"");

  frodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .frodata,\"a\"");

  srodata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .srodata,\"a\"");

  vtext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vtext,\"axv\"\n\t.vliw");

  vftext_section
    = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
			   "\t.section .vftext,\"axv\"\n\t.vliw");

  ftext_section
    = get_unnamed_section (SECTION_CODE, output_section_asm_op,
			   "\t.section .ftext,\"ax\"\n\t.core");

}
c28883e6
DD

/* Initialize the GCC target structure.  Each #undef/#define pair
   installs a MeP implementation of the corresponding target hook; see
   target.def for the hook contracts.  */

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE	mep_start_function
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE		mep_attribute_table
#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES	mep_comp_type_attributes
#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES	mep_insert_attributes
#undef  TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
#define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P	mep_function_attribute_inlinable_p
#undef  TARGET_CAN_INLINE_P
#define TARGET_CAN_INLINE_P		mep_can_inline_p
#undef  TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS	mep_section_type_flags
#undef  TARGET_ASM_NAMED_SECTION
#define TARGET_ASM_NAMED_SECTION	mep_asm_named_section
#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS		mep_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN		mep_expand_builtin
#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST	mep_adjust_cost
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE		mep_issue_rate
#undef  TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER		mep_sched_reorder
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING	mep_strip_name_encoding
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION	mep_select_section
#undef  TARGET_ASM_UNIQUE_SECTION
#define TARGET_ASM_UNIQUE_SECTION	mep_unique_section
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO	mep_encode_section_info
#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL	mep_function_ok_for_sibcall
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS		mep_rtx_cost
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST 		mep_address_cost
#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG  mep_reorg
#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS	mep_setup_incoming_varargs
#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE        mep_pass_by_reference
#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG             mep_function_arg
#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE     mep_function_arg_advance
#undef  TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P	mep_vector_mode_supported_p
#undef  TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE		mep_option_override
#undef  TARGET_ALLOCATE_INITIAL_VALUE
#define TARGET_ALLOCATE_INITIAL_VALUE   mep_allocate_initial_value
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS 	mep_asm_init_sections
#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY		mep_return_in_memory
#undef  TARGET_NARROW_VOLATILE_BITFIELD
#define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
#undef	TARGET_EXPAND_BUILTIN_SAVEREGS
#define	TARGET_EXPAND_BUILTIN_SAVEREGS	mep_expand_builtin_saveregs
#undef  TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST	mep_build_builtin_va_list
#undef  TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START	mep_expand_va_start
#undef	TARGET_GIMPLIFY_VA_ARG_EXPR
#define	TARGET_GIMPLIFY_VA_ARG_EXPR	mep_gimplify_va_arg_expr
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE            mep_can_eliminate
#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE	mep_conditional_register_usage
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT		mep_trampoline_init
#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P	mep_legitimate_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-mep.h"
This page took 1.634293 seconds and 5 git commands to generate.