/* Subroutines for assembler code output on the TMS320C[34]x
   Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
   Free Software Foundation, Inc.

   Contributed by Michael Hayes (m.hayes@elec.canterbury.ac.nz)
   and Herman Ten Brugge (Haj.Ten.Brugge@net.HCC.nl).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* Some output-actions in c4x.md need these.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "real.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "conditions.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "flags.h"
#include "loop.h"
#include "recog.h"
#include "c-tree.h"
#include "ggc.h"
#include "cpplib.h"
#include "c-lex.h"
#include "c-pragma.h"
#include "toplev.h"
#include "c4x-protos.h"
#include "target.h"
#include "target-def.h"

rtx smulhi3_libfunc;
rtx umulhi3_libfunc;
rtx fix_truncqfhi2_libfunc;
rtx fixuns_truncqfhi2_libfunc;
rtx fix_trunchfhi2_libfunc;
rtx fixuns_trunchfhi2_libfunc;
rtx floathiqf2_libfunc;
rtx floatunshiqf2_libfunc;
rtx floathihf2_libfunc;
rtx floatunshihf2_libfunc;

static int c4x_leaf_function;

static const char *const float_reg_names[] = FLOAT_REGISTER_NAMES;

/* Array of the smallest class containing reg number REGNO, indexed by
   REGNO.  Used by REGNO_REG_CLASS in c4x.h.  We assume that all these
   registers are available and set the class to NO_REGS for registers
   that the target switches say are unavailable.  */

enum reg_class c4x_regclass_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  R0R1_REGS,                    /* R0           QI, QF, HF      No.  */
  R0R1_REGS,                    /* R1           QI, QF, HF      No.  */
  R2R3_REGS,                    /* R2           QI, QF, HF      No.  */
  R2R3_REGS,                    /* R3           QI, QF, HF      No.  */
  EXT_LOW_REGS,                 /* R4           QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R5           QI, QF, HF      QI.  */
  EXT_LOW_REGS,                 /* R6           QI, QF, HF      QF.  */
  EXT_LOW_REGS,                 /* R7           QI, QF, HF      QF.  */
  ADDR_REGS,                    /* AR0          QI              No.  */
  ADDR_REGS,                    /* AR1          QI              No.  */
  ADDR_REGS,                    /* AR2          QI              No.  */
  ADDR_REGS,                    /* AR3          QI              QI.  */
  ADDR_REGS,                    /* AR4          QI              QI.  */
  ADDR_REGS,                    /* AR5          QI              QI.  */
  ADDR_REGS,                    /* AR6          QI              QI.  */
  ADDR_REGS,                    /* AR7          QI              QI.  */
  DP_REG,                       /* DP           QI              No.  */
  INDEX_REGS,                   /* IR0          QI              No.  */
  INDEX_REGS,                   /* IR1          QI              No.  */
  BK_REG,                       /* BK           QI              QI.  */
  SP_REG,                       /* SP           QI              No.  */
  ST_REG,                       /* ST           CC              No.  */
  NO_REGS,                      /* DIE/IE                       No.  */
  NO_REGS,                      /* IIE/IF                       No.  */
  NO_REGS,                      /* IIF/IOF                      No.  */
  INT_REGS,                     /* RS           QI              No.  */
  INT_REGS,                     /* RE           QI              No.  */
  RC_REG,                       /* RC           QI              No.  */
  EXT_REGS,                     /* R8           QI, QF, HF      QI.  */
  EXT_REGS,                     /* R9           QI, QF, HF      No.  */
  EXT_REGS,                     /* R10          QI, QF, HF      No.  */
  EXT_REGS,                     /* R11          QI, QF, HF      No.  */
};

enum machine_mode c4x_caller_save_map[FIRST_PSEUDO_REGISTER] =
{
                                /* Reg          Modes           Saved.  */
  HFmode,                       /* R0           QI, QF, HF      No.  */
  HFmode,                       /* R1           QI, QF, HF      No.  */
  HFmode,                       /* R2           QI, QF, HF      No.  */
  HFmode,                       /* R3           QI, QF, HF      No.  */
  QFmode,                       /* R4           QI, QF, HF      QI.  */
  QFmode,                       /* R5           QI, QF, HF      QI.  */
  QImode,                       /* R6           QI, QF, HF      QF.  */
  QImode,                       /* R7           QI, QF, HF      QF.  */
  QImode,                       /* AR0          QI              No.  */
  QImode,                       /* AR1          QI              No.  */
  QImode,                       /* AR2          QI              No.  */
  QImode,                       /* AR3          QI              QI.  */
  QImode,                       /* AR4          QI              QI.  */
  QImode,                       /* AR5          QI              QI.  */
  QImode,                       /* AR6          QI              QI.  */
  QImode,                       /* AR7          QI              QI.  */
  VOIDmode,                     /* DP           QI              No.  */
  QImode,                       /* IR0          QI              No.  */
  QImode,                       /* IR1          QI              No.  */
  QImode,                       /* BK           QI              QI.  */
  VOIDmode,                     /* SP           QI              No.  */
  VOIDmode,                     /* ST           CC              No.  */
  VOIDmode,                     /* DIE/IE                       No.  */
  VOIDmode,                     /* IIE/IF                       No.  */
  VOIDmode,                     /* IIF/IOF                      No.  */
  QImode,                       /* RS           QI              No.  */
  QImode,                       /* RE           QI              No.  */
  VOIDmode,                     /* RC           QI              No.  */
  QFmode,                       /* R8           QI, QF, HF      QI.  */
  HFmode,                       /* R9           QI, QF, HF      No.  */
  HFmode,                       /* R10          QI, QF, HF      No.  */
  HFmode,                       /* R11          QI, QF, HF      No.  */
};


/* Test and compare insns in c4x.md store the information needed to
   generate branch and scc insns here.  */

struct rtx_def *c4x_compare_op0 = NULL_RTX;
struct rtx_def *c4x_compare_op1 = NULL_RTX;

const char *c4x_rpts_cycles_string;
int c4x_rpts_cycles = 0;        /* Max. cycles for RPTS.  */
const char *c4x_cpu_version_string;
int c4x_cpu_version = 40;       /* CPU version C30/31/32/33/40/44.  */

/* Pragma definitions.  */

static tree code_tree = NULL_TREE;
static tree data_tree = NULL_TREE;
static tree pure_tree = NULL_TREE;
static tree noreturn_tree = NULL_TREE;
static tree interrupt_tree = NULL_TREE;

/* Forward declarations.  */
static void c4x_add_gc_roots PARAMS ((void));
static int c4x_isr_reg_used_p PARAMS ((unsigned int));
static int c4x_leaf_function_p PARAMS ((void));
static int c4x_assembler_function_p PARAMS ((void));
static int c4x_immed_float_p PARAMS ((rtx));
static int c4x_a_register PARAMS ((rtx));
static int c4x_x_register PARAMS ((rtx));
static int c4x_immed_int_constant PARAMS ((rtx));
static int c4x_immed_float_constant PARAMS ((rtx));
static int c4x_K_constant PARAMS ((rtx));
static int c4x_N_constant PARAMS ((rtx));
static int c4x_O_constant PARAMS ((rtx));
static int c4x_R_indirect PARAMS ((rtx));
static int c4x_S_indirect PARAMS ((rtx));
static void c4x_S_address_parse PARAMS ((rtx, int *, int *, int *, int *));
static int c4x_valid_operands PARAMS ((enum rtx_code, rtx *,
                                       enum machine_mode, int));
static int c4x_arn_reg_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static int c4x_arn_mem_operand PARAMS ((rtx, enum machine_mode, unsigned int));
static void c4x_check_attribute PARAMS ((const char *, tree, tree, tree *));
static int c4x_parse_pragma PARAMS ((const char *, tree *, tree *));
static int c4x_r11_set_p PARAMS ((rtx));
static int c4x_rptb_valid_p PARAMS ((rtx, rtx));
static int c4x_label_ref_used_p PARAMS ((rtx, rtx));
static tree c4x_handle_fntype_attribute PARAMS ((tree *, tree, tree, int, bool *));
const struct attribute_spec c4x_attribute_table[];
static void c4x_insert_attributes PARAMS ((tree, tree *));
static void c4x_asm_named_section PARAMS ((const char *, unsigned int));
static int c4x_adjust_cost PARAMS ((rtx, rtx, rtx, int));

/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE c4x_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES c4x_insert_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS c4x_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN c4x_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST c4x_adjust_cost

struct gcc_target targetm = TARGET_INITIALIZER;

/* Called to register all of our global variables with the garbage
   collector.  */

static void
c4x_add_gc_roots ()
{
  ggc_add_rtx_root (&c4x_compare_op0, 1);
  ggc_add_rtx_root (&c4x_compare_op1, 1);
  ggc_add_tree_root (&code_tree, 1);
  ggc_add_tree_root (&data_tree, 1);
  ggc_add_tree_root (&pure_tree, 1);
  ggc_add_tree_root (&noreturn_tree, 1);
  ggc_add_tree_root (&interrupt_tree, 1);
  ggc_add_rtx_root (&smulhi3_libfunc, 1);
  ggc_add_rtx_root (&umulhi3_libfunc, 1);
  ggc_add_rtx_root (&fix_truncqfhi2_libfunc, 1);
  ggc_add_rtx_root (&fixuns_truncqfhi2_libfunc, 1);
  ggc_add_rtx_root (&fix_trunchfhi2_libfunc, 1);
  ggc_add_rtx_root (&fixuns_trunchfhi2_libfunc, 1);
  ggc_add_rtx_root (&floathiqf2_libfunc, 1);
  ggc_add_rtx_root (&floatunshiqf2_libfunc, 1);
  ggc_add_rtx_root (&floathihf2_libfunc, 1);
  ggc_add_rtx_root (&floatunshihf2_libfunc, 1);
}


/* Override command line options.
   Called once after all options have been parsed.
   Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
c4x_override_options ()
{
  if (c4x_rpts_cycles_string)
    c4x_rpts_cycles = atoi (c4x_rpts_cycles_string);
  else
    c4x_rpts_cycles = 0;

  if (TARGET_C30)
    c4x_cpu_version = 30;
  else if (TARGET_C31)
    c4x_cpu_version = 31;
  else if (TARGET_C32)
    c4x_cpu_version = 32;
  else if (TARGET_C33)
    c4x_cpu_version = 33;
  else if (TARGET_C40)
    c4x_cpu_version = 40;
  else if (TARGET_C44)
    c4x_cpu_version = 44;
  else
    c4x_cpu_version = 40;

  /* -mcpu=xx overrides -m40 etc.  */
  if (c4x_cpu_version_string)
    {
      const char *p = c4x_cpu_version_string;

      /* Also allow -mcpu=c30 etc.  */
      if (*p == 'c' || *p == 'C')
        p++;
      c4x_cpu_version = atoi (p);
    }

  target_flags &= ~(C30_FLAG | C31_FLAG | C32_FLAG | C33_FLAG |
                    C40_FLAG | C44_FLAG);

  switch (c4x_cpu_version)
    {
    case 30: target_flags |= C30_FLAG; break;
    case 31: target_flags |= C31_FLAG; break;
    case 32: target_flags |= C32_FLAG; break;
    case 33: target_flags |= C33_FLAG; break;
    case 40: target_flags |= C40_FLAG; break;
    case 44: target_flags |= C44_FLAG; break;
    default:
      warning ("Unknown CPU version %d, using 40.\n", c4x_cpu_version);
      c4x_cpu_version = 40;
      target_flags |= C40_FLAG;
    }

  if (TARGET_C30 || TARGET_C31 || TARGET_C32 || TARGET_C33)
    target_flags |= C3X_FLAG;
  else
    target_flags &= ~C3X_FLAG;

  /* Convert foo / 8.0 into foo * 0.125, etc.  */
  set_fast_math_flags ();

  /* We should phase out the following at some stage.
     This provides compatibility with the old -mno-aliases option.  */
  if (! TARGET_ALIASES && ! flag_argument_noalias)
    flag_argument_noalias = 1;

  /* Register global variables with the garbage collector.  */
  c4x_add_gc_roots ();
}

/* This is called before c4x_override_options.  */

void
c4x_optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
  /* Scheduling before register allocation can screw up global
     register allocation, especially for functions that use MPY||ADD
     instructions.  The benefit we get by scheduling before
     register allocation is probably marginal anyhow.  */
  flag_schedule_insns = 0;
}

/* Write an ASCII string.  */

#define C4X_ASCII_LIMIT 40

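/* As a rough illustration of the format produced below: for the bytes
   'H', 'i', '"', '\n' and a non-TI assembler this emits

        .byte   "Hi\"",10

   i.e. printable characters are collected into quoted strings (with "
   and \ escaped) and anything else falls back to its decimal value.
   When TARGET_TI is set the special characters are also emitted
   numerically and long lines are split at column 80.  */
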
void
c4x_output_ascii (stream, ptr, len)
     FILE *stream;
     const char *ptr;
     int len;
{
  char sbuf[C4X_ASCII_LIMIT + 1];
  int s, l, special, first = 1, onlys;

  if (len)
    fprintf (stream, "\t.byte\t");

  for (s = l = 0; len > 0; --len, ++ptr)
    {
      onlys = 0;

      /* Escape " and \ with a \".  */
      special = *ptr == '\"' || *ptr == '\\';

      /* If printable - add to buff.  */
      if ((! TARGET_TI || ! special) && *ptr >= 0x20 && *ptr < 0x7f)
        {
          if (special)
            sbuf[s++] = '\\';
          sbuf[s++] = *ptr;
          if (s < C4X_ASCII_LIMIT - 1)
            continue;
          onlys = 1;
        }
      if (s)
        {
          if (first)
            first = 0;
          else
            {
              fputc (',', stream);
              l++;
            }

          sbuf[s] = 0;
          fprintf (stream, "\"%s\"", sbuf);
          l += s + 2;
          if (TARGET_TI && l >= 80 && len > 1)
            {
              fprintf (stream, "\n\t.byte\t");
              first = 1;
              l = 0;
            }

          s = 0;
        }
      if (onlys)
        continue;

      if (first)
        first = 0;
      else
        {
          fputc (',', stream);
          l++;
        }

      fprintf (stream, "%d", *ptr);
      l += 3;
      if (TARGET_TI && l >= 80 && len > 1)
        {
          fprintf (stream, "\n\t.byte\t");
          first = 1;
          l = 0;
        }
    }
  if (s)
    {
      if (! first)
        fputc (',', stream);

      sbuf[s] = 0;
      fprintf (stream, "\"%s\"", sbuf);
      s = 0;
    }
  fputc ('\n', stream);
}

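/* For instance, because HImode (long long) values need an even/odd
   register pair, c4x_hard_regno_mode_ok accepts R0 but rejects R1 as
   the first register of an HImode value, and QFmode/HFmode values are
   confined to the extended-precision registers (R0-R11).  */
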
int
c4x_hard_regno_mode_ok (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  switch (mode)
    {
#if Pmode != QImode
    case Pmode:                 /* Pointer (24/32 bits).  */
#endif
    case QImode:                /* Integer (32 bits).  */
      return IS_INT_REGNO (regno);

    case QFmode:                /* Float, Double (32 bits).  */
    case HFmode:                /* Long Double (40 bits).  */
      return IS_EXT_REGNO (regno);

    case CCmode:                /* Condition Codes.  */
    case CC_NOOVmode:           /* Condition Codes.  */
      return IS_ST_REGNO (regno);

    case HImode:                /* Long Long (64 bits).  */
      /* We need two registers to store long longs.  Note that
         it is much easier to constrain the first register
         to start on an even boundary.  */
      return IS_INT_REGNO (regno)
        && IS_INT_REGNO (regno + 1)
        && (regno & 1) == 0;

    default:
      return 0;                 /* We don't support these modes.  */
    }

  return 0;
}

/* Return non-zero if REGNO1 can be renamed to REGNO2.  */
int
c4x_hard_regno_rename_ok (regno1, regno2)
     unsigned int regno1;
     unsigned int regno2;
{
  /* We cannot copy call saved registers from mode QI into QF or from
     mode QF into QI.  */
  if (IS_FLOAT_CALL_SAVED_REGNO (regno1) && IS_INT_CALL_SAVED_REGNO (regno2))
    return 0;
  if (IS_INT_CALL_SAVED_REGNO (regno1) && IS_FLOAT_CALL_SAVED_REGNO (regno2))
    return 0;
  /* We cannot copy from an extended (40 bit) register to a standard
     (32 bit) register because we only set the condition codes for
     extended registers.  */
  if (IS_EXT_REGNO (regno1) && ! IS_EXT_REGNO (regno2))
    return 0;
  if (IS_EXT_REGNO (regno2) && ! IS_EXT_REGNO (regno1))
    return 0;
  return 1;
}

/* The TI C3x C compiler register argument runtime model uses 6 registers,
   AR2, R2, R3, RC, RS, RE.

   The first two floating point arguments (float, double, long double)
   that are found scanning from left to right are assigned to R2 and R3.

   The remaining integer (char, short, int, long) or pointer arguments
   are assigned to the remaining registers in the order AR2, R2, R3,
   RC, RS, RE when scanning left to right, except for the last named
   argument prior to an ellipsis denoting variable number of
   arguments.  We don't have to worry about the latter condition since
   function.c treats the last named argument as anonymous (unnamed).

   All arguments that cannot be passed in registers are pushed onto
   the stack in reverse order (right to left).  GCC handles that for us.

   c4x_init_cumulative_args() is called at the start, so we can parse
   the args to see how many floating point arguments and how many
   integer (or pointer) arguments there are.  c4x_function_arg() is
   then called (sometimes repeatedly) for each argument (parsed left
   to right) to obtain the register to pass the argument in, or zero
   if the argument is to be passed on the stack.  Once the compiler is
   happy, c4x_function_arg_advance() is called.

   Don't use R0 to pass arguments in, we use 0 to indicate a stack
   argument.  */

static const int c4x_int_reglist[3][6] =
{
  {AR2_REGNO, R2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO},
  {AR2_REGNO, R3_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0},
  {AR2_REGNO, RC_REGNO, RS_REGNO, RE_REGNO, 0, 0}
};

static int c4x_fp_reglist[2] = {R2_REGNO, R3_REGNO};

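/* A worked example of the model above (illustrative only, derived
   from the tables): for

       void f (int a, float b, int c, float d, int e);

   the two floats b and d are passed in R2 and R3, so the integers a,
   c and e take AR2, RC and RS from c4x_int_reglist[2], the row used
   when two floats are already passed in registers.  */
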
/* Initialize a variable CUM of type CUMULATIVE_ARGS for a call to a
   function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.  */

void
c4x_init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;      /* Argument info to initialize.  */
     tree fntype;               /* Tree ptr for function decl.  */
     rtx libname;               /* SYMBOL_REF of library name or 0.  */
{
  tree param, next_param;

  cum->floats = cum->ints = 0;
  cum->init = 0;
  cum->var = 0;
  cum->args = 0;

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "\nc4x_init_cumulative_args (");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);

          fprintf (stderr, "fntype code = %s, ret code = %s",
                   tree_code_name[(int) TREE_CODE (fntype)],
                   tree_code_name[(int) TREE_CODE (ret_type)]);
        }
      else
        fprintf (stderr, "no fntype");

      if (libname)
        fprintf (stderr, ", libname = %s", XSTR (libname, 0));
    }

  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));

  for (param = fntype ? TYPE_ARG_TYPES (fntype) : 0;
       param; param = next_param)
    {
      tree type;

      next_param = TREE_CHAIN (param);

      type = TREE_VALUE (param);
      if (type && type != void_type_node)
        {
          enum machine_mode mode;

          /* If the last arg doesn't have void type then we have
             variable arguments.  */
          if (! next_param)
            cum->var = 1;

          if ((mode = TYPE_MODE (type)))
            {
              if (! MUST_PASS_IN_STACK (mode, type))
                {
                  /* Look for float, double, or long double argument.  */
                  if (mode == QFmode || mode == HFmode)
                    cum->floats++;
                  /* Look for integer, enumeral, boolean, char, or pointer
                     argument.  */
                  else if (mode == QImode || mode == Pmode)
                    cum->ints++;
                }
            }
          cum->args++;
        }
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "%s%s, args = %d)\n",
             cum->prototype ? ", prototype" : "",
             cum->var ? ", variable args" : "",
             cum->args);
}

/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
c4x_function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;      /* Current arg information.  */
     enum machine_mode mode;    /* Current arg mode.  */
     tree type;                 /* Type of the arg or 0 if lib support.  */
     int named;                 /* Whether or not the argument was named.  */
{
  if (TARGET_DEBUG)
    fprintf (stderr, "c4x_function_adv(mode=%s, named=%d)\n\n",
             GET_MODE_NAME (mode), named);
  if (! TARGET_MEMPARM
      && named
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* Handle libcall arguments.  */
      if (mode == QFmode || mode == HFmode)
        cum->floats++;
      else if (mode == QImode || mode == Pmode)
        cum->ints++;
    }
  return;
}

/* Define where to put the arguments to a function.  Value is zero to
   push the argument on the stack, or a hard register in which to
   store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

struct rtx_def *
c4x_function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;      /* Current arg information.  */
     enum machine_mode mode;    /* Current arg mode.  */
     tree type;                 /* Type of the arg or 0 if lib support.  */
     int named;                 /* != 0 for normal args, == 0 for ... args.  */
{
  int reg = 0;                  /* Default to passing argument on stack.  */

  if (! cum->init)
    {
      /* We can handle at most 2 floats in R2, R3.  */
      cum->maxfloats = (cum->floats > 2) ? 2 : cum->floats;

      /* We can handle at most 6 integers minus number of floats passed
         in registers.  */
      cum->maxints = (cum->ints > 6 - cum->maxfloats) ?
        6 - cum->maxfloats : cum->ints;

      /* If there is no prototype, assume all the arguments are integers.  */
      if (! cum->prototype)
        cum->maxints = 6;

      cum->ints = cum->floats = 0;
      cum->init = 1;
    }

  /* This marks the last argument.  We don't need to pass this through
     to the call insn.  */
  if (type == void_type_node)
    return 0;

  if (! TARGET_MEMPARM
      && named
      && type
      && ! MUST_PASS_IN_STACK (mode, type))
    {
      /* Look for float, double, or long double argument.  */
      if (mode == QFmode || mode == HFmode)
        {
          if (cum->floats < cum->maxfloats)
            reg = c4x_fp_reglist[cum->floats];
        }
      /* Look for integer, enumeral, boolean, char, or pointer argument.  */
      else if (mode == QImode || mode == Pmode)
        {
          if (cum->ints < cum->maxints)
            reg = c4x_int_reglist[cum->maxfloats][cum->ints];
        }
    }
  else if (! TARGET_MEMPARM && ! type)
    {
      /* We could use a different argument calling model for libcalls,
         since we're only calling functions in libgcc.  Thus we could
         pass arguments for long longs in registers rather than on the
         stack.  In the meantime, use the odd TI format.  We make the
         assumption that we won't have more than two floating point
         args, six integer args, and that all the arguments are of the
         same mode.  */
      if (mode == QFmode || mode == HFmode)
        reg = c4x_fp_reglist[cum->floats];
      else if (mode == QImode || mode == Pmode)
        reg = c4x_int_reglist[0][cum->ints];
    }

  if (TARGET_DEBUG)
    {
      fprintf (stderr, "c4x_function_arg(mode=%s, named=%d",
               GET_MODE_NAME (mode), named);
      if (reg)
        fprintf (stderr, ", reg=%s", reg_names[reg]);
      else
        fprintf (stderr, ", stack");
      fprintf (stderr, ")\n");
    }
  if (reg)
    return gen_rtx_REG (mode, reg);
  else
    return NULL_RTX;
}

void
c4x_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg;
{
  nextarg = plus_constant (nextarg, stdarg_p ? 0 : UNITS_PER_WORD * 2);

  std_expand_builtin_va_start (stdarg_p, valist, nextarg);
}

/* C[34]x arguments grow in weird ways (downwards) that the standard
   varargs stuff can't handle.  */
rtx
c4x_va_arg (valist, type)
     tree valist, type;
{
  tree t;

  t = build (PREDECREMENT_EXPR, TREE_TYPE (valist), valist,
             build_int_2 (int_size_in_bytes (type), 0));
  TREE_SIDE_EFFECTS (t) = 1;

  return expand_expr (t, NULL_RTX, Pmode, EXPAND_NORMAL);
}

static int
c4x_isr_reg_used_p (regno)
     unsigned int regno;
{
  /* Don't save/restore FP or ST, we handle them separately.  */
  if (regno == FRAME_POINTER_REGNUM
      || IS_ST_REGNO (regno))
    return 0;

  /* We could be a little smarter about saving/restoring DP.
     We'll only save it for the big memory model or if
     we're paranoid. ;-)  */
  if (IS_DP_REGNO (regno))
    return ! TARGET_SMALL || TARGET_PARANOID;

  /* Only save/restore regs in leaf function that are used.  */
  if (c4x_leaf_function)
    return regs_ever_live[regno] && fixed_regs[regno] == 0;

  /* Only save/restore regs that are used by the ISR and regs
     that are likely to be used by functions the ISR calls
     if they are not fixed.  */
  return IS_EXT_REGNO (regno)
    || ((regs_ever_live[regno] || call_used_regs[regno])
        && fixed_regs[regno] == 0);
}

static int
c4x_leaf_function_p ()
{
  /* A leaf function makes no calls, so we only need
     to save/restore the registers we actually use.
     For the global variable leaf_function to be set, we need
     to define LEAF_REGISTERS and all that it entails.
     Let's check ourselves...  */

  if (lookup_attribute ("leaf_pretend",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Use the leaf_pretend attribute at your own risk.  This is a hack
     to speed up ISRs that call a function infrequently where the
     overhead of saving and restoring the additional registers is not
     warranted.  You must save and restore the additional registers
     required by the called function.  Caveat emptor.  Here's enough
     rope...  */

  if (leaf_function_p ())
    return 1;

  return 0;
}

static int
c4x_assembler_function_p ()
{
  tree type;

  type = TREE_TYPE (current_function_decl);
  return (lookup_attribute ("assembler", TYPE_ATTRIBUTES (type)) != NULL)
    || (lookup_attribute ("naked", TYPE_ATTRIBUTES (type)) != NULL);
}

int
c4x_interrupt_function_p ()
{
  if (lookup_attribute ("interrupt",
                        TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
    return 1;

  /* Look for TI style c_intnn.  */
  return current_function_name[0] == 'c'
    && current_function_name[1] == '_'
    && current_function_name[2] == 'i'
    && current_function_name[3] == 'n'
    && current_function_name[4] == 't'
    && ISDIGIT (current_function_name[5])
    && ISDIGIT (current_function_name[6]);
}

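/* Emit the function prologue.  In outline: interrupt functions push
   ST, set up AR3 as a frame pointer when they have a frame, save
   every register c4x_isr_reg_used_p approves of (DP via push_dp) and
   clear the repeat-mode flag if the block-repeat registers are live;
   ordinary functions push AR3 when a frame pointer is needed, bump SP
   by the frame size (building the constant in R1 when it exceeds
   32767 words) and then push the call-saved registers in use.  */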
void
c4x_expand_prologue ()
{
  unsigned int regno;
  int size = get_frame_size ();
  rtx insn;

  /* In functions where ar3 is not used but frame pointers are still
     specified, frame pointers are not adjusted (if >= -O2) and this
     is used so it won't needlessly push the frame pointer.  */
  int dont_push_ar3;

  /* For __assembler__ function don't build a prologue.  */
  if (c4x_assembler_function_p ())
    {
      return;
    }

#ifdef FUNCTION_BLOCK_PROFILER_EXIT
  if (profile_block_flag == 2)
    {
      FUNCTION_BLOCK_PROFILER_EXIT
    }
#endif

  /* For __interrupt__ function build specific prologue.  */
  if (c4x_interrupt_function_p ())
    {
      c4x_leaf_function = c4x_leaf_function_p ();

      insn = emit_insn (gen_push_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      if (size)
        {
          insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                       gen_rtx_REG (QImode, SP_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
          /* We require that an ISR uses fewer than 32768 words of
             local variables, otherwise we have to go to lots of
             effort to save a register, load it with the desired size,
             adjust the stack pointer, and then restore the modified
             register.  Frankly, I think it is a poor ISR that
             requires more than 32767 words of local temporary
             storage!  */
          if (size > 32767)
            error ("ISR %s requires %d words of local vars, max is 32767.",
                   current_function_name, size);

          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (c4x_isr_reg_used_p (regno))
            {
              if (regno == DP_REGNO)
                {
                  insn = emit_insn (gen_push_dp ());
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (IS_EXT_REGNO (regno))
                    {
                      insn = emit_insn (gen_pushqf
                                        (gen_rtx_REG (QFmode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
            }
        }
      /* We need to clear the repeat mode flag if the ISR is
         going to use a RPTB instruction or uses the RC, RS, or RE
         registers.  */
      if (regs_ever_live[RC_REGNO]
          || regs_ever_live[RS_REGNO]
          || regs_ever_live[RE_REGNO])
        {
          insn = emit_insn (gen_andn_st (GEN_INT (~0x100)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      /* Reload DP reg if we are paranoid about some turkey
         violating small memory model rules.  */
      if (TARGET_SMALL && TARGET_PARANOID)
        {
          insn = emit_insn (gen_set_ldp_prologue
                            (gen_rtx_REG (QImode, DP_REGNO),
                             gen_rtx_SYMBOL_REF (QImode, "data_sec")));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, AR3_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, AR3_REGNO),
                                           gen_rtx_REG (QImode, SP_REGNO)));
              RTX_FRAME_RELATED_P (insn) = 1;
              dont_push_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used, we don't need to push it.  */
              dont_push_ar3 = 1;
            }
        }
      else
        {
          /* If we use ar3, we need to push it.  */
          dont_push_ar3 = 0;
          if ((size != 0) || (current_function_args_size != 0))
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to increment SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT (size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                             gen_rtx_REG (QImode, R1_REGNO),
                                             GEN_INT (-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R1_REGNO),
                                           GEN_INT (size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R1_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO),
                                        GEN_INT (size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R1_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32767 words, so we can directly
             add the number.  */
          insn = emit_insn (gen_addqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_pushqi
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  insn = emit_insn (gen_pushqf (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              else if ((! dont_push_ar3) || (regno != AR3_REGNO))
                {
                  insn = emit_insn (gen_pushqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }
    }
}

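/* Emit the function epilogue; it mirrors c4x_expand_prologue above,
   popping the saved registers in reverse order, unwinding SP (again
   via R3 for frames over 32767 words) and finishing with the
   appropriate return pattern (return_from_interrupt_epilogue for
   ISRs, return_indirect_internal when the return address was already
   fetched into R2).  */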
void
c4x_expand_epilogue ()
{
  int regno;
  int jump = 0;
  int dont_pop_ar3;
  rtx insn;
  int size = get_frame_size ();

  /* For __assembler__ function build no epilogue.  */
  if (c4x_assembler_function_p ())
    {
      insn = emit_jump_insn (gen_return_from_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
      return;
    }

  /* For __interrupt__ function build specific epilogue.  */
  if (c4x_interrupt_function_p ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; --regno)
        {
          if (! c4x_isr_reg_used_p (regno))
            continue;
          if (regno == DP_REGNO)
            {
              insn = emit_insn (gen_pop_dp ());
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              /* We have to use unspec because the compiler will delete insns
                 that are not call-saved.  */
              if (IS_EXT_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
              insn = emit_insn (gen_popqi_unspec (gen_rtx_REG (QImode, regno)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
      if (size)
        {
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_popqi
                            (gen_rtx_REG (QImode, AR3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      insn = emit_insn (gen_pop_st ());
      RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_jump_insn (gen_return_from_interrupt_epilogue ());
      RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    {
      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              insn = emit_insn
                (gen_movqi (gen_rtx_REG (QImode, R2_REGNO),
                            gen_rtx_MEM (QImode,
                                         gen_rtx_PLUS
                                         (QImode, gen_rtx_REG (QImode,
                                                               AR3_REGNO),
                                          GEN_INT (-1)))));
              RTX_FRAME_RELATED_P (insn) = 1;

              /* We already have the return value and the fp,
                 so we need to add those to the stack.  */
              size += 2;
              jump = 1;
              dont_pop_ar3 = 1;
            }
          else
            {
              /* Since ar3 is not used for anything, we don't need to
                 pop it.  */
              dont_pop_ar3 = 1;
            }
        }
      else
        {
          dont_pop_ar3 = 0;     /* If we use ar3, we need to pop it.  */
          if (size || current_function_args_size)
            {
              /* If we are omitting the frame pointer, we still have
                 to make space for it so the offsets are correct
                 unless we don't use anything on the stack at all.  */
              size += 1;
            }
        }

      /* Now restore the saved registers, putting in the delayed branch
         where required.  */
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        {
          if (regs_ever_live[regno] && ! call_used_regs[regno])
            {
              if (regno == AR3_REGNO && dont_pop_ar3)
                continue;

              if (IS_FLOAT_CALL_SAVED_REGNO (regno))
                {
                  insn = emit_insn (gen_popqf_unspec
                                    (gen_rtx_REG (QFmode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                  if (TARGET_PRESERVE_FLOAT)
                    {
                      insn = emit_insn (gen_popqi_unspec
                                        (gen_rtx_REG (QImode, regno)));
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                }
              else
                {
                  insn = emit_insn (gen_popqi (gen_rtx_REG (QImode, regno)));
                  RTX_FRAME_RELATED_P (insn) = 1;
                }
            }
        }

      if (frame_pointer_needed)
        {
          if ((size != 0)
              || (current_function_args_size != 0)
              || (optimize < 2))
            {
              /* Restore the old FP.  */
              insn = emit_insn
                (gen_movqi
                 (gen_rtx_REG (QImode, AR3_REGNO),
                  gen_rtx_MEM (QImode, gen_rtx_REG (QImode, AR3_REGNO))));

              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }

      if (size > 32767)
        {
          /* Local vars are too big, it will take multiple operations
             to decrement SP.  */
          if (TARGET_C3X)
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT (size >> 16)));
              RTX_FRAME_RELATED_P (insn) = 1;
              insn = emit_insn (gen_lshrqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                             gen_rtx_REG (QImode, R3_REGNO),
                                             GEN_INT (-16)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          else
            {
              insn = emit_insn (gen_movqi (gen_rtx_REG (QImode, R3_REGNO),
                                           GEN_INT (size & ~0xffff)));
              RTX_FRAME_RELATED_P (insn) = 1;
            }
          insn = emit_insn (gen_iorqi3 (gen_rtx_REG (QImode, R3_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO),
                                        GEN_INT (size & 0xffff)));
          RTX_FRAME_RELATED_P (insn) = 1;
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, R3_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else if (size != 0)
        {
          /* Local vars take up less than 32768 words, so we can directly
             subtract the number.  */
          insn = emit_insn (gen_subqi3 (gen_rtx_REG (QImode, SP_REGNO),
                                        gen_rtx_REG (QImode, SP_REGNO),
                                        GEN_INT (size)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }

      if (jump)
        {
          insn = emit_jump_insn (gen_return_indirect_internal
                                 (gen_rtx_REG (QImode, R2_REGNO)));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        {
          insn = emit_jump_insn (gen_return_from_epilogue ());
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}

int
c4x_null_epilogue_p ()
{
  int regno;

  if (reload_completed
      && ! c4x_assembler_function_p ()
      && ! c4x_interrupt_function_p ()
      && ! current_function_calls_alloca
      && ! current_function_args_size
      && ! (profile_block_flag == 2)
      && ! (optimize < 2)
      && ! get_frame_size ())
    {
      for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
        if (regs_ever_live[regno] && ! call_used_regs[regno]
            && (regno != AR3_REGNO))
          return 0;
      return 1;
    }
  return 0;
}

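/* Expand a move.  This returns nonzero, after emitting a custom
   sequence, when the move needs special handling (symbolic addresses,
   exposed DP-relative references, mixed-mode subregs, large integer
   constants); it returns zero to let the ordinary move pattern be
   used.  */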
int
c4x_emit_move_sequence (operands, mode)
     rtx *operands;
     enum machine_mode mode;
{
  rtx op0 = operands[0];
  rtx op1 = operands[1];

  if (! reload_in_progress
      && ! REG_P (op0)
      && ! REG_P (op1)
      && ! (stik_const_operand (op1, mode) && ! push_operand (op0, mode)))
    op1 = force_reg (mode, op1);

  if (GET_CODE (op1) == LO_SUM
      && GET_MODE (op1) == Pmode
      && dp_reg_operand (XEXP (op1, 0), mode))
    {
      /* expand_increment will sometimes create a LO_SUM immediate
         address.  */
      op1 = XEXP (op1, 1);
    }
  else if (symbolic_address_operand (op1, mode))
    {
      if (TARGET_LOAD_ADDRESS)
        {
          /* Alias analysis seems to do a better job if we force
             constant addresses to memory after reload.  */
          emit_insn (gen_load_immed_address (op0, op1));
          return 1;
        }
      else
        {
          /* Stick symbol or label address into the constant pool.  */
          op1 = force_const_mem (Pmode, op1);
        }
    }
  else if (mode == HFmode && CONSTANT_P (op1) && ! LEGITIMATE_CONSTANT_P (op1))
    {
      /* We could be a lot smarter about loading some of these
         constants...  */
      op1 = force_const_mem (mode, op1);
    }

  /* Convert (MEM (SYMREF)) to a (MEM (LO_SUM (REG) (SYMREF)))
     and emit associated (HIGH (SYMREF)) if large memory model.
     c4x_legitimize_address could be used to do this,
     perhaps by calling validize_address.  */
  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op1) == MEM
      && symbolic_address_operand (XEXP (op1, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op1, 0)));
      op1 = change_address (op1, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op1, 0)));
    }

  if (TARGET_EXPOSE_LDP
      && ! (reload_in_progress || reload_completed)
      && GET_CODE (op0) == MEM
      && symbolic_address_operand (XEXP (op0, 0), Pmode))
    {
      rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);
      if (! TARGET_SMALL)
        emit_insn (gen_set_ldp (dp_reg, XEXP (op0, 0)));
      op0 = change_address (op0, mode,
                            gen_rtx_LO_SUM (Pmode, dp_reg, XEXP (op0, 0)));
    }

  if (GET_CODE (op0) == SUBREG
      && mixed_subreg_operand (op0, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op0)) == QImode)
        op0 = SUBREG_REG (op0);
      else if (GET_MODE (SUBREG_REG (op0)) == HImode)
        {
          op0 = copy_rtx (op0);
          PUT_MODE (op0, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_storeqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (GET_CODE (op1) == SUBREG
      && mixed_subreg_operand (op1, mode))
    {
      /* We should only generate these mixed mode patterns
         during RTL generation.  If we need to do it later on
         then we'll have to emit patterns that won't clobber CC.  */
      if (reload_in_progress || reload_completed)
        abort ();
      if (GET_MODE (SUBREG_REG (op1)) == QImode)
        op1 = SUBREG_REG (op1);
      else if (GET_MODE (SUBREG_REG (op1)) == HImode)
        {
          op1 = copy_rtx (op1);
          PUT_MODE (op1, QImode);
        }
      else
        abort ();

      if (mode == QFmode)
        emit_insn (gen_loadqf_int_clobber (op0, op1));
      else
        abort ();
      return 1;
    }

  if (mode == QImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode)
      && ! IS_INT16_CONST (INTVAL (op1))
      && ! IS_HIGH_CONST (INTVAL (op1)))
    {
      emit_insn (gen_loadqi_big_constant (op0, op1));
      return 1;
    }

  if (mode == HImode
      && reg_operand (op0, mode)
      && const_int_operand (op1, mode))
    {
      emit_insn (gen_loadhi_big_constant (op0, op1));
      return 1;
    }

  /* Adjust operands in case we have modified them.  */
  operands[0] = op0;
  operands[1] = op1;

  /* Emit normal pattern.  */
  return 0;
}

void
c4x_emit_libcall (libcall, code, dmode, smode, noperands, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode dmode;
     enum machine_mode smode;
     int noperands;
     rtx *operands;
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  switch (noperands)
    {
    case 2:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 1,
                                     operands[1], smode);
      equiv = gen_rtx (code, dmode, operands[1]);
      break;

    case 3:
      ret = emit_library_call_value (libcall, NULL_RTX, 1, dmode, 2,
                                     operands[1], smode, operands[2], smode);
      equiv = gen_rtx (code, dmode, operands[1], operands[2]);
      break;

    default:
      abort ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}

void
c4x_emit_libcall3 (libcall, code, mode, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode mode;
     rtx *operands;
{
  c4x_emit_libcall (libcall, code, mode, mode, 3, operands);
}

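/* Emit a call to a "mulhi" libfunc, i.e. one returning the high 32
   bits of the widened 64-bit product.  The equivalent RTL recorded
   below via the libcall block is
       (truncate (lshiftrt (mult (ext op1) (ext op2)) 32)),
   where ext is the sign or zero extension selected by CODE.  */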
void
c4x_emit_libcall_mulhi (libcall, code, mode, operands)
     rtx libcall;
     enum rtx_code code;
     enum machine_mode mode;
     rtx *operands;
{
  rtx ret;
  rtx insns;
  rtx equiv;

  start_sequence ();
  ret = emit_library_call_value (libcall, NULL_RTX, 1, mode, 2,
                                 operands[1], mode, operands[2], mode);
  equiv = gen_rtx_TRUNCATE (mode,
                            gen_rtx_LSHIFTRT (HImode,
                                              gen_rtx_MULT (HImode,
                                                            gen_rtx (code, HImode, operands[1]),
                                                            gen_rtx (code, HImode, operands[2])),
                                              GEN_INT (32)));
  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
}

/* Set the SYMBOL_REF_FLAG for a function decl.  However, we do not
   yet use this info.  */
void
c4x_encode_section_info (decl)
     tree decl;
{
#if 0
  if (TREE_CODE (TREE_TYPE (decl)) == FUNCTION_TYPE)
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
#else
  if (TREE_CODE (decl) == FUNCTION_DECL)
    SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
#endif
}

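/* Return nonzero if ADDR is a legitimate address for MODE.  The forms
   accepted below include, for example, (reg AR0), autoincrement
   variants such as (post_inc (reg AR0)), (plus (reg AR0) (const_int
   disp)), (plus (reg AR0) (reg IR0)) and DP-relative direct addresses
   of the form (lo_sum (reg DP) (symbol_ref ...)).  */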
int
c4x_check_legit_addr (mode, addr, strict)
     enum machine_mode mode;
     rtx addr;
     int strict;
{
  rtx base = NULL_RTX;          /* Base register (AR0-AR7).  */
  rtx indx = NULL_RTX;          /* Index register (IR0,IR1).  */
  rtx disp = NULL_RTX;          /* Displacement.  */
  enum rtx_code code;

  code = GET_CODE (addr);
  switch (code)
    {
      /* Register indirect with auto increment/decrement.  We don't
         allow SP here---push_operand should recognise an operand
         being pushed on the stack.  */

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
      if (mode != QImode && mode != QFmode)
        return 0;

    case POST_INC:
      base = XEXP (addr, 0);
      if (! REG_P (base))
        return 0;
      break;

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        if (mode != QImode && mode != QFmode)
          return 0;

        if (! REG_P (op0)
            || (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS))
          return 0;
        base = XEXP (op1, 0);
        if (base != op0)
          return 0;
        if (REG_P (XEXP (op1, 1)))
          indx = XEXP (op1, 1);
        else
          disp = XEXP (op1, 1);
      }
      break;

      /* Register indirect.  */
    case REG:
      base = addr;
      break;

      /* Register indirect with displacement or index.  */
    case PLUS:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);
        enum rtx_code code0 = GET_CODE (op0);

        switch (code0)
          {
          case REG:
            if (REG_P (op1))
              {
                base = op0;     /* Base + index.  */
                indx = op1;
                if (IS_INDEX_REG (base) || IS_ADDR_REG (indx))
                  {
                    base = op1;
                    indx = op0;
                  }
              }
            else
              {
                base = op0;     /* Base + displacement.  */
                disp = op1;
              }
            break;

          default:
            return 0;
          }
      }
      break;

      /* Direct addressing with DP register.  */
    case LO_SUM:
      {
        rtx op0 = XEXP (addr, 0);
        rtx op1 = XEXP (addr, 1);

        /* HImode and HFmode direct memory references aren't truly
           offsettable (consider case at end of data page).  We
           probably get better code by loading a pointer and using an
           indirect memory reference.  */
        if (mode == HImode || mode == HFmode)
          return 0;

        if (!REG_P (op0) || REGNO (op0) != DP_REGNO)
          return 0;

        if ((GET_CODE (op1) == SYMBOL_REF || GET_CODE (op1) == LABEL_REF))
          return 1;

        if (GET_CODE (op1) == CONST)
          return 1;
        return 0;
      }
      break;

      /* Direct addressing with some work for the assembler...  */
    case CONST:
      /* Direct addressing.  */
    case LABEL_REF:
    case SYMBOL_REF:
      if (! TARGET_EXPOSE_LDP && ! strict && mode != HFmode && mode != HImode)
        return 1;
      /* These need to be converted to a LO_SUM (...).
         LEGITIMIZE_RELOAD_ADDRESS will do this during reload.  */
      return 0;

      /* Do not allow direct memory access to absolute addresses.
         This is more pain than it's worth, especially for the
         small memory model where we can't guarantee that
         this address is within the data page---we don't want
         to modify the DP register in the small memory model,
         even temporarily, since an interrupt can sneak in...  */
    case CONST_INT:
      return 0;

      /* Indirect indirect addressing.  */
    case MEM:
      return 0;

    case CONST_DOUBLE:
      fatal_insn ("Using CONST_DOUBLE for address", addr);

    default:
      return 0;
    }

  /* Validate the base register.  */
  if (base)
    {
      /* Check that the address is offsettable for HImode and HFmode.  */
      if (indx && (mode == HImode || mode == HFmode))
        return 0;

      /* Handle DP based stuff.  */
      if (REGNO (base) == DP_REGNO)
        return 1;
      if (strict && ! REGNO_OK_FOR_BASE_P (REGNO (base)))
        return 0;
      else if (! strict && ! IS_ADDR_OR_PSEUDO_REG (base))
        return 0;
    }

  /* Now validate the index register.  */
  if (indx)
    {
      if (GET_CODE (indx) != REG)
        return 0;
      if (strict && ! REGNO_OK_FOR_INDEX_P (REGNO (indx)))
        return 0;
      else if (! strict && ! IS_INDEX_OR_PSEUDO_REG (indx))
        return 0;
    }

  /* Validate displacement.  */
  if (disp)
    {
      if (GET_CODE (disp) != CONST_INT)
        return 0;
      if (mode == HImode || mode == HFmode)
        {
          /* The offset displacement must be legitimate.  */
          if (! IS_DISP8_OFF_CONST (INTVAL (disp)))
            return 0;
        }
      else
        {
          if (! IS_DISP8_CONST (INTVAL (disp)))
            return 0;
        }
      /* Can't add an index with a disp.  */
      if (indx)
        return 0;
    }
  return 1;
}

rtx
c4x_legitimize_address (orig, mode)
     rtx orig ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
      if (mode == HImode || mode == HFmode)
        {
          /* We need to force the address into
             a register so that it is offsettable.  */
          rtx addr_reg = gen_reg_rtx (Pmode);
          emit_move_insn (addr_reg, orig);
          return addr_reg;
        }
      else
        {
          rtx dp_reg = gen_rtx_REG (Pmode, DP_REGNO);

          if (! TARGET_SMALL)
            emit_insn (gen_set_ldp (dp_reg, orig));

          return gen_rtx_LO_SUM (Pmode, dp_reg, orig);
        }
    }

  return NULL_RTX;
}

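/* A rough summary of the address costs computed by c4x_address_cost
   below: register indirect and autoincrement forms cost 1, base plus
   index costs 2, base plus displacement costs 1 to 3 depending on the
   size of the displacement, DP-relative (LO_SUM) addresses cost 3 or
   4, and bare symbolic addresses, which should not normally be
   generated directly, cost 10.  */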
/* Provide the costs of an addressing mode that contains ADDR.
   If ADDR is not a valid address, its cost is irrelevant.
   This is used in cse and loop optimisation to determine
   if it is worthwhile storing a common address into a register.
   Unfortunately, the C4x address cost depends on other operands.  */

int
c4x_address_cost (addr)
     rtx addr;
{
  switch (GET_CODE (addr))
    {
    case REG:
      return 1;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      return 1;

      /* These shouldn't be directly generated.  */
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 10;

    case LO_SUM:
      {
        rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op1) == LABEL_REF || GET_CODE (op1) == SYMBOL_REF)
          return TARGET_SMALL ? 3 : 4;

        if (GET_CODE (op1) == CONST)
          {
            rtx offset = const0_rtx;

            op1 = eliminate_constant_term (op1, &offset);

            /* ??? These costs need rethinking...  */
            if (GET_CODE (op1) == LABEL_REF)
              return 3;

            if (GET_CODE (op1) != SYMBOL_REF)
              return 4;

            if (INTVAL (offset) == 0)
              return 3;

            return 4;
          }
        fatal_insn ("c4x_address_cost: Invalid addressing mode", addr);
      }
      break;

    case PLUS:
      {
        register rtx op0 = XEXP (addr, 0);
        register rtx op1 = XEXP (addr, 1);

        if (GET_CODE (op0) != REG)
          break;

        switch (GET_CODE (op1))
          {
          default:
            break;

          case REG:
            /* This cost for REG+REG must be greater than the cost
               for REG if we want autoincrement addressing modes.  */
            return 2;

          case CONST_INT:
            /* The following tries to improve GIV combination
               in strength reduce but appears not to help.  */
            if (TARGET_DEVEL && IS_UINT5_CONST (INTVAL (op1)))
              return 1;

            if (IS_DISP1_CONST (INTVAL (op1)))
              return 1;

            if (! TARGET_C3X && IS_UINT5_CONST (INTVAL (op1)))
              return 2;

            return 3;
          }
      }
    default:
      break;
    }

  return 4;
}

rtx
c4x_gen_compare_reg (code, x, y)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  if (mode == CC_NOOVmode
      && (code == LE || code == GE || code == LT || code == GT))
    return NULL_RTX;

  cc_reg = gen_rtx_REG (mode, ST_REGNO);
  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));
  return cc_reg;
}

1842char *
4271f003 1843c4x_output_cbranch (form, seq)
8d485e2d 1844 const char *form;
4271f003 1845 rtx seq;
cb0ca284
MH
1846{
1847 int delayed = 0;
1848 int annultrue = 0;
1849 int annulfalse = 0;
1850 rtx delay;
1851 char *cp;
4271f003 1852 static char str[100];
cb0ca284
MH
1853
1854 if (final_sequence)
1855 {
1856 delay = XVECEXP (final_sequence, 0, 1);
4ddb3ea6
MH
1857 delayed = ! INSN_ANNULLED_BRANCH_P (seq);
1858 annultrue = INSN_ANNULLED_BRANCH_P (seq) && ! INSN_FROM_TARGET_P (delay);
4271f003 1859 annulfalse = INSN_ANNULLED_BRANCH_P (seq) && INSN_FROM_TARGET_P (delay);
cb0ca284 1860 }
4271f003
MH
1861 strcpy (str, form);
1862 cp = &str [strlen (str)];
cb0ca284
MH
1863 if (delayed)
1864 {
1865 *cp++ = '%';
1866 *cp++ = '#';
1867 }
1868 if (annultrue)
1869 {
1870 *cp++ = 'a';
1871 *cp++ = 't';
1872 }
1873 if (annulfalse)
1874 {
1875 *cp++ = 'a';
1876 *cp++ = 'f';
1877 }
1878 *cp++ = '\t';
1879 *cp++ = '%';
1880 *cp++ = 'l';
1881 *cp++ = '1';
1882 *cp = 0;
1883 return str;
1884}
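/* A sketch of the templates produced (FORM "b%0" is just an example):

       "b%0\t%l1"       branch with an unfilled delay slot
       "b%0%#\t%l1"     filled delay slot, not annulled (%# prints 'd')
       "b%0at\t%l1"     annul-true delay slot
       "b%0af\t%l1"     annul-false delay slot  */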
1885
cb0ca284
MH
1886void
1887c4x_print_operand (file, op, letter)
975ab131
MH
1888 FILE *file; /* File to write to. */
1889 rtx op; /* Operand to print. */
1890 int letter; /* %<letter> or 0. */
cb0ca284
MH
1891{
1892 rtx op1;
1893 enum rtx_code code;
1894
1895 switch (letter)
1896 {
975ab131 1897 case '#': /* Delayed. */
cb0ca284
MH
1898 if (final_sequence)
1899 asm_fprintf (file, "d");
1900 return;
1901 }
1902
1903 code = GET_CODE (op);
1904 switch (letter)
1905 {
975ab131 1906 case 'A': /* Direct address. */
dfafcb4d 1907 if (code == CONST_INT || code == SYMBOL_REF || code == CONST)
cb0ca284
MH
1908 asm_fprintf (file, "@");
1909 break;
1910
975ab131 1911 case 'H': /* Sethi. */
50c33087
MH
1912 output_addr_const (file, op);
1913 return;
cb0ca284 1914
975ab131 1915 case 'I': /* Reversed condition. */
cb0ca284
MH
1916 code = reverse_condition (code);
1917 break;
1918
975ab131 1919 case 'L': /* Log 2 of constant. */
cb0ca284
MH
1920 if (code != CONST_INT)
1921 fatal_insn ("c4x_print_operand: %%L inconsistency", op);
1922 fprintf (file, "%d", exact_log2 (INTVAL (op)));
1923 return;
1924
975ab131 1925 case 'N': /* Ones complement of small constant. */
cb0ca284
MH
1926 if (code != CONST_INT)
1927 fatal_insn ("c4x_print_operand: %%N inconsistency", op);
1928 fprintf (file, "%d", ~INTVAL (op));
1929 return;
1930
975ab131 1931 case 'K': /* Generate ldp(k) if direct address. */
4ddb3ea6 1932 if (! TARGET_SMALL
cb0ca284 1933 && code == MEM
50c33087
MH
1934 && GET_CODE (XEXP (op, 0)) == LO_SUM
1935 && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
1936 && REGNO (XEXP (XEXP (op, 0), 0)) == DP_REGNO)
cb0ca284
MH
1937 {
1938 op1 = XEXP (XEXP (op, 0), 1);
1939 if (GET_CODE(op1) == CONST_INT || GET_CODE(op1) == SYMBOL_REF)
1940 {
dfafcb4d 1941 asm_fprintf (file, "\t%s\t@", TARGET_C3X ? "ldp" : "ldpk");
f6155fda 1942 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
cb0ca284
MH
1943 asm_fprintf (file, "\n");
1944 }
1945 }
1946 return;
1947
975ab131
MH
1948 case 'M': /* Generate ldp(k) if direct address. */
1949 if (! TARGET_SMALL /* Only used in asm statements. */
cb0ca284
MH
1950 && code == MEM
1951 && (GET_CODE (XEXP (op, 0)) == CONST
1952 || GET_CODE (XEXP (op, 0)) == SYMBOL_REF))
1953 {
dfafcb4d 1954 asm_fprintf (file, "%s\t@", TARGET_C3X ? "ldp" : "ldpk");
cb0ca284
MH
1955 output_address (XEXP (op, 0));
1956 asm_fprintf (file, "\n\t");
1957 }
1958 return;
1959
975ab131 1960 case 'O': /* Offset address. */
cb0ca284
MH
1961 if (code == MEM && c4x_autoinc_operand (op, Pmode))
1962 break;
1963 else if (code == MEM)
f6155fda 1964 output_address (XEXP (adjust_address (op, VOIDmode, 1), 0));
cb0ca284
MH
1965 else if (code == REG)
1966 fprintf (file, "%s", reg_names[REGNO (op) + 1]);
1967 else
1968 fatal_insn ("c4x_print_operand: %%O inconsistency", op);
1969 return;
1970
975ab131 1971 case 'C': /* Call. */
50c33087
MH
1972 break;
1973
975ab131 1974 case 'U': /* Call/callu. */
b2e9a2fd 1975 if (code != SYMBOL_REF)
50c33087 1976 asm_fprintf (file, "u");
cb0ca284
MH
1977 return;
1978
1979 default:
1980 break;
1981 }
1982
1983 switch (code)
1984 {
1985 case REG:
dfafcb4d
HB
1986 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1987 && ! TARGET_TI)
cb0ca284
MH
1988 fprintf (file, "%s", float_reg_names[REGNO (op)]);
1989 else
1990 fprintf (file, "%s", reg_names[REGNO (op)]);
1991 break;
1992
1993 case MEM:
1994 output_address (XEXP (op, 0));
1995 break;
1996
1997 case CONST_DOUBLE:
1998 {
1999 char str[30];
2000 REAL_VALUE_TYPE r;
2001
2002 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2003 REAL_VALUE_TO_DECIMAL (r, "%20f", str);
2004 fprintf (file, "%s", str);
2005 }
2006 break;
2007
2008 case CONST_INT:
2009 fprintf (file, "%d", INTVAL (op));
2010 break;
2011
2012 case NE:
2013 asm_fprintf (file, "ne");
2014 break;
2015
2016 case EQ:
2017 asm_fprintf (file, "eq");
2018 break;
2019
2020 case GE:
2021 asm_fprintf (file, "ge");
2022 break;
2023
2024 case GT:
2025 asm_fprintf (file, "gt");
2026 break;
2027
2028 case LE:
2029 asm_fprintf (file, "le");
2030 break;
2031
2032 case LT:
2033 asm_fprintf (file, "lt");
2034 break;
2035
2036 case GEU:
2037 asm_fprintf (file, "hs");
2038 break;
2039
2040 case GTU:
2041 asm_fprintf (file, "hi");
2042 break;
2043
2044 case LEU:
2045 asm_fprintf (file, "ls");
2046 break;
2047
2048 case LTU:
2049 asm_fprintf (file, "lo");
2050 break;
2051
2052 case SYMBOL_REF:
2053 output_addr_const (file, op);
2054 break;
2055
2056 case CONST:
2057 output_addr_const (file, XEXP (op, 0));
2058 break;
2059
2060 case CODE_LABEL:
2061 break;
2062
2063 default:
2064 fatal_insn ("c4x_print_operand: Bad operand case", op);
2065 break;
2066 }
2067}
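/* A few illustrative cases of the above: with OP a CONST_INT of 8, %L
   prints 3 (its log2) and %N prints -9 (its ones complement); a
   floating-point REG is printed with its FLOAT_REGISTER_NAMES entry
   unless TARGET_TI is set, in which case the ordinary register name is
   used; comparison codes print their condition suffix, e.g. GEU -> "hs"
   and LTU -> "lo".  */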
2068
2069
2070void
2071c4x_print_operand_address (file, addr)
2072 FILE *file;
2073 rtx addr;
2074{
2075 switch (GET_CODE (addr))
2076 {
2077 case REG:
2078 fprintf (file, "*%s", reg_names[REGNO (addr)]);
2079 break;
2080
2081 case PRE_DEC:
2082 fprintf (file, "*--%s", reg_names[REGNO (XEXP (addr, 0))]);
2083 break;
2084
2085 case POST_INC:
2086 fprintf (file, "*%s++", reg_names[REGNO (XEXP (addr, 0))]);
2087 break;
2088
2089 case POST_MODIFY:
2090 {
2091 rtx op0 = XEXP (XEXP (addr, 1), 0);
2092 rtx op1 = XEXP (XEXP (addr, 1), 1);
2093
2094 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2095 fprintf (file, "*%s++(%s)", reg_names[REGNO (op0)],
2096 reg_names[REGNO (op1)]);
2097 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2098 fprintf (file, "*%s++(%d)", reg_names[REGNO (op0)],
2099 INTVAL (op1));
2100 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2101 fprintf (file, "*%s--(%d)", reg_names[REGNO (op0)],
2102 -INTVAL (op1));
2103 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2104 fprintf (file, "*%s--(%s)", reg_names[REGNO (op0)],
2105 reg_names[REGNO (op1)]);
2106 else
2107 fatal_insn ("c4x_print_operand_address: Bad post_modify", addr);
2108 }
2109 break;
2110
2111 case PRE_MODIFY:
2112 {
2113 rtx op0 = XEXP (XEXP (addr, 1), 0);
2114 rtx op1 = XEXP (XEXP (addr, 1), 1);
2115
2116 if (GET_CODE (XEXP (addr, 1)) == PLUS && REG_P (op1))
2117 fprintf (file, "*++%s(%s)", reg_names[REGNO (op0)],
2118 reg_names[REGNO (op1)]);
2119 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) > 0)
2120 fprintf (file, "*++%s(%d)", reg_names[REGNO (op0)],
2121 INTVAL (op1));
2122 else if (GET_CODE (XEXP (addr, 1)) == PLUS && INTVAL (op1) < 0)
2123 fprintf (file, "*--%s(%d)", reg_names[REGNO (op0)],
2124 -INTVAL (op1));
2125 else if (GET_CODE (XEXP (addr, 1)) == MINUS && REG_P (op1))
2126 fprintf (file, "*--%s(%s)", reg_names[REGNO (op0)],
2127 reg_names[REGNO (op1)]);
2128 else
2129 fatal_insn ("c4x_print_operand_address: Bad pre_modify", addr);
2130 }
2131 break;
2132
2133 case PRE_INC:
2134 fprintf (file, "*++%s", reg_names[REGNO (XEXP (addr, 0))]);
2135 break;
2136
2137 case POST_DEC:
2138 fprintf (file, "*%s--", reg_names[REGNO (XEXP (addr, 0))]);
2139 break;
2140
2141 case PLUS: /* Indirect with displacement. */
2142 {
2143 rtx op0 = XEXP (addr, 0);
2144 rtx op1 = XEXP (addr, 1);
cb0ca284 2145
50c33087 2146 if (REG_P (op0))
cb0ca284 2147 {
50c33087 2148 if (REG_P (op1))
cb0ca284 2149 {
bc46716b 2150 if (IS_INDEX_REG (op0))
cb0ca284
MH
2151 {
2152 fprintf (file, "*+%s(%s)",
2153 reg_names[REGNO (op1)],
975ab131 2154 reg_names[REGNO (op0)]); /* Index + base. */
cb0ca284
MH
2155 }
2156 else
2157 {
2158 fprintf (file, "*+%s(%s)",
2159 reg_names[REGNO (op0)],
975ab131 2160 reg_names[REGNO (op1)]); /* Base + index. */
cb0ca284
MH
2161 }
2162 }
2163 else if (INTVAL (op1) < 0)
2164 {
2165 fprintf (file, "*-%s(%d)",
2166 reg_names[REGNO (op0)],
975ab131 2167 -INTVAL (op1)); /* Base - displacement. */
cb0ca284
MH
2168 }
2169 else
2170 {
2171 fprintf (file, "*+%s(%d)",
2172 reg_names[REGNO (op0)],
975ab131 2173 INTVAL (op1)); /* Base + displacement. */
cb0ca284
MH
2174 }
2175 }
50c33087
MH
2176 else
2177 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2178 }
2179 break;
2180
2181 case LO_SUM:
2182 {
2183 rtx op0 = XEXP (addr, 0);
2184 rtx op1 = XEXP (addr, 1);
2185
2186 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
2187 c4x_print_operand_address (file, op1);
2188 else
2189 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
cb0ca284
MH
2190 }
2191 break;
2192
2193 case CONST:
2194 case SYMBOL_REF:
2195 case LABEL_REF:
50c33087 2196 fprintf (file, "@");
cb0ca284 2197 output_addr_const (file, addr);
cb0ca284
MH
2198 break;
2199
2200 /* We shouldn't access CONST_INT addresses. */
2201 case CONST_INT:
2202
2203 default:
2204 fatal_insn ("c4x_print_operand_address: Bad operand case", addr);
2205 break;
2206 }
2207}
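/* The assembler syntax produced above looks like this (register names
   AR0/IR0 chosen arbitrarily):

       (reg AR0)                         *AR0
       (post_inc (reg AR0))              *AR0++
       (pre_dec (reg AR0))               *--AR0
       (plus (reg AR0) (const_int 5))    *+AR0(5)
       (plus (reg AR0) (const_int -5))   *-AR0(5)
       (plus (reg AR0) (reg IR0))        *+AR0(IR0)
       (post_modify ... (plus AR0 IR0))  *AR0++(IR0)
       (lo_sum (reg DP) (symbol_ref))    @symbol
       bare SYMBOL_REF/LABEL_REF/CONST   @symbol  */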
2208
975ab131 2209
50c33087
MH
2210/* Return nonzero if the floating point operand will fit
2211 in the immediate field. */
975ab131 2212
cb0ca284 2213static int
50c33087
MH
2214c4x_immed_float_p (op)
2215 rtx op;
cb0ca284
MH
2216{
2217 long convval[2];
2218 int exponent;
2219 REAL_VALUE_TYPE r;
2220
50c33087
MH
2221 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
2222 if (GET_MODE (op) == HFmode)
cb0ca284
MH
2223 REAL_VALUE_TO_TARGET_DOUBLE (r, convval);
2224 else
2225 {
2226 REAL_VALUE_TO_TARGET_SINGLE (r, convval[0]);
2227 convval[1] = 0;
2228 }
2229
975ab131 2230 /* Sign extend exponent. */
cb0ca284
MH
2231 exponent = (((convval[0] >> 24) & 0xff) ^ 0x80) - 0x80;
2232 if (exponent == -128)
975ab131 2233 return 1; /* 0.0 */
cb0ca284 2234 if ((convval[0] & 0x00000fff) != 0 || convval[1] != 0)
975ab131
MH
2235 return 0; /* Precision doesn't fit. */
2236 return (exponent <= 7) /* Positive exp. */
2237 && (exponent >= -7); /* Negative exp. */
cb0ca284
MH
2238}
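/* Roughly speaking, the immediate float format provides a 4-bit exponent
   and a short mantissa, so the test above accepts 0.0 and values such as
   2.0 or 0.25 (small exponent, no low-order mantissa bits), while 0.1
   (a repeating binary fraction) and 1.0e10 (exponent out of range) are
   rejected.  */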
2239
975ab131 2240
cb0ca284
MH
2241/* The last instruction in a repeat block cannot be a Bcond, DBcond,
2242 CALL, CALLCond, TRAPcond, RETIcond, RETScond, IDLE, RPTB or RPTS.
2243
2244 None of the last four instructions from the bottom of the block can
2245 be a BcondD, BRD, DBcondD, RPTBD, LAJ, LAJcond, LATcond, BcondAF,
2246 BcondAT or RETIcondD.
2247
2248 This routine scans the four previous insns for a jump insn, and if
2249 one is found, returns 1 so that we bung in a nop instruction.
2250   This simple-minded strategy may add a nop when it is not
2251   required, say when there is a JUMP_INSN near the end of the
2252   block that doesn't get converted into a delayed branch.
2253
2254 Note that we cannot have a call insn, since we don't generate
2255 repeat loops with calls in them (although I suppose we could, but
d5e4ff48
MH
2256 there's no benefit.)
2257
2258 !!! FIXME. The rptb_top insn may be sucked into a SEQUENCE. */
cb0ca284
MH
2259
2260int
2261c4x_rptb_nop_p (insn)
2262 rtx insn;
2263{
d5e4ff48 2264 rtx start_label;
cb0ca284
MH
2265 int i;
2266
d5e4ff48
MH
2267 /* Extract the start label from the jump pattern (rptb_end). */
2268 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
2269
cb0ca284
MH
2270 /* If there is a label at the end of the loop we must insert
2271 a NOP. */
50c33087
MH
2272 do {
2273 insn = previous_insn (insn);
2274 } while (GET_CODE (insn) == NOTE
2275 || GET_CODE (insn) == USE
2276 || GET_CODE (insn) == CLOBBER);
cb0ca284
MH
2277 if (GET_CODE (insn) == CODE_LABEL)
2278 return 1;
2279
2280 for (i = 0; i < 4; i++)
2281 {
2282 /* Search back for prev non-note and non-label insn. */
2283 while (GET_CODE (insn) == NOTE || GET_CODE (insn) == CODE_LABEL
2284 || GET_CODE (insn) == USE || GET_CODE (insn) == CLOBBER)
d5e4ff48
MH
2285 {
2286 if (insn == start_label)
2287 return i == 0;
2288
50c33087 2289 insn = previous_insn (insn);
d5e4ff48 2290 };
cb0ca284 2291
d5e4ff48 2292 /* If we have a jump instruction we should insert a NOP. If we
cb0ca284 2293	 hit the repeat block top we should only insert a NOP if the loop
1ac7a7f5 2294 is empty. */
cb0ca284
MH
2295 if (GET_CODE (insn) == JUMP_INSN)
2296 return 1;
50c33087 2297 insn = previous_insn (insn);
cb0ca284
MH
2298 }
2299 return 0;
2300}
2301
2302
933cddd0
MH
2303/* The C4x looping instruction needs to be emitted at the top of the
2304 loop. Emitting the true RTL for a looping instruction at the top of
2305 the loop can cause problems with flow analysis. So instead, a dummy
2306 doloop insn is emitted at the end of the loop. This routine checks
2307 for the presence of this doloop insn and then searches back to the
2308 top of the loop, where it inserts the true looping insn (provided
2309 there are no instructions in the loop which would cause problems).
2310 Any additional labels can be emitted at this point. In addition, if
2311 the desired loop count register was not allocated, this routine does
0bbcfbaf
HB
2312 nothing.
2313
2314   Before we can create a repeat block looping instruction we have to
2315   verify that there are no jumps out of the loop and no jumps from
2316   outside into the loop.  This can happen after the basic block reorder
2317   pass.  The C4x cpu cannot handle this.  */
2318
2319static int
2320c4x_label_ref_used_p (x, code_label)
2321 rtx x, code_label;
2322{
2323 enum rtx_code code;
2324 int i, j;
2325 const char *fmt;
2326
2327 if (x == 0)
2328 return 0;
2329
2330 code = GET_CODE (x);
2331 if (code == LABEL_REF)
2332 return INSN_UID (XEXP (x,0)) == INSN_UID (code_label);
2333
2334 fmt = GET_RTX_FORMAT (code);
2335 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2336 {
2337 if (fmt[i] == 'e')
2338 {
2339 if (c4x_label_ref_used_p (XEXP (x, i), code_label))
2340 return 1;
2341 }
2342 else if (fmt[i] == 'E')
2343 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2344 if (c4x_label_ref_used_p (XVECEXP (x, i, j), code_label))
2345 return 1;
2346 }
2347 return 0;
2348}
2349
2350
2351static int
2352c4x_rptb_valid_p (insn, start_label)
2353 rtx insn, start_label;
2354{
2355 rtx end = insn;
2356 rtx start;
2357 rtx tmp;
2358
2359 /* Find the start label. */
2360 for (; insn; insn = PREV_INSN (insn))
2361 if (insn == start_label)
2362 break;
2363
2364   /* If the label was not found then we cannot use a rptb or rpts; it
2365      was probably moved by the basic block reorder pass. */
2366 if (! insn)
2367 return 0;
2368
2369 start = insn;
2370 /* If any jump jumps inside this block then we must fail. */
2371 for (insn = PREV_INSN (start); insn; insn = PREV_INSN (insn))
2372 {
2373 if (GET_CODE (insn) == CODE_LABEL)
2374 {
2375 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2376 if (GET_CODE (tmp) == JUMP_INSN
2377 && c4x_label_ref_used_p (tmp, insn))
2378 return 0;
2379 }
2380 }
2381 for (insn = NEXT_INSN (end); insn; insn = NEXT_INSN (insn))
2382 {
2383 if (GET_CODE (insn) == CODE_LABEL)
2384 {
2385 for (tmp = NEXT_INSN (start); tmp != end; tmp = NEXT_INSN(tmp))
2386 if (GET_CODE (tmp) == JUMP_INSN
2387 && c4x_label_ref_used_p (tmp, insn))
2388 return 0;
2389 }
2390 }
2391 /* If any jump jumps outside this block then we must fail. */
2392 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
2393 {
2394 if (GET_CODE (insn) == CODE_LABEL)
2395 {
2396 for (tmp = NEXT_INSN (end); tmp; tmp = NEXT_INSN(tmp))
2397 if (GET_CODE (tmp) == JUMP_INSN
2398 && c4x_label_ref_used_p (tmp, insn))
2399 return 0;
2400 for (tmp = PREV_INSN (start); tmp; tmp = PREV_INSN(tmp))
2401 if (GET_CODE (tmp) == JUMP_INSN
2402 && c4x_label_ref_used_p (tmp, insn))
2403 return 0;
2404 }
2405 }
2406
2407 /* All checks OK. */
2408 return 1;
2409}
2410
975ab131 2411
d5e4ff48
MH
2412void
2413c4x_rptb_insert (insn)
2414 rtx insn;
2415{
2416 rtx end_label;
2417 rtx start_label;
b864825e 2418 rtx new_start_label;
4271f003
MH
2419 rtx count_reg;
2420
2421 /* If the count register has not been allocated to RC, say if
2422 there is a movstr pattern in the loop, then do not insert a
2423 RPTB instruction. Instead we emit a decrement and branch
2424 at the end of the loop. */
2425 count_reg = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 0), 0);
2426 if (REGNO (count_reg) != RC_REGNO)
2427 return;
2428
d5e4ff48
MH
2429 /* Extract the start label from the jump pattern (rptb_end). */
2430 start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn), 0, 0)), 1), 0);
4271f003 2431
0bbcfbaf
HB
2432 if (! c4x_rptb_valid_p (insn, start_label))
2433 {
2434       /* We cannot use the rptb insn.  Replace it so reorg can use
2435 the delay slots of the jump insn. */
2436 emit_insn_before (gen_addqi3 (count_reg, count_reg, GEN_INT (-1)), insn);
2437 emit_insn_before (gen_cmpqi (count_reg, GEN_INT (0)), insn);
2438 emit_insn_before (gen_bge (start_label), insn);
2439 LABEL_NUSES (start_label)++;
2440 delete_insn (insn);
2441 return;
2442 }
2443
d5e4ff48 2444 end_label = gen_label_rtx ();
b864825e 2445 LABEL_NUSES (end_label)++;
d5e4ff48
MH
2446 emit_label_after (end_label, insn);
2447
b864825e
MH
2448 new_start_label = gen_label_rtx ();
2449 LABEL_NUSES (new_start_label)++;
2450
d5e4ff48 2451 for (; insn; insn = PREV_INSN (insn))
b864825e
MH
2452 {
2453 if (insn == start_label)
2454 break;
2455 if (GET_CODE (insn) == JUMP_INSN &&
2456 JUMP_LABEL (insn) == start_label)
2457 redirect_jump (insn, new_start_label, 0);
2458 }
4ddb3ea6 2459 if (! insn)
d5e4ff48
MH
2460 fatal_insn ("c4x_rptb_insert: Cannot find start label", start_label);
2461
b864825e
MH
2462 emit_label_after (new_start_label, insn);
2463
3b5e8a16 2464 if (TARGET_RPTS && c4x_rptb_rpts_p (PREV_INSN (insn), 0))
b864825e 2465 emit_insn_after (gen_rpts_top (new_start_label, end_label), insn);
3b5e8a16 2466 else
b864825e
MH
2467 emit_insn_after (gen_rptb_top (new_start_label, end_label), insn);
2468 if (LABEL_NUSES (start_label) == 0)
2469 delete_insn (start_label);
d5e4ff48
MH
2470}
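/* Schematically (label names invented for illustration), the loop

       start_label:  <body>  (dummy rptb_end jumping to start_label)

   becomes

       start_label:                            ; deleted if no longer referenced
       (rptb_top new_start_label end_label)    ; rpts_top if TARGET_RPTS allows it
       new_start_label:  <body>  (rptb_end)  end_label:

   with jumps to start_label inside the loop redirected to
   new_start_label.  When RC was not allocated as the loop counter the
   dummy rptb_end is instead replaced by an explicit decrement, compare
   and branch back to start_label.  */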
2471
50c33087
MH
2472
2473/* This function is a C4x special called immediately before delayed
2474   branch scheduling.  We fix up RPTB style loops that didn't get RC
2475 allocated as the loop counter. */
cb0ca284
MH
2476
2477void
2478c4x_process_after_reload (first)
2479 rtx first;
2480{
cb0ca284 2481 rtx insn;
cb0ca284
MH
2482
2483 for (insn = first; insn; insn = NEXT_INSN (insn))
2484 {
2485 /* Look for insn. */
2c3c49de 2486 if (INSN_P (insn))
cb0ca284 2487 {
cb0ca284 2488 int insn_code_number;
41387ffd 2489 rtx old;
cb0ca284
MH
2490
2491 insn_code_number = recog_memoized (insn);
2492
2493 if (insn_code_number < 0)
2494 continue;
2495
d5e4ff48 2496 /* Insert the RTX for RPTB at the top of the loop
1ac7a7f5 2497 and a label at the end of the loop. */
d5e4ff48
MH
2498 if (insn_code_number == CODE_FOR_rptb_end)
2499 c4x_rptb_insert(insn);
2500
41387ffd
MH
2501 /* We need to split the insn here. Otherwise the calls to
2502 force_const_mem will not work for load_immed_address. */
2503 old = insn;
34de028f 2504
41387ffd
MH
2505 /* Don't split the insn if it has been deleted. */
2506 if (! INSN_DELETED_P (old))
2507 insn = try_split (PATTERN(old), old, 1);
cb0ca284 2508
41387ffd
MH
2509	     /* When not optimizing, the old insn will still be left around
2510 with only the 'deleted' bit set. Transform it into a note
2511 to avoid confusion of subsequent processing. */
2512 if (INSN_DELETED_P (old))
2513 {
2514 PUT_CODE (old, NOTE);
2515 NOTE_LINE_NUMBER (old) = NOTE_INSN_DELETED;
2516 NOTE_SOURCE_FILE (old) = 0;
cb0ca284 2517 }
cb0ca284
MH
2518 }
2519 }
2520}
2521
2522
2523static int
2524c4x_a_register (op)
2525 rtx op;
2526{
bc46716b 2527 return REG_P (op) && IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2528}
2529
2530
2531static int
2532c4x_x_register (op)
2533 rtx op;
2534{
bc46716b 2535 return REG_P (op) && IS_INDEX_OR_PSEUDO_REG (op);
cb0ca284
MH
2536}
2537
2538
2539static int
50c33087 2540c4x_immed_int_constant (op)
cb0ca284
MH
2541 rtx op;
2542{
2543 if (GET_CODE (op) != CONST_INT)
2544 return 0;
50c33087 2545
cb0ca284
MH
2546 return GET_MODE (op) == VOIDmode
2547 || GET_MODE_CLASS (op) == MODE_INT
2548 || GET_MODE_CLASS (op) == MODE_PARTIAL_INT;
2549}
2550
2551
2552static int
50c33087 2553c4x_immed_float_constant (op)
cb0ca284
MH
2554 rtx op;
2555{
2556 if (GET_CODE (op) != CONST_DOUBLE)
2557 return 0;
50c33087 2558
5078f5eb
HB
2559   /* Do not check whether the CONST_DOUBLE is in memory.  If a MEM is
2560      present, that only means that a MEM rtx has been generated; it does
2561      not mean the value is really in memory. */
50c33087 2562
cb0ca284
MH
2563 return GET_MODE (op) == QFmode || GET_MODE (op) == HFmode;
2564}
2565
2566
483dd5be
MH
2567int
2568c4x_shiftable_constant (op)
2569 rtx op;
2570{
2571 int i;
2572 int mask;
2573 int val = INTVAL (op);
2574
2575 for (i = 0; i < 16; i++)
2576 {
2577 if (val & (1 << i))
2578 break;
2579 }
2580 mask = ((0xffff >> i) << 16) | 0xffff;
4fda2521
HB
2581 if (IS_INT16_CONST (val & (1 << 31) ? (val >> i) | ~mask
2582 : (val >> i) & mask))
483dd5be
MH
2583 return i;
2584 return -1;
2585}
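/* A worked example: for op = 0x1230 the lowest set bit is bit 4, so the
   value is viewed as 0x123 << 4; 0x123 fits in a signed 16-bit immediate
   and the shift count 4 is returned.  For something like 0x12345678 the
   shifted-down residue does not fit in 16 bits and -1 is returned.  */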
2586
2587
cb0ca284
MH
2588int
2589c4x_H_constant (op)
2590 rtx op;
2591{
50c33087 2592 return c4x_immed_float_constant (op) && c4x_immed_float_p (op);
cb0ca284
MH
2593}
2594
2595
2596int
2597c4x_I_constant (op)
2598 rtx op;
2599{
50c33087 2600 return c4x_immed_int_constant (op) && IS_INT16_CONST (INTVAL (op));
cb0ca284
MH
2601}
2602
2603
2604int
2605c4x_J_constant (op)
2606 rtx op;
2607{
2608 if (TARGET_C3X)
2609 return 0;
50c33087 2610 return c4x_immed_int_constant (op) && IS_INT8_CONST (INTVAL (op));
cb0ca284
MH
2611}
2612
2613
2614static int
2615c4x_K_constant (op)
2616 rtx op;
2617{
305902b0 2618 if (TARGET_C3X || ! c4x_immed_int_constant (op))
cb0ca284 2619 return 0;
305902b0 2620 return IS_INT5_CONST (INTVAL (op));
cb0ca284
MH
2621}
2622
2623
2624int
2625c4x_L_constant (op)
2626 rtx op;
2627{
50c33087 2628 return c4x_immed_int_constant (op) && IS_UINT16_CONST (INTVAL (op));
cb0ca284
MH
2629}
2630
2631
2632static int
2633c4x_N_constant (op)
2634 rtx op;
2635{
50c33087 2636 return c4x_immed_int_constant (op) && IS_NOT_UINT16_CONST (INTVAL (op));
cb0ca284
MH
2637}
2638
2639
2640static int
2641c4x_O_constant (op)
2642 rtx op;
2643{
50c33087 2644 return c4x_immed_int_constant (op) && IS_HIGH_CONST (INTVAL (op));
cb0ca284
MH
2645}
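/* In summary (assuming the usual IS_*_CONST ranges from c4x.h), the
   constant-letter predicates above accept:

       H   an immediate float that fits the short float format
       I   a signed 16-bit integer
       J   a signed 8-bit integer (C4x only)
       K   a signed 5-bit integer (C4x only)
       L   an unsigned 16-bit integer
       N   the ones complement of an unsigned 16-bit integer
       O   a constant with only its high 16 bits significant  */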
2646
2647
2648/* The constraints do not have to check the register class,
2649 except when needed to discriminate between the constraints.
2650 The operand has been checked by the predicates to be valid. */
2651
2652/* ARx + 9-bit signed const or IRn
2653   *ARx, *+ARx(n), *-ARx(n), *+ARx(IRn), *-ARx(IRn) for -256 < n < 256
2654 We don't include the pre/post inc/dec forms here since
2655 they are handled by the <> constraints. */
2656
2657int
2658c4x_Q_constraint (op)
2659 rtx op;
2660{
2661 enum machine_mode mode = GET_MODE (op);
2662
2663 if (GET_CODE (op) != MEM)
2664 return 0;
2665 op = XEXP (op, 0);
2666 switch (GET_CODE (op))
2667 {
2668 case REG:
2669 return 1;
2670
2671 case PLUS:
2672 {
2673 rtx op0 = XEXP (op, 0);
2674 rtx op1 = XEXP (op, 1);
2675
4ddb3ea6 2676 if (! REG_P (op0))
cb0ca284
MH
2677 return 0;
2678
2679 if (REG_P (op1))
2680 return 1;
2681
2682 if (GET_CODE (op1) != CONST_INT)
2683 return 0;
2684
2685 /* HImode and HFmode must be offsettable. */
2686 if (mode == HImode || mode == HFmode)
2687 return IS_DISP8_OFF_CONST (INTVAL (op1));
2688
2689 return IS_DISP8_CONST (INTVAL (op1));
2690 }
2691 break;
50c33087 2692
cb0ca284
MH
2693 default:
2694 break;
2695 }
2696 return 0;
2697}
2698
2699
2700/* ARx + 5-bit unsigned const
975ab131 2701 *ARx, *+ARx(n) for n < 32. */
cb0ca284
MH
2702
2703int
2704c4x_R_constraint (op)
2705 rtx op;
2706{
2707 enum machine_mode mode = GET_MODE (op);
2708
2709 if (TARGET_C3X)
2710 return 0;
2711 if (GET_CODE (op) != MEM)
2712 return 0;
2713 op = XEXP (op, 0);
2714 switch (GET_CODE (op))
2715 {
2716 case REG:
2717 return 1;
2718
2719 case PLUS:
2720 {
2721 rtx op0 = XEXP (op, 0);
2722 rtx op1 = XEXP (op, 1);
2723
4ddb3ea6 2724 if (! REG_P (op0))
cb0ca284
MH
2725 return 0;
2726
2727 if (GET_CODE (op1) != CONST_INT)
2728 return 0;
2729
2730 /* HImode and HFmode must be offsettable. */
2731 if (mode == HImode || mode == HFmode)
2732 return IS_UINT5_CONST (INTVAL (op1) + 1);
2733
2734 return IS_UINT5_CONST (INTVAL (op1));
2735 }
2736 break;
933cddd0 2737
cb0ca284
MH
2738 default:
2739 break;
2740 }
2741 return 0;
2742}
2743
2744
2745static int
2746c4x_R_indirect (op)
2747 rtx op;
2748{
2749 enum machine_mode mode = GET_MODE (op);
2750
2751 if (TARGET_C3X || GET_CODE (op) != MEM)
2752 return 0;
2753
2754 op = XEXP (op, 0);
2755 switch (GET_CODE (op))
2756 {
2757 case REG:
bc46716b 2758 return IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2759
2760 case PLUS:
2761 {
2762 rtx op0 = XEXP (op, 0);
2763 rtx op1 = XEXP (op, 1);
2764
2765 /* HImode and HFmode must be offsettable. */
2766 if (mode == HImode || mode == HFmode)
bc46716b 2767 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2768 && GET_CODE (op1) == CONST_INT
2769 && IS_UINT5_CONST (INTVAL (op1) + 1);
2770
2771 return REG_P (op0)
bc46716b 2772 && IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2773 && GET_CODE (op1) == CONST_INT
2774 && IS_UINT5_CONST (INTVAL (op1));
2775 }
2776 break;
2777
2778 default:
2779 break;
2780 }
2781 return 0;
2782}
2783
2784
2785/* ARx + 1-bit unsigned const or IRn
2786   *ARx, *+ARx(1), *-ARx(1), *+ARx(IRn), *-ARx(IRn)
2787 We don't include the pre/post inc/dec forms here since
2788 they are handled by the <> constraints. */
2789
2790int
2791c4x_S_constraint (op)
2792 rtx op;
2793{
2794 enum machine_mode mode = GET_MODE (op);
2795 if (GET_CODE (op) != MEM)
2796 return 0;
2797 op = XEXP (op, 0);
2798 switch (GET_CODE (op))
2799 {
2800 case REG:
2801 return 1;
2802
2803 case PRE_MODIFY:
2804 case POST_MODIFY:
2805 {
2806 rtx op0 = XEXP (op, 0);
2807 rtx op1 = XEXP (op, 1);
2808
2809 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2810 || (op0 != XEXP (op1, 0)))
2811 return 0;
2812
2813 op0 = XEXP (op1, 0);
2814 op1 = XEXP (op1, 1);
2815 return REG_P (op0) && REG_P (op1);
975ab131 2816 /* Pre or post_modify with a displacement of 0 or 1
cb0ca284
MH
2817 should not be generated. */
2818 }
2819 break;
2820
2821 case PLUS:
2822 {
2823 rtx op0 = XEXP (op, 0);
2824 rtx op1 = XEXP (op, 1);
2825
2826 if (!REG_P (op0))
2827 return 0;
2828
2829 if (REG_P (op1))
2830 return 1;
2831
dfb31eec 2832 if (GET_CODE (op1) != CONST_INT)
cb0ca284
MH
2833 return 0;
2834
2835 /* HImode and HFmode must be offsettable. */
2836 if (mode == HImode || mode == HFmode)
2837 return IS_DISP1_OFF_CONST (INTVAL (op1));
2838
2839 return IS_DISP1_CONST (INTVAL (op1));
2840 }
2841 break;
933cddd0 2842
cb0ca284
MH
2843 default:
2844 break;
2845 }
2846 return 0;
2847}
2848
2849
2850static int
2851c4x_S_indirect (op)
2852 rtx op;
2853{
2854 enum machine_mode mode = GET_MODE (op);
2855 if (GET_CODE (op) != MEM)
2856 return 0;
2857
2858 op = XEXP (op, 0);
2859 switch (GET_CODE (op))
2860 {
2861 case PRE_DEC:
2862 case POST_DEC:
2863 if (mode != QImode && mode != QFmode)
2864 return 0;
2865 case PRE_INC:
2866 case POST_INC:
2867 op = XEXP (op, 0);
2868
2869 case REG:
bc46716b 2870 return IS_ADDR_OR_PSEUDO_REG (op);
cb0ca284
MH
2871
2872 case PRE_MODIFY:
2873 case POST_MODIFY:
2874 {
2875 rtx op0 = XEXP (op, 0);
2876 rtx op1 = XEXP (op, 1);
2877
2878 if (mode != QImode && mode != QFmode)
2879 return 0;
2880
2881 if ((GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
2882 || (op0 != XEXP (op1, 0)))
2883 return 0;
2884
2885 op0 = XEXP (op1, 0);
2886 op1 = XEXP (op1, 1);
bc46716b
MH
2887 return REG_P (op0) && IS_ADDR_OR_PSEUDO_REG (op0)
2888 && REG_P (op1) && IS_INDEX_OR_PSEUDO_REG (op1);
975ab131 2889 /* Pre or post_modify with a displacement of 0 or 1
cb0ca284
MH
2890 should not be generated. */
2891 }
2892
2893 case PLUS:
2894 {
2895 rtx op0 = XEXP (op, 0);
2896 rtx op1 = XEXP (op, 1);
2897
2898 if (REG_P (op0))
2899 {
2900 /* HImode and HFmode must be offsettable. */
2901 if (mode == HImode || mode == HFmode)
bc46716b 2902 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2903 && GET_CODE (op1) == CONST_INT
2904 && IS_DISP1_OFF_CONST (INTVAL (op1));
2905
2906 if (REG_P (op1))
bc46716b
MH
2907 return (IS_INDEX_OR_PSEUDO_REG (op1)
2908 && IS_ADDR_OR_PSEUDO_REG (op0))
2909 || (IS_ADDR_OR_PSEUDO_REG (op1)
2910 && IS_INDEX_OR_PSEUDO_REG (op0));
cb0ca284 2911
bc46716b 2912 return IS_ADDR_OR_PSEUDO_REG (op0)
cb0ca284
MH
2913 && GET_CODE (op1) == CONST_INT
2914 && IS_DISP1_CONST (INTVAL (op1));
2915 }
2916 }
2917 break;
2918
2919 default:
2920 break;
2921 }
2922 return 0;
2923}
2924
2925
50c33087 2926/* Direct memory operand. */
cb0ca284
MH
2927
2928int
2929c4x_T_constraint (op)
2930 rtx op;
2931{
2932 if (GET_CODE (op) != MEM)
2933 return 0;
2934 op = XEXP (op, 0);
2935
50c33087 2936 if (GET_CODE (op) != LO_SUM)
cb0ca284 2937 {
50c33087
MH
2938 /* Allow call operands. */
2939 return GET_CODE (op) == SYMBOL_REF
2940 && GET_MODE (op) == Pmode
2941 && SYMBOL_REF_FLAG (op);
cb0ca284
MH
2942 }
2943
50c33087
MH
2944 /* HImode and HFmode are not offsettable. */
2945   if (GET_MODE (op) == HImode || GET_MODE (op) == HFmode)
2946 return 0;
2947
2948 if ((GET_CODE (XEXP (op, 0)) == REG)
2949 && (REGNO (XEXP (op, 0)) == DP_REGNO))
2950 return c4x_U_constraint (XEXP (op, 1));
2951
2952 return 0;
2953}
2954
2955
2956/* Symbolic operand. */
2957
2958int
2959c4x_U_constraint (op)
2960 rtx op;
2961{
cb0ca284 2962 /* Don't allow direct addressing to an arbitrary constant. */
5078f5eb
HB
2963 return GET_CODE (op) == CONST
2964 || GET_CODE (op) == SYMBOL_REF
2965 || GET_CODE (op) == LABEL_REF;
cb0ca284
MH
2966}
2967
2968
2969int
2970c4x_autoinc_operand (op, mode)
2971 rtx op;
d5e4ff48 2972 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
2973{
2974 if (GET_CODE (op) == MEM)
2975 {
2976 enum rtx_code code = GET_CODE (XEXP (op, 0));
2977
2978 if (code == PRE_INC
2979 || code == PRE_DEC
2980 || code == POST_INC
2981 || code == POST_DEC
2982 || code == PRE_MODIFY
2983 || code == POST_MODIFY
2984 )
2985 return 1;
2986 }
2987 return 0;
2988}
2989
2990
2991/* Match any operand. */
2992
2993int
2994any_operand (op, mode)
d5e4ff48
MH
2995 register rtx op ATTRIBUTE_UNUSED;
2996 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
2997{
2998 return 1;
2999}
3000
3001
3002/* Nonzero if OP is a floating point value with value 0.0. */
3003
3004int
798f6e6f 3005fp_zero_operand (op, mode)
cb0ca284 3006 rtx op;
798f6e6f 3007 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3008{
3009 REAL_VALUE_TYPE r;
3010
f9ef1f02
MH
3011 if (GET_CODE (op) != CONST_DOUBLE)
3012 return 0;
cb0ca284
MH
3013 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
3014 return REAL_VALUES_EQUAL (r, dconst0);
3015}
3016
3017
3018int
3019const_operand (op, mode)
3020 register rtx op;
3021 register enum machine_mode mode;
3022{
3023 switch (mode)
3024 {
3025 case QFmode:
3026 case HFmode:
3027 if (GET_CODE (op) != CONST_DOUBLE
3028 || GET_MODE (op) != mode
3029 || GET_MODE_CLASS (mode) != MODE_FLOAT)
3030 return 0;
3031
3032 return c4x_immed_float_p (op);
3033
3034#if Pmode != QImode
3035 case Pmode:
3036#endif
3037 case QImode:
ee5332b8
RH
3038 if (GET_CODE (op) == CONSTANT_P_RTX)
3039 return 1;
3040
cb0ca284
MH
3041 if (GET_CODE (op) != CONST_INT
3042 || (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
3043 || GET_MODE_CLASS (mode) != MODE_INT)
3044 return 0;
3045
3046 return IS_HIGH_CONST (INTVAL (op)) || IS_INT16_CONST (INTVAL (op));
3047
3048 case HImode:
3049 return 0;
3050
3051 default:
3052 return 0;
3053 }
3054}
3055
3056
3057int
3058stik_const_operand (op, mode)
3059 rtx op;
d5e4ff48 3060 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3061{
3062 return c4x_K_constant (op);
3063}
3064
3065
3066int
3067not_const_operand (op, mode)
3068 rtx op;
d5e4ff48 3069 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3070{
3071 return c4x_N_constant (op);
3072}
3073
3074
3075int
3076reg_operand (op, mode)
3077 rtx op;
3078 enum machine_mode mode;
3079{
ebcc44f4
MH
3080 if (GET_CODE (op) == SUBREG
3081 && GET_MODE (op) == QFmode)
3082 return 0;
cb0ca284
MH
3083 return register_operand (op, mode);
3084}
3085
50c33087 3086
ebcc44f4
MH
3087int
3088mixed_subreg_operand (op, mode)
3089 rtx op;
483dd5be 3090 enum machine_mode mode ATTRIBUTE_UNUSED;
ebcc44f4
MH
3091{
3092   /* Allow (subreg:HF (reg:HI)) that may be generated for a union of an
3093 int and a long double. */
3094 if (GET_CODE (op) == SUBREG
3095 && (GET_MODE (op) == QFmode)
3096 && (GET_MODE (SUBREG_REG (op)) == QImode
3097 || GET_MODE (SUBREG_REG (op)) == HImode))
3098 return 1;
3099 return 0;
3100}
3101
3102
cb0ca284
MH
3103int
3104reg_imm_operand (op, mode)
3105 rtx op;
d5e4ff48 3106 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3107{
3108 if (REG_P (op) || CONSTANT_P (op))
3109 return 1;
3110 return 0;
3111}
3112
50c33087 3113
cb0ca284
MH
3114int
3115not_modify_reg (op, mode)
3116 rtx op;
d5e4ff48 3117 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3118{
3119 if (REG_P (op) || CONSTANT_P (op))
3120 return 1;
3121 if (GET_CODE (op) != MEM)
3122 return 0;
3123 op = XEXP (op, 0);
3124 switch (GET_CODE (op))
3125 {
3126 case REG:
3127 return 1;
3128
3129 case PLUS:
3130 {
3131 rtx op0 = XEXP (op, 0);
3132 rtx op1 = XEXP (op, 1);
3133
4ddb3ea6 3134 if (! REG_P (op0))
cb0ca284
MH
3135 return 0;
3136
3137 if (REG_P (op1) || GET_CODE (op1) == CONST_INT)
3138 return 1;
3139 }
50c33087
MH
3140
3141 case LO_SUM:
3142 {
3143 rtx op0 = XEXP (op, 0);
3144
3145 if (REG_P (op0) && REGNO (op0) == DP_REGNO)
3146 return 1;
3147 }
3148 break;
3149
cb0ca284
MH
3150 case CONST:
3151 case SYMBOL_REF:
3152 case LABEL_REF:
3153 return 1;
933cddd0 3154
cb0ca284
MH
3155 default:
3156 break;
3157 }
3158 return 0;
3159}
3160
50c33087 3161
cb0ca284
MH
3162int
3163not_rc_reg (op, mode)
3164 rtx op;
d5e4ff48 3165 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3166{
3167 if (REG_P (op) && REGNO (op) == RC_REGNO)
3168 return 0;
3169 return 1;
3170}
3171
50c33087 3172
cb0ca284
MH
3173/* Extended precision register R0-R1. */
3174
3175int
3176r0r1_reg_operand (op, mode)
3177 rtx op;
3178 enum machine_mode mode;
3179{
ebcc44f4 3180 if (! reg_operand (op, mode))
cb0ca284
MH
3181 return 0;
3182 if (GET_CODE (op) == SUBREG)
3183 op = SUBREG_REG (op);
bc46716b 3184 return REG_P (op) && IS_R0R1_OR_PSEUDO_REG (op);
cb0ca284
MH
3185}
3186
3187
3188/* Extended precision register R2-R3. */
3189
3190int
3191r2r3_reg_operand (op, mode)
3192 rtx op;
3193 enum machine_mode mode;
3194{
ebcc44f4 3195 if (! reg_operand (op, mode))
cb0ca284
MH
3196 return 0;
3197 if (GET_CODE (op) == SUBREG)
3198 op = SUBREG_REG (op);
bc46716b 3199 return REG_P (op) && IS_R2R3_OR_PSEUDO_REG (op);
cb0ca284
MH
3200}
3201
3202
3203/* Low extended precision register R0-R7. */
3204
3205int
3206ext_low_reg_operand (op, mode)
3207 rtx op;
3208 enum machine_mode mode;
3209{
ebcc44f4 3210 if (! reg_operand (op, mode))
cb0ca284
MH
3211 return 0;
3212 if (GET_CODE (op) == SUBREG)
3213 op = SUBREG_REG (op);
bc46716b 3214 return REG_P (op) && IS_EXT_LOW_OR_PSEUDO_REG (op);
cb0ca284
MH
3215}
3216
3217
3218/* Extended precision register. */
3219
3220int
3221ext_reg_operand (op, mode)
3222 rtx op;
3223 enum machine_mode mode;
3224{
ebcc44f4 3225 if (! reg_operand (op, mode))
cb0ca284
MH
3226 return 0;
3227 if (GET_CODE (op) == SUBREG)
3228 op = SUBREG_REG (op);
4ddb3ea6 3229 if (! REG_P (op))
cb0ca284 3230 return 0;
bc46716b 3231 return IS_EXT_OR_PSEUDO_REG (op);
cb0ca284
MH
3232}
3233
3234
3235/* Standard precision register. */
3236
3237int
3238std_reg_operand (op, mode)
3239 rtx op;
3240 enum machine_mode mode;
3241{
ebcc44f4 3242 if (! reg_operand (op, mode))
cb0ca284
MH
3243 return 0;
3244 if (GET_CODE (op) == SUBREG)
3245 op = SUBREG_REG (op);
bc46716b 3246 return REG_P (op) && IS_STD_OR_PSEUDO_REG (op);
cb0ca284
MH
3247}
3248
ed3614cd
HB
3249/* Standard precision or normal register. */
3250
3251int
3252std_or_reg_operand (op, mode)
3253 rtx op;
3254 enum machine_mode mode;
3255{
3256 if (reload_in_progress)
3257 return std_reg_operand (op, mode);
3258 return reg_operand (op, mode);
3259}
3260
cb0ca284
MH
3261/* Address register. */
3262
3263int
3264addr_reg_operand (op, mode)
3265 rtx op;
3266 enum machine_mode mode;
3267{
ebcc44f4 3268 if (! reg_operand (op, mode))
cb0ca284
MH
3269 return 0;
3270 return c4x_a_register (op);
3271}
3272
3273
3274/* Index register. */
3275
3276int
3277index_reg_operand (op, mode)
3278 rtx op;
3279 enum machine_mode mode;
3280{
ebcc44f4 3281 if (! reg_operand (op, mode))
cb0ca284
MH
3282 return 0;
3283 if (GET_CODE (op) == SUBREG)
3284 op = SUBREG_REG (op);
3285 return c4x_x_register (op);
3286}
3287
3288
3289/* DP register. */
3290
3291int
3292dp_reg_operand (op, mode)
3293 rtx op;
d5e4ff48 3294 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3295{
bc46716b 3296 return REG_P (op) && IS_DP_OR_PSEUDO_REG (op);
cb0ca284
MH
3297}
3298
3299
3300/* SP register. */
3301
3302int
3303sp_reg_operand (op, mode)
3304 rtx op;
d5e4ff48 3305 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3306{
bc46716b 3307 return REG_P (op) && IS_SP_OR_PSEUDO_REG (op);
cb0ca284
MH
3308}
3309
3310
3311/* ST register. */
3312
3313int
3314st_reg_operand (op, mode)
3315 register rtx op;
d5e4ff48 3316 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3317{
bc46716b 3318 return REG_P (op) && IS_ST_OR_PSEUDO_REG (op);
cb0ca284
MH
3319}
3320
3321
d5e4ff48
MH
3322/* RC register. */
3323
3324int
3325rc_reg_operand (op, mode)
3326 register rtx op;
3327 enum machine_mode mode ATTRIBUTE_UNUSED;
3328{
bc46716b 3329 return REG_P (op) && IS_RC_OR_PSEUDO_REG (op);
d5e4ff48
MH
3330}
3331
3332
cb0ca284 3333int
55310df7 3334call_address_operand (op, mode)
cb0ca284 3335 rtx op;
d5e4ff48 3336 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284 3337{
55310df7 3338 return (REG_P (op) || symbolic_address_operand (op, mode));
cb0ca284
MH
3339}
3340
3341
305902b0 3342/* Symbolic address operand. */
50c33087
MH
3343
3344int
55310df7 3345symbolic_address_operand (op, mode)
50c33087
MH
3346 register rtx op;
3347 enum machine_mode mode ATTRIBUTE_UNUSED;
3348{
3349 switch (GET_CODE (op))
3350 {
5078f5eb 3351 case CONST:
50c33087
MH
3352 case SYMBOL_REF:
3353 case LABEL_REF:
3354 return 1;
50c33087
MH
3355 default:
3356 return 0;
3357 }
3358}
3359
975ab131 3360
f416f18c 3361/* Check dst operand of a move instruction. */
975ab131 3362
f416f18c
MH
3363int
3364dst_operand (op, mode)
3365 rtx op;
3366 enum machine_mode mode;
3367{
3368 if (GET_CODE (op) == SUBREG
3369 && mixed_subreg_operand (op, mode))
3370 return 0;
cb0ca284 3371
f416f18c
MH
3372 if (REG_P (op))
3373 return reg_operand (op, mode);
3374
f959ff1a 3375 return nonimmediate_operand (op, mode);
f416f18c
MH
3376}
3377
3378
3379/* Check src operand of two operand arithmetic instructions. */
975ab131 3380
cb0ca284
MH
3381int
3382src_operand (op, mode)
3383 rtx op;
3384 enum machine_mode mode;
3385{
ebcc44f4
MH
3386 if (GET_CODE (op) == SUBREG
3387 && mixed_subreg_operand (op, mode))
3388 return 0;
3389
cb0ca284
MH
3390 if (REG_P (op))
3391 return reg_operand (op, mode);
3392
3393 if (mode == VOIDmode)
3394 mode = GET_MODE (op);
3395
cb0ca284 3396 if (GET_CODE (op) == CONST_INT)
50c33087
MH
3397 return (mode == QImode || mode == Pmode || mode == HImode)
3398 && c4x_I_constant (op);
cb0ca284
MH
3399
3400 /* We don't like CONST_DOUBLE integers. */
3401 if (GET_CODE (op) == CONST_DOUBLE)
3402 return c4x_H_constant (op);
3403
31445126
MH
3404 /* Disallow symbolic addresses. Only the predicate
3405 symbolic_address_operand will match these. */
50c33087
MH
3406 if (GET_CODE (op) == SYMBOL_REF
3407 || GET_CODE (op) == LABEL_REF
3408 || GET_CODE (op) == CONST)
3409 return 0;
3410
4a1f52a8
HB
3411 /* If TARGET_LOAD_DIRECT_MEMS is non-zero, disallow direct memory
3412 access to symbolic addresses. These operands will get forced
3413 into a register and the movqi expander will generate a
3414 HIGH/LO_SUM pair if TARGET_EXPOSE_LDP is non-zero. */
50c33087
MH
3415 if (GET_CODE (op) == MEM
3416 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
3417 || GET_CODE (XEXP (op, 0)) == LABEL_REF
3418 || GET_CODE (XEXP (op, 0)) == CONST)))
4a1f52a8 3419 return ! TARGET_LOAD_DIRECT_MEMS && GET_MODE (op) == mode;
50c33087 3420
cb0ca284
MH
3421 return general_operand (op, mode);
3422}
3423
3424
3425int
3426src_hi_operand (op, mode)
3427 rtx op;
3428 enum machine_mode mode;
3429{
3430 if (c4x_O_constant (op))
3431 return 1;
3432 return src_operand (op, mode);
3433}
3434
3435
3436/* Check src operand of two operand logical instructions. */
3437
3438int
3439lsrc_operand (op, mode)
3440 rtx op;
3441 enum machine_mode mode;
3442{
3443 if (mode == VOIDmode)
3444 mode = GET_MODE (op);
3445
3446 if (mode != QImode && mode != Pmode)
3447 fatal_insn ("Mode not QImode", op);
3448
cb0ca284
MH
3449 if (GET_CODE (op) == CONST_INT)
3450 return c4x_L_constant (op) || c4x_J_constant (op);
3451
50c33087 3452 return src_operand (op, mode);
cb0ca284
MH
3453}
3454
3455
3456/* Check src operand of two operand tricky instructions. */
3457
3458int
3459tsrc_operand (op, mode)
3460 rtx op;
3461 enum machine_mode mode;
3462{
3463 if (mode == VOIDmode)
3464 mode = GET_MODE (op);
3465
3466 if (mode != QImode && mode != Pmode)
3467 fatal_insn ("Mode not QImode", op);
3468
cb0ca284
MH
3469 if (GET_CODE (op) == CONST_INT)
3470 return c4x_L_constant (op) || c4x_N_constant (op) || c4x_J_constant (op);
3471
50c33087 3472 return src_operand (op, mode);
cb0ca284
MH
3473}
3474
3475
3476int
3477reg_or_const_operand (op, mode)
3478 rtx op;
3479 enum machine_mode mode;
3480{
3481 return reg_operand (op, mode) || const_operand (op, mode);
3482}
3483
3484
3485/* Check for indirect operands allowable in parallel instruction. */
3486
3487int
3488par_ind_operand (op, mode)
3489 rtx op;
3490 enum machine_mode mode;
3491{
3492 if (mode != VOIDmode && mode != GET_MODE (op))
3493 return 0;
3494
3495 return c4x_S_indirect (op);
3496}
3497
3498
3499/* Check for operands allowable in parallel instruction. */
3500
3501int
3502parallel_operand (op, mode)
3503 rtx op;
3504 enum machine_mode mode;
3505{
3506 return ext_low_reg_operand (op, mode) || par_ind_operand (op, mode);
3507}
3508
3509
3510static void
3511c4x_S_address_parse (op, base, incdec, index, disp)
3512 rtx op;
3513 int *base;
3514 int *incdec;
3515 int *index;
3516 int *disp;
3517{
3518 *base = 0;
3519 *incdec = 0;
3520 *index = 0;
3521 *disp = 0;
3522
3523 if (GET_CODE (op) != MEM)
3524 fatal_insn ("Invalid indirect memory address", op);
3525
3526 op = XEXP (op, 0);
3527 switch (GET_CODE (op))
3528 {
3529 case PRE_DEC:
3530 *base = REGNO (XEXP (op, 0));
3531 *incdec = 1;
3532 *disp = -1;
3533 return;
3534
3535 case POST_DEC:
3536 *base = REGNO (XEXP (op, 0));
3537 *incdec = 1;
3538 *disp = 0;
3539 return;
3540
3541 case PRE_INC:
3542 *base = REGNO (XEXP (op, 0));
3543 *incdec = 1;
3544 *disp = 1;
3545 return;
3546
3547 case POST_INC:
3548 *base = REGNO (XEXP (op, 0));
3549 *incdec = 1;
3550 *disp = 0;
3551 return;
3552
3553 case POST_MODIFY:
3554 *base = REGNO (XEXP (op, 0));
3555 if (REG_P (XEXP (XEXP (op, 1), 1)))
3556 {
3557 *index = REGNO (XEXP (XEXP (op, 1), 1));
3558 *disp = 0; /* ??? */
3559 }
3560 else
3561 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3562 *incdec = 1;
3563 return;
3564
3565 case PRE_MODIFY:
3566 *base = REGNO (XEXP (op, 0));
3567 if (REG_P (XEXP (XEXP (op, 1), 1)))
3568 {
3569 *index = REGNO (XEXP (XEXP (op, 1), 1));
3570 *disp = 1; /* ??? */
3571 }
3572 else
3573 *disp = INTVAL (XEXP (XEXP (op, 1), 1));
3574 *incdec = 1;
3575
3576 return;
3577
3578 case REG:
3579 *base = REGNO (op);
3580 return;
3581
3582 case PLUS:
3583 {
3584 rtx op0 = XEXP (op, 0);
3585 rtx op1 = XEXP (op, 1);
3586
3587 if (c4x_a_register (op0))
3588 {
3589 if (c4x_x_register (op1))
3590 {
3591 *base = REGNO (op0);
3592 *index = REGNO (op1);
3593 return;
3594 }
3595 else if ((GET_CODE (op1) == CONST_INT
3596 && IS_DISP1_CONST (INTVAL (op1))))
3597 {
3598 *base = REGNO (op0);
3599 *disp = INTVAL (op1);
3600 return;
3601 }
3602 }
3603 else if (c4x_x_register (op0) && c4x_a_register (op1))
3604 {
3605 *base = REGNO (op1);
3606 *index = REGNO (op0);
3607 return;
3608 }
3609 }
975ab131 3610 /* Fallthrough. */
cb0ca284
MH
3611
3612 default:
3613 fatal_insn ("Invalid indirect (S) memory address", op);
3614 }
3615}
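/* For example (register names symbolic): *+AR3(IR1), i.e.
   (mem (plus (reg AR3) (reg IR1))), parses to base = AR3, index = IR1,
   disp = 0, incdec = 0, while *AR2-- (a POST_DEC) parses to base = AR2,
   incdec = 1, disp = 0.  */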
3616
3617
3618int
3619c4x_address_conflict (op0, op1, store0, store1)
3620 rtx op0;
3621 rtx op1;
3622 int store0;
3623 int store1;
3624{
3625 int base0;
3626 int base1;
3627 int incdec0;
3628 int incdec1;
3629 int index0;
3630 int index1;
3631 int disp0;
3632 int disp1;
3633
4271f003
MH
3634 if (MEM_VOLATILE_P (op0) && MEM_VOLATILE_P (op1))
3635 return 1;
3636
cb0ca284
MH
3637 c4x_S_address_parse (op0, &base0, &incdec0, &index0, &disp0);
3638 c4x_S_address_parse (op1, &base1, &incdec1, &index1, &disp1);
3639
3640 if (store0 && store1)
3641 {
3642 /* If we have two stores in parallel to the same address, then
3643 the C4x only executes one of the stores. This is unlikely to
3644 cause problems except when writing to a hardware device such
3645 as a FIFO since the second write will be lost. The user
3646 should flag the hardware location as being volatile so that
3647 we don't do this optimisation. While it is unlikely that we
3648 have an aliased address if both locations are not marked
3649 volatile, it is probably safer to flag a potential conflict
3650 if either location is volatile. */
4ddb3ea6 3651 if (! flag_argument_noalias)
cb0ca284
MH
3652 {
3653 if (MEM_VOLATILE_P (op0) || MEM_VOLATILE_P (op1))
3654 return 1;
3655 }
3656 }
3657
3658   /* If we have a parallel load and a store to the same address, the load
3659      is performed first, so there is no conflict.  Similarly, there is
3660      no conflict if we have parallel loads from the same address. */
3661
3662 /* Cannot use auto increment or auto decrement twice for same
3663 base register. */
3664   if (base0 == base1 && incdec0 && incdec1)
3665 return 1;
3666
3667   /* It might be too confusing for GCC if we use a base register
3668 with a side effect and a memory reference using the same register
3669 in parallel. */
4ddb3ea6 3670 if (! TARGET_DEVEL && base0 == base1 && (incdec0 || incdec1))
cb0ca284
MH
3671 return 1;
3672
f1c374cb 3673   /* We cannot optimize the case where op0 and op1 refer to the same
1ac7a7f5 3674 address. */
f1c374cb 3675 if (base0 == base1 && disp0 == disp1 && index0 == index1)
cb0ca284
MH
3676 return 1;
3677
3678 /* No conflict. */
3679 return 0;
3680}
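/* For example (illustrative): a store to *+AR0(1) in parallel with a
   load from *+AR0(2) shares a base but not a displacement, so no
   conflict is reported; two references to *AR0 itself, or any pair of
   volatile MEMs, are flagged as conflicting.  */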
3681
3682
3683/* Check for while loop inside a decrement and branch loop. */
3684
3685int
3686c4x_label_conflict (insn, jump, db)
3687 rtx insn;
3688 rtx jump;
3689 rtx db;
3690{
3691 while (insn)
3692 {
3693 if (GET_CODE (insn) == CODE_LABEL)
3694 {
3695 if (CODE_LABEL_NUMBER (jump) == CODE_LABEL_NUMBER (insn))
3696 return 1;
3697 if (CODE_LABEL_NUMBER (db) == CODE_LABEL_NUMBER (insn))
3698 return 0;
3699 }
3700 insn = PREV_INSN (insn);
3701 }
3702 return 1;
3703}
3704
3705
3706/* Validate combination of operands for parallel load/store instructions. */
3707
5e6a42d9 3708int
e868a840 3709valid_parallel_load_store (operands, mode)
cb0ca284 3710 rtx *operands;
d5e4ff48 3711 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3712{
3713 rtx op0 = operands[0];
3714 rtx op1 = operands[1];
3715 rtx op2 = operands[2];
3716 rtx op3 = operands[3];
3717
3718 if (GET_CODE (op0) == SUBREG)
3719 op0 = SUBREG_REG (op0);
3720 if (GET_CODE (op1) == SUBREG)
3721 op1 = SUBREG_REG (op1);
3722 if (GET_CODE (op2) == SUBREG)
3723 op2 = SUBREG_REG (op2);
3724 if (GET_CODE (op3) == SUBREG)
3725 op3 = SUBREG_REG (op3);
3726
3727 /* The patterns should only allow ext_low_reg_operand() or
3728 par_ind_operand() operands. Thus of the 4 operands, only 2
3729 should be REGs and the other 2 should be MEMs. */
3730
4271f003 3731 /* This test prevents the multipack pass from using this pattern if
e868a840
MH
3732 op0 is used as an index or base register in op2 or op3, since
3733 this combination will require reloading. */
4271f003 3734 if (GET_CODE (op0) == REG
e868a840
MH
3735 && ((GET_CODE (op2) == MEM && reg_mentioned_p (op0, XEXP (op2, 0)))
3736 || (GET_CODE (op3) == MEM && reg_mentioned_p (op0, XEXP (op3, 0)))))
4271f003
MH
3737 return 0;
3738
975ab131 3739 /* LDI||LDI. */
cb0ca284
MH
3740 if (GET_CODE (op0) == REG && GET_CODE (op2) == REG)
3741 return (REGNO (op0) != REGNO (op2))
3742 && GET_CODE (op1) == MEM && GET_CODE (op3) == MEM
4ddb3ea6 3743 && ! c4x_address_conflict (op1, op3, 0, 0);
cb0ca284 3744
975ab131 3745 /* STI||STI. */
cb0ca284
MH
3746 if (GET_CODE (op1) == REG && GET_CODE (op3) == REG)
3747 return GET_CODE (op0) == MEM && GET_CODE (op2) == MEM
4ddb3ea6 3748 && ! c4x_address_conflict (op0, op2, 1, 1);
cb0ca284 3749
975ab131 3750 /* LDI||STI. */
cb0ca284
MH
3751 if (GET_CODE (op0) == REG && GET_CODE (op3) == REG)
3752 return GET_CODE (op1) == MEM && GET_CODE (op2) == MEM
4ddb3ea6 3753 && ! c4x_address_conflict (op1, op2, 0, 1);
cb0ca284 3754
975ab131 3755 /* STI||LDI. */
cb0ca284
MH
3756 if (GET_CODE (op1) == REG && GET_CODE (op2) == REG)
3757 return GET_CODE (op0) == MEM && GET_CODE (op3) == MEM
4ddb3ea6 3758 && ! c4x_address_conflict (op0, op3, 1, 0);
cb0ca284
MH
3759
3760 return 0;
3761}
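/* For example (illustrative): an LDI||LDI pair such as

       ldi *AR0,R0 || ldi *AR1,R1

   is accepted since the destination registers differ and the two
   (non-volatile) memory references cannot conflict, whereas an STI||STI
   pair writing to the same address is rejected because only one of the
   two parallel stores would take effect.  */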
3762
4271f003 3763
e868a840
MH
3764int
3765valid_parallel_operands_4 (operands, mode)
3766 rtx *operands;
3767 enum machine_mode mode ATTRIBUTE_UNUSED;
3768{
e868a840
MH
3769 rtx op0 = operands[0];
3770 rtx op2 = operands[2];
3771
3772 if (GET_CODE (op0) == SUBREG)
3773 op0 = SUBREG_REG (op0);
3774 if (GET_CODE (op2) == SUBREG)
3775 op2 = SUBREG_REG (op2);
3776
3777 /* This test prevents the multipack pass from using this pattern if
3778 op0 is used as an index or base register in op2, since this combination
3779 will require reloading. */
3780 if (GET_CODE (op0) == REG
3781 && GET_CODE (op2) == MEM
3782 && reg_mentioned_p (op0, XEXP (op2, 0)))
3783 return 0;
3784
3785 return 1;
3786}
3787
3788
cb0ca284
MH
3789int
3790valid_parallel_operands_5 (operands, mode)
3791 rtx *operands;
d5e4ff48 3792 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3793{
3794 int regs = 0;
4271f003 3795 rtx op0 = operands[0];
e868a840 3796 rtx op1 = operands[1];
4271f003
MH
3797 rtx op2 = operands[2];
3798 rtx op3 = operands[3];
cb0ca284
MH
3799
3800 if (GET_CODE (op0) == SUBREG)
3801 op0 = SUBREG_REG (op0);
e868a840
MH
3802 if (GET_CODE (op1) == SUBREG)
3803 op1 = SUBREG_REG (op1);
4271f003
MH
3804 if (GET_CODE (op2) == SUBREG)
3805 op2 = SUBREG_REG (op2);
cb0ca284
MH
3806
3807 /* The patterns should only allow ext_low_reg_operand() or
e868a840
MH
3808 par_ind_operand() operands. Operands 1 and 2 may be commutative
3809 but only one of them can be a register. */
3810 if (GET_CODE (op1) == REG)
cb0ca284 3811 regs++;
4271f003 3812 if (GET_CODE (op2) == REG)
cb0ca284
MH
3813 regs++;
3814
4271f003
MH
3815 if (regs != 1)
3816 return 0;
3817
3818 /* This test prevents the multipack pass from using this pattern if
3819 op0 is used as an index or base register in op3, since this combination
3820 will require reloading. */
3821 if (GET_CODE (op0) == REG
3822 && GET_CODE (op3) == MEM
3823 && reg_mentioned_p (op0, XEXP (op3, 0)))
3824 return 0;
3825
3826 return 1;
cb0ca284
MH
3827}
3828
3829
3830int
3831valid_parallel_operands_6 (operands, mode)
3832 rtx *operands;
d5e4ff48 3833 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3834{
3835 int regs = 0;
4271f003
MH
3836 rtx op0 = operands[0];
3837 rtx op1 = operands[1];
3838 rtx op2 = operands[2];
3839 rtx op4 = operands[4];
3840 rtx op5 = operands[5];
cb0ca284 3841
cb0ca284
MH
3842 if (GET_CODE (op1) == SUBREG)
3843 op1 = SUBREG_REG (op1);
3844 if (GET_CODE (op2) == SUBREG)
3845 op2 = SUBREG_REG (op2);
4271f003
MH
3846 if (GET_CODE (op4) == SUBREG)
3847 op4 = SUBREG_REG (op4);
3848 if (GET_CODE (op5) == SUBREG)
3849 op5 = SUBREG_REG (op5);
cb0ca284
MH
3850
3851 /* The patterns should only allow ext_low_reg_operand() or
3852 par_ind_operand() operands. Thus of the 4 input operands, only 2
3853 should be REGs and the other 2 should be MEMs. */
3854
cb0ca284
MH
3855 if (GET_CODE (op1) == REG)
3856 regs++;
3857 if (GET_CODE (op2) == REG)
3858 regs++;
4271f003
MH
3859 if (GET_CODE (op4) == REG)
3860 regs++;
3861 if (GET_CODE (op5) == REG)
cb0ca284
MH
3862 regs++;
3863
3864 /* The new C30/C40 silicon dies allow 3 regs of the 4 input operands.
3865 Perhaps we should count the MEMs as well? */
4271f003
MH
3866 if (regs != 2)
3867 return 0;
cb0ca284 3868
4271f003
MH
3869 /* This test prevents the multipack pass from using this pattern if
3870 op0 is used as an index or base register in op4 or op5, since
3871 this combination will require reloading. */
3872 if (GET_CODE (op0) == REG
3873 && ((GET_CODE (op4) == MEM && reg_mentioned_p (op0, XEXP (op4, 0)))
3874 || (GET_CODE (op5) == MEM && reg_mentioned_p (op0, XEXP (op5, 0)))))
3875 return 0;
cb0ca284 3876
4271f003 3877 return 1;
cb0ca284
MH
3878}
3879
3880
3881/* Validate combination of src operands. Note that the operands have
3882 been screened by the src_operand predicate. We just have to check
3883 that the combination of operands is valid. If FORCE is set, ensure
3884 that the destination regno is valid if we have a 2 operand insn. */
3885
3886static int
3887c4x_valid_operands (code, operands, mode, force)
3888 enum rtx_code code;
3889 rtx *operands;
8d485e2d 3890 enum machine_mode mode ATTRIBUTE_UNUSED;
cb0ca284
MH
3891 int force;
3892{
3893 rtx op1;
3894 rtx op2;
3895 enum rtx_code code1;
3896 enum rtx_code code2;
3897
3898 if (code == COMPARE)
3899 {
3900 op1 = operands[0];
3901 op2 = operands[1];
3902 }
3903 else
3904 {
3905 op1 = operands[1];
3906 op2 = operands[2];
3907 }
3908
3909 if (GET_CODE (op1) == SUBREG)
3910 op1 = SUBREG_REG (op1);
3911 if (GET_CODE (op2) == SUBREG)
3912 op2 = SUBREG_REG (op2);
3913
3914 code1 = GET_CODE (op1);
3915 code2 = GET_CODE (op2);
3916
3917 if (code1 == REG && code2 == REG)
3918 return 1;
3919
3920 if (code1 == MEM && code2 == MEM)
3921 {
8d485e2d 3922 if (c4x_S_indirect (op1) && c4x_S_indirect (op2))
cb0ca284 3923 return 1;
8d485e2d 3924 return c4x_R_indirect (op1) && c4x_R_indirect (op2);
cb0ca284
MH
3925 }
3926
3927 if (code1 == code2)
3928 return 0;
3929
3930 if (code1 == REG)
3931 {
3932 switch (code2)
3933 {
3934 case CONST_INT:
3935 if (c4x_J_constant (op2) && c4x_R_indirect (op1))
3936 return 1;
3937 break;
3938
3939 case CONST_DOUBLE:
4ddb3ea6 3940 if (! c4x_H_constant (op2))
cb0ca284
MH
3941 return 0;
3942 break;
3943
3944 /* Any valid memory operand screened by src_operand is OK. */
3945 case MEM:
3946
3947 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3948 into a stack slot memory address comprising a PLUS and a
3949 constant. */
3950 case ADDRESSOF:
3951 break;
3952
3953 default:
50c33087 3954 fatal_insn ("c4x_valid_operands: Internal error", op2);
cb0ca284
MH
3955 break;
3956 }
3957
3958 /* Check that we have a valid destination register for a two operand
3959 instruction. */
4ddb3ea6 3960 return ! force || code == COMPARE || REGNO (op1) == REGNO (operands[0]);
cb0ca284
MH
3961 }
3962
3963 /* We assume MINUS is commutative since the subtract patterns
3964 also support the reverse subtract instructions. Since op1
3965 is not a register, and op2 is a register, op1 can only
3966 be a restricted memory operand for a shift instruction. */
3967 if (code == ASHIFTRT || code == LSHIFTRT
3968 || code == ASHIFT || code == COMPARE)
3969 return code2 == REG
3970 && (c4x_S_indirect (op1) || c4x_R_indirect (op1));
3971
3972 switch (code1)
3973 {
3974 case CONST_INT:
3975 if (c4x_J_constant (op1) && c4x_R_indirect (op2))
3976 return 1;
3977 break;
3978
3979 case CONST_DOUBLE:
4ddb3ea6 3980 if (! c4x_H_constant (op1))
cb0ca284
MH
3981 return 0;
3982 break;
3983
1ac7a7f5 3984 /* Any valid memory operand screened by src_operand is OK. */
cb0ca284 3985 case MEM:
87ba6944
MH
3986#if 0
3987 if (code2 != REG)
3988 return 0;
3989#endif
3990 break;
cb0ca284
MH
3991
3992 /* After CSE, any remaining (ADDRESSOF:P reg) gets converted
3993 into a stack slot memory address comprising a PLUS and a
3994 constant. */
3995 case ADDRESSOF:
3996 break;
3997
3998 default:
400500c4 3999 abort ();
cb0ca284
MH
4000 break;
4001 }
4002
4003 /* Check that we have a valid destination register for a two operand
4004 instruction. */
4ddb3ea6 4005 return ! force || REGNO (op1) == REGNO (operands[0]);
cb0ca284
MH
4006}
4007
4008
4009int valid_operands (code, operands, mode)
4010 enum rtx_code code;
4011 rtx *operands;
4012 enum machine_mode mode;
4013{
4014
4015 /* If we are not optimizing then we have to let anything go and let
4016 reload fix things up. instantiate_decl in function.c can produce
4017 invalid insns by changing the offset of a memory operand from a
4018 valid one into an invalid one, when the second operand is also a
4019 memory operand. The alternative is not to allow two memory
4020 operands for an insn when not optimizing. The problem only rarely
975ab131 4021 occurs, for example with the C-torture program DFcmp.c. */
cb0ca284 4022
4ddb3ea6 4023 return ! optimize || c4x_valid_operands (code, operands, mode, 0);
cb0ca284
MH
4024}
4025
4026
4027int
4028legitimize_operands (code, operands, mode)
4029 enum rtx_code code;
4030 rtx *operands;
4031 enum machine_mode mode;
4032{
4033 /* Compare only has 2 operands. */
4034 if (code == COMPARE)
4035 {
4036 /* During RTL generation, force constants into pseudos so that
4037 they can get hoisted out of loops. This will tie up an extra
4038 register but can save an extra cycle. Only do this if loop
4039	 optimisation is enabled.  (We cannot pull this trick for add and
4040 sub instructions since the flow pass won't find
4041 autoincrements etc.) This allows us to generate compare
4042 instructions like CMPI R0, *AR0++ where R0 = 42, say, instead
4043 of LDI *AR0++, R0; CMPI 42, R0.
4044
4045 Note that expand_binops will try to load an expensive constant
4046 into a register if it is used within a loop. Unfortunately,
4047 the cost mechanism doesn't allow us to look at the other
4048 operand to decide whether the constant is expensive. */
4049
4ddb3ea6 4050 if (! reload_in_progress
cb0ca284
MH
4051 && TARGET_HOIST
4052 && optimize > 0
87ba6944
MH
4053 && GET_CODE (operands[1]) == CONST_INT
4054 && preserve_subexpressions_p ()
4055 && rtx_cost (operands[1], code) > 1)
cb0ca284
MH
4056 operands[1] = force_reg (mode, operands[1]);
4057
4ddb3ea6
MH
4058 if (! reload_in_progress
4059 && ! c4x_valid_operands (code, operands, mode, 0))
cb0ca284
MH
4060 operands[0] = force_reg (mode, operands[0]);
4061 return 1;
4062 }
4063
4064 /* We cannot do this for ADDI/SUBI insns since we will
4065	 prevent the flow pass from finding autoincrement addressing
4066 opportunities. */
4ddb3ea6
MH
4067 if (! reload_in_progress
4068 && ! ((code == PLUS || code == MINUS) && mode == Pmode)
87ba6944
MH
4069 && TARGET_HOIST
4070 && optimize > 1
4071 && GET_CODE (operands[2]) == CONST_INT
4072 && preserve_subexpressions_p ()
4073 && rtx_cost (operands[2], code) > 1)
cb0ca284
MH
4074 operands[2] = force_reg (mode, operands[2]);
4075
4076 /* We can get better code on a C30 if we force constant shift counts
4077 into a register. This way they can get hoisted out of loops,
4078 tying up a register, but saving an instruction. The downside is
4079 that they may get allocated to an address or index register, and
4080 thus we will get a pipeline conflict if there is a nearby
4081 indirect address using an address register.
4082
4083 Note that expand_binops will not try to load an expensive constant
4084 into a register if it is used within a loop for a shift insn. */
4085
4ddb3ea6
MH
4086 if (! reload_in_progress
4087 && ! c4x_valid_operands (code, operands, mode, TARGET_FORCE))
cb0ca284
MH
4088 {
4089 /* If the operand combination is invalid, we force operand1 into a
4090	 register, preventing reload from having to do this at a
4091 later stage. */
4092 operands[1] = force_reg (mode, operands[1]);
4093 if (TARGET_FORCE)
4094 {
4095 emit_move_insn (operands[0], operands[1]);
4096 operands[1] = copy_rtx (operands[0]);
4097 }
4098 else
4099 {
4100 /* Just in case... */
4ddb3ea6 4101 if (! c4x_valid_operands (code, operands, mode, 0))
cb0ca284
MH
4102 operands[2] = force_reg (mode, operands[2]);
4103 }
4104 }
4105
4106 /* Right shifts require a negative shift count, but GCC expects
4107 a positive count, so we emit a NEG. */
4108 if ((code == ASHIFTRT || code == LSHIFTRT)
4109 && (GET_CODE (operands[2]) != CONST_INT))
d5e4ff48 4110 operands[2] = gen_rtx_NEG (mode, negate_rtx (mode, operands[2]));
cb0ca284
MH
4111
4112 return 1;
4113}
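/* A minimal sketch of the shift-count convention handled at the end of
   legitimize_operands: the C3x/C4x ASH and LSH instructions shift right
   for a negative count, so for user code such as

       int f (int x, int n) { return x >> n; }

   the variable count n is wrapped in a NEG before the shift pattern is
   emitted; constant counts are left to the machine description.  The C
   fragment is illustrative and not part of this file.  */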
4114
4115
4116/* The following predicates are used for instruction scheduling. */
4117
4118int
4119group1_reg_operand (op, mode)
4120 rtx op;
4121 enum machine_mode mode;
4122{
4123 if (mode != VOIDmode && mode != GET_MODE (op))
4124 return 0;
4125 if (GET_CODE (op) == SUBREG)
4126 op = SUBREG_REG (op);
d001969e 4127 return REG_P (op) && (! reload_completed || IS_GROUP1_REG (op));
cb0ca284
MH
4128}
4129
4130
4131int
4132group1_mem_operand (op, mode)
4133 rtx op;
4134 enum machine_mode mode;
4135{
4136 if (mode != VOIDmode && mode != GET_MODE (op))
4137 return 0;
4138
4139 if (GET_CODE (op) == MEM)
4140 {
4141 op = XEXP (op, 0);
4142 if (GET_CODE (op) == PLUS)
4143 {
4144 rtx op0 = XEXP (op, 0);
4145 rtx op1 = XEXP (op, 1);
4146
d001969e
HB
4147 if ((REG_P (op0) && (! reload_completed || IS_GROUP1_REG (op0)))
4148 || (REG_P (op1) && (! reload_completed || IS_GROUP1_REG (op1))))
cb0ca284
MH
4149 return 1;
4150 }
d001969e 4151 else if ((REG_P (op)) && (! reload_completed || IS_GROUP1_REG (op)))
cb0ca284
MH
4152 return 1;
4153 }
4154
4155 return 0;
4156}
4157
4158
4159/* Return true if OP is any one of the address registers.  */
4160
4161int
4162arx_reg_operand (op, mode)
4163 rtx op;
4164 enum machine_mode mode;
4165{
4166 if (mode != VOIDmode && mode != GET_MODE (op))
4167 return 0;
4168 if (GET_CODE (op) == SUBREG)
4169 op = SUBREG_REG (op);
d001969e 4170 return REG_P (op) && (! reload_completed || IS_ADDR_REG (op));
cb0ca284
MH
4171}
4172
4173
4174static int
4175c4x_arn_reg_operand (op, mode, regno)
4176 rtx op;
4177 enum machine_mode mode;
8d485e2d 4178 unsigned int regno;
cb0ca284
MH
4179{
4180 if (mode != VOIDmode && mode != GET_MODE (op))
4181 return 0;
4182 if (GET_CODE (op) == SUBREG)
4183 op = SUBREG_REG (op);
d001969e 4184 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
cb0ca284
MH
4185}
4186
4187
4188static int
4189c4x_arn_mem_operand (op, mode, regno)
4190 rtx op;
4191 enum machine_mode mode;
8d485e2d 4192 unsigned int regno;
cb0ca284
MH
4193{
4194 if (mode != VOIDmode && mode != GET_MODE (op))
4195 return 0;
4196
4197 if (GET_CODE (op) == MEM)
4198 {
4199 op = XEXP (op, 0);
4200 switch (GET_CODE (op))
4201 {
4202 case PRE_DEC:
4203 case POST_DEC:
4204 case PRE_INC:
4205 case POST_INC:
4206 op = XEXP (op, 0);
4207	  /* Fall through.  */
4208 case REG:
d001969e 4209 return REG_P (op) && (! reload_completed || (REGNO (op) == regno));
cb0ca284
MH
4210
4211 case PRE_MODIFY:
4212 case POST_MODIFY:
d001969e
HB
4213 if (REG_P (XEXP (op, 0)) && (! reload_completed
4214 || (REGNO (XEXP (op, 0)) == regno)))
cb0ca284
MH
4215 return 1;
4216 if (REG_P (XEXP (XEXP (op, 1), 1))
d001969e
HB
4217 && (! reload_completed
4218 || (REGNO (XEXP (XEXP (op, 1), 1)) == regno)))
cb0ca284
MH
4219 return 1;
4220 break;
4221
4222 case PLUS:
4223 {
4224 rtx op0 = XEXP (op, 0);
4225 rtx op1 = XEXP (op, 1);
4226
d001969e
HB
4227 if ((REG_P (op0) && (! reload_completed
4228 || (REGNO (op0) == regno)))
4229 || (REG_P (op1) && (! reload_completed
4230 || (REGNO (op1) == regno))))
cb0ca284
MH
4231 return 1;
4232 }
4233 break;
933cddd0 4234
cb0ca284
MH
4235 default:
4236 break;
4237 }
4238 }
4239 return 0;
4240}
4241
4242
4243int
4244ar0_reg_operand (op, mode)
4245 rtx op;
4246 enum machine_mode mode;
4247{
4248 return c4x_arn_reg_operand (op, mode, AR0_REGNO);
4249}
4250
4251
4252int
4253ar0_mem_operand (op, mode)
4254 rtx op;
4255 enum machine_mode mode;
4256{
4257 return c4x_arn_mem_operand (op, mode, AR0_REGNO);
4258}
4259
4260
4261int
4262ar1_reg_operand (op, mode)
4263 rtx op;
4264 enum machine_mode mode;
4265{
4266 return c4x_arn_reg_operand (op, mode, AR1_REGNO);
4267}
4268
4269
4270int
4271ar1_mem_operand (op, mode)
4272 rtx op;
4273 enum machine_mode mode;
4274{
4275 return c4x_arn_mem_operand (op, mode, AR1_REGNO);
4276}
4277
4278
4279int
4280ar2_reg_operand (op, mode)
4281 rtx op;
4282 enum machine_mode mode;
4283{
4284 return c4x_arn_reg_operand (op, mode, AR2_REGNO);
4285}
4286
4287
4288int
4289ar2_mem_operand (op, mode)
4290 rtx op;
4291 enum machine_mode mode;
4292{
4293 return c4x_arn_mem_operand (op, mode, AR2_REGNO);
4294}
4295
4296
4297int
4298ar3_reg_operand (op, mode)
4299 rtx op;
4300 enum machine_mode mode;
4301{
4302 return c4x_arn_reg_operand (op, mode, AR3_REGNO);
4303}
4304
4305
4306int
4307ar3_mem_operand (op, mode)
4308 rtx op;
4309 enum machine_mode mode;
4310{
4311 return c4x_arn_mem_operand (op, mode, AR3_REGNO);
4312}
4313
4314
4315int
4316ar4_reg_operand (op, mode)
4317 rtx op;
4318 enum machine_mode mode;
4319{
4320 return c4x_arn_reg_operand (op, mode, AR4_REGNO);
4321}
4322
4323
4324int
4325ar4_mem_operand (op, mode)
4326 rtx op;
4327 enum machine_mode mode;
4328{
4329 return c4x_arn_mem_operand (op, mode, AR4_REGNO);
4330}
4331
4332
4333int
4334ar5_reg_operand (op, mode)
4335 rtx op;
4336 enum machine_mode mode;
4337{
4338 return c4x_arn_reg_operand (op, mode, AR5_REGNO);
4339}
4340
4341
4342int
4343ar5_mem_operand (op, mode)
4344 rtx op;
4345 enum machine_mode mode;
4346{
4347 return c4x_arn_mem_operand (op, mode, AR5_REGNO);
4348}
4349
4350
4351int
4352ar6_reg_operand (op, mode)
4353 rtx op;
4354 enum machine_mode mode;
4355{
4356 return c4x_arn_reg_operand (op, mode, AR6_REGNO);
4357}
4358
4359
4360int
4361ar6_mem_operand (op, mode)
4362 rtx op;
4363 enum machine_mode mode;
4364{
4365 return c4x_arn_mem_operand (op, mode, AR6_REGNO);
4366}
4367
4368
4369int
4370ar7_reg_operand (op, mode)
4371 rtx op;
4372 enum machine_mode mode;
4373{
4374 return c4x_arn_reg_operand (op, mode, AR7_REGNO);
4375}
4376
4377
4378int
4379ar7_mem_operand (op, mode)
4380 rtx op;
4381 enum machine_mode mode;
4382{
4383 return c4x_arn_mem_operand (op, mode, AR7_REGNO);
4384}
4385
4386
4387int
4388ir0_reg_operand (op, mode)
4389 rtx op;
4390 enum machine_mode mode;
4391{
4392 return c4x_arn_reg_operand (op, mode, IR0_REGNO);
4393}
4394
4395
4396int
4397ir0_mem_operand (op, mode)
4398 rtx op;
4399 enum machine_mode mode;
4400{
4401 return c4x_arn_mem_operand (op, mode, IR0_REGNO);
4402}
4403
4404
4405int
4406ir1_reg_operand (op, mode)
4407 rtx op;
4408 enum machine_mode mode;
4409{
4410 return c4x_arn_reg_operand (op, mode, IR1_REGNO);
4411}
4412
4413
4414int
4415ir1_mem_operand (op, mode)
4416 rtx op;
4417 enum machine_mode mode;
4418{
4419 return c4x_arn_mem_operand (op, mode, IR1_REGNO);
4420}
4421
4422
975ab131
MH
4423/* This is similar to operand_subword but allows autoincrement
4424 addressing. */
cb0ca284
MH
4425
4426rtx
4427c4x_operand_subword (op, i, validate_address, mode)
4428 rtx op;
4429 int i;
4430 int validate_address;
4431 enum machine_mode mode;
4432{
4433 if (mode != HImode && mode != HFmode)
4434 fatal_insn ("c4x_operand_subword: invalid mode", op);
4435
4436 if (mode == HFmode && REG_P (op))
4437 fatal_insn ("c4x_operand_subword: invalid operand", op);
4438
4439 if (GET_CODE (op) == MEM)
4440 {
4441 enum rtx_code code = GET_CODE (XEXP (op, 0));
4442 enum machine_mode mode = GET_MODE (XEXP (op, 0));
50c33087
MH
4443 enum machine_mode submode;
4444
4445 submode = mode;
4446 if (mode == HImode)
4447 submode = QImode;
4448 else if (mode == HFmode)
4449 submode = QFmode;
cb0ca284
MH
4450
4451 switch (code)
4452 {
4453 case POST_INC:
4454 case PRE_INC:
50c33087 4455 return gen_rtx_MEM (submode, XEXP (op, 0));
cb0ca284
MH
4456
4457 case POST_DEC:
4458 case PRE_DEC:
4459 case PRE_MODIFY:
4460 case POST_MODIFY:
4461 /* We could handle these with some difficulty.
4462 e.g., *p-- => *(p-=2); *(p+1). */
4463 fatal_insn ("c4x_operand_subword: invalid autoincrement", op);
4464
50c33087
MH
4465 case SYMBOL_REF:
4466 case LABEL_REF:
4467 case CONST:
4468 case CONST_INT:
4469 fatal_insn ("c4x_operand_subword: invalid address", op);
4470
4471 /* Even though offsettable_address_p considers (MEM
4472 (LO_SUM)) to be offsettable, it is not safe if the
4473 address is at the end of the data page since we also have
4474 to fix up the associated high PART. In this case where
4475 we are trying to split a HImode or HFmode memory
4476 reference, we would have to emit another insn to reload a
4477 new HIGH value. It's easier to disable LO_SUM memory references
4478 in HImode or HFmode and we probably get better code. */
4479 case LO_SUM:
4480 fatal_insn ("c4x_operand_subword: address not offsettable", op);
4481
cb0ca284
MH
4482 default:
4483 break;
4484 }
4485 }
4486
4487 return operand_subword (op, i, validate_address, mode);
4488}
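/* Example of the autoincrement case accepted above (a sketch): splitting
   an HFmode reference (mem:HF (post_inc:QI (reg:QI ar0))) produces two
   QFmode references (mem:QF (post_inc:QI (reg:QI ar0))); the
   post-increment itself advances AR0 from the low word to the high word,
   which plain operand_subword cannot express.  */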
4489
4490/* Handle machine specific pragmas for compatibility with existing
4491 compilers for the C3x/C4x.
4492
4493 pragma attribute
4494 ----------------------------------------------------------
4495 CODE_SECTION(symbol,"section") section("section")
4496 DATA_SECTION(symbol,"section") section("section")
4497 FUNC_CANNOT_INLINE(function)
4498 FUNC_EXT_CALLED(function)
4499 FUNC_IS_PURE(function) const
4500 FUNC_IS_SYSTEM(function)
4501 FUNC_NEVER_RETURNS(function) noreturn
4502 FUNC_NO_GLOBAL_ASG(function)
4503 FUNC_NO_IND_ASG(function)
4504 INTERRUPT(function) interrupt
4505
4506 */
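/* Illustrative only (not part of the original file): with the mapping
   above, TI-style source such as

       #pragma CODE_SECTION (dsp_filter, ".fast_code")
       void dsp_filter (void);

   is treated as if the function carried the GCC attribute

       void dsp_filter (void) __attribute__ ((section (".fast_code")));

   The names dsp_filter and ".fast_code" are made-up sample values.  */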
4507
8b97c5f8
ZW
4508/* Parse a C4x pragma, of the form ( function [, "section"] ) \n.
4509 FUNC is loaded with the IDENTIFIER_NODE of the function, SECT with
4510 the STRING_CST node of the string. If SECT is null, then this
4511 pragma doesn't take a section string. Returns 0 for a good pragma,
4512 -1 for a malformed pragma. */
4513#define BAD(msgid, arg) do { warning (msgid, arg); return -1; } while (0)
4514
9f416fac
MH
4515static int (*c_lex_func) (tree *);
4516
4517void
4518c4x_init_pragma (get_token)
4519 int (*get_token) PARAMS ((tree *));
4520{
4521 c_lex_func = get_token;
4522}
4523
4524
8b97c5f8
ZW
4525static int
4526c4x_parse_pragma (name, func, sect)
4527 const char *name;
4528 tree *func;
4529 tree *sect;
cb0ca284 4530{
8b97c5f8 4531 tree f, s, x;
cb0ca284 4532
9f416fac 4533 if (c_lex_func (&x) != CPP_OPEN_PAREN)
8b97c5f8 4534 BAD ("missing '(' after '#pragma %s' - ignored", name);
cb0ca284 4535
9f416fac 4536 if (c_lex_func (&f) != CPP_NAME)
8b97c5f8 4537 BAD ("missing function name in '#pragma %s' - ignored", name);
cb0ca284 4538
8b97c5f8 4539 if (sect)
cb0ca284 4540 {
9f416fac 4541 if (c_lex_func (&x) != CPP_COMMA)
8b97c5f8 4542 BAD ("malformed '#pragma %s' - ignored", name);
9f416fac 4543 if (c_lex_func (&s) != CPP_STRING)
8b97c5f8
ZW
4544 BAD ("missing section name in '#pragma %s' - ignored", name);
4545 *sect = s;
cb0ca284 4546 }
8b97c5f8 4547
9f416fac 4548 if (c_lex_func (&x) != CPP_CLOSE_PAREN)
8b97c5f8
ZW
4549 BAD ("missing ')' for '#pragma %s' - ignored", name);
4550
9f416fac 4551 if (c_lex_func (&x) != CPP_EOF)
8b97c5f8
ZW
4552 warning ("junk at end of '#pragma %s'", name);
4553
4554 *func = f;
4555 return 0;
cb0ca284
MH
4556}
4557
8b97c5f8
ZW
4558void
4559c4x_pr_CODE_SECTION (pfile)
4560 cpp_reader *pfile ATTRIBUTE_UNUSED;
4561{
4562 tree func, sect;
4563
4564 if (c4x_parse_pragma ("CODE_SECTION", &func, &sect))
4565 return;
4566 code_tree = chainon (code_tree,
4567 build_tree_list (func,
4568 build_tree_list (NULL_TREE, sect)));
4569}
4570
4571void
4572c4x_pr_DATA_SECTION (pfile)
4573 cpp_reader *pfile ATTRIBUTE_UNUSED;
4574{
4575 tree func, sect;
4576
4577 if (c4x_parse_pragma ("DATA_SECTION", &func, &sect))
4578 return;
4579 data_tree = chainon (data_tree,
4580 build_tree_list (func,
4581 build_tree_list (NULL_TREE, sect)));
4582}
4583
4584void
4585c4x_pr_FUNC_IS_PURE (pfile)
4586 cpp_reader *pfile ATTRIBUTE_UNUSED;
4587{
4588 tree func;
4589
4590 if (c4x_parse_pragma ("FUNC_IS_PURE", &func, 0))
4591 return;
4592 pure_tree = chainon (pure_tree, build_tree_list (func, NULL_TREE));
4593}
4594
4595void
4596c4x_pr_FUNC_NEVER_RETURNS (pfile)
4597 cpp_reader *pfile ATTRIBUTE_UNUSED;
4598{
4599 tree func;
4600
4601 if (c4x_parse_pragma ("FUNC_NEVER_RETURNS", &func, 0))
4602 return;
4603 noreturn_tree = chainon (noreturn_tree, build_tree_list (func, NULL_TREE));
4604}
4605
4606void
4607c4x_pr_INTERRUPT (pfile)
4608 cpp_reader *pfile ATTRIBUTE_UNUSED;
4609{
4610 tree func;
4611
4612 if (c4x_parse_pragma ("INTERRUPT", &func, 0))
4613 return;
4614 interrupt_tree = chainon (interrupt_tree, build_tree_list (func, NULL_TREE));
4615}
4616
4617/* Used for FUNC_CANNOT_INLINE, FUNC_EXT_CALLED, FUNC_IS_SYSTEM,
4618 FUNC_NO_GLOBAL_ASG, and FUNC_NO_IND_ASG. */
4619void
4620c4x_pr_ignored (pfile)
4621 cpp_reader *pfile ATTRIBUTE_UNUSED;
4622{
4623}
cb0ca284 4624
eff784fe
MH
4625struct name_list
4626{
4627 struct name_list *next;
16219347 4628 const char *name;
eff784fe
MH
4629};
4630
4631static struct name_list *global_head;
4632static struct name_list *extern_head;
4633
4634
4635/* Add NAME to list of global symbols and remove from external list if
4636 present on external list. */
4637
4638void
4639c4x_global_label (name)
41387ffd 4640 const char *name;
eff784fe
MH
4641{
4642 struct name_list *p, *last;
4643
4644 /* Do not insert duplicate names, so linearly search through list of
4645 existing names. */
4646 p = global_head;
4647 while (p)
4648 {
4649 if (strcmp (p->name, name) == 0)
4650 return;
4651 p = p->next;
4652 }
4653 p = (struct name_list *) permalloc (sizeof *p);
4654 p->next = global_head;
4655 p->name = name;
4656 global_head = p;
4657
4658 /* Remove this name from ref list if present. */
4659 last = NULL;
4660 p = extern_head;
4661 while (p)
4662 {
4663 if (strcmp (p->name, name) == 0)
4664 {
4665 if (last)
4666 last->next = p->next;
4667 else
4668 extern_head = p->next;
4669 break;
4670 }
4671 last = p;
4672 p = p->next;
4673 }
4674}
4675
4676
4677/* Add NAME to list of external symbols. */
4678
4679void
4680c4x_external_ref (name)
41387ffd 4681 const char *name;
eff784fe
MH
4682{
4683 struct name_list *p;
4684
4685 /* Do not insert duplicate names. */
4686 p = extern_head;
4687 while (p)
4688 {
4689 if (strcmp (p->name, name) == 0)
4690 return;
4691 p = p->next;
4692 }
4693
4694 /* Do not insert ref if global found. */
4695 p = global_head;
4696 while (p)
4697 {
4698 if (strcmp (p->name, name) == 0)
4699 return;
4700 p = p->next;
4701 }
4702 p = (struct name_list *) permalloc (sizeof *p);
4703 p->next = extern_head;
4704 p->name = name;
4705 extern_head = p;
4706}
4707
4708
4709void
4710c4x_file_end (fp)
4711 FILE *fp;
4712{
4713 struct name_list *p;
4714
4715 /* Output all external names that are not global. */
4716 p = extern_head;
4717 while (p)
4718 {
4719 fprintf (fp, "\t.ref\t");
4720 assemble_name (fp, p->name);
4721 fprintf (fp, "\n");
4722 p = p->next;
4723 }
4724 fprintf (fp, "\t.end\n");
4725}
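/* Sample trailer emitted by c4x_file_end for a unit that references an
   undefined symbol _sin (the symbol name is illustrative):

       .ref    _sin
       .end
*/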
4726
4727
cb0ca284 4728static void
eff784fe 4729c4x_check_attribute (attrib, list, decl, attributes)
8d485e2d 4730 const char *attrib;
cb0ca284
MH
4731 tree list, decl, *attributes;
4732{
4733 while (list != NULL_TREE
4ddb3ea6
MH
4734 && IDENTIFIER_POINTER (TREE_PURPOSE (list))
4735 != IDENTIFIER_POINTER (DECL_NAME (decl)))
eff784fe 4736 list = TREE_CHAIN (list);
cb0ca284 4737 if (list)
12a68f1f
JM
4738 *attributes = tree_cons (get_identifier (attrib), TREE_VALUE (list),
4739 *attributes);
cb0ca284
MH
4740}
4741
4742
12a68f1f
JM
4743static void
4744c4x_insert_attributes (decl, attributes)
cb0ca284
MH
4745 tree decl, *attributes;
4746{
4747 switch (TREE_CODE (decl))
4748 {
4749 case FUNCTION_DECL:
4750 c4x_check_attribute ("section", code_tree, decl, attributes);
4751 c4x_check_attribute ("const", pure_tree, decl, attributes);
4752 c4x_check_attribute ("noreturn", noreturn_tree, decl, attributes);
4753 c4x_check_attribute ("interrupt", interrupt_tree, decl, attributes);
4754 break;
4755
4756 case VAR_DECL:
4757 c4x_check_attribute ("section", data_tree, decl, attributes);
4758 break;
4759
4760 default:
4761 break;
4762 }
4763}
4764
91d231cb
JM
4765/* Table of valid machine attributes. */
4766const struct attribute_spec c4x_attribute_table[] =
4767{
4768 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4769 { "interrupt", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4770 /* FIXME: code elsewhere in this file treats "naked" as a synonym of
4771 "interrupt"; should it be accepted here? */
4772 { "assembler", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4773 { "leaf_pretend", 0, 0, false, true, true, c4x_handle_fntype_attribute },
4774 { NULL, 0, 0, false, false, false, NULL }
4775};
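/* Usage sketch for the machine attributes above (illustrative user code):

       void timer_isr (void) __attribute__ ((interrupt));

   c4x_handle_fntype_attribute below warns and drops the attribute when
   it is applied to anything other than a function type.  */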
cb0ca284 4776
91d231cb
JM
4777/* Handle an attribute requiring a FUNCTION_TYPE;
4778 arguments as in struct attribute_spec.handler. */
4779static tree
4780c4x_handle_fntype_attribute (node, name, args, flags, no_add_attrs)
4781 tree *node;
4782 tree name;
d5e4ff48 4783 tree args ATTRIBUTE_UNUSED;
91d231cb
JM
4784 int flags ATTRIBUTE_UNUSED;
4785 bool *no_add_attrs;
cb0ca284 4786{
91d231cb
JM
4787 if (TREE_CODE (*node) != FUNCTION_TYPE)
4788 {
4789 warning ("`%s' attribute only applies to functions",
4790 IDENTIFIER_POINTER (name));
4791 *no_add_attrs = true;
4792 }
4793
4794 return NULL_TREE;
cb0ca284
MH
4795}
4796
4797
d5e4ff48 4798/* !!! FIXME to emit RPTS correctly. */
975ab131 4799
cb0ca284
MH
4800int
4801c4x_rptb_rpts_p (insn, op)
4802 rtx insn, op;
4803{
4804 /* The next insn should be our label marking where the
4805 repeat block starts. */
4806 insn = NEXT_INSN (insn);
4807 if (GET_CODE (insn) != CODE_LABEL)
4808 {
4809 /* Some insns may have been shifted between the RPTB insn
4810 and the top label... They were probably destined to
4811 be moved out of the loop. For now, let's leave them
4812	 where they are and report the problem.  We should
4813 probably move these insns before the repeat block insn. */
4814 if (TARGET_DEBUG)
4815	  fatal_insn ("c4x_rptb_rpts_p: Repeat block top label moved",
4816 insn);
4817 return 0;
4818 }
4819
4820 /* Skip any notes. */
4821 insn = next_nonnote_insn (insn);
4822
4823 /* This should be our first insn in the loop. */
2c3c49de 4824 if (! INSN_P (insn))
cb0ca284
MH
4825 return 0;
4826
4827 /* Skip any notes. */
4828 insn = next_nonnote_insn (insn);
4829
2c3c49de 4830 if (! INSN_P (insn))
cb0ca284
MH
4831 return 0;
4832
f1c374cb 4833 if (recog_memoized (insn) != CODE_FOR_rptb_end)
cb0ca284
MH
4834 return 0;
4835
4836 if (TARGET_RPTS)
4837 return 1;
4838
4839 return (GET_CODE (op) == CONST_INT) && TARGET_RPTS_CYCLES (INTVAL (op));
4840}
4841
cb0ca284 4842
5078f5eb
HB
4843/* Check if register r11 is used as the destination of an insn. */
4844
4845static int
4846c4x_r11_set_p(x)
4847 rtx x;
4848{
5078f5eb
HB
4849 rtx set;
4850 int i, j;
4851 const char *fmt;
4852
4853 if (x == 0)
4854 return 0;
4855
4a1f52a8 4856 if (INSN_P (x) && GET_CODE (PATTERN (x)) == SEQUENCE)
5078f5eb
HB
4857 x = XVECEXP (PATTERN (x), 0, XVECLEN (PATTERN (x), 0) - 1);
4858
4a1f52a8
HB
4859 if (INSN_P (x) && (set = single_set (x)))
4860 x = SET_DEST (set);
5078f5eb 4861
4a1f52a8 4862 if (GET_CODE (x) == REG && REGNO (x) == R11_REGNO)
5078f5eb
HB
4863 return 1;
4864
4865 fmt = GET_RTX_FORMAT (GET_CODE (x));
4a1f52a8 4866 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5078f5eb
HB
4867 {
4868 if (fmt[i] == 'e')
4869 {
4870 if (c4x_r11_set_p (XEXP (x, i)))
4871 return 1;
4872 }
4873 else if (fmt[i] == 'E')
4874 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4875 if (c4x_r11_set_p (XVECEXP (x, i, j)))
4876 return 1;
4877 }
4878 return 0;
4879}
4880
4881
4882/* The c4x sometimes has a problem when the insn before the laj insn
4883 sets the r11 register. Check for this situation. */
4884
4885int
4886c4x_check_laj_p (insn)
4887 rtx insn;
4888{
4889 insn = prev_nonnote_insn (insn);
4890
4891 /* If this is the start of the function no nop is needed. */
4892 if (insn == 0)
4893 return 0;
4894
4895 /* If the previous insn is a code label we have to insert a nop. This
4896 could be a jump or table jump. We can find the normal jumps by
4897 scanning the function but this will not find table jumps. */
4898 if (GET_CODE (insn) == CODE_LABEL)
4899 return 1;
4900
4901 /* If the previous insn sets register r11 we have to insert a nop. */
4902 if (c4x_r11_set_p (insn))
4903 return 1;
4904
4905 /* No nop needed. */
4906 return 0;
4907}
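/* Illustrative sequence for the hazard that c4x_check_laj_p guards
   against (assembly sketch, not verbatim compiler output):

       ldi   *ar0, r11      ; previous insn writes R11
       nop                  ; inserted when c4x_check_laj_p returns 1
       laj   _callee        ; LAJ stores its return address in R11

   Without the NOP, the write to R11 in the preceding insn can conflict
   with LAJ's own update of R11.  */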
4908
4909
cb0ca284
MH
4910/* Adjust the cost of a scheduling dependency. Return the new cost of
4911 a dependency LINK or INSN on DEP_INSN. COST is the current cost.
4912   A set of an address register followed by a use incurs a 2 cycle
4913   stall (reduced to a single cycle on the c40 using LDA), while a read
4914   of an address register followed by a use incurs a single cycle stall.  */
975ab131 4915
cb0ca284
MH
4916#define SET_USE_COST 3
4917#define SETLDA_USE_COST 2
4918#define READ_USE_COST 2
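/* Worked example for the costs above (instruction sketch only):

       ldi  @val, ar0       ; DEP_INSN sets AR0
       ldi  *ar0, r0        ; INSN uses AR0 as an address

   gets a dependence cost of SET_USE_COST (3), i.e. two stall cycles on
   top of the nominal one-cycle latency.  Loading AR0 with LDA on the C4x
   reduces the cost to SETLDA_USE_COST (2), and an insn that merely reads
   an address register before the use costs READ_USE_COST (2).  */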
4919
c237e94a 4920static int
cb0ca284
MH
4921c4x_adjust_cost (insn, link, dep_insn, cost)
4922 rtx insn;
4923 rtx link;
4924 rtx dep_insn;
4925 int cost;
4926{
4927 /* Don't worry about this until we know what registers have been
4928 assigned. */
d001969e 4929 if (flag_schedule_insns == 0 && ! reload_completed)
cb0ca284
MH
4930 return 0;
4931
4932 /* How do we handle dependencies where a read followed by another
4933 read causes a pipeline stall? For example, a read of ar0 followed
4934 by the use of ar0 for a memory reference. It looks like we
4935 need to extend the scheduler to handle this case. */
4936
4937 /* Reload sometimes generates a CLOBBER of a stack slot, e.g.,
4938 (clobber (mem:QI (plus:QI (reg:QI 11 ar3) (const_int 261)))),
4939 so only deal with insns we know about. */
4940 if (recog_memoized (dep_insn) < 0)
4941 return 0;
4942
4943 if (REG_NOTE_KIND (link) == 0)
4944 {
4945 int max = 0;
4946
4947 /* Data dependency; DEP_INSN writes a register that INSN reads some
4948 cycles later. */
cb0ca284
MH
4949 if (TARGET_C3X)
4950 {
4951 if (get_attr_setgroup1 (dep_insn) && get_attr_usegroup1 (insn))
4952 max = SET_USE_COST > max ? SET_USE_COST : max;
4953 if (get_attr_readarx (dep_insn) && get_attr_usegroup1 (insn))
4954 max = READ_USE_COST > max ? READ_USE_COST : max;
4955 }
4956 else
4957 {
4958 /* This could be significantly optimized. We should look
4959 to see if dep_insn sets ar0-ar7 or ir0-ir1 and if
4960 insn uses ar0-ar7. We then test if the same register
4961 is used. The tricky bit is that some operands will
4962 use several registers... */
cb0ca284
MH
4963 if (get_attr_setar0 (dep_insn) && get_attr_usear0 (insn))
4964 max = SET_USE_COST > max ? SET_USE_COST : max;
4965 if (get_attr_setlda_ar0 (dep_insn) && get_attr_usear0 (insn))
4966 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4967 if (get_attr_readar0 (dep_insn) && get_attr_usear0 (insn))
4968 max = READ_USE_COST > max ? READ_USE_COST : max;
4969
4970 if (get_attr_setar1 (dep_insn) && get_attr_usear1 (insn))
4971 max = SET_USE_COST > max ? SET_USE_COST : max;
4972 if (get_attr_setlda_ar1 (dep_insn) && get_attr_usear1 (insn))
4973 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4974 if (get_attr_readar1 (dep_insn) && get_attr_usear1 (insn))
4975 max = READ_USE_COST > max ? READ_USE_COST : max;
4976
4977 if (get_attr_setar2 (dep_insn) && get_attr_usear2 (insn))
4978 max = SET_USE_COST > max ? SET_USE_COST : max;
4979 if (get_attr_setlda_ar2 (dep_insn) && get_attr_usear2 (insn))
4980 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4981 if (get_attr_readar2 (dep_insn) && get_attr_usear2 (insn))
4982 max = READ_USE_COST > max ? READ_USE_COST : max;
4983
4984 if (get_attr_setar3 (dep_insn) && get_attr_usear3 (insn))
4985 max = SET_USE_COST > max ? SET_USE_COST : max;
4986 if (get_attr_setlda_ar3 (dep_insn) && get_attr_usear3 (insn))
4987 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4988 if (get_attr_readar3 (dep_insn) && get_attr_usear3 (insn))
4989 max = READ_USE_COST > max ? READ_USE_COST : max;
4990
4991 if (get_attr_setar4 (dep_insn) && get_attr_usear4 (insn))
4992 max = SET_USE_COST > max ? SET_USE_COST : max;
4993 if (get_attr_setlda_ar4 (dep_insn) && get_attr_usear4 (insn))
4994 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
4995 if (get_attr_readar4 (dep_insn) && get_attr_usear4 (insn))
4996 max = READ_USE_COST > max ? READ_USE_COST : max;
4997
4998 if (get_attr_setar5 (dep_insn) && get_attr_usear5 (insn))
4999 max = SET_USE_COST > max ? SET_USE_COST : max;
5000 if (get_attr_setlda_ar5 (dep_insn) && get_attr_usear5 (insn))
5001 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5002 if (get_attr_readar5 (dep_insn) && get_attr_usear5 (insn))
5003 max = READ_USE_COST > max ? READ_USE_COST : max;
5004
5005 if (get_attr_setar6 (dep_insn) && get_attr_usear6 (insn))
5006 max = SET_USE_COST > max ? SET_USE_COST : max;
5007 if (get_attr_setlda_ar6 (dep_insn) && get_attr_usear6 (insn))
5008 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5009 if (get_attr_readar6 (dep_insn) && get_attr_usear6 (insn))
5010 max = READ_USE_COST > max ? READ_USE_COST : max;
5011
5012 if (get_attr_setar7 (dep_insn) && get_attr_usear7 (insn))
5013 max = SET_USE_COST > max ? SET_USE_COST : max;
5014 if (get_attr_setlda_ar7 (dep_insn) && get_attr_usear7 (insn))
5015 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5016 if (get_attr_readar7 (dep_insn) && get_attr_usear7 (insn))
5017 max = READ_USE_COST > max ? READ_USE_COST : max;
5018
5019 if (get_attr_setir0 (dep_insn) && get_attr_useir0 (insn))
5020 max = SET_USE_COST > max ? SET_USE_COST : max;
5021 if (get_attr_setlda_ir0 (dep_insn) && get_attr_useir0 (insn))
5022 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5023
5024 if (get_attr_setir1 (dep_insn) && get_attr_useir1 (insn))
5025 max = SET_USE_COST > max ? SET_USE_COST : max;
5026 if (get_attr_setlda_ir1 (dep_insn) && get_attr_useir1 (insn))
5027 max = SETLDA_USE_COST > max ? SETLDA_USE_COST : max;
5028 }
5029
5030 if (max)
5031 cost = max;
5032
5033 /* For other data dependencies, the default cost specified in the
5034 md is correct. */
5035 return cost;
5036 }
5037 else if (REG_NOTE_KIND (link) == REG_DEP_ANTI)
5038 {
5039 /* Anti dependency; DEP_INSN reads a register that INSN writes some
5040 cycles later. */
5041
5042 /* For c4x anti dependencies, the cost is 0. */
5043 return 0;
5044 }
5045 else if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
5046 {
5047 /* Output dependency; DEP_INSN writes a register that INSN writes some
5048 cycles later. */
5049
5050 /* For c4x output dependencies, the cost is 0. */
5051 return 0;
5052 }
5053 else
5054 abort ();
5055}
8a119a7d
MH
5056
5057void
f6155fda 5058c4x_init_builtins ()
8a119a7d 5059{
f6155fda
SS
5060 tree endlink = void_list_node;
5061
8a119a7d
MH
5062 builtin_function ("fast_ftoi",
5063 build_function_type
5064 (integer_type_node,
5065 tree_cons (NULL_TREE, double_type_node, endlink)),
df4ae160 5066 C4X_BUILTIN_FIX, BUILT_IN_MD, NULL);
8a119a7d
MH
5067 builtin_function ("ansi_ftoi",
5068 build_function_type
5069 (integer_type_node,
5070 tree_cons (NULL_TREE, double_type_node, endlink)),
df4ae160 5071 C4X_BUILTIN_FIX_ANSI, BUILT_IN_MD, NULL);
8a119a7d
MH
5072 if (TARGET_C3X)
5073 builtin_function ("fast_imult",
5074 build_function_type
5075 (integer_type_node,
5076 tree_cons (NULL_TREE, integer_type_node,
5077 tree_cons (NULL_TREE,
5078 integer_type_node, endlink))),
df4ae160 5079 C4X_BUILTIN_MPYI, BUILT_IN_MD, NULL);
8a119a7d
MH
5080 else
5081 {
5082 builtin_function ("toieee",
5083 build_function_type
5084 (double_type_node,
5085 tree_cons (NULL_TREE, double_type_node, endlink)),
df4ae160 5086 C4X_BUILTIN_TOIEEE, BUILT_IN_MD, NULL);
8a119a7d
MH
5087 builtin_function ("frieee",
5088 build_function_type
5089 (double_type_node,
5090 tree_cons (NULL_TREE, double_type_node, endlink)),
df4ae160 5091 C4X_BUILTIN_FRIEEE, BUILT_IN_MD, NULL);
8a119a7d
MH
5092 builtin_function ("fast_invf",
5093 build_function_type
5094 (double_type_node,
5095 tree_cons (NULL_TREE, double_type_node, endlink)),
df4ae160 5096 C4X_BUILTIN_RCPF, BUILT_IN_MD, NULL);
8a119a7d
MH
5097 }
5098}
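/* Hedged usage sketch for the builtins registered above (hypothetical
   user code, not part of this file):

       int    i = fast_ftoi (x);     -- single FIX-style conversion
       int    j = ansi_ftoi (x);     -- ANSI truncation semantics
       double y = toieee (z);        -- C4x only

   fast_imult is only registered on the C3x, while toieee, frieee and
   fast_invf are only registered on the C4x, matching the TARGET_C3X
   test in c4x_init_builtins.  */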
5099
5100
5101rtx
5102c4x_expand_builtin (exp, target, subtarget, mode, ignore)
5103 tree exp;
5104 rtx target;
5105 rtx subtarget ATTRIBUTE_UNUSED;
5106 enum machine_mode mode ATTRIBUTE_UNUSED;
5107 int ignore ATTRIBUTE_UNUSED;
5108{
5109 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5110 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5111 tree arglist = TREE_OPERAND (exp, 1);
5112 tree arg0, arg1;
5113 rtx r0, r1;
5114
5115 switch (fcode)
5116 {
8a119a7d
MH
5117 case C4X_BUILTIN_FIX:
5118 arg0 = TREE_VALUE (arglist);
5119 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5120 r0 = protect_from_queue (r0, 0);
5121 if (! target || ! register_operand (target, QImode))
5122 target = gen_reg_rtx (QImode);
5123 emit_insn (gen_fixqfqi_clobber (target, r0));
5124 return target;
5125
5126 case C4X_BUILTIN_FIX_ANSI:
5127 arg0 = TREE_VALUE (arglist);
5128 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5129 r0 = protect_from_queue (r0, 0);
5130 if (! target || ! register_operand (target, QImode))
5131 target = gen_reg_rtx (QImode);
5132 emit_insn (gen_fix_truncqfqi2 (target, r0));
5133 return target;
5134
5135 case C4X_BUILTIN_MPYI:
5136 if (! TARGET_C3X)
5137 break;
5138 arg0 = TREE_VALUE (arglist);
5139 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5140 r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
5141 r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
5142 r0 = protect_from_queue (r0, 0);
5143 r1 = protect_from_queue (r1, 0);
5144 if (! target || ! register_operand (target, QImode))
5145 target = gen_reg_rtx (QImode);
5146 emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
5147 return target;
5148
5149 case C4X_BUILTIN_TOIEEE:
5150 if (TARGET_C3X)
5151 break;
5152 arg0 = TREE_VALUE (arglist);
5153 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5154 r0 = protect_from_queue (r0, 0);
5155 if (! target || ! register_operand (target, QFmode))
5156 target = gen_reg_rtx (QFmode);
5157 emit_insn (gen_toieee (target, r0));
5158 return target;
5159
5160 case C4X_BUILTIN_FRIEEE:
5161 if (TARGET_C3X)
5162 break;
5163 arg0 = TREE_VALUE (arglist);
5164 if (TREE_CODE (arg0) == VAR_DECL || TREE_CODE (arg0) == PARM_DECL)
5165 put_var_into_stack (arg0);
5166 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5167 r0 = protect_from_queue (r0, 0);
5168 if (register_operand (r0, QFmode))
5169 {
5170 r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
5171 emit_move_insn (r1, r0);
5172 r0 = r1;
5173 }
5174 if (! target || ! register_operand (target, QFmode))
5175 target = gen_reg_rtx (QFmode);
5176 emit_insn (gen_frieee (target, r0));
5177 return target;
5178
5179 case C4X_BUILTIN_RCPF:
5180 if (TARGET_C3X)
5181 break;
5182 arg0 = TREE_VALUE (arglist);
5183 r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
5184 r0 = protect_from_queue (r0, 0);
5185 if (! target || ! register_operand (target, QFmode))
5186 target = gen_reg_rtx (QFmode);
5187 emit_insn (gen_rcpfqf_clobber (target, r0));
5188 return target;
5189 }
5190 return NULL_RTX;
5191}
7c262518
RH
5192
5193static void
715bdd29 5194c4x_asm_named_section (name, flags)
7c262518
RH
5195 const char *name;
5196 unsigned int flags ATTRIBUTE_UNUSED;
7c262518
RH
5197{
5198 fprintf (asm_out_file, "\t.sect\t\"%s\"\n", name);
5199}