/* (Scraper artifact preserved as a comment: gcc.gnu.org git blob header for
   gcc/config/bfin/bfin.c; commit summary: "bfin.c (n_dregs_to_save, [...]):
   New argument IS_INTHANDLER; all callers changed.")  */
1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "tree.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "input.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "expr.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "ggc.h"
47 #include "integrate.h"
48 #include "langhooks.h"
49 #include "bfin-protos.h"
50 #include "tm-preds.h"
51 #include "gt-bfin.h"
52
/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for condition code flag register and RETS register.  The extern
   declarations carry the GTY(()) marker so the precompiled-header /
   garbage-collection machinery roots these; the definitions follow.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of registers available for argument passing; computed once in
   output_file_start by scanning arg_regs below.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

/* Argument-passing register numbers, terminated by a negative entry.  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;
74
/* Implement TARGET_ASM_GLOBALIZE_LABEL.  Emit ".global NAME;" so the
   assembler exports NAME; Blackfin assembly statements end in ';'.  */
static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
83
84 static void
85 output_file_start (void)
86 {
87 FILE *file = asm_out_file;
88 int i;
89
90 fprintf (file, ".file \"%s\";\n", input_filename);
91
92 for (i = 0; arg_regs[i] >= 0; i++)
93 ;
94 max_arg_registers = i; /* how many arg reg used */
95 }
96
/* Called early in the compilation to conditionally modify
   fixed_regs/call_used_regs.  On Blackfin this only creates the RTL
   objects for the condition-code flag and the RETS (return address)
   register; the register sets themselves are left untouched.  */

void
conditional_register_usage (void)
{
  /* initialize condition code flag register rtx */
  bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
  bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
}
107
108 /* Examine machine-dependent attributes of function type FUNTYPE and return its
109 type. See the definition of E_FUNKIND. */
110
111 static e_funkind funkind (tree funtype)
112 {
113 tree attrs = TYPE_ATTRIBUTES (funtype);
114 if (lookup_attribute ("interrupt_handler", attrs))
115 return INTERRUPT_HANDLER;
116 else if (lookup_attribute ("exception_handler", attrs))
117 return EXCPT_HANDLER;
118 else if (lookup_attribute ("nmi_handler", attrs))
119 return NMI_HANDLER;
120 else
121 return SUBROUTINE;
122 }
123 \f
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      /* Constant-pool references are already PIC-safe; pass them
	 through unchanged.  */
      if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
	reg = new = orig;
      else
	{
	  if (reg == 0)
	    {
	      gcc_assert (!no_new_pseudos);
	      reg = gen_reg_rtx (Pmode);
	    }

	  if (flag_pic == 2)
	    {
	      /* -fPIC: build the full 32-bit GOT offset with a
		 high/low pair, add the PIC register, then load the
		 entry through the sum.  */
	      emit_insn (gen_movsi_high_pic (reg, addr));
	      emit_insn (gen_movsi_low_pic (reg, reg, addr));
	      emit_insn (gen_addsi3 (reg, reg, picreg));
	      new = gen_const_mem (Pmode, reg);
	    }
	  else
	    {
	      /* -fpic: the GOT offset fits in a displacement; load
		 directly from [PICREG + unspec offset].  */
	      rtx tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
					UNSPEC_MOVE_PIC);
	      new = gen_const_mem (Pmode,
				   gen_rtx_PLUS (Pmode, picreg, tmp));
	    }
	  emit_move_insn (reg, new);
	}
      if (picreg == pic_offset_table_rtx)
	current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* Already an offset from the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Recursively legitimize both halves of the sum.  REG may only
	 be reused for the second half if the first half did not end up
	 occupying it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  /* Can't allocate a new register after reload has started.  */
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  /* Re-associate so the constant term stays outermost.  */
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new;
}
210 \f
211 /* Stack frame layout. */
212
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.  */

static int
n_dregs_to_save (bool is_inthandler)
{
  unsigned i;

  /* Scan upward from R0.  The first register that must be saved fixes
     the whole range [i .. R7], because push_multiple always saves a
     contiguous run ending at R7.  */
  for (i = REG_R0; i <= REG_R7; i++)
    {
      if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
	return REG_R7 - i + 1;

      /* The EH return data registers must also be preserved when the
	 function calls __builtin_eh_return.  */
      if (current_function_calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == i)
		return REG_R7 - i + 1;
	    }
	}

    }
  return 0;
}
245
246 /* Like n_dregs_to_save, but compute number of PREGS to save. */
247
248 static int
249 n_pregs_to_save (bool is_inthandler)
250 {
251 unsigned i;
252
253 for (i = REG_P0; i <= REG_P5; i++)
254 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
255 || (i == PIC_OFFSET_TABLE_REGNUM
256 && (current_function_uses_pic_offset_table
257 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
258 return REG_P5 - i + 1;
259 return 0;
260 }
261
262 /* Determine if we are going to save the frame pointer in the prologue. */
263
264 static bool
265 must_save_fp_p (void)
266 {
267 return frame_pointer_needed || regs_ever_live[REG_FP];
268 }
269
270 static bool
271 stack_frame_needed_p (void)
272 {
273 /* EH return puts a new return address into the frame using an
274 address relative to the frame pointer. */
275 if (current_function_calls_eh_return)
276 return true;
277 return frame_pointer_needed;
278 }
279
280 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
281 must save all registers; this is used for interrupt handlers.
282 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
283 this for an interrupt (or exception) handler. */
284
285 static void
286 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
287 {
288 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
289 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
290 int dregno = REG_R7 + 1 - ndregs;
291 int pregno = REG_P5 + 1 - npregs;
292 int total = ndregs + npregs;
293 int i;
294 rtx pat, insn, val;
295
296 if (total == 0)
297 return;
298
299 val = GEN_INT (-total * 4);
300 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
301 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
302 UNSPEC_PUSH_MULTIPLE);
303 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
304 gen_rtx_PLUS (Pmode, spreg,
305 val));
306 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
307 for (i = 0; i < total; i++)
308 {
309 rtx memref = gen_rtx_MEM (word_mode,
310 gen_rtx_PLUS (Pmode, spreg,
311 GEN_INT (- i * 4 - 4)));
312 rtx subpat;
313 if (ndregs > 0)
314 {
315 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
316 dregno++));
317 ndregs--;
318 }
319 else
320 {
321 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
322 pregno++));
323 npregs++;
324 }
325 XVECEXP (pat, 0, i + 1) = subpat;
326 RTX_FRAME_RELATED_P (subpat) = 1;
327 }
328 insn = emit_insn (pat);
329 RTX_FRAME_RELATED_P (insn) = 1;
330 }
331
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.

   This mirrors expand_prologue_reg_save: one pop_multiple PARALLEL whose
   element 0 restores SP, with register loads in the remaining elements.
   PREGS come first since they were pushed last (lowest addresses).  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
  int total = ndregs + npregs;
  int i, regno;
  rtx pat, insn;

  if (total == 0)
    return;

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
  XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
				     gen_rtx_PLUS (Pmode, spreg,
						   GEN_INT (total * 4)));

  /* Start one past the highest register of the first group; REGNO is
     pre-decremented inside the loop.  */
  if (npregs > 0)
    regno = REG_P5 + 1;
  else
    regno = REG_R7 + 1;

  for (i = 0; i < total; i++)
    {
      /* First slot is at [SP] itself; subsequent slots at increasing
	 word offsets.  */
      rtx addr = (i > 0
		  ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		  : spreg);
      rtx memref = gen_rtx_MEM (word_mode, addr);

      regno--;
      XVECEXP (pat, 0, i + 1)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

      /* Once the last PREG has been consumed, switch to the DREG
	 range.  */
      if (npregs > 0)
	{
	  if (--npregs == 0)
	    regno = REG_R7 + 1;
	}
    }

  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
}
380
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we dont have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  /* Caller only wants the pretend size; don't generate RTL.  */
  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = cum->words + 1; i < max_arg_registers; i++)
    {
      /* Store each remaining argument register into the caller-provided
	 slot at [ARGP + i*4].  */
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  /* The caller already reserved the space; we pretend nothing.  */
  *pretend_size = 0;
}
429
430 /* Value should be nonzero if functions must have frame pointers.
431 Zero means the frame pointer need not be set up (and parms may
432 be accessed via the stack pointer) in functions that seem suitable. */
433
434 int
435 bfin_frame_pointer_required (void)
436 {
437 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
438
439 if (fkind != SUBROUTINE)
440 return 1;
441
442 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
443 so we have to override it for non-leaf functions. */
444 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
445 return 1;
446
447 return 0;
448 }
449
/* Return the number of registers pushed during the prologue.  This must
   agree exactly with what bfin_expand_prologue emits, since the result
   feeds bfin_initial_elimination_offset.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  /* Non-leaf interrupt handlers behave as if "saveall" was given, since
     callees won't preserve call-clobbered registers for us.  */
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
  int n = ndregs + npregs;

  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case; it pushes RETS and FP.  */
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (! current_function_is_leaf)
	n++;
    }

  if (fkind != SUBROUTINE)
    {
      int i;

      /* Increment once for ASTAT.  */
      n++;

      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;

      /* The remaining special registers saved one at a time by the
	 interrupt prologue; the 40-bit accumulators A0/A1 each occupy
	 two stack words.  */
      for (i = REG_P7 + 1; i < REG_CC; i++)
	if (all
	    || regs_ever_live[i]
	    || (!leaf_function_p () && call_used_regs[i]))
	  n += i == REG_A0 || i == REG_A1 ? 2 : 1;
    }
  return n;
}
494
495 /* Return the offset between two registers, one to be eliminated, and the other
496 its replacement, at the start of a routine. */
497
498 HOST_WIDE_INT
499 bfin_initial_elimination_offset (int from, int to)
500 {
501 HOST_WIDE_INT offset = 0;
502
503 if (from == ARG_POINTER_REGNUM)
504 offset = n_regs_saved_by_prologue () * 4;
505
506 if (to == STACK_POINTER_REGNUM)
507 {
508 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
509 offset += current_function_outgoing_args_size;
510 else if (current_function_outgoing_args_size)
511 offset += FIXED_STACK_AREA;
512
513 offset += get_frame_size ();
514 }
515
516 return offset;
517 }
518
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  /* Values in this range fit a single move instruction.  */
  if (constant >= -32768 && constant < 65536)
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      /* Mark the high-half insn here; the low-half insn is marked by the
	 common code below.  */
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
543
/* Generate efficient code to add a value to the frame pointer.  We
   can use P1 as a scratch register.  Set RTX_FRAME_RELATED_P on the
   generated insns if FRAME is nonzero.  */

static void
add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
{
  if (value == 0)
    return;

  /* Choose whether to use a sequence using a temporary register, or
     a sequence with multiple adds.  We can add a signed 7 bit value
     in one instruction.  */
  if (value > 120 || value < -120)
    {
      /* Load the constant into scratch register P1, then add.  */
      rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
      rtx insn;

      if (frame)
	frame_related_constant_load (tmpreg, value, TRUE);
      else
	{
	  insn = emit_move_insn (tmpreg, GEN_INT (value));
	  /* NOTE(review): this branch is only reached when FRAME is
	     zero, so the test below never fires; kept as written.  */
	  if (frame)
	    RTX_FRAME_RELATED_P (insn) = 1;
	}

      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      if (frame)
	RTX_FRAME_RELATED_P (insn) = 1;
    }
  else
    /* Emit immediate adds in chunks of at most +/-60 until VALUE is
       consumed.  */
    do
      {
	int size = value;
	rtx insn;

	if (size > 60)
	  size = 60;
	else if (size < -60)
	  /* We could use -62, but that would leave the stack unaligned, so
	     it's no good.  */
	  size = -60;

	insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
	if (frame)
	  RTX_FRAME_RELATED_P (insn) = 1;
	value -= size;
      }
    while (value != 0);
}
595
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* The LINK instruction's immediate is limited to 262140 bytes.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The extra 8 covers the FP and RETS
     slots that LINK itself pushes.  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the PARALLEL so dwarf2 CFI is emitted for
     each of the pushes and the SP adjustment.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
634
635 /* Return the number of bytes we must reserve for outgoing arguments
636 in the current function's stack frame. */
637
638 static HOST_WIDE_INT
639 arg_area_size (void)
640 {
641 if (current_function_outgoing_args_size)
642 {
643 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
644 return current_function_outgoing_args_size;
645 else
646 return FIXED_STACK_AREA;
647 }
648 return 0;
649 }
650
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* A full LINK is needed if we need a frame pointer, or if a non-leaf
     function must save FP anyway (LINK saves both FP and RETS).  */
  if (all || stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);
  else
    {
      /* Otherwise push only what's needed individually: RETS if we make
	 calls, FP if it's used, then adjust SP for the locals.  */
      if (! current_function_is_leaf)
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      add_to_sp (spreg, -frame_size, 1);
    }
}
684
/* Like do_link, but used for epilogues to deallocate the stack frame.
   Pops must mirror do_link's pushes in reverse order: FP first, then
   RETS.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ())
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      add_to_sp (spreg, frame_size, 0);
      if (must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  /* The USE keeps the restore from being deleted as dead.  */
	  emit_insn (gen_rtx_USE (VOIDmode, fpreg));
	}
      if (! current_function_is_leaf)
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
	}
    }
}
712
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).

   Emission order (mirrored in reverse by the epilogue): optional USP
   switch, exception scratch area, ASTAT, the D/P register block, the
   remaining special registers, the nesting return register, then the
   LINK / frame allocation.  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
{
  int i;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* "kspisusp": on entry the kernel SP is found in USP.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Save the special registers above P7 individually; the 40-bit
     accumulators A0/A1 need PDImode moves.  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }

  /* "nesting": save the appropriate return-address register so that
     interrupts can be re-enabled inside the handler.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  if (fkind == EXCPT_HANDLER)
    {
      /* Pass exception information to the handler in R0..R2: the
	 EXCAUSE field of SEQSTAT, the pre-exception SP, and the saved
	 FP address.  The REG_MAYBE_DEAD notes let flow delete these if
	 the handler ignores its arguments.  */
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);
      rtx insn;

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      /* Shift right then left by 26 to clear all but the high 6 bits.  */
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r1reg, spreg);
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
    }
}
803
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  Restores are emitted in exactly the
   reverse order of expand_interrupt_handler_prologue.  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
{
  int i;
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all);

  /* Restore the "nesting" return register saved last by the prologue.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  /* Special registers, highest-numbered first (reverse of the saves);
     A0/A1 need PDImode loads.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  expand_epilogue_reg_restore (spreg, all, true);

  emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
}
862
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  */

static void
bfin_load_pic_reg (rtx dest)
{
  rtx addr, insn;

  if (bfin_lib_id_given)
    /* With a known library id, this library's GOT pointer lives at a
       fixed negative offset from the current PIC register.  */
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    /* Otherwise the offset is resolved later via the
       UNSPEC_LIBRARY_OFFSET relocation.  */
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  /* Allow flow to delete the load if the PIC register turns out unused.  */
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
}
880
/* Generate RTL for the prologue of the current function.  */

void
bfin_expand_prologue (void)
{
  rtx insn;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  rtx pic_reg_loaded = NULL_RTX;

  /* Interrupt/exception/NMI handlers use an entirely separate scheme.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_prologue (spreg, fkind);
      return;
    }

  /* -fstack-limit: trap if SP would fall below the limit after this
     frame is allocated.  */
  if (current_function_limit_stack)
    {
      HOST_WIDE_INT offset
	= bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
					   STACK_POINTER_REGNUM);
      rtx lim = stack_limit_rtx;

      if (GET_CODE (lim) == SYMBOL_REF)
	{
	  rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
	  if (TARGET_ID_SHARED_LIBRARY)
	    {
	      /* Under -mid-shared-library the limit symbol must itself
		 be resolved through the GOT first.  */
	      rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
	      rtx r3reg = gen_rtx_REG (Pmode, REG_R3);
	      rtx val;
	      pic_reg_loaded = p2reg;
	      bfin_load_pic_reg (pic_reg_loaded);
	      val = legitimize_pic_address (stack_limit_rtx, p1reg, p2reg);
	      emit_move_insn (p1reg, val);
	      frame_related_constant_load (p2reg, offset, FALSE);
	      emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
	      lim = p2reg;
	    }
	  else
	    {
	      rtx limit = plus_constant (stack_limit_rtx, offset);
	      emit_move_insn (p2reg, limit);
	      lim = p2reg;
	    }
	}
      emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
      emit_insn (gen_trapifcc ());
    }
  expand_prologue_reg_save (spreg, 0, false);

  do_link (spreg, frame_size, false);

  if (TARGET_ID_SHARED_LIBRARY
      && (current_function_uses_pic_offset_table
	  || !current_function_is_leaf))
    bfin_load_pic_reg (pic_offset_table_rtx);
}
940
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  */

void
bfin_expand_epilogue (int need_return, int eh_return)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind);
      return;
    }

  do_unlink (spreg, get_frame_size (), false);

  expand_epilogue_reg_restore (spreg, false, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  /* P2 holds the EH stack adjustment (EH_RETURN_STACKADJ_RTX).  */
  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
}
970 \f
971 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
972
973 int
974 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
975 unsigned int new_reg)
976 {
977 /* Interrupt functions can only use registers that have already been
978 saved by the prologue, even if they would normally be
979 call-clobbered. */
980
981 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
982 && !regs_ever_live[new_reg])
983 return 0;
984
985 return 1;
986 }
987
988 /* Return the value of the return address for the frame COUNT steps up
989 from the current frame, after the prologue.
990 We punt for everything but the current frame by returning const0_rtx. */
991
992 rtx
993 bfin_return_addr_rtx (int count)
994 {
995 if (count != 0)
996 return const0_rtx;
997
998 return get_hard_reg_initial_val (Pmode, REG_RETS);
999 }
1000
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address,
   otherwise return NULL_RTX.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the memory reference.

   The Blackfin port performs no such transformations; all parameters
   are deliberately unused.  */

rtx
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
1016
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32 bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  /* NOTE(review): the MODE parameter is immediately overwritten with the
     MEM's own mode, so the argument value is never used.  */
  mode = GET_MODE (op);
  op = XEXP (op, 0);

  /* Non-PLUS addresses (register direct and auto-inc/dec forms) always
     fit the 16-bit encodings.  */
  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16 bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now; the short-offset form scales by 2.  */
  return offset < 0 || offset > 30;
}
1054
/* Return cost of the memory address ADDR.
   All addressing modes are equally cheap on the Blackfin, so a
   constant cost of 1 is returned for every address.  */

static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
{
  return 1;
}
1063
1064 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1065
1066 void
1067 print_address_operand (FILE *file, rtx x)
1068 {
1069 switch (GET_CODE (x))
1070 {
1071 case PLUS:
1072 output_address (XEXP (x, 0));
1073 fprintf (file, "+");
1074 output_address (XEXP (x, 1));
1075 break;
1076
1077 case PRE_DEC:
1078 fprintf (file, "--");
1079 output_address (XEXP (x, 0));
1080 break;
1081 case POST_INC:
1082 output_address (XEXP (x, 0));
1083 fprintf (file, "++");
1084 break;
1085 case POST_DEC:
1086 output_address (XEXP (x, 0));
1087 fprintf (file, "--");
1088 break;
1089
1090 default:
1091 gcc_assert (GET_CODE (x) != MEM);
1092 print_operand (file, x, 0);
1093 break;
1094 }
1095 }
1096
/* Print operand X to FILE, modified by the letter CODE.
   'j' prints the condition suffix for X's comparison code; 'J' prints the
   reversed condition.  Other letters select register sub-parts or adjust
   constants; see the individual cases below.
   (Original note: "Adding intp DImode support by Tony
    -- Q: (low word)  -- R: (high word)".)  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Condition suffix for a conditional branch/set.
	 NOTE(review): the unsigned codes (GTU/LTU/GEU/LEU) print the same
	 suffixes as the signed ones — presumably correct because the CC
	 bit was already set with the proper signedness; confirm against
	 the compare patterns.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  /* 'h': low half, 'd': high half, 'w'/'x': accumulator .w/.x
	     parts, 'D': register pair, 'H': second reg of a DI/DF pair,
	     'T': byte register name.  */
	  if (code == 'h')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      /*fprintf (file, "\n%d\n ", REGNO (x));*/
	      break;
	    }
	  else if (code == 'd')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      break;
	    }
	  else if (code == 'w')
	    {
	      /* Only the accumulators have a .w part.  */
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.w", reg_names[REGNO (x)]);
	    }
	  else if (code == 'x')
	    {
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.x", reg_names[REGNO (x)]);
	    }
	  else if (code == 'D')
	    {
	      fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	    }
	  else if (code == 'H')
	    {
	      /* High word of a DImode/DFmode register pair.  */
	      gcc_assert (mode == DImode || mode == DFmode);
	      gcc_assert (REG_P (x));
	      fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	    }
	  else if (code == 'T')
	    {
	      gcc_assert (D_REGNO_P (REGNO (x)));
	      fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	    }
	  else 
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'X')
	    /* 'X': log2 of a one-bit mask (BITSET-style operand).  */
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    /* 'Y': log2 of the complemented mask (BITCLR-style operand).  */
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  if (code == 'G' && flag_pic)
	    fprintf (file, "@GOT");
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1287 \f
1288 /* Argument support functions. */
1289
1290 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1291 for a call to a function whose data type is FNTYPE.
1292 For a library call, FNTYPE is 0.
1293 VDSP C Compiler manual, our ABI says that
1294 first 3 words of arguments will use R0, R1 and R2.
1295 */
1296
1297 void
1298 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1299 rtx libname ATTRIBUTE_UNUSED)
1300 {
1301 static CUMULATIVE_ARGS zero_cum;
1302
1303 *cum = zero_cum;
1304
1305 /* Set up the number of registers to use for passing arguments. */
1306
1307 cum->nregs = max_arg_registers;
1308 cum->arg_regs = arg_regs;
1309
1310 cum->call_cookie = CALL_NORMAL;
1311 /* Check for a longcall attribute. */
1312 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1313 cum->call_cookie |= CALL_SHORT;
1314 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1315 cum->call_cookie |= CALL_LONG;
1316
1317 return;
1318 }
1319
1320 /* Update the data in CUM to advance over an argument
1321 of mode MODE and data type TYPE.
1322 (TYPE is null for libcalls where that information may not be available.) */
1323
1324 void
1325 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1326 int named ATTRIBUTE_UNUSED)
1327 {
1328 int count, bytes, words;
1329
1330 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1331 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1332
1333 cum->words += words;
1334 cum->nregs -= words;
1335
1336 if (cum->nregs <= 0)
1337 {
1338 cum->nregs = 0;
1339 cum->arg_regs = NULL;
1340 }
1341 else
1342 {
1343 for (count = 1; count <= words; count++)
1344 cum->arg_regs++;
1345 }
1346
1347 return;
1348 }
1349
1350 /* Define where to put the arguments to a function.
1351 Value is zero to push the argument on the stack,
1352 or a hard register in which to store the argument.
1353
1354 MODE is the argument's machine mode.
1355 TYPE is the data type of the argument (as a tree).
1356 This is null for libcalls where that information may
1357 not be available.
1358 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1359 the preceding args and about the function being called.
1360 NAMED is nonzero if this argument is a named parameter
1361 (otherwise it is an extra parameter matching an ellipsis). */
1362
1363 struct rtx_def *
1364 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1365 int named ATTRIBUTE_UNUSED)
1366 {
1367 int bytes
1368 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1369
1370 if (mode == VOIDmode)
1371 /* Compute operand 2 of the call insn. */
1372 return GEN_INT (cum->call_cookie);
1373
1374 if (bytes == -1)
1375 return NULL_RTX;
1376
1377 if (cum->nregs)
1378 return gen_rtx_REG (mode, *(cum->arg_regs));
1379
1380 return NULL_RTX;
1381 }
1382
1383 /* For an arg passed partly in registers and partly in memory,
1384 this is the number of bytes passed in registers.
1385 For args passed entirely in registers or entirely in memory, zero.
1386
1387 Refer VDSP C Compiler manual, our ABI.
1388 First 3 words are in registers. So, if a an argument is larger
1389 than the registers available, it will span the register and
1390 stack. */
1391
1392 static int
1393 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1394 tree type ATTRIBUTE_UNUSED,
1395 bool named ATTRIBUTE_UNUSED)
1396 {
1397 int bytes
1398 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1399 int bytes_left = cum->nregs * UNITS_PER_WORD;
1400
1401 if (bytes == -1)
1402 return 0;
1403
1404 if (bytes_left == 0)
1405 return 0;
1406 if (bytes > bytes_left)
1407 return bytes_left;
1408 return 0;
1409 }
1410
1411 /* Variable sized types are passed by reference. */
1412
1413 static bool
1414 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1415 enum machine_mode mode ATTRIBUTE_UNUSED,
1416 tree type, bool named ATTRIBUTE_UNUSED)
1417 {
1418 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1419 }
1420
1421 /* Decide whether a type should be returned in memory (true)
1422 or in a register (false). This is called by the macro
1423 RETURN_IN_MEMORY. */
1424
1425 int
1426 bfin_return_in_memory (tree type)
1427 {
1428 int size = int_size_in_bytes (type);
1429 return size > 2 * UNITS_PER_WORD || size == -1;
1430 }
1431
/* Return the register in which the address where a structure value
   should be stored is passed to a function: always P0 on this target.  */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (Pmode, REG_P0);
}
1440
1441 /* Return true when register may be used to pass function parameters. */
1442
1443 bool
1444 function_arg_regno_p (int n)
1445 {
1446 int i;
1447 for (i = 0; arg_regs[i] != -1; i++)
1448 if (n == arg_regs[i])
1449 return true;
1450 return false;
1451 }
1452
/* Returns 1 if OP contains a symbol reference (SYMBOL_REF or LABEL_REF)
   anywhere in its RTL, searching recursively through all sub-expressions
   and expression vectors.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  /* Walk OP's operands; 'E' is a vector of expressions, 'e' a single
     sub-expression.  */
  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
1482
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  Only ordinary subroutines may make
   sibcalls; interrupt handlers and the like have different epilogues.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  /* The decision depends on the kind of the CURRENT function, not the
     callee.  */
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  return fkind == SUBROUTINE;
}
1494 \f
1495 /* Emit RTL insns to initialize the variable parts of a trampoline at
1496 TRAMP. FNADDR is an RTX for the address of the function's pure
1497 code. CXT is an RTX for the static chain value for the function. */
1498
1499 void
1500 initialize_trampoline (tramp, fnaddr, cxt)
1501 rtx tramp, fnaddr, cxt;
1502 {
1503 rtx t1 = copy_to_reg (fnaddr);
1504 rtx t2 = copy_to_reg (cxt);
1505 rtx addr;
1506
1507 addr = memory_address (Pmode, plus_constant (tramp, 2));
1508 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1509 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1510 addr = memory_address (Pmode, plus_constant (tramp, 6));
1511 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1512
1513 addr = memory_address (Pmode, plus_constant (tramp, 10));
1514 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1515 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1516 addr = memory_address (Pmode, plus_constant (tramp, 14));
1517 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1518 }
1519
/* Emit insns to move operands[1] into operands[0], legitimizing PIC
   references.  MODE is unused.  */

void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we cannot create new pseudos; reuse the destination
     as the scratch register.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    /* Symbolic store: just force the source into a register; the store
       itself needs no PIC transformation.  */
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  pic_offset_table_rtx);
}
1533
1534 /* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1535
1536 void
1537 expand_move (rtx *operands, enum machine_mode mode)
1538 {
1539 if (flag_pic && SYMBOLIC_CONST (operands[1]))
1540 emit_pic_move (operands, mode);
1541
1542 /* Don't generate memory->memory or constant->memory moves, go through a
1543 register */
1544 else if ((reload_in_progress | reload_completed) == 0
1545 && GET_CODE (operands[0]) == MEM
1546 && GET_CODE (operands[1]) != REG)
1547 operands[1] = force_reg (mode, operands[1]);
1548 }
1549 \f
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuse to split volatile memory addresses,
	 but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
	{
	  /* Low word at offset 0, high word at offset 4
	     (little-endian).  */
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  /* Constants (e.g. CONST_INT) have VOIDmode; treat them as
	     DImode for the subreg computation.  */
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}
1581 \f
1582 bool
1583 bfin_longcall_p (rtx op, int call_cookie)
1584 {
1585 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1586 if (call_cookie & CALL_SHORT)
1587 return 0;
1588 if (call_cookie & CALL_LONG)
1589 return 1;
1590 if (TARGET_LONG_CALLS)
1591 return 1;
1592 return 0;
1593 }
1594
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared by init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* The pattern is a PARALLEL of the call, a USE of the cookie, and for
     sibcalls a RETURN.  */
  rtx pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (sibcall ? 3 : 2));

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  /* Force the callee into a register when it is not a directly callable
     symbol: either an arbitrary address expression, or a symbol that
     must be called through a register (PIC, or long calls).  */
  if ((!register_no_elim_operand (callee, Pmode)
       && GET_CODE (callee) != SYMBOL_REF)
      || (GET_CODE (callee) == SYMBOL_REF
	  && (flag_pic
	      || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  XVECEXP (pat, 0, 0) = call;
  XVECEXP (pat, 0, 1) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, 2) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
1638 \f
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  /* Note: 'class' is a valid identifier in C (this file is not C++).  */
  enum reg_class class = REGNO_REG_CLASS (regno);

  /* The condition-code mode is never stored in a hard register
     directly; BImode is used for CC results instead.  */
  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  /* CC register only holds single-bit (BImode) values.  */
  if (class == CCREGS)
    return mode == BImode;
  /* The 40-bit accumulator mode fits only A0/A1.  */
  if (mode == PDImode)
    return regno == REG_A0 || regno == REG_A1;
  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;
      
  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
1662
/* Implements target hook vector_mode_supported_p.  Only the paired
   16-bit integer mode V2HImode is supported.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V2HImode;
}
1670
1671 /* Return the cost of moving data from a register in class CLASS1 to
1672 one in class CLASS2. A cost of 2 is the default. */
1673
1674 int
1675 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1676 enum reg_class class1, enum reg_class class2)
1677 {
1678 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1679 if (optimize_size)
1680 return 2;
1681
1682 /* There are some stalls involved when moving from a DREG to a different
1683 class reg, and using the value in one of the following instructions.
1684 Attempt to model this by slightly discouraging such moves. */
1685 if (class1 == DREGS && class2 != DREGS)
1686 return 2 * 2;
1687
1688 return 2;
1689 }
1690
1691 /* Return the cost of moving data of mode M between a
1692 register and memory. A value of 2 is the default; this cost is
1693 relative to those in `REGISTER_MOVE_COST'.
1694
1695 ??? In theory L1 memory has single-cycle latency. We should add a switch
1696 that tells the compiler whether we expect to use only L1 memory for the
1697 program; it'll make the costs more accurate. */
1698
1699 int
1700 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1701 enum reg_class class,
1702 int in ATTRIBUTE_UNUSED)
1703 {
1704 /* Make memory accesses slightly more expensive than any register-register
1705 move. Also, penalize non-DP registers, since they need secondary
1706 reloads to load and store. */
1707 if (! reg_class_subset_p (class, DPREGS))
1708 return 10;
1709
1710 return 8;
1711 }
1712
/* Inform reload about cases where moving X with a mode MODE to a register in
   CLASS requires an extra scratch register.  Return the class needed for the
   scratch register, or NO_REGS when no scratch is required.  */

enum reg_class
secondary_input_reload_class (enum reg_class class, enum machine_mode mode,
			      rtx x)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Look through subregs to the underlying register or MEM.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      /* Map allocated pseudos to their hard register.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo lives in a stack slot: treat it as MEM.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if (class == DREGS || class == DPREGS)
	return large_constant_p ? PREGS : NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      return PREGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;
  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
1786
/* Like secondary_input_reload_class; the input and output cases are
   symmetric on this target, so all we do is call that function.  */

enum reg_class
secondary_output_reload_class (enum reg_class class, enum machine_mode mode,
			       rtx x)
{
  return secondary_input_reload_class (class, mode, x);
}
1795 \f
/* Implement TARGET_HANDLE_OPTION.  Returns true when the option was
   handled (always, on this target).  */

static bool
bfin_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mshared_library_id_:
      if (value > MAX_LIBRARY_ID)
	error ("-mshared-library-id=%s is not between 0 and %d",
	       arg, MAX_LIBRARY_ID);
      /* NOTE(review): bfin_lib_id_given is set even when the value was
	 out of range and an error was emitted — confirm this is
	 intentional.  */
      bfin_lib_id_given = 1;
      return true;

    default:
      return true;
    }
}
1814
/* Implement the macro OVERRIDE_OPTIONS: validate and adjust option
   settings after all command-line options have been parsed.  */

void
override_options (void)
{
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  if (TARGET_ID_SHARED_LIBRARY)
    /* ??? Provide a way to use a bigger GOT.  */
    flag_pic = 1;

  /* Scheduling is unconditionally disabled on this target.  */
  flag_schedule_insns = 0;
}
1833
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  /* A PARALLEL wraps the SET in patterns like cbranch_with_nops.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  /* For a conditional branch, the taken arm holds the label.  */
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
1854
1855 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
1856 it's a branch that's predicted taken. */
1857
1858 static int
1859 cbranch_predicted_taken_p (rtx insn)
1860 {
1861 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
1862
1863 if (x)
1864 {
1865 int pred_val = INTVAL (XEXP (x, 0));
1866
1867 return pred_val >= REG_BR_PROB_BASE / 2;
1868 }
1869
1870 return 0;
1871 }
1872
/* Templates for use by asm_conditional_branch.  Indexed first by
   (bp << 1) | (EQ ? BRF : BRT), then by length class (0 = short
   conditional jump, 1 = inverted test + jump.s, 2 = inverted test
   + jump.l).  "(bp)" marks the branch predicted taken.  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
1881
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
    - emitting a sufficient number of nops, if N_NOPS is nonzero, or
    - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* Length class: 0 = short cond jump, 1 = jump.s form, 2 = jump.l.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* For a short branch, PREDICT_TAKEN can force the (bp) form.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops are only requested for not-predicted-taken short branches.  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
1912
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.
   Returns a BImode comparison (NE/EQ against zero) of the CC register
   suitable for use in a conditional branch or set.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
	/* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Other conditions are synthesized by computing the reverse
	   condition into CC and testing it with EQ.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (BImode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
1954 \f
1955 /* Return nonzero iff C has exactly one bit set if it is interpreted
1956 as a 32 bit constant. */
1957
1958 int
1959 log2constp (unsigned HOST_WIDE_INT c)
1960 {
1961 c &= 0xFFFFFFFF;
1962 return c != 0 && (c & (c-1)) == 0;
1963 }
1964
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Remember the sign bit so the shift below behaves like an arithmetic
     (sign-preserving) shift even though TMP is unsigned.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
1989
1990 /* After reload, split the load of an immediate constant. OPERANDS are the
1991 operands of the movsi_insn pattern which we are splitting. We return
1992 nonzero if we emitted a sequence to load the constant, zero if we emitted
1993 nothing because we want to use the splitter's default sequence. */
1994
1995 int
1996 split_load_immediate (rtx operands[])
1997 {
1998 HOST_WIDE_INT val = INTVAL (operands[1]);
1999 HOST_WIDE_INT tmp;
2000 HOST_WIDE_INT shifted = val;
2001 HOST_WIDE_INT shifted_compl = ~val;
2002 int num_zero = shiftr_zero (&shifted);
2003 int num_compl_zero = shiftr_zero (&shifted_compl);
2004 unsigned int regno = REGNO (operands[0]);
2005 enum reg_class class1 = REGNO_REG_CLASS (regno);
2006
2007 /* This case takes care of single-bit set/clear constants, which we could
2008 also implement with BITSET/BITCLR. */
2009 if (num_zero
2010 && shifted >= -32768 && shifted < 65536
2011 && (D_REGNO_P (regno)
2012 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2013 {
2014 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2015 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2016 return 1;
2017 }
2018
2019 tmp = val & 0xFFFF;
2020 tmp |= -(tmp & 0x8000);
2021
2022 /* If high word has one bit set or clear, try to use a bit operation. */
2023 if (D_REGNO_P (regno))
2024 {
2025 if (log2constp (val & 0xFFFF0000))
2026 {
2027 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2028 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2029 return 1;
2030 }
2031 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2032 {
2033 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2034 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2035 }
2036 }
2037
2038 if (D_REGNO_P (regno))
2039 {
2040 if (CONST_7BIT_IMM_P (tmp))
2041 {
2042 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2043 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2044 return 1;
2045 }
2046
2047 if ((val & 0xFFFF0000) == 0)
2048 {
2049 emit_insn (gen_movsi (operands[0], const0_rtx));
2050 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2051 return 1;
2052 }
2053
2054 if ((val & 0xFFFF0000) == 0xFFFF0000)
2055 {
2056 emit_insn (gen_movsi (operands[0], constm1_rtx));
2057 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2058 return 1;
2059 }
2060 }
2061
2062 /* Need DREGs for the remaining case. */
2063 if (regno > REG_R7)
2064 return 0;
2065
2066 if (optimize_size
2067 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2068 {
2069 /* If optimizing for size, generate a sequence that has more instructions
2070 but is shorter. */
2071 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2072 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2073 GEN_INT (num_compl_zero)));
2074 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2075 return 1;
2076 }
2077 return 0;
2078 }
2079 \f
/* Return true if VALUE is a legitimate constant offset for a memory
   operand of mode MODE; return false if not.  The allowed magnitude
   scales with the access size (offsets must fit a 15-bit field after
   accounting for the mode's natural alignment).  */

static bool
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
  return (v & ~(mask << shift)) == 0;
}
2094
2095 static bool
2096 bfin_valid_reg_p (unsigned int regno, int strict)
2097 {
2098 return ((strict && REGNO_OK_FOR_BASE_STRICT_P (regno))
2099 || (!strict && REGNO_OK_FOR_BASE_NONSTRICT_P (regno)));
2100 }
2101
2102 bool
2103 bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
2104 {
2105 switch (GET_CODE (x)) {
2106 case REG:
2107 if (bfin_valid_reg_p (REGNO (x), strict))
2108 return true;
2109 break;
2110 case PLUS:
2111 if (REG_P (XEXP (x, 0))
2112 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict)
2113 && (GET_CODE (XEXP (x, 1)) == UNSPEC
2114 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2115 && bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
2116 return true;
2117 break;
2118 case POST_INC:
2119 case POST_DEC:
2120 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2121 && REG_P (XEXP (x, 0))
2122 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2123 return true;
2124 case PRE_DEC:
2125 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
2126 && XEXP (x, 0) == stack_pointer_rtx
2127 && REG_P (XEXP (x, 0))
2128 && bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict))
2129 return true;
2130 break;
2131 default:
2132 break;
2133 }
2134 return false;
2135 }
2136
/* Implement TARGET_RTX_COSTS.  Compute the cost of expression X with
   outer code OUTER_CODE into *TOTAL.  Return true when *TOTAL is final,
   false to let the generic code add the operand costs.  */

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* Constants are free when they fit the immediate field of the
	 surrounding operation, otherwise they cost one insn to load.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      /* Symbolic and double constants take a two-insn load.  */
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      if (GET_MODE (x) == Pmode)
	{
	  /* reg + reg*2 or reg + reg*4 is a single addressing insn.  */
	  if (GET_CODE (XEXP (x, 0)) == MULT
	      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
		  *total += rtx_cost (XEXP (x, 1), outer_code);
		  return true;
		}
	    }
	}

      /* fall through */

    case MINUS:
    case ASHIFT: 
    case ASHIFTRT:
    case LSHIFTRT:
      /* Double-word arithmetic and shifts are expensive.  */
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      /* Double-word logic is two single-word operations.  */
      if (GET_MODE (x) == DImode)
	*total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
	*total = COSTS_N_INSNS (3);
      return false;

    default:
      return false;
    }
}
2216
2217 static void
2218 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2219 {
2220 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2221 }
2222 \f
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.
   They hold the lowest D / P register number involved in the multiple
   push or pop just matched; 8 (for dregs) resp. 6 (for pregs) mean that
   no register of that class is involved.  */
static int first_preg_to_save, first_dreg_to_save;
2226
/* Predicate for a PARALLEL representing a multi-register push.  Also
   used (from output_push_multiple) for its side effect of recording, in
   first_dreg_to_save and first_preg_to_save, the lowest D and P
   register numbers being pushed (8 resp. 6 when none of that class is
   pushed).

   Elements 1 .. XVECLEN-2 of the vector must each be a
   (set (mem (plus sp -i*4)) reg), storing first an ascending run of D
   registers, then an ascending run of P0..P7.  Return 1 on a match,
   0 otherwise.  */

int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* GROUP is 0 before any register store is seen, 1 while scanning the
     D register run, 2 while scanning the P register run.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must store a register into memory.  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* The address must be SP plus the stack slot offset for slot I.  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* The first register seen decides which run we start in.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either switch over to the P register run, or continue the
	     consecutive D register run.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  /* The P register run must stay consecutive.  */
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
2296
/* Predicate for a PARALLEL representing a multi-register pop.  Like
   push_multiple_operation, it is also used from the corresponding
   output function for its side effect of recording the lowest D and P
   register numbers involved in first_dreg_to_save and
   first_preg_to_save.  Return 1 on a match, 0 otherwise.  */

int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* GROUP is 0 while scanning the descending run of P register loads
     (P5 downwards), 1 once R7 has started the descending D register
     run.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Each element must load a register from memory.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      /* The first load reads [SP]; element I reads [SP + (I-1)*4].  */
      if (i == 1)
	{
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* Either R7 starts the D register run, or we continue the
	     descending P register run.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  /* The D register run must stay consecutive, descending.  */
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2355
2356 /* Emit assembly code for one multi-register push described by INSN, with
2357 operands in OPERANDS. */
2358
2359 void
2360 output_push_multiple (rtx insn, rtx *operands)
2361 {
2362 char buf[80];
2363 int ok;
2364
2365 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2366 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2367 gcc_assert (ok);
2368
2369 if (first_dreg_to_save == 8)
2370 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2371 else if (first_preg_to_save == 6)
2372 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2373 else
2374 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2375 first_dreg_to_save, first_preg_to_save);
2376
2377 output_asm_insn (buf, operands);
2378 }
2379
2380 /* Emit assembly code for one multi-register pop described by INSN, with
2381 operands in OPERANDS. */
2382
2383 void
2384 output_pop_multiple (rtx insn, rtx *operands)
2385 {
2386 char buf[80];
2387 int ok;
2388
2389 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2390 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2391 gcc_assert (ok);
2392
2393 if (first_dreg_to_save == 8)
2394 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2395 else if (first_preg_to_save == 6)
2396 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2397 else
2398 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2399 first_dreg_to_save, first_preg_to_save);
2400
2401 output_asm_insn (buf, operands);
2402 }
2403
2404 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2405
2406 static void
2407 single_move_for_strmov (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2408 {
2409 rtx scratch = gen_reg_rtx (mode);
2410 rtx srcmem, dstmem;
2411
2412 srcmem = adjust_address_nv (src, mode, offset);
2413 dstmem = adjust_address_nv (dst, mode, offset);
2414 emit_move_insn (scratch, srcmem);
2415 emit_move_insn (dstmem, scratch);
2416 }
2417
2418 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2419 alignment ALIGN_EXP. Return true if successful, false if we should fall
2420 back on a different method. */
2421
2422 bool
2423 bfin_expand_strmov (rtx dst, rtx src, rtx count_exp, rtx align_exp)
2424 {
2425 rtx srcreg, destreg, countreg;
2426 HOST_WIDE_INT align = 0;
2427 unsigned HOST_WIDE_INT count = 0;
2428
2429 if (GET_CODE (align_exp) == CONST_INT)
2430 align = INTVAL (align_exp);
2431 if (GET_CODE (count_exp) == CONST_INT)
2432 {
2433 count = INTVAL (count_exp);
2434 #if 0
2435 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2436 return false;
2437 #endif
2438 }
2439
2440 /* If optimizing for size, only do single copies inline. */
2441 if (optimize_size)
2442 {
2443 if (count == 2 && align < 2)
2444 return false;
2445 if (count == 4 && align < 4)
2446 return false;
2447 if (count != 1 && count != 2 && count != 4)
2448 return false;
2449 }
2450 if (align < 2 && count != 1)
2451 return false;
2452
2453 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2454 if (destreg != XEXP (dst, 0))
2455 dst = replace_equiv_address_nv (dst, destreg);
2456 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2457 if (srcreg != XEXP (src, 0))
2458 src = replace_equiv_address_nv (src, srcreg);
2459
2460 if (count != 0 && align >= 2)
2461 {
2462 unsigned HOST_WIDE_INT offset = 0;
2463
2464 if (align >= 4)
2465 {
2466 if ((count & ~3) == 4)
2467 {
2468 single_move_for_strmov (dst, src, SImode, offset);
2469 offset = 4;
2470 }
2471 else if (count & ~3)
2472 {
2473 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2474 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2475
2476 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
2477 }
2478 }
2479 else
2480 {
2481 if ((count & ~1) == 2)
2482 {
2483 single_move_for_strmov (dst, src, HImode, offset);
2484 offset = 2;
2485 }
2486 else if (count & ~1)
2487 {
2488 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
2489 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2490
2491 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
2492 }
2493 }
2494 if (count & 2)
2495 {
2496 single_move_for_strmov (dst, src, HImode, offset);
2497 offset += 2;
2498 }
2499 if (count & 1)
2500 {
2501 single_move_for_strmov (dst, src, QImode, offset);
2502 }
2503 return true;
2504 }
2505 return false;
2506 }
2507
2508 \f
/* Implement TARGET_SCHED_ADJUST_COST.  INSN depends on DEP_INSN through
   dependence LINK; COST is the default cost.  Return the adjusted cost.  */

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  /* Add 4 cycles after a move, or 3 after a load, that sets an address
     (P) register from a D register, before the result can be used.  */
  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      /* NOTE(review): this assumes PATTERN (dep_insn) is a plain SET
	 with REG operands on both sides.  For TYPE_MCLD the source is
	 normally a MEM, so REGNO (src) looks dubious here -- confirm
	 that every TYPE_MOVE/TYPE_MCLD pattern really has this shape.  */
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
	return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
2540 \f
2541 /* We use the machine specific reorg pass for emitting CSYNC instructions
2542 after conditional branches as needed.
2543
2544 The Blackfin is unusual in that a code sequence like
2545 if cc jump label
   r0 = [p0]
2547 may speculatively perform the load even if the condition isn't true. This
2548 happens for a branch that is predicted not taken, because the pipeline
2549 isn't flushed or stalled, so the early stages of the following instructions,
2550 which perform the memory reference, are allowed to execute before the
2551 jump condition is evaluated.
2552 Therefore, we must insert additional instructions in all places where this
2553 could lead to incorrect behavior. The manual recommends CSYNC, while
2554 VDSP seems to use NOPs (even though its corresponding compiler option is
2555 named CSYNC).
2556
2557 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
2558 When optimizing for size, we turn the branch into a predicted taken one.
2559 This may be slower due to mispredicts, but saves code size. */
2560
2561 static void
2562 bfin_reorg (void)
2563 {
2564 rtx insn, last_condjump = NULL_RTX;
2565 int cycles_since_jump = INT_MAX;
2566
2567 if (! TARGET_SPECLD_ANOMALY || ! TARGET_CSYNC_ANOMALY)
2568 return;
2569
2570 /* First pass: find predicted-false branches; if something after them
2571 needs nops, insert them or change the branch to predict true. */
2572 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2573 {
2574 rtx pat;
2575
2576 if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
2577 continue;
2578
2579 pat = PATTERN (insn);
2580 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2581 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2582 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2583 continue;
2584
2585 if (JUMP_P (insn))
2586 {
2587 if (any_condjump_p (insn)
2588 && ! cbranch_predicted_taken_p (insn))
2589 {
2590 last_condjump = insn;
2591 cycles_since_jump = 0;
2592 }
2593 else
2594 cycles_since_jump = INT_MAX;
2595 }
2596 else if (INSN_P (insn))
2597 {
2598 enum attr_type type = get_attr_type (insn);
2599 int delay_needed = 0;
2600 if (cycles_since_jump < INT_MAX)
2601 cycles_since_jump++;
2602
2603 if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
2604 {
2605 rtx pat = single_set (insn);
2606 if (may_trap_p (SET_SRC (pat)))
2607 delay_needed = 3;
2608 }
2609 else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2610 delay_needed = 4;
2611
2612 if (delay_needed > cycles_since_jump)
2613 {
2614 rtx pat;
2615 int num_clobbers;
2616 rtx *op = recog_data.operand;
2617
2618 delay_needed -= cycles_since_jump;
2619
2620 extract_insn (last_condjump);
2621 if (optimize_size)
2622 {
2623 pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
2624 op[3]);
2625 cycles_since_jump = INT_MAX;
2626 }
2627 else
2628 /* Do not adjust cycles_since_jump in this case, so that
2629 we'll increase the number of NOPs for a subsequent insn
2630 if necessary. */
2631 pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
2632 GEN_INT (delay_needed));
2633 PATTERN (last_condjump) = pat;
2634 INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
2635 }
2636 }
2637 }
2638 /* Second pass: for predicted-true branches, see if anything at the
2639 branch destination needs extra nops. */
2640 if (! TARGET_CSYNC_ANOMALY)
2641 return;
2642
2643 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2644 {
2645 if (JUMP_P (insn)
2646 && any_condjump_p (insn)
2647 && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
2648 || cbranch_predicted_taken_p (insn)))
2649 {
2650 rtx target = JUMP_LABEL (insn);
2651 rtx label = target;
2652 cycles_since_jump = 0;
2653 for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
2654 {
2655 rtx pat;
2656
2657 if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
2658 continue;
2659
2660 pat = PATTERN (target);
2661 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
2662 || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
2663 || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
2664 continue;
2665
2666 if (INSN_P (target))
2667 {
2668 enum attr_type type = get_attr_type (target);
2669 int delay_needed = 0;
2670 if (cycles_since_jump < INT_MAX)
2671 cycles_since_jump++;
2672
2673 if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
2674 delay_needed = 2;
2675
2676 if (delay_needed > cycles_since_jump)
2677 {
2678 rtx prev = prev_real_insn (label);
2679 delay_needed -= cycles_since_jump;
2680 if (dump_file)
2681 fprintf (dump_file, "Adding %d nops after %d\n",
2682 delay_needed, INSN_UID (label));
2683 if (JUMP_P (prev)
2684 && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
2685 {
2686 rtx x;
2687 HOST_WIDE_INT v;
2688
2689 if (dump_file)
2690 fprintf (dump_file,
2691 "Reducing nops on insn %d.\n",
2692 INSN_UID (prev));
2693 x = PATTERN (prev);
2694 x = XVECEXP (x, 0, 1);
2695 v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
2696 XVECEXP (x, 0, 0) = GEN_INT (v);
2697 }
2698 while (delay_needed-- > 0)
2699 emit_insn_after (gen_nop (), label);
2700 break;
2701 }
2702 }
2703 }
2704 }
2705 }
2706 }
2707 \f
2708 /* Handle interrupt_handler, exception_handler and nmi_handler function
2709 attributes; arguments as in struct attribute_spec.handler. */
2710
2711 static tree
2712 handle_int_attribute (tree *node, tree name,
2713 tree args ATTRIBUTE_UNUSED,
2714 int flags ATTRIBUTE_UNUSED,
2715 bool *no_add_attrs)
2716 {
2717 tree x = *node;
2718 if (TREE_CODE (x) == FUNCTION_DECL)
2719 x = TREE_TYPE (x);
2720
2721 if (TREE_CODE (x) != FUNCTION_TYPE)
2722 {
2723 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2724 IDENTIFIER_POINTER (name));
2725 *no_add_attrs = true;
2726 }
2727 else if (funkind (x) != SUBROUTINE)
2728 error ("multiple function type attributes specified");
2729
2730 return NULL_TREE;
2731 }
2732
2733 /* Return 0 if the attributes for two types are incompatible, 1 if they
2734 are compatible, and 2 if they are nearly compatible (which causes a
2735 warning to be generated). */
2736
2737 static int
2738 bfin_comp_type_attributes (tree type1, tree type2)
2739 {
2740 e_funkind kind1, kind2;
2741
2742 if (TREE_CODE (type1) != FUNCTION_TYPE)
2743 return 1;
2744
2745 kind1 = funkind (type1);
2746 kind2 = funkind (type2);
2747
2748 if (kind1 != kind2)
2749 return 0;
2750
2751 /* Check for mismatched modifiers */
2752 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
2753 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
2754 return 0;
2755
2756 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
2757 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
2758 return 0;
2759
2760 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
2761 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
2762 return 0;
2763
2764 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
2765 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
2766 return 0;
2767
2768 return 1;
2769 }
2770
2771 /* Handle a "longcall" or "shortcall" attribute; arguments as in
2772 struct attribute_spec.handler. */
2773
2774 static tree
2775 bfin_handle_longcall_attribute (tree *node, tree name,
2776 tree args ATTRIBUTE_UNUSED,
2777 int flags ATTRIBUTE_UNUSED,
2778 bool *no_add_attrs)
2779 {
2780 if (TREE_CODE (*node) != FUNCTION_TYPE
2781 && TREE_CODE (*node) != FIELD_DECL
2782 && TREE_CODE (*node) != TYPE_DECL)
2783 {
2784 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
2785 IDENTIFIER_POINTER (name));
2786 *no_add_attrs = true;
2787 }
2788
2789 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
2790 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
2791 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
2792 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
2793 {
2794 warning (OPT_Wattributes,
2795 "can't apply both longcall and shortcall attributes to the same function");
2796 *no_add_attrs = true;
2797 }
2798
2799 return NULL_TREE;
2800 }
2801
/* Table of valid machine attributes.  Handlers are defined above;
   entries with a NULL handler are simple marker attributes checked
   elsewhere (e.g. by funkind and bfin_comp_type_attributes).  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
2816 \f
2817 /* Output the assembler code for a thunk function. THUNK_DECL is the
2818 declaration for the thunk function itself, FUNCTION is the decl for
2819 the target function. DELTA is an immediate constant offset to be
2820 added to THIS. If VCALL_OFFSET is nonzero, the word at
2821 *(*this + vcall_offset) should be added to THIS. */
2822
static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      /* A single add covers -64..63; deltas up to twice that range are
	 split into two adds; anything else goes through R3.  */
      if (delta >= -64 && delta <= 63)
	{
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* Load the full-width constant into R3 and add it.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      xops[1] = tmp;
      xops[2] = p2tmp;
      /* Load the vtable pointer (*this) into P2.  */
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* The offset is too large for a load displacement; build
	     vtable + vcall_offset in P2 using P1 as scratch.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this;
      /* this += *(vtable + vcall_offset).  */
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" makes this condition always true, so a
     direct jump is always emitted; confirm whether a separate PIC path
     was intended here.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
2886 \f
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,	/* __builtin_bfin_csync */
  BFIN_BUILTIN_SSYNC,	/* __builtin_bfin_ssync */
  BFIN_BUILTIN_MAX	/* Number of builtins; not itself a builtin.  */
};
2894
/* Helper for bfin_init_builtins: register the builtin named NAME with
   function type TYPE and function code CODE.  */
#define def_builtin(NAME, TYPE, CODE) \
do { \
  lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
			       NULL, NULL_TREE); \
} while (0)
2900
/* Set up all builtin functions for this target.  */
static void
bfin_init_builtins (void)
{
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);

  /* Both sync builtins take no arguments and return void.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);
}
2912
2913 /* Expand an expression EXP that calls a built-in function,
2914 with result going to TARGET if that's convenient
2915 (and in mode MODE if that's convenient).
2916 SUBTARGET may be used as the target for computing one of EXP's operands.
2917 IGNORE is nonzero if the value is to be ignored. */
2918
2919 static rtx
2920 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
2921 rtx subtarget ATTRIBUTE_UNUSED,
2922 enum machine_mode mode ATTRIBUTE_UNUSED,
2923 int ignore ATTRIBUTE_UNUSED)
2924 {
2925 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2926 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
2927
2928 switch (fcode)
2929 {
2930 case BFIN_BUILTIN_CSYNC:
2931 emit_insn (gen_csync ());
2932 return 0;
2933 case BFIN_BUILTIN_SSYNC:
2934 emit_insn (gen_ssync ());
2935 return 0;
2936
2937 default:
2938 gcc_unreachable ();
2939 }
2940 }
2941 \f
/* Override the default target hooks for the Blackfin port.  The
   implementations are the functions defined above in this file (or in
   bfin-protos.h for those defined elsewhere).  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

/* Initialize the GCC target structure.  */
struct gcc_target targetm = TARGET_INITIALIZER;