1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
42 #include "target-def.h"
48 #include "integrate.h"
50 #include "langhooks.h"
51 #include "bfin-protos.h"
54 #include "basic-block.h"
/* NOTE(review): this chunk is an extraction-damaged copy of
   gcc/config/bfin/bfin.c.  The original line numbers (e.g. "56", "58")
   have been fused into the text and several source lines are missing
   entirely (the embedded numbering jumps 58 -> 60).  Code is preserved
   byte-for-byte below; it will not compile as-is.  */
56 /* A C structure for machine-specific, per-function data.
57 This is added to the cfun structure. */
58 struct machine_function
GTY(())
/* Nonzero if this function contains hardware loops (LSETUP); set by the
   loop-optimization code elsewhere in this file -- TODO confirm, the
   setter is not visible in this fragment.  */
60 int has_hardware_loops
;
/* NOTE(review): extraction-damaged fragment -- original line numbers are
   fused into the text and statements are split across lines.  Preserved
   byte-for-byte.  */
63 /* Test and compare insns in bfin.md store the information needed to
64 generate branch and scc insns here. */
/* Operands captured by the compare expander for use by the later
   branch/scc expanders.  */
65 rtx bfin_compare_op0
, bfin_compare_op1
;
67 /* RTX for condition code flag register and RETS register */
/* Declarations marked for the garbage collector (GTY), with the
   definitions immediately below.  */
68 extern GTY(()) rtx bfin_cc_rtx
;
69 extern GTY(()) rtx bfin_rets_rtx
;
70 rtx bfin_cc_rtx
, bfin_rets_rtx
;
/* Count of argument-passing registers; computed in output_file_start
   (see the loop over arg_regs[] later in this file).  */
72 int max_arg_registers
= 0;
/* NOTE(review): extraction-damaged fragment -- original line numbers are
   fused into the text.  Preserved byte-for-byte.  */
74 /* Arrays used when emitting register names. */
/* The *_REGISTER_NAMES / *_PAIR_NAMES initializers are macros supplied by
   the Blackfin target headers (bfin.h) -- not visible in this chunk.  */
75 const char *short_reg_names
[] = SHORT_REGISTER_NAMES
;
76 const char *high_reg_names
[] = HIGH_REGISTER_NAMES
;
77 const char *dregs_pair_names
[] = DREGS_PAIR_NAMES
;
78 const char *byte_reg_names
[] = BYTE_REGISTER_NAMES
;
/* Hard-register numbers used for argument passing, terminated by a
   negative sentinel (the scan loop later tests arg_regs[i] >= 0).  */
80 static int arg_regs
[] = FUNCTION_ARG_REGISTERS
;
82 /* Nonzero if -mshared-library-id was given. */
83 static int bfin_lib_id_given
;
86 bfin_globalize_label (FILE *stream
, const char *name
)
88 fputs (".global ", stream
);
89 assemble_name (stream
, name
);
95 output_file_start (void)
97 FILE *file
= asm_out_file
;
100 fprintf (file
, ".file \"%s\";\n", input_filename
);
102 for (i
= 0; arg_regs
[i
] >= 0; i
++)
104 max_arg_registers
= i
; /* how many arg reg used */
107 /* Called early in the compilation to conditionally modify
108 fixed_regs/call_used_regs. */
111 conditional_register_usage (void)
113 /* initialize condition code flag register rtx */
114 bfin_cc_rtx
= gen_rtx_REG (BImode
, REG_CC
);
115 bfin_rets_rtx
= gen_rtx_REG (Pmode
, REG_RETS
);
118 /* Examine machine-dependent attributes of function type FUNTYPE and return its
119 type. See the definition of E_FUNKIND. */
121 static e_funkind
funkind (tree funtype
)
123 tree attrs
= TYPE_ATTRIBUTES (funtype
);
124 if (lookup_attribute ("interrupt_handler", attrs
))
125 return INTERRUPT_HANDLER
;
126 else if (lookup_attribute ("exception_handler", attrs
))
127 return EXCPT_HANDLER
;
128 else if (lookup_attribute ("nmi_handler", attrs
))
134 /* Legitimize PIC addresses. If the address is already position-independent,
135 we return ORIG. Newly generated position-independent addresses go into a
136 reg. This is REG if nonzero, otherwise we allocate register(s) as
137 necessary. PICREG is the register holding the pointer to the PIC offset
141 legitimize_pic_address (rtx orig
, rtx reg
, rtx picreg
)
146 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
151 if (TARGET_ID_SHARED_LIBRARY
)
152 unspec
= UNSPEC_MOVE_PIC
;
153 else if (GET_CODE (addr
) == SYMBOL_REF
154 && SYMBOL_REF_FUNCTION_P (addr
))
155 unspec
= UNSPEC_FUNCDESC_GOT17M4
;
157 unspec
= UNSPEC_MOVE_FDPIC
;
161 gcc_assert (!no_new_pseudos
);
162 reg
= gen_reg_rtx (Pmode
);
165 tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), unspec
);
166 new = gen_const_mem (Pmode
, gen_rtx_PLUS (Pmode
, picreg
, tmp
));
168 emit_move_insn (reg
, new);
169 if (picreg
== pic_offset_table_rtx
)
170 current_function_uses_pic_offset_table
= 1;
174 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
178 if (GET_CODE (addr
) == CONST
)
180 addr
= XEXP (addr
, 0);
181 gcc_assert (GET_CODE (addr
) == PLUS
);
184 if (XEXP (addr
, 0) == picreg
)
189 gcc_assert (!no_new_pseudos
);
190 reg
= gen_reg_rtx (Pmode
);
193 base
= legitimize_pic_address (XEXP (addr
, 0), reg
, picreg
);
194 addr
= legitimize_pic_address (XEXP (addr
, 1),
195 base
== reg
? NULL_RTX
: reg
,
198 if (GET_CODE (addr
) == CONST_INT
)
200 gcc_assert (! reload_in_progress
&& ! reload_completed
);
201 addr
= force_reg (Pmode
, addr
);
204 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
206 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
207 addr
= XEXP (addr
, 1);
210 return gen_rtx_PLUS (Pmode
, base
, addr
);
216 /* Stack frame layout. */
218 /* Compute the number of DREGS to save with a push_multiple operation.
219 This could include registers that aren't modified in the function,
220 since push_multiple only takes a range of registers.
221 If IS_INTHANDLER, then everything that is live must be saved, even
222 if normally call-clobbered. */
225 n_dregs_to_save (bool is_inthandler
)
229 for (i
= REG_R0
; i
<= REG_R7
; i
++)
231 if (regs_ever_live
[i
] && (is_inthandler
|| ! call_used_regs
[i
]))
232 return REG_R7
- i
+ 1;
234 if (current_function_calls_eh_return
)
239 unsigned test
= EH_RETURN_DATA_REGNO (j
);
240 if (test
== INVALID_REGNUM
)
243 return REG_R7
- i
+ 1;
251 /* Like n_dregs_to_save, but compute number of PREGS to save. */
254 n_pregs_to_save (bool is_inthandler
)
258 for (i
= REG_P0
; i
<= REG_P5
; i
++)
259 if ((regs_ever_live
[i
] && (is_inthandler
|| ! call_used_regs
[i
]))
261 && i
== PIC_OFFSET_TABLE_REGNUM
262 && (current_function_uses_pic_offset_table
263 || (TARGET_ID_SHARED_LIBRARY
&& ! current_function_is_leaf
))))
264 return REG_P5
- i
+ 1;
268 /* Determine if we are going to save the frame pointer in the prologue. */
/* NOTE(review): the function header, opening brace and closing brace of
   must_save_fp_p are missing from this extraction-damaged fragment (the
   embedded numbering jumps 271 -> 273); only the comment, the name line
   and the return expression survive.  The FP must be saved either when
   the frame pointer is structurally needed or when REG_FP is live.  */
271 must_save_fp_p (void)
273 return frame_pointer_needed
|| regs_ever_live
[REG_FP
];
277 stack_frame_needed_p (void)
279 /* EH return puts a new return address into the frame using an
280 address relative to the frame pointer. */
281 if (current_function_calls_eh_return
)
283 return frame_pointer_needed
;
286 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
287 must save all registers; this is used for interrupt handlers.
288 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
289 this for an interrupt (or exception) handler. */
292 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
294 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
);
295 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
);
296 int dregno
= REG_R7
+ 1 - ndregs
;
297 int pregno
= REG_P5
+ 1 - npregs
;
298 int total
= ndregs
+ npregs
;
305 val
= GEN_INT (-total
* 4);
306 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total
+ 2));
307 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
308 UNSPEC_PUSH_MULTIPLE
);
309 XVECEXP (pat
, 0, total
+ 1) = gen_rtx_SET (VOIDmode
, spreg
,
310 gen_rtx_PLUS (Pmode
, spreg
,
312 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total
+ 1)) = 1;
313 for (i
= 0; i
< total
; i
++)
315 rtx memref
= gen_rtx_MEM (word_mode
,
316 gen_rtx_PLUS (Pmode
, spreg
,
317 GEN_INT (- i
* 4 - 4)));
321 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
327 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
331 XVECEXP (pat
, 0, i
+ 1) = subpat
;
332 RTX_FRAME_RELATED_P (subpat
) = 1;
334 insn
= emit_insn (pat
);
335 RTX_FRAME_RELATED_P (insn
) = 1;
338 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
339 must save all registers; this is used for interrupt handlers.
340 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
341 this for an interrupt (or exception) handler. */
344 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
346 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
);
347 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
);
348 int total
= ndregs
+ npregs
;
355 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total
+ 1));
356 XVECEXP (pat
, 0, 0) = gen_rtx_SET (VOIDmode
, spreg
,
357 gen_rtx_PLUS (Pmode
, spreg
,
358 GEN_INT (total
* 4)));
365 for (i
= 0; i
< total
; i
++)
368 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
370 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
373 XVECEXP (pat
, 0, i
+ 1)
374 = gen_rtx_SET (VOIDmode
, gen_rtx_REG (word_mode
, regno
), memref
);
383 insn
= emit_insn (pat
);
384 RTX_FRAME_RELATED_P (insn
) = 1;
387 /* Perform any needed actions needed for a function that is receiving a
388 variable number of arguments.
392 MODE and TYPE are the mode and type of the current parameter.
394 PRETEND_SIZE is a variable that should be set to the amount of stack
395 that must be pushed by the prolog to pretend that our caller pushed
398 Normally, this macro will push all remaining incoming registers on the
399 stack and set PRETEND_SIZE to the length of the registers pushed.
402 - VDSP C compiler manual (our ABI) says that a variable args function
403 should save the R0, R1 and R2 registers in the stack.
404 - The caller will always leave space on the stack for the
405 arguments that are passed in registers, so we dont have
406 to leave any extra space.
407 - now, the vastart pointer can access all arguments from the stack. */
410 setup_incoming_varargs (CUMULATIVE_ARGS
*cum
,
411 enum machine_mode mode ATTRIBUTE_UNUSED
,
412 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
421 /* The move for named arguments will be generated automatically by the
422 compiler. We need to generate the move rtx for the unnamed arguments
423 if they are in the first 3 words. We assume at least 1 named argument
424 exists, so we never generate [ARGP] = R0 here. */
426 for (i
= cum
->words
+ 1; i
< max_arg_registers
; i
++)
428 mem
= gen_rtx_MEM (Pmode
,
429 plus_constant (arg_pointer_rtx
, (i
* UNITS_PER_WORD
)));
430 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
436 /* Value should be nonzero if functions must have frame pointers.
437 Zero means the frame pointer need not be set up (and parms may
438 be accessed via the stack pointer) in functions that seem suitable. */
441 bfin_frame_pointer_required (void)
443 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
445 if (fkind
!= SUBROUTINE
)
448 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
449 so we have to override it for non-leaf functions. */
450 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! current_function_is_leaf
)
456 /* Return the number of registers pushed during the prologue. */
459 n_regs_saved_by_prologue (void)
461 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
462 bool is_inthandler
= fkind
!= SUBROUTINE
;
463 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
464 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
465 || (is_inthandler
&& !current_function_is_leaf
));
466 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
);
467 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
);
468 int n
= ndregs
+ npregs
;
470 if (all
|| stack_frame_needed_p ())
471 /* We use a LINK instruction in this case. */
475 if (must_save_fp_p ())
477 if (! current_function_is_leaf
)
481 if (fkind
!= SUBROUTINE
)
485 /* Increment once for ASTAT. */
489 if (lookup_attribute ("nesting", attrs
))
492 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
495 || (!leaf_function_p () && call_used_regs
[i
]))
496 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
501 /* Return the offset between two registers, one to be eliminated, and the other
502 its replacement, at the start of a routine. */
505 bfin_initial_elimination_offset (int from
, int to
)
507 HOST_WIDE_INT offset
= 0;
509 if (from
== ARG_POINTER_REGNUM
)
510 offset
= n_regs_saved_by_prologue () * 4;
512 if (to
== STACK_POINTER_REGNUM
)
514 if (current_function_outgoing_args_size
>= FIXED_STACK_AREA
)
515 offset
+= current_function_outgoing_args_size
;
516 else if (current_function_outgoing_args_size
)
517 offset
+= FIXED_STACK_AREA
;
519 offset
+= get_frame_size ();
525 /* Emit code to load a constant CONSTANT into register REG; setting
526 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
527 Make sure that the insns we generate need not be split. */
530 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
533 rtx cst
= GEN_INT (constant
);
535 if (constant
>= -32768 && constant
< 65536)
536 insn
= emit_move_insn (reg
, cst
);
539 /* We don't call split_load_immediate here, since dwarf2out.c can get
540 confused about some of the more clever sequences it can generate. */
541 insn
= emit_insn (gen_movsi_high (reg
, cst
));
543 RTX_FRAME_RELATED_P (insn
) = 1;
544 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
547 RTX_FRAME_RELATED_P (insn
) = 1;
550 /* Generate efficient code to add a value to the frame pointer. We
551 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
552 generated insns if FRAME is nonzero. */
555 add_to_sp (rtx spreg
, HOST_WIDE_INT value
, int frame
)
560 /* Choose whether to use a sequence using a temporary register, or
561 a sequence with multiple adds. We can add a signed 7 bit value
562 in one instruction. */
563 if (value
> 120 || value
< -120)
565 rtx tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
569 frame_related_constant_load (tmpreg
, value
, TRUE
);
572 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
574 RTX_FRAME_RELATED_P (insn
) = 1;
577 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
579 RTX_FRAME_RELATED_P (insn
) = 1;
590 /* We could use -62, but that would leave the stack unaligned, so
594 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (size
)));
596 RTX_FRAME_RELATED_P (insn
) = 1;
602 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
603 is too large, generate a sequence of insns that has the same effect.
604 SPREG contains (reg:SI REG_SP). */
607 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
609 HOST_WIDE_INT link_size
= frame_size
;
613 if (link_size
> 262140)
616 /* Use a LINK insn with as big a constant as possible, then subtract
617 any remaining size from the SP. */
618 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
619 RTX_FRAME_RELATED_P (insn
) = 1;
621 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
623 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
624 gcc_assert (GET_CODE (set
) == SET
);
625 RTX_FRAME_RELATED_P (set
) = 1;
628 frame_size
-= link_size
;
632 /* Must use a call-clobbered PREG that isn't the static chain. */
633 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
635 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
636 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
637 RTX_FRAME_RELATED_P (insn
) = 1;
641 /* Return the number of bytes we must reserve for outgoing arguments
642 in the current function's stack frame. */
/* NOTE(review): the function header is missing from this damaged
   fragment; presumably this is arg_area_size, which do_link calls below
   (frame_size += arg_area_size ()) -- TODO confirm against upstream.
   Visible logic: when the function has outgoing arguments, reserve the
   larger of that size and FIXED_STACK_AREA; the zero-argument path
   (and the final brace) are among the missing lines (646, 648, 651
   absent from the embedded numbering).  */
647 if (current_function_outgoing_args_size
)
649 if (current_function_outgoing_args_size
>= FIXED_STACK_AREA
)
650 return current_function_outgoing_args_size
;
652 return FIXED_STACK_AREA
;
657 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
658 function must save all its registers (true only for certain interrupt
662 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
664 frame_size
+= arg_area_size ();
666 if (all
|| stack_frame_needed_p ()
667 || (must_save_fp_p () && ! current_function_is_leaf
))
668 emit_link_insn (spreg
, frame_size
);
671 if (! current_function_is_leaf
)
673 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
674 gen_rtx_PRE_DEC (Pmode
, spreg
)),
676 rtx insn
= emit_insn (pat
);
677 RTX_FRAME_RELATED_P (insn
) = 1;
679 if (must_save_fp_p ())
681 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
682 gen_rtx_PRE_DEC (Pmode
, spreg
)),
683 gen_rtx_REG (Pmode
, REG_FP
));
684 rtx insn
= emit_insn (pat
);
685 RTX_FRAME_RELATED_P (insn
) = 1;
687 add_to_sp (spreg
, -frame_size
, 1);
691 /* Like do_link, but used for epilogues to deallocate the stack frame. */
694 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
696 frame_size
+= arg_area_size ();
698 if (all
|| stack_frame_needed_p ())
699 emit_insn (gen_unlink ());
702 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
704 add_to_sp (spreg
, frame_size
, 0);
705 if (must_save_fp_p ())
707 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
708 emit_move_insn (fpreg
, postinc
);
709 emit_insn (gen_rtx_USE (VOIDmode
, fpreg
));
711 if (! current_function_is_leaf
)
713 emit_move_insn (bfin_rets_rtx
, postinc
);
714 emit_insn (gen_rtx_USE (VOIDmode
, bfin_rets_rtx
));
719 /* Generate a prologue suitable for a function of kind FKIND. This is
720 called for interrupt and exception handler prologues.
721 SPREG contains (reg:SI REG_SP). */
724 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
)
727 HOST_WIDE_INT frame_size
= get_frame_size ();
728 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
729 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
731 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
732 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
733 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
737 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
738 RTX_FRAME_RELATED_P (insn
) = 1;
741 /* We need space on the stack in case we need to save the argument
743 if (fkind
== EXCPT_HANDLER
)
745 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
746 RTX_FRAME_RELATED_P (insn
) = 1;
749 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
750 RTX_FRAME_RELATED_P (insn
) = 1;
752 /* If we're calling other functions, they won't save their call-clobbered
753 registers, so we must save everything here. */
754 if (!current_function_is_leaf
)
756 expand_prologue_reg_save (spreg
, all
, true);
758 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
761 || (!leaf_function_p () && call_used_regs
[i
]))
763 if (i
== REG_A0
|| i
== REG_A1
)
764 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
765 gen_rtx_REG (PDImode
, i
));
767 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
768 RTX_FRAME_RELATED_P (insn
) = 1;
771 if (lookup_attribute ("nesting", attrs
))
773 rtx srcreg
= gen_rtx_REG (Pmode
, (fkind
== EXCPT_HANDLER
? REG_RETX
774 : fkind
== NMI_HANDLER
? REG_RETN
776 insn
= emit_move_insn (predec
, srcreg
);
777 RTX_FRAME_RELATED_P (insn
) = 1;
780 do_link (spreg
, frame_size
, all
);
782 if (fkind
== EXCPT_HANDLER
)
784 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
785 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
786 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
789 insn
= emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
790 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
792 insn
= emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
793 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
795 insn
= emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
796 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
798 insn
= emit_move_insn (r1reg
, spreg
);
799 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
801 insn
= emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
802 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
804 insn
= emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
805 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
810 /* Generate an epilogue suitable for a function of kind FKIND. This is
811 called for interrupt and exception handler epilogues.
812 SPREG contains (reg:SI REG_SP). */
815 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
)
818 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
819 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
820 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
821 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
823 /* A slightly crude technique to stop flow from trying to delete "dead"
825 MEM_VOLATILE_P (postinc
) = 1;
827 do_unlink (spreg
, get_frame_size (), all
);
829 if (lookup_attribute ("nesting", attrs
))
831 rtx srcreg
= gen_rtx_REG (Pmode
, (fkind
== EXCPT_HANDLER
? REG_RETX
832 : fkind
== NMI_HANDLER
? REG_RETN
834 emit_move_insn (srcreg
, postinc
);
837 /* If we're calling other functions, they won't save their call-clobbered
838 registers, so we must save (and restore) everything here. */
839 if (!current_function_is_leaf
)
842 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
845 || (!leaf_function_p () && call_used_regs
[i
]))
847 if (i
== REG_A0
|| i
== REG_A1
)
849 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
850 MEM_VOLATILE_P (mem
) = 1;
851 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
854 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
857 expand_epilogue_reg_restore (spreg
, all
, true);
859 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
861 /* Deallocate any space we left on the stack in case we needed to save the
862 argument registers. */
863 if (fkind
== EXCPT_HANDLER
)
864 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
866 emit_jump_insn (gen_return_internal (GEN_INT (fkind
)));
869 /* Used while emitting the prologue to generate code to load the correct value
870 into the PIC register, which is passed in DEST. */
873 bfin_load_pic_reg (rtx dest
)
875 struct cgraph_local_info
*i
= NULL
;
878 if (flag_unit_at_a_time
)
879 i
= cgraph_local_info (current_function_decl
);
881 /* Functions local to the translation unit don't need to reload the
882 pic reg, since the caller always passes a usable one. */
884 return pic_offset_table_rtx
;
886 if (bfin_lib_id_given
)
887 addr
= plus_constant (pic_offset_table_rtx
, -4 - bfin_library_id
* 4);
889 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
890 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
891 UNSPEC_LIBRARY_OFFSET
));
892 insn
= emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
893 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
897 /* Generate RTL for the prologue of the current function. */
900 bfin_expand_prologue (void)
903 HOST_WIDE_INT frame_size
= get_frame_size ();
904 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
905 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
906 rtx pic_reg_loaded
= NULL_RTX
;
908 if (fkind
!= SUBROUTINE
)
910 expand_interrupt_handler_prologue (spreg
, fkind
);
914 if (current_function_limit_stack
)
917 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
918 STACK_POINTER_REGNUM
);
919 rtx lim
= stack_limit_rtx
;
921 if (GET_CODE (lim
) == SYMBOL_REF
)
923 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
924 if (TARGET_ID_SHARED_LIBRARY
)
926 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
928 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
929 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
931 emit_move_insn (p1reg
, val
);
932 frame_related_constant_load (p2reg
, offset
, FALSE
);
933 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
938 rtx limit
= plus_constant (stack_limit_rtx
, offset
);
939 emit_move_insn (p2reg
, limit
);
943 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
944 emit_insn (gen_trapifcc ());
946 expand_prologue_reg_save (spreg
, 0, false);
948 do_link (spreg
, frame_size
, false);
950 if (TARGET_ID_SHARED_LIBRARY
952 && (current_function_uses_pic_offset_table
953 || !current_function_is_leaf
))
954 bfin_load_pic_reg (pic_offset_table_rtx
);
957 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
958 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
959 eh_return pattern. */
962 bfin_expand_epilogue (int need_return
, int eh_return
)
964 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
965 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
967 if (fkind
!= SUBROUTINE
)
969 expand_interrupt_handler_epilogue (spreg
, fkind
);
973 do_unlink (spreg
, get_frame_size (), false);
975 expand_epilogue_reg_restore (spreg
, false, false);
977 /* Omit the return insn if this is for a sibcall. */
982 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
984 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE
)));
987 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
990 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
991 unsigned int new_reg
)
993 /* Interrupt functions can only use registers that have already been
994 saved by the prologue, even if they would normally be
997 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
998 && !regs_ever_live
[new_reg
])
1004 /* Return the value of the return address for the frame COUNT steps up
1005 from the current frame, after the prologue.
1006 We punt for everything but the current frame by returning const0_rtx. */
1009 bfin_return_addr_rtx (int count
)
1014 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1017 /* Try machine-dependent ways of modifying an illegitimate address X
1018 to be legitimate. If we find one, return the new, valid address,
1019 otherwise return NULL_RTX.
1021 OLDX is the address as it was before break_out_memory_refs was called.
1022 In some cases it is useful to look at this to decide what needs to be done.
1024 MODE is the mode of the memory reference. */
1027 legitimize_address (rtx x ATTRIBUTE_UNUSED
, rtx oldx ATTRIBUTE_UNUSED
,
1028 enum machine_mode mode ATTRIBUTE_UNUSED
)
1034 bfin_delegitimize_address (rtx orig_x
)
1038 if (GET_CODE (x
) != MEM
)
1042 if (GET_CODE (x
) == PLUS
1043 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1044 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1045 && GET_CODE (XEXP (x
, 0)) == REG
1046 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1047 return XVECEXP (XEXP (x
, 1), 0, 0);
1052 /* This predicate is used to compute the length of a load/store insn.
1053 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1054 32 bit instruction. */
1057 effective_address_32bit_p (rtx op
, enum machine_mode mode
)
1059 HOST_WIDE_INT offset
;
1061 mode
= GET_MODE (op
);
1064 if (GET_CODE (op
) != PLUS
)
1066 gcc_assert (REG_P (op
) || GET_CODE (op
) == POST_INC
1067 || GET_CODE (op
) == PRE_DEC
|| GET_CODE (op
) == POST_DEC
);
1071 offset
= INTVAL (XEXP (op
, 1));
1073 /* All byte loads use a 16 bit offset. */
1074 if (GET_MODE_SIZE (mode
) == 1)
1077 if (GET_MODE_SIZE (mode
) == 4)
1079 /* Frame pointer relative loads can use a negative offset, all others
1080 are restricted to a small positive one. */
1081 if (XEXP (op
, 0) == frame_pointer_rtx
)
1082 return offset
< -128 || offset
> 60;
1083 return offset
< 0 || offset
> 60;
1086 /* Must be HImode now. */
1087 return offset
< 0 || offset
> 30;
1090 /* Returns true if X is a memory reference using an I register. */
1092 bfin_dsp_memref_p (rtx x
)
1097 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1098 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1103 /* Return cost of the memory address ADDR.
1104 All addressing modes are equally cheap on the Blackfin. */
1107 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
)
1112 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1115 print_address_operand (FILE *file
, rtx x
)
1117 switch (GET_CODE (x
))
1120 output_address (XEXP (x
, 0));
1121 fprintf (file
, "+");
1122 output_address (XEXP (x
, 1));
1126 fprintf (file
, "--");
1127 output_address (XEXP (x
, 0));
1130 output_address (XEXP (x
, 0));
1131 fprintf (file
, "++");
1134 output_address (XEXP (x
, 0));
1135 fprintf (file
, "--");
1139 gcc_assert (GET_CODE (x
) != MEM
);
1140 print_operand (file
, x
, 0);
1145 /* Adding intp DImode support by Tony
1151 print_operand (FILE *file
, rtx x
, char code
)
1153 enum machine_mode mode
= GET_MODE (x
);
1158 switch (GET_CODE (x
))
1161 fprintf (file
, "e");
1164 fprintf (file
, "ne");
1167 fprintf (file
, "g");
1170 fprintf (file
, "l");
1173 fprintf (file
, "ge");
1176 fprintf (file
, "le");
1179 fprintf (file
, "g");
1182 fprintf (file
, "l");
1185 fprintf (file
, "ge");
1188 fprintf (file
, "le");
1191 output_operand_lossage ("invalid %%j value");
1195 case 'J': /* reverse logic */
1196 switch (GET_CODE(x
))
1199 fprintf (file
, "ne");
1202 fprintf (file
, "e");
1205 fprintf (file
, "le");
1208 fprintf (file
, "ge");
1211 fprintf (file
, "l");
1214 fprintf (file
, "g");
1217 fprintf (file
, "le");
1220 fprintf (file
, "ge");
1223 fprintf (file
, "l");
1226 fprintf (file
, "g");
1229 output_operand_lossage ("invalid %%J value");
1234 switch (GET_CODE (x
))
1239 gcc_assert (REGNO (x
) < 32);
1240 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1241 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1244 else if (code
== 'd')
1246 gcc_assert (REGNO (x
) < 32);
1247 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1250 else if (code
== 'w')
1252 gcc_assert (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
);
1253 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1255 else if (code
== 'x')
1257 gcc_assert (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
);
1258 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1260 else if (code
== 'D')
1262 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1264 else if (code
== 'H')
1266 gcc_assert (mode
== DImode
|| mode
== DFmode
);
1267 gcc_assert (REG_P (x
));
1268 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1270 else if (code
== 'T')
1272 gcc_assert (D_REGNO_P (REGNO (x
)));
1273 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1276 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1282 print_address_operand (file
, x
);
1294 fputs ("(FU)", file
);
1297 fputs ("(T)", file
);
1300 fputs ("(TFU)", file
);
1303 fputs ("(W32)", file
);
1306 fputs ("(IS)", file
);
1309 fputs ("(IU)", file
);
1312 fputs ("(IH)", file
);
1315 fputs ("(M)", file
);
1318 fputs ("(ISS2)", file
);
1321 fputs ("(S2RND)", file
);
1328 else if (code
== 'b')
1330 if (INTVAL (x
) == 0)
1332 else if (INTVAL (x
) == 1)
1338 /* Moves to half registers with d or h modifiers always use unsigned
1340 else if (code
== 'd')
1341 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1342 else if (code
== 'h')
1343 x
= GEN_INT (INTVAL (x
) & 0xffff);
1344 else if (code
== 'X')
1345 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1346 else if (code
== 'Y')
1347 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1348 else if (code
== 'Z')
1349 /* Used for LINK insns. */
1350 x
= GEN_INT (-8 - INTVAL (x
));
1355 output_addr_const (file
, x
);
1359 output_operand_lossage ("invalid const_double operand");
1363 switch (XINT (x
, 1))
1365 case UNSPEC_MOVE_PIC
:
1366 output_addr_const (file
, XVECEXP (x
, 0, 0));
1367 fprintf (file
, "@GOT");
1370 case UNSPEC_MOVE_FDPIC
:
1371 output_addr_const (file
, XVECEXP (x
, 0, 0));
1372 fprintf (file
, "@GOT17M4");
1375 case UNSPEC_FUNCDESC_GOT17M4
:
1376 output_addr_const (file
, XVECEXP (x
, 0, 0));
1377 fprintf (file
, "@FUNCDESC_GOT17M4");
1380 case UNSPEC_LIBRARY_OFFSET
:
1381 fprintf (file
, "_current_shared_library_p5_offset_");
1390 output_addr_const (file
, x
);
1395 /* Argument support functions. */
1397 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1398 for a call to a function whose data type is FNTYPE.
1399 For a library call, FNTYPE is 0.
1400 VDSP C Compiler manual, our ABI says that
1401 first 3 words of arguments will use R0, R1 and R2.
1405 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
1406 rtx libname ATTRIBUTE_UNUSED
)
1408 static CUMULATIVE_ARGS zero_cum
;
1412 /* Set up the number of registers to use for passing arguments. */
1414 cum
->nregs
= max_arg_registers
;
1415 cum
->arg_regs
= arg_regs
;
1417 cum
->call_cookie
= CALL_NORMAL
;
1418 /* Check for a longcall attribute. */
1419 if (fntype
&& lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
1420 cum
->call_cookie
|= CALL_SHORT
;
1421 else if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
1422 cum
->call_cookie
|= CALL_LONG
;
1427 /* Update the data in CUM to advance over an argument
1428 of mode MODE and data type TYPE.
1429 (TYPE is null for libcalls where that information may not be available.) */
1432 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1433 int named ATTRIBUTE_UNUSED
)
1435 int count
, bytes
, words
;
1437 bytes
= (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1438 words
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1440 cum
->words
+= words
;
1441 cum
->nregs
-= words
;
1443 if (cum
->nregs
<= 0)
1446 cum
->arg_regs
= NULL
;
1450 for (count
= 1; count
<= words
; count
++)
1457 /* Define where to put the arguments to a function.
1458 Value is zero to push the argument on the stack,
1459 or a hard register in which to store the argument.
1461 MODE is the argument's machine mode.
1462 TYPE is the data type of the argument (as a tree).
1463 This is null for libcalls where that information may
1465 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1466 the preceding args and about the function being called.
1467 NAMED is nonzero if this argument is a named parameter
1468 (otherwise it is an extra parameter matching an ellipsis). */
1471 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
, tree type
,
1472 int named ATTRIBUTE_UNUSED
)
1475 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1477 if (mode
== VOIDmode
)
1478 /* Compute operand 2 of the call insn. */
1479 return GEN_INT (cum
->call_cookie
);
1485 return gen_rtx_REG (mode
, *(cum
->arg_regs
));
1490 /* For an arg passed partly in registers and partly in memory,
1491 this is the number of bytes passed in registers.
1492 For args passed entirely in registers or entirely in memory, zero.
1494 Refer VDSP C Compiler manual, our ABI.
1495 First 3 words are in registers. So, if a an argument is larger
1496 than the registers available, it will span the register and
1500 bfin_arg_partial_bytes (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
1501 tree type ATTRIBUTE_UNUSED
,
1502 bool named ATTRIBUTE_UNUSED
)
1505 = (mode
== BLKmode
) ? int_size_in_bytes (type
) : GET_MODE_SIZE (mode
);
1506 int bytes_left
= cum
->nregs
* UNITS_PER_WORD
;
1511 if (bytes_left
== 0)
1513 if (bytes
> bytes_left
)
1518 /* Variable sized types are passed by reference. */
1521 bfin_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
1522 enum machine_mode mode ATTRIBUTE_UNUSED
,
1523 tree type
, bool named ATTRIBUTE_UNUSED
)
1525 return type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
;
1528 /* Decide whether a type should be returned in memory (true)
1529 or in a register (false). This is called by the macro
1530 RETURN_IN_MEMORY. */
1533 bfin_return_in_memory (tree type
)
1535 int size
= int_size_in_bytes (type
);
1536 return size
> 2 * UNITS_PER_WORD
|| size
== -1;
1539 /* Register in which address to store a structure value
1540 is passed to a function. */
1542 bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED
,
1543 int incoming ATTRIBUTE_UNUSED
)
1545 return gen_rtx_REG (Pmode
, REG_P0
);
1548 /* Return true when register may be used to pass function parameters. */
1551 function_arg_regno_p (int n
)
1554 for (i
= 0; arg_regs
[i
] != -1; i
++)
1555 if (n
== arg_regs
[i
])
1560 /* Returns 1 if OP contains a symbol reference */
1563 symbolic_reference_mentioned_p (rtx op
)
1565 register const char *fmt
;
1568 if (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
)
1571 fmt
= GET_RTX_FORMAT (GET_CODE (op
));
1572 for (i
= GET_RTX_LENGTH (GET_CODE (op
)) - 1; i
>= 0; i
--)
1578 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
1579 if (symbolic_reference_mentioned_p (XVECEXP (op
, i
, j
)))
1583 else if (fmt
[i
] == 'e' && symbolic_reference_mentioned_p (XEXP (op
, i
)))
1590 /* Decide whether we can make a sibling call to a function. DECL is the
1591 declaration of the function being targeted by the call and EXP is the
1592 CALL_EXPR representing the call. */
1595 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED
,
1596 tree exp ATTRIBUTE_UNUSED
)
1598 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
1599 if (fkind
!= SUBROUTINE
)
1601 if (!TARGET_ID_SHARED_LIBRARY
|| TARGET_SEP_DATA
)
1604 /* When compiling for ID shared libraries, can't sibcall a local function
1605 from a non-local function, because the local function thinks it does
1606 not need to reload P5 in the prologue, but the sibcall wil pop P5 in the
1607 sibcall epilogue, and we end up with the wrong value in P5. */
1609 if (!flag_unit_at_a_time
|| decl
== NULL
)
1610 /* Not enough information. */
1614 struct cgraph_local_info
*this_func
, *called_func
;
1617 this_func
= cgraph_local_info (current_function_decl
);
1618 called_func
= cgraph_local_info (decl
);
1619 return !called_func
->local
|| this_func
->local
;
1623 /* Emit RTL insns to initialize the variable parts of a trampoline at
1624 TRAMP. FNADDR is an RTX for the address of the function's pure
1625 code. CXT is an RTX for the static chain value for the function. */
1628 initialize_trampoline (tramp
, fnaddr
, cxt
)
1629 rtx tramp
, fnaddr
, cxt
;
1631 rtx t1
= copy_to_reg (fnaddr
);
1632 rtx t2
= copy_to_reg (cxt
);
1638 rtx a
= memory_address (Pmode
, plus_constant (tramp
, 8));
1639 addr
= memory_address (Pmode
, tramp
);
1640 emit_move_insn (gen_rtx_MEM (SImode
, addr
), a
);
1644 addr
= memory_address (Pmode
, plus_constant (tramp
, i
+ 2));
1645 emit_move_insn (gen_rtx_MEM (HImode
, addr
), gen_lowpart (HImode
, t1
));
1646 emit_insn (gen_ashrsi3 (t1
, t1
, GEN_INT (16)));
1647 addr
= memory_address (Pmode
, plus_constant (tramp
, i
+ 6));
1648 emit_move_insn (gen_rtx_MEM (HImode
, addr
), gen_lowpart (HImode
, t1
));
1650 addr
= memory_address (Pmode
, plus_constant (tramp
, i
+ 10));
1651 emit_move_insn (gen_rtx_MEM (HImode
, addr
), gen_lowpart (HImode
, t2
));
1652 emit_insn (gen_ashrsi3 (t2
, t2
, GEN_INT (16)));
1653 addr
= memory_address (Pmode
, plus_constant (tramp
, i
+ 14));
1654 emit_move_insn (gen_rtx_MEM (HImode
, addr
), gen_lowpart (HImode
, t2
));
1657 /* Emit insns to move operands[1] into operands[0]. */
1660 emit_pic_move (rtx
*operands
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1662 rtx temp
= reload_in_progress
? operands
[0] : gen_reg_rtx (Pmode
);
1664 gcc_assert (!TARGET_FDPIC
|| !(reload_in_progress
|| reload_completed
));
1665 if (GET_CODE (operands
[0]) == MEM
&& SYMBOLIC_CONST (operands
[1]))
1666 operands
[1] = force_reg (SImode
, operands
[1]);
1668 operands
[1] = legitimize_pic_address (operands
[1], temp
,
1669 TARGET_FDPIC
? OUR_FDPIC_REG
1670 : pic_offset_table_rtx
);
1673 /* Expand a move operation in mode MODE. The operands are in OPERANDS.
1674 Returns true if no further code must be generated, false if the caller
1675 should generate an insn to move OPERANDS[1] to OPERANDS[0]. */
1678 expand_move (rtx
*operands
, enum machine_mode mode
)
1680 rtx op
= operands
[1];
1681 if ((TARGET_ID_SHARED_LIBRARY
|| TARGET_FDPIC
)
1682 && SYMBOLIC_CONST (op
))
1683 emit_pic_move (operands
, mode
);
1684 else if (mode
== SImode
&& GET_CODE (op
) == CONST
1685 && GET_CODE (XEXP (op
, 0)) == PLUS
1686 && GET_CODE (XEXP (XEXP (op
, 0), 0)) == SYMBOL_REF
1687 && !bfin_legitimate_constant_p (op
))
1689 rtx dest
= operands
[0];
1691 gcc_assert (!reload_in_progress
&& !reload_completed
);
1693 op0
= force_reg (mode
, XEXP (op
, 0));
1695 if (!insn_data
[CODE_FOR_addsi3
].operand
[2].predicate (op1
, mode
))
1696 op1
= force_reg (mode
, op1
);
1697 if (GET_CODE (dest
) == MEM
)
1698 dest
= gen_reg_rtx (mode
);
1699 emit_insn (gen_addsi3 (dest
, op0
, op1
));
1700 if (dest
== operands
[0])
1704 /* Don't generate memory->memory or constant->memory moves, go through a
1706 else if ((reload_in_progress
| reload_completed
) == 0
1707 && GET_CODE (operands
[0]) == MEM
1708 && GET_CODE (operands
[1]) != REG
)
1709 operands
[1] = force_reg (mode
, operands
[1]);
1713 /* Split one or more DImode RTL references into pairs of SImode
1714 references. The RTL can be REG, offsettable MEM, integer constant, or
1715 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1716 split and "num" is its length. lo_half and hi_half are output arrays
1717 that parallel "operands". */
1720 split_di (rtx operands
[], int num
, rtx lo_half
[], rtx hi_half
[])
1724 rtx op
= operands
[num
];
1726 /* simplify_subreg refuse to split volatile memory addresses,
1727 but we still have to handle it. */
1728 if (GET_CODE (op
) == MEM
)
1730 lo_half
[num
] = adjust_address (op
, SImode
, 0);
1731 hi_half
[num
] = adjust_address (op
, SImode
, 4);
1735 lo_half
[num
] = simplify_gen_subreg (SImode
, op
,
1736 GET_MODE (op
) == VOIDmode
1737 ? DImode
: GET_MODE (op
), 0);
1738 hi_half
[num
] = simplify_gen_subreg (SImode
, op
,
1739 GET_MODE (op
) == VOIDmode
1740 ? DImode
: GET_MODE (op
), 4);
1746 bfin_longcall_p (rtx op
, int call_cookie
)
1748 gcc_assert (GET_CODE (op
) == SYMBOL_REF
);
1749 if (call_cookie
& CALL_SHORT
)
1751 if (call_cookie
& CALL_LONG
)
1753 if (TARGET_LONG_CALLS
)
1758 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1759 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
1760 SIBCALL is nonzero if this is a sibling call. */
1763 bfin_expand_call (rtx retval
, rtx fnaddr
, rtx callarg1
, rtx cookie
, int sibcall
)
1765 rtx use
= NULL
, call
;
1766 rtx callee
= XEXP (fnaddr
, 0);
1767 int nelts
= 2 + !!sibcall
;
1769 rtx picreg
= get_hard_reg_initial_val (SImode
, FDPIC_REGNO
);
1772 /* In an untyped call, we can get NULL for operand 2. */
1773 if (cookie
== NULL_RTX
)
1774 cookie
= const0_rtx
;
1776 /* Static functions and indirect calls don't need the pic register. */
1777 if (!TARGET_FDPIC
&& flag_pic
1778 && GET_CODE (callee
) == SYMBOL_REF
1779 && !SYMBOL_REF_LOCAL_P (callee
))
1780 use_reg (&use
, pic_offset_table_rtx
);
1784 if (GET_CODE (callee
) != SYMBOL_REF
1785 || bfin_longcall_p (callee
, INTVAL (cookie
)))
1788 if (! address_operand (addr
, Pmode
))
1789 addr
= force_reg (Pmode
, addr
);
1791 fnaddr
= gen_reg_rtx (SImode
);
1792 emit_insn (gen_load_funcdescsi (fnaddr
, addr
));
1793 fnaddr
= gen_rtx_MEM (Pmode
, fnaddr
);
1795 picreg
= gen_reg_rtx (SImode
);
1796 emit_insn (gen_load_funcdescsi (picreg
,
1797 plus_constant (addr
, 4)));
1802 else if ((!register_no_elim_operand (callee
, Pmode
)
1803 && GET_CODE (callee
) != SYMBOL_REF
)
1804 || (GET_CODE (callee
) == SYMBOL_REF
1805 && ((TARGET_ID_SHARED_LIBRARY
&& !TARGET_LEAF_ID_SHARED_LIBRARY
)
1806 || bfin_longcall_p (callee
, INTVAL (cookie
)))))
1808 callee
= copy_to_mode_reg (Pmode
, callee
);
1809 fnaddr
= gen_rtx_MEM (Pmode
, callee
);
1811 call
= gen_rtx_CALL (VOIDmode
, fnaddr
, callarg1
);
1814 call
= gen_rtx_SET (VOIDmode
, retval
, call
);
1816 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nelts
));
1818 XVECEXP (pat
, 0, n
++) = call
;
1820 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, picreg
);
1821 XVECEXP (pat
, 0, n
++) = gen_rtx_USE (VOIDmode
, cookie
);
1823 XVECEXP (pat
, 0, n
++) = gen_rtx_RETURN (VOIDmode
);
1824 call
= emit_call_insn (pat
);
1826 CALL_INSN_FUNCTION_USAGE (call
) = use
;
1829 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1832 hard_regno_mode_ok (int regno
, enum machine_mode mode
)
1834 /* Allow only dregs to store value of mode HI or QI */
1835 enum reg_class
class = REGNO_REG_CLASS (regno
);
1840 if (mode
== V2HImode
)
1841 return D_REGNO_P (regno
);
1842 if (class == CCREGS
)
1843 return mode
== BImode
;
1844 if (mode
== PDImode
|| mode
== V2PDImode
)
1845 return regno
== REG_A0
|| regno
== REG_A1
;
1847 && TEST_HARD_REG_BIT (reg_class_contents
[PROLOGUE_REGS
], regno
))
1850 return TEST_HARD_REG_BIT (reg_class_contents
[MOST_REGS
], regno
);
1853 /* Implements target hook vector_mode_supported_p. */
1856 bfin_vector_mode_supported_p (enum machine_mode mode
)
1858 return mode
== V2HImode
;
1861 /* Return the cost of moving data from a register in class CLASS1 to
1862 one in class CLASS2. A cost of 2 is the default. */
1865 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
1866 enum reg_class class1
, enum reg_class class2
)
1868 /* These need secondary reloads, so they're more expensive. */
1869 if ((class1
== CCREGS
&& class2
!= DREGS
)
1870 || (class1
!= DREGS
&& class2
== CCREGS
))
1873 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1877 /* There are some stalls involved when moving from a DREG to a different
1878 class reg, and using the value in one of the following instructions.
1879 Attempt to model this by slightly discouraging such moves. */
1880 if (class1
== DREGS
&& class2
!= DREGS
)
1886 /* Return the cost of moving data of mode M between a
1887 register and memory. A value of 2 is the default; this cost is
1888 relative to those in `REGISTER_MOVE_COST'.
1890 ??? In theory L1 memory has single-cycle latency. We should add a switch
1891 that tells the compiler whether we expect to use only L1 memory for the
1892 program; it'll make the costs more accurate. */
1895 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
1896 enum reg_class
class,
1897 int in ATTRIBUTE_UNUSED
)
1899 /* Make memory accesses slightly more expensive than any register-register
1900 move. Also, penalize non-DP registers, since they need secondary
1901 reloads to load and store. */
1902 if (! reg_class_subset_p (class, DPREGS
))
1908 /* Inform reload about cases where moving X with a mode MODE to a register in
1909 CLASS requires an extra scratch register. Return the class needed for the
1910 scratch register. */
1912 static enum reg_class
1913 bfin_secondary_reload (bool in_p
, rtx x
, enum reg_class
class,
1914 enum machine_mode mode
, secondary_reload_info
*sri
)
1916 /* If we have HImode or QImode, we can only use DREGS as secondary registers;
1917 in most other cases we can also use PREGS. */
1918 enum reg_class default_class
= GET_MODE_SIZE (mode
) >= 4 ? DPREGS
: DREGS
;
1919 enum reg_class x_class
= NO_REGS
;
1920 enum rtx_code code
= GET_CODE (x
);
1923 x
= SUBREG_REG (x
), code
= GET_CODE (x
);
1926 int regno
= REGNO (x
);
1927 if (regno
>= FIRST_PSEUDO_REGISTER
)
1928 regno
= reg_renumber
[regno
];
1933 x_class
= REGNO_REG_CLASS (regno
);
1936 /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
1937 This happens as a side effect of register elimination, and we need
1938 a scratch register to do it. */
1939 if (fp_plus_const_operand (x
, mode
))
1941 rtx op2
= XEXP (x
, 1);
1942 int large_constant_p
= ! CONST_7BIT_IMM_P (INTVAL (op2
));
1944 if (class == PREGS
|| class == PREGS_CLOBBERED
)
1946 /* If destination is a DREG, we can do this without a scratch register
1947 if the constant is valid for an add instruction. */
1948 if ((class == DREGS
|| class == DPREGS
)
1949 && ! large_constant_p
)
1951 /* Reloading to anything other than a DREG? Use a PREG scratch
1953 sri
->icode
= CODE_FOR_reload_insi
;
1957 /* Data can usually be moved freely between registers of most classes.
1958 AREGS are an exception; they can only move to or from another register
1959 in AREGS or one in DREGS. They can also be assigned the constant 0. */
1960 if (x_class
== AREGS
)
1961 return class == DREGS
|| class == AREGS
? NO_REGS
: DREGS
;
1965 if (x
!= const0_rtx
&& x_class
!= DREGS
)
1971 /* CCREGS can only be moved from/to DREGS. */
1972 if (class == CCREGS
&& x_class
!= DREGS
)
1974 if (x_class
== CCREGS
&& class != DREGS
)
1977 /* All registers other than AREGS can load arbitrary constants. The only
1978 case that remains is MEM. */
1980 if (! reg_class_subset_p (class, default_class
))
1981 return default_class
;
1985 /* Implement TARGET_HANDLE_OPTION. */
1988 bfin_handle_option (size_t code
, const char *arg
, int value
)
1992 case OPT_mshared_library_id_
:
1993 if (value
> MAX_LIBRARY_ID
)
1994 error ("-mshared-library-id=%s is not between 0 and %d",
1995 arg
, MAX_LIBRARY_ID
);
1996 bfin_lib_id_given
= 1;
2004 static struct machine_function
*
2005 bfin_init_machine_status (void)
2007 struct machine_function
*f
;
2009 f
= ggc_alloc_cleared (sizeof (struct machine_function
));
2014 /* Implement the macro OVERRIDE_OPTIONS. */
2017 override_options (void)
2019 if (TARGET_OMIT_LEAF_FRAME_POINTER
)
2020 flag_omit_frame_pointer
= 1;
2022 /* Library identification */
2023 if (bfin_lib_id_given
&& ! TARGET_ID_SHARED_LIBRARY
)
2024 error ("-mshared-library-id= specified without -mid-shared-library");
2026 if (TARGET_ID_SHARED_LIBRARY
&& flag_pic
== 0)
2029 if (TARGET_ID_SHARED_LIBRARY
&& TARGET_FDPIC
)
2030 error ("ID shared libraries and FD-PIC mode can't be used together.");
2032 /* Don't allow the user to specify -mid-shared-library and -msep-data
2033 together, as it makes little sense from a user's point of view... */
2034 if (TARGET_SEP_DATA
&& TARGET_ID_SHARED_LIBRARY
)
2035 error ("cannot specify both -msep-data and -mid-shared-library");
2036 /* ... internally, however, it's nearly the same. */
2037 if (TARGET_SEP_DATA
)
2038 target_flags
|= MASK_ID_SHARED_LIBRARY
| MASK_LEAF_ID_SHARED_LIBRARY
;
2040 /* There is no single unaligned SI op for PIC code. Sometimes we
2041 need to use ".4byte" and sometimes we need to use ".picptr".
2042 See bfin_assemble_integer for details. */
2044 targetm
.asm_out
.unaligned_op
.si
= 0;
2046 /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
2047 since we don't support it and it'll just break. */
2048 if (flag_pic
&& !TARGET_FDPIC
&& !TARGET_ID_SHARED_LIBRARY
)
2051 flag_schedule_insns
= 0;
2053 init_machine_status
= bfin_init_machine_status
;
2056 /* Return the destination address of BRANCH.
2057 We need to use this instead of get_attr_length, because the
2058 cbranch_with_nops pattern conservatively sets its length to 6, and
2059 we still prefer to use shorter sequences. */
2062 branch_dest (rtx branch
)
2066 rtx pat
= PATTERN (branch
);
2067 if (GET_CODE (pat
) == PARALLEL
)
2068 pat
= XVECEXP (pat
, 0, 0);
2069 dest
= SET_SRC (pat
);
2070 if (GET_CODE (dest
) == IF_THEN_ELSE
)
2071 dest
= XEXP (dest
, 1);
2072 dest
= XEXP (dest
, 0);
2073 dest_uid
= INSN_UID (dest
);
2074 return INSN_ADDRESSES (dest_uid
);
2077 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2078 it's a branch that's predicted taken. */
2081 cbranch_predicted_taken_p (rtx insn
)
2083 rtx x
= find_reg_note (insn
, REG_BR_PROB
, 0);
2087 int pred_val
= INTVAL (XEXP (x
, 0));
2089 return pred_val
>= REG_BR_PROB_BASE
/ 2;
/* Templates for use by asm_conditional_branch.  Indexed first by
   [predicted-taken << 1 | BRF/BRT], then by length class (0 = short,
   1 = jump.s, 2 = jump.l).  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);", "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
2104 /* Output INSN, which is a conditional branch instruction with operands
2107 We deal with the various forms of conditional branches that can be generated
2108 by bfin_reorg to prevent the hardware from doing speculative loads, by
2109 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2110 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2111 Either of these is only necessary if the branch is short, otherwise the
2112 template we use ends in an unconditional jump which flushes the pipeline
2116 asm_conditional_branch (rtx insn
, rtx
*operands
, int n_nops
, int predict_taken
)
2118 int offset
= branch_dest (insn
) - INSN_ADDRESSES (INSN_UID (insn
));
2119 /* Note : offset for instructions like if cc jmp; jump.[sl] offset
2120 is to be taken from start of if cc rather than jump.
2121 Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
2123 int len
= (offset
>= -1024 && offset
<= 1022 ? 0
2124 : offset
>= -4094 && offset
<= 4096 ? 1
2126 int bp
= predict_taken
&& len
== 0 ? 1 : cbranch_predicted_taken_p (insn
);
2127 int idx
= (bp
<< 1) | (GET_CODE (operands
[0]) == EQ
? BRF
: BRT
);
2128 output_asm_insn (ccbranch_templates
[idx
][len
], operands
);
2129 gcc_assert (n_nops
== 0 || !bp
);
2131 while (n_nops
-- > 0)
2132 output_asm_insn ("nop;", NULL
);
2135 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2136 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2139 bfin_gen_compare (rtx cmp
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2141 enum rtx_code code1
, code2
;
2142 rtx op0
= bfin_compare_op0
, op1
= bfin_compare_op1
;
2143 rtx tem
= bfin_cc_rtx
;
2144 enum rtx_code code
= GET_CODE (cmp
);
2146 /* If we have a BImode input, then we already have a compare result, and
2147 do not need to emit another comparison. */
2148 if (GET_MODE (op0
) == BImode
)
2150 gcc_assert ((code
== NE
|| code
== EQ
) && op1
== const0_rtx
);
2151 tem
= op0
, code2
= code
;
2156 /* bfin has these conditions */
2166 code1
= reverse_condition (code
);
2170 emit_insn (gen_rtx_SET (BImode
, tem
,
2171 gen_rtx_fmt_ee (code1
, BImode
, op0
, op1
)));
2174 return gen_rtx_fmt_ee (code2
, BImode
, tem
, CONST0_RTX (BImode
));
2177 /* Return nonzero iff C has exactly one bit set if it is interpreted
2178 as a 32 bit constant. */
2181 log2constp (unsigned HOST_WIDE_INT c
)
2184 return c
!= 0 && (c
& (c
-1)) == 0;
2187 /* Returns the number of consecutive least significant zeros in the binary
2188 representation of *V.
2189 We modify *V to contain the original value arithmetically shifted right by
2190 the number of zeroes. */
2193 shiftr_zero (HOST_WIDE_INT
*v
)
2195 unsigned HOST_WIDE_INT tmp
= *v
;
2196 unsigned HOST_WIDE_INT sgn
;
2202 sgn
= tmp
& ((unsigned HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
- 1));
2203 while ((tmp
& 0x1) == 0 && n
<= 32)
2205 tmp
= (tmp
>> 1) | sgn
;
2212 /* After reload, split the load of an immediate constant. OPERANDS are the
2213 operands of the movsi_insn pattern which we are splitting. We return
2214 nonzero if we emitted a sequence to load the constant, zero if we emitted
2215 nothing because we want to use the splitter's default sequence. */
2218 split_load_immediate (rtx operands
[])
2220 HOST_WIDE_INT val
= INTVAL (operands
[1]);
2222 HOST_WIDE_INT shifted
= val
;
2223 HOST_WIDE_INT shifted_compl
= ~val
;
2224 int num_zero
= shiftr_zero (&shifted
);
2225 int num_compl_zero
= shiftr_zero (&shifted_compl
);
2226 unsigned int regno
= REGNO (operands
[0]);
2227 enum reg_class class1
= REGNO_REG_CLASS (regno
);
2229 /* This case takes care of single-bit set/clear constants, which we could
2230 also implement with BITSET/BITCLR. */
2232 && shifted
>= -32768 && shifted
< 65536
2233 && (D_REGNO_P (regno
)
2234 || (regno
>= REG_P0
&& regno
<= REG_P7
&& num_zero
<= 2)))
2236 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted
)));
2237 emit_insn (gen_ashlsi3 (operands
[0], operands
[0], GEN_INT (num_zero
)));
2242 tmp
|= -(tmp
& 0x8000);
2244 /* If high word has one bit set or clear, try to use a bit operation. */
2245 if (D_REGNO_P (regno
))
2247 if (log2constp (val
& 0xFFFF0000))
2249 emit_insn (gen_movsi (operands
[0], GEN_INT (val
& 0xFFFF)));
2250 emit_insn (gen_iorsi3 (operands
[0], operands
[0], GEN_INT (val
& 0xFFFF0000)));
2253 else if (log2constp (val
| 0xFFFF) && (val
& 0x8000) != 0)
2255 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2256 emit_insn (gen_andsi3 (operands
[0], operands
[0], GEN_INT (val
| 0xFFFF)));
2260 if (D_REGNO_P (regno
))
2262 if (CONST_7BIT_IMM_P (tmp
))
2264 emit_insn (gen_movsi (operands
[0], GEN_INT (tmp
)));
2265 emit_insn (gen_movstricthi_high (operands
[0], GEN_INT (val
& -65536)));
2269 if ((val
& 0xFFFF0000) == 0)
2271 emit_insn (gen_movsi (operands
[0], const0_rtx
));
2272 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2276 if ((val
& 0xFFFF0000) == 0xFFFF0000)
2278 emit_insn (gen_movsi (operands
[0], constm1_rtx
));
2279 emit_insn (gen_movsi_low (operands
[0], operands
[0], operands
[1]));
2284 /* Need DREGs for the remaining case. */
2289 && num_compl_zero
&& CONST_7BIT_IMM_P (shifted_compl
))
2291 /* If optimizing for size, generate a sequence that has more instructions
2293 emit_insn (gen_movsi (operands
[0], GEN_INT (shifted_compl
)));
2294 emit_insn (gen_ashlsi3 (operands
[0], operands
[0],
2295 GEN_INT (num_compl_zero
)));
2296 emit_insn (gen_one_cmplsi2 (operands
[0], operands
[0]));
2302 /* Return true if the legitimate memory address for a memory operand of mode
2303 MODE. Return false if not. */
2306 bfin_valid_add (enum machine_mode mode
, HOST_WIDE_INT value
)
2308 unsigned HOST_WIDE_INT v
= value
> 0 ? value
: -value
;
2309 int sz
= GET_MODE_SIZE (mode
);
2310 int shift
= sz
== 1 ? 0 : sz
== 2 ? 1 : 2;
2311 /* The usual offsettable_memref machinery doesn't work so well for this
2312 port, so we deal with the problem here. */
2313 unsigned HOST_WIDE_INT mask
= sz
== 8 ? 0x7ffe : 0x7fff;
2314 return (v
& ~(mask
<< shift
)) == 0;
2318 bfin_valid_reg_p (unsigned int regno
, int strict
, enum machine_mode mode
,
2319 enum rtx_code outer_code
)
2322 return REGNO_OK_FOR_BASE_STRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2324 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno
, mode
, outer_code
, SCRATCH
);
2328 bfin_legitimate_address_p (enum machine_mode mode
, rtx x
, int strict
)
2330 switch (GET_CODE (x
)) {
2332 if (bfin_valid_reg_p (REGNO (x
), strict
, mode
, MEM
))
2336 if (REG_P (XEXP (x
, 0))
2337 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PLUS
)
2338 && ((GET_CODE (XEXP (x
, 1)) == UNSPEC
&& mode
== SImode
)
2339 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
2340 && bfin_valid_add (mode
, INTVAL (XEXP (x
, 1))))))
2345 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2346 && REG_P (XEXP (x
, 0))
2347 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, POST_INC
))
2350 if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode
)
2351 && XEXP (x
, 0) == stack_pointer_rtx
2352 && REG_P (XEXP (x
, 0))
2353 && bfin_valid_reg_p (REGNO (XEXP (x
, 0)), strict
, mode
, PRE_DEC
))
2362 /* Decide whether we can force certain constants to memory. If we
2363 decide we can't, the caller should be able to cope with it in
2367 bfin_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED
)
2369 /* We have only one class of non-legitimate constants, and our movsi
2370 expander knows how to handle them. Dropping these constants into the
2371 data section would only shift the problem - we'd still get relocs
2372 outside the object, in the data section rather than the text section. */
2376 /* Ensure that for any constant of the form symbol + offset, the offset
2377 remains within the object. Any other constants are ok.
2378 This ensures that flat binaries never have to deal with relocations
2379 crossing section boundaries. */
2382 bfin_legitimate_constant_p (rtx x
)
2385 HOST_WIDE_INT offset
;
2387 if (GET_CODE (x
) != CONST
)
2391 gcc_assert (GET_CODE (x
) == PLUS
);
2395 if (GET_CODE (sym
) != SYMBOL_REF
2396 || GET_CODE (x
) != CONST_INT
)
2398 offset
= INTVAL (x
);
2400 if (SYMBOL_REF_DECL (sym
) == 0)
2403 || offset
>= int_size_in_bytes (TREE_TYPE (SYMBOL_REF_DECL (sym
))))
/* Compute rtx costs (TARGET_RTX_COSTS hook).  NOTE(review): this span is
   extraction-damaged — case labels, braces and several assigned cost
   constants were dropped, so the text below is preserved verbatim rather
   than reconstructed; restore from the upstream bfin.c before building.  */
2410 bfin_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
2412 int cost2
= COSTS_N_INSNS (1);
/* CONST_INT case: a 7-bit immediate is free in most contexts.  */
2417 if (outer_code
== SET
|| outer_code
== PLUS
)
2418 *total
= CONST_7BIT_IMM_P (INTVAL (x
)) ? 0 : cost2
;
2419 else if (outer_code
== AND
)
2420 *total
= log2constp (~INTVAL (x
)) ? 0 : cost2
;
2421 else if (outer_code
== LE
|| outer_code
== LT
|| outer_code
== EQ
)
2422 *total
= (INTVAL (x
) >= -4 && INTVAL (x
) <= 3) ? 0 : cost2
;
2423 else if (outer_code
== LEU
|| outer_code
== LTU
)
2424 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 7) ? 0 : cost2
;
2425 else if (outer_code
== MULT
)
2426 *total
= (INTVAL (x
) == 2 || INTVAL (x
) == 4) ? 0 : cost2
;
2427 else if (outer_code
== ASHIFT
&& (INTVAL (x
) == 1 || INTVAL (x
) == 2))
2429 else if (outer_code
== ASHIFT
|| outer_code
== ASHIFTRT
2430 || outer_code
== LSHIFTRT
)
2431 *total
= (INTVAL (x
) >= 0 && INTVAL (x
) <= 31) ? 0 : cost2
;
2432 else if (outer_code
== IOR
|| outer_code
== XOR
)
2433 *total
= (INTVAL (x
) & (INTVAL (x
) - 1)) == 0 ? 0 : cost2
;
/* Symbolic constants (CONST/LABEL_REF/SYMBOL_REF/CONST_DOUBLE) need two
   insns to materialize — presumably; case labels missing here.  */
2442 *total
= COSTS_N_INSNS (2);
/* PLUS case: scaled-index (reg + reg*2 or reg*4) addressing is cheap.  */
2446 if (GET_MODE (x
) == Pmode
)
2448 if (GET_CODE (XEXP (x
, 0)) == MULT
2449 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
)
2451 HOST_WIDE_INT val
= INTVAL (XEXP (XEXP (x
, 0), 1));
2452 if (val
== 2 || val
== 4)
2455 *total
+= rtx_cost (XEXP (XEXP (x
, 0), 0), outer_code
);
2456 *total
+= rtx_cost (XEXP (x
, 1), outer_code
);
/* DImode arithmetic/logic is more expensive (multi-insn).  */
2468 if (GET_MODE (x
) == DImode
)
2475 if (GET_MODE (x
) == DImode
)
/* MULT: a word-sized multiply costs 3 insns worth.  */
2480 if (GET_MODE_SIZE (GET_MODE (x
)) <= UNITS_PER_WORD
)
2481 *total
= COSTS_N_INSNS (3);
/* Unsigned divide/modulo is done in a library-style loop: very costly.  */
2486 *total
= COSTS_N_INSNS (32);
2491 if (outer_code
== SET
)
2501 bfin_internal_label (FILE *stream
, const char *prefix
, unsigned long num
)
2503 fprintf (stream
, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX
, prefix
, num
);
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;
2511 push_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2513 int lastdreg
= 8, lastpreg
= 6;
2516 first_preg_to_save
= lastpreg
;
2517 first_dreg_to_save
= lastdreg
;
2518 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0) - 1; i
++)
2520 rtx t
= XVECEXP (op
, 0, i
);
2524 if (GET_CODE (t
) != SET
)
2528 dest
= SET_DEST (t
);
2529 if (GET_CODE (dest
) != MEM
|| ! REG_P (src
))
2531 dest
= XEXP (dest
, 0);
2532 if (GET_CODE (dest
) != PLUS
2533 || ! REG_P (XEXP (dest
, 0))
2534 || REGNO (XEXP (dest
, 0)) != REG_SP
2535 || GET_CODE (XEXP (dest
, 1)) != CONST_INT
2536 || INTVAL (XEXP (dest
, 1)) != -i
* 4)
2539 regno
= REGNO (src
);
2542 if (D_REGNO_P (regno
))
2545 first_dreg_to_save
= lastdreg
= regno
- REG_R0
;
2547 else if (regno
>= REG_P0
&& regno
<= REG_P7
)
2550 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
2560 if (regno
>= REG_P0
&& regno
<= REG_P7
)
2563 first_preg_to_save
= lastpreg
= regno
- REG_P0
;
2565 else if (regno
!= REG_R0
+ lastdreg
+ 1)
2570 else if (group
== 2)
2572 if (regno
!= REG_P0
+ lastpreg
+ 1)
2581 pop_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2583 int lastdreg
= 8, lastpreg
= 6;
2586 for (i
= 1, group
= 0; i
< XVECLEN (op
, 0); i
++)
2588 rtx t
= XVECEXP (op
, 0, i
);
2592 if (GET_CODE (t
) != SET
)
2596 dest
= SET_DEST (t
);
2597 if (GET_CODE (src
) != MEM
|| ! REG_P (dest
))
2599 src
= XEXP (src
, 0);
2603 if (! REG_P (src
) || REGNO (src
) != REG_SP
)
2606 else if (GET_CODE (src
) != PLUS
2607 || ! REG_P (XEXP (src
, 0))
2608 || REGNO (XEXP (src
, 0)) != REG_SP
2609 || GET_CODE (XEXP (src
, 1)) != CONST_INT
2610 || INTVAL (XEXP (src
, 1)) != (i
- 1) * 4)
2613 regno
= REGNO (dest
);
2616 if (regno
== REG_R7
)
2621 else if (regno
!= REG_P0
+ lastpreg
- 1)
2626 else if (group
== 1)
2628 if (regno
!= REG_R0
+ lastdreg
- 1)
2634 first_dreg_to_save
= lastdreg
;
2635 first_preg_to_save
= lastpreg
;
2639 /* Emit assembly code for one multi-register push described by INSN, with
2640 operands in OPERANDS. */
2643 output_push_multiple (rtx insn
, rtx
*operands
)
2648 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2649 ok
= push_multiple_operation (PATTERN (insn
), VOIDmode
);
2652 if (first_dreg_to_save
== 8)
2653 sprintf (buf
, "[--sp] = ( p5:%d );\n", first_preg_to_save
);
2654 else if (first_preg_to_save
== 6)
2655 sprintf (buf
, "[--sp] = ( r7:%d );\n", first_dreg_to_save
);
2657 sprintf (buf
, "[--sp] = ( r7:%d, p5:%d );\n",
2658 first_dreg_to_save
, first_preg_to_save
);
2660 output_asm_insn (buf
, operands
);
2663 /* Emit assembly code for one multi-register pop described by INSN, with
2664 operands in OPERANDS. */
2667 output_pop_multiple (rtx insn
, rtx
*operands
)
2672 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2673 ok
= pop_multiple_operation (PATTERN (insn
), VOIDmode
);
2676 if (first_dreg_to_save
== 8)
2677 sprintf (buf
, "( p5:%d ) = [sp++];\n", first_preg_to_save
);
2678 else if (first_preg_to_save
== 6)
2679 sprintf (buf
, "( r7:%d ) = [sp++];\n", first_dreg_to_save
);
2681 sprintf (buf
, "( r7:%d, p5:%d ) = [sp++];\n",
2682 first_dreg_to_save
, first_preg_to_save
);
2684 output_asm_insn (buf
, operands
);
2687 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2690 single_move_for_movmem (rtx dst
, rtx src
, enum machine_mode mode
, HOST_WIDE_INT offset
)
2692 rtx scratch
= gen_reg_rtx (mode
);
2695 srcmem
= adjust_address_nv (src
, mode
, offset
);
2696 dstmem
= adjust_address_nv (dst
, mode
, offset
);
2697 emit_move_insn (scratch
, srcmem
);
2698 emit_move_insn (dstmem
, scratch
);
2701 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2702 alignment ALIGN_EXP. Return true if successful, false if we should fall
2703 back on a different method. */
2706 bfin_expand_movmem (rtx dst
, rtx src
, rtx count_exp
, rtx align_exp
)
2708 rtx srcreg
, destreg
, countreg
;
2709 HOST_WIDE_INT align
= 0;
2710 unsigned HOST_WIDE_INT count
= 0;
2712 if (GET_CODE (align_exp
) == CONST_INT
)
2713 align
= INTVAL (align_exp
);
2714 if (GET_CODE (count_exp
) == CONST_INT
)
2716 count
= INTVAL (count_exp
);
2718 if (!TARGET_INLINE_ALL_STRINGOPS
&& count
> 64)
2723 /* If optimizing for size, only do single copies inline. */
2726 if (count
== 2 && align
< 2)
2728 if (count
== 4 && align
< 4)
2730 if (count
!= 1 && count
!= 2 && count
!= 4)
2733 if (align
< 2 && count
!= 1)
2736 destreg
= copy_to_mode_reg (Pmode
, XEXP (dst
, 0));
2737 if (destreg
!= XEXP (dst
, 0))
2738 dst
= replace_equiv_address_nv (dst
, destreg
);
2739 srcreg
= copy_to_mode_reg (Pmode
, XEXP (src
, 0));
2740 if (srcreg
!= XEXP (src
, 0))
2741 src
= replace_equiv_address_nv (src
, srcreg
);
2743 if (count
!= 0 && align
>= 2)
2745 unsigned HOST_WIDE_INT offset
= 0;
2749 if ((count
& ~3) == 4)
2751 single_move_for_movmem (dst
, src
, SImode
, offset
);
2754 else if (count
& ~3)
2756 HOST_WIDE_INT new_count
= ((count
>> 2) & 0x3fffffff) - 1;
2757 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
2759 emit_insn (gen_rep_movsi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
2763 single_move_for_movmem (dst
, src
, HImode
, offset
);
2769 if ((count
& ~1) == 2)
2771 single_move_for_movmem (dst
, src
, HImode
, offset
);
2774 else if (count
& ~1)
2776 HOST_WIDE_INT new_count
= ((count
>> 1) & 0x7fffffff) - 1;
2777 countreg
= copy_to_mode_reg (Pmode
, GEN_INT (new_count
));
2779 emit_insn (gen_rep_movhi (destreg
, srcreg
, countreg
, destreg
, srcreg
));
2784 single_move_for_movmem (dst
, src
, QImode
, offset
);
2793 bfin_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
2795 enum attr_type insn_type
, dep_insn_type
;
2796 int dep_insn_code_number
;
2798 /* Anti and output dependencies have zero cost. */
2799 if (REG_NOTE_KIND (link
) != 0)
2802 dep_insn_code_number
= recog_memoized (dep_insn
);
2804 /* If we can't recognize the insns, we can't really do anything. */
2805 if (dep_insn_code_number
< 0 || recog_memoized (insn
) < 0)
2808 insn_type
= get_attr_type (insn
);
2809 dep_insn_type
= get_attr_type (dep_insn
);
2811 if (dep_insn_type
== TYPE_MOVE
|| dep_insn_type
== TYPE_MCLD
)
2813 rtx pat
= PATTERN (dep_insn
);
2814 rtx dest
= SET_DEST (pat
);
2815 rtx src
= SET_SRC (pat
);
2816 if (! ADDRESS_REGNO_P (REGNO (dest
)) || ! D_REGNO_P (REGNO (src
)))
2818 return cost
+ (dep_insn_type
== TYPE_MOVE
? 4 : 3);
2825 /* Increment the counter for the number of loop instructions in the
2826 current function. */
2829 bfin_hardware_loop (void)
2831 cfun
->machine
->has_hardware_loops
++;
2834 /* Maximum loop nesting depth. */
2835 #define MAX_LOOP_DEPTH 2
2837 /* Maximum size of a loop. */
2838 #define MAX_LOOP_LENGTH 2042
2840 /* We need to keep a vector of loops */
2841 typedef struct loop_info
*loop_info
;
2842 DEF_VEC_P (loop_info
);
2843 DEF_VEC_ALLOC_P (loop_info
,heap
);
2845 /* Information about a loop we have found (or are in the process of
2847 struct loop_info
GTY (())
2849 /* loop number, for dumps */
2852 /* Predecessor block of the loop. This is the one that falls into
2853 the loop and contains the initialization instruction. */
2854 basic_block predecessor
;
2856 /* First block in the loop. This is the one branched to by the loop_end
2860 /* Last block in the loop (the one with the loop_end insn). */
2863 /* The successor block of the loop. This is the one the loop_end insn
2865 basic_block successor
;
2867 /* The last instruction in the tail. */
2870 /* The loop_end insn. */
2873 /* The iteration register. */
2876 /* The new initialization insn. */
2879 /* The new initialization instruction. */
2882 /* The new label placed at the beginning of the loop. */
2885 /* The new label placed at the end of the loop. */
2888 /* The length of the loop. */
2891 /* The nesting depth of the loop. */
2894 /* Nonzero if we can't optimize this loop. */
2897 /* True if we have visited this loop. */
2900 /* True if this loop body clobbers any of LC0, LT0, or LB0. */
2903 /* True if this loop body clobbers any of LC1, LT1, or LB1. */
2906 /* Next loop in the graph. */
2907 struct loop_info
*next
;
2909 /* Immediate outer loop of this loop. */
2910 struct loop_info
*outer
;
2912 /* Vector of blocks only within the loop, including those within
2914 VEC (basic_block
,heap
) *blocks
;
2916 /* Same information in a bitmap. */
2917 bitmap block_bitmap
;
2919 /* Vector of inner loops within this loop */
2920 VEC (loop_info
,heap
) *loops
;
2924 bfin_dump_loops (loop_info loops
)
2928 for (loop
= loops
; loop
; loop
= loop
->next
)
2934 fprintf (dump_file
, ";; loop %d: ", loop
->loop_no
);
2936 fprintf (dump_file
, "(bad) ");
2937 fprintf (dump_file
, "{head:%d, depth:%d}", loop
->head
->index
, loop
->depth
);
2939 fprintf (dump_file
, " blocks: [ ");
2940 for (ix
= 0; VEC_iterate (basic_block
, loop
->blocks
, ix
, b
); ix
++)
2941 fprintf (dump_file
, "%d ", b
->index
);
2942 fprintf (dump_file
, "] ");
2944 fprintf (dump_file
, " inner loops: [ ");
2945 for (ix
= 0; VEC_iterate (loop_info
, loop
->loops
, ix
, i
); ix
++)
2946 fprintf (dump_file
, "%d ", i
->loop_no
);
2947 fprintf (dump_file
, "]\n");
2949 fprintf (dump_file
, "\n");
2952 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
2953 BB. Return true, if we find it. */
2956 bfin_bb_in_loop (loop_info loop
, basic_block bb
)
2958 return bitmap_bit_p (loop
->block_bitmap
, bb
->index
);
2961 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
2962 REG. Return true, if we find any. Don't count the loop's loop_end
2963 insn if it matches LOOP_END. */
2966 bfin_scan_loop (loop_info loop
, rtx reg
, rtx loop_end
)
2971 for (ix
= 0; VEC_iterate (basic_block
, loop
->blocks
, ix
, bb
); ix
++)
2975 for (insn
= BB_HEAD (bb
);
2976 insn
!= NEXT_INSN (BB_END (bb
));
2977 insn
= NEXT_INSN (insn
))
2981 if (insn
== loop_end
)
2983 if (reg_mentioned_p (reg
, PATTERN (insn
)))
2990 /* Optimize LOOP. */
2993 bfin_optimize_loop (loop_info loop
)
2997 rtx insn
, init_insn
, last_insn
, nop_insn
;
2998 rtx loop_init
, start_label
, end_label
;
2999 rtx reg_lc0
, reg_lc1
, reg_lt0
, reg_lt1
, reg_lb0
, reg_lb1
;
3001 rtx lc_reg
, lt_reg
, lb_reg
;
3005 int inner_depth
= 0;
3015 fprintf (dump_file
, ";; loop %d bad when found\n", loop
->loop_no
);
3019 /* Every loop contains in its list of inner loops every loop nested inside
3020 it, even if there are intermediate loops. This works because we're doing
3021 a depth-first search here and never visit a loop more than once. */
3022 for (ix
= 0; VEC_iterate (loop_info
, loop
->loops
, ix
, inner
); ix
++)
3024 bfin_optimize_loop (inner
);
3026 if (!inner
->bad
&& inner_depth
< inner
->depth
)
3028 inner_depth
= inner
->depth
;
3030 loop
->clobber_loop0
|= inner
->clobber_loop0
;
3031 loop
->clobber_loop1
|= inner
->clobber_loop1
;
3035 loop
->depth
= inner_depth
+ 1;
3036 if (loop
->depth
> MAX_LOOP_DEPTH
)
3039 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
3043 /* Get the loop iteration register. */
3044 iter_reg
= loop
->iter_reg
;
3046 if (!DPREG_P (iter_reg
))
3049 fprintf (dump_file
, ";; loop %d iteration count NOT in PREG or DREG\n",
3054 /* Check if start_label appears before loop_end and calculate the
3055 offset between them. We calculate the length of instructions
3058 for (insn
= loop
->start_label
;
3059 insn
&& insn
!= loop
->loop_end
;
3060 insn
= NEXT_INSN (insn
))
3062 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
3064 if (TARGET_CSYNC_ANOMALY
)
3066 else if (TARGET_SPECLD_ANOMALY
)
3069 else if (LABEL_P (insn
))
3071 if (TARGET_CSYNC_ANOMALY
)
3076 length
+= get_attr_length (insn
);
3082 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
3087 loop
->length
= length
;
3088 if (loop
->length
> MAX_LOOP_LENGTH
)
3091 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
3095 /* Scan all the blocks to make sure they don't use iter_reg. */
3096 if (bfin_scan_loop (loop
, iter_reg
, loop
->loop_end
))
3099 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3103 /* Scan all the insns to see if the loop body clobber
3104 any hardware loop registers. */
3106 reg_lc0
= gen_rtx_REG (SImode
, REG_LC0
);
3107 reg_lc1
= gen_rtx_REG (SImode
, REG_LC1
);
3108 reg_lt0
= gen_rtx_REG (SImode
, REG_LT0
);
3109 reg_lt1
= gen_rtx_REG (SImode
, REG_LT1
);
3110 reg_lb0
= gen_rtx_REG (SImode
, REG_LB0
);
3111 reg_lb1
= gen_rtx_REG (SImode
, REG_LB1
);
3113 for (ix
= 0; VEC_iterate (basic_block
, loop
->blocks
, ix
, bb
); ix
++)
3117 for (insn
= BB_HEAD (bb
);
3118 insn
!= NEXT_INSN (BB_END (bb
));
3119 insn
= NEXT_INSN (insn
))
3124 if (reg_set_p (reg_lc0
, insn
)
3125 || reg_set_p (reg_lt0
, insn
)
3126 || reg_set_p (reg_lb0
, insn
))
3127 loop
->clobber_loop0
= 1;
3129 if (reg_set_p (reg_lc1
, insn
)
3130 || reg_set_p (reg_lt1
, insn
)
3131 || reg_set_p (reg_lb1
, insn
))
3132 loop
->clobber_loop1
|= 1;
3136 if ((loop
->clobber_loop0
&& loop
->clobber_loop1
)
3137 || (loop
->depth
== MAX_LOOP_DEPTH
&& loop
->clobber_loop0
))
3139 loop
->depth
= MAX_LOOP_DEPTH
+ 1;
3141 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3146 /* There should be an instruction before the loop_end instruction
3147 in the same basic block. And the instruction must not be
3149 - CONDITIONAL BRANCH
3153 - Returns (RTS, RTN, etc.) */
3156 last_insn
= PREV_INSN (loop
->loop_end
);
3160 for (; last_insn
!= PREV_INSN (BB_HEAD (bb
));
3161 last_insn
= PREV_INSN (last_insn
))
3162 if (INSN_P (last_insn
))
3165 if (last_insn
!= PREV_INSN (BB_HEAD (bb
)))
3168 if (single_pred_p (bb
)
3169 && single_pred (bb
) != ENTRY_BLOCK_PTR
)
3171 bb
= single_pred (bb
);
3172 last_insn
= BB_END (bb
);
3177 last_insn
= NULL_RTX
;
3185 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3190 if (JUMP_P (last_insn
))
3192 loop_info inner
= bb
->aux
;
3194 && inner
->outer
== loop
3195 && inner
->loop_end
== last_insn
3196 && inner
->depth
== 1)
3197 /* This jump_insn is the exact loop_end of an inner loop
3198 and to be optimized away. So use the inner's last_insn. */
3199 last_insn
= inner
->last_insn
;
3203 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3208 else if (CALL_P (last_insn
)
3209 || get_attr_type (last_insn
) == TYPE_SYNC
3210 || recog_memoized (last_insn
) == CODE_FOR_return_internal
)
3213 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3218 if (GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3219 || asm_noperands (PATTERN (last_insn
)) >= 0
3220 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
)
3222 nop_insn
= emit_insn_after (gen_nop (), last_insn
);
3223 last_insn
= nop_insn
;
3226 loop
->last_insn
= last_insn
;
3228 /* The loop is good for replacement. */
3229 start_label
= loop
->start_label
;
3230 end_label
= gen_label_rtx ();
3231 iter_reg
= loop
->iter_reg
;
3233 if (loop
->depth
== 1 && !loop
->clobber_loop1
)
3238 loop
->clobber_loop1
= 1;
3245 loop
->clobber_loop0
= 1;
3248 /* If iter_reg is a DREG, we need generate an instruction to load
3249 the loop count into LC register. */
3250 if (D_REGNO_P (REGNO (iter_reg
)))
3252 init_insn
= gen_movsi (lc_reg
, iter_reg
);
3253 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3257 else if (P_REGNO_P (REGNO (iter_reg
)))
3259 init_insn
= NULL_RTX
;
3260 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3267 loop
->init
= init_insn
;
3268 loop
->end_label
= end_label
;
3269 loop
->loop_init
= loop_init
;
3273 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3275 print_rtl_single (dump_file
, loop
->loop_init
);
3276 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3278 print_rtl_single (dump_file
, loop
->loop_end
);
3283 if (loop
->init
!= NULL_RTX
)
3284 emit_insn (loop
->init
);
3285 emit_insn(loop
->loop_init
);
3286 emit_label (loop
->start_label
);
3291 emit_insn_after (seq
, BB_END (loop
->predecessor
));
3292 delete_insn (loop
->loop_end
);
3294 /* Insert the loop end label before the last instruction of the loop. */
3295 emit_label_before (loop
->end_label
, loop
->last_insn
);
3302 fprintf (dump_file
, ";; loop %d is bad\n", loop
->loop_no
);
3306 if (DPREG_P (loop
->iter_reg
))
3308 /* If loop->iter_reg is a DREG or PREG, we can split it here
3309 without scratch register. */
3312 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3317 emit_insn_before (gen_cmpsi (loop
->iter_reg
, const0_rtx
),
3320 insn
= emit_jump_insn_before (gen_bne (loop
->start_label
),
3323 JUMP_LABEL (insn
) = loop
->start_label
;
3324 LABEL_NUSES (loop
->start_label
)++;
3325 delete_insn (loop
->loop_end
);
3329 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
3330 a newly set up structure describing the loop, it is this function's
3331 responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
3332 loop_end insn and its enclosing basic block. */
3335 bfin_discover_loop (loop_info loop
, basic_block tail_bb
, rtx tail_insn
)
3339 VEC (basic_block
,heap
) *works
= VEC_alloc (basic_block
,heap
,20);
3341 loop
->tail
= tail_bb
;
3342 loop
->head
= BRANCH_EDGE (tail_bb
)->dest
;
3343 loop
->successor
= FALLTHRU_EDGE (tail_bb
)->dest
;
3344 loop
->predecessor
= NULL
;
3345 loop
->loop_end
= tail_insn
;
3346 loop
->last_insn
= NULL_RTX
;
3347 loop
->iter_reg
= SET_DEST (XVECEXP (PATTERN (tail_insn
), 0, 1));
3348 loop
->depth
= loop
->length
= 0;
3350 loop
->clobber_loop0
= loop
->clobber_loop1
= 0;
3354 loop
->init
= loop
->loop_init
= NULL_RTX
;
3355 loop
->start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn
), 0, 0)), 1), 0);
3356 loop
->end_label
= NULL_RTX
;
3359 VEC_safe_push (basic_block
, heap
, works
, loop
->head
);
3361 while (VEC_iterate (basic_block
, works
, dwork
++, bb
))
3365 if (bb
== EXIT_BLOCK_PTR
)
3367 /* We've reached the exit block. The loop must be bad. */
3370 ";; Loop is bad - reached exit block while scanning\n");
3375 if (bitmap_bit_p (loop
->block_bitmap
, bb
->index
))
3378 /* We've not seen this block before. Add it to the loop's
3379 list and then add each successor to the work list. */
3381 VEC_safe_push (basic_block
, heap
, loop
->blocks
, bb
);
3382 bitmap_set_bit (loop
->block_bitmap
, bb
->index
);
3386 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3388 basic_block succ
= EDGE_SUCC (bb
, ei
.index
)->dest
;
3389 if (!REGNO_REG_SET_P (succ
->il
.rtl
->global_live_at_start
,
3390 REGNO (loop
->iter_reg
)))
3392 if (!VEC_space (basic_block
, works
, 1))
3396 VEC_block_remove (basic_block
, works
, 0, dwork
);
3400 VEC_reserve (basic_block
, heap
, works
, 1);
3402 VEC_quick_push (basic_block
, works
, succ
);
3409 /* Make sure we only have one entry point. */
3410 if (EDGE_COUNT (loop
->head
->preds
) == 2)
3412 loop
->predecessor
= EDGE_PRED (loop
->head
, 0)->src
;
3413 if (loop
->predecessor
== loop
->tail
)
3414 /* We wanted the other predecessor. */
3415 loop
->predecessor
= EDGE_PRED (loop
->head
, 1)->src
;
3417 /* We can only place a loop insn on a fall through edge of a
3418 single exit block. */
3419 if (EDGE_COUNT (loop
->predecessor
->succs
) != 1
3420 || !(EDGE_SUCC (loop
->predecessor
, 0)->flags
& EDGE_FALLTHRU
)
3421 /* If loop->predecessor is in loop, loop->head is not really
3422 the head of the loop. */
3423 || bfin_bb_in_loop (loop
, loop
->predecessor
))
3424 loop
->predecessor
= NULL
;
3427 if (loop
->predecessor
== NULL
)
3430 fprintf (dump_file
, ";; loop has bad predecessor\n");
3435 #ifdef ENABLE_CHECKING
3436 /* Make sure nothing jumps into this loop. This shouldn't happen as we
3437 wouldn't have generated the counted loop patterns in such a case.
3438 However, this test must be done after the test above to detect loops
3439 with invalid headers. */
3441 for (dwork
= 0; VEC_iterate (basic_block
, loop
->blocks
, dwork
, bb
); dwork
++)
3445 if (bb
== loop
->head
)
3447 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3449 basic_block pred
= EDGE_PRED (bb
, ei
.index
)->src
;
3450 if (!bfin_bb_in_loop (loop
, pred
))
3455 VEC_free (basic_block
, heap
, works
);
3459 bfin_reorg_loops (FILE *dump_file
)
3461 bitmap_obstack stack
;
3464 loop_info loops
= NULL
;
3468 bitmap_obstack_initialize (&stack
);
3470 /* Find all the possible loop tails. This means searching for every
3471 loop_end instruction. For each one found, create a loop_info
3472 structure and add the head block to the work list. */
3475 rtx tail
= BB_END (bb
);
3477 while (GET_CODE (tail
) == NOTE
)
3478 tail
= PREV_INSN (tail
);
3482 if (INSN_P (tail
) && recog_memoized (tail
) == CODE_FOR_loop_end
)
3484 /* A possible loop end */
3486 loop
= XNEW (struct loop_info
);
3489 loop
->loop_no
= nloops
++;
3490 loop
->blocks
= VEC_alloc (basic_block
, heap
, 20);
3491 loop
->block_bitmap
= BITMAP_ALLOC (&stack
);
3496 fprintf (dump_file
, ";; potential loop %d ending at\n",
3498 print_rtl_single (dump_file
, tail
);
3501 bfin_discover_loop (loop
, bb
, tail
);
3505 tmp_bitmap
= BITMAP_ALLOC (&stack
);
3506 /* Compute loop nestings. */
3507 for (loop
= loops
; loop
; loop
= loop
->next
)
3513 for (other
= loop
->next
; other
; other
= other
->next
)
3518 bitmap_and (tmp_bitmap
, other
->block_bitmap
, loop
->block_bitmap
);
3519 if (bitmap_empty_p (tmp_bitmap
))
3521 if (bitmap_equal_p (tmp_bitmap
, other
->block_bitmap
))
3523 other
->outer
= loop
;
3524 VEC_safe_push (loop_info
, heap
, loop
->loops
, other
);
3526 else if (bitmap_equal_p (tmp_bitmap
, loop
->block_bitmap
))
3528 loop
->outer
= other
;
3529 VEC_safe_push (loop_info
, heap
, other
->loops
, loop
);
3533 loop
->bad
= other
->bad
= 1;
3537 BITMAP_FREE (tmp_bitmap
);
3541 fprintf (dump_file
, ";; All loops found:\n\n");
3542 bfin_dump_loops (loops
);
3545 /* Now apply the optimizations. */
3546 for (loop
= loops
; loop
; loop
= loop
->next
)
3547 bfin_optimize_loop (loop
);
3551 fprintf (dump_file
, ";; After hardware loops optimization:\n\n");
3552 bfin_dump_loops (loops
);
3555 /* Free up the loop structures */
3560 VEC_free (loop_info
, heap
, loop
->loops
);
3561 VEC_free (basic_block
, heap
, loop
->blocks
);
3562 BITMAP_FREE (loop
->block_bitmap
);
3567 print_rtl (dump_file
, get_insns ());
3571 /* We use the machine specific reorg pass for emitting CSYNC instructions
3572 after conditional branches as needed.
3574 The Blackfin is unusual in that a code sequence like
3577 may speculatively perform the load even if the condition isn't true. This
3578 happens for a branch that is predicted not taken, because the pipeline
3579 isn't flushed or stalled, so the early stages of the following instructions,
3580 which perform the memory reference, are allowed to execute before the
3581 jump condition is evaluated.
3582 Therefore, we must insert additional instructions in all places where this
3583 could lead to incorrect behavior. The manual recommends CSYNC, while
3584 VDSP seems to use NOPs (even though its corresponding compiler option is
3587 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
3588 When optimizing for size, we turn the branch into a predicted taken one.
3589 This may be slower due to mispredicts, but saves code size. */
3594 rtx insn
, last_condjump
= NULL_RTX
;
3595 int cycles_since_jump
= INT_MAX
;
3597 /* Doloop optimization */
3598 if (cfun
->machine
->has_hardware_loops
)
3599 bfin_reorg_loops (dump_file
);
3601 if (! TARGET_SPECLD_ANOMALY
&& ! TARGET_CSYNC_ANOMALY
)
3604 /* First pass: find predicted-false branches; if something after them
3605 needs nops, insert them or change the branch to predict true. */
3606 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3610 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
3613 pat
= PATTERN (insn
);
3614 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
3615 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
3616 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
3621 if (any_condjump_p (insn
)
3622 && ! cbranch_predicted_taken_p (insn
))
3624 last_condjump
= insn
;
3625 cycles_since_jump
= 0;
3628 cycles_since_jump
= INT_MAX
;
3630 else if (INSN_P (insn
))
3632 enum attr_type type
= get_attr_type (insn
);
3633 int delay_needed
= 0;
3634 if (cycles_since_jump
< INT_MAX
)
3635 cycles_since_jump
++;
3637 if (type
== TYPE_MCLD
&& TARGET_SPECLD_ANOMALY
)
3639 rtx pat
= single_set (insn
);
3640 if (may_trap_p (SET_SRC (pat
)))
3643 else if (type
== TYPE_SYNC
&& TARGET_CSYNC_ANOMALY
)
3646 if (delay_needed
> cycles_since_jump
)
3650 rtx
*op
= recog_data
.operand
;
3652 delay_needed
-= cycles_since_jump
;
3654 extract_insn (last_condjump
);
3657 pat
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
3659 cycles_since_jump
= INT_MAX
;
3662 /* Do not adjust cycles_since_jump in this case, so that
3663 we'll increase the number of NOPs for a subsequent insn
3665 pat
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
3666 GEN_INT (delay_needed
));
3667 PATTERN (last_condjump
) = pat
;
3668 INSN_CODE (last_condjump
) = recog (pat
, insn
, &num_clobbers
);
3672 /* Second pass: for predicted-true branches, see if anything at the
3673 branch destination needs extra nops. */
3674 if (! TARGET_CSYNC_ANOMALY
)
3677 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3680 && any_condjump_p (insn
)
3681 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
3682 || cbranch_predicted_taken_p (insn
)))
3684 rtx target
= JUMP_LABEL (insn
);
3686 cycles_since_jump
= 0;
3687 for (; target
&& cycles_since_jump
< 3; target
= NEXT_INSN (target
))
3691 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
3694 pat
= PATTERN (target
);
3695 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
3696 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
3697 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
3700 if (INSN_P (target
))
3702 enum attr_type type
= get_attr_type (target
);
3703 int delay_needed
= 0;
3704 if (cycles_since_jump
< INT_MAX
)
3705 cycles_since_jump
++;
3707 if (type
== TYPE_SYNC
&& TARGET_CSYNC_ANOMALY
)
3710 if (delay_needed
> cycles_since_jump
)
3712 rtx prev
= prev_real_insn (label
);
3713 delay_needed
-= cycles_since_jump
;
3715 fprintf (dump_file
, "Adding %d nops after %d\n",
3716 delay_needed
, INSN_UID (label
));
3718 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
3725 "Reducing nops on insn %d.\n",
3728 x
= XVECEXP (x
, 0, 1);
3729 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
3730 XVECEXP (x
, 0, 0) = GEN_INT (v
);
3732 while (delay_needed
-- > 0)
3733 emit_insn_after (gen_nop (), label
);
3742 /* Handle interrupt_handler, exception_handler and nmi_handler function
3743 attributes; arguments as in struct attribute_spec.handler. */
3746 handle_int_attribute (tree
*node
, tree name
,
3747 tree args ATTRIBUTE_UNUSED
,
3748 int flags ATTRIBUTE_UNUSED
,
3752 if (TREE_CODE (x
) == FUNCTION_DECL
)
3755 if (TREE_CODE (x
) != FUNCTION_TYPE
)
3757 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
3758 IDENTIFIER_POINTER (name
));
3759 *no_add_attrs
= true;
3761 else if (funkind (x
) != SUBROUTINE
)
3762 error ("multiple function type attributes specified");
3767 /* Return 0 if the attributes for two types are incompatible, 1 if they
3768 are compatible, and 2 if they are nearly compatible (which causes a
3769 warning to be generated). */
3772 bfin_comp_type_attributes (tree type1
, tree type2
)
3774 e_funkind kind1
, kind2
;
3776 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
3779 kind1
= funkind (type1
);
3780 kind2
= funkind (type2
);
3785 /* Check for mismatched modifiers */
3786 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
3787 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
3790 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
3791 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
3794 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
3795 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
3798 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
3799 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
3805 /* Handle a "longcall" or "shortcall" attribute; arguments as in
3806 struct attribute_spec.handler. */
3809 bfin_handle_longcall_attribute (tree
*node
, tree name
,
3810 tree args ATTRIBUTE_UNUSED
,
3811 int flags ATTRIBUTE_UNUSED
,
3814 if (TREE_CODE (*node
) != FUNCTION_TYPE
3815 && TREE_CODE (*node
) != FIELD_DECL
3816 && TREE_CODE (*node
) != TYPE_DECL
)
3818 warning (OPT_Wattributes
, "`%s' attribute only applies to functions",
3819 IDENTIFIER_POINTER (name
));
3820 *no_add_attrs
= true;
3823 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
3824 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
3825 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
3826 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
3828 warning (OPT_Wattributes
,
3829 "can't apply both longcall and shortcall attributes to the same function");
3830 *no_add_attrs
= true;
3836 /* Table of valid machine attributes. */
3837 const struct attribute_spec bfin_attribute_table
[] =
3839 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
3840 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute
},
3841 { "exception_handler", 0, 0, false, true, true, handle_int_attribute
},
3842 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute
},
3843 { "nesting", 0, 0, false, true, true, NULL
},
3844 { "kspisusp", 0, 0, false, true, true, NULL
},
3845 { "saveall", 0, 0, false, true, true, NULL
},
3846 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
},
3847 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
},
3848 { NULL
, 0, 0, false, false, false, NULL
}
3851 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
3852 tell the assembler to generate pointers to function descriptors in
3856 bfin_assemble_integer (rtx value
, unsigned int size
, int aligned_p
)
3858 if (TARGET_FDPIC
&& size
== UNITS_PER_WORD
)
3860 if (GET_CODE (value
) == SYMBOL_REF
3861 && SYMBOL_REF_FUNCTION_P (value
))
3863 fputs ("\t.picptr\tfuncdesc(", asm_out_file
);
3864 output_addr_const (asm_out_file
, value
);
3865 fputs (")\n", asm_out_file
);
3870 /* We've set the unaligned SI op to NULL, so we always have to
3871 handle the unaligned case here. */
3872 assemble_integer_with_op ("\t.4byte\t", value
);
3876 return default_assemble_integer (value
, size
, aligned_p
);
3879 /* Output the assembler code for a thunk function. THUNK_DECL is the
3880 declaration for the thunk function itself, FUNCTION is the decl for
3881 the target function. DELTA is an immediate constant offset to be
3882 added to THIS. If VCALL_OFFSET is nonzero, the word at
3883 *(*this + vcall_offset) should be added to THIS. */
3886 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED
,
3887 tree thunk ATTRIBUTE_UNUSED
, HOST_WIDE_INT delta
,
3888 HOST_WIDE_INT vcall_offset
, tree function
)
3891 /* The this parameter is passed as the first argument. */
3892 rtx
this = gen_rtx_REG (Pmode
, REG_R0
);
3894 /* Adjust the this parameter by a fixed constant. */
3898 if (delta
>= -64 && delta
<= 63)
3900 xops
[0] = GEN_INT (delta
);
3901 output_asm_insn ("%1 += %0;", xops
);
3903 else if (delta
>= -128 && delta
< -64)
3905 xops
[0] = GEN_INT (delta
+ 64);
3906 output_asm_insn ("%1 += -64; %1 += %0;", xops
);
3908 else if (delta
> 63 && delta
<= 126)
3910 xops
[0] = GEN_INT (delta
- 63);
3911 output_asm_insn ("%1 += 63; %1 += %0;", xops
);
3915 xops
[0] = GEN_INT (delta
);
3916 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops
);
3920 /* Adjust the this parameter by a value stored in the vtable. */
3923 rtx p2tmp
= gen_rtx_REG (Pmode
, REG_P2
);
3924 rtx tmp
= gen_rtx_REG (Pmode
, REG_R2
);
3928 output_asm_insn ("%2 = r0; %2 = [%2];", xops
);
3930 /* Adjust the this parameter. */
3931 xops
[0] = gen_rtx_MEM (Pmode
, plus_constant (p2tmp
, vcall_offset
));
3932 if (!memory_operand (xops
[0], Pmode
))
3934 rtx tmp2
= gen_rtx_REG (Pmode
, REG_P1
);
3935 xops
[0] = GEN_INT (vcall_offset
);
3937 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops
);
3938 xops
[0] = gen_rtx_MEM (Pmode
, p2tmp
);
3941 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops
);
3944 xops
[0] = XEXP (DECL_RTL (function
), 0);
3945 if (1 || !flag_pic
|| (*targetm
.binds_local_p
) (function
))
3946 output_asm_insn ("jump.l\t%P0", xops
);
/* Codes for all the Blackfin builtins.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  BFIN_BUILTIN_MAX
};
/* Convenience wrapper: register one Blackfin builtin with the middle
   end.  NOTE(review): do/while wrapper restored from a mangled
   extraction — verify against upstream bfin.c.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
4013 /* Set up all builtin functions for this target. */
4015 bfin_init_builtins (void)
4017 tree V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
4018 tree void_ftype_void
4019 = build_function_type (void_type_node
, void_list_node
);
4020 tree short_ftype_short
4021 = build_function_type_list (short_integer_type_node
, short_integer_type_node
,
4023 tree short_ftype_int_int
4024 = build_function_type_list (short_integer_type_node
, integer_type_node
,
4025 integer_type_node
, NULL_TREE
);
4026 tree int_ftype_int_int
4027 = build_function_type_list (integer_type_node
, integer_type_node
,
4028 integer_type_node
, NULL_TREE
);
4030 = build_function_type_list (integer_type_node
, integer_type_node
,
4032 tree short_ftype_int
4033 = build_function_type_list (short_integer_type_node
, integer_type_node
,
4035 tree int_ftype_v2hi_v2hi
4036 = build_function_type_list (integer_type_node
, V2HI_type_node
,
4037 V2HI_type_node
, NULL_TREE
);
4038 tree v2hi_ftype_v2hi_v2hi
4039 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
4040 V2HI_type_node
, NULL_TREE
);
4041 tree v2hi_ftype_v2hi_v2hi_v2hi
4042 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
4043 V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
4044 tree v2hi_ftype_int_int
4045 = build_function_type_list (V2HI_type_node
, integer_type_node
,
4046 integer_type_node
, NULL_TREE
);
4047 tree v2hi_ftype_v2hi_int
4048 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
4049 integer_type_node
, NULL_TREE
);
4050 tree int_ftype_short_short
4051 = build_function_type_list (integer_type_node
, short_integer_type_node
,
4052 short_integer_type_node
, NULL_TREE
);
4053 tree v2hi_ftype_v2hi
4054 = build_function_type_list (V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
4055 tree short_ftype_v2hi
4056 = build_function_type_list (short_integer_type_node
, V2HI_type_node
,
4059 /* Add the remaining MMX insns with somewhat more complicated types. */
4060 def_builtin ("__builtin_bfin_csync", void_ftype_void
, BFIN_BUILTIN_CSYNC
);
4061 def_builtin ("__builtin_bfin_ssync", void_ftype_void
, BFIN_BUILTIN_SSYNC
);
4063 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int
,
4064 BFIN_BUILTIN_COMPOSE_2X16
);
4065 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi
,
4066 BFIN_BUILTIN_EXTRACTHI
);
4067 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi
,
4068 BFIN_BUILTIN_EXTRACTLO
);
4070 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi
,
4071 BFIN_BUILTIN_MIN_2X16
);
4072 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi
,
4073 BFIN_BUILTIN_MAX_2X16
);
4075 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi
,
4076 BFIN_BUILTIN_SSADD_2X16
);
4077 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi
,
4078 BFIN_BUILTIN_SSSUB_2X16
);
4079 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi
,
4080 BFIN_BUILTIN_SSADDSUB_2X16
);
4081 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi
,
4082 BFIN_BUILTIN_SSSUBADD_2X16
);
4083 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi
,
4084 BFIN_BUILTIN_MULT_2X16
);
4085 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi
,
4086 BFIN_BUILTIN_MULTR_2X16
);
4087 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi
,
4088 BFIN_BUILTIN_NEG_2X16
);
4089 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi
,
4090 BFIN_BUILTIN_ABS_2X16
);
4092 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int
,
4093 BFIN_BUILTIN_SSADD_1X16
);
4094 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int
,
4095 BFIN_BUILTIN_SSSUB_1X16
);
4096 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int
,
4097 BFIN_BUILTIN_MULT_1X16
);
4098 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int
,
4099 BFIN_BUILTIN_MULTR_1X16
);
4100 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short
,
4101 BFIN_BUILTIN_NEG_1X16
);
4102 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short
,
4103 BFIN_BUILTIN_ABS_1X16
);
4104 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int
,
4105 BFIN_BUILTIN_NORM_1X16
);
4107 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi
,
4108 BFIN_BUILTIN_DIFFHL_2X16
);
4109 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi
,
4110 BFIN_BUILTIN_DIFFLH_2X16
);
4112 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi
,
4113 BFIN_BUILTIN_MULHISILL
);
4114 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi
,
4115 BFIN_BUILTIN_MULHISIHL
);
4116 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi
,
4117 BFIN_BUILTIN_MULHISILH
);
4118 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi
,
4119 BFIN_BUILTIN_MULHISIHH
);
4121 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int
,
4122 BFIN_BUILTIN_SSADD_1X32
);
4123 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int
,
4124 BFIN_BUILTIN_SSSUB_1X32
);
4125 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int
,
4126 BFIN_BUILTIN_NEG_1X32
);
4127 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int
,
4128 BFIN_BUILTIN_NORM_1X32
);
4129 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short
,
4130 BFIN_BUILTIN_MULT_1X32
);
4133 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int
,
4134 BFIN_BUILTIN_SSASHIFT_1X16
);
4135 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int
,
4136 BFIN_BUILTIN_SSASHIFT_2X16
);
4137 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int
,
4138 BFIN_BUILTIN_LSHIFT_1X16
);
4139 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int
,
4140 BFIN_BUILTIN_LSHIFT_2X16
);
4142 /* Complex numbers. */
4143 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi
,
4144 BFIN_BUILTIN_CPLX_MUL_16
);
4145 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi
,
4146 BFIN_BUILTIN_CPLX_MAC_16
);
4147 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi
,
4148 BFIN_BUILTIN_CPLX_MSU_16
);
4152 struct builtin_description
4154 const enum insn_code icode
;
4155 const char *const name
;
4156 const enum bfin_builtins code
;
4160 static const struct builtin_description bdesc_2arg
[] =
4162 { CODE_FOR_composev2hi
, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16
, -1 },
4164 { CODE_FOR_ssashiftv2hi3
, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16
, -1 },
4165 { CODE_FOR_ssashifthi3
, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16
, -1 },
4166 { CODE_FOR_lshiftv2hi3
, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16
, -1 },
4167 { CODE_FOR_lshifthi3
, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16
, -1 },
4169 { CODE_FOR_sminhi3
, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16
, -1 },
4170 { CODE_FOR_smaxhi3
, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16
, -1 },
4171 { CODE_FOR_ssaddhi3
, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16
, -1 },
4172 { CODE_FOR_sssubhi3
, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16
, -1 },
4174 { CODE_FOR_sminsi3
, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32
, -1 },
4175 { CODE_FOR_smaxsi3
, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32
, -1 },
4176 { CODE_FOR_ssaddsi3
, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32
, -1 },
4177 { CODE_FOR_sssubsi3
, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32
, -1 },
4179 { CODE_FOR_sminv2hi3
, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16
, -1 },
4180 { CODE_FOR_smaxv2hi3
, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16
, -1 },
4181 { CODE_FOR_ssaddv2hi3
, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16
, -1 },
4182 { CODE_FOR_sssubv2hi3
, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16
, -1 },
4183 { CODE_FOR_ssaddsubv2hi3
, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16
, -1 },
4184 { CODE_FOR_sssubaddv2hi3
, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16
, -1 },
4186 { CODE_FOR_flag_mulhisi
, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32
, MACFLAG_NONE
},
4187 { CODE_FOR_flag_mulhi
, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16
, MACFLAG_T
},
4188 { CODE_FOR_flag_mulhi
, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16
, MACFLAG_NONE
},
4189 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16
, MACFLAG_T
},
4190 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16
, MACFLAG_NONE
}
4193 static const struct builtin_description bdesc_1arg
[] =
4195 { CODE_FOR_signbitshi2
, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16
, 0 },
4196 { CODE_FOR_ssneghi2
, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16
, 0 },
4197 { CODE_FOR_abshi2
, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16
, 0 },
4199 { CODE_FOR_signbitssi2
, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32
, 0 },
4200 { CODE_FOR_ssnegsi2
, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32
, 0 },
4202 { CODE_FOR_movv2hi_hi_low
, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO
, 0 },
4203 { CODE_FOR_movv2hi_hi_high
, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI
, 0 },
4204 { CODE_FOR_ssnegv2hi2
, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16
, 0 },
4205 { CODE_FOR_absv2hi2
, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16
, 0 }
4208 /* Errors in the source file can cause expand_expr to return const0_rtx
4209 where we expect a vector. To avoid crashing, use one of the vector
4210 clear instructions. */
4212 safe_vector_operand (rtx x
, enum machine_mode mode
)
4214 if (x
!= const0_rtx
)
4216 x
= gen_reg_rtx (SImode
);
4218 emit_insn (gen_movsi (x
, CONST0_RTX (SImode
)));
4219 return gen_lowpart (mode
, x
);
4222 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
4223 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
4226 bfin_expand_binop_builtin (enum insn_code icode
, tree arglist
, rtx target
,
4230 tree arg0
= TREE_VALUE (arglist
);
4231 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4232 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4233 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4234 enum machine_mode op0mode
= GET_MODE (op0
);
4235 enum machine_mode op1mode
= GET_MODE (op1
);
4236 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4237 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4238 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4240 if (VECTOR_MODE_P (mode0
))
4241 op0
= safe_vector_operand (op0
, mode0
);
4242 if (VECTOR_MODE_P (mode1
))
4243 op1
= safe_vector_operand (op1
, mode1
);
4246 || GET_MODE (target
) != tmode
4247 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4248 target
= gen_reg_rtx (tmode
);
4250 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
4253 op0
= gen_lowpart (HImode
, op0
);
4255 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
4258 op1
= gen_lowpart (HImode
, op1
);
4260 /* In case the insn wants input operands in modes different from
4261 the result, abort. */
4262 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
4263 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
4265 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4266 op0
= copy_to_mode_reg (mode0
, op0
);
4267 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4268 op1
= copy_to_mode_reg (mode1
, op1
);
4271 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4273 pat
= GEN_FCN (icode
) (target
, op0
, op1
, GEN_INT (macflag
));
4281 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
4284 bfin_expand_unop_builtin (enum insn_code icode
, tree arglist
,
4288 tree arg0
= TREE_VALUE (arglist
);
4289 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4290 enum machine_mode op0mode
= GET_MODE (op0
);
4291 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4292 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4295 || GET_MODE (target
) != tmode
4296 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4297 target
= gen_reg_rtx (tmode
);
4299 if (VECTOR_MODE_P (mode0
))
4300 op0
= safe_vector_operand (op0
, mode0
);
4302 if (op0mode
== SImode
&& mode0
== HImode
)
4305 op0
= gen_lowpart (HImode
, op0
);
4307 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
4309 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4310 op0
= copy_to_mode_reg (mode0
, op0
);
4312 pat
= GEN_FCN (icode
) (target
, op0
);
4319 /* Expand an expression EXP that calls a built-in function,
4320 with result going to TARGET if that's convenient
4321 (and in mode MODE if that's convenient).
4322 SUBTARGET may be used as the target for computing one of EXP's operands.
4323 IGNORE is nonzero if the value is to be ignored. */
4326 bfin_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
4327 rtx subtarget ATTRIBUTE_UNUSED
,
4328 enum machine_mode mode ATTRIBUTE_UNUSED
,
4329 int ignore ATTRIBUTE_UNUSED
)
4332 enum insn_code icode
;
4333 const struct builtin_description
*d
;
4334 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4335 tree arglist
= TREE_OPERAND (exp
, 1);
4336 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4337 tree arg0
, arg1
, arg2
;
4338 rtx op0
, op1
, op2
, accvec
, pat
, tmp1
, tmp2
;
4339 enum machine_mode tmode
, mode0
;
4343 case BFIN_BUILTIN_CSYNC
:
4344 emit_insn (gen_csync ());
4346 case BFIN_BUILTIN_SSYNC
:
4347 emit_insn (gen_ssync ());
4350 case BFIN_BUILTIN_DIFFHL_2X16
:
4351 case BFIN_BUILTIN_DIFFLH_2X16
:
4352 arg0
= TREE_VALUE (arglist
);
4353 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4354 icode
= (fcode
== BFIN_BUILTIN_DIFFHL_2X16
4355 ? CODE_FOR_subhilov2hi3
: CODE_FOR_sublohiv2hi3
);
4356 tmode
= insn_data
[icode
].operand
[0].mode
;
4357 mode0
= insn_data
[icode
].operand
[1].mode
;
4360 || GET_MODE (target
) != tmode
4361 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4362 target
= gen_reg_rtx (tmode
);
4364 if (VECTOR_MODE_P (mode0
))
4365 op0
= safe_vector_operand (op0
, mode0
);
4367 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4368 op0
= copy_to_mode_reg (mode0
, op0
);
4370 pat
= GEN_FCN (icode
) (target
, op0
, op0
);
4376 case BFIN_BUILTIN_CPLX_MUL_16
:
4377 arg0
= TREE_VALUE (arglist
);
4378 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4379 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4380 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4381 accvec
= gen_reg_rtx (V2PDImode
);
4384 || GET_MODE (target
) != V2HImode
4385 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
4386 target
= gen_reg_rtx (tmode
);
4387 if (! register_operand (op0
, GET_MODE (op0
)))
4388 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
4389 if (! register_operand (op1
, GET_MODE (op1
)))
4390 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
4392 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
4393 const0_rtx
, const0_rtx
,
4394 const1_rtx
, GEN_INT (MACFLAG_NONE
)));
4395 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
4396 const1_rtx
, const1_rtx
,
4397 const0_rtx
, accvec
, const1_rtx
, const0_rtx
,
4398 GEN_INT (MACFLAG_NONE
), accvec
));
4402 case BFIN_BUILTIN_CPLX_MAC_16
:
4403 case BFIN_BUILTIN_CPLX_MSU_16
:
4404 arg0
= TREE_VALUE (arglist
);
4405 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4406 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4407 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4408 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4409 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4410 accvec
= gen_reg_rtx (V2PDImode
);
4413 || GET_MODE (target
) != V2HImode
4414 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
4415 target
= gen_reg_rtx (tmode
);
4416 if (! register_operand (op0
, GET_MODE (op0
)))
4417 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
4418 if (! register_operand (op1
, GET_MODE (op1
)))
4419 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
4421 tmp1
= gen_reg_rtx (SImode
);
4422 tmp2
= gen_reg_rtx (SImode
);
4423 emit_insn (gen_ashlsi3 (tmp1
, gen_lowpart (SImode
, op2
), GEN_INT (16)));
4424 emit_move_insn (tmp2
, gen_lowpart (SImode
, op2
));
4425 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode
, tmp2
), const0_rtx
));
4426 emit_insn (gen_load_accumulator_pair (accvec
, tmp1
, tmp2
));
4427 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op0
, op1
, const0_rtx
,
4428 const0_rtx
, const0_rtx
,
4429 const1_rtx
, accvec
, const0_rtx
,
4431 GEN_INT (MACFLAG_W32
)));
4432 tmp1
= (fcode
== BFIN_BUILTIN_CPLX_MAC_16
? const1_rtx
: const0_rtx
);
4433 tmp2
= (fcode
== BFIN_BUILTIN_CPLX_MAC_16
? const0_rtx
: const1_rtx
);
4434 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
4435 const1_rtx
, const1_rtx
,
4436 const0_rtx
, accvec
, tmp1
, tmp2
,
4437 GEN_INT (MACFLAG_NONE
), accvec
));
4445 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
4446 if (d
->code
== fcode
)
4447 return bfin_expand_binop_builtin (d
->icode
, arglist
, target
,
4450 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
4451 if (d
->code
== fcode
)
4452 return bfin_expand_unop_builtin (d
->icode
, arglist
, target
);
4457 #undef TARGET_INIT_BUILTINS
4458 #define TARGET_INIT_BUILTINS bfin_init_builtins
4460 #undef TARGET_EXPAND_BUILTIN
4461 #define TARGET_EXPAND_BUILTIN bfin_expand_builtin
4463 #undef TARGET_ASM_GLOBALIZE_LABEL
4464 #define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label
4466 #undef TARGET_ASM_FILE_START
4467 #define TARGET_ASM_FILE_START output_file_start
4469 #undef TARGET_ATTRIBUTE_TABLE
4470 #define TARGET_ATTRIBUTE_TABLE bfin_attribute_table
4472 #undef TARGET_COMP_TYPE_ATTRIBUTES
4473 #define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes
4475 #undef TARGET_RTX_COSTS
4476 #define TARGET_RTX_COSTS bfin_rtx_costs
4478 #undef TARGET_ADDRESS_COST
4479 #define TARGET_ADDRESS_COST bfin_address_cost
4481 #undef TARGET_ASM_INTERNAL_LABEL
4482 #define TARGET_ASM_INTERNAL_LABEL bfin_internal_label
4484 #undef TARGET_ASM_INTEGER
4485 #define TARGET_ASM_INTEGER bfin_assemble_integer
4487 #undef TARGET_MACHINE_DEPENDENT_REORG
4488 #define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg
4490 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
4491 #define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall
4493 #undef TARGET_ASM_OUTPUT_MI_THUNK
4494 #define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
4495 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
4496 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
4498 #undef TARGET_SCHED_ADJUST_COST
4499 #define TARGET_SCHED_ADJUST_COST bfin_adjust_cost
4501 #undef TARGET_PROMOTE_PROTOTYPES
4502 #define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
4503 #undef TARGET_PROMOTE_FUNCTION_ARGS
4504 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
4505 #undef TARGET_PROMOTE_FUNCTION_RETURN
4506 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
4508 #undef TARGET_ARG_PARTIAL_BYTES
4509 #define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes
4511 #undef TARGET_PASS_BY_REFERENCE
4512 #define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference
4514 #undef TARGET_SETUP_INCOMING_VARARGS
4515 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
4517 #undef TARGET_STRUCT_VALUE_RTX
4518 #define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx
4520 #undef TARGET_VECTOR_MODE_SUPPORTED_P
4521 #define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p
4523 #undef TARGET_HANDLE_OPTION
4524 #define TARGET_HANDLE_OPTION bfin_handle_option
4526 #undef TARGET_DEFAULT_TARGET_FLAGS
4527 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
4529 #undef TARGET_SECONDARY_RELOAD
4530 #define TARGET_SECONDARY_RELOAD bfin_secondary_reload
4532 #undef TARGET_DELEGITIMIZE_ADDRESS
4533 #define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
4535 #undef TARGET_CANNOT_FORCE_CONST_MEM
4536 #define TARGET_CANNOT_FORCE_CONST_MEM bfin_cannot_force_const_mem
4538 struct gcc_target targetm
= TARGET_INITIALIZER
;