1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright (C) 2001 Free Software Foundation, Inc.
3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
49 #include "target-def.h"
50 #include "langhooks.h"
52 /* Enumeration for all of the relational tests, so that we can build
53 arrays indexed by the test type, and not worry about the order
70 /* Cached operands, and operator to compare for use in set/branch on
74 /* what type of branch to use */
75 enum cmp_type branch_type
;
77 /* Array giving truth value on whether or not a given hard register
78 can support a given mode. */
79 char xtensa_hard_regno_mode_ok
[(int) MAX_MACHINE_MODE
][FIRST_PSEUDO_REGISTER
];
81 /* Current frame size calculated by compute_frame_size. */
82 unsigned xtensa_current_frame_size
;
84 /* Tables of ld/st opcode names for block moves */
85 const char *xtensa_ld_opcodes
[(int) MAX_MACHINE_MODE
];
86 const char *xtensa_st_opcodes
[(int) MAX_MACHINE_MODE
];
87 #define LARGEST_MOVE_RATIO 15
89 /* Define the structure for the machine field in struct function. */
90 struct machine_function
GTY(())
92 int accesses_prev_frame
;
95 /* Vector, indexed by hard register number, which contains 1 for a
96 register that is allowable in a candidate for leaf function
99 const char xtensa_leaf_regs
[FIRST_PSEUDO_REGISTER
] =
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
103 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
107 /* Map hard register number to register class */
108 const enum reg_class xtensa_regno_to_class
[FIRST_PSEUDO_REGISTER
] =
110 GR_REGS
, SP_REG
, GR_REGS
, GR_REGS
,
111 GR_REGS
, GR_REGS
, GR_REGS
, GR_REGS
,
112 GR_REGS
, GR_REGS
, GR_REGS
, GR_REGS
,
113 GR_REGS
, GR_REGS
, GR_REGS
, GR_REGS
,
114 AR_REGS
, AR_REGS
, BR_REGS
,
115 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
116 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
117 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
118 FP_REGS
, FP_REGS
, FP_REGS
, FP_REGS
,
122 /* Map register constraint character to register class. */
123 enum reg_class xtensa_char_to_class
[256] =
125 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
126 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
127 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
128 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
129 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
130 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
131 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
132 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
133 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
134 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
135 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
136 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
137 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
138 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
139 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
140 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
141 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
142 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
143 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
144 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
145 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
146 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
147 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
148 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
149 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
150 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
151 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
152 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
153 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
154 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
155 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
156 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
157 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
158 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
159 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
160 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
161 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
162 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
163 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
164 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
165 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
166 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
167 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
168 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
169 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
170 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
171 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
172 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
173 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
174 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
175 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
176 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
177 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
178 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
179 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
180 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
181 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
182 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
183 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
184 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
185 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
186 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
187 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
188 NO_REGS
, NO_REGS
, NO_REGS
, NO_REGS
,
191 static int b4const_or_zero
PARAMS ((int));
192 static enum internal_test map_test_to_internal_test
PARAMS ((enum rtx_code
));
193 static rtx gen_int_relational
PARAMS ((enum rtx_code
, rtx
, rtx
, int *));
194 static rtx gen_float_relational
PARAMS ((enum rtx_code
, rtx
, rtx
));
195 static rtx gen_conditional_move
PARAMS ((rtx
));
196 static rtx fixup_subreg_mem
PARAMS ((rtx x
));
197 static enum machine_mode xtensa_find_mode_for_size
PARAMS ((unsigned));
198 static struct machine_function
* xtensa_init_machine_status
PARAMS ((void));
199 static void printx
PARAMS ((FILE *, signed int));
200 static void xtensa_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
201 unsigned HOST_WIDE_INT
));
202 static void xtensa_encode_section_info
PARAMS ((tree
, int));
204 static rtx frame_size_const
;
205 static int current_function_arg_words
;
206 static const int reg_nonleaf_alloc_order
[FIRST_PSEUDO_REGISTER
] =
209 /* This macro generates the assembly code for function entry.
210 FILE is a stdio stream to output the code to.
211 SIZE is an int: how many units of temporary storage to allocate.
212 Refer to the array 'regs_ever_live' to determine which registers
213 to save; 'regs_ever_live[I]' is nonzero if register number I
214 is ever used in the function. This macro is responsible for
215 knowing which registers should not be saved even if used. */
217 #undef TARGET_ASM_FUNCTION_PROLOGUE
218 #define TARGET_ASM_FUNCTION_PROLOGUE xtensa_function_prologue
220 /* This macro generates the assembly code for function exit,
221 on machines that need it. If FUNCTION_EPILOGUE is not defined
222 then individual return instructions are generated for each
223 return statement. Args are same as for FUNCTION_PROLOGUE. */
225 #undef TARGET_ASM_FUNCTION_EPILOGUE
226 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
228 /* These hooks specify assembly directives for creating certain kinds
229 of integer object. */
231 #undef TARGET_ASM_ALIGNED_SI_OP
232 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
234 #undef TARGET_ASM_SELECT_RTX_SECTION
235 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
236 #undef TARGET_ENCODE_SECTION_INFO
237 #define TARGET_ENCODE_SECTION_INFO xtensa_encode_section_info
239 struct gcc_target targetm
= TARGET_INITIALIZER
;
243 * Functions to test Xtensa immediate operand validity.
277 return (v
& 255) == 0 && (v
>= -32768 && v
<= 32512);
284 return (v
== -1 || (v
>= 1 && v
<= 15));
291 return v
>= -32 && v
<= 95;
325 return v
>= -128 && v
<= 127;
332 return (v
>= 7 && v
<= 22);
339 return (v
& 3) == 0 && (v
>= 0 && v
<= 60);
346 return v
>= -2048 && v
<= 2047;
353 return v
>= 0 && v
<= 255;
360 return (v
& 1) == 0 && (v
>= 0 && v
<= 510);
367 return (v
& 3) == 0 && (v
>= 0 && v
<= 1020);
371 /* This is just like the standard true_regnum() function except that it
372 works even when reg_renumber is not initialized. */
378 if (GET_CODE (x
) == REG
)
381 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
382 && reg_renumber
[REGNO (x
)] >= 0)
383 return reg_renumber
[REGNO (x
)];
386 if (GET_CODE (x
) == SUBREG
)
388 int base
= xt_true_regnum (SUBREG_REG (x
));
389 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
390 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
391 GET_MODE (SUBREG_REG (x
)),
392 SUBREG_BYTE (x
), GET_MODE (x
));
/* Operand predicate: accept any register operand, or a CONST_INT that
   fits either the signed 8-bit immediate range or the simm8x256 range
   (presumably the scaled ADDMI immediate -- confirm against the md
   file).  NOTE(review): the return type and the `rtx op;' declarator
   lines are missing from this extraction.  */
399 add_operand (op
, mode
)
401 enum machine_mode mode
;
/* A constant addend is valid only if one of the two encodings fits.  */
403 if (GET_CODE (op
) == CONST_INT
)
404 return (xtensa_simm8 (INTVAL (op
)) ||
405 xtensa_simm8x256 (INTVAL (op
)));
/* Otherwise defer to the generic register predicate.  */
407 return register_operand (op
, mode
);
/* Operand predicate: a register, or a CONST_INT in the signed 8-bit
   immediate range (xtensa_simm8).  NOTE(review): return type and
   `rtx op;' declarator are missing from this extraction.  */
412 arith_operand (op
, mode
)
414 enum machine_mode mode
;
416 if (GET_CODE (op
) == CONST_INT
)
417 return xtensa_simm8 (INTVAL (op
));
419 return register_operand (op
, mode
);
/* Operand predicate: a register or a memory operand, but explicitly
   rejecting constant-pool memory references (which the generic
   nonimmediate_operand() would accept).  NOTE(review): return type and
   `rtx op;' declarator are missing from this extraction.  */
424 nonimmed_operand (op
, mode
)
426 enum machine_mode mode
;
428 /* We cannot use the standard nonimmediate_operand() predicate because
429 it includes constant pool memory operands. */
431 if (memory_operand (op
, mode
))
432 return !constantpool_address_p (XEXP (op
, 0));
434 return register_operand (op
, mode
);
/* Operand predicate: a memory operand that is not a constant-pool
   reference.  NOTE(review): return type, `rtx op;' declarator, and the
   final return for the non-memory case are missing from this
   extraction -- confirm against the original source.  */
439 mem_operand (op
, mode
)
441 enum machine_mode mode
;
443 /* We cannot use the standard memory_operand() predicate because
444 it includes constant pool memory operands. */
446 if (memory_operand (op
, mode
))
447 return !constantpool_address_p (XEXP (op
, 0));
/* Return nonzero if a MODE-sized move between operands[0] and
   operands[1] is valid.  At least one side must be a register, and per
   the checks below the MAC16 accumulator (ACC_REG_P) does not qualify.
   Writes to the stack pointer are only allowed as SImode
   register-to-register moves (the MOVSP opcode).  NOTE(review):
   several closing braces / fallthrough returns are missing from this
   extraction.  */
454 xtensa_valid_move (mode
, operands
)
455 enum machine_mode mode
;
458 /* Either the destination or source must be a register, and the
459 MAC16 accumulator doesn't count. */
461 if (register_operand (operands
[0], mode
))
463 int dst_regnum
= xt_true_regnum (operands
[0]);
465 /* The stack pointer can only be assigned with a MOVSP opcode. */
466 if (dst_regnum
== STACK_POINTER_REGNUM
)
467 return (mode
== SImode
468 && register_operand (operands
[1], mode
)
469 && !ACC_REG_P (xt_true_regnum (operands
[1])));
/* Any other non-accumulator destination register is acceptable.  */
471 if (!ACC_REG_P (dst_regnum
))
/* Otherwise the source must be a non-accumulator register.  */
474 if (register_operand (operands
[1], mode
))
476 int src_regnum
= xt_true_regnum (operands
[1]);
477 if (!ACC_REG_P (src_regnum
))
/* Operand predicate: a register, or a CONST_INT that is a valid mask
   immediate (a low-order run of ones; see xtensa_mask_immediate).
   NOTE(review): return type and `rtx op;' declarator are missing from
   this extraction.  */
485 mask_operand (op
, mode
)
487 enum machine_mode mode
;
489 if (GET_CODE (op
) == CONST_INT
)
490 return xtensa_mask_immediate (INTVAL (op
));
492 return register_operand (op
, mode
);
/* Operand predicate for the EXTUI field-size operand: a CONST_INT N
   such that the N-bit mask (1 << N) - 1 is a valid mask immediate.
   NOTE(review): return type and `rtx op;' declarator are missing from
   this extraction.  */
497 extui_fldsz_operand (op
, mode
)
499 enum machine_mode mode ATTRIBUTE_UNUSED
;
501 return ((GET_CODE (op
) == CONST_INT
)
502 && xtensa_mask_immediate ((1 << INTVAL (op
)) - 1));
/* Operand predicate for sign-extension sources.  NOTE(review): a guard
   condition between the two returns (presumably a TARGET_SEXT test
   selecting the wider nonimmed_operand form) is missing from this
   extraction -- confirm against the original source.  */
507 sext_operand (op
, mode
)
509 enum machine_mode mode
;
512 return nonimmed_operand (op
, mode
);
513 return mem_operand (op
, mode
);
/* Operand predicate for the SEXT field-size operand: a CONST_INT whose
   value minus one passes the xtensa_tp7 range test (7..22 per the
   immediate tests earlier in this file).  NOTE(review): return type and
   `rtx op;' declarator are missing from this extraction.  */
518 sext_fldsz_operand (op
, mode
)
520 enum machine_mode mode ATTRIBUTE_UNUSED
;
522 return ((GET_CODE (op
) == CONST_INT
) && xtensa_tp7 (INTVAL (op
) - 1));
/* Operand predicate: a CONST_INT naming the least-significant bit
   position, which is BITS_PER_WORD-1 on a bits-big-endian target and 0
   otherwise.  NOTE(review): return type, `rtx op;' declarator, and the
   non-CONST_INT fallthrough return are missing from this
   extraction.  */
527 lsbitnum_operand (op
, mode
)
529 enum machine_mode mode ATTRIBUTE_UNUSED
;
531 if (GET_CODE (op
) == CONST_INT
)
533 return (BITS_BIG_ENDIAN
534 ? (INTVAL (op
) == BITS_PER_WORD
-1)
535 : (INTVAL (op
) == 0));
/* NOTE(review): this is the tail of b4const_or_zero(); the function
   header and (per the name) a special case accepting zero are not
   visible in this extraction -- confirm against the original
   source.  */
547 return xtensa_b4const (v
);
/* Operand predicate for signed branch comparisons: a register, or a
   CONST_INT accepted by b4const_or_zero (the b4const encodable set,
   or zero).  NOTE(review): return type and `rtx op;' declarator are
   missing from this extraction.  */
552 branch_operand (op
, mode
)
554 enum machine_mode mode
;
556 if (GET_CODE (op
) == CONST_INT
)
557 return b4const_or_zero (INTVAL (op
));
559 return register_operand (op
, mode
);
/* Operand predicate for unsigned branch comparisons: a register, or a
   CONST_INT in the b4constu encodable set.  NOTE(review): return type
   and `rtx op;' declarator are missing from this extraction.  */
564 ubranch_operand (op
, mode
)
566 enum machine_mode mode
;
568 if (GET_CODE (op
) == CONST_INT
)
569 return xtensa_b4constu (INTVAL (op
));
571 return register_operand (op
, mode
);
/* Operand predicate for call addresses: a hard (or true pseudo-range)
   register other than the argument pointer and outside the
   frame-pointer..last-virtual range, or a constant address.  Under PIC,
   direct calls are only permitted to SYMBOL_REFs marked local via
   SYMBOL_REF_FLAG.  NOTE(review): the return statements inside the two
   if-arms are missing from this extraction.  */
576 call_insn_operand (op
, mode
)
578 enum machine_mode mode ATTRIBUTE_UNUSED
;
580 if ((GET_CODE (op
) == REG
)
581 && (op
!= arg_pointer_rtx
)
582 && ((REGNO (op
) < FRAME_POINTER_REGNUM
)
583 || (REGNO (op
) > LAST_VIRTUAL_REGISTER
)))
586 if (CONSTANT_ADDRESS_P (op
))
588 /* Direct calls only allowed to static functions with PIC. */
589 return (!flag_pic
|| (GET_CODE (op
) == SYMBOL_REF
590 && SYMBOL_REF_FLAG (op
)));
598 move_operand (op
, mode
)
600 enum machine_mode mode
;
602 if (register_operand (op
, mode
))
605 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
607 if (GET_CODE (op
) == CONSTANT_P_RTX
)
610 if (GET_CODE (op
) == CONST_INT
)
611 return xtensa_simm12b (INTVAL (op
));
613 if (GET_CODE (op
) == MEM
)
614 return memory_address_p (mode
, XEXP (op
, 0));
621 smalloffset_mem_p (op
)
624 if (GET_CODE (op
) == MEM
)
626 rtx addr
= XEXP (op
, 0);
627 if (GET_CODE (addr
) == REG
)
628 return REG_OK_FOR_BASE_P (addr
);
629 if (GET_CODE (addr
) == PLUS
)
631 rtx offset
= XEXP (addr
, 0);
632 if (GET_CODE (offset
) != CONST_INT
)
633 offset
= XEXP (addr
, 1);
634 if (GET_CODE (offset
) != CONST_INT
)
636 return xtensa_lsi4x4 (INTVAL (offset
));
/* Return nonzero if both OP and the word at OP+4 are small-offset
   memory references, i.e. a doubleword access can be split into two
   valid small-offset loads/stores.  NOTE(review): return type,
   `rtx op;' declarator, and the early-return body are missing from
   this extraction.  */
644 smalloffset_double_mem_p (op
)
647 if (!smalloffset_mem_p (op
))
649 return smalloffset_mem_p (adjust_address (op
, GET_MODE (op
), 4));
654 constantpool_address_p (addr
)
659 if (GET_CODE (addr
) == CONST
)
663 /* only handle (PLUS (SYM, OFFSET)) form */
664 addr
= XEXP (addr
, 0);
665 if (GET_CODE (addr
) != PLUS
)
668 /* make sure the address is word aligned */
669 offset
= XEXP (addr
, 1);
670 if ((GET_CODE (offset
) != CONST_INT
)
671 || ((INTVAL (offset
) & 3) != 0))
674 sym
= XEXP (addr
, 0);
677 if ((GET_CODE (sym
) == SYMBOL_REF
)
678 && CONSTANT_POOL_ADDRESS_P (sym
))
/* Return nonzero if OP is a MEM whose address refers into the constant
   pool (see constantpool_address_p).  NOTE(review): return type,
   `rtx op;' declarator, and the non-MEM fallthrough return are missing
   from this extraction.  */
685 constantpool_mem_p (op
)
688 if (GET_CODE (op
) == MEM
)
689 return constantpool_address_p (XEXP (op
, 0));
695 non_const_move_operand (op
, mode
)
697 enum machine_mode mode
;
699 if (register_operand (op
, mode
))
701 if (GET_CODE (op
) == SUBREG
)
702 op
= SUBREG_REG (op
);
703 if (GET_CODE (op
) == MEM
)
704 return memory_address_p (mode
, XEXP (op
, 0));
709 /* Accept the floating point constant 1 in the appropriate mode. */
712 const_float_1_operand (op
, mode
)
714 enum machine_mode mode
;
717 static REAL_VALUE_TYPE onedf
;
718 static REAL_VALUE_TYPE onesf
;
719 static int one_initialized
;
721 if ((GET_CODE (op
) != CONST_DOUBLE
)
722 || (mode
!= GET_MODE (op
))
723 || (mode
!= DFmode
&& mode
!= SFmode
))
726 REAL_VALUE_FROM_CONST_DOUBLE (d
, op
);
728 if (! one_initialized
)
730 onedf
= REAL_VALUE_ATOF ("1.0", DFmode
);
731 onesf
= REAL_VALUE_ATOF ("1.0", SFmode
);
732 one_initialized
= TRUE
;
736 return REAL_VALUES_EQUAL (d
, onedf
);
738 return REAL_VALUES_EQUAL (d
, onesf
);
/* Operand predicate: a CONST_INT that is a valid SFmode memory offset
   (see xtensa_mem_offset).  NOTE(review): return type, `rtx op;'
   declarator, and the non-CONST_INT fallthrough return are missing
   from this extraction.  */
743 fpmem_offset_operand (op
, mode
)
745 enum machine_mode mode ATTRIBUTE_UNUSED
;
747 if (GET_CODE (op
) == CONST_INT
)
748 return xtensa_mem_offset (INTVAL (op
), SFmode
);
754 xtensa_extend_reg (dst
, src
)
758 rtx temp
= gen_reg_rtx (SImode
);
759 rtx shift
= GEN_INT (BITS_PER_WORD
- GET_MODE_BITSIZE (GET_MODE (src
)));
761 /* generate paradoxical subregs as needed so that the modes match */
762 src
= simplify_gen_subreg (SImode
, src
, GET_MODE (src
), 0);
763 dst
= simplify_gen_subreg (SImode
, dst
, GET_MODE (dst
), 0);
765 emit_insn (gen_ashlsi3 (temp
, src
, shift
));
766 emit_insn (gen_ashrsi3 (dst
, temp
, shift
));
771 xtensa_load_constant (dst
, src
)
775 enum machine_mode mode
= GET_MODE (dst
);
776 src
= force_const_mem (SImode
, src
);
778 /* PC-relative loads are always SImode so we have to add a SUBREG if that
779 is not the desired mode */
783 if (register_operand (dst
, mode
))
784 dst
= simplify_gen_subreg (SImode
, dst
, mode
, 0);
787 src
= force_reg (SImode
, src
);
788 src
= gen_lowpart_SUBREG (mode
, src
);
792 emit_move_insn (dst
, src
);
797 branch_operator (x
, mode
)
799 enum machine_mode mode
;
801 if (GET_MODE (x
) != mode
)
804 switch (GET_CODE (x
))
819 ubranch_operator (x
, mode
)
821 enum machine_mode mode
;
823 if (GET_MODE (x
) != mode
)
826 switch (GET_CODE (x
))
839 boolean_operator (x
, mode
)
841 enum machine_mode mode
;
843 if (GET_MODE (x
) != mode
)
846 switch (GET_CODE (x
))
859 xtensa_mask_immediate (v
)
862 #define MAX_MASK_SIZE 16
865 for (mask_size
= 1; mask_size
<= MAX_MASK_SIZE
; mask_size
++)
879 xtensa_mem_offset (v
, mode
)
881 enum machine_mode mode
;
886 /* Handle the worst case for block moves. See xtensa_expand_block_move
887 where we emit an optimized block move operation if the block can be
888 moved in < "move_ratio" pieces. The worst case is when the block is
889 aligned but has a size of (3 mod 4) (does this happen?) so that the
890 last piece requires a byte load/store. */
891 return (xtensa_uimm8 (v
) &&
892 xtensa_uimm8 (v
+ MOVE_MAX
* LARGEST_MOVE_RATIO
));
895 return xtensa_uimm8 (v
);
898 return xtensa_uimm8x2 (v
);
901 return (xtensa_uimm8x4 (v
) && xtensa_uimm8x4 (v
+ 4));
907 return xtensa_uimm8x4 (v
);
911 /* Make normal rtx_code into something we can index from an array */
913 static enum internal_test
914 map_test_to_internal_test (test_code
)
915 enum rtx_code test_code
;
917 enum internal_test test
= ITEST_MAX
;
922 case EQ
: test
= ITEST_EQ
; break;
923 case NE
: test
= ITEST_NE
; break;
924 case GT
: test
= ITEST_GT
; break;
925 case GE
: test
= ITEST_GE
; break;
926 case LT
: test
= ITEST_LT
; break;
927 case LE
: test
= ITEST_LE
; break;
928 case GTU
: test
= ITEST_GTU
; break;
929 case GEU
: test
= ITEST_GEU
; break;
930 case LTU
: test
= ITEST_LTU
; break;
931 case LEU
: test
= ITEST_LEU
; break;
938 /* Generate the code to compare two integer values. The return value is
939 the comparison expression. */
942 gen_int_relational (test_code
, cmp0
, cmp1
, p_invert
)
943 enum rtx_code test_code
; /* relational test (EQ, etc) */
944 rtx cmp0
; /* first operand to compare */
945 rtx cmp1
; /* second operand to compare */
946 int *p_invert
; /* whether branch needs to reverse its test */
949 enum rtx_code test_code
; /* test code to use in insn */
950 int (*const_range_p
) PARAMS ((int)); /* predicate function to check range */
951 int const_add
; /* constant to add (convert LE -> LT) */
952 int reverse_regs
; /* reverse registers in test */
953 int invert_const
; /* != 0 if invert value if cmp1 is constant */
954 int invert_reg
; /* != 0 if invert value if cmp1 is register */
955 int unsignedp
; /* != 0 for unsigned comparisons. */
958 static struct cmp_info info
[ (int)ITEST_MAX
] = {
960 { EQ
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* EQ */
961 { NE
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* NE */
963 { LT
, b4const_or_zero
, 1, 1, 1, 0, 0 }, /* GT */
964 { GE
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* GE */
965 { LT
, b4const_or_zero
, 0, 0, 0, 0, 0 }, /* LT */
966 { GE
, b4const_or_zero
, 1, 1, 1, 0, 0 }, /* LE */
968 { LTU
, xtensa_b4constu
, 1, 1, 1, 0, 1 }, /* GTU */
969 { GEU
, xtensa_b4constu
, 0, 0, 0, 0, 1 }, /* GEU */
970 { LTU
, xtensa_b4constu
, 0, 0, 0, 0, 1 }, /* LTU */
971 { GEU
, xtensa_b4constu
, 1, 1, 1, 0, 1 }, /* LEU */
974 enum internal_test test
;
975 enum machine_mode mode
;
976 struct cmp_info
*p_info
;
978 test
= map_test_to_internal_test (test_code
);
979 if (test
== ITEST_MAX
)
982 p_info
= &info
[ (int)test
];
984 mode
= GET_MODE (cmp0
);
985 if (mode
== VOIDmode
)
986 mode
= GET_MODE (cmp1
);
988 /* Make sure we can handle any constants given to us. */
989 if (GET_CODE (cmp1
) == CONST_INT
)
991 HOST_WIDE_INT value
= INTVAL (cmp1
);
992 unsigned HOST_WIDE_INT uvalue
= (unsigned HOST_WIDE_INT
)value
;
994 /* if the immediate overflows or does not fit in the immediate field,
995 spill it to a register */
997 if ((p_info
->unsignedp
?
998 (uvalue
+ p_info
->const_add
> uvalue
) :
999 (value
+ p_info
->const_add
> value
)) != (p_info
->const_add
> 0))
1001 cmp1
= force_reg (mode
, cmp1
);
1003 else if (!(p_info
->const_range_p
) (value
+ p_info
->const_add
))
1005 cmp1
= force_reg (mode
, cmp1
);
1008 else if ((GET_CODE (cmp1
) != REG
) && (GET_CODE (cmp1
) != SUBREG
))
1010 cmp1
= force_reg (mode
, cmp1
);
1013 /* See if we need to invert the result. */
1014 *p_invert
= ((GET_CODE (cmp1
) == CONST_INT
)
1015 ? p_info
->invert_const
1016 : p_info
->invert_reg
);
1018 /* Comparison to constants, may involve adding 1 to change a LT into LE.
1019 Comparison between two registers, may involve switching operands. */
1020 if (GET_CODE (cmp1
) == CONST_INT
)
1022 if (p_info
->const_add
!= 0)
1023 cmp1
= GEN_INT (INTVAL (cmp1
) + p_info
->const_add
);
1026 else if (p_info
->reverse_regs
)
1033 return gen_rtx (p_info
->test_code
, VOIDmode
, cmp0
, cmp1
);
1037 /* Generate the code to compare two float values. The return value is
1038 the comparison expression. */
1041 gen_float_relational (test_code
, cmp0
, cmp1
)
1042 enum rtx_code test_code
; /* relational test (EQ, etc) */
1043 rtx cmp0
; /* first operand to compare */
1044 rtx cmp1
; /* second operand to compare */
1046 rtx (*gen_fn
) PARAMS ((rtx
, rtx
, rtx
));
1048 int reverse_regs
, invert
;
1052 case EQ
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_seq_sf
; break;
1053 case NE
: reverse_regs
= 0; invert
= 1; gen_fn
= gen_seq_sf
; break;
1054 case LE
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_sle_sf
; break;
1055 case GT
: reverse_regs
= 1; invert
= 0; gen_fn
= gen_slt_sf
; break;
1056 case LT
: reverse_regs
= 0; invert
= 0; gen_fn
= gen_slt_sf
; break;
1057 case GE
: reverse_regs
= 1; invert
= 0; gen_fn
= gen_sle_sf
; break;
1059 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1060 reverse_regs
= 0; invert
= 0; gen_fn
= 0; /* avoid compiler warnings */
1070 brtmp
= gen_rtx_REG (CCmode
, FPCC_REGNUM
);
1071 emit_insn (gen_fn (brtmp
, cmp0
, cmp1
));
1073 return gen_rtx (invert
? EQ
: NE
, VOIDmode
, brtmp
, const0_rtx
);
1078 xtensa_expand_conditional_branch (operands
, test_code
)
1080 enum rtx_code test_code
;
1082 enum cmp_type type
= branch_type
;
1083 rtx cmp0
= branch_cmp
[0];
1084 rtx cmp1
= branch_cmp
[1];
1093 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1097 cmp
= gen_int_relational (test_code
, cmp0
, cmp1
, &invert
);
1101 if (!TARGET_HARD_FLOAT
)
1102 fatal_insn ("bad test", gen_rtx (test_code
, VOIDmode
, cmp0
, cmp1
));
1104 cmp
= gen_float_relational (test_code
, cmp0
, cmp1
);
1108 /* Generate the branch. */
1110 label1
= gen_rtx_LABEL_REF (VOIDmode
, operands
[0]);
1119 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
1120 gen_rtx_IF_THEN_ELSE (VOIDmode
, cmp
,
1127 gen_conditional_move (cmp
)
1130 enum rtx_code code
= GET_CODE (cmp
);
1131 rtx op0
= branch_cmp
[0];
1132 rtx op1
= branch_cmp
[1];
1134 if (branch_type
== CMP_SI
)
1136 /* Jump optimization calls get_condition() which canonicalizes
1137 comparisons like (GE x <const>) to (GT x <const-1>).
1138 Transform those comparisons back to GE, since that is the
1139 comparison supported in Xtensa. We shouldn't have to
1140 transform <LE x const> comparisons, because neither
1141 xtensa_expand_conditional_branch() nor get_condition() will
1144 if ((code
== GT
) && (op1
== constm1_rtx
))
1149 cmp
= gen_rtx (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
1151 if (boolean_operator (cmp
, VOIDmode
))
1153 /* swap the operands to make const0 second */
1154 if (op0
== const0_rtx
)
1160 /* if not comparing against zero, emit a comparison (subtract) */
1161 if (op1
!= const0_rtx
)
1163 op0
= expand_binop (SImode
, sub_optab
, op0
, op1
,
1164 0, 0, OPTAB_LIB_WIDEN
);
1168 else if (branch_operator (cmp
, VOIDmode
))
1170 /* swap the operands to make const0 second */
1171 if (op0
== const0_rtx
)
1178 case LT
: code
= GE
; break;
1179 case GE
: code
= LT
; break;
1184 if (op1
!= const0_rtx
)
1190 return gen_rtx (code
, VOIDmode
, op0
, op1
);
1193 if (TARGET_HARD_FLOAT
&& (branch_type
== CMP_SF
))
1194 return gen_float_relational (code
, op0
, op1
);
1201 xtensa_expand_conditional_move (operands
, isflt
)
1206 rtx (*gen_fn
) PARAMS ((rtx
, rtx
, rtx
, rtx
, rtx
));
1208 if (!(cmp
= gen_conditional_move (operands
[1])))
1212 gen_fn
= (branch_type
== CMP_SI
1213 ? gen_movsfcc_internal0
1214 : gen_movsfcc_internal1
);
1216 gen_fn
= (branch_type
== CMP_SI
1217 ? gen_movsicc_internal0
1218 : gen_movsicc_internal1
);
1220 emit_insn (gen_fn (operands
[0], XEXP (cmp
, 0),
1221 operands
[2], operands
[3], cmp
));
1227 xtensa_expand_scc (operands
)
1230 rtx dest
= operands
[0];
1231 rtx cmp
= operands
[1];
1232 rtx one_tmp
, zero_tmp
;
1233 rtx (*gen_fn
) PARAMS ((rtx
, rtx
, rtx
, rtx
, rtx
));
1235 if (!(cmp
= gen_conditional_move (cmp
)))
1238 one_tmp
= gen_reg_rtx (SImode
);
1239 zero_tmp
= gen_reg_rtx (SImode
);
1240 emit_insn (gen_movsi (one_tmp
, const_true_rtx
));
1241 emit_insn (gen_movsi (zero_tmp
, const0_rtx
));
1243 gen_fn
= (branch_type
== CMP_SI
1244 ? gen_movsicc_internal0
1245 : gen_movsicc_internal1
);
1246 emit_insn (gen_fn (dest
, XEXP (cmp
, 0), one_tmp
, zero_tmp
, cmp
));
1251 /* Emit insns to move operands[1] into operands[0].
1253 Return 1 if we have written out everything that needs to be done to
1254 do the move. Otherwise, return 0 and the caller will emit the move
1258 xtensa_emit_move_sequence (operands
, mode
)
1260 enum machine_mode mode
;
1262 if (CONSTANT_P (operands
[1])
1263 && GET_CODE (operands
[1]) != CONSTANT_P_RTX
1264 && (GET_CODE (operands
[1]) != CONST_INT
1265 || !xtensa_simm12b (INTVAL (operands
[1]))))
1267 xtensa_load_constant (operands
[0], operands
[1]);
1271 if (!(reload_in_progress
| reload_completed
))
1273 if (!xtensa_valid_move (mode
, operands
))
1274 operands
[1] = force_reg (mode
, operands
[1]);
1276 /* Check if this move is copying an incoming argument in a7. If
1277 so, emit the move, followed by the special "set_frame_ptr"
1278 unspec_volatile insn, at the very beginning of the function.
1279 This is necessary because the register allocator will ignore
1280 conflicts with a7 and may assign some other pseudo to a7. If
1281 that pseudo was assigned prior to this move, it would clobber
1282 the incoming argument in a7. By copying the argument out of
1283 a7 as the very first thing, and then immediately following
1284 that with an unspec_volatile to keep the scheduler away, we
1285 should avoid any problems. */
1287 if (a7_overlap_mentioned_p (operands
[1]))
1293 mov
= gen_movsi_internal (operands
[0], operands
[1]);
1296 mov
= gen_movhi_internal (operands
[0], operands
[1]);
1299 mov
= gen_movqi_internal (operands
[0], operands
[1]);
1305 /* Insert the instructions before any other argument copies.
1306 (The set_frame_ptr insn comes _after_ the move, so push it
1308 push_topmost_sequence ();
1309 emit_insn_after (gen_set_frame_ptr (), get_insns ());
1310 emit_insn_after (mov
, get_insns ());
1311 pop_topmost_sequence ();
1317 /* During reload we don't want to emit (subreg:X (mem:Y)) since that
1318 instruction won't be recognized after reload. So we remove the
1319 subreg and adjust mem accordingly. */
1320 if (reload_in_progress
)
1322 operands
[0] = fixup_subreg_mem (operands
[0]);
1323 operands
[1] = fixup_subreg_mem (operands
[1]);
1329 fixup_subreg_mem (x
)
1332 if (GET_CODE (x
) == SUBREG
1333 && GET_CODE (SUBREG_REG (x
)) == REG
1334 && REGNO (SUBREG_REG (x
)) >= FIRST_PSEUDO_REGISTER
)
1337 gen_rtx_SUBREG (GET_MODE (x
),
1338 reg_equiv_mem
[REGNO (SUBREG_REG (x
))],
1340 x
= alter_subreg (&temp
);
1346 /* Try to expand a block move operation to an RTL block move instruction.
1347 If not optimizing or if the block size is not a constant or if the
1348 block is small, the expansion fails and GCC falls back to calling
1351 operands[0] is the destination
1352 operands[1] is the source
1353 operands[2] is the length
1354 operands[3] is the alignment */
1357 xtensa_expand_block_move (operands
)
1360 rtx dest
= operands
[0];
1361 rtx src
= operands
[1];
1362 int bytes
= INTVAL (operands
[2]);
1363 int align
= XINT (operands
[3], 0);
1364 int num_pieces
, move_ratio
;
1366 /* If this is not a fixed size move, just call memcpy */
1367 if (!optimize
|| (GET_CODE (operands
[2]) != CONST_INT
))
1370 /* Anything to move? */
1374 if (align
> MOVE_MAX
)
1377 /* decide whether to expand inline based on the optimization level */
1380 move_ratio
= LARGEST_MOVE_RATIO
;
1381 num_pieces
= (bytes
/ align
) + (bytes
% align
); /* close enough anyway */
1382 if (num_pieces
>= move_ratio
)
1385 /* make sure the memory addresses are valid */
1386 operands
[0] = validize_mem (dest
);
1387 operands
[1] = validize_mem (src
);
1389 emit_insn (gen_movstrsi_internal (operands
[0], operands
[1],
1390 operands
[2], operands
[3]));
1395 /* Emit a sequence of instructions to implement a block move, trying
1396 to hide load delay slots as much as possible. Load N values into
1397 temporary registers, store those N values, and repeat until the
1398 complete block has been moved. N=delay_slots+1 */
1406 xtensa_emit_block_move (operands
, tmpregs
, delay_slots
)
1411 rtx dest
= operands
[0];
1412 rtx src
= operands
[1];
1413 int bytes
= INTVAL (operands
[2]);
1414 int align
= XINT (operands
[3], 0);
1415 rtx from_addr
= XEXP (src
, 0);
1416 rtx to_addr
= XEXP (dest
, 0);
1417 int from_struct
= MEM_IN_STRUCT_P (src
);
1418 int to_struct
= MEM_IN_STRUCT_P (dest
);
1420 int chunk_size
, item_size
;
1421 struct meminsnbuf
*ldinsns
, *stinsns
;
1422 const char *ldname
, *stname
;
1423 enum machine_mode mode
;
1425 if (align
> MOVE_MAX
)
1428 chunk_size
= delay_slots
+ 1;
1430 ldinsns
= (struct meminsnbuf
*)
1431 alloca (chunk_size
* sizeof (struct meminsnbuf
));
1432 stinsns
= (struct meminsnbuf
*)
1433 alloca (chunk_size
* sizeof (struct meminsnbuf
));
1435 mode
= xtensa_find_mode_for_size (item_size
);
1436 item_size
= GET_MODE_SIZE (mode
);
1437 ldname
= xtensa_ld_opcodes
[(int) mode
];
1438 stname
= xtensa_st_opcodes
[(int) mode
];
1444 for (n
= 0; n
< chunk_size
; n
++)
1454 if (bytes
< item_size
)
1456 /* find a smaller item_size which we can load & store */
1458 mode
= xtensa_find_mode_for_size (item_size
);
1459 item_size
= GET_MODE_SIZE (mode
);
1460 ldname
= xtensa_ld_opcodes
[(int) mode
];
1461 stname
= xtensa_st_opcodes
[(int) mode
];
1464 /* record the load instruction opcode and operands */
1465 addr
= plus_constant (from_addr
, offset
);
1466 mem
= gen_rtx_MEM (mode
, addr
);
1467 if (! memory_address_p (mode
, addr
))
1469 MEM_IN_STRUCT_P (mem
) = from_struct
;
1470 ldinsns
[n
].operands
[0] = tmpregs
[n
];
1471 ldinsns
[n
].operands
[1] = mem
;
1472 sprintf (ldinsns
[n
].template, "%s\t%%0, %%1", ldname
);
1474 /* record the store instruction opcode and operands */
1475 addr
= plus_constant (to_addr
, offset
);
1476 mem
= gen_rtx_MEM (mode
, addr
);
1477 if (! memory_address_p (mode
, addr
))
1479 MEM_IN_STRUCT_P (mem
) = to_struct
;
1480 stinsns
[n
].operands
[0] = tmpregs
[n
];
1481 stinsns
[n
].operands
[1] = mem
;
1482 sprintf (stinsns
[n
].template, "%s\t%%0, %%1", stname
);
1484 offset
+= item_size
;
1488 /* now output the loads followed by the stores */
1489 for (n
= 0; n
< chunk_size
; n
++)
1490 output_asm_insn (ldinsns
[n
].template, ldinsns
[n
].operands
);
1491 for (n
= 0; n
< chunk_size
; n
++)
1492 output_asm_insn (stinsns
[n
].template, stinsns
[n
].operands
);
1497 static enum machine_mode
1498 xtensa_find_mode_for_size (item_size
)
1501 enum machine_mode mode
, tmode
;
1507 /* find mode closest to but not bigger than item_size */
1508 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
1509 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
1510 if (GET_MODE_SIZE (tmode
) <= item_size
)
1512 if (mode
== VOIDmode
)
1515 item_size
= GET_MODE_SIZE (mode
);
1517 if (xtensa_ld_opcodes
[(int) mode
]
1518 && xtensa_st_opcodes
[(int) mode
])
1521 /* cannot load & store this mode; try something smaller */
1530 xtensa_expand_nonlocal_goto (operands
)
1533 rtx goto_handler
= operands
[1];
1534 rtx containing_fp
= operands
[3];
1536 /* generate a call to "__xtensa_nonlocal_goto" (in libgcc); the code
1537 is too big to generate in-line */
1539 if (GET_CODE (containing_fp
) != REG
)
1540 containing_fp
= force_reg (Pmode
, containing_fp
);
1542 goto_handler
= replace_rtx (copy_rtx (goto_handler
),
1543 virtual_stack_vars_rtx
,
1546 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__xtensa_nonlocal_goto"),
1548 containing_fp
, Pmode
,
1549 goto_handler
, Pmode
);
1553 static struct machine_function
*
1554 xtensa_init_machine_status ()
1556 return ggc_alloc_cleared (sizeof (struct machine_function
));
1561 xtensa_setup_frame_addresses ()
1563 /* Set flag to cause FRAME_POINTER_REQUIRED to be set. */
1564 cfun
->machine
->accesses_prev_frame
= 1;
1567 (gen_rtx_SYMBOL_REF (Pmode
, "__xtensa_libgcc_window_spill"),
1572 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1573 a comment showing where the end of the loop is. However, if there is a
1574 label or a branch at the end of the loop then we need to place a nop
1575 there. If the loop ends with a label we need the nop so that branches
1576 targetting that label will target the nop (and thus remain in the loop),
1577 instead of targetting the instruction after the loop (and thus exiting
1578 the loop). If the loop ends with a branch, we need the nop in case the
1579 branch is targetting a location inside the loop. When the branch
1580 executes it will cause the loop count to be decremented even if it is
1581 taken (because it is the last instruction in the loop), so we need to
1582 nop after the branch to prevent the loop count from being decremented
1583 when the branch is taken. */
1586 xtensa_emit_loop_end (insn
, operands
)
1592 for (insn
= PREV_INSN (insn
); insn
&& !done
; insn
= PREV_INSN (insn
))
1594 switch (GET_CODE (insn
))
1601 output_asm_insn ("nop.n", operands
);
1607 rtx body
= PATTERN (insn
);
1609 if (GET_CODE (body
) == JUMP_INSN
)
1611 output_asm_insn ("nop.n", operands
);
1614 else if ((GET_CODE (body
) != USE
)
1615 && (GET_CODE (body
) != CLOBBER
))
1622 output_asm_insn ("# loop end for %0", operands
);
1627 xtensa_emit_call (callop
, operands
)
1631 static char result
[64];
1632 rtx tgt
= operands
[callop
];
1634 if (GET_CODE (tgt
) == CONST_INT
)
1635 sprintf (result
, "call8\t0x%x", INTVAL (tgt
));
1636 else if (register_operand (tgt
, VOIDmode
))
1637 sprintf (result
, "callx8\t%%%d", callop
);
1639 sprintf (result
, "call8\t%%%d", callop
);
1645 /* Return the stabs register number to use for 'regno'. */
1648 xtensa_dbx_register_number (regno
)
1653 if (GP_REG_P (regno
)) {
1654 regno
-= GP_REG_FIRST
;
1657 else if (BR_REG_P (regno
)) {
1658 regno
-= BR_REG_FIRST
;
1661 else if (FP_REG_P (regno
)) {
1662 regno
-= FP_REG_FIRST
;
1663 /* The current numbering convention is that TIE registers are
1664 numbered in libcc order beginning with 256. We can't guarantee
1665 that the FP registers will come first, so the following is just
1666 a guess. It seems like we should make a special case for FP
1667 registers and give them fixed numbers < 256. */
1670 else if (ACC_REG_P (regno
))
1676 /* When optimizing, we sometimes get asked about pseudo-registers
1677 that don't represent hard registers. Return 0 for these. */
1681 return first
+ regno
;
1685 /* Argument support functions. */
1687 /* Initialize CUMULATIVE_ARGS for a function. */
1690 init_cumulative_args (cum
, fntype
, libname
)
1691 CUMULATIVE_ARGS
*cum
; /* argument info to initialize */
1692 tree fntype ATTRIBUTE_UNUSED
; /* tree ptr for function decl */
1693 rtx libname ATTRIBUTE_UNUSED
; /* SYMBOL_REF of library name or 0 */
1698 /* Advance the argument to the next argument position. */
1701 function_arg_advance (cum
, mode
, type
)
1702 CUMULATIVE_ARGS
*cum
; /* current arg information */
1703 enum machine_mode mode
; /* current arg mode */
1704 tree type
; /* type of the argument or 0 if lib support */
1709 arg_words
= &cum
->arg_words
;
1710 max
= MAX_ARGS_IN_REGISTERS
;
1712 words
= (((mode
!= BLKmode
)
1713 ? (int) GET_MODE_SIZE (mode
)
1714 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1716 if ((*arg_words
+ words
> max
) && (*arg_words
< max
))
1719 *arg_words
+= words
;
1723 /* Return an RTL expression containing the register for the given mode,
1724 or 0 if the argument is to be passed on the stack. */
1727 function_arg (cum
, mode
, type
, incoming_p
)
1728 CUMULATIVE_ARGS
*cum
; /* current arg information */
1729 enum machine_mode mode
; /* current arg mode */
1730 tree type
; /* type of the argument or 0 if lib support */
1731 int incoming_p
; /* computing the incoming registers? */
1733 int regbase
, words
, max
;
1736 enum machine_mode result_mode
;
1738 arg_words
= &cum
->arg_words
;
1739 regbase
= (incoming_p
? GP_ARG_FIRST
: GP_OUTGOING_ARG_FIRST
);
1740 max
= MAX_ARGS_IN_REGISTERS
;
1742 words
= (((mode
!= BLKmode
)
1743 ? (int) GET_MODE_SIZE (mode
)
1744 : int_size_in_bytes (type
)) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
1746 if (type
&& (TYPE_ALIGN (type
) > BITS_PER_WORD
))
1747 *arg_words
+= (*arg_words
& 1);
1749 if (*arg_words
+ words
> max
)
1752 regno
= regbase
+ *arg_words
;
1753 result_mode
= (mode
== BLKmode
? TYPE_MODE (type
) : mode
);
1755 /* We need to make sure that references to a7 are represented with
1756 rtx that is not equal to hard_frame_pointer_rtx. For BLKmode and
1757 modes bigger than 2 words (because we only have patterns for
1758 modes of 2 words or smaller), we can't control the expansion
1759 unless we explicitly list the individual registers in a PARALLEL. */
1761 if ((mode
== BLKmode
|| words
> 2)
1763 && regno
+ words
> A7_REG
)
1768 result
= gen_rtx_PARALLEL (result_mode
, rtvec_alloc (words
));
1769 for (n
= 0; n
< words
; n
++)
1771 XVECEXP (result
, 0, n
) =
1772 gen_rtx_EXPR_LIST (VOIDmode
,
1773 gen_raw_REG (SImode
, regno
+ n
),
1774 GEN_INT (n
* UNITS_PER_WORD
));
1779 return gen_raw_REG (result_mode
, regno
);
1787 enum machine_mode mode
;
1789 if (!TARGET_BOOLEANS
&& TARGET_HARD_FLOAT
)
1790 error ("boolean registers required for the floating-point option");
1792 /* set up the tables of ld/st opcode names for block moves */
1793 xtensa_ld_opcodes
[(int) SImode
] = "l32i";
1794 xtensa_ld_opcodes
[(int) HImode
] = "l16ui";
1795 xtensa_ld_opcodes
[(int) QImode
] = "l8ui";
1796 xtensa_st_opcodes
[(int) SImode
] = "s32i";
1797 xtensa_st_opcodes
[(int) HImode
] = "s16i";
1798 xtensa_st_opcodes
[(int) QImode
] = "s8i";
1800 xtensa_char_to_class
['q'] = SP_REG
;
1801 xtensa_char_to_class
['a'] = GR_REGS
;
1802 xtensa_char_to_class
['b'] = ((TARGET_BOOLEANS
) ? BR_REGS
: NO_REGS
);
1803 xtensa_char_to_class
['f'] = ((TARGET_HARD_FLOAT
) ? FP_REGS
: NO_REGS
);
1804 xtensa_char_to_class
['A'] = ((TARGET_MAC16
) ? ACC_REG
: NO_REGS
);
1805 xtensa_char_to_class
['B'] = ((TARGET_SEXT
) ? GR_REGS
: NO_REGS
);
1806 xtensa_char_to_class
['C'] = ((TARGET_MUL16
) ? GR_REGS
: NO_REGS
);
1807 xtensa_char_to_class
['D'] = ((TARGET_DENSITY
) ? GR_REGS
: NO_REGS
);
1808 xtensa_char_to_class
['d'] = ((TARGET_DENSITY
) ? AR_REGS
: NO_REGS
);
1810 /* Set up array giving whether a given register can hold a given mode. */
1811 for (mode
= VOIDmode
;
1812 mode
!= MAX_MACHINE_MODE
;
1813 mode
= (enum machine_mode
) ((int) mode
+ 1))
1815 int size
= GET_MODE_SIZE (mode
);
1816 enum mode_class
class = GET_MODE_CLASS (mode
);
1818 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1822 if (ACC_REG_P (regno
))
1823 temp
= (TARGET_MAC16
&&
1824 (class == MODE_INT
) && (size
<= UNITS_PER_WORD
));
1825 else if (GP_REG_P (regno
))
1826 temp
= ((regno
& 1) == 0 || (size
<= UNITS_PER_WORD
));
1827 else if (FP_REG_P (regno
))
1828 temp
= (TARGET_HARD_FLOAT
&& (mode
== SFmode
));
1829 else if (BR_REG_P (regno
))
1830 temp
= (TARGET_BOOLEANS
&& (mode
== CCmode
));
1834 xtensa_hard_regno_mode_ok
[(int) mode
][regno
] = temp
;
1838 init_machine_status
= xtensa_init_machine_status
;
1840 /* Check PIC settings. There's no need for -fPIC on Xtensa and
1841 some targets need to always use PIC. */
1842 if (XTENSA_ALWAYS_PIC
)
1845 warning ("-f%s ignored (all code is position independent)",
1846 (flag_pic
> 1 ? "PIC" : "pic"));
/* A C compound statement to output to stdio stream STREAM the
   assembler syntax for an instruction operand X.  X is an RTL
   expression.

   CODE is a value that can be used to specify one of several ways
   of printing the operand.  It is used when identical operands
   must be printed differently depending on the context.  CODE
   comes from the '%' specification that was used to request
   printing of the operand.  If the specification was just '%DIGIT'
   then CODE is 0; if the specification was '%LTR DIGIT' then CODE
   is the ASCII code for LTR.

   If X is a register, this macro should print the register's name.
   The names can be found in an array 'reg_names' whose type is
   'char *[]'.  'reg_names' is initialized from 'REGISTER_NAMES'.

   When the machine description has a specification '%PUNCT' (a '%'
   followed by a punctuation character), this macro is called with
   a null pointer for X and the punctuation character for CODE.

   'a', 'c', 'l', and 'n' are reserved.

   The Xtensa specific codes are:

   'd'  CONST_INT, print as signed decimal
   'x'  CONST_INT, print as signed hexadecimal
   'K'  CONST_INT, print number of bits in mask for EXTUI
   'R'  CONST_INT, print (X & 0x1f)
   'L'  CONST_INT, print ((32 - X) & 0x1f)
   'D'  REG, print second register of double-word register operand
   'N'  MEM, print address of next word following a memory operand
   'v'  MEM, if memory reference is volatile, output a MEMW before it
*/

static void
printx (file, val)
     FILE *file;
     signed int val;
{
  /* print a hexadecimal value in a nice way */
  if ((val > -0xa) && (val < 0xa))
    fprintf (file, "%d", val);
  else if (val < 0)
    fprintf (file, "-0x%x", -val);
  else
    fprintf (file, "0x%x", val);
}
1904 print_operand (file
, op
, letter
)
1905 FILE *file
; /* file to write to */
1906 rtx op
; /* operand to print */
1907 int letter
; /* %<letter> or 0 */
1912 error ("PRINT_OPERAND null pointer");
1914 code
= GET_CODE (op
);
1920 int regnum
= xt_true_regnum (op
);
1923 fprintf (file
, "%s", reg_names
[regnum
]);
1928 /* For a volatile memory reference, emit a MEMW before the
1932 if (MEM_VOLATILE_P (op
) && TARGET_SERIALIZE_VOLATILE
)
1933 fprintf (file
, "memw\n\t");
1936 else if (letter
== 'N')
1938 enum machine_mode mode
;
1939 switch (GET_MODE (op
))
1941 case DFmode
: mode
= SFmode
; break;
1942 case DImode
: mode
= SImode
; break;
1945 op
= adjust_address (op
, mode
, 4);
1948 output_address (XEXP (op
, 0));
1957 unsigned val
= INTVAL (op
);
1963 if ((val
!= 0) || (num_bits
== 0) || (num_bits
> 16))
1964 fatal_insn ("invalid mask", op
);
1966 fprintf (file
, "%d", num_bits
);
1971 fprintf (file
, "%d", (32 - INTVAL (op
)) & 0x1f);
1975 fprintf (file
, "%d", INTVAL (op
) & 0x1f);
1979 printx (file
, INTVAL (op
));
1984 fprintf (file
, "%d", INTVAL (op
));
1991 output_addr_const (file
, op
);
1996 /* A C compound statement to output to stdio stream STREAM the
1997 assembler syntax for an instruction operand that is a memory
1998 reference whose address is ADDR. ADDR is an RTL expression. */
2001 print_operand_address (file
, addr
)
2006 error ("PRINT_OPERAND_ADDRESS, null pointer");
2008 switch (GET_CODE (addr
))
2011 fatal_insn ("invalid address", addr
);
2015 fprintf (file
, "%s, 0", reg_names
[REGNO (addr
)]);
2021 rtx offset
= (rtx
)0;
2022 rtx arg0
= XEXP (addr
, 0);
2023 rtx arg1
= XEXP (addr
, 1);
2025 if (GET_CODE (arg0
) == REG
)
2030 else if (GET_CODE (arg1
) == REG
)
2036 fatal_insn ("no register in address", addr
);
2038 if (CONSTANT_P (offset
))
2040 fprintf (file
, "%s, ", reg_names
[REGNO (reg
)]);
2041 output_addr_const (file
, offset
);
2044 fatal_insn ("address offset not a constant", addr
);
2052 output_addr_const (file
, addr
);
/* Emit either a label, .comm, or .lcomm directive.  */

void
xtensa_declare_object (file, name, init_string, final_string, size)
     FILE *file;
     char *name;
     char *init_string;
     char *final_string;
     int size;
{
  fputs (init_string, file);		/* "", "\t.comm\t", or "\t.lcomm\t" */
  assemble_name (file, name);
  fprintf (file, final_string, size);	/* ":\n", ",%u\n", ",%u\n" */
}
2075 xtensa_output_literal (file
, x
, mode
, labelno
)
2078 enum machine_mode mode
;
2085 fprintf (file
, "\t.literal .LC%u, ", (unsigned) labelno
);
2087 switch (GET_MODE_CLASS (mode
))
2090 if (GET_CODE (x
) != CONST_DOUBLE
)
2093 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2097 REAL_VALUE_TO_TARGET_SINGLE (r
, value_long
[0]);
2098 fprintf (file
, "0x%08lx\n", value_long
[0]);
2102 REAL_VALUE_TO_TARGET_DOUBLE (r
, value_long
);
2103 fprintf (file
, "0x%08lx, 0x%08lx\n",
2104 value_long
[0], value_long
[1]);
2114 case MODE_PARTIAL_INT
:
2115 size
= GET_MODE_SIZE (mode
);
2118 output_addr_const (file
, x
);
2123 output_addr_const (file
, operand_subword (x
, 0, 0, DImode
));
2125 output_addr_const (file
, operand_subword (x
, 1, 0, DImode
));
2138 /* Return the bytes needed to compute the frame pointer from the current
2141 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2142 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2145 compute_frame_size (size
)
2146 int size
; /* # of var. bytes allocated */
2148 /* add space for the incoming static chain value */
2149 if (current_function_needs_context
)
2150 size
+= (1 * UNITS_PER_WORD
);
2152 xtensa_current_frame_size
=
2153 XTENSA_STACK_ALIGN (size
2154 + current_function_outgoing_args_size
2155 + (WINDOW_SIZE
* UNITS_PER_WORD
));
2156 return xtensa_current_frame_size
;
2161 xtensa_frame_pointer_required ()
2163 /* The code to expand builtin_frame_addr and builtin_return_addr
2164 currently uses the hard_frame_pointer instead of frame_pointer.
2165 This seems wrong but maybe it's necessary for other architectures.
2166 This function is derived from the i386 code. */
2168 if (cfun
->machine
->accesses_prev_frame
)
2176 xtensa_reorg (first
)
2179 rtx insn
, set_frame_ptr_insn
= 0;
2181 unsigned long tsize
= compute_frame_size (get_frame_size ());
2182 if (tsize
< (1 << (12+3)))
2183 frame_size_const
= 0;
2186 frame_size_const
= force_const_mem (SImode
, GEN_INT (tsize
- 16));;
2188 /* make sure the constant is used so it doesn't get eliminated
2189 from the constant pool */
2190 emit_insn_before (gen_rtx_USE (SImode
, frame_size_const
), first
);
2193 if (!frame_pointer_needed
)
2196 /* Search all instructions, looking for the insn that sets up the
2197 frame pointer. This search will fail if the function does not
2198 have an incoming argument in $a7, but in that case, we can just
2199 set up the frame pointer at the very beginning of the
2202 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2209 pat
= PATTERN (insn
);
2210 if (GET_CODE (pat
) == UNSPEC_VOLATILE
2211 && (XINT (pat
, 1) == UNSPECV_SET_FP
))
2213 set_frame_ptr_insn
= insn
;
2218 if (set_frame_ptr_insn
)
2220 /* for all instructions prior to set_frame_ptr_insn, replace
2221 hard_frame_pointer references with stack_pointer */
2222 for (insn
= first
; insn
!= set_frame_ptr_insn
; insn
= NEXT_INSN (insn
))
2225 PATTERN (insn
) = replace_rtx (copy_rtx (PATTERN (insn
)),
2226 hard_frame_pointer_rtx
,
2232 /* emit the frame pointer move immediately after the NOTE that starts
2234 emit_insn_after (gen_movsi (hard_frame_pointer_rtx
,
2235 stack_pointer_rtx
), first
);
2240 /* Set up the stack and frame (if desired) for the function. */
2243 xtensa_function_prologue (file
, size
)
2245 int size ATTRIBUTE_UNUSED
;
2247 unsigned long tsize
= compute_frame_size (get_frame_size ());
2249 if (frame_pointer_needed
)
2250 fprintf (file
, "\t.frame\ta7, %ld\n", tsize
);
2252 fprintf (file
, "\t.frame\tsp, %ld\n", tsize
);
2255 if (tsize
< (1 << (12+3)))
2257 fprintf (file
, "\tentry\tsp, %ld\n", tsize
);
2261 fprintf (file
, "\tentry\tsp, 16\n");
2263 /* use a8 as a temporary since a0-a7 may be live */
2264 fprintf (file
, "\tl32r\ta8, ");
2265 print_operand (file
, frame_size_const
, 0);
2266 fprintf (file
, "\n\tsub\ta8, sp, a8\n");
2267 fprintf (file
, "\tmovsp\tsp, a8\n");
2272 /* Do any necessary cleanup after a function to restore
2273 stack, frame, and regs. */
2276 xtensa_function_epilogue (file
, size
)
2278 int size ATTRIBUTE_UNUSED
;
2280 rtx insn
= get_last_insn ();
2281 /* If the last insn was a BARRIER, we don't have to write anything. */
2282 if (GET_CODE (insn
) == NOTE
)
2283 insn
= prev_nonnote_insn (insn
);
2284 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
2285 fprintf (file
, TARGET_DENSITY
? "\tretw.n\n" : "\tretw\n");
2287 xtensa_current_frame_size
= 0;
2291 /* Create the va_list data type.
2292 This structure is set up by __builtin_saveregs. The __va_reg
2293 field points to a stack-allocated region holding the contents of the
2294 incoming argument registers. The __va_ndx field is an index initialized
2295 to the position of the first unnamed (variable) argument. This same index
2296 is also used to address the arguments passed in memory. Thus, the
2297 __va_stk field is initialized to point to the position of the first
2298 argument in memory offset to account for the arguments passed in
2299 registers. E.G., if there are 6 argument registers, and each register is
2300 4 bytes, then __va_stk is set to $sp - (6 * 4); then __va_reg[N*4]
2301 references argument word N for 0 <= N < 6, and __va_stk[N*4] references
2302 argument word N for N >= 6. */
2305 xtensa_build_va_list (void)
2307 tree f_stk
, f_reg
, f_ndx
, record
, type_decl
;
2309 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
2310 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
2312 f_stk
= build_decl (FIELD_DECL
, get_identifier ("__va_stk"),
2314 f_reg
= build_decl (FIELD_DECL
, get_identifier ("__va_reg"),
2316 f_ndx
= build_decl (FIELD_DECL
, get_identifier ("__va_ndx"),
2319 DECL_FIELD_CONTEXT (f_stk
) = record
;
2320 DECL_FIELD_CONTEXT (f_reg
) = record
;
2321 DECL_FIELD_CONTEXT (f_ndx
) = record
;
2323 TREE_CHAIN (record
) = type_decl
;
2324 TYPE_NAME (record
) = type_decl
;
2325 TYPE_FIELDS (record
) = f_stk
;
2326 TREE_CHAIN (f_stk
) = f_reg
;
2327 TREE_CHAIN (f_reg
) = f_ndx
;
2329 layout_type (record
);
2334 /* Save the incoming argument registers on the stack. Returns the
2335 address of the saved registers. */
2338 xtensa_builtin_saveregs ()
2341 int arg_words
= current_function_arg_words
;
2342 int gp_left
= MAX_ARGS_IN_REGISTERS
- arg_words
;
2348 /* allocate the general-purpose register space */
2349 gp_regs
= assign_stack_local
2350 (BLKmode
, MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, -1);
2351 set_mem_alias_set (gp_regs
, get_varargs_alias_set ());
2353 /* Now store the incoming registers. */
2354 dest
= change_address (gp_regs
, SImode
,
2355 plus_constant (XEXP (gp_regs
, 0),
2356 arg_words
* UNITS_PER_WORD
));
2358 /* Note: Don't use move_block_from_reg() here because the incoming
2359 argument in a7 cannot be represented by hard_frame_pointer_rtx.
2360 Instead, call gen_raw_REG() directly so that we get a distinct
2361 instance of (REG:SI 7). */
2362 for (i
= 0; i
< gp_left
; i
++)
2364 emit_move_insn (operand_subword (dest
, i
, 1, BLKmode
),
2365 gen_raw_REG (SImode
, GP_ARG_FIRST
+ arg_words
+ i
));
2368 return XEXP (gp_regs
, 0);
2372 /* Implement `va_start' for varargs and stdarg. We look at the
2373 current function to fill in an initial va_list. */
2376 xtensa_va_start (stdarg_p
, valist
, nextarg
)
2377 int stdarg_p ATTRIBUTE_UNUSED
;
2379 rtx nextarg ATTRIBUTE_UNUSED
;
2387 arg_words
= current_function_args_info
.arg_words
;
2389 f_stk
= TYPE_FIELDS (va_list_type_node
);
2390 f_reg
= TREE_CHAIN (f_stk
);
2391 f_ndx
= TREE_CHAIN (f_reg
);
2393 stk
= build (COMPONENT_REF
, TREE_TYPE (f_stk
), valist
, f_stk
);
2394 reg
= build (COMPONENT_REF
, TREE_TYPE (f_reg
), valist
, f_reg
);
2395 ndx
= build (COMPONENT_REF
, TREE_TYPE (f_ndx
), valist
, f_ndx
);
2397 /* Call __builtin_saveregs; save the result in __va_reg */
2398 current_function_arg_words
= arg_words
;
2399 u
= make_tree (ptr_type_node
, expand_builtin_saveregs ());
2400 t
= build (MODIFY_EXPR
, ptr_type_node
, reg
, u
);
2401 TREE_SIDE_EFFECTS (t
) = 1;
2402 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2404 /* Set the __va_stk member to $arg_ptr - (size of __va_reg area) */
2405 u
= make_tree (ptr_type_node
, virtual_incoming_args_rtx
);
2406 u
= fold (build (PLUS_EXPR
, ptr_type_node
, u
,
2407 build_int_2 (-MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, -1)));
2408 t
= build (MODIFY_EXPR
, ptr_type_node
, stk
, u
);
2409 TREE_SIDE_EFFECTS (t
) = 1;
2410 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2412 /* Set the __va_ndx member. */
2413 u
= build_int_2 (arg_words
* UNITS_PER_WORD
, 0);
2414 t
= build (MODIFY_EXPR
, integer_type_node
, ndx
, u
);
2415 TREE_SIDE_EFFECTS (t
) = 1;
2416 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2420 /* Implement `va_arg'. */
2423 xtensa_va_arg (valist
, type
)
2429 tree tmp
, addr_tree
, type_size
;
2430 rtx array
, orig_ndx
, r
, addr
, size
, va_size
;
2431 rtx lab_false
, lab_over
, lab_false2
;
2433 f_stk
= TYPE_FIELDS (va_list_type_node
);
2434 f_reg
= TREE_CHAIN (f_stk
);
2435 f_ndx
= TREE_CHAIN (f_reg
);
2437 stk
= build (COMPONENT_REF
, TREE_TYPE (f_stk
), valist
, f_stk
);
2438 reg
= build (COMPONENT_REF
, TREE_TYPE (f_reg
), valist
, f_reg
);
2439 ndx
= build (COMPONENT_REF
, TREE_TYPE (f_ndx
), valist
, f_ndx
);
2441 type_size
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
));
2443 va_size
= gen_reg_rtx (SImode
);
2444 tmp
= fold (build (MULT_EXPR
, sizetype
,
2445 fold (build (TRUNC_DIV_EXPR
, sizetype
,
2446 fold (build (PLUS_EXPR
, sizetype
,
2448 size_int (UNITS_PER_WORD
- 1))),
2449 size_int (UNITS_PER_WORD
))),
2450 size_int (UNITS_PER_WORD
)));
2451 r
= expand_expr (tmp
, va_size
, SImode
, EXPAND_NORMAL
);
2453 emit_move_insn (va_size
, r
);
2456 /* First align __va_ndx to a double word boundary if necessary for this arg:
2458 if (__alignof__ (TYPE) > 4)
2459 (AP).__va_ndx = (((AP).__va_ndx + 7) & -8)
2462 if (TYPE_ALIGN (type
) > BITS_PER_WORD
)
2464 tmp
= build (PLUS_EXPR
, integer_type_node
, ndx
,
2465 build_int_2 ((2 * UNITS_PER_WORD
) - 1, 0));
2466 tmp
= build (BIT_AND_EXPR
, integer_type_node
, tmp
,
2467 build_int_2 (-2 * UNITS_PER_WORD
, -1));
2468 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2469 TREE_SIDE_EFFECTS (tmp
) = 1;
2470 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2474 /* Increment __va_ndx to point past the argument:
2476 orig_ndx = (AP).__va_ndx;
2477 (AP).__va_ndx += __va_size (TYPE);
2480 orig_ndx
= gen_reg_rtx (SImode
);
2481 r
= expand_expr (ndx
, orig_ndx
, SImode
, EXPAND_NORMAL
);
2483 emit_move_insn (orig_ndx
, r
);
2485 tmp
= build (PLUS_EXPR
, integer_type_node
, ndx
,
2486 make_tree (intSI_type_node
, va_size
));
2487 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2488 TREE_SIDE_EFFECTS (tmp
) = 1;
2489 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2492 /* Check if the argument is in registers:
2494 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2495 && !MUST_PASS_IN_STACK (type))
2496 __array = (AP).__va_reg;
2499 array
= gen_reg_rtx (Pmode
);
2501 lab_over
= NULL_RTX
;
2502 if (!MUST_PASS_IN_STACK (VOIDmode
, type
))
2504 lab_false
= gen_label_rtx ();
2505 lab_over
= gen_label_rtx ();
2507 emit_cmp_and_jump_insns (expand_expr (ndx
, NULL_RTX
, SImode
,
2509 GEN_INT (MAX_ARGS_IN_REGISTERS
2511 GT
, const1_rtx
, SImode
, 0, lab_false
);
2513 r
= expand_expr (reg
, array
, Pmode
, EXPAND_NORMAL
);
2515 emit_move_insn (array
, r
);
2517 emit_jump_insn (gen_jump (lab_over
));
2519 emit_label (lab_false
);
2522 /* ...otherwise, the argument is on the stack (never split between
2523 registers and the stack -- change __va_ndx if necessary):
2527 if (orig_ndx < __MAX_ARGS_IN_REGISTERS * 4)
2528 (AP).__va_ndx = __MAX_ARGS_IN_REGISTERS * 4 + __va_size (TYPE);
2529 __array = (AP).__va_stk;
2533 lab_false2
= gen_label_rtx ();
2534 emit_cmp_and_jump_insns (orig_ndx
,
2535 GEN_INT (MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
),
2536 GE
, const1_rtx
, SImode
, 0, lab_false2
);
2538 tmp
= build (PLUS_EXPR
, sizetype
, make_tree (intSI_type_node
, va_size
),
2539 build_int_2 (MAX_ARGS_IN_REGISTERS
* UNITS_PER_WORD
, 0));
2540 tmp
= build (MODIFY_EXPR
, integer_type_node
, ndx
, tmp
);
2541 TREE_SIDE_EFFECTS (tmp
) = 1;
2542 expand_expr (tmp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2544 emit_label (lab_false2
);
2546 r
= expand_expr (stk
, array
, Pmode
, EXPAND_NORMAL
);
2548 emit_move_insn (array
, r
);
2550 if (lab_over
!= NULL_RTX
)
2551 emit_label (lab_over
);
2554 /* Given the base array pointer (__array) and index to the subsequent
2555 argument (__va_ndx), find the address:
2557 __array + (AP).__va_ndx - (BYTES_BIG_ENDIAN && sizeof (TYPE) < 4
2561 The results are endian-dependent because values smaller than one word
2562 are aligned differently.
2565 size
= gen_reg_rtx (SImode
);
2566 emit_move_insn (size
, va_size
);
2568 if (BYTES_BIG_ENDIAN
)
2570 rtx lab_use_va_size
= gen_label_rtx ();
2572 emit_cmp_and_jump_insns (expand_expr (type_size
, NULL_RTX
, SImode
,
2574 GEN_INT (PARM_BOUNDARY
/ BITS_PER_UNIT
),
2575 GE
, const1_rtx
, SImode
, 0, lab_use_va_size
);
2577 r
= expand_expr (type_size
, size
, SImode
, EXPAND_NORMAL
);
2579 emit_move_insn (size
, r
);
2581 emit_label (lab_use_va_size
);
2584 addr_tree
= build (PLUS_EXPR
, ptr_type_node
,
2585 make_tree (ptr_type_node
, array
),
2587 addr_tree
= build (MINUS_EXPR
, ptr_type_node
, addr_tree
,
2588 make_tree (intSI_type_node
, size
));
2589 addr
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
2590 addr
= copy_to_reg (addr
);
2596 xtensa_preferred_reload_class (x
, class)
2598 enum reg_class
class;
2600 if (CONSTANT_P (x
) && GET_CODE (x
) == CONST_DOUBLE
)
2603 /* Don't use sp for reloads! */
2604 if (class == AR_REGS
)
2612 xtensa_secondary_reload_class (class, mode
, x
, isoutput
)
2613 enum reg_class
class;
2614 enum machine_mode mode ATTRIBUTE_UNUSED
;
2620 if (GET_CODE (x
) == SIGN_EXTEND
)
2622 regno
= xt_true_regnum (x
);
2626 if (class == FP_REGS
&& constantpool_mem_p (x
))
2630 if (ACC_REG_P (regno
))
2631 return (class == GR_REGS
? NO_REGS
: GR_REGS
);
2632 if (class == ACC_REG
)
2633 return (GP_REG_P (regno
) ? NO_REGS
: GR_REGS
);
2640 order_regs_for_local_alloc ()
2642 if (!leaf_function_p ())
2644 memcpy (reg_alloc_order
, reg_nonleaf_alloc_order
,
2645 FIRST_PSEUDO_REGISTER
* sizeof (int));
2649 int i
, num_arg_regs
;
2652 /* use the AR registers in increasing order (skipping a0 and a1)
2653 but save the incoming argument registers for a last resort */
2654 num_arg_regs
= current_function_args_info
.arg_words
;
2655 if (num_arg_regs
> MAX_ARGS_IN_REGISTERS
)
2656 num_arg_regs
= MAX_ARGS_IN_REGISTERS
;
2657 for (i
= GP_ARG_FIRST
; i
< 16 - num_arg_regs
; i
++)
2658 reg_alloc_order
[nxt
++] = i
+ num_arg_regs
;
2659 for (i
= 0; i
< num_arg_regs
; i
++)
2660 reg_alloc_order
[nxt
++] = GP_ARG_FIRST
+ i
;
2662 /* list the FP registers in order for now */
2663 for (i
= 0; i
< 16; i
++)
2664 reg_alloc_order
[nxt
++] = FP_REG_FIRST
+ i
;
2666 /* GCC requires that we list *all* the registers.... */
2667 reg_alloc_order
[nxt
++] = 0; /* a0 = return address */
2668 reg_alloc_order
[nxt
++] = 1; /* a1 = stack pointer */
2669 reg_alloc_order
[nxt
++] = 16; /* pseudo frame pointer */
2670 reg_alloc_order
[nxt
++] = 17; /* pseudo arg pointer */
2672 /* list the coprocessor registers in order */
2673 for (i
= 0; i
< BR_REG_NUM
; i
++)
2674 reg_alloc_order
[nxt
++] = BR_REG_FIRST
+ i
;
2676 reg_alloc_order
[nxt
++] = ACC_REG_FIRST
; /* MAC16 accumulator */
2681 /* A customized version of reg_overlap_mentioned_p that only looks for
2682 references to a7 (as opposed to hard_frame_pointer_rtx). */
2685 a7_overlap_mentioned_p (x
)
2689 unsigned int x_regno
;
2692 if (GET_CODE (x
) == REG
)
2694 x_regno
= REGNO (x
);
2695 return (x
!= hard_frame_pointer_rtx
2696 && x_regno
< A7_REG
+ 1
2697 && x_regno
+ HARD_REGNO_NREGS (A7_REG
, GET_MODE (x
)) > A7_REG
);
2700 if (GET_CODE (x
) == SUBREG
2701 && GET_CODE (SUBREG_REG (x
)) == REG
2702 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
2704 x_regno
= subreg_regno (x
);
2705 return (SUBREG_REG (x
) != hard_frame_pointer_rtx
2706 && x_regno
< A7_REG
+ 1
2707 && x_regno
+ HARD_REGNO_NREGS (A7_REG
, GET_MODE (x
)) > A7_REG
);
2710 /* X does not match, so try its subexpressions. */
2711 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2712 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2716 if (a7_overlap_mentioned_p (XEXP (x
, i
)))
2719 else if (fmt
[i
] == 'E')
2721 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
2722 if (a7_overlap_mentioned_p (XVECEXP (x
, i
, j
)))
2730 /* The literal pool stays with the function. */
2733 xtensa_select_rtx_section (mode
, x
, align
)
2734 enum machine_mode mode ATTRIBUTE_UNUSED
;
2735 rtx x ATTRIBUTE_UNUSED
;
2736 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
2738 function_section (current_function_decl
);
2741 /* If we are referencing a function that is static, make the SYMBOL_REF
2742 special so that we can generate direct calls to it even with -fpic. */
2745 xtensa_encode_section_info (decl
, first
)
2747 int first ATTRIBUTE_UNUSED
;
2749 if (TREE_CODE (decl
) == FUNCTION_DECL
&& ! TREE_PUBLIC (decl
))
2750 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;
2753 #include "gt-xtensa.h"