]> gcc.gnu.org Git - gcc.git/blame - gcc/config/arm/arm.c
arm.c (arm_legitimate_address_p): Don't check the mode size for minipool references.
[gcc.git] / gcc / config / arm / arm.c
CommitLineData
b36ba79f 1/* Output routines for GCC for ARM.
f954388e
RE
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
3 2002, 2003, 2004 Free Software Foundation, Inc.
cce8749e 4 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 5 and Martin Simmons (@harleqn.co.uk).
b36ba79f 6 More major hacks by Richard Earnshaw (rearnsha@arm.com).
cce8749e 7
4f448245 8 This file is part of GCC.
cce8749e 9
4f448245
NC
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published
12 by the Free Software Foundation; either version 2, or (at your
13 option) any later version.
cce8749e 14
4f448245
NC
15 GCC is distributed in the hope that it will be useful, but WITHOUT
16 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
18 License for more details.
cce8749e 19
4f448245
NC
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
ff9940b0 24
56636818 25#include "config.h"
43cffd11 26#include "system.h"
4977bab6
ZW
27#include "coretypes.h"
28#include "tm.h"
cce8749e 29#include "rtl.h"
d5b7b3ae 30#include "tree.h"
c7319d87 31#include "obstack.h"
cce8749e
CH
32#include "regs.h"
33#include "hard-reg-set.h"
34#include "real.h"
35#include "insn-config.h"
36#include "conditions.h"
cce8749e
CH
37#include "output.h"
38#include "insn-attr.h"
39#include "flags.h"
af48348a 40#include "reload.h"
49ad7cfa 41#include "function.h"
bee06f3d 42#include "expr.h"
e78d8e51 43#include "optabs.h"
ad076f4e 44#include "toplev.h"
aec3cfba 45#include "recog.h"
92a432f4 46#include "ggc.h"
d5b7b3ae 47#include "except.h"
8b97c5f8 48#include "c-pragma.h"
7b8b8ade 49#include "integrate.h"
c27ba912 50#include "tm_p.h"
672a6f42
NB
51#include "target.h"
52#include "target-def.h"
980e61bb 53#include "debug.h"
cce8749e 54
d5b7b3ae
RE
55/* Forward definitions of types. */
56typedef struct minipool_node Mnode;
57typedef struct minipool_fixup Mfix;
58
1d6e90ac
NC
59const struct attribute_spec arm_attribute_table[];
60
d5b7b3ae 61/* Forward function declarations. */
e32bac5b
RE
62static void arm_add_gc_roots (void);
63static int arm_gen_constant (enum rtx_code, enum machine_mode, HOST_WIDE_INT,
64 rtx, rtx, int, int);
65static unsigned bit_count (unsigned long);
66static int arm_address_register_rtx_p (rtx, int);
67static int arm_legitimate_index_p (enum machine_mode, rtx, int);
68static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
69inline static int thumb_index_register_rtx_p (rtx, int);
70static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
e32bac5b
RE
71static rtx emit_multi_reg_push (int);
72static rtx emit_sfm (int, int);
301d03af 73#ifndef AOF_ASSEMBLER
e32bac5b 74static bool arm_assemble_integer (rtx, unsigned int, int);
301d03af 75#endif
e32bac5b
RE
76static const char *fp_const_from_val (REAL_VALUE_TYPE *);
77static arm_cc get_arm_condition_code (rtx);
78static void init_fpa_table (void);
79static HOST_WIDE_INT int_log2 (HOST_WIDE_INT);
80static rtx is_jump_table (rtx);
81static const char *output_multi_immediate (rtx *, const char *, const char *,
82 int, HOST_WIDE_INT);
83static void print_multi_reg (FILE *, const char *, int, int);
84static const char *shift_op (rtx, HOST_WIDE_INT *);
85static struct machine_function *arm_init_machine_status (void);
86static int number_of_first_bit_set (int);
87static void replace_symbols_in_block (tree, rtx, rtx);
88static void thumb_exit (FILE *, int, rtx);
980e61bb 89static void thumb_pushpop (FILE *, int, int, int *, int);
e32bac5b
RE
90static rtx is_jump_table (rtx);
91static HOST_WIDE_INT get_jump_table_size (rtx);
92static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
93static Mnode *add_minipool_forward_ref (Mfix *);
94static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
95static Mnode *add_minipool_backward_ref (Mfix *);
96static void assign_minipool_offsets (Mfix *);
97static void arm_print_value (FILE *, rtx);
98static void dump_minipool (rtx);
99static int arm_barrier_cost (rtx);
100static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
101static void push_minipool_barrier (rtx, HOST_WIDE_INT);
102static void push_minipool_fix (rtx, HOST_WIDE_INT, rtx *, enum machine_mode,
103 rtx);
104static void arm_reorg (void);
105static bool note_invalid_constants (rtx, HOST_WIDE_INT, int);
106static int current_file_function_operand (rtx);
107static unsigned long arm_compute_save_reg0_reg12_mask (void);
108static unsigned long arm_compute_save_reg_mask (void);
109static unsigned long arm_isr_value (tree);
110static unsigned long arm_compute_func_type (void);
111static tree arm_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
112static tree arm_handle_isr_attribute (tree *, tree, tree, int, bool *);
113static void arm_output_function_epilogue (FILE *, HOST_WIDE_INT);
114static void arm_output_function_prologue (FILE *, HOST_WIDE_INT);
115static void thumb_output_function_prologue (FILE *, HOST_WIDE_INT);
116static int arm_comp_type_attributes (tree, tree);
117static void arm_set_default_type_attributes (tree);
118static int arm_adjust_cost (rtx, rtx, rtx, int);
119static int arm_use_dfa_pipeline_interface (void);
120static int count_insns_for_constant (HOST_WIDE_INT, int);
121static int arm_get_strip_length (int);
122static bool arm_function_ok_for_sibcall (tree, tree);
123static void arm_internal_label (FILE *, const char *, unsigned long);
124static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 tree);
126static int arm_rtx_costs_1 (rtx, enum rtx_code, enum rtx_code);
127static bool arm_rtx_costs (rtx, int, int, int *);
128static int arm_address_cost (rtx);
129static bool arm_memory_load_p (rtx);
130static bool arm_cirrus_insn_p (rtx);
131static void cirrus_reorg (rtx);
5a9335ef
NC
132static void arm_init_builtins (void);
133static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
134static void arm_init_iwmmxt_builtins (void);
135static rtx safe_vector_operand (rtx, enum machine_mode);
136static rtx arm_expand_binop_builtin (enum insn_code, tree, rtx);
137static rtx arm_expand_unop_builtin (enum insn_code, tree, rtx, int);
138static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
139
ebe413e5 140#ifdef OBJECT_FORMAT_ELF
e32bac5b 141static void arm_elf_asm_named_section (const char *, unsigned int);
ebe413e5 142#endif
fb49053f 143#ifndef ARM_PE
e32bac5b 144static void arm_encode_section_info (tree, rtx, int);
fb49053f 145#endif
5eb99654 146#ifdef AOF_ASSEMBLER
e32bac5b
RE
147static void aof_globalize_label (FILE *, const char *);
148static void aof_dump_imports (FILE *);
149static void aof_dump_pic_table (FILE *);
1bc7c5b6 150static void aof_file_start (void);
e32bac5b 151static void aof_file_end (void);
5eb99654 152#endif
c237e94a 153
672a6f42
NB
154\f
155/* Initialize the GCC target structure. */
156#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
1d6e90ac 157#undef TARGET_MERGE_DECL_ATTRIBUTES
672a6f42
NB
158#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
159#endif
f3bb6135 160
1d6e90ac 161#undef TARGET_ATTRIBUTE_TABLE
91d231cb 162#define TARGET_ATTRIBUTE_TABLE arm_attribute_table
672a6f42 163
301d03af 164#ifdef AOF_ASSEMBLER
1d6e90ac 165#undef TARGET_ASM_BYTE_OP
301d03af 166#define TARGET_ASM_BYTE_OP "\tDCB\t"
1d6e90ac 167#undef TARGET_ASM_ALIGNED_HI_OP
301d03af 168#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
1d6e90ac 169#undef TARGET_ASM_ALIGNED_SI_OP
301d03af 170#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
5eb99654
KG
171#undef TARGET_ASM_GLOBALIZE_LABEL
172#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
1bc7c5b6
ZW
173#undef TARGET_ASM_FILE_START
174#define TARGET_ASM_FILE_START aof_file_start
a5fe455b
ZW
175#undef TARGET_ASM_FILE_END
176#define TARGET_ASM_FILE_END aof_file_end
301d03af 177#else
1d6e90ac 178#undef TARGET_ASM_ALIGNED_SI_OP
301d03af 179#define TARGET_ASM_ALIGNED_SI_OP NULL
1d6e90ac 180#undef TARGET_ASM_INTEGER
301d03af
RS
181#define TARGET_ASM_INTEGER arm_assemble_integer
182#endif
183
1d6e90ac 184#undef TARGET_ASM_FUNCTION_PROLOGUE
08c148a8
NB
185#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue
186
1d6e90ac 187#undef TARGET_ASM_FUNCTION_EPILOGUE
08c148a8
NB
188#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue
189
1d6e90ac 190#undef TARGET_COMP_TYPE_ATTRIBUTES
8d8e52be
JM
191#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes
192
1d6e90ac 193#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
8d8e52be
JM
194#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes
195
1d6e90ac 196#undef TARGET_SCHED_ADJUST_COST
c237e94a
ZW
197#define TARGET_SCHED_ADJUST_COST arm_adjust_cost
198
103fc15d
BE
199#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
200#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE arm_use_dfa_pipeline_interface
201
fb49053f
RH
202#undef TARGET_ENCODE_SECTION_INFO
203#ifdef ARM_PE
204#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
205#else
206#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
207#endif
208
5a9335ef 209#undef TARGET_STRIP_NAME_ENCODING
772c5265
RH
210#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding
211
5a9335ef 212#undef TARGET_ASM_INTERNAL_LABEL
4977bab6
ZW
213#define TARGET_ASM_INTERNAL_LABEL arm_internal_label
214
5a9335ef 215#undef TARGET_FUNCTION_OK_FOR_SIBCALL
4977bab6
ZW
216#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall
217
5a9335ef 218#undef TARGET_ASM_OUTPUT_MI_THUNK
c590b625 219#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
5a9335ef 220#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
3961e8fe 221#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
c590b625 222
5a9335ef 223#undef TARGET_RTX_COSTS
3c50106f 224#define TARGET_RTX_COSTS arm_rtx_costs
5a9335ef 225#undef TARGET_ADDRESS_COST
dcefdf67 226#define TARGET_ADDRESS_COST arm_address_cost
3c50106f 227
5a9335ef 228#undef TARGET_MACHINE_DEPENDENT_REORG
18dbd950
RS
229#define TARGET_MACHINE_DEPENDENT_REORG arm_reorg
230
5a9335ef
NC
231#undef TARGET_INIT_BUILTINS
232#define TARGET_INIT_BUILTINS arm_init_builtins
233#undef TARGET_EXPAND_BUILTIN
234#define TARGET_EXPAND_BUILTIN arm_expand_builtin
235
f6897b10 236struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 237\f
c7319d87
RE
238/* Obstack for minipool constant handling. */
239static struct obstack minipool_obstack;
1d6e90ac 240static char * minipool_startobj;
c7319d87 241
1d6e90ac
NC
242/* The maximum number of insns skipped which
243 will be conditionalised if possible. */
c27ba912
DM
244static int max_insns_skipped = 5;
245
246extern FILE * asm_out_file;
247
6354dc9b 248/* True if we are currently building a constant table. */
13bd191d
PB
249int making_const_table;
250
60d0536b 251/* Define the information needed to generate branch insns. This is
6354dc9b 252 stored from the compare operation. */
ff9940b0 253rtx arm_compare_op0, arm_compare_op1;
ff9940b0 254
6354dc9b 255/* What type of floating point are we tuning for? */
29ad9694 256enum fputype arm_fpu_tune;
bee06f3d 257
6354dc9b 258/* What type of floating point instructions are available? */
29ad9694 259enum fputype arm_fpu_arch;
b111229a 260
6354dc9b 261/* What program mode is the cpu running in? 26-bit mode or 32-bit mode. */
2b835d68
RE
262enum prog_mode_type arm_prgmode;
263
6354dc9b 264/* Set by the -mfp=... option. */
f9cc092a 265const char * target_fp_name = NULL;
2b835d68 266
b355a481 267/* Used to parse -mstructure_size_boundary command line option. */
f9cc092a 268const char * structure_size_string = NULL;
723ae7c1 269int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
b355a481 270
aec3cfba 271/* Bit values used to identify processor capabilities. */
62b10bbc
NC
272#define FL_CO_PROC (1 << 0) /* Has external co-processor bus */
273#define FL_FAST_MULT (1 << 1) /* Fast multiply */
274#define FL_MODE26 (1 << 2) /* 26-bit mode support */
275#define FL_MODE32 (1 << 3) /* 32-bit mode support */
276#define FL_ARCH4 (1 << 4) /* Architecture rel 4 */
277#define FL_ARCH5 (1 << 5) /* Architecture rel 5 */
278#define FL_THUMB (1 << 6) /* Thumb aware */
279#define FL_LDSCHED (1 << 7) /* Load scheduling necessary */
280#define FL_STRONG (1 << 8) /* StrongARM */
6bc82793 281#define FL_ARCH5E (1 << 9) /* DSP extensions to v5 */
d19fb8e3 282#define FL_XSCALE (1 << 10) /* XScale */
9b6b54e2 283#define FL_CIRRUS (1 << 11) /* Cirrus/DSP. */
5a9335ef 284#define FL_IWMMXT (1 << 29) /* XScale v2 or "Intel Wireless MMX technology". */
81f9037c
MM
285#define FL_ARCH6J (1 << 12) /* Architecture rel 6. Adds
286 media instructions. */
287#define FL_VFPV2 (1 << 13) /* Vector Floating Point V2. */
aec3cfba 288
1d6e90ac
NC
289/* The bits in this mask specify which
290 instructions we are allowed to generate. */
0977774b 291static unsigned long insn_flags = 0;
d5b7b3ae 292
aec3cfba
NC
293/* The bits in this mask specify which instruction scheduling options should
294 be used. Note - there is an overlap with the FL_FAST_MULT. For some
295 hardware we want to be able to generate the multiply instructions, but to
296 tune as if they were not present in the architecture. */
0977774b 297static unsigned long tune_flags = 0;
aec3cfba
NC
298
299/* The following are used in the arm.md file as equivalents to bits
300 in the above two flag variables. */
301
2b835d68
RE
302/* Nonzero if this is an "M" variant of the processor. */
303int arm_fast_multiply = 0;
304
6354dc9b 305/* Nonzero if this chip supports the ARM Architecture 4 extensions. */
2b835d68
RE
306int arm_arch4 = 0;
307
6354dc9b 308/* Nonzero if this chip supports the ARM Architecture 5 extensions. */
62b10bbc
NC
309int arm_arch5 = 0;
310
b15bca31
RE
311/* Nonzero if this chip supports the ARM Architecture 5E extensions. */
312int arm_arch5e = 0;
313
aec3cfba 314/* Nonzero if this chip can benefit from load scheduling. */
f5a1b0d2
NC
315int arm_ld_sched = 0;
316
317/* Nonzero if this chip is a StrongARM. */
318int arm_is_strong = 0;
319
5a9335ef
NC
320/* Nonzero if this chip supports Intel Wireless MMX technology. */
321int arm_arch_iwmmxt = 0;
322
d19fb8e3 323/* Nonzero if this chip is an XScale. */
4b3c2e48
PB
324int arm_arch_xscale = 0;
325
326/* Nonzero if tuning for XScale */
327int arm_tune_xscale = 0;
d19fb8e3 328
3569057d 329/* Nonzero if this chip is an ARM6 or an ARM7. */
f5a1b0d2 330int arm_is_6_or_7 = 0;
b111229a 331
9b6b54e2
NC
332/* Nonzero if this chip is a Cirrus/DSP. */
333int arm_is_cirrus = 0;
334
0616531f
RE
335/* Nonzero if generating Thumb instructions. */
336int thumb_code = 0;
337
cce8749e
CH
338/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
339 must report the mode of the memory reference from PRINT_OPERAND to
340 PRINT_OPERAND_ADDRESS. */
f3bb6135 341enum machine_mode output_memory_reference_mode;
cce8749e 342
32de079a 343/* The register number to be used for the PIC offset register. */
ed0e6530 344const char * arm_pic_register_string = NULL;
5b43fed1 345int arm_pic_register = INVALID_REGNUM;
32de079a 346
ff9940b0 347/* Set to 1 when a return insn is output, this means that the epilogue
6354dc9b 348 is not needed. */
d5b7b3ae 349int return_used_this_function;
ff9940b0 350
aec3cfba
NC
351/* Set to 1 after arm_reorg has started. Reset to start at the start of
352 the next function. */
4b632bf1
RE
353static int after_arm_reorg = 0;
354
aec3cfba 355/* The maximum number of insns to be used when loading a constant. */
2b835d68
RE
356static int arm_constant_limit = 3;
357
cce8749e
CH
358/* For an explanation of these variables, see final_prescan_insn below. */
359int arm_ccfsm_state;
84ed5e79 360enum arm_cond_code arm_current_cc;
cce8749e
CH
361rtx arm_target_insn;
362int arm_target_label;
9997d19d
RE
363
364/* The condition codes of the ARM, and the inverse function. */
1d6e90ac 365static const char * const arm_condition_codes[] =
9997d19d
RE
366{
367 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
368 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
369};
370
f5a1b0d2 371#define streq(string1, string2) (strcmp (string1, string2) == 0)
2b835d68 372\f
6354dc9b 373/* Initialization code. */
2b835d68 374
2b835d68
RE
375struct processors
376{
8b60264b 377 const char *const name;
0977774b 378 const unsigned long flags;
2b835d68
RE
379};
380
381/* Not all of these give usefully different compilation alternatives,
382 but there is no simple way of generalizing them. */
8b60264b 383static const struct processors all_cores[] =
f5a1b0d2
NC
384{
385 /* ARM Cores */
386
387 {"arm2", FL_CO_PROC | FL_MODE26 },
388 {"arm250", FL_CO_PROC | FL_MODE26 },
389 {"arm3", FL_CO_PROC | FL_MODE26 },
390 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
391 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
392 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
393 {"arm610", FL_MODE26 | FL_MODE32 },
394 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
949d79eb
RE
395 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
396 /* arm7m doesn't exist on its own, but only with D, (and I), but
d5b7b3ae 397 those don't alter the code, so arm7m is sometimes used. */
949d79eb
RE
398 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
399 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
400 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
401 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
f5a1b0d2
NC
402 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
403 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
404 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
405 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
406 {"arm710", FL_MODE26 | FL_MODE32 },
a120a3bd 407 {"arm720", FL_MODE26 | FL_MODE32 },
f5a1b0d2
NC
408 {"arm710c", FL_MODE26 | FL_MODE32 },
409 {"arm7100", FL_MODE26 | FL_MODE32 },
410 {"arm7500", FL_MODE26 | FL_MODE32 },
3b684012 411 /* Doesn't have an external co-proc, but does have embedded fpa. */
949d79eb 412 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
81f9037c 413 /* V4 Architecture Processors */
f5a1b0d2 414 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
131b9f3d
RE
415 {"arm710t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
416 {"arm720t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
417 {"arm740t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
f5a1b0d2
NC
418 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
419 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
420 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
6cf32035
NC
421 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
422 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
eab4abeb 423 {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
f5a1b0d2 424 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
eab4abeb 425 {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
9b6b54e2 426 {"ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
f5a1b0d2
NC
427 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
428 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
429 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
eab4abeb 430 {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
81f9037c 431 /* V5 Architecture Processors */
eab4abeb
NC
432 {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
433 {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
81f9037c
MM
434 {"arm926ejs", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
435 {"arm1026ejs", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
eab4abeb 436 {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
5a9335ef 437 {"iwmmxt", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },
81f9037c
MM
438 /* V6 Architecture Processors */
439 {"arm1136js", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E | FL_ARCH6J },
440 {"arm1136jfs", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E | FL_ARCH6J | FL_VFPV2 },
f5a1b0d2
NC
441 {NULL, 0}
442};
443
8b60264b 444static const struct processors all_architectures[] =
2b835d68 445{
f5a1b0d2
NC
446 /* ARM Architectures */
447
62b10bbc
NC
448 { "armv2", FL_CO_PROC | FL_MODE26 },
449 { "armv2a", FL_CO_PROC | FL_MODE26 },
450 { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
451 { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
949d79eb 452 { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
b111229a
RE
453 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
454 implementations that support it, so we will leave it out for now. */
62b10bbc
NC
455 { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
456 { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
d19fb8e3
NC
457 { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
458 { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
81f9037c 459 { "armv6j", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E | FL_ARCH6J },
9b6b54e2 460 { "ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
5a9335ef 461 {"iwmmxt", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },
62b10bbc 462 { NULL, 0 }
f5a1b0d2
NC
463};
464
9a9f7594 465/* This is a magic structure. The 'string' field is magically filled in
f5a1b0d2
NC
466 with a pointer to the value specified by the user on the command line
467 assuming that the user has specified such a value. */
468
469struct arm_cpu_select arm_select[] =
470{
471 /* string name processors */
472 { NULL, "-mcpu=", all_cores },
473 { NULL, "-march=", all_architectures },
474 { NULL, "-mtune=", all_cores }
2b835d68
RE
475};
476
0977774b
JT
477/* Return the number of bits set in VALUE. */
478static unsigned
e32bac5b 479bit_count (unsigned long value)
aec3cfba 480{
d5b7b3ae 481 unsigned long count = 0;
aec3cfba
NC
482
483 while (value)
484 {
0977774b
JT
485 count++;
486 value &= value - 1; /* Clear the least-significant set bit. */
aec3cfba
NC
487 }
488
489 return count;
490}
491
2b835d68
RE
492/* Fix up any incompatible options that the user has specified.
493 This has now turned into a maze. */
494void
e32bac5b 495arm_override_options (void)
2b835d68 496{
ed4c4348 497 unsigned i;
f5a1b0d2
NC
498
499 /* Set up the flags based on the cpu/architecture selected by the user. */
b6a1cbae 500 for (i = ARRAY_SIZE (arm_select); i--;)
bd9c7e23 501 {
f5a1b0d2
NC
502 struct arm_cpu_select * ptr = arm_select + i;
503
504 if (ptr->string != NULL && ptr->string[0] != '\0')
bd9c7e23 505 {
13bd191d 506 const struct processors * sel;
bd9c7e23 507
5895f793 508 for (sel = ptr->processors; sel->name != NULL; sel++)
f5a1b0d2 509 if (streq (ptr->string, sel->name))
bd9c7e23 510 {
aec3cfba
NC
511 if (i == 2)
512 tune_flags = sel->flags;
513 else
b111229a 514 {
aec3cfba
NC
515 /* If we have been given an architecture and a processor
516 make sure that they are compatible. We only generate
517 a warning though, and we prefer the CPU over the
6354dc9b 518 architecture. */
aec3cfba 519 if (insn_flags != 0 && (insn_flags ^ sel->flags))
6cf32035 520 warning ("switch -mcpu=%s conflicts with -march= switch",
aec3cfba
NC
521 ptr->string);
522
523 insn_flags = sel->flags;
b111229a 524 }
f5a1b0d2 525
bd9c7e23
RE
526 break;
527 }
528
529 if (sel->name == NULL)
530 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
531 }
532 }
aec3cfba 533
f5a1b0d2 534 /* If the user did not specify a processor, choose one for them. */
aec3cfba 535 if (insn_flags == 0)
f5a1b0d2 536 {
8b60264b 537 const struct processors * sel;
aec3cfba 538 unsigned int sought;
8b60264b 539 static const struct cpu_default
aec3cfba 540 {
8b60264b
KG
541 const int cpu;
542 const char *const name;
aec3cfba
NC
543 }
544 cpu_defaults[] =
545 {
546 { TARGET_CPU_arm2, "arm2" },
547 { TARGET_CPU_arm6, "arm6" },
548 { TARGET_CPU_arm610, "arm610" },
2aa0c933 549 { TARGET_CPU_arm710, "arm710" },
aec3cfba
NC
550 { TARGET_CPU_arm7m, "arm7m" },
551 { TARGET_CPU_arm7500fe, "arm7500fe" },
552 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
553 { TARGET_CPU_arm8, "arm8" },
554 { TARGET_CPU_arm810, "arm810" },
555 { TARGET_CPU_arm9, "arm9" },
556 { TARGET_CPU_strongarm, "strongarm" },
d19fb8e3 557 { TARGET_CPU_xscale, "xscale" },
9b6b54e2 558 { TARGET_CPU_ep9312, "ep9312" },
5a9335ef 559 { TARGET_CPU_iwmmxt, "iwmmxt" },
81f9037c
MM
560 { TARGET_CPU_arm926ej_s, "arm926ej-s" },
561 { TARGET_CPU_arm1026ej_s, "arm1026ej-s" },
562 { TARGET_CPU_arm1136j_s, "arm1136j_s" },
563 { TARGET_CPU_arm1136jf_s, "arm1136jf_s" },
aec3cfba
NC
564 { TARGET_CPU_generic, "arm" },
565 { 0, 0 }
566 };
8b60264b 567 const struct cpu_default * def;
aec3cfba
NC
568
569 /* Find the default. */
5895f793 570 for (def = cpu_defaults; def->name; def++)
aec3cfba
NC
571 if (def->cpu == TARGET_CPU_DEFAULT)
572 break;
573
574 /* Make sure we found the default CPU. */
575 if (def->name == NULL)
576 abort ();
577
578 /* Find the default CPU's flags. */
5895f793 579 for (sel = all_cores; sel->name != NULL; sel++)
aec3cfba
NC
580 if (streq (def->name, sel->name))
581 break;
582
583 if (sel->name == NULL)
584 abort ();
585
586 insn_flags = sel->flags;
587
588 /* Now check to see if the user has specified some command line
589 switch that require certain abilities from the cpu. */
590 sought = 0;
f5a1b0d2 591
d5b7b3ae 592 if (TARGET_INTERWORK || TARGET_THUMB)
f5a1b0d2 593 {
aec3cfba
NC
594 sought |= (FL_THUMB | FL_MODE32);
595
596 /* Force apcs-32 to be used for interworking. */
f5a1b0d2 597 target_flags |= ARM_FLAG_APCS_32;
aec3cfba 598
d5b7b3ae 599 /* There are no ARM processors that support both APCS-26 and
aec3cfba
NC
600 interworking. Therefore we force FL_MODE26 to be removed
601 from insn_flags here (if it was set), so that the search
602 below will always be able to find a compatible processor. */
5895f793 603 insn_flags &= ~FL_MODE26;
f5a1b0d2 604 }
5895f793 605 else if (!TARGET_APCS_32)
f5a1b0d2 606 sought |= FL_MODE26;
d5b7b3ae 607
aec3cfba 608 if (sought != 0 && ((sought & insn_flags) != sought))
f5a1b0d2 609 {
aec3cfba
NC
610 /* Try to locate a CPU type that supports all of the abilities
611 of the default CPU, plus the extra abilities requested by
612 the user. */
5895f793 613 for (sel = all_cores; sel->name != NULL; sel++)
aec3cfba 614 if ((sel->flags & sought) == (sought | insn_flags))
f5a1b0d2
NC
615 break;
616
617 if (sel->name == NULL)
aec3cfba 618 {
0977774b 619 unsigned current_bit_count = 0;
8b60264b 620 const struct processors * best_fit = NULL;
aec3cfba
NC
621
622 /* Ideally we would like to issue an error message here
623 saying that it was not possible to find a CPU compatible
624 with the default CPU, but which also supports the command
625 line options specified by the programmer, and so they
626 ought to use the -mcpu=<name> command line option to
627 override the default CPU type.
628
629 Unfortunately this does not work with multilibing. We
630 need to be able to support multilibs for -mapcs-26 and for
631 -mthumb-interwork and there is no CPU that can support both
632 options. Instead if we cannot find a cpu that has both the
633 characteristics of the default cpu and the given command line
634 options we scan the array again looking for a best match. */
5895f793 635 for (sel = all_cores; sel->name != NULL; sel++)
aec3cfba
NC
636 if ((sel->flags & sought) == sought)
637 {
0977774b 638 unsigned count;
aec3cfba
NC
639
640 count = bit_count (sel->flags & insn_flags);
641
642 if (count >= current_bit_count)
643 {
644 best_fit = sel;
645 current_bit_count = count;
646 }
647 }
f5a1b0d2 648
aec3cfba
NC
649 if (best_fit == NULL)
650 abort ();
651 else
652 sel = best_fit;
653 }
654
655 insn_flags = sel->flags;
f5a1b0d2
NC
656 }
657 }
aec3cfba
NC
658
659 /* If tuning has not been specified, tune for whichever processor or
660 architecture has been selected. */
661 if (tune_flags == 0)
662 tune_flags = insn_flags;
e26053d1 663
f5a1b0d2
NC
664 /* Make sure that the processor choice does not conflict with any of the
665 other command line choices. */
aec3cfba 666 if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
f5a1b0d2 667 {
aec3cfba
NC
668 /* If APCS-32 was not the default then it must have been set by the
669 user, so issue a warning message. If the user has specified
670 "-mapcs-32 -mcpu=arm2" then we loose here. */
671 if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
672 warning ("target CPU does not support APCS-32" );
5895f793 673 target_flags &= ~ARM_FLAG_APCS_32;
f5a1b0d2 674 }
5895f793 675 else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
f5a1b0d2
NC
676 {
677 warning ("target CPU does not support APCS-26" );
678 target_flags |= ARM_FLAG_APCS_32;
679 }
680
6cfc7210 681 if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
f5a1b0d2
NC
682 {
683 warning ("target CPU does not support interworking" );
6cfc7210 684 target_flags &= ~ARM_FLAG_INTERWORK;
f5a1b0d2
NC
685 }
686
d5b7b3ae
RE
687 if (TARGET_THUMB && !(insn_flags & FL_THUMB))
688 {
c725bd79 689 warning ("target CPU does not support THUMB instructions");
d5b7b3ae
RE
690 target_flags &= ~ARM_FLAG_THUMB;
691 }
692
693 if (TARGET_APCS_FRAME && TARGET_THUMB)
694 {
c725bd79 695 /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
d5b7b3ae
RE
696 target_flags &= ~ARM_FLAG_APCS_FRAME;
697 }
d19fb8e3 698
d5b7b3ae
RE
699 /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
700 from here where no function is being compiled currently. */
701 if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
702 && TARGET_ARM)
c725bd79 703 warning ("enabling backtrace support is only meaningful when compiling for the Thumb");
d5b7b3ae
RE
704
705 if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
c725bd79 706 warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");
d5b7b3ae
RE
707
708 if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
c725bd79 709 warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");
d5b7b3ae 710
f5a1b0d2 711 /* If interworking is enabled then APCS-32 must be selected as well. */
6cfc7210 712 if (TARGET_INTERWORK)
f5a1b0d2 713 {
5895f793 714 if (!TARGET_APCS_32)
f5a1b0d2
NC
715 warning ("interworking forces APCS-32 to be used" );
716 target_flags |= ARM_FLAG_APCS_32;
717 }
718
5895f793 719 if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
f5a1b0d2
NC
720 {
721 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
722 target_flags |= ARM_FLAG_APCS_FRAME;
723 }
aec3cfba 724
2b835d68
RE
725 if (TARGET_POKE_FUNCTION_NAME)
726 target_flags |= ARM_FLAG_APCS_FRAME;
aec3cfba 727
2b835d68 728 if (TARGET_APCS_REENT && flag_pic)
400500c4 729 error ("-fpic and -mapcs-reent are incompatible");
aec3cfba 730
2b835d68 731 if (TARGET_APCS_REENT)
f5a1b0d2 732 warning ("APCS reentrant code not supported. Ignored");
aec3cfba 733
d5b7b3ae
RE
734 /* If this target is normally configured to use APCS frames, warn if they
735 are turned off and debugging is turned on. */
736 if (TARGET_ARM
737 && write_symbols != NO_DEBUG
5895f793 738 && !TARGET_APCS_FRAME
d5b7b3ae
RE
739 && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
740 warning ("-g with -mno-apcs-frame may not give sensible debugging");
6cfc7210 741
32de079a
RE
742 /* If stack checking is disabled, we can use r10 as the PIC register,
743 which keeps r9 available. */
5b43fed1
RH
744 if (flag_pic)
745 arm_pic_register = TARGET_APCS_STACK ? 9 : 10;
aec3cfba 746
2b835d68 747 if (TARGET_APCS_FLOAT)
c725bd79 748 warning ("passing floating point arguments in fp regs not yet supported");
f5a1b0d2 749
4912a07c 750 /* Initialize boolean versions of the flags, for use in the arm.md file. */
2ca12935
JL
751 arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
752 arm_arch4 = (insn_flags & FL_ARCH4) != 0;
753 arm_arch5 = (insn_flags & FL_ARCH5) != 0;
b15bca31 754 arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
4b3c2e48 755 arm_arch_xscale = (insn_flags & FL_XSCALE) != 0;
6f7ebcbb 756
2ca12935
JL
757 arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
758 arm_is_strong = (tune_flags & FL_STRONG) != 0;
0616531f 759 thumb_code = (TARGET_ARM == 0);
d5b7b3ae
RE
760 arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
761 && !(tune_flags & FL_ARCH4))) != 0;
4b3c2e48 762 arm_tune_xscale = (tune_flags & FL_XSCALE) != 0;
9b6b54e2 763 arm_is_cirrus = (tune_flags & FL_CIRRUS) != 0;
5a9335ef
NC
764 arm_arch_iwmmxt = (insn_flags & FL_IWMMXT) != 0;
765
766 if (TARGET_IWMMXT && (! TARGET_ATPCS))
767 target_flags |= ARM_FLAG_ATPCS;
6f7ebcbb 768
9b6b54e2
NC
769 if (arm_is_cirrus)
770 {
29ad9694 771 arm_fpu_tune = FPUTYPE_MAVERICK;
9b6b54e2
NC
772
773 /* Ignore -mhard-float if -mcpu=ep9312. */
774 if (TARGET_HARD_FLOAT)
775 target_flags ^= ARM_FLAG_SOFT_FLOAT;
776 }
777 else
778 /* Default value for floating point code... if no co-processor
779 bus, then schedule for emulated floating point. Otherwise,
780 assume the user has an FPA.
781 Note: this does not prevent use of floating point instructions,
782 -msoft-float does that. */
29ad9694 783 arm_fpu_tune = (tune_flags & FL_CO_PROC) ? FPUTYPE_FPA : FPUTYPE_FPA_EMU3;
f5a1b0d2 784
b111229a 785 if (target_fp_name)
2b835d68 786 {
f5a1b0d2 787 if (streq (target_fp_name, "2"))
29ad9694 788 arm_fpu_arch = FPUTYPE_FPA_EMU2;
f5a1b0d2 789 else if (streq (target_fp_name, "3"))
29ad9694 790 arm_fpu_arch = FPUTYPE_FPA_EMU3;
2b835d68 791 else
c725bd79 792 error ("invalid floating point emulation option: -mfpe-%s",
b111229a 793 target_fp_name);
2b835d68 794 }
b111229a 795 else
29ad9694 796 arm_fpu_arch = FPUTYPE_DEFAULT;
f5a1b0d2 797
9b6b54e2
NC
798 if (TARGET_FPE)
799 {
29ad9694
RE
800 if (arm_fpu_tune == FPUTYPE_FPA_EMU3)
801 arm_fpu_tune = FPUTYPE_FPA_EMU2;
802 else if (arm_fpu_tune == FPUTYPE_MAVERICK)
803 warning ("-mfpe switch not supported by ep9312 target cpu - ignored.");
804 else if (arm_fpu_tune != FPUTYPE_FPA)
805 arm_fpu_tune = FPUTYPE_FPA_EMU2;
9b6b54e2 806 }
aec3cfba 807
f5a1b0d2
NC
808 /* For arm2/3 there is no need to do any scheduling if there is only
809 a floating point emulator, or we are doing software floating-point. */
29ad9694 810 if ((TARGET_SOFT_FLOAT || arm_fpu_tune != FPUTYPE_FPA)
ed0e6530 811 && (tune_flags & FL_MODE32) == 0)
f5a1b0d2 812 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
aec3cfba 813
cd2b33d0 814 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
b355a481
NC
815
816 if (structure_size_string != NULL)
817 {
818 int size = strtol (structure_size_string, NULL, 0);
819
820 if (size == 8 || size == 32)
821 arm_structure_size_boundary = size;
822 else
c725bd79 823 warning ("structure size boundary can only be set to 8 or 32");
b355a481 824 }
ed0e6530
PB
825
826 if (arm_pic_register_string != NULL)
827 {
5b43fed1 828 int pic_register = decode_reg_name (arm_pic_register_string);
e26053d1 829
5895f793 830 if (!flag_pic)
ed0e6530
PB
831 warning ("-mpic-register= is useless without -fpic");
832
ed0e6530 833 /* Prevent the user from choosing an obviously stupid PIC register. */
5b43fed1
RH
834 else if (pic_register < 0 || call_used_regs[pic_register]
835 || pic_register == HARD_FRAME_POINTER_REGNUM
836 || pic_register == STACK_POINTER_REGNUM
837 || pic_register >= PC_REGNUM)
c725bd79 838 error ("unable to use '%s' for PIC register", arm_pic_register_string);
ed0e6530
PB
839 else
840 arm_pic_register = pic_register;
841 }
d5b7b3ae
RE
842
843 if (TARGET_THUMB && flag_schedule_insns)
844 {
845 /* Don't warn since it's on by default in -O2. */
846 flag_schedule_insns = 0;
847 }
848
f5a1b0d2 849 if (optimize_size)
be03ccc9 850 {
577d6328
RE
851 /* There's some dispute as to whether this should be 1 or 2. However,
852 experiments seem to show that in pathological cases a setting of
839a4992 853 1 degrades less severely than a setting of 2. This could change if
577d6328
RE
854 other parts of the compiler change their behavior. */
855 arm_constant_limit = 1;
be03ccc9
NP
856
857 /* If optimizing for size, bump the number of instructions that we
d6b4baa4 858 are prepared to conditionally execute (even on a StrongARM). */
be03ccc9
NP
859 max_insns_skipped = 6;
860 }
861 else
862 {
863 /* For processors with load scheduling, it never costs more than
864 2 cycles to load a constant, and the load scheduler may well
865 reduce that to 1. */
866 if (tune_flags & FL_LDSCHED)
867 arm_constant_limit = 1;
868
869 /* On XScale the longer latency of a load makes it more difficult
870 to achieve a good schedule, so it's faster to synthesize
d6b4baa4 871 constants that can be done in two insns. */
be03ccc9
NP
872 if (arm_tune_xscale)
873 arm_constant_limit = 2;
874
875 /* StrongARM has early execution of branches, so a sequence
876 that is worth skipping is shorter. */
877 if (arm_is_strong)
878 max_insns_skipped = 3;
879 }
92a432f4
RE
880
881 /* Register global variables with the garbage collector. */
882 arm_add_gc_roots ();
883}
884
885static void
e32bac5b 886arm_add_gc_roots (void)
92a432f4 887{
c7319d87
RE
888 gcc_obstack_init(&minipool_obstack);
889 minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
2b835d68 890}
cce8749e 891\f
6d3d9133
NC
892/* A table of known ARM exception types.
893 For use with the interrupt function attribute. */
894
895typedef struct
896{
8b60264b
KG
897 const char *const arg;
898 const unsigned long return_value;
6d3d9133
NC
899}
900isr_attribute_arg;
901
8b60264b 902static const isr_attribute_arg isr_attribute_args [] =
6d3d9133
NC
903{
904 { "IRQ", ARM_FT_ISR },
905 { "irq", ARM_FT_ISR },
906 { "FIQ", ARM_FT_FIQ },
907 { "fiq", ARM_FT_FIQ },
908 { "ABORT", ARM_FT_ISR },
909 { "abort", ARM_FT_ISR },
910 { "ABORT", ARM_FT_ISR },
911 { "abort", ARM_FT_ISR },
912 { "UNDEF", ARM_FT_EXCEPTION },
913 { "undef", ARM_FT_EXCEPTION },
914 { "SWI", ARM_FT_EXCEPTION },
915 { "swi", ARM_FT_EXCEPTION },
916 { NULL, ARM_FT_NORMAL }
917};
918
919/* Returns the (interrupt) function type of the current
920 function, or ARM_FT_UNKNOWN if the type cannot be determined. */
921
922static unsigned long
e32bac5b 923arm_isr_value (tree argument)
6d3d9133 924{
8b60264b 925 const isr_attribute_arg * ptr;
1d6e90ac 926 const char * arg;
6d3d9133
NC
927
928 /* No argument - default to IRQ. */
929 if (argument == NULL_TREE)
930 return ARM_FT_ISR;
931
932 /* Get the value of the argument. */
933 if (TREE_VALUE (argument) == NULL_TREE
934 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
935 return ARM_FT_UNKNOWN;
936
937 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
938
939 /* Check it against the list of known arguments. */
5a9335ef 940 for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
1d6e90ac
NC
941 if (streq (arg, ptr->arg))
942 return ptr->return_value;
6d3d9133 943
05713b80 944 /* An unrecognized interrupt type. */
6d3d9133
NC
945 return ARM_FT_UNKNOWN;
946}
947
948/* Computes the type of the current function. */
949
950static unsigned long
e32bac5b 951arm_compute_func_type (void)
6d3d9133
NC
952{
953 unsigned long type = ARM_FT_UNKNOWN;
954 tree a;
955 tree attr;
956
957 if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
958 abort ();
959
960 /* Decide if the current function is volatile. Such functions
961 never return, and many memory cycles can be saved by not storing
962 register values that will never be needed again. This optimization
963 was added to speed up context switching in a kernel application. */
964 if (optimize > 0
965 && current_function_nothrow
966 && TREE_THIS_VOLATILE (current_function_decl))
967 type |= ARM_FT_VOLATILE;
968
969 if (current_function_needs_context)
970 type |= ARM_FT_NESTED;
971
91d231cb 972 attr = DECL_ATTRIBUTES (current_function_decl);
6d3d9133
NC
973
974 a = lookup_attribute ("naked", attr);
975 if (a != NULL_TREE)
976 type |= ARM_FT_NAKED;
977
978 if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
979 type |= ARM_FT_EXCEPTION_HANDLER;
980 else
981 {
982 a = lookup_attribute ("isr", attr);
983 if (a == NULL_TREE)
984 a = lookup_attribute ("interrupt", attr);
985
986 if (a == NULL_TREE)
987 type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
988 else
989 type |= arm_isr_value (TREE_VALUE (a));
990 }
991
992 return type;
993}
994
995/* Returns the type of the current function. */
996
997unsigned long
e32bac5b 998arm_current_func_type (void)
6d3d9133
NC
999{
1000 if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
1001 cfun->machine->func_type = arm_compute_func_type ();
1002
1003 return cfun->machine->func_type;
1004}
1005\f
a72d4945
RE
1006/* Return 1 if it is possible to return using a single instruction.
1007 If SIBLING is non-null, this is a test for a return before a sibling
1008 call. SIBLING is the call insn, so we can examine its register usage. */
6d3d9133 1009
ff9940b0 1010int
a72d4945 1011use_return_insn (int iscond, rtx sibling)
ff9940b0
RE
1012{
1013 int regno;
9b598fa0 1014 unsigned int func_type;
d5db54a1 1015 unsigned long saved_int_regs;
a72d4945 1016 unsigned HOST_WIDE_INT stack_adjust;
ff9940b0 1017
d5b7b3ae 1018 /* Never use a return instruction before reload has run. */
6d3d9133
NC
1019 if (!reload_completed)
1020 return 0;
efc2515b 1021
9b598fa0
RE
1022 func_type = arm_current_func_type ();
1023
3a7731fd
PB
1024 /* Naked functions and volatile functions need special
1025 consideration. */
1026 if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
6d3d9133 1027 return 0;
06bea5aa
NC
1028
1029 /* So do interrupt functions that use the frame pointer. */
1030 if (IS_INTERRUPT (func_type) && frame_pointer_needed)
1031 return 0;
a72d4945
RE
1032
1033 stack_adjust = arm_get_frame_size () + current_function_outgoing_args_size;
1034
6d3d9133
NC
1035 /* As do variadic functions. */
1036 if (current_function_pretend_args_size
3cb66fd7 1037 || cfun->machine->uses_anonymous_args
699a4925 1038 /* Or if the function calls __builtin_eh_return () */
6d3d9133 1039 || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
699a4925
RE
1040 /* Or if the function calls alloca */
1041 || current_function_calls_alloca
a72d4945
RE
1042 /* Or if there is a stack adjustment. However, if the stack pointer
1043 is saved on the stack, we can use a pre-incrementing stack load. */
1044 || !(stack_adjust == 0 || (frame_pointer_needed && stack_adjust == 4)))
ff9940b0
RE
1045 return 0;
1046
d5db54a1
RE
1047 saved_int_regs = arm_compute_save_reg_mask ();
1048
a72d4945
RE
1049 /* Unfortunately, the insn
1050
1051 ldmib sp, {..., sp, ...}
1052
1053 triggers a bug on most SA-110 based devices, such that the stack
1054 pointer won't be correctly restored if the instruction takes a
839a4992 1055 page fault. We work around this problem by popping r3 along with
a72d4945
RE
1056 the other registers, since that is never slower than executing
1057 another instruction.
1058
1059 We test for !arm_arch5 here, because code for any architecture
1060 less than this could potentially be run on one of the buggy
1061 chips. */
1062 if (stack_adjust == 4 && !arm_arch5)
1063 {
1064 /* Validate that r3 is a call-clobbered register (always true in
d6b4baa4 1065 the default abi) ... */
a72d4945
RE
1066 if (!call_used_regs[3])
1067 return 0;
1068
1069 /* ... that it isn't being used for a return value (always true
1070 until we implement return-in-regs), or for a tail-call
d6b4baa4 1071 argument ... */
a72d4945
RE
1072 if (sibling)
1073 {
1074 if (GET_CODE (sibling) != CALL_INSN)
1075 abort ();
1076
1077 if (find_regno_fusage (sibling, USE, 3))
1078 return 0;
1079 }
1080
1081 /* ... and that there are no call-saved registers in r0-r2
1082 (always true in the default ABI). */
1083 if (saved_int_regs & 0x7)
1084 return 0;
1085 }
1086
b111229a 1087 /* Can't be done if interworking with Thumb, and any registers have been
d5db54a1
RE
1088 stacked. */
1089 if (TARGET_INTERWORK && saved_int_regs != 0)
b36ba79f 1090 return 0;
d5db54a1
RE
1091
1092 /* On StrongARM, conditional returns are expensive if they aren't
1093 taken and multiple registers have been stacked. */
1094 if (iscond && arm_is_strong)
6ed30148 1095 {
d5db54a1
RE
1096 /* Conditional return when just the LR is stored is a simple
1097 conditional-load instruction, that's not expensive. */
1098 if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
1099 return 0;
6ed30148
RE
1100
1101 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
b111229a 1102 return 0;
6ed30148 1103 }
d5db54a1
RE
1104
1105 /* If there are saved registers but the LR isn't saved, then we need
1106 two instructions for the return. */
1107 if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
1108 return 0;
1109
3b684012 1110 /* Can't be done if any of the FPA regs are pushed,
6d3d9133 1111 since this also requires an insn. */
d5b7b3ae
RE
1112 if (TARGET_HARD_FLOAT)
1113 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
5895f793 1114 if (regs_ever_live[regno] && !call_used_regs[regno])
d5b7b3ae 1115 return 0;
ff9940b0 1116
5a9335ef
NC
1117 if (TARGET_REALLY_IWMMXT)
1118 for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
1119 if (regs_ever_live[regno] && ! call_used_regs [regno])
1120 return 0;
1121
ff9940b0
RE
1122 return 1;
1123}
1124
cce8749e
CH
1125/* Return TRUE if int I is a valid immediate ARM constant. */
1126
1127int
e32bac5b 1128const_ok_for_arm (HOST_WIDE_INT i)
cce8749e 1129{
30cf4896 1130 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 1131
56636818
JL
1132 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
1133 be all zero, or all one. */
30cf4896
KG
1134 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
1135 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
1136 != ((~(unsigned HOST_WIDE_INT) 0)
1137 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
56636818
JL
1138 return FALSE;
1139
e2c671ba
RE
1140 /* Fast return for 0 and powers of 2 */
1141 if ((i & (i - 1)) == 0)
1142 return TRUE;
1143
cce8749e
CH
1144 do
1145 {
30cf4896 1146 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
f3bb6135 1147 return TRUE;
abaa26e5 1148 mask =
30cf4896
KG
1149 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
1150 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
ebe413e5
NC
1151 }
1152 while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 1153
f3bb6135
RE
1154 return FALSE;
1155}
cce8749e 1156
6354dc9b 1157/* Return true if I is a valid constant for the operation CODE. */
74bbc178 1158static int
e32bac5b 1159const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
e2c671ba
RE
1160{
1161 if (const_ok_for_arm (i))
1162 return 1;
1163
1164 switch (code)
1165 {
1166 case PLUS:
1167 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
1168
1169 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
1170 case XOR:
1171 case IOR:
1172 return 0;
1173
1174 case AND:
1175 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
1176
1177 default:
1178 abort ();
1179 }
1180}
1181
1182/* Emit a sequence of insns to handle a large constant.
1183 CODE is the code of the operation required, it can be any of SET, PLUS,
1184 IOR, AND, XOR, MINUS;
1185 MODE is the mode in which the operation is being performed;
1186 VAL is the integer to operate on;
1187 SOURCE is the other operand (a register, or a null-pointer for SET);
1188 SUBTARGETS means it is safe to create scratch registers if that will
2b835d68
RE
1189 either produce a simpler sequence, or we will want to cse the values.
1190 Return value is the number of insns emitted. */
e2c671ba
RE
1191
1192int
e32bac5b
RE
1193arm_split_constant (enum rtx_code code, enum machine_mode mode,
1194 HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
2b835d68
RE
1195{
1196 if (subtargets || code == SET
1197 || (GET_CODE (target) == REG && GET_CODE (source) == REG
1198 && REGNO (target) != REGNO (source)))
1199 {
4b632bf1 1200 /* After arm_reorg has been called, we can't fix up expensive
05713b80 1201 constants by pushing them into memory so we must synthesize
4b632bf1
RE
1202 them in-line, regardless of the cost. This is only likely to
1203 be more costly on chips that have load delay slots and we are
1204 compiling without running the scheduler (so no splitting
aec3cfba
NC
1205 occurred before the final instruction emission).
1206
1207 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
aec3cfba 1208 */
5895f793 1209 if (!after_arm_reorg
4b632bf1
RE
1210 && (arm_gen_constant (code, mode, val, target, source, 1, 0)
1211 > arm_constant_limit + (code != SET)))
2b835d68
RE
1212 {
1213 if (code == SET)
1214 {
1215 /* Currently SET is the only monadic value for CODE, all
1216 the rest are diadic. */
43cffd11 1217 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
2b835d68
RE
1218 return 1;
1219 }
1220 else
1221 {
1222 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
1223
43cffd11 1224 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
2b835d68
RE
1225 /* For MINUS, the value is subtracted from, since we never
1226 have subtraction of a constant. */
1227 if (code == MINUS)
43cffd11 1228 emit_insn (gen_rtx_SET (VOIDmode, target,
d5b7b3ae 1229 gen_rtx_MINUS (mode, temp, source)));
2b835d68 1230 else
43cffd11
RE
1231 emit_insn (gen_rtx_SET (VOIDmode, target,
1232 gen_rtx (code, mode, source, temp)));
2b835d68
RE
1233 return 2;
1234 }
1235 }
1236 }
1237
1238 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
1239}
1240
ceebdb09 1241static int
e32bac5b 1242count_insns_for_constant (HOST_WIDE_INT remainder, int i)
ceebdb09
PB
1243{
1244 HOST_WIDE_INT temp1;
1245 int num_insns = 0;
1246 do
1247 {
1248 int end;
1249
1250 if (i <= 0)
1251 i += 32;
1252 if (remainder & (3 << (i - 2)))
1253 {
1254 end = i - 8;
1255 if (end < 0)
1256 end += 32;
1257 temp1 = remainder & ((0x0ff << end)
1258 | ((i < end) ? (0xff >> (32 - end)) : 0));
1259 remainder &= ~temp1;
1260 num_insns++;
1261 i -= 6;
1262 }
1263 i -= 2;
1264 } while (remainder);
1265 return num_insns;
1266}
1267
2b835d68
RE
1268/* As above, but extra parameter GENERATE which, if clear, suppresses
1269 RTL generation. */
1d6e90ac 1270
d5b7b3ae 1271static int
e32bac5b
RE
1272arm_gen_constant (enum rtx_code code, enum machine_mode mode,
1273 HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
1274 int generate)
e2c671ba 1275{
e2c671ba
RE
1276 int can_invert = 0;
1277 int can_negate = 0;
1278 int can_negate_initial = 0;
1279 int can_shift = 0;
1280 int i;
1281 int num_bits_set = 0;
1282 int set_sign_bit_copies = 0;
1283 int clear_sign_bit_copies = 0;
1284 int clear_zero_bit_copies = 0;
1285 int set_zero_bit_copies = 0;
1286 int insns = 0;
e2c671ba 1287 unsigned HOST_WIDE_INT temp1, temp2;
30cf4896 1288 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
e2c671ba 1289
d5b7b3ae 1290 /* Find out which operations are safe for a given CODE. Also do a quick
e2c671ba
RE
1291 check for degenerate cases; these can occur when DImode operations
1292 are split. */
1293 switch (code)
1294 {
1295 case SET:
1296 can_invert = 1;
1297 can_shift = 1;
1298 can_negate = 1;
1299 break;
1300
1301 case PLUS:
1302 can_negate = 1;
1303 can_negate_initial = 1;
1304 break;
1305
1306 case IOR:
30cf4896 1307 if (remainder == 0xffffffff)
e2c671ba 1308 {
2b835d68 1309 if (generate)
43cffd11
RE
1310 emit_insn (gen_rtx_SET (VOIDmode, target,
1311 GEN_INT (ARM_SIGN_EXTEND (val))));
e2c671ba
RE
1312 return 1;
1313 }
1314 if (remainder == 0)
1315 {
1316 if (reload_completed && rtx_equal_p (target, source))
1317 return 0;
2b835d68 1318 if (generate)
43cffd11 1319 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
1320 return 1;
1321 }
1322 break;
1323
1324 case AND:
1325 if (remainder == 0)
1326 {
2b835d68 1327 if (generate)
43cffd11 1328 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
e2c671ba
RE
1329 return 1;
1330 }
30cf4896 1331 if (remainder == 0xffffffff)
e2c671ba
RE
1332 {
1333 if (reload_completed && rtx_equal_p (target, source))
1334 return 0;
2b835d68 1335 if (generate)
43cffd11 1336 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
1337 return 1;
1338 }
1339 can_invert = 1;
1340 break;
1341
1342 case XOR:
1343 if (remainder == 0)
1344 {
1345 if (reload_completed && rtx_equal_p (target, source))
1346 return 0;
2b835d68 1347 if (generate)
43cffd11 1348 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
1349 return 1;
1350 }
30cf4896 1351 if (remainder == 0xffffffff)
e2c671ba 1352 {
2b835d68 1353 if (generate)
43cffd11
RE
1354 emit_insn (gen_rtx_SET (VOIDmode, target,
1355 gen_rtx_NOT (mode, source)));
e2c671ba
RE
1356 return 1;
1357 }
1358
1359 /* We don't know how to handle this yet below. */
1360 abort ();
1361
1362 case MINUS:
1363 /* We treat MINUS as (val - source), since (source - val) is always
1364 passed as (source + (-val)). */
1365 if (remainder == 0)
1366 {
2b835d68 1367 if (generate)
43cffd11
RE
1368 emit_insn (gen_rtx_SET (VOIDmode, target,
1369 gen_rtx_NEG (mode, source)));
e2c671ba
RE
1370 return 1;
1371 }
1372 if (const_ok_for_arm (val))
1373 {
2b835d68 1374 if (generate)
43cffd11
RE
1375 emit_insn (gen_rtx_SET (VOIDmode, target,
1376 gen_rtx_MINUS (mode, GEN_INT (val),
1377 source)));
e2c671ba
RE
1378 return 1;
1379 }
1380 can_negate = 1;
1381
1382 break;
1383
1384 default:
1385 abort ();
1386 }
1387
6354dc9b 1388 /* If we can do it in one insn get out quickly. */
e2c671ba
RE
1389 if (const_ok_for_arm (val)
1390 || (can_negate_initial && const_ok_for_arm (-val))
1391 || (can_invert && const_ok_for_arm (~val)))
1392 {
2b835d68 1393 if (generate)
43cffd11
RE
1394 emit_insn (gen_rtx_SET (VOIDmode, target,
1395 (source ? gen_rtx (code, mode, source,
1396 GEN_INT (val))
1397 : GEN_INT (val))));
e2c671ba
RE
1398 return 1;
1399 }
1400
e2c671ba 1401 /* Calculate a few attributes that may be useful for specific
6354dc9b 1402 optimizations. */
e2c671ba
RE
1403 for (i = 31; i >= 0; i--)
1404 {
1405 if ((remainder & (1 << i)) == 0)
1406 clear_sign_bit_copies++;
1407 else
1408 break;
1409 }
1410
1411 for (i = 31; i >= 0; i--)
1412 {
1413 if ((remainder & (1 << i)) != 0)
1414 set_sign_bit_copies++;
1415 else
1416 break;
1417 }
1418
1419 for (i = 0; i <= 31; i++)
1420 {
1421 if ((remainder & (1 << i)) == 0)
1422 clear_zero_bit_copies++;
1423 else
1424 break;
1425 }
1426
1427 for (i = 0; i <= 31; i++)
1428 {
1429 if ((remainder & (1 << i)) != 0)
1430 set_zero_bit_copies++;
1431 else
1432 break;
1433 }
1434
1435 switch (code)
1436 {
1437 case SET:
1438 /* See if we can do this by sign_extending a constant that is known
1439 to be negative. This is a good, way of doing it, since the shift
1440 may well merge into a subsequent insn. */
1441 if (set_sign_bit_copies > 1)
1442 {
1443 if (const_ok_for_arm
1444 (temp1 = ARM_SIGN_EXTEND (remainder
1445 << (set_sign_bit_copies - 1))))
1446 {
2b835d68
RE
1447 if (generate)
1448 {
d499463f 1449 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
1450 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1451 GEN_INT (temp1)));
2b835d68
RE
1452 emit_insn (gen_ashrsi3 (target, new_src,
1453 GEN_INT (set_sign_bit_copies - 1)));
1454 }
e2c671ba
RE
1455 return 2;
1456 }
1457 /* For an inverted constant, we will need to set the low bits,
1458 these will be shifted out of harm's way. */
1459 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
1460 if (const_ok_for_arm (~temp1))
1461 {
2b835d68
RE
1462 if (generate)
1463 {
d499463f 1464 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
1465 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1466 GEN_INT (temp1)));
2b835d68
RE
1467 emit_insn (gen_ashrsi3 (target, new_src,
1468 GEN_INT (set_sign_bit_copies - 1)));
1469 }
e2c671ba
RE
1470 return 2;
1471 }
1472 }
1473
1474 /* See if we can generate this by setting the bottom (or the top)
1475 16 bits, and then shifting these into the other half of the
1476 word. We only look for the simplest cases, to do more would cost
1477 too much. Be careful, however, not to generate this when the
1478 alternative would take fewer insns. */
30cf4896 1479 if (val & 0xffff0000)
e2c671ba 1480 {
30cf4896 1481 temp1 = remainder & 0xffff0000;
e2c671ba
RE
1482 temp2 = remainder & 0x0000ffff;
1483
6354dc9b 1484 /* Overlaps outside this range are best done using other methods. */
e2c671ba
RE
1485 for (i = 9; i < 24; i++)
1486 {
30cf4896 1487 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
5895f793 1488 && !const_ok_for_arm (temp2))
e2c671ba 1489 {
d499463f
RE
1490 rtx new_src = (subtargets
1491 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1492 : target);
1493 insns = arm_gen_constant (code, mode, temp2, new_src,
2b835d68 1494 source, subtargets, generate);
e2c671ba 1495 source = new_src;
2b835d68 1496 if (generate)
43cffd11
RE
1497 emit_insn (gen_rtx_SET
1498 (VOIDmode, target,
1499 gen_rtx_IOR (mode,
1500 gen_rtx_ASHIFT (mode, source,
1501 GEN_INT (i)),
1502 source)));
e2c671ba
RE
1503 return insns + 1;
1504 }
1505 }
1506
6354dc9b 1507 /* Don't duplicate cases already considered. */
e2c671ba
RE
1508 for (i = 17; i < 24; i++)
1509 {
1510 if (((temp1 | (temp1 >> i)) == remainder)
5895f793 1511 && !const_ok_for_arm (temp1))
e2c671ba 1512 {
d499463f
RE
1513 rtx new_src = (subtargets
1514 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1515 : target);
1516 insns = arm_gen_constant (code, mode, temp1, new_src,
2b835d68 1517 source, subtargets, generate);
e2c671ba 1518 source = new_src;
2b835d68 1519 if (generate)
43cffd11
RE
1520 emit_insn
1521 (gen_rtx_SET (VOIDmode, target,
1522 gen_rtx_IOR
1523 (mode,
1524 gen_rtx_LSHIFTRT (mode, source,
1525 GEN_INT (i)),
1526 source)));
e2c671ba
RE
1527 return insns + 1;
1528 }
1529 }
1530 }
1531 break;
1532
1533 case IOR:
1534 case XOR:
7b64da89
RE
1535 /* If we have IOR or XOR, and the constant can be loaded in a
1536 single instruction, and we can find a temporary to put it in,
e2c671ba
RE
1537 then this can be done in two instructions instead of 3-4. */
1538 if (subtargets
d499463f 1539 /* TARGET can't be NULL if SUBTARGETS is 0 */
5895f793 1540 || (reload_completed && !reg_mentioned_p (target, source)))
e2c671ba 1541 {
5895f793 1542 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
e2c671ba 1543 {
2b835d68
RE
1544 if (generate)
1545 {
1546 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
e2c671ba 1547
43cffd11
RE
1548 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1549 emit_insn (gen_rtx_SET (VOIDmode, target,
1550 gen_rtx (code, mode, source, sub)));
2b835d68 1551 }
e2c671ba
RE
1552 return 2;
1553 }
1554 }
1555
1556 if (code == XOR)
1557 break;
1558
1559 if (set_sign_bit_copies > 8
1560 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1561 {
2b835d68
RE
1562 if (generate)
1563 {
1564 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1565 rtx shift = GEN_INT (set_sign_bit_copies);
1566
43cffd11
RE
1567 emit_insn (gen_rtx_SET (VOIDmode, sub,
1568 gen_rtx_NOT (mode,
1569 gen_rtx_ASHIFT (mode,
1570 source,
f5a1b0d2 1571 shift))));
43cffd11
RE
1572 emit_insn (gen_rtx_SET (VOIDmode, target,
1573 gen_rtx_NOT (mode,
1574 gen_rtx_LSHIFTRT (mode, sub,
1575 shift))));
2b835d68 1576 }
e2c671ba
RE
1577 return 2;
1578 }
1579
1580 if (set_zero_bit_copies > 8
1581 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1582 {
2b835d68
RE
1583 if (generate)
1584 {
1585 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1586 rtx shift = GEN_INT (set_zero_bit_copies);
1587
43cffd11
RE
1588 emit_insn (gen_rtx_SET (VOIDmode, sub,
1589 gen_rtx_NOT (mode,
1590 gen_rtx_LSHIFTRT (mode,
1591 source,
f5a1b0d2 1592 shift))));
43cffd11
RE
1593 emit_insn (gen_rtx_SET (VOIDmode, target,
1594 gen_rtx_NOT (mode,
1595 gen_rtx_ASHIFT (mode, sub,
f5a1b0d2 1596 shift))));
2b835d68 1597 }
e2c671ba
RE
1598 return 2;
1599 }
1600
5895f793 1601 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
e2c671ba 1602 {
2b835d68
RE
1603 if (generate)
1604 {
1605 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
1606 emit_insn (gen_rtx_SET (VOIDmode, sub,
1607 gen_rtx_NOT (mode, source)));
2b835d68
RE
1608 source = sub;
1609 if (subtargets)
1610 sub = gen_reg_rtx (mode);
43cffd11
RE
1611 emit_insn (gen_rtx_SET (VOIDmode, sub,
1612 gen_rtx_AND (mode, source,
1613 GEN_INT (temp1))));
1614 emit_insn (gen_rtx_SET (VOIDmode, target,
1615 gen_rtx_NOT (mode, sub)));
2b835d68 1616 }
e2c671ba
RE
1617 return 3;
1618 }
1619 break;
1620
1621 case AND:
1622 /* See if two shifts will do 2 or more insn's worth of work. */
1623 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1624 {
30cf4896 1625 HOST_WIDE_INT shift_mask = ((0xffffffff
e2c671ba 1626 << (32 - clear_sign_bit_copies))
30cf4896 1627 & 0xffffffff);
e2c671ba 1628
30cf4896 1629 if ((remainder | shift_mask) != 0xffffffff)
e2c671ba 1630 {
2b835d68
RE
1631 if (generate)
1632 {
d499463f 1633 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68 1634 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1635 new_src, source, subtargets, 1);
1636 source = new_src;
2b835d68
RE
1637 }
1638 else
d499463f
RE
1639 {
1640 rtx targ = subtargets ? NULL_RTX : target;
1641 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1642 targ, source, subtargets, 0);
1643 }
2b835d68
RE
1644 }
1645
1646 if (generate)
1647 {
d499463f
RE
1648 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1649 rtx shift = GEN_INT (clear_sign_bit_copies);
1650
1651 emit_insn (gen_ashlsi3 (new_src, source, shift));
1652 emit_insn (gen_lshrsi3 (target, new_src, shift));
e2c671ba
RE
1653 }
1654
e2c671ba
RE
1655 return insns + 2;
1656 }
1657
1658 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1659 {
1660 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
e2c671ba 1661
30cf4896 1662 if ((remainder | shift_mask) != 0xffffffff)
e2c671ba 1663 {
2b835d68
RE
1664 if (generate)
1665 {
d499463f
RE
1666 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1667
2b835d68 1668 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1669 new_src, source, subtargets, 1);
1670 source = new_src;
2b835d68
RE
1671 }
1672 else
d499463f
RE
1673 {
1674 rtx targ = subtargets ? NULL_RTX : target;
1675
1676 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1677 targ, source, subtargets, 0);
1678 }
2b835d68
RE
1679 }
1680
1681 if (generate)
1682 {
d499463f
RE
1683 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1684 rtx shift = GEN_INT (clear_zero_bit_copies);
1685
1686 emit_insn (gen_lshrsi3 (new_src, source, shift));
1687 emit_insn (gen_ashlsi3 (target, new_src, shift));
e2c671ba
RE
1688 }
1689
e2c671ba
RE
1690 return insns + 2;
1691 }
1692
1693 break;
1694
1695 default:
1696 break;
1697 }
1698
1699 for (i = 0; i < 32; i++)
1700 if (remainder & (1 << i))
1701 num_bits_set++;
1702
1703 if (code == AND || (can_invert && num_bits_set > 16))
30cf4896 1704 remainder = (~remainder) & 0xffffffff;
e2c671ba 1705 else if (code == PLUS && num_bits_set > 16)
30cf4896 1706 remainder = (-remainder) & 0xffffffff;
e2c671ba
RE
1707 else
1708 {
1709 can_invert = 0;
1710 can_negate = 0;
1711 }
1712
1713 /* Now try and find a way of doing the job in either two or three
1714 instructions.
1715 We start by looking for the largest block of zeros that are aligned on
1716 a 2-bit boundary, we then fill up the temps, wrapping around to the
1717 top of the word when we drop off the bottom.
6354dc9b 1718 In the worst case this code should produce no more than four insns. */
e2c671ba
RE
1719 {
1720 int best_start = 0;
1721 int best_consecutive_zeros = 0;
1722
1723 for (i = 0; i < 32; i += 2)
1724 {
1725 int consecutive_zeros = 0;
1726
5895f793 1727 if (!(remainder & (3 << i)))
e2c671ba 1728 {
5895f793 1729 while ((i < 32) && !(remainder & (3 << i)))
e2c671ba
RE
1730 {
1731 consecutive_zeros += 2;
1732 i += 2;
1733 }
1734 if (consecutive_zeros > best_consecutive_zeros)
1735 {
1736 best_consecutive_zeros = consecutive_zeros;
1737 best_start = i - consecutive_zeros;
1738 }
1739 i -= 2;
1740 }
1741 }
1742
ceebdb09
PB
1743 /* So long as it won't require any more insns to do so, it's
1744 desirable to emit a small constant (in bits 0...9) in the last
1745 insn. This way there is more chance that it can be combined with
1746 a later addressing insn to form a pre-indexed load or store
1747 operation. Consider:
1748
1749 *((volatile int *)0xe0000100) = 1;
1750 *((volatile int *)0xe0000110) = 2;
1751
1752 We want this to wind up as:
1753
1754 mov rA, #0xe0000000
1755 mov rB, #1
1756 str rB, [rA, #0x100]
1757 mov rB, #2
1758 str rB, [rA, #0x110]
1759
1760 rather than having to synthesize both large constants from scratch.
1761
1762 Therefore, we calculate how many insns would be required to emit
1763 the constant starting from `best_start', and also starting from
1764 zero (ie with bit 31 first to be output). If `best_start' doesn't
1765 yield a shorter sequence, we may as well use zero. */
1766 if (best_start != 0
1767 && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
1768 && (count_insns_for_constant (remainder, 0) <=
1769 count_insns_for_constant (remainder, best_start)))
1770 best_start = 0;
1771
1772 /* Now start emitting the insns. */
e2c671ba
RE
1773 i = best_start;
1774 do
1775 {
1776 int end;
1777
1778 if (i <= 0)
1779 i += 32;
1780 if (remainder & (3 << (i - 2)))
1781 {
1782 end = i - 8;
1783 if (end < 0)
1784 end += 32;
1785 temp1 = remainder & ((0x0ff << end)
1786 | ((i < end) ? (0xff >> (32 - end)) : 0));
1787 remainder &= ~temp1;
1788
d499463f 1789 if (generate)
e2c671ba 1790 {
9503f3d1
RH
1791 rtx new_src, temp1_rtx;
1792
1793 if (code == SET || code == MINUS)
1794 {
1795 new_src = (subtargets ? gen_reg_rtx (mode) : target);
96ae8197 1796 if (can_invert && code != MINUS)
9503f3d1
RH
1797 temp1 = ~temp1;
1798 }
1799 else
1800 {
96ae8197 1801 if (remainder && subtargets)
9503f3d1 1802 new_src = gen_reg_rtx (mode);
96ae8197
NC
1803 else
1804 new_src = target;
9503f3d1
RH
1805 if (can_invert)
1806 temp1 = ~temp1;
1807 else if (can_negate)
1808 temp1 = -temp1;
1809 }
1810
1811 temp1 = trunc_int_for_mode (temp1, mode);
1812 temp1_rtx = GEN_INT (temp1);
d499463f
RE
1813
1814 if (code == SET)
9503f3d1 1815 ;
d499463f 1816 else if (code == MINUS)
9503f3d1 1817 temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
d499463f 1818 else
9503f3d1
RH
1819 temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);
1820
1821 emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
d499463f 1822 source = new_src;
e2c671ba
RE
1823 }
1824
d499463f
RE
1825 if (code == SET)
1826 {
1827 can_invert = 0;
1828 code = PLUS;
1829 }
1830 else if (code == MINUS)
1831 code = PLUS;
1832
e2c671ba 1833 insns++;
e2c671ba
RE
1834 i -= 6;
1835 }
1836 i -= 2;
1d6e90ac
NC
1837 }
1838 while (remainder);
e2c671ba 1839 }
1d6e90ac 1840
e2c671ba
RE
1841 return insns;
1842}
1843
bd9c7e23
RE
1844/* Canonicalize a comparison so that we are more likely to recognize it.
1845 This can be done for a few constant compares, where we can make the
1846 immediate value easier to load. */
1d6e90ac 1847
bd9c7e23 1848enum rtx_code
e32bac5b 1849arm_canonicalize_comparison (enum rtx_code code, rtx * op1)
bd9c7e23 1850{
ad076f4e 1851 unsigned HOST_WIDE_INT i = INTVAL (*op1);
bd9c7e23
RE
1852
1853 switch (code)
1854 {
1855 case EQ:
1856 case NE:
1857 return code;
1858
1859 case GT:
1860 case LE:
30cf4896 1861 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
5895f793 1862 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
bd9c7e23 1863 {
5895f793 1864 *op1 = GEN_INT (i + 1);
bd9c7e23
RE
1865 return code == GT ? GE : LT;
1866 }
1867 break;
1868
1869 case GE:
1870 case LT:
30cf4896 1871 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
5895f793 1872 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
bd9c7e23 1873 {
5895f793 1874 *op1 = GEN_INT (i - 1);
bd9c7e23
RE
1875 return code == GE ? GT : LE;
1876 }
1877 break;
1878
1879 case GTU:
1880 case LEU:
30cf4896 1881 if (i != ~((unsigned HOST_WIDE_INT) 0)
5895f793 1882 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
bd9c7e23
RE
1883 {
1884 *op1 = GEN_INT (i + 1);
1885 return code == GTU ? GEU : LTU;
1886 }
1887 break;
1888
1889 case GEU:
1890 case LTU:
1891 if (i != 0
5895f793 1892 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
bd9c7e23
RE
1893 {
1894 *op1 = GEN_INT (i - 1);
1895 return code == GEU ? GTU : LEU;
1896 }
1897 break;
1898
1899 default:
1900 abort ();
1901 }
1902
1903 return code;
1904}
bd9c7e23 1905
f5a1b0d2
NC
1906/* Decide whether a type should be returned in memory (true)
1907 or in a register (false). This is called by the macro
1908 RETURN_IN_MEMORY. */
2b835d68 1909int
e32bac5b 1910arm_return_in_memory (tree type)
2b835d68 1911{
dc0ba55a
JT
1912 HOST_WIDE_INT size;
1913
5895f793 1914 if (!AGGREGATE_TYPE_P (type))
9e291dbe 1915 /* All simple types are returned in registers. */
d7d01975 1916 return 0;
dc0ba55a
JT
1917
1918 size = int_size_in_bytes (type);
1919
1920 if (TARGET_ATPCS)
1921 {
1922 /* ATPCS returns aggregate types in memory only if they are
1923 larger than a word (or are variable size). */
1924 return (size < 0 || size > UNITS_PER_WORD);
1925 }
d5b7b3ae 1926
6bc82793 1927 /* For the arm-wince targets we choose to be compatible with Microsoft's
d5b7b3ae
RE
1928 ARM and Thumb compilers, which always return aggregates in memory. */
1929#ifndef ARM_WINCE
e529bd42
NC
1930 /* All structures/unions bigger than one word are returned in memory.
1931 Also catch the case where int_size_in_bytes returns -1. In this case
6bc82793 1932 the aggregate is either huge or of variable size, and in either case
e529bd42 1933 we will want to return it via memory and not in a register. */
dc0ba55a 1934 if (size < 0 || size > UNITS_PER_WORD)
d7d01975 1935 return 1;
d5b7b3ae 1936
d7d01975 1937 if (TREE_CODE (type) == RECORD_TYPE)
2b835d68
RE
1938 {
1939 tree field;
1940
3a2ea258
RE
1941 /* For a struct the APCS says that we only return in a register
1942 if the type is 'integer like' and every addressable element
1943 has an offset of zero. For practical purposes this means
1944 that the structure can have at most one non bit-field element
1945 and that this element must be the first one in the structure. */
1946
f5a1b0d2
NC
1947 /* Find the first field, ignoring non FIELD_DECL things which will
1948 have been created by C++. */
1949 for (field = TYPE_FIELDS (type);
1950 field && TREE_CODE (field) != FIELD_DECL;
1951 field = TREE_CHAIN (field))
1952 continue;
1953
1954 if (field == NULL)
9e291dbe 1955 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
f5a1b0d2 1956
d5b7b3ae
RE
1957 /* Check that the first field is valid for returning in a register. */
1958
1959 /* ... Floats are not allowed */
9e291dbe 1960 if (FLOAT_TYPE_P (TREE_TYPE (field)))
3a2ea258
RE
1961 return 1;
1962
d5b7b3ae
RE
1963 /* ... Aggregates that are not themselves valid for returning in
1964 a register are not allowed. */
9e291dbe 1965 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
3a2ea258 1966 return 1;
6f7ebcbb 1967
3a2ea258
RE
1968 /* Now check the remaining fields, if any. Only bitfields are allowed,
1969 since they are not addressable. */
f5a1b0d2
NC
1970 for (field = TREE_CHAIN (field);
1971 field;
1972 field = TREE_CHAIN (field))
1973 {
1974 if (TREE_CODE (field) != FIELD_DECL)
1975 continue;
1976
5895f793 1977 if (!DECL_BIT_FIELD_TYPE (field))
f5a1b0d2
NC
1978 return 1;
1979 }
2b835d68
RE
1980
1981 return 0;
1982 }
d7d01975
NC
1983
1984 if (TREE_CODE (type) == UNION_TYPE)
2b835d68
RE
1985 {
1986 tree field;
1987
1988 /* Unions can be returned in registers if every element is
1989 integral, or can be returned in an integer register. */
f5a1b0d2
NC
1990 for (field = TYPE_FIELDS (type);
1991 field;
1992 field = TREE_CHAIN (field))
2b835d68 1993 {
f5a1b0d2
NC
1994 if (TREE_CODE (field) != FIELD_DECL)
1995 continue;
1996
6cc8c0b3
NC
1997 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1998 return 1;
1999
f5a1b0d2 2000 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
2b835d68
RE
2001 return 1;
2002 }
f5a1b0d2 2003
2b835d68
RE
2004 return 0;
2005 }
d5b7b3ae 2006#endif /* not ARM_WINCE */
f5a1b0d2 2007
d5b7b3ae 2008 /* Return all other types in memory. */
2b835d68
RE
2009 return 1;
2010}
2011
d6b4baa4 2012/* Indicate whether or not words of a double are in big-endian order. */
3717da94
JT
2013
2014int
e32bac5b 2015arm_float_words_big_endian (void)
3717da94 2016{
9b6b54e2
NC
2017 if (TARGET_CIRRUS)
2018 return 0;
3717da94
JT
2019
2020 /* For FPA, float words are always big-endian. For VFP, floats words
2021 follow the memory system mode. */
2022
2023 if (TARGET_HARD_FLOAT)
2024 {
2025 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
2026 return 1;
2027 }
2028
2029 if (TARGET_VFP)
2030 return (TARGET_BIG_END ? 1 : 0);
2031
2032 return 1;
2033}
2034
82e9d970
PB
2035/* Initialize a variable CUM of type CUMULATIVE_ARGS
2036 for a call to a function whose data type is FNTYPE.
2037 For a library call, FNTYPE is NULL. */
2038void
e32bac5b
RE
2039arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
2040 rtx libname ATTRIBUTE_UNUSED,
2041 tree fndecl ATTRIBUTE_UNUSED)
82e9d970
PB
2042{
2043 /* On the ARM, the offset starts at 0. */
61f71b34 2044 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype), fntype)) ? 1 : 0);
5a9335ef 2045 pcum->iwmmxt_nregs = 0;
c27ba912 2046
82e9d970
PB
2047 pcum->call_cookie = CALL_NORMAL;
2048
2049 if (TARGET_LONG_CALLS)
2050 pcum->call_cookie = CALL_LONG;
2051
2052 /* Check for long call/short call attributes. The attributes
2053 override any command line option. */
2054 if (fntype)
2055 {
2056 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
2057 pcum->call_cookie = CALL_SHORT;
2058 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
2059 pcum->call_cookie = CALL_LONG;
2060 }
5a9335ef
NC
2061
2062 /* Varargs vectors are treated the same as long long.
2063 named_count avoids having to change the way arm handles 'named' */
2064 pcum->named_count = 0;
2065 pcum->nargs = 0;
2066
2067 if (TARGET_REALLY_IWMMXT && fntype)
2068 {
2069 tree fn_arg;
2070
2071 for (fn_arg = TYPE_ARG_TYPES (fntype);
2072 fn_arg;
2073 fn_arg = TREE_CHAIN (fn_arg))
2074 pcum->named_count += 1;
2075
2076 if (! pcum->named_count)
2077 pcum->named_count = INT_MAX;
2078 }
82e9d970
PB
2079}
2080
2081/* Determine where to put an argument to a function.
2082 Value is zero to push the argument on the stack,
2083 or a hard register in which to store the argument.
2084
2085 MODE is the argument's machine mode.
2086 TYPE is the data type of the argument (as a tree).
2087 This is null for libcalls where that information may
2088 not be available.
2089 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2090 the preceding args and about the function being called.
2091 NAMED is nonzero if this argument is a named parameter
2092 (otherwise it is an extra parameter matching an ellipsis). */
1d6e90ac 2093
82e9d970 2094rtx
e32bac5b
RE
2095arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
2096 tree type ATTRIBUTE_UNUSED, int named)
82e9d970 2097{
5a9335ef
NC
2098 if (TARGET_REALLY_IWMMXT)
2099 {
2100 if (VECTOR_MODE_SUPPORTED_P (mode))
2101 {
2102 /* varargs vectors are treated the same as long long.
2103 named_count avoids having to change the way arm handles 'named' */
2104 if (pcum->named_count <= pcum->nargs + 1)
2105 {
2106 if (pcum->nregs == 1)
2107 pcum->nregs += 1;
2108 if (pcum->nregs <= 2)
2109 return gen_rtx_REG (mode, pcum->nregs);
2110 else
2111 return NULL_RTX;
2112 }
2113 else if (pcum->iwmmxt_nregs <= 9)
2114 return gen_rtx_REG (mode, pcum->iwmmxt_nregs + FIRST_IWMMXT_REGNUM);
2115 else
2116 return NULL_RTX;
2117 }
2118 else if ((mode == DImode || mode == DFmode) && pcum->nregs & 1)
2119 pcum->nregs += 1;
2120 }
2121
82e9d970
PB
2122 if (mode == VOIDmode)
2123 /* Compute operand 2 of the call insn. */
2124 return GEN_INT (pcum->call_cookie);
2125
5895f793 2126 if (!named || pcum->nregs >= NUM_ARG_REGS)
82e9d970
PB
2127 return NULL_RTX;
2128
2129 return gen_rtx_REG (mode, pcum->nregs);
2130}
1741620c
JD
2131
2132/* Variable sized types are passed by reference. This is a GCC
2133 extension to the ARM ABI. */
2134
2135int
e32bac5b
RE
2136arm_function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2137 enum machine_mode mode ATTRIBUTE_UNUSED,
2138 tree type, int named ATTRIBUTE_UNUSED)
1741620c
JD
2139{
2140 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2141}
2142
2143/* Implement va_arg. */
2144
2145rtx
e32bac5b 2146arm_va_arg (tree valist, tree type)
1741620c
JD
2147{
2148 /* Variable sized types are passed by reference. */
2149 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2150 {
2151 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2152 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2153 }
2154
5a9335ef
NC
2155 if (FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), NULL) == IWMMXT_ALIGNMENT)
2156 {
2157 tree minus_eight;
2158 tree t;
2159
2160 /* Maintain 64-bit alignment of the valist pointer by
093354e0 2161 constructing: valist = ((valist + (8 - 1)) & -8). */
5a9335ef
NC
2162 minus_eight = build_int_2 (- (IWMMXT_ALIGNMENT / BITS_PER_UNIT), -1);
2163 t = build_int_2 ((IWMMXT_ALIGNMENT / BITS_PER_UNIT) - 1, 0);
2164 t = build (PLUS_EXPR, TREE_TYPE (valist), valist, t);
2165 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, minus_eight);
2166 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
2167 TREE_SIDE_EFFECTS (t) = 1;
2168 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2169
f1ba665b 2170 /* This is to stop the combine pass optimizing
5a9335ef
NC
2171 away the alignment adjustment. */
2172 mark_reg_pointer (arg_pointer_rtx, PARM_BOUNDARY);
2173 }
2174
1741620c
JD
2175 return std_expand_builtin_va_arg (valist, type);
2176}
82e9d970 2177\f
c27ba912
DM
2178/* Encode the current state of the #pragma [no_]long_calls. */
2179typedef enum
82e9d970 2180{
c27ba912
DM
2181 OFF, /* No #pramgma [no_]long_calls is in effect. */
2182 LONG, /* #pragma long_calls is in effect. */
2183 SHORT /* #pragma no_long_calls is in effect. */
2184} arm_pragma_enum;
82e9d970 2185
c27ba912 2186static arm_pragma_enum arm_pragma_long_calls = OFF;
82e9d970 2187
8b97c5f8 2188void
e32bac5b 2189arm_pr_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
82e9d970 2190{
8b97c5f8
ZW
2191 arm_pragma_long_calls = LONG;
2192}
2193
2194void
e32bac5b 2195arm_pr_no_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
8b97c5f8
ZW
2196{
2197 arm_pragma_long_calls = SHORT;
2198}
2199
2200void
e32bac5b 2201arm_pr_long_calls_off (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
8b97c5f8
ZW
2202{
2203 arm_pragma_long_calls = OFF;
82e9d970
PB
2204}
2205\f
91d231cb
JM
2206/* Table of machine attributes. */
2207const struct attribute_spec arm_attribute_table[] =
82e9d970 2208{
91d231cb 2209 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
82e9d970
PB
2210 /* Function calls made to this symbol must be done indirectly, because
2211 it may lie outside of the 26 bit addressing range of a normal function
2212 call. */
91d231cb 2213 { "long_call", 0, 0, false, true, true, NULL },
82e9d970
PB
2214 /* Whereas these functions are always known to reside within the 26 bit
2215 addressing range. */
91d231cb 2216 { "short_call", 0, 0, false, true, true, NULL },
6d3d9133 2217 /* Interrupt Service Routines have special prologue and epilogue requirements. */
91d231cb
JM
2218 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2219 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2220 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2221#ifdef ARM_PE
2222 /* ARM/PE has three new attributes:
2223 interfacearm - ?
2224 dllexport - for exporting a function/variable that will live in a dll
2225 dllimport - for importing a function/variable from a dll
2226
2227 Microsoft allows multiple declspecs in one __declspec, separating
2228 them with spaces. We do NOT support this. Instead, use __declspec
2229 multiple times.
2230 */
2231 { "dllimport", 0, 0, true, false, false, NULL },
2232 { "dllexport", 0, 0, true, false, false, NULL },
2233 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2234#endif
2235 { NULL, 0, 0, false, false, false, NULL }
2236};
6d3d9133 2237
91d231cb
JM
2238/* Handle an attribute requiring a FUNCTION_DECL;
2239 arguments as in struct attribute_spec.handler. */
2240static tree
e32bac5b
RE
2241arm_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
2242 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
91d231cb
JM
2243{
2244 if (TREE_CODE (*node) != FUNCTION_DECL)
2245 {
2246 warning ("`%s' attribute only applies to functions",
2247 IDENTIFIER_POINTER (name));
2248 *no_add_attrs = true;
2249 }
2250
2251 return NULL_TREE;
2252}
2253
2254/* Handle an "interrupt" or "isr" attribute;
2255 arguments as in struct attribute_spec.handler. */
2256static tree
e32bac5b
RE
2257arm_handle_isr_attribute (tree *node, tree name, tree args, int flags,
2258 bool *no_add_attrs)
91d231cb
JM
2259{
2260 if (DECL_P (*node))
2261 {
2262 if (TREE_CODE (*node) != FUNCTION_DECL)
2263 {
2264 warning ("`%s' attribute only applies to functions",
2265 IDENTIFIER_POINTER (name));
2266 *no_add_attrs = true;
2267 }
2268 /* FIXME: the argument if any is checked for type attributes;
2269 should it be checked for decl ones? */
2270 }
2271 else
2272 {
2273 if (TREE_CODE (*node) == FUNCTION_TYPE
2274 || TREE_CODE (*node) == METHOD_TYPE)
2275 {
2276 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2277 {
2278 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2279 *no_add_attrs = true;
2280 }
2281 }
2282 else if (TREE_CODE (*node) == POINTER_TYPE
2283 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2284 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2285 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2286 {
2287 *node = build_type_copy (*node);
1d6e90ac
NC
2288 TREE_TYPE (*node) = build_type_attribute_variant
2289 (TREE_TYPE (*node),
2290 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
91d231cb
JM
2291 *no_add_attrs = true;
2292 }
2293 else
2294 {
2295 /* Possibly pass this attribute on from the type to a decl. */
2296 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2297 | (int) ATTR_FLAG_FUNCTION_NEXT
2298 | (int) ATTR_FLAG_ARRAY_NEXT))
2299 {
2300 *no_add_attrs = true;
2301 return tree_cons (name, args, NULL_TREE);
2302 }
2303 else
2304 {
2305 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2306 }
2307 }
2308 }
2309
2310 return NULL_TREE;
82e9d970
PB
2311}
2312
2313/* Return 0 if the attributes for two types are incompatible, 1 if they
2314 are compatible, and 2 if they are nearly compatible (which causes a
2315 warning to be generated). */
8d8e52be 2316static int
e32bac5b 2317arm_comp_type_attributes (tree type1, tree type2)
82e9d970 2318{
1cb8d58a 2319 int l1, l2, s1, s2;
bd7fc26f 2320
82e9d970
PB
2321 /* Check for mismatch of non-default calling convention. */
2322 if (TREE_CODE (type1) != FUNCTION_TYPE)
2323 return 1;
2324
2325 /* Check for mismatched call attributes. */
1cb8d58a
NC
2326 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2327 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2328 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2329 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
2330
2331 /* Only bother to check if an attribute is defined. */
2332 if (l1 | l2 | s1 | s2)
2333 {
2334 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 2335 if ((l1 != l2) || (s1 != s2))
bd7fc26f 2336 return 0;
82e9d970 2337
bd7fc26f
NC
2338 /* Disallow mixed attributes. */
2339 if ((l1 & s2) || (l2 & s1))
2340 return 0;
2341 }
2342
6d3d9133
NC
2343 /* Check for mismatched ISR attribute. */
2344 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2345 if (! l1)
2346 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2347 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2348 if (! l2)
2349 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2350 if (l1 != l2)
2351 return 0;
2352
bd7fc26f 2353 return 1;
82e9d970
PB
2354}
2355
c27ba912
DM
2356/* Encode long_call or short_call attribute by prefixing
2357 symbol name in DECL with a special character FLAG. */
2358void
e32bac5b 2359arm_encode_call_attribute (tree decl, int flag)
c27ba912 2360{
3cce094d 2361 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b 2362 int len = strlen (str);
d19fb8e3 2363 char * newstr;
c27ba912 2364
c27ba912
DM
2365 /* Do not allow weak functions to be treated as short call. */
2366 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2367 return;
c27ba912 2368
520a57c8
ZW
2369 newstr = alloca (len + 2);
2370 newstr[0] = flag;
2371 strcpy (newstr + 1, str);
c27ba912 2372
6d3d9133 2373 newstr = (char *) ggc_alloc_string (newstr, len + 1);
c27ba912
DM
2374 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2375}
2376
2377/* Assigns default attributes to newly defined type. This is used to
2378 set short_call/long_call attributes for function types of
2379 functions defined inside corresponding #pragma scopes. */
8d8e52be 2380static void
e32bac5b 2381arm_set_default_type_attributes (tree type)
c27ba912
DM
2382{
2383 /* Add __attribute__ ((long_call)) to all functions, when
2384 inside #pragma long_calls or __attribute__ ((short_call)),
2385 when inside #pragma no_long_calls. */
2386 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2387 {
2388 tree type_attr_list, attr_name;
2389 type_attr_list = TYPE_ATTRIBUTES (type);
2390
2391 if (arm_pragma_long_calls == LONG)
2392 attr_name = get_identifier ("long_call");
2393 else if (arm_pragma_long_calls == SHORT)
2394 attr_name = get_identifier ("short_call");
2395 else
2396 return;
2397
2398 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2399 TYPE_ATTRIBUTES (type) = type_attr_list;
2400 }
2401}
2402\f
2403/* Return 1 if the operand is a SYMBOL_REF for a function known to be
6bc82793 2404 defined within the current compilation unit. If this cannot be
c27ba912
DM
2405 determined, then 0 is returned. */
2406static int
e32bac5b 2407current_file_function_operand (rtx sym_ref)
c27ba912
DM
2408{
2409 /* This is a bit of a fib. A function will have a short call flag
2410 applied to its name if it has the short call attribute, or it has
2411 already been defined within the current compilation unit. */
2412 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2413 return 1;
2414
6d77b53e 2415 /* The current function is always defined within the current compilation
d6a7951f
JM
2416 unit. if it s a weak definition however, then this may not be the real
2417 definition of the function, and so we have to say no. */
c27ba912 2418 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
5895f793 2419 && !DECL_WEAK (current_function_decl))
c27ba912
DM
2420 return 1;
2421
2422 /* We cannot make the determination - default to returning 0. */
2423 return 0;
2424}
2425
825dda42 2426/* Return nonzero if a 32 bit "long_call" should be generated for
c27ba912
DM
2427 this call. We generate a long_call if the function:
2428
2429 a. has an __attribute__((long call))
2430 or b. is within the scope of a #pragma long_calls
2431 or c. the -mlong-calls command line switch has been specified
2432
2433 However we do not generate a long call if the function:
2434
2435 d. has an __attribute__ ((short_call))
2436 or e. is inside the scope of a #pragma no_long_calls
2437 or f. has an __attribute__ ((section))
2438 or g. is defined within the current compilation unit.
2439
2440 This function will be called by C fragments contained in the machine
2441 description file. CALL_REF and CALL_COOKIE correspond to the matched
2442 rtl operands. CALL_SYMBOL is used to distinguish between
2443 two different callers of the function. It is set to 1 in the
2444 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2445 and "call_value" patterns. This is because of the difference in the
2446 SYM_REFs passed by these patterns. */
2447int
e32bac5b 2448arm_is_longcall_p (rtx sym_ref, int call_cookie, int call_symbol)
c27ba912 2449{
5895f793 2450 if (!call_symbol)
c27ba912
DM
2451 {
2452 if (GET_CODE (sym_ref) != MEM)
2453 return 0;
2454
2455 sym_ref = XEXP (sym_ref, 0);
2456 }
2457
2458 if (GET_CODE (sym_ref) != SYMBOL_REF)
2459 return 0;
2460
2461 if (call_cookie & CALL_SHORT)
2462 return 0;
2463
2464 if (TARGET_LONG_CALLS && flag_function_sections)
2465 return 1;
2466
87e27392 2467 if (current_file_function_operand (sym_ref))
c27ba912
DM
2468 return 0;
2469
2470 return (call_cookie & CALL_LONG)
2471 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2472 || TARGET_LONG_CALLS;
2473}
f99fce0c 2474
825dda42 2475/* Return nonzero if it is ok to make a tail-call to DECL. */
4977bab6 2476static bool
e32bac5b 2477arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
f99fce0c
RE
2478{
2479 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2480
5a9335ef
NC
2481 if (cfun->machine->sibcall_blocked)
2482 return false;
2483
f99fce0c
RE
2484 /* Never tailcall something for which we have no decl, or if we
2485 are in Thumb mode. */
2486 if (decl == NULL || TARGET_THUMB)
4977bab6 2487 return false;
f99fce0c
RE
2488
2489 /* Get the calling method. */
2490 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2491 call_type = CALL_SHORT;
2492 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2493 call_type = CALL_LONG;
2494
2495 /* Cannot tail-call to long calls, since these are out of range of
2496 a branch instruction. However, if not compiling PIC, we know
2497 we can reach the symbol if it is in this compilation unit. */
5895f793 2498 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
4977bab6 2499 return false;
f99fce0c
RE
2500
2501 /* If we are interworking and the function is not declared static
2502 then we can't tail-call it unless we know that it exists in this
2503 compilation unit (since it might be a Thumb routine). */
5895f793 2504 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
4977bab6 2505 return false;
f99fce0c 2506
6d3d9133
NC
2507 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2508 if (IS_INTERRUPT (arm_current_func_type ()))
4977bab6 2509 return false;
6d3d9133 2510
f99fce0c 2511 /* Everything else is ok. */
4977bab6 2512 return true;
f99fce0c
RE
2513}
2514
82e9d970 2515\f
6b990f6b
RE
2516/* Addressing mode support functions. */
2517
0b4be7de 2518/* Return nonzero if X is a legitimate immediate operand when compiling
6b990f6b 2519 for PIC. */
32de079a 2520int
e32bac5b 2521legitimate_pic_operand_p (rtx x)
32de079a 2522{
d5b7b3ae
RE
2523 if (CONSTANT_P (x)
2524 && flag_pic
32de079a
RE
2525 && (GET_CODE (x) == SYMBOL_REF
2526 || (GET_CODE (x) == CONST
2527 && GET_CODE (XEXP (x, 0)) == PLUS
2528 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2529 return 0;
2530
2531 return 1;
2532}
2533
2534rtx
e32bac5b 2535legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
32de079a 2536{
a3c48721
RE
2537 if (GET_CODE (orig) == SYMBOL_REF
2538 || GET_CODE (orig) == LABEL_REF)
32de079a 2539 {
5f37d07c 2540#ifndef AOF_ASSEMBLER
32de079a 2541 rtx pic_ref, address;
5f37d07c 2542#endif
32de079a
RE
2543 rtx insn;
2544 int subregs = 0;
2545
2546 if (reg == 0)
2547 {
893f3d5b 2548 if (no_new_pseudos)
32de079a
RE
2549 abort ();
2550 else
2551 reg = gen_reg_rtx (Pmode);
2552
2553 subregs = 1;
2554 }
2555
2556#ifdef AOF_ASSEMBLER
2557 /* The AOF assembler can generate relocations for these directly, and
6354dc9b 2558 understands that the PIC register has to be added into the offset. */
32de079a
RE
2559 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2560#else
2561 if (subregs)
2562 address = gen_reg_rtx (Pmode);
2563 else
2564 address = reg;
2565
4bec9f7d
NC
2566 if (TARGET_ARM)
2567 emit_insn (gen_pic_load_addr_arm (address, orig));
2568 else
2569 emit_insn (gen_pic_load_addr_thumb (address, orig));
32de079a 2570
14f583b8
PB
2571 if ((GET_CODE (orig) == LABEL_REF
2572 || (GET_CODE (orig) == SYMBOL_REF &&
94428622 2573 SYMBOL_REF_LOCAL_P (orig)))
14f583b8 2574 && NEED_GOT_RELOC)
a3c48721
RE
2575 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2576 else
2577 {
2578 pic_ref = gen_rtx_MEM (Pmode,
2579 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2580 address));
2581 RTX_UNCHANGING_P (pic_ref) = 1;
2582 }
2583
32de079a
RE
2584 insn = emit_move_insn (reg, pic_ref);
2585#endif
2586 current_function_uses_pic_offset_table = 1;
2587 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2588 by loop. */
43cffd11
RE
2589 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2590 REG_NOTES (insn));
32de079a
RE
2591 return reg;
2592 }
2593 else if (GET_CODE (orig) == CONST)
2594 {
2595 rtx base, offset;
2596
2597 if (GET_CODE (XEXP (orig, 0)) == PLUS
2598 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2599 return orig;
2600
2601 if (reg == 0)
2602 {
893f3d5b 2603 if (no_new_pseudos)
32de079a
RE
2604 abort ();
2605 else
2606 reg = gen_reg_rtx (Pmode);
2607 }
2608
2609 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2610 {
2611 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2612 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2613 base == reg ? 0 : reg);
2614 }
2615 else
2616 abort ();
2617
2618 if (GET_CODE (offset) == CONST_INT)
2619 {
2620 /* The base register doesn't really matter, we only want to
2621 test the index for the appropriate mode. */
6b990f6b
RE
2622 if (!arm_legitimate_index_p (mode, offset, 0))
2623 {
2624 if (!no_new_pseudos)
2625 offset = force_reg (Pmode, offset);
2626 else
2627 abort ();
2628 }
32de079a 2629
32de079a 2630 if (GET_CODE (offset) == CONST_INT)
ed8908e7 2631 return plus_constant (base, INTVAL (offset));
32de079a
RE
2632 }
2633
2634 if (GET_MODE_SIZE (mode) > 4
2635 && (GET_MODE_CLASS (mode) == MODE_INT
2636 || TARGET_SOFT_FLOAT))
2637 {
2638 emit_insn (gen_addsi3 (reg, base, offset));
2639 return reg;
2640 }
2641
43cffd11 2642 return gen_rtx_PLUS (Pmode, base, offset);
32de079a 2643 }
32de079a
RE
2644
2645 return orig;
2646}
2647
c1163e75
PB
2648/* Generate code to load the PIC register. PROLOGUE is true if
2649 called from arm_expand_prologue (in which case we want the
2650 generated insns at the start of the function); false if called
2651 by an exception receiver that needs the PIC register reloaded
2652 (in which case the insns are just dumped at the current location). */
32de079a 2653void
e32bac5b 2654arm_finalize_pic (int prologue ATTRIBUTE_UNUSED)
32de079a
RE
2655{
2656#ifndef AOF_ASSEMBLER
c1163e75 2657 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
32de079a
RE
2658 rtx global_offset_table;
2659
ed0e6530 2660 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
2661 return;
2662
5895f793 2663 if (!flag_pic)
32de079a
RE
2664 abort ();
2665
2666 start_sequence ();
2667 l1 = gen_label_rtx ();
2668
43cffd11 2669 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768 2670 /* On the ARM the PC register contains 'dot + 8' at the time of the
d5b7b3ae
RE
2671 addition, on the Thumb it is 'dot + 4'. */
2672 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
84306176
PB
2673 if (GOT_PCREL)
2674 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 2675 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
2676 else
2677 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
2678
2679 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 2680
d5b7b3ae 2681 if (TARGET_ARM)
4bec9f7d
NC
2682 {
2683 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2684 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2685 }
d5b7b3ae 2686 else
4bec9f7d
NC
2687 {
2688 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2689 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2690 }
32de079a 2691
2f937369 2692 seq = get_insns ();
32de079a 2693 end_sequence ();
c1163e75
PB
2694 if (prologue)
2695 emit_insn_after (seq, get_insns ());
2696 else
2697 emit_insn (seq);
32de079a
RE
2698
2699 /* Need to emit this whether or not we obey regdecls,
2700 since setjmp/longjmp can cause life info to screw up. */
43cffd11 2701 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
2702#endif /* AOF_ASSEMBLER */
2703}
2704
6b990f6b
RE
2705/* Return nonzero if X is valid as an ARM state addressing register. */
2706static int
e32bac5b 2707arm_address_register_rtx_p (rtx x, int strict_p)
6b990f6b
RE
2708{
2709 int regno;
2710
2711 if (GET_CODE (x) != REG)
2712 return 0;
2713
2714 regno = REGNO (x);
2715
2716 if (strict_p)
2717 return ARM_REGNO_OK_FOR_BASE_P (regno);
2718
2719 return (regno <= LAST_ARM_REGNUM
2720 || regno >= FIRST_PSEUDO_REGISTER
2721 || regno == FRAME_POINTER_REGNUM
2722 || regno == ARG_POINTER_REGNUM);
2723}
2724
2725/* Return nonzero if X is a valid ARM state address operand. */
2726int
e32bac5b 2727arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
6b990f6b
RE
2728{
2729 if (arm_address_register_rtx_p (x, strict_p))
2730 return 1;
2731
2732 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2733 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2734
2735 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2736 && GET_MODE_SIZE (mode) <= 4
2737 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2738 && GET_CODE (XEXP (x, 1)) == PLUS
2739 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2740 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2741
2742 /* After reload constants split into minipools will have addresses
2743 from a LABEL_REF. */
0bfb39ef 2744 else if (reload_completed
6b990f6b
RE
2745 && (GET_CODE (x) == LABEL_REF
2746 || (GET_CODE (x) == CONST
2747 && GET_CODE (XEXP (x, 0)) == PLUS
2748 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2749 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2750 return 1;
2751
2752 else if (mode == TImode)
2753 return 0;
2754
2755 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2756 {
2757 if (GET_CODE (x) == PLUS
2758 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2759 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2760 {
2761 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2762
2763 if (val == 4 || val == -4 || val == -8)
2764 return 1;
2765 }
2766 }
2767
2768 else if (GET_CODE (x) == PLUS)
2769 {
2770 rtx xop0 = XEXP (x, 0);
2771 rtx xop1 = XEXP (x, 1);
2772
2773 return ((arm_address_register_rtx_p (xop0, strict_p)
2774 && arm_legitimate_index_p (mode, xop1, strict_p))
2775 || (arm_address_register_rtx_p (xop1, strict_p)
2776 && arm_legitimate_index_p (mode, xop0, strict_p)));
2777 }
2778
2779#if 0
2780 /* Reload currently can't handle MINUS, so disable this for now */
2781 else if (GET_CODE (x) == MINUS)
2782 {
2783 rtx xop0 = XEXP (x, 0);
2784 rtx xop1 = XEXP (x, 1);
2785
2786 return (arm_address_register_rtx_p (xop0, strict_p)
2787 && arm_legitimate_index_p (mode, xop1, strict_p));
2788 }
2789#endif
2790
2791 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2792 && GET_CODE (x) == SYMBOL_REF
2793 && CONSTANT_POOL_ADDRESS_P (x)
2794 && ! (flag_pic
2795 && symbol_mentioned_p (get_pool_constant (x))))
2796 return 1;
2797
2798 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2799 && (GET_MODE_SIZE (mode) <= 4)
2800 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2801 return 1;
2802
2803 return 0;
2804}
2805
2806/* Return nonzero if INDEX is valid for an address index operand in
2807 ARM state. */
2808static int
e32bac5b 2809arm_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
6b990f6b
RE
2810{
2811 HOST_WIDE_INT range;
2812 enum rtx_code code = GET_CODE (index);
2813
2814 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2815 return (code == CONST_INT && INTVAL (index) < 1024
2816 && INTVAL (index) > -1024
2817 && (INTVAL (index) & 3) == 0);
2818
9b6b54e2
NC
2819 if (TARGET_CIRRUS
2820 && (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode))
2821 return (code == CONST_INT
2822 && INTVAL (index) < 255
2823 && INTVAL (index) > -255);
2824
6b990f6b
RE
2825 if (arm_address_register_rtx_p (index, strict_p)
2826 && GET_MODE_SIZE (mode) <= 4)
2827 return 1;
2828
5a9335ef
NC
2829 if (TARGET_REALLY_IWMMXT && VALID_IWMMXT_REG_MODE (mode))
2830 return (code == CONST_INT
2831 && INTVAL (index) < 256
2832 && INTVAL (index) > -256);
2833
6b990f6b
RE
2834 /* XXX What about ldrsb? */
2835 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2836 && (!arm_arch4 || (mode) != HImode))
2837 {
2838 rtx xiop0 = XEXP (index, 0);
2839 rtx xiop1 = XEXP (index, 1);
2840
2841 return ((arm_address_register_rtx_p (xiop0, strict_p)
2842 && power_of_two_operand (xiop1, SImode))
2843 || (arm_address_register_rtx_p (xiop1, strict_p)
2844 && power_of_two_operand (xiop0, SImode)));
2845 }
2846
2847 if (GET_MODE_SIZE (mode) <= 4
2848 && (code == LSHIFTRT || code == ASHIFTRT
2849 || code == ASHIFT || code == ROTATERT)
2850 && (!arm_arch4 || (mode) != HImode))
2851 {
2852 rtx op = XEXP (index, 1);
2853
2854 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2855 && GET_CODE (op) == CONST_INT
2856 && INTVAL (op) > 0
2857 && INTVAL (op) <= 31);
2858 }
2859
2860 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2861 load, but that has a restricted addressing range and we are unable
2862 to tell here whether that is the case. To be safe we restrict all
2863 loads to that range. */
2864 range = ((mode) == HImode || (mode) == QImode)
2865 ? (arm_arch4 ? 256 : 4095) : 4096;
2866
2867 return (code == CONST_INT
2868 && INTVAL (index) < range
2869 && INTVAL (index) > -range);
76a318e9
RE
2870}
2871
edf7cee8 2872/* Return nonzero if X is valid as a Thumb state base register. */
76a318e9 2873static int
e32bac5b 2874thumb_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
76a318e9
RE
2875{
2876 int regno;
2877
2878 if (GET_CODE (x) != REG)
2879 return 0;
2880
2881 regno = REGNO (x);
2882
2883 if (strict_p)
2884 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2885
2886 return (regno <= LAST_LO_REGNUM
07e58265 2887 || regno > LAST_VIRTUAL_REGISTER
76a318e9
RE
2888 || regno == FRAME_POINTER_REGNUM
2889 || (GET_MODE_SIZE (mode) >= 4
2890 && (regno == STACK_POINTER_REGNUM
edf7cee8 2891 || regno >= FIRST_PSEUDO_REGISTER
76a318e9
RE
2892 || x == hard_frame_pointer_rtx
2893 || x == arg_pointer_rtx)));
2894}
2895
2896/* Return nonzero if x is a legitimate index register. This is the case
2897 for any base register that can access a QImode object. */
2898inline static int
e32bac5b 2899thumb_index_register_rtx_p (rtx x, int strict_p)
76a318e9
RE
2900{
2901 return thumb_base_register_rtx_p (x, QImode, strict_p);
2902}
2903
2904/* Return nonzero if x is a legitimate Thumb-state address.
2905
2906 The AP may be eliminated to either the SP or the FP, so we use the
2907 least common denominator, e.g. SImode, and offsets from 0 to 64.
2908
2909 ??? Verify whether the above is the right approach.
2910
2911 ??? Also, the FP may be eliminated to the SP, so perhaps that
2912 needs special handling also.
2913
2914 ??? Look at how the mips16 port solves this problem. It probably uses
2915 better ways to solve some of these problems.
2916
2917 Although it is not incorrect, we don't accept QImode and HImode
2918 addresses based on the frame pointer or arg pointer until the
2919 reload pass starts. This is so that eliminating such addresses
2920 into stack based ones won't produce impossible code. */
2921int
e32bac5b 2922thumb_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
76a318e9
RE
2923{
2924 /* ??? Not clear if this is right. Experiment. */
2925 if (GET_MODE_SIZE (mode) < 4
2926 && !(reload_in_progress || reload_completed)
2927 && (reg_mentioned_p (frame_pointer_rtx, x)
2928 || reg_mentioned_p (arg_pointer_rtx, x)
2929 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2930 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2931 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2932 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2933 return 0;
2934
2935 /* Accept any base register. SP only in SImode or larger. */
2936 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2937 return 1;
2938
18dbd950 2939 /* This is PC relative data before arm_reorg runs. */
76a318e9
RE
2940 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2941 && GET_CODE (x) == SYMBOL_REF
2942 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2943 return 1;
2944
18dbd950 2945 /* This is PC relative data after arm_reorg runs. */
76a318e9
RE
2946 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2947 && (GET_CODE (x) == LABEL_REF
2948 || (GET_CODE (x) == CONST
2949 && GET_CODE (XEXP (x, 0)) == PLUS
2950 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2951 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2952 return 1;
2953
2954 /* Post-inc indexing only supported for SImode and larger. */
2955 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2956 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2957 return 1;
2958
2959 else if (GET_CODE (x) == PLUS)
2960 {
2961 /* REG+REG address can be any two index registers. */
2962 /* We disallow FRAME+REG addressing since we know that FRAME
2963 will be replaced with STACK, and SP relative addressing only
2964 permits SP+OFFSET. */
2965 if (GET_MODE_SIZE (mode) <= 4
2966 && XEXP (x, 0) != frame_pointer_rtx
2967 && XEXP (x, 1) != frame_pointer_rtx
76a318e9
RE
2968 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2969 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2970 return 1;
2971
2972 /* REG+const has 5-7 bit offset for non-SP registers. */
2973 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2974 || XEXP (x, 0) == arg_pointer_rtx)
2975 && GET_CODE (XEXP (x, 1)) == CONST_INT
2976 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2977 return 1;
2978
2979 /* REG+const has 10 bit offset for SP, but only SImode and
2980 larger is supported. */
2981 /* ??? Should probably check for DI/DFmode overflow here
2982 just like GO_IF_LEGITIMATE_OFFSET does. */
2983 else if (GET_CODE (XEXP (x, 0)) == REG
2984 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2985 && GET_MODE_SIZE (mode) >= 4
2986 && GET_CODE (XEXP (x, 1)) == CONST_INT
2987 && INTVAL (XEXP (x, 1)) >= 0
2988 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2989 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2990 return 1;
2991
2992 else if (GET_CODE (XEXP (x, 0)) == REG
2993 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2994 && GET_MODE_SIZE (mode) >= 4
2995 && GET_CODE (XEXP (x, 1)) == CONST_INT
2996 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2997 return 1;
2998 }
2999
3000 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
f954388e 3001 && GET_MODE_SIZE (mode) == 4
76a318e9
RE
3002 && GET_CODE (x) == SYMBOL_REF
3003 && CONSTANT_POOL_ADDRESS_P (x)
3004 && !(flag_pic
3005 && symbol_mentioned_p (get_pool_constant (x))))
3006 return 1;
3007
3008 return 0;
3009}
3010
3011/* Return nonzero if VAL can be used as an offset in a Thumb-state address
3012 instruction of mode MODE. */
3013int
e32bac5b 3014thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
76a318e9
RE
3015{
3016 switch (GET_MODE_SIZE (mode))
3017 {
3018 case 1:
3019 return val >= 0 && val < 32;
3020
3021 case 2:
3022 return val >= 0 && val < 64 && (val & 1) == 0;
3023
3024 default:
3025 return (val >= 0
3026 && (val + GET_MODE_SIZE (mode)) <= 128
3027 && (val & 3) == 0);
3028 }
3029}
3030
ccf4d512
RE
3031/* Try machine-dependent ways of modifying an illegitimate address
3032 to be legitimate. If we find one, return the new, valid address. */
ccf4d512 3033rtx
e32bac5b 3034arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
ccf4d512
RE
3035{
3036 if (GET_CODE (x) == PLUS)
3037 {
3038 rtx xop0 = XEXP (x, 0);
3039 rtx xop1 = XEXP (x, 1);
3040
3041 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
3042 xop0 = force_reg (SImode, xop0);
3043
3044 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
3045 xop1 = force_reg (SImode, xop1);
3046
3047 if (ARM_BASE_REGISTER_RTX_P (xop0)
3048 && GET_CODE (xop1) == CONST_INT)
3049 {
3050 HOST_WIDE_INT n, low_n;
3051 rtx base_reg, val;
3052 n = INTVAL (xop1);
3053
3054 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
3055 {
3056 low_n = n & 0x0f;
3057 n &= ~0x0f;
3058 if (low_n > 4)
3059 {
3060 n += 16;
3061 low_n -= 16;
3062 }
3063 }
3064 else
3065 {
3066 low_n = ((mode) == TImode ? 0
3067 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
3068 n -= low_n;
3069 }
3070
3071 base_reg = gen_reg_rtx (SImode);
3072 val = force_operand (gen_rtx_PLUS (SImode, xop0,
3073 GEN_INT (n)), NULL_RTX);
3074 emit_move_insn (base_reg, val);
3075 x = (low_n == 0 ? base_reg
3076 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
3077 }
3078 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3079 x = gen_rtx_PLUS (SImode, xop0, xop1);
3080 }
3081
3082 /* XXX We don't allow MINUS any more -- see comment in
3083 arm_legitimate_address_p (). */
3084 else if (GET_CODE (x) == MINUS)
3085 {
3086 rtx xop0 = XEXP (x, 0);
3087 rtx xop1 = XEXP (x, 1);
3088
3089 if (CONSTANT_P (xop0))
3090 xop0 = force_reg (SImode, xop0);
3091
3092 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
3093 xop1 = force_reg (SImode, xop1);
3094
3095 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3096 x = gen_rtx_MINUS (SImode, xop0, xop1);
3097 }
3098
3099 if (flag_pic)
3100 {
3101 /* We need to find and carefully transform any SYMBOL and LABEL
3102 references; so go back to the original address expression. */
3103 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
3104
3105 if (new_x != orig_x)
3106 x = new_x;
3107 }
3108
3109 return x;
3110}
3111
6b990f6b
RE
3112\f
3113
e2c671ba
RE
3114#define REG_OR_SUBREG_REG(X) \
3115 (GET_CODE (X) == REG \
3116 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
3117
3118#define REG_OR_SUBREG_RTX(X) \
3119 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
3120
d5b7b3ae
RE
3121#ifndef COSTS_N_INSNS
3122#define COSTS_N_INSNS(N) ((N) * 4 - 2)
3123#endif
e32bac5b 3124/* Worker routine for arm_rtx_costs. */
3c50106f 3125static inline int
e32bac5b 3126arm_rtx_costs_1 (rtx x, enum rtx_code code, enum rtx_code outer)
e2c671ba
RE
3127{
3128 enum machine_mode mode = GET_MODE (x);
3129 enum rtx_code subcode;
3130 int extra_cost;
3131
d5b7b3ae
RE
3132 if (TARGET_THUMB)
3133 {
3134 switch (code)
3135 {
3136 case ASHIFT:
3137 case ASHIFTRT:
3138 case LSHIFTRT:
3139 case ROTATERT:
3140 case PLUS:
3141 case MINUS:
3142 case COMPARE:
3143 case NEG:
3144 case NOT:
3145 return COSTS_N_INSNS (1);
3146
3147 case MULT:
3148 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3149 {
3150 int cycles = 0;
3151 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
3152
3153 while (i)
3154 {
3155 i >>= 2;
5895f793 3156 cycles++;
d5b7b3ae
RE
3157 }
3158 return COSTS_N_INSNS (2) + cycles;
3159 }
3160 return COSTS_N_INSNS (1) + 16;
3161
3162 case SET:
3163 return (COSTS_N_INSNS (1)
3164 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
3165 + GET_CODE (SET_DEST (x)) == MEM));
3166
3167 case CONST_INT:
3168 if (outer == SET)
3169 {
3170 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3171 return 0;
3172 if (thumb_shiftable_const (INTVAL (x)))
3173 return COSTS_N_INSNS (2);
3174 return COSTS_N_INSNS (3);
3175 }
c769a35d 3176 else if ((outer == PLUS || outer == COMPARE)
d5b7b3ae 3177 && INTVAL (x) < 256 && INTVAL (x) > -256)
c769a35d
RE
3178 return 0;
3179 else if (outer == AND
3180 && INTVAL (x) < 256 && INTVAL (x) >= -256)
3181 return COSTS_N_INSNS (1);
d5b7b3ae
RE
3182 else if (outer == ASHIFT || outer == ASHIFTRT
3183 || outer == LSHIFTRT)
3184 return 0;
3185 return COSTS_N_INSNS (2);
3186
3187 case CONST:
3188 case CONST_DOUBLE:
3189 case LABEL_REF:
3190 case SYMBOL_REF:
3191 return COSTS_N_INSNS (3);
3192
3193 case UDIV:
3194 case UMOD:
3195 case DIV:
3196 case MOD:
3197 return 100;
3198
3199 case TRUNCATE:
3200 return 99;
3201
3202 case AND:
3203 case XOR:
3204 case IOR:
d6b4baa4 3205 /* XXX guess. */
d5b7b3ae
RE
3206 return 8;
3207
3208 case ADDRESSOF:
3209 case MEM:
3210 /* XXX another guess. */
3211 /* Memory costs quite a lot for the first word, but subsequent words
3212 load at the equivalent of a single insn each. */
3213 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
48f6efae
NC
3214 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3215 ? 4 : 0));
d5b7b3ae
RE
3216
3217 case IF_THEN_ELSE:
d6b4baa4 3218 /* XXX a guess. */
d5b7b3ae
RE
3219 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3220 return 14;
3221 return 2;
3222
3223 case ZERO_EXTEND:
3224 /* XXX still guessing. */
3225 switch (GET_MODE (XEXP (x, 0)))
3226 {
3227 case QImode:
3228 return (1 + (mode == DImode ? 4 : 0)
3229 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3230
3231 case HImode:
3232 return (4 + (mode == DImode ? 4 : 0)
3233 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3234
3235 case SImode:
3236 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3237
3238 default:
3239 return 99;
3240 }
3241
3242 default:
3243 return 99;
d5b7b3ae
RE
3244 }
3245 }
3246
e2c671ba
RE
3247 switch (code)
3248 {
3249 case MEM:
3250 /* Memory costs quite a lot for the first word, but subsequent words
3251 load at the equivalent of a single insn each. */
3252 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
48f6efae
NC
3253 + (GET_CODE (x) == SYMBOL_REF
3254 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
e2c671ba
RE
3255
3256 case DIV:
3257 case MOD:
b9c53150
RS
3258 case UDIV:
3259 case UMOD:
3260 return optimize_size ? COSTS_N_INSNS (2) : 100;
e2c671ba
RE
3261
3262 case ROTATE:
3263 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3264 return 4;
3265 /* Fall through */
3266 case ROTATERT:
3267 if (mode != SImode)
3268 return 8;
3269 /* Fall through */
3270 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3271 if (mode == DImode)
3272 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3273 + ((GET_CODE (XEXP (x, 0)) == REG
3274 || (GET_CODE (XEXP (x, 0)) == SUBREG
3275 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3276 ? 0 : 8));
3277 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3278 || (GET_CODE (XEXP (x, 0)) == SUBREG
3279 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3280 ? 0 : 4)
3281 + ((GET_CODE (XEXP (x, 1)) == REG
3282 || (GET_CODE (XEXP (x, 1)) == SUBREG
3283 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3284 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3285 ? 0 : 4));
3286
3287 case MINUS:
3288 if (mode == DImode)
3289 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3290 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3291 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3292 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3293 ? 0 : 8));
3294
3295 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3296 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3297 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3b684012 3298 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
e2c671ba
RE
3299 ? 0 : 8)
3300 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3301 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3b684012 3302 && const_double_rtx_ok_for_fpa (XEXP (x, 0))))
e2c671ba
RE
3303 ? 0 : 8));
3304
3305 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3306 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3307 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3308 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3309 || subcode == ASHIFTRT || subcode == LSHIFTRT
3310 || subcode == ROTATE || subcode == ROTATERT
3311 || (subcode == MULT
3312 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3313 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3314 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3315 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3316 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3317 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3318 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3319 return 1;
3320 /* Fall through */
3321
3322 case PLUS:
3323 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3324 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3325 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3326 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3b684012 3327 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
e2c671ba
RE
3328 ? 0 : 8));
3329
3330 /* Fall through */
3331 case AND: case XOR: case IOR:
3332 extra_cost = 0;
3333
3334 /* Normally the frame registers will be spilt into reg+const during
3335 reload, so it is a bad idea to combine them with other instructions,
3336 since then they might not be moved outside of loops. As a compromise
3337 we allow integration with ops that have a constant as their second
3338 operand. */
3339 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3340 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3341 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3342 || (REG_OR_SUBREG_REG (XEXP (x, 0))
3343 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
3344 extra_cost = 4;
3345
3346 if (mode == DImode)
3347 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3348 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3349 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 3350 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
3351 ? 0 : 8));
3352
3353 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3354 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3355 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3356 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 3357 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
3358 ? 0 : 4));
3359
3360 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3361 return (1 + extra_cost
3362 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3363 || subcode == LSHIFTRT || subcode == ASHIFTRT
3364 || subcode == ROTATE || subcode == ROTATERT
3365 || (subcode == MULT
3366 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3367 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 3368 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
3369 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3370 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 3371 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
3372 ? 0 : 4));
3373
3374 return 8;
3375
3376 case MULT:
b111229a 3377 /* There is no point basing this on the tuning, since it is always the
6354dc9b 3378 fast variant if it exists at all. */
2b835d68
RE
3379 if (arm_fast_multiply && mode == DImode
3380 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3381 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3382 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3383 return 8;
3384
e2c671ba
RE
3385 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3386 || mode == DImode)
3387 return 30;
3388
3389 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3390 {
2b835d68 3391 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
30cf4896 3392 & (unsigned HOST_WIDE_INT) 0xffffffff);
e3b66120
JL
3393 int cost, const_ok = const_ok_for_arm (i);
3394 int j, booth_unit_size;
3395
3396 if (arm_tune_xscale)
3397 {
3398 unsigned HOST_WIDE_INT masked_const;
3399
3400 /* The cost will be related to two insns.
d6b4baa4 3401 First a load of the constant (MOV or LDR), then a multiply. */
e3b66120
JL
3402 cost = 2;
3403 if (! const_ok)
3404 cost += 1; /* LDR is probably more expensive because
d6b4baa4 3405 of longer result latency. */
e3b66120
JL
3406 masked_const = i & 0xffff8000;
3407 if (masked_const != 0 && masked_const != 0xffff8000)
3408 {
3409 masked_const = i & 0xf8000000;
3410 if (masked_const == 0 || masked_const == 0xf8000000)
3411 cost += 1;
3412 else
3413 cost += 2;
3414 }
3415 return cost;
3416 }
6354dc9b
NC
3417
3418 /* Tune as appropriate. */
e3b66120
JL
3419 cost = const_ok ? 4 : 8;
3420 booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2b835d68 3421 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 3422 {
2b835d68 3423 i >>= booth_unit_size;
e3b66120 3424 cost += 2;
e2c671ba
RE
3425 }
3426
e3b66120 3427 return cost;
e2c671ba
RE
3428 }
3429
aec3cfba 3430 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 3431 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
3432 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3433
56636818
JL
3434 case TRUNCATE:
3435 if (arm_fast_multiply && mode == SImode
3436 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3437 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3438 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3439 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3440 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3441 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3442 return 8;
3443 return 99;
3444
e2c671ba
RE
3445 case NEG:
3446 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3447 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3448 /* Fall through */
3449 case NOT:
3450 if (mode == DImode)
3451 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3452
3453 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3454
3455 case IF_THEN_ELSE:
3456 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3457 return 14;
3458 return 2;
3459
3460 case COMPARE:
3461 return 1;
3462
3463 case ABS:
3464 return 4 + (mode == DImode ? 4 : 0);
3465
3466 case SIGN_EXTEND:
3467 if (GET_MODE (XEXP (x, 0)) == QImode)
3468 return (4 + (mode == DImode ? 4 : 0)
3469 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3470 /* Fall through */
3471 case ZERO_EXTEND:
3472 switch (GET_MODE (XEXP (x, 0)))
3473 {
3474 case QImode:
3475 return (1 + (mode == DImode ? 4 : 0)
3476 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3477
3478 case HImode:
3479 return (4 + (mode == DImode ? 4 : 0)
3480 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3481
3482 case SImode:
3483 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e 3484
5a9335ef
NC
3485 case V8QImode:
3486 case V4HImode:
3487 case V2SImode:
3488 case V4QImode:
3489 case V2HImode:
3490 return 1;
3491
ad076f4e
RE
3492 default:
3493 break;
e2c671ba
RE
3494 }
3495 abort ();
3496
d5b7b3ae
RE
3497 case CONST_INT:
3498 if (const_ok_for_arm (INTVAL (x)))
3499 return outer == SET ? 2 : -1;
3500 else if (outer == AND
5895f793 3501 && const_ok_for_arm (~INTVAL (x)))
d5b7b3ae
RE
3502 return -1;
3503 else if ((outer == COMPARE
3504 || outer == PLUS || outer == MINUS)
5895f793 3505 && const_ok_for_arm (-INTVAL (x)))
d5b7b3ae
RE
3506 return -1;
3507 else
3508 return 5;
3509
3510 case CONST:
3511 case LABEL_REF:
3512 case SYMBOL_REF:
3513 return 6;
3514
3515 case CONST_DOUBLE:
3b684012 3516 if (const_double_rtx_ok_for_fpa (x))
d5b7b3ae
RE
3517 return outer == SET ? 2 : -1;
3518 else if ((outer == COMPARE || outer == PLUS)
3b684012 3519 && neg_const_double_rtx_ok_for_fpa (x))
d5b7b3ae
RE
3520 return -1;
3521 return 7;
3522
e2c671ba
RE
3523 default:
3524 return 99;
3525 }
3526}
32de079a 3527
3c50106f 3528static bool
e32bac5b 3529arm_rtx_costs (rtx x, int code, int outer_code, int *total)
3c50106f
RH
3530{
3531 *total = arm_rtx_costs_1 (x, code, outer_code);
3532 return true;
3533}
3534
dcefdf67
RH
3535/* All address computations that can be done are free, but rtx cost returns
3536 the same for practically all of them. So we weight the different types
3537 of address here in the order (most pref first):
d6b4baa4 3538 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
d2b6eb76
ZW
3539static inline int
3540arm_arm_address_cost (rtx x)
3541{
3542 enum rtx_code c = GET_CODE (x);
3543
3544 if (c == PRE_INC || c == PRE_DEC || c == POST_INC || c == POST_DEC)
3545 return 0;
3546 if (c == MEM || c == LABEL_REF || c == SYMBOL_REF)
3547 return 10;
3548
3549 if (c == PLUS || c == MINUS)
3550 {
3551 char cl0 = GET_RTX_CLASS (GET_CODE (XEXP (x, 0)));
3552 char cl1 = GET_RTX_CLASS (GET_CODE (XEXP (x, 1)));
3553
3554 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3555 return 2;
3556
3557 if (cl0 == '2' || cl0 == 'c' || cl1 == '2' || cl1 == 'c')
3558 return 3;
3559
3560 return 4;
3561 }
3562
3563 return 6;
3564}
3565
3566static inline int
3567arm_thumb_address_cost (rtx x)
3568{
3569 enum rtx_code c = GET_CODE (x);
3570
3571 if (c == REG)
3572 return 1;
3573 if (c == PLUS
3574 && GET_CODE (XEXP (x, 0)) == REG
3575 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3576 return 1;
3577
3578 return 2;
3579}
3580
dcefdf67 3581static int
e32bac5b 3582arm_address_cost (rtx x)
dcefdf67 3583{
d2b6eb76 3584 return TARGET_ARM ? arm_arm_address_cost (x) : arm_thumb_address_cost (x);
dcefdf67
RH
3585}
3586
103fc15d 3587static int
e32bac5b 3588arm_use_dfa_pipeline_interface (void)
103fc15d
BE
3589{
3590 return true;
3591}
3592
c237e94a 3593static int
e32bac5b 3594arm_adjust_cost (rtx insn, rtx link, rtx dep, int cost)
32de079a
RE
3595{
3596 rtx i_pat, d_pat;
3597
d19fb8e3
NC
3598 /* Some true dependencies can have a higher cost depending
3599 on precisely how certain input operands are used. */
4b3c2e48 3600 if (arm_tune_xscale
d19fb8e3 3601 && REG_NOTE_KIND (link) == 0
eda833e3
BE
3602 && recog_memoized (insn) >= 0
3603 && recog_memoized (dep) >= 0)
d19fb8e3
NC
3604 {
3605 int shift_opnum = get_attr_shift (insn);
3606 enum attr_type attr_type = get_attr_type (dep);
3607
3608 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3609 operand for INSN. If we have a shifted input operand and the
3610 instruction we depend on is another ALU instruction, then we may
3611 have to account for an additional stall. */
3612 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3613 {
3614 rtx shifted_operand;
3615 int opno;
3616
3617 /* Get the shifted operand. */
3618 extract_insn (insn);
3619 shifted_operand = recog_data.operand[shift_opnum];
3620
3621 /* Iterate over all the operands in DEP. If we write an operand
3622 that overlaps with SHIFTED_OPERAND, then we have increase the
3623 cost of this dependency. */
3624 extract_insn (dep);
3625 preprocess_constraints ();
3626 for (opno = 0; opno < recog_data.n_operands; opno++)
3627 {
3628 /* We can ignore strict inputs. */
3629 if (recog_data.operand_type[opno] == OP_IN)
3630 continue;
3631
3632 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3633 shifted_operand))
3634 return 2;
3635 }
3636 }
3637 }
3638
6354dc9b 3639 /* XXX This is not strictly true for the FPA. */
d5b7b3ae
RE
3640 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3641 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
b36ba79f
RE
3642 return 0;
3643
d5b7b3ae
RE
3644 /* Call insns don't incur a stall, even if they follow a load. */
3645 if (REG_NOTE_KIND (link) == 0
3646 && GET_CODE (insn) == CALL_INSN)
3647 return 1;
3648
32de079a
RE
3649 if ((i_pat = single_set (insn)) != NULL
3650 && GET_CODE (SET_SRC (i_pat)) == MEM
3651 && (d_pat = single_set (dep)) != NULL
3652 && GET_CODE (SET_DEST (d_pat)) == MEM)
3653 {
48f6efae 3654 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
32de079a
RE
3655 /* This is a load after a store, there is no conflict if the load reads
3656 from a cached area. Assume that loads from the stack, and from the
3657 constant pool are cached, and that others will miss. This is a
6354dc9b 3658 hack. */
32de079a 3659
48f6efae
NC
3660 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3661 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3662 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3663 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
949d79eb 3664 return 1;
32de079a
RE
3665 }
3666
3667 return cost;
3668}
3669
ff9940b0
RE
3670static int fpa_consts_inited = 0;
3671
1d6e90ac 3672static const char * const strings_fpa[8] =
62b10bbc 3673{
2b835d68
RE
3674 "0", "1", "2", "3",
3675 "4", "5", "0.5", "10"
3676};
ff9940b0
RE
3677
3678static REAL_VALUE_TYPE values_fpa[8];
3679
3680static void
e32bac5b 3681init_fpa_table (void)
ff9940b0
RE
3682{
3683 int i;
3684 REAL_VALUE_TYPE r;
3685
3686 for (i = 0; i < 8; i++)
3687 {
3688 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3689 values_fpa[i] = r;
3690 }
f3bb6135 3691
ff9940b0
RE
3692 fpa_consts_inited = 1;
3693}
3694
3b684012 3695/* Return TRUE if rtx X is a valid immediate FPA constant. */
cce8749e 3696int
e32bac5b 3697const_double_rtx_ok_for_fpa (rtx x)
cce8749e 3698{
ff9940b0
RE
3699 REAL_VALUE_TYPE r;
3700 int i;
3701
3702 if (!fpa_consts_inited)
3703 init_fpa_table ();
3704
3705 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3706 if (REAL_VALUE_MINUS_ZERO (r))
3707 return 0;
f3bb6135 3708
ff9940b0
RE
3709 for (i = 0; i < 8; i++)
3710 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3711 return 1;
f3bb6135 3712
ff9940b0 3713 return 0;
f3bb6135 3714}
ff9940b0 3715
3b684012 3716/* Return TRUE if rtx X is a valid immediate FPA constant. */
ff9940b0 3717int
e32bac5b 3718neg_const_double_rtx_ok_for_fpa (rtx x)
ff9940b0
RE
3719{
3720 REAL_VALUE_TYPE r;
3721 int i;
3722
3723 if (!fpa_consts_inited)
3724 init_fpa_table ();
3725
3726 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3727 r = REAL_VALUE_NEGATE (r);
3728 if (REAL_VALUE_MINUS_ZERO (r))
3729 return 0;
f3bb6135 3730
ff9940b0
RE
3731 for (i = 0; i < 8; i++)
3732 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3733 return 1;
f3bb6135 3734
ff9940b0 3735 return 0;
f3bb6135 3736}
cce8749e
CH
3737\f
3738/* Predicates for `match_operand' and `match_operator'. */
3739
ff9940b0 3740/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
3741 (SUBREG (MEM)...).
3742
3743 This function exists because at the time it was put in it led to better
3744 code. SUBREG(MEM) always needs a reload in the places where
3745 s_register_operand is used, and this seemed to lead to excessive
3746 reloading. */
ff9940b0 3747int
e32bac5b 3748s_register_operand (rtx op, enum machine_mode mode)
ff9940b0
RE
3749{
3750 if (GET_MODE (op) != mode && mode != VOIDmode)
3751 return 0;
3752
3753 if (GET_CODE (op) == SUBREG)
f3bb6135 3754 op = SUBREG_REG (op);
ff9940b0
RE
3755
3756 /* We don't consider registers whose class is NO_REGS
3757 to be a register operand. */
d5b7b3ae 3758 /* XXX might have to check for lo regs only for thumb ??? */
ff9940b0
RE
3759 return (GET_CODE (op) == REG
3760 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3761 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3762}
3763
b0888988
RE
3764/* A hard register operand (even before reload. */
3765int
e32bac5b 3766arm_hard_register_operand (rtx op, enum machine_mode mode)
b0888988
RE
3767{
3768 if (GET_MODE (op) != mode && mode != VOIDmode)
3769 return 0;
3770
3771 return (GET_CODE (op) == REG
3772 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3773}
3774
e2c671ba 3775/* Only accept reg, subreg(reg), const_int. */
e2c671ba 3776int
e32bac5b 3777reg_or_int_operand (rtx op, enum machine_mode mode)
e2c671ba
RE
3778{
3779 if (GET_CODE (op) == CONST_INT)
3780 return 1;
3781
3782 if (GET_MODE (op) != mode && mode != VOIDmode)
3783 return 0;
3784
3785 if (GET_CODE (op) == SUBREG)
3786 op = SUBREG_REG (op);
3787
3788 /* We don't consider registers whose class is NO_REGS
3789 to be a register operand. */
3790 return (GET_CODE (op) == REG
3791 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3792 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3793}
3794
ff9940b0 3795/* Return 1 if OP is an item in memory, given that we are in reload. */
ff9940b0 3796int
e32bac5b 3797arm_reload_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
ff9940b0
RE
3798{
3799 int regno = true_regnum (op);
3800
5895f793 3801 return (!CONSTANT_P (op)
ff9940b0
RE
3802 && (regno == -1
3803 || (GET_CODE (op) == REG
3804 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3805}
3806
4d818c85 3807/* Return 1 if OP is a valid memory address, but not valid for a signed byte
d5b7b3ae 3808 memory access (architecture V4).
f710504c 3809 MODE is QImode if called when computing constraints, or VOIDmode when
d5b7b3ae 3810 emitting patterns. In this latter case we cannot use memory_operand()
6bc82793 3811 because it will fail on badly formed MEMs, which is precisely what we are
d5b7b3ae 3812 trying to catch. */
4d818c85 3813int
e32bac5b 3814bad_signed_byte_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4d818c85 3815{
d5b7b3ae 3816 if (GET_CODE (op) != MEM)
4d818c85
RE
3817 return 0;
3818
3819 op = XEXP (op, 0);
3820
6354dc9b 3821 /* A sum of anything more complex than reg + reg or reg + const is bad. */
4d818c85 3822 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
5895f793
RE
3823 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3824 || (!s_register_operand (XEXP (op, 1), VOIDmode)
9c8cc54f 3825 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
3826 return 1;
3827
6354dc9b 3828 /* Big constants are also bad. */
4d818c85
RE
3829 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3830 && (INTVAL (XEXP (op, 1)) > 0xff
3831 || -INTVAL (XEXP (op, 1)) > 0xff))
3832 return 1;
3833
6354dc9b 3834 /* Everything else is good, or can will automatically be made so. */
4d818c85
RE
3835 return 0;
3836}
3837
cce8749e 3838/* Return TRUE for valid operands for the rhs of an ARM instruction. */
cce8749e 3839int
e32bac5b 3840arm_rhs_operand (rtx op, enum machine_mode mode)
cce8749e 3841{
ff9940b0 3842 return (s_register_operand (op, mode)
cce8749e 3843 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 3844}
cce8749e 3845
1d6e90ac
NC
3846/* Return TRUE for valid operands for the
3847 rhs of an ARM instruction, or a load. */
ff9940b0 3848int
e32bac5b 3849arm_rhsm_operand (rtx op, enum machine_mode mode)
ff9940b0
RE
3850{
3851 return (s_register_operand (op, mode)
3852 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3853 || memory_operand (op, mode));
f3bb6135 3854}
ff9940b0
RE
3855
3856/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3857 constant that is valid when negated. */
ff9940b0 3858int
e32bac5b 3859arm_add_operand (rtx op, enum machine_mode mode)
ff9940b0 3860{
d5b7b3ae
RE
3861 if (TARGET_THUMB)
3862 return thumb_cmp_operand (op, mode);
3863
ff9940b0
RE
3864 return (s_register_operand (op, mode)
3865 || (GET_CODE (op) == CONST_INT
3866 && (const_ok_for_arm (INTVAL (op))
3867 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 3868}
ff9940b0 3869
f9b9980e
RE
3870/* Return TRUE for valid ARM constants (or when valid if negated). */
3871int
91de08c3 3872arm_addimm_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
f9b9980e
RE
3873{
3874 return (GET_CODE (op) == CONST_INT
3875 && (const_ok_for_arm (INTVAL (op))
3876 || const_ok_for_arm (-INTVAL (op))));
3877}
3878
ff9940b0 3879int
e32bac5b 3880arm_not_operand (rtx op, enum machine_mode mode)
ff9940b0
RE
3881{
3882 return (s_register_operand (op, mode)
3883 || (GET_CODE (op) == CONST_INT
3884 && (const_ok_for_arm (INTVAL (op))
3885 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 3886}
ff9940b0 3887
5165176d
RE
3888/* Return TRUE if the operand is a memory reference which contains an
3889 offsettable address. */
3890int
e32bac5b 3891offsettable_memory_operand (rtx op, enum machine_mode mode)
5165176d
RE
3892{
3893 if (mode == VOIDmode)
3894 mode = GET_MODE (op);
3895
3896 return (mode == GET_MODE (op)
3897 && GET_CODE (op) == MEM
3898 && offsettable_address_p (reload_completed | reload_in_progress,
3899 mode, XEXP (op, 0)));
3900}
3901
3902/* Return TRUE if the operand is a memory reference which is, or can be
3903 made word aligned by adjusting the offset. */
3904int
e32bac5b 3905alignable_memory_operand (rtx op, enum machine_mode mode)
5165176d
RE
3906{
3907 rtx reg;
3908
3909 if (mode == VOIDmode)
3910 mode = GET_MODE (op);
3911
3912 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3913 return 0;
3914
3915 op = XEXP (op, 0);
3916
3917 return ((GET_CODE (reg = op) == REG
3918 || (GET_CODE (op) == SUBREG
3919 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3920 || (GET_CODE (op) == PLUS
3921 && GET_CODE (XEXP (op, 1)) == CONST_INT
3922 && (GET_CODE (reg = XEXP (op, 0)) == REG
3923 || (GET_CODE (XEXP (op, 0)) == SUBREG
3924 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
bdb429a5 3925 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
5165176d
RE
3926}
3927
b111229a
RE
3928/* Similar to s_register_operand, but does not allow hard integer
3929 registers. */
3930int
e32bac5b 3931f_register_operand (rtx op, enum machine_mode mode)
b111229a
RE
3932{
3933 if (GET_MODE (op) != mode && mode != VOIDmode)
3934 return 0;
3935
3936 if (GET_CODE (op) == SUBREG)
3937 op = SUBREG_REG (op);
3938
3939 /* We don't consider registers whose class is NO_REGS
3940 to be a register operand. */
3941 return (GET_CODE (op) == REG
3942 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3b684012 3943 || REGNO_REG_CLASS (REGNO (op)) == FPA_REGS));
b111229a
RE
3944}
3945
3b684012 3946/* Return TRUE for valid operands for the rhs of an FPA instruction. */
cce8749e 3947int
e32bac5b 3948fpa_rhs_operand (rtx op, enum machine_mode mode)
cce8749e 3949{
ff9940b0 3950 if (s_register_operand (op, mode))
f3bb6135 3951 return TRUE;
9ce71c6f
BS
3952
3953 if (GET_MODE (op) != mode && mode != VOIDmode)
3954 return FALSE;
3955
3956 if (GET_CODE (op) == CONST_DOUBLE)
3b684012 3957 return const_double_rtx_ok_for_fpa (op);
f3bb6135
RE
3958
3959 return FALSE;
3960}
cce8749e 3961
ff9940b0 3962int
e32bac5b 3963fpa_add_operand (rtx op, enum machine_mode mode)
ff9940b0
RE
3964{
3965 if (s_register_operand (op, mode))
f3bb6135 3966 return TRUE;
9ce71c6f
BS
3967
3968 if (GET_MODE (op) != mode && mode != VOIDmode)
3969 return FALSE;
3970
3971 if (GET_CODE (op) == CONST_DOUBLE)
3b684012
RE
3972 return (const_double_rtx_ok_for_fpa (op)
3973 || neg_const_double_rtx_ok_for_fpa (op));
f3bb6135
RE
3974
3975 return FALSE;
ff9940b0
RE
3976}
3977
9b6b54e2 3978/* Return nonzero if OP is a valid Cirrus memory address pattern. */
9b6b54e2 3979int
e32bac5b 3980cirrus_memory_offset (rtx op)
9b6b54e2
NC
3981{
3982 /* Reject eliminable registers. */
3983 if (! (reload_in_progress || reload_completed)
3984 && ( reg_mentioned_p (frame_pointer_rtx, op)
3985 || reg_mentioned_p (arg_pointer_rtx, op)
3986 || reg_mentioned_p (virtual_incoming_args_rtx, op)
3987 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
3988 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
3989 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
3990 return 0;
3991
3992 if (GET_CODE (op) == MEM)
3993 {
3994 rtx ind;
3995
3996 ind = XEXP (op, 0);
3997
3998 /* Match: (mem (reg)). */
3999 if (GET_CODE (ind) == REG)
4000 return 1;
4001
4002 /* Match:
4003 (mem (plus (reg)
4004 (const))). */
4005 if (GET_CODE (ind) == PLUS
4006 && GET_CODE (XEXP (ind, 0)) == REG
4007 && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
4008 && GET_CODE (XEXP (ind, 1)) == CONST_INT)
4009 return 1;
4010 }
4011
4012 return 0;
4013}
4014
4015/* Return nonzero if OP is a Cirrus or general register. */
9b6b54e2 4016int
e32bac5b 4017cirrus_register_operand (rtx op, enum machine_mode mode)
9b6b54e2
NC
4018{
4019 if (GET_MODE (op) != mode && mode != VOIDmode)
4020 return FALSE;
4021
4022 if (GET_CODE (op) == SUBREG)
4023 op = SUBREG_REG (op);
4024
4025 return (GET_CODE (op) == REG
4026 && (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
4027 || REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
4028}
4029
4030/* Return nonzero if OP is a cirrus FP register. */
9b6b54e2 4031int
e32bac5b 4032cirrus_fp_register (rtx op, enum machine_mode mode)
9b6b54e2
NC
4033{
4034 if (GET_MODE (op) != mode && mode != VOIDmode)
4035 return FALSE;
4036
4037 if (GET_CODE (op) == SUBREG)
4038 op = SUBREG_REG (op);
4039
4040 return (GET_CODE (op) == REG
4041 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
4042 || REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
4043}
4044
4045/* Return nonzero if OP is a 6bit constant (0..63). */
9b6b54e2 4046int
e32bac5b 4047cirrus_shift_const (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9b6b54e2
NC
4048{
4049 return (GET_CODE (op) == CONST_INT
4050 && INTVAL (op) >= 0
4051 && INTVAL (op) < 64);
4052}
4053
f0375c66
NC
4054/* Returns TRUE if INSN is an "LDR REG, ADDR" instruction.
4055 Use by the Cirrus Maverick code which has to workaround
4056 a hardware bug triggered by such instructions. */
f0375c66 4057static bool
e32bac5b 4058arm_memory_load_p (rtx insn)
9b6b54e2
NC
4059{
4060 rtx body, lhs, rhs;;
4061
f0375c66
NC
4062 if (insn == NULL_RTX || GET_CODE (insn) != INSN)
4063 return false;
9b6b54e2
NC
4064
4065 body = PATTERN (insn);
4066
4067 if (GET_CODE (body) != SET)
f0375c66 4068 return false;
9b6b54e2
NC
4069
4070 lhs = XEXP (body, 0);
4071 rhs = XEXP (body, 1);
4072
f0375c66
NC
4073 lhs = REG_OR_SUBREG_RTX (lhs);
4074
4075 /* If the destination is not a general purpose
4076 register we do not have to worry. */
4077 if (GET_CODE (lhs) != REG
4078 || REGNO_REG_CLASS (REGNO (lhs)) != GENERAL_REGS)
4079 return false;
4080
4081 /* As well as loads from memory we also have to react
4082 to loads of invalid constants which will be turned
4083 into loads from the minipool. */
4084 return (GET_CODE (rhs) == MEM
4085 || GET_CODE (rhs) == SYMBOL_REF
4086 || note_invalid_constants (insn, -1, false));
9b6b54e2
NC
4087}
4088
f0375c66 4089/* Return TRUE if INSN is a Cirrus instruction. */
f0375c66 4090static bool
e32bac5b 4091arm_cirrus_insn_p (rtx insn)
9b6b54e2
NC
4092{
4093 enum attr_cirrus attr;
4094
4095 /* get_attr aborts on USE and CLOBBER. */
4096 if (!insn
4097 || GET_CODE (insn) != INSN
4098 || GET_CODE (PATTERN (insn)) == USE
4099 || GET_CODE (PATTERN (insn)) == CLOBBER)
4100 return 0;
4101
4102 attr = get_attr_cirrus (insn);
4103
f0375c66 4104 return attr != CIRRUS_NOT;
9b6b54e2
NC
4105}
4106
4107/* Cirrus reorg for invalid instruction combinations. */
9b6b54e2 4108static void
e32bac5b 4109cirrus_reorg (rtx first)
9b6b54e2
NC
4110{
4111 enum attr_cirrus attr;
4112 rtx body = PATTERN (first);
4113 rtx t;
4114 int nops;
4115
4116 /* Any branch must be followed by 2 non Cirrus instructions. */
4117 if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
4118 {
4119 nops = 0;
4120 t = next_nonnote_insn (first);
4121
f0375c66 4122 if (arm_cirrus_insn_p (t))
9b6b54e2
NC
4123 ++ nops;
4124
f0375c66 4125 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
9b6b54e2
NC
4126 ++ nops;
4127
4128 while (nops --)
4129 emit_insn_after (gen_nop (), first);
4130
4131 return;
4132 }
4133
4134 /* (float (blah)) is in parallel with a clobber. */
4135 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4136 body = XVECEXP (body, 0, 0);
4137
4138 if (GET_CODE (body) == SET)
4139 {
4140 rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);
4141
4142 /* cfldrd, cfldr64, cfstrd, cfstr64 must
4143 be followed by a non Cirrus insn. */
4144 if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
4145 {
f0375c66 4146 if (arm_cirrus_insn_p (next_nonnote_insn (first)))
9b6b54e2
NC
4147 emit_insn_after (gen_nop (), first);
4148
4149 return;
4150 }
f0375c66 4151 else if (arm_memory_load_p (first))
9b6b54e2
NC
4152 {
4153 unsigned int arm_regno;
4154
4155 /* Any ldr/cfmvdlr, ldr/cfmvdhr, ldr/cfmvsr, ldr/cfmv64lr,
4156 ldr/cfmv64hr combination where the Rd field is the same
4157 in both instructions must be split with a non Cirrus
4158 insn. Example:
4159
4160 ldr r0, blah
4161 nop
4162 cfmvsr mvf0, r0. */
4163
4164 /* Get Arm register number for ldr insn. */
4165 if (GET_CODE (lhs) == REG)
4166 arm_regno = REGNO (lhs);
4167 else if (GET_CODE (rhs) == REG)
4168 arm_regno = REGNO (rhs);
4169 else
4170 abort ();
4171
4172 /* Next insn. */
4173 first = next_nonnote_insn (first);
4174
f0375c66 4175 if (! arm_cirrus_insn_p (first))
9b6b54e2
NC
4176 return;
4177
4178 body = PATTERN (first);
4179
4180 /* (float (blah)) is in parallel with a clobber. */
4181 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
4182 body = XVECEXP (body, 0, 0);
4183
4184 if (GET_CODE (body) == FLOAT)
4185 body = XEXP (body, 0);
4186
4187 if (get_attr_cirrus (first) == CIRRUS_MOVE
4188 && GET_CODE (XEXP (body, 1)) == REG
4189 && arm_regno == REGNO (XEXP (body, 1)))
4190 emit_insn_after (gen_nop (), first);
4191
4192 return;
4193 }
4194 }
4195
4196 /* get_attr aborts on USE and CLOBBER. */
4197 if (!first
4198 || GET_CODE (first) != INSN
4199 || GET_CODE (PATTERN (first)) == USE
4200 || GET_CODE (PATTERN (first)) == CLOBBER)
4201 return;
4202
4203 attr = get_attr_cirrus (first);
4204
4205 /* Any coprocessor compare instruction (cfcmps, cfcmpd, ...)
4206 must be followed by a non-coprocessor instruction. */
4207 if (attr == CIRRUS_COMPARE)
4208 {
4209 nops = 0;
4210
4211 t = next_nonnote_insn (first);
4212
f0375c66 4213 if (arm_cirrus_insn_p (t))
9b6b54e2
NC
4214 ++ nops;
4215
f0375c66 4216 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
9b6b54e2
NC
4217 ++ nops;
4218
4219 while (nops --)
4220 emit_insn_after (gen_nop (), first);
4221
4222 return;
4223 }
4224}
4225
cce8749e 4226/* Return nonzero if OP is a constant power of two. */
cce8749e 4227int
e32bac5b 4228power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
cce8749e
CH
4229{
4230 if (GET_CODE (op) == CONST_INT)
4231 {
d5b7b3ae 4232 HOST_WIDE_INT value = INTVAL (op);
1d6e90ac 4233
f3bb6135 4234 return value != 0 && (value & (value - 1)) == 0;
cce8749e 4235 }
1d6e90ac 4236
f3bb6135
RE
4237 return FALSE;
4238}
cce8749e
CH
4239
4240/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 4241 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
4242 Note that this disallows MEM(REG+REG), but allows
4243 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e 4244int
e32bac5b 4245di_operand (rtx op, enum machine_mode mode)
cce8749e 4246{
ff9940b0 4247 if (s_register_operand (op, mode))
f3bb6135 4248 return TRUE;
cce8749e 4249
9ce71c6f
BS
4250 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4251 return FALSE;
4252
e9c6b69b
NC
4253 if (GET_CODE (op) == SUBREG)
4254 op = SUBREG_REG (op);
4255
cce8749e
CH
4256 switch (GET_CODE (op))
4257 {
4258 case CONST_DOUBLE:
4259 case CONST_INT:
f3bb6135
RE
4260 return TRUE;
4261
cce8749e 4262 case MEM:
f3bb6135
RE
4263 return memory_address_p (DImode, XEXP (op, 0));
4264
cce8749e 4265 default:
f3bb6135 4266 return FALSE;
cce8749e 4267 }
f3bb6135 4268}
cce8749e 4269
d5b7b3ae
RE
4270/* Like di_operand, but don't accept constants. */
4271int
e32bac5b 4272nonimmediate_di_operand (rtx op, enum machine_mode mode)
d5b7b3ae
RE
4273{
4274 if (s_register_operand (op, mode))
4275 return TRUE;
4276
4277 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4278 return FALSE;
4279
4280 if (GET_CODE (op) == SUBREG)
4281 op = SUBREG_REG (op);
4282
4283 if (GET_CODE (op) == MEM)
4284 return memory_address_p (DImode, XEXP (op, 0));
4285
4286 return FALSE;
4287}
4288
f3139301 4289/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 4290 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
4291 Note that this disallows MEM(REG+REG), but allows
4292 MEM(PRE/POST_INC/DEC(REG)). */
f3139301 4293int
e32bac5b 4294soft_df_operand (rtx op, enum machine_mode mode)
f3139301
DE
4295{
4296 if (s_register_operand (op, mode))
4b02997f 4297 return TRUE;
f3139301 4298
9ce71c6f
BS
4299 if (mode != VOIDmode && GET_MODE (op) != mode)
4300 return FALSE;
4301
37b80d2e
BS
4302 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
4303 return FALSE;
4304
e9c6b69b
NC
4305 if (GET_CODE (op) == SUBREG)
4306 op = SUBREG_REG (op);
9ce71c6f 4307
f3139301
DE
4308 switch (GET_CODE (op))
4309 {
4310 case CONST_DOUBLE:
4311 return TRUE;
4312
4313 case MEM:
4314 return memory_address_p (DFmode, XEXP (op, 0));
4315
4316 default:
4317 return FALSE;
4318 }
4319}
4320
d5b7b3ae
RE
4321/* Like soft_df_operand, but don't accept constants. */
4322int
e32bac5b 4323nonimmediate_soft_df_operand (rtx op, enum machine_mode mode)
d5b7b3ae
RE
4324{
4325 if (s_register_operand (op, mode))
4b02997f 4326 return TRUE;
d5b7b3ae
RE
4327
4328 if (mode != VOIDmode && GET_MODE (op) != mode)
4329 return FALSE;
4330
4331 if (GET_CODE (op) == SUBREG)
4332 op = SUBREG_REG (op);
4333
4334 if (GET_CODE (op) == MEM)
4335 return memory_address_p (DFmode, XEXP (op, 0));
4336 return FALSE;
4337}
cce8749e 4338
d5b7b3ae 4339/* Return TRUE for valid index operands. */
cce8749e 4340int
e32bac5b 4341index_operand (rtx op, enum machine_mode mode)
cce8749e 4342{
d5b7b3ae 4343 return (s_register_operand (op, mode)
ff9940b0 4344 || (immediate_operand (op, mode)
d5b7b3ae
RE
4345 && (GET_CODE (op) != CONST_INT
4346 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 4347}
cce8749e 4348
ff9940b0
RE
4349/* Return TRUE for valid shifts by a constant. This also accepts any
4350 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 4351 shift operator in this case was a mult. */
ff9940b0 4352int
e32bac5b 4353const_shift_operand (rtx op, enum machine_mode mode)
ff9940b0
RE
4354{
4355 return (power_of_two_operand (op, mode)
4356 || (immediate_operand (op, mode)
d5b7b3ae
RE
4357 && (GET_CODE (op) != CONST_INT
4358 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 4359}
ff9940b0 4360
cce8749e
CH
4361/* Return TRUE for arithmetic operators which can be combined with a multiply
4362 (shift). */
cce8749e 4363int
e32bac5b 4364shiftable_operator (rtx x, enum machine_mode mode)
cce8749e 4365{
1d6e90ac
NC
4366 enum rtx_code code;
4367
cce8749e
CH
4368 if (GET_MODE (x) != mode)
4369 return FALSE;
cce8749e 4370
1d6e90ac
NC
4371 code = GET_CODE (x);
4372
4373 return (code == PLUS || code == MINUS
4374 || code == IOR || code == XOR || code == AND);
f3bb6135 4375}
cce8749e 4376
6ab589e0 4377/* Return TRUE for binary logical operators. */
6ab589e0 4378int
e32bac5b 4379logical_binary_operator (rtx x, enum machine_mode mode)
6ab589e0 4380{
1d6e90ac
NC
4381 enum rtx_code code;
4382
6ab589e0
JL
4383 if (GET_MODE (x) != mode)
4384 return FALSE;
6ab589e0 4385
1d6e90ac
NC
4386 code = GET_CODE (x);
4387
4388 return (code == IOR || code == XOR || code == AND);
6ab589e0
JL
4389}
4390
6354dc9b 4391/* Return TRUE for shift operators. */
cce8749e 4392int
e32bac5b 4393shift_operator (rtx x,enum machine_mode mode)
cce8749e 4394{
1d6e90ac
NC
4395 enum rtx_code code;
4396
cce8749e
CH
4397 if (GET_MODE (x) != mode)
4398 return FALSE;
cce8749e 4399
1d6e90ac 4400 code = GET_CODE (x);
f3bb6135 4401
1d6e90ac
NC
4402 if (code == MULT)
4403 return power_of_two_operand (XEXP (x, 1), mode);
4404
4405 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4406 || code == ROTATERT);
f3bb6135 4407}
ff9940b0 4408
6354dc9b
NC
4409/* Return TRUE if x is EQ or NE. */
4410int
e32bac5b 4411equality_operator (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
ff9940b0 4412{
f3bb6135 4413 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
4414}
4415
e45b72c4
RE
4416/* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
4417int
e32bac5b 4418arm_comparison_operator (rtx x, enum machine_mode mode)
e45b72c4
RE
4419{
4420 return (comparison_operator (x, mode)
4421 && GET_CODE (x) != LTGT
4422 && GET_CODE (x) != UNEQ);
4423}
4424
6354dc9b 4425/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
ff9940b0 4426int
e32bac5b 4427minmax_operator (rtx x, enum machine_mode mode)
ff9940b0
RE
4428{
4429 enum rtx_code code = GET_CODE (x);
4430
4431 if (GET_MODE (x) != mode)
4432 return FALSE;
f3bb6135 4433
ff9940b0 4434 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 4435}
ff9940b0 4436
ff9940b0 4437/* Return TRUE if this is the condition code register, if we aren't given
6354dc9b 4438 a mode, accept any class CCmode register. */
ff9940b0 4439int
e32bac5b 4440cc_register (rtx x, enum machine_mode mode)
ff9940b0
RE
4441{
4442 if (mode == VOIDmode)
4443 {
4444 mode = GET_MODE (x);
d5b7b3ae 4445
ff9940b0
RE
4446 if (GET_MODE_CLASS (mode) != MODE_CC)
4447 return FALSE;
4448 }
f3bb6135 4449
d5b7b3ae
RE
4450 if ( GET_MODE (x) == mode
4451 && GET_CODE (x) == REG
4452 && REGNO (x) == CC_REGNUM)
ff9940b0 4453 return TRUE;
f3bb6135 4454
ff9940b0
RE
4455 return FALSE;
4456}
5bbe2d40
RE
4457
4458/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
4459 a mode, accept any class CCmode register which indicates a dominance
4460 expression. */
5bbe2d40 4461int
e32bac5b 4462dominant_cc_register (rtx x, enum machine_mode mode)
5bbe2d40
RE
4463{
4464 if (mode == VOIDmode)
4465 {
4466 mode = GET_MODE (x);
d5b7b3ae 4467
84ed5e79 4468 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
4469 return FALSE;
4470 }
4471
e32bac5b 4472 if (mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
4473 && mode != CC_DLEmode && mode != CC_DLTmode
4474 && mode != CC_DGEmode && mode != CC_DGTmode
4475 && mode != CC_DLEUmode && mode != CC_DLTUmode
4476 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4477 return FALSE;
4478
d5b7b3ae 4479 return cc_register (x, mode);
5bbe2d40
RE
4480}
4481
2b835d68
RE
4482/* Return TRUE if X references a SYMBOL_REF. */
4483int
e32bac5b 4484symbol_mentioned_p (rtx x)
2b835d68 4485{
1d6e90ac
NC
4486 const char * fmt;
4487 int i;
2b835d68
RE
4488
4489 if (GET_CODE (x) == SYMBOL_REF)
4490 return 1;
4491
4492 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 4493
2b835d68
RE
4494 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4495 {
4496 if (fmt[i] == 'E')
4497 {
1d6e90ac 4498 int j;
2b835d68
RE
4499
4500 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4501 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4502 return 1;
4503 }
4504 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4505 return 1;
4506 }
4507
4508 return 0;
4509}
4510
4511/* Return TRUE if X references a LABEL_REF. */
4512int
e32bac5b 4513label_mentioned_p (rtx x)
2b835d68 4514{
1d6e90ac
NC
4515 const char * fmt;
4516 int i;
2b835d68
RE
4517
4518 if (GET_CODE (x) == LABEL_REF)
4519 return 1;
4520
4521 fmt = GET_RTX_FORMAT (GET_CODE (x));
4522 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4523 {
4524 if (fmt[i] == 'E')
4525 {
1d6e90ac 4526 int j;
2b835d68
RE
4527
4528 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4529 if (label_mentioned_p (XVECEXP (x, i, j)))
4530 return 1;
4531 }
4532 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4533 return 1;
4534 }
4535
4536 return 0;
4537}
4538
ff9940b0 4539enum rtx_code
e32bac5b 4540minmax_code (rtx x)
ff9940b0
RE
4541{
4542 enum rtx_code code = GET_CODE (x);
4543
4544 if (code == SMAX)
4545 return GE;
f3bb6135 4546 else if (code == SMIN)
ff9940b0 4547 return LE;
f3bb6135 4548 else if (code == UMIN)
ff9940b0 4549 return LEU;
f3bb6135 4550 else if (code == UMAX)
ff9940b0 4551 return GEU;
f3bb6135 4552
ff9940b0
RE
4553 abort ();
4554}
4555
6354dc9b 4556/* Return 1 if memory locations are adjacent. */
f3bb6135 4557int
e32bac5b 4558adjacent_mem_locations (rtx a, rtx b)
ff9940b0 4559{
ff9940b0
RE
4560 if ((GET_CODE (XEXP (a, 0)) == REG
4561 || (GET_CODE (XEXP (a, 0)) == PLUS
4562 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4563 && (GET_CODE (XEXP (b, 0)) == REG
4564 || (GET_CODE (XEXP (b, 0)) == PLUS
4565 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4566 {
1d6e90ac
NC
4567 int val0 = 0, val1 = 0;
4568 int reg0, reg1;
4569
ff9940b0
RE
4570 if (GET_CODE (XEXP (a, 0)) == PLUS)
4571 {
1d6e90ac 4572 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
ff9940b0
RE
4573 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4574 }
4575 else
4576 reg0 = REGNO (XEXP (a, 0));
1d6e90ac 4577
ff9940b0
RE
4578 if (GET_CODE (XEXP (b, 0)) == PLUS)
4579 {
1d6e90ac 4580 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
ff9940b0
RE
4581 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4582 }
4583 else
4584 reg1 = REGNO (XEXP (b, 0));
1d6e90ac 4585
e32bac5b
RE
4586 /* Don't accept any offset that will require multiple
4587 instructions to handle, since this would cause the
4588 arith_adjacentmem pattern to output an overlong sequence. */
c75a3ddc
PB
4589 if (!const_ok_for_op (PLUS, val0) || !const_ok_for_op (PLUS, val1))
4590 return 0;
4591
ff9940b0
RE
4592 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4593 }
4594 return 0;
4595}
4596
4597/* Return 1 if OP is a load multiple operation. It is known to be
6354dc9b 4598 parallel and the first section will be tested. */
f3bb6135 4599int
e32bac5b 4600load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
ff9940b0 4601{
f3bb6135 4602 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
4603 int dest_regno;
4604 rtx src_addr;
f3bb6135 4605 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
4606 rtx elt;
4607
4608 if (count <= 1
4609 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4610 return 0;
4611
6354dc9b 4612 /* Check to see if this might be a write-back. */
ff9940b0
RE
4613 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4614 {
4615 i++;
4616 base = 1;
4617
6354dc9b 4618 /* Now check it more carefully. */
ff9940b0
RE
4619 if (GET_CODE (SET_DEST (elt)) != REG
4620 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4621 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4622 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 4623 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 4624 return 0;
ff9940b0
RE
4625 }
4626
4627 /* Perform a quick check so we don't blow up below. */
4628 if (count <= i
4629 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4630 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4631 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4632 return 0;
4633
4634 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4635 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4636
4637 for (; i < count; i++)
4638 {
ed4c4348 4639 elt = XVECEXP (op, 0, i);
ff9940b0
RE
4640
4641 if (GET_CODE (elt) != SET
4642 || GET_CODE (SET_DEST (elt)) != REG
4643 || GET_MODE (SET_DEST (elt)) != SImode
6354dc9b 4644 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
ff9940b0
RE
4645 || GET_CODE (SET_SRC (elt)) != MEM
4646 || GET_MODE (SET_SRC (elt)) != SImode
4647 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5895f793 4648 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
ff9940b0
RE
4649 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4650 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4651 return 0;
4652 }
4653
4654 return 1;
4655}
4656
4657/* Return 1 if OP is a store multiple operation. It is known to be
6354dc9b 4658 parallel and the first section will be tested. */
f3bb6135 4659int
e32bac5b 4660store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
ff9940b0 4661{
f3bb6135 4662 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
4663 int src_regno;
4664 rtx dest_addr;
f3bb6135 4665 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
4666 rtx elt;
4667
4668 if (count <= 1
4669 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4670 return 0;
4671
6354dc9b 4672 /* Check to see if this might be a write-back. */
ff9940b0
RE
4673 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4674 {
4675 i++;
4676 base = 1;
4677
6354dc9b 4678 /* Now check it more carefully. */
ff9940b0
RE
4679 if (GET_CODE (SET_DEST (elt)) != REG
4680 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4681 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4682 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 4683 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 4684 return 0;
ff9940b0
RE
4685 }
4686
4687 /* Perform a quick check so we don't blow up below. */
4688 if (count <= i
4689 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4690 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4691 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4692 return 0;
4693
4694 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4695 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4696
4697 for (; i < count; i++)
4698 {
4699 elt = XVECEXP (op, 0, i);
4700
4701 if (GET_CODE (elt) != SET
4702 || GET_CODE (SET_SRC (elt)) != REG
4703 || GET_MODE (SET_SRC (elt)) != SImode
6354dc9b 4704 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
ff9940b0
RE
4705 || GET_CODE (SET_DEST (elt)) != MEM
4706 || GET_MODE (SET_DEST (elt)) != SImode
4707 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5895f793 4708 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
ff9940b0
RE
4709 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4710 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4711 return 0;
4712 }
4713
4714 return 1;
4715}
e2c671ba 4716
84ed5e79 4717int
e32bac5b
RE
4718load_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4719 HOST_WIDE_INT *load_offset)
84ed5e79
RE
4720{
4721 int unsorted_regs[4];
4722 HOST_WIDE_INT unsorted_offsets[4];
4723 int order[4];
ad076f4e 4724 int base_reg = -1;
84ed5e79
RE
4725 int i;
4726
1d6e90ac
NC
4727 /* Can only handle 2, 3, or 4 insns at present,
4728 though could be easily extended if required. */
84ed5e79
RE
4729 if (nops < 2 || nops > 4)
4730 abort ();
4731
4732 /* Loop over the operands and check that the memory references are
4733 suitable (ie immediate offsets from the same base register). At
4734 the same time, extract the target register, and the memory
4735 offsets. */
4736 for (i = 0; i < nops; i++)
4737 {
4738 rtx reg;
4739 rtx offset;
4740
56636818
JL
4741 /* Convert a subreg of a mem into the mem itself. */
4742 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 4743 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 4744
84ed5e79
RE
4745 if (GET_CODE (operands[nops + i]) != MEM)
4746 abort ();
4747
4748 /* Don't reorder volatile memory references; it doesn't seem worth
4749 looking for the case where the order is ok anyway. */
4750 if (MEM_VOLATILE_P (operands[nops + i]))
4751 return 0;
4752
4753 offset = const0_rtx;
4754
4755 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4756 || (GET_CODE (reg) == SUBREG
4757 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4758 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4759 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4760 == REG)
4761 || (GET_CODE (reg) == SUBREG
4762 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4763 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4764 == CONST_INT)))
4765 {
4766 if (i == 0)
4767 {
d5b7b3ae 4768 base_reg = REGNO (reg);
84ed5e79
RE
4769 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4770 ? REGNO (operands[i])
4771 : REGNO (SUBREG_REG (operands[i])));
4772 order[0] = 0;
4773 }
4774 else
4775 {
6354dc9b 4776 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
4777 /* Not addressed from the same base register. */
4778 return 0;
4779
4780 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4781 ? REGNO (operands[i])
4782 : REGNO (SUBREG_REG (operands[i])));
4783 if (unsorted_regs[i] < unsorted_regs[order[0]])
4784 order[0] = i;
4785 }
4786
4787 /* If it isn't an integer register, or if it overwrites the
4788 base register but isn't the last insn in the list, then
4789 we can't do this. */
4790 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4791 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4792 return 0;
4793
4794 unsorted_offsets[i] = INTVAL (offset);
4795 }
4796 else
4797 /* Not a suitable memory address. */
4798 return 0;
4799 }
4800
4801 /* All the useful information has now been extracted from the
4802 operands into unsorted_regs and unsorted_offsets; additionally,
4803 order[0] has been set to the lowest numbered register in the
4804 list. Sort the registers into order, and check that the memory
4805 offsets are ascending and adjacent. */
4806
4807 for (i = 1; i < nops; i++)
4808 {
4809 int j;
4810
4811 order[i] = order[i - 1];
4812 for (j = 0; j < nops; j++)
4813 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4814 && (order[i] == order[i - 1]
4815 || unsorted_regs[j] < unsorted_regs[order[i]]))
4816 order[i] = j;
4817
4818 /* Have we found a suitable register? if not, one must be used more
4819 than once. */
4820 if (order[i] == order[i - 1])
4821 return 0;
4822
4823 /* Is the memory address adjacent and ascending? */
4824 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4825 return 0;
4826 }
4827
4828 if (base)
4829 {
4830 *base = base_reg;
4831
4832 for (i = 0; i < nops; i++)
4833 regs[i] = unsorted_regs[order[i]];
4834
4835 *load_offset = unsorted_offsets[order[0]];
4836 }
4837
4838 if (unsorted_offsets[order[0]] == 0)
4839 return 1; /* ldmia */
4840
4841 if (unsorted_offsets[order[0]] == 4)
4842 return 2; /* ldmib */
4843
4844 if (unsorted_offsets[order[nops - 1]] == 0)
4845 return 3; /* ldmda */
4846
4847 if (unsorted_offsets[order[nops - 1]] == -4)
4848 return 4; /* ldmdb */
4849
949d79eb
RE
4850 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4851 if the offset isn't small enough. The reason 2 ldrs are faster
4852 is because these ARMs are able to do more than one cache access
4853 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4854 whilst the ARM8 has a double bandwidth cache. This means that
4855 these cores can do both an instruction fetch and a data fetch in
4856 a single cycle, so the trick of calculating the address into a
4857 scratch register (one of the result regs) and then doing a load
4858 multiple actually becomes slower (and no smaller in code size).
4859 That is the transformation
6cc8c0b3
NC
4860
4861 ldr rd1, [rbase + offset]
4862 ldr rd2, [rbase + offset + 4]
4863
4864 to
4865
4866 add rd1, rbase, offset
4867 ldmia rd1, {rd1, rd2}
4868
949d79eb
RE
4869 produces worse code -- '3 cycles + any stalls on rd2' instead of
4870 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4871 access per cycle, the first sequence could never complete in less
4872 than 6 cycles, whereas the ldm sequence would only take 5 and
4873 would make better use of sequential accesses if not hitting the
4874 cache.
4875
4876 We cheat here and test 'arm_ld_sched' which we currently know to
4877 only be true for the ARM8, ARM9 and StrongARM. If this ever
4878 changes, then the test below needs to be reworked. */
f5a1b0d2 4879 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
4880 return 0;
4881
84ed5e79
RE
4882 /* Can't do it without setting up the offset, only do this if it takes
4883 no more than one insn. */
4884 return (const_ok_for_arm (unsorted_offsets[order[0]])
4885 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
4886}
4887
cd2b33d0 4888const char *
e32bac5b 4889emit_ldm_seq (rtx *operands, int nops)
84ed5e79
RE
4890{
4891 int regs[4];
4892 int base_reg;
4893 HOST_WIDE_INT offset;
4894 char buf[100];
4895 int i;
4896
4897 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4898 {
4899 case 1:
4900 strcpy (buf, "ldm%?ia\t");
4901 break;
4902
4903 case 2:
4904 strcpy (buf, "ldm%?ib\t");
4905 break;
4906
4907 case 3:
4908 strcpy (buf, "ldm%?da\t");
4909 break;
4910
4911 case 4:
4912 strcpy (buf, "ldm%?db\t");
4913 break;
4914
4915 case 5:
4916 if (offset >= 0)
4917 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4918 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4919 (long) offset);
4920 else
4921 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4922 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4923 (long) -offset);
4924 output_asm_insn (buf, operands);
4925 base_reg = regs[0];
4926 strcpy (buf, "ldm%?ia\t");
4927 break;
4928
4929 default:
4930 abort ();
4931 }
4932
4933 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4934 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4935
4936 for (i = 1; i < nops; i++)
4937 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4938 reg_names[regs[i]]);
4939
4940 strcat (buf, "}\t%@ phole ldm");
4941
4942 output_asm_insn (buf, operands);
4943 return "";
4944}
4945
4946int
e32bac5b
RE
4947store_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4948 HOST_WIDE_INT * load_offset)
84ed5e79
RE
4949{
4950 int unsorted_regs[4];
4951 HOST_WIDE_INT unsorted_offsets[4];
4952 int order[4];
ad076f4e 4953 int base_reg = -1;
84ed5e79
RE
4954 int i;
4955
4956 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4957 extended if required. */
4958 if (nops < 2 || nops > 4)
4959 abort ();
4960
4961 /* Loop over the operands and check that the memory references are
4962 suitable (ie immediate offsets from the same base register). At
4963 the same time, extract the target register, and the memory
4964 offsets. */
4965 for (i = 0; i < nops; i++)
4966 {
4967 rtx reg;
4968 rtx offset;
4969
56636818
JL
4970 /* Convert a subreg of a mem into the mem itself. */
4971 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 4972 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 4973
84ed5e79
RE
4974 if (GET_CODE (operands[nops + i]) != MEM)
4975 abort ();
4976
4977 /* Don't reorder volatile memory references; it doesn't seem worth
4978 looking for the case where the order is ok anyway. */
4979 if (MEM_VOLATILE_P (operands[nops + i]))
4980 return 0;
4981
4982 offset = const0_rtx;
4983
4984 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4985 || (GET_CODE (reg) == SUBREG
4986 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4987 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4988 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4989 == REG)
4990 || (GET_CODE (reg) == SUBREG
4991 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4992 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4993 == CONST_INT)))
4994 {
4995 if (i == 0)
4996 {
62b10bbc 4997 base_reg = REGNO (reg);
84ed5e79
RE
4998 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4999 ? REGNO (operands[i])
5000 : REGNO (SUBREG_REG (operands[i])));
5001 order[0] = 0;
5002 }
5003 else
5004 {
6354dc9b 5005 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
5006 /* Not addressed from the same base register. */
5007 return 0;
5008
5009 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
5010 ? REGNO (operands[i])
5011 : REGNO (SUBREG_REG (operands[i])));
5012 if (unsorted_regs[i] < unsorted_regs[order[0]])
5013 order[0] = i;
5014 }
5015
5016 /* If it isn't an integer register, then we can't do this. */
5017 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
5018 return 0;
5019
5020 unsorted_offsets[i] = INTVAL (offset);
5021 }
5022 else
5023 /* Not a suitable memory address. */
5024 return 0;
5025 }
5026
5027 /* All the useful information has now been extracted from the
5028 operands into unsorted_regs and unsorted_offsets; additionally,
5029 order[0] has been set to the lowest numbered register in the
5030 list. Sort the registers into order, and check that the memory
5031 offsets are ascending and adjacent. */
5032
5033 for (i = 1; i < nops; i++)
5034 {
5035 int j;
5036
5037 order[i] = order[i - 1];
5038 for (j = 0; j < nops; j++)
5039 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
5040 && (order[i] == order[i - 1]
5041 || unsorted_regs[j] < unsorted_regs[order[i]]))
5042 order[i] = j;
5043
5044 /* Have we found a suitable register? if not, one must be used more
5045 than once. */
5046 if (order[i] == order[i - 1])
5047 return 0;
5048
5049 /* Is the memory address adjacent and ascending? */
5050 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
5051 return 0;
5052 }
5053
5054 if (base)
5055 {
5056 *base = base_reg;
5057
5058 for (i = 0; i < nops; i++)
5059 regs[i] = unsorted_regs[order[i]];
5060
5061 *load_offset = unsorted_offsets[order[0]];
5062 }
5063
5064 if (unsorted_offsets[order[0]] == 0)
5065 return 1; /* stmia */
5066
5067 if (unsorted_offsets[order[0]] == 4)
5068 return 2; /* stmib */
5069
5070 if (unsorted_offsets[order[nops - 1]] == 0)
5071 return 3; /* stmda */
5072
5073 if (unsorted_offsets[order[nops - 1]] == -4)
5074 return 4; /* stmdb */
5075
5076 return 0;
5077}
5078
cd2b33d0 5079const char *
e32bac5b 5080emit_stm_seq (rtx *operands, int nops)
84ed5e79
RE
5081{
5082 int regs[4];
5083 int base_reg;
5084 HOST_WIDE_INT offset;
5085 char buf[100];
5086 int i;
5087
5088 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
5089 {
5090 case 1:
5091 strcpy (buf, "stm%?ia\t");
5092 break;
5093
5094 case 2:
5095 strcpy (buf, "stm%?ib\t");
5096 break;
5097
5098 case 3:
5099 strcpy (buf, "stm%?da\t");
5100 break;
5101
5102 case 4:
5103 strcpy (buf, "stm%?db\t");
5104 break;
5105
5106 default:
5107 abort ();
5108 }
5109
5110 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
5111 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
5112
5113 for (i = 1; i < nops; i++)
5114 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
5115 reg_names[regs[i]]);
5116
5117 strcat (buf, "}\t%@ phole stm");
5118
5119 output_asm_insn (buf, operands);
5120 return "";
5121}
5122
e2c671ba 5123int
e32bac5b 5124multi_register_push (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
e2c671ba
RE
5125{
5126 if (GET_CODE (op) != PARALLEL
5127 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
5128 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
b15bca31 5129 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
e2c671ba
RE
5130 return 0;
5131
5132 return 1;
5133}
ff9940b0 5134\f
6354dc9b 5135/* Routines for use in generating RTL. */
1d6e90ac 5136
f3bb6135 5137rtx
e32bac5b
RE
5138arm_gen_load_multiple (int base_regno, int count, rtx from, int up,
5139 int write_back, int unchanging_p, int in_struct_p,
5140 int scalar_p)
ff9940b0
RE
5141{
5142 int i = 0, j;
5143 rtx result;
5144 int sign = up ? 1 : -1;
56636818 5145 rtx mem;
ff9940b0 5146
d19fb8e3
NC
5147 /* XScale has load-store double instructions, but they have stricter
5148 alignment requirements than load-store multiple, so we can not
5149 use them.
5150
5151 For XScale ldm requires 2 + NREGS cycles to complete and blocks
5152 the pipeline until completion.
5153
5154 NREGS CYCLES
5155 1 3
5156 2 4
5157 3 5
5158 4 6
5159
5160 An ldr instruction takes 1-3 cycles, but does not block the
5161 pipeline.
5162
5163 NREGS CYCLES
5164 1 1-3
5165 2 2-6
5166 3 3-9
5167 4 4-12
5168
5169 Best case ldr will always win. However, the more ldr instructions
5170 we issue, the less likely we are to be able to schedule them well.
5171 Using ldr instructions also increases code size.
5172
5173 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
5174 for counts of 3 or 4 regs. */
4b3c2e48 5175 if (arm_tune_xscale && count <= 2 && ! optimize_size)
d19fb8e3
NC
5176 {
5177 rtx seq;
5178
5179 start_sequence ();
5180
5181 for (i = 0; i < count; i++)
5182 {
5183 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
5184 RTX_UNCHANGING_P (mem) = unchanging_p;
5185 MEM_IN_STRUCT_P (mem) = in_struct_p;
5186 MEM_SCALAR_P (mem) = scalar_p;
5187 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
5188 }
5189
5190 if (write_back)
5191 emit_move_insn (from, plus_constant (from, count * 4 * sign));
5192
2f937369 5193 seq = get_insns ();
d19fb8e3
NC
5194 end_sequence ();
5195
5196 return seq;
5197 }
5198
43cffd11 5199 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 5200 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 5201 if (write_back)
f3bb6135 5202 {
ff9940b0 5203 XVECEXP (result, 0, 0)
43cffd11
RE
5204 = gen_rtx_SET (GET_MODE (from), from,
5205 plus_constant (from, count * 4 * sign));
ff9940b0
RE
5206 i = 1;
5207 count++;
f3bb6135
RE
5208 }
5209
ff9940b0 5210 for (j = 0; i < count; i++, j++)
f3bb6135 5211 {
43cffd11 5212 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
5213 RTX_UNCHANGING_P (mem) = unchanging_p;
5214 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 5215 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
5216 XVECEXP (result, 0, i)
5217 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
5218 }
5219
ff9940b0
RE
5220 return result;
5221}
5222
f3bb6135 5223rtx
e32bac5b
RE
5224arm_gen_store_multiple (int base_regno, int count, rtx to, int up,
5225 int write_back, int unchanging_p, int in_struct_p,
5226 int scalar_p)
ff9940b0
RE
5227{
5228 int i = 0, j;
5229 rtx result;
5230 int sign = up ? 1 : -1;
56636818 5231 rtx mem;
ff9940b0 5232
d19fb8e3
NC
5233 /* See arm_gen_load_multiple for discussion of
5234 the pros/cons of ldm/stm usage for XScale. */
4b3c2e48 5235 if (arm_tune_xscale && count <= 2 && ! optimize_size)
d19fb8e3
NC
5236 {
5237 rtx seq;
5238
5239 start_sequence ();
5240
5241 for (i = 0; i < count; i++)
5242 {
5243 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
5244 RTX_UNCHANGING_P (mem) = unchanging_p;
5245 MEM_IN_STRUCT_P (mem) = in_struct_p;
5246 MEM_SCALAR_P (mem) = scalar_p;
5247 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
5248 }
5249
5250 if (write_back)
5251 emit_move_insn (to, plus_constant (to, count * 4 * sign));
5252
2f937369 5253 seq = get_insns ();
d19fb8e3
NC
5254 end_sequence ();
5255
5256 return seq;
5257 }
5258
43cffd11 5259 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 5260 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 5261 if (write_back)
f3bb6135 5262 {
ff9940b0 5263 XVECEXP (result, 0, 0)
43cffd11
RE
5264 = gen_rtx_SET (GET_MODE (to), to,
5265 plus_constant (to, count * 4 * sign));
ff9940b0
RE
5266 i = 1;
5267 count++;
f3bb6135
RE
5268 }
5269
ff9940b0 5270 for (j = 0; i < count; i++, j++)
f3bb6135 5271 {
43cffd11 5272 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
5273 RTX_UNCHANGING_P (mem) = unchanging_p;
5274 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 5275 MEM_SCALAR_P (mem) = scalar_p;
56636818 5276
43cffd11
RE
5277 XVECEXP (result, 0, i)
5278 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
5279 }
5280
ff9940b0
RE
5281 return result;
5282}
5283
880e2516 5284int
e32bac5b 5285arm_gen_movstrqi (rtx *operands)
880e2516
RE
5286{
5287 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 5288 int i;
880e2516 5289 rtx src, dst;
ad076f4e 5290 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 5291 rtx part_bytes_reg = NULL;
56636818
JL
5292 rtx mem;
5293 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 5294 int dst_scalar_p, src_scalar_p;
880e2516
RE
5295
5296 if (GET_CODE (operands[2]) != CONST_INT
5297 || GET_CODE (operands[3]) != CONST_INT
5298 || INTVAL (operands[2]) > 64
5299 || INTVAL (operands[3]) & 3)
5300 return 0;
5301
5302 st_dst = XEXP (operands[0], 0);
5303 st_src = XEXP (operands[1], 0);
56636818
JL
5304
5305 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5306 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 5307 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
5308 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5309 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 5310 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 5311
880e2516
RE
5312 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5313 fin_src = src = copy_to_mode_reg (SImode, st_src);
5314
e9d7b180 5315 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
880e2516
RE
5316 out_words_to_go = INTVAL (operands[2]) / 4;
5317 last_bytes = INTVAL (operands[2]) & 3;
5318
5319 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 5320 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
5321
5322 for (i = 0; in_words_to_go >= 2; i+=4)
5323 {
bd9c7e23 5324 if (in_words_to_go > 4)
56636818 5325 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
5326 src_unchanging_p,
5327 src_in_struct_p,
5328 src_scalar_p));
bd9c7e23
RE
5329 else
5330 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 5331 FALSE, src_unchanging_p,
c6df88cb 5332 src_in_struct_p, src_scalar_p));
bd9c7e23 5333
880e2516
RE
5334 if (out_words_to_go)
5335 {
bd9c7e23 5336 if (out_words_to_go > 4)
56636818
JL
5337 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5338 dst_unchanging_p,
c6df88cb
MM
5339 dst_in_struct_p,
5340 dst_scalar_p));
bd9c7e23
RE
5341 else if (out_words_to_go != 1)
5342 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5343 dst, TRUE,
5344 (last_bytes == 0
56636818
JL
5345 ? FALSE : TRUE),
5346 dst_unchanging_p,
c6df88cb
MM
5347 dst_in_struct_p,
5348 dst_scalar_p));
880e2516
RE
5349 else
5350 {
43cffd11 5351 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
5352 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5353 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 5354 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 5355 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
5356 if (last_bytes != 0)
5357 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
5358 }
5359 }
5360
5361 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5362 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5363 }
5364
5365 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5366 if (out_words_to_go)
62b10bbc
NC
5367 {
5368 rtx sreg;
5369
5370 mem = gen_rtx_MEM (SImode, src);
5371 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5372 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5373 MEM_SCALAR_P (mem) = src_scalar_p;
5374 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5375 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5376
5377 mem = gen_rtx_MEM (SImode, dst);
5378 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5379 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5380 MEM_SCALAR_P (mem) = dst_scalar_p;
5381 emit_move_insn (mem, sreg);
5382 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5383 in_words_to_go--;
5384
5385 if (in_words_to_go) /* Sanity check */
5386 abort ();
5387 }
880e2516
RE
5388
5389 if (in_words_to_go)
5390 {
5391 if (in_words_to_go < 0)
5392 abort ();
5393
43cffd11 5394 mem = gen_rtx_MEM (SImode, src);
56636818
JL
5395 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5396 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 5397 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 5398 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
5399 }
5400
d5b7b3ae
RE
5401 if (last_bytes && part_bytes_reg == NULL)
5402 abort ();
5403
880e2516
RE
5404 if (BYTES_BIG_ENDIAN && last_bytes)
5405 {
5406 rtx tmp = gen_reg_rtx (SImode);
5407
6354dc9b 5408 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
5409 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5410 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
5411 part_bytes_reg = tmp;
5412
5413 while (last_bytes)
5414 {
43cffd11 5415 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
5416 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5417 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 5418 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2
BS
5419 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5420
880e2516
RE
5421 if (--last_bytes)
5422 {
5423 tmp = gen_reg_rtx (SImode);
5424 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5425 part_bytes_reg = tmp;
5426 }
5427 }
5428
5429 }
5430 else
5431 {
d5b7b3ae 5432 if (last_bytes > 1)
880e2516 5433 {
d5b7b3ae 5434 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
5435 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5436 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 5437 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 5438 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
d5b7b3ae
RE
5439 last_bytes -= 2;
5440 if (last_bytes)
880e2516
RE
5441 {
5442 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 5443
d5b7b3ae
RE
5444 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5445 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
5446 part_bytes_reg = tmp;
5447 }
5448 }
d5b7b3ae
RE
5449
5450 if (last_bytes)
5451 {
5452 mem = gen_rtx_MEM (QImode, dst);
5453 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5454 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5455 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 5456 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
d5b7b3ae 5457 }
880e2516
RE
5458 }
5459
5460 return 1;
5461}
5462
5165176d
RE
5463/* Generate a memory reference for a half word, such that it will be loaded
5464 into the top 16 bits of the word. We can assume that the address is
5465 known to be alignable and of the form reg, or plus (reg, const). */
1d6e90ac 5466
5165176d 5467rtx
e32bac5b 5468arm_gen_rotated_half_load (rtx memref)
5165176d
RE
5469{
5470 HOST_WIDE_INT offset = 0;
5471 rtx base = XEXP (memref, 0);
5472
5473 if (GET_CODE (base) == PLUS)
5474 {
5475 offset = INTVAL (XEXP (base, 1));
5476 base = XEXP (base, 0);
5477 }
5478
956d6950 5479 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 5480 if (TARGET_MMU_TRAPS
5165176d
RE
5481 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5482 return NULL;
5483
43cffd11 5484 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
5485
5486 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5487 return base;
5488
43cffd11 5489 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
5490}
5491
03f1640c
RE
5492/* Select a dominance comparison mode if possible for a test of the general
5493 form (OP (COND_OR (X) (Y)) (const_int 0)). We support three forms.
5494 COND_OR == DOM_CC_X_AND_Y => (X && Y)
5495 COND_OR == DOM_CC_NX_OR_Y => ((! X) || Y)
5496 COND_OR == DOM_CC_X_OR_Y => (X || Y)
5497 In all cases OP will be either EQ or NE, but we don't need to know which
5498 here. If we are unable to support a dominance comparison we return
5499 CC mode. This will then fail to match for the RTL expressions that
5500 generate this call. */
03f1640c 5501enum machine_mode
e32bac5b 5502arm_select_dominance_cc_mode (rtx x, rtx y, HOST_WIDE_INT cond_or)
84ed5e79
RE
5503{
5504 enum rtx_code cond1, cond2;
5505 int swapped = 0;
5506
5507 /* Currently we will probably get the wrong result if the individual
5508 comparisons are not simple. This also ensures that it is safe to
956d6950 5509 reverse a comparison if necessary. */
84ed5e79
RE
5510 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5511 != CCmode)
5512 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5513 != CCmode))
5514 return CCmode;
5515
1646cf41
RE
5516 /* The if_then_else variant of this tests the second condition if the
5517 first passes, but is true if the first fails. Reverse the first
5518 condition to get a true "inclusive-or" expression. */
03f1640c 5519 if (cond_or == DOM_CC_NX_OR_Y)
84ed5e79
RE
5520 cond1 = reverse_condition (cond1);
5521
5522 /* If the comparisons are not equal, and one doesn't dominate the other,
5523 then we can't do this. */
5524 if (cond1 != cond2
5895f793
RE
5525 && !comparison_dominates_p (cond1, cond2)
5526 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
5527 return CCmode;
5528
5529 if (swapped)
5530 {
5531 enum rtx_code temp = cond1;
5532 cond1 = cond2;
5533 cond2 = temp;
5534 }
5535
5536 switch (cond1)
5537 {
5538 case EQ:
03f1640c 5539 if (cond2 == EQ || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
5540 return CC_DEQmode;
5541
5542 switch (cond2)
5543 {
5544 case LE: return CC_DLEmode;
5545 case LEU: return CC_DLEUmode;
5546 case GE: return CC_DGEmode;
5547 case GEU: return CC_DGEUmode;
ad076f4e 5548 default: break;
84ed5e79
RE
5549 }
5550
5551 break;
5552
5553 case LT:
03f1640c 5554 if (cond2 == LT || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
5555 return CC_DLTmode;
5556 if (cond2 == LE)
5557 return CC_DLEmode;
5558 if (cond2 == NE)
5559 return CC_DNEmode;
5560 break;
5561
5562 case GT:
03f1640c 5563 if (cond2 == GT || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
5564 return CC_DGTmode;
5565 if (cond2 == GE)
5566 return CC_DGEmode;
5567 if (cond2 == NE)
5568 return CC_DNEmode;
5569 break;
5570
5571 case LTU:
03f1640c 5572 if (cond2 == LTU || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
5573 return CC_DLTUmode;
5574 if (cond2 == LEU)
5575 return CC_DLEUmode;
5576 if (cond2 == NE)
5577 return CC_DNEmode;
5578 break;
5579
5580 case GTU:
03f1640c 5581 if (cond2 == GTU || cond_or == DOM_CC_X_AND_Y)
84ed5e79
RE
5582 return CC_DGTUmode;
5583 if (cond2 == GEU)
5584 return CC_DGEUmode;
5585 if (cond2 == NE)
5586 return CC_DNEmode;
5587 break;
5588
5589 /* The remaining cases only occur when both comparisons are the
5590 same. */
5591 case NE:
5592 return CC_DNEmode;
5593
5594 case LE:
5595 return CC_DLEmode;
5596
5597 case GE:
5598 return CC_DGEmode;
5599
5600 case LEU:
5601 return CC_DLEUmode;
5602
5603 case GEU:
5604 return CC_DGEUmode;
ad076f4e
RE
5605
5606 default:
5607 break;
84ed5e79
RE
5608 }
5609
5610 abort ();
5611}
5612
5613enum machine_mode
e32bac5b 5614arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
84ed5e79
RE
5615{
5616 /* All floating point compares return CCFP if it is an equality
5617 comparison, and CCFPE otherwise. */
5618 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
5619 {
5620 switch (op)
5621 {
5622 case EQ:
5623 case NE:
5624 case UNORDERED:
5625 case ORDERED:
5626 case UNLT:
5627 case UNLE:
5628 case UNGT:
5629 case UNGE:
5630 case UNEQ:
5631 case LTGT:
5632 return CCFPmode;
5633
5634 case LT:
5635 case LE:
5636 case GT:
5637 case GE:
9b6b54e2
NC
5638 if (TARGET_CIRRUS)
5639 return CCFPmode;
e45b72c4
RE
5640 return CCFPEmode;
5641
5642 default:
5643 abort ();
5644 }
5645 }
84ed5e79
RE
5646
5647 /* A compare with a shifted operand. Because of canonicalization, the
5648 comparison will have to be swapped when we emit the assembler. */
5649 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5650 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5651 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5652 || GET_CODE (x) == ROTATERT))
5653 return CC_SWPmode;
5654
956d6950
JL
5655 /* This is a special case that is used by combine to allow a
5656 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 5657 followed by a comparison of the shifted integer (only valid for
956d6950 5658 equalities and unsigned inequalities). */
84ed5e79
RE
5659 if (GET_MODE (x) == SImode
5660 && GET_CODE (x) == ASHIFT
5661 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5662 && GET_CODE (XEXP (x, 0)) == SUBREG
5663 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5664 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5665 && (op == EQ || op == NE
5666 || op == GEU || op == GTU || op == LTU || op == LEU)
5667 && GET_CODE (y) == CONST_INT)
5668 return CC_Zmode;
5669
1646cf41
RE
5670 /* A construct for a conditional compare, if the false arm contains
5671 0, then both conditions must be true, otherwise either condition
5672 must be true. Not all conditions are possible, so CCmode is
5673 returned if it can't be done. */
5674 if (GET_CODE (x) == IF_THEN_ELSE
5675 && (XEXP (x, 2) == const0_rtx
5676 || XEXP (x, 2) == const1_rtx)
5677 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5678 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
03f1640c
RE
5679 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5680 INTVAL (XEXP (x, 2)));
1646cf41
RE
5681
5682 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5683 if (GET_CODE (x) == AND
5684 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5685 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
03f1640c
RE
5686 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5687 DOM_CC_X_AND_Y);
1646cf41
RE
5688
5689 if (GET_CODE (x) == IOR
5690 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5691 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
03f1640c
RE
5692 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5693 DOM_CC_X_OR_Y);
1646cf41 5694
defc0463
RE
5695 /* An operation (on Thumb) where we want to test for a single bit.
5696 This is done by shifting that bit up into the top bit of a
5697 scratch register; we can then branch on the sign bit. */
5698 if (TARGET_THUMB
5699 && GET_MODE (x) == SImode
5700 && (op == EQ || op == NE)
5701 && (GET_CODE (x) == ZERO_EXTRACT))
5702 return CC_Nmode;
5703
84ed5e79
RE
5704 /* An operation that sets the condition codes as a side-effect, the
5705 V flag is not set correctly, so we can only use comparisons where
5706 this doesn't matter. (For LT and GE we can use "mi" and "pl"
defc0463 5707 instead.) */
84ed5e79
RE
5708 if (GET_MODE (x) == SImode
5709 && y == const0_rtx
5710 && (op == EQ || op == NE || op == LT || op == GE)
5711 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5712 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5713 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5714 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5715 || GET_CODE (x) == LSHIFTRT
5716 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
defc0463
RE
5717 || GET_CODE (x) == ROTATERT
5718 || (TARGET_ARM && GET_CODE (x) == ZERO_EXTRACT)))
84ed5e79
RE
5719 return CC_NOOVmode;
5720
84ed5e79
RE
5721 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5722 return CC_Zmode;
5723
bd9c7e23
RE
5724 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5725 && GET_CODE (x) == PLUS
5726 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5727 return CC_Cmode;
5728
84ed5e79
RE
5729 return CCmode;
5730}
5731
ff9940b0
RE
5732/* X and Y are two things to compare using CODE. Emit the compare insn and
5733 return the rtx for register 0 in the proper mode. FP means this is a
5734 floating point compare: I don't think that it is needed on the arm. */
ff9940b0 5735rtx
e32bac5b 5736arm_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
ff9940b0
RE
5737{
5738 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 5739 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 5740
43cffd11
RE
5741 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5742 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
5743
5744 return cc_reg;
5745}
5746
fcd53748
JT
5747/* Generate a sequence of insns that will generate the correct return
5748 address mask depending on the physical architecture that the program
5749 is running on. */
fcd53748 5750rtx
e32bac5b 5751arm_gen_return_addr_mask (void)
fcd53748
JT
5752{
5753 rtx reg = gen_reg_rtx (Pmode);
5754
5755 emit_insn (gen_return_addr_mask (reg));
5756 return reg;
5757}
5758
0a81f500 5759void
e32bac5b 5760arm_reload_in_hi (rtx *operands)
0a81f500 5761{
f9cc092a
RE
5762 rtx ref = operands[1];
5763 rtx base, scratch;
5764 HOST_WIDE_INT offset = 0;
5765
5766 if (GET_CODE (ref) == SUBREG)
5767 {
ddef6bc7 5768 offset = SUBREG_BYTE (ref);
f9cc092a
RE
5769 ref = SUBREG_REG (ref);
5770 }
5771
5772 if (GET_CODE (ref) == REG)
5773 {
5774 /* We have a pseudo which has been spilt onto the stack; there
5775 are two cases here: the first where there is a simple
5776 stack-slot replacement and a second where the stack-slot is
5777 out of range, or is used as a subreg. */
5778 if (reg_equiv_mem[REGNO (ref)])
5779 {
5780 ref = reg_equiv_mem[REGNO (ref)];
5781 base = find_replacement (&XEXP (ref, 0));
5782 }
5783 else
6354dc9b 5784 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
5785 base = reg_equiv_address[REGNO (ref)];
5786 }
5787 else
5788 base = find_replacement (&XEXP (ref, 0));
0a81f500 5789
e5e809f4
JL
5790 /* Handle the case where the address is too complex to be offset by 1. */
5791 if (GET_CODE (base) == MINUS
5792 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5793 {
f9cc092a 5794 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 5795
43cffd11 5796 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
5797 base = base_plus;
5798 }
f9cc092a
RE
5799 else if (GET_CODE (base) == PLUS)
5800 {
6354dc9b 5801 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
5802 HOST_WIDE_INT hi, lo;
5803
5804 offset += INTVAL (XEXP (base, 1));
5805 base = XEXP (base, 0);
5806
6354dc9b 5807 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
5808 /* Valid range for lo is -4095 -> 4095 */
5809 lo = (offset >= 0
5810 ? (offset & 0xfff)
5811 : -((-offset) & 0xfff));
5812
5813 /* Corner case, if lo is the max offset then we would be out of range
5814 once we have added the additional 1 below, so bump the msb into the
5815 pre-loading insn(s). */
5816 if (lo == 4095)
5817 lo &= 0x7ff;
5818
30cf4896
KG
5819 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5820 ^ (HOST_WIDE_INT) 0x80000000)
5821 - (HOST_WIDE_INT) 0x80000000);
f9cc092a
RE
5822
5823 if (hi + lo != offset)
5824 abort ();
5825
5826 if (hi != 0)
5827 {
5828 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5829
5830 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 5831 that require more than one insn. */
f9cc092a
RE
5832 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5833 base = base_plus;
5834 offset = lo;
5835 }
5836 }
e5e809f4 5837
3a1944a6
RE
5838 /* Operands[2] may overlap operands[0] (though it won't overlap
5839 operands[1]), that's why we asked for a DImode reg -- so we can
5840 use the bit that does not overlap. */
5841 if (REGNO (operands[2]) == REGNO (operands[0]))
5842 scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5843 else
5844 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5845
f9cc092a
RE
5846 emit_insn (gen_zero_extendqisi2 (scratch,
5847 gen_rtx_MEM (QImode,
5848 plus_constant (base,
5849 offset))));
43cffd11
RE
5850 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5851 gen_rtx_MEM (QImode,
f9cc092a
RE
5852 plus_constant (base,
5853 offset + 1))));
5895f793 5854 if (!BYTES_BIG_ENDIAN)
43cffd11
RE
5855 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5856 gen_rtx_IOR (SImode,
5857 gen_rtx_ASHIFT
5858 (SImode,
5859 gen_rtx_SUBREG (SImode, operands[0], 0),
5860 GEN_INT (8)),
f9cc092a 5861 scratch)));
0a81f500 5862 else
43cffd11
RE
5863 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5864 gen_rtx_IOR (SImode,
f9cc092a 5865 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
5866 GEN_INT (8)),
5867 gen_rtx_SUBREG (SImode, operands[0],
5868 0))));
0a81f500
RE
5869}
5870
72ac76be 5871/* Handle storing a half-word to memory during reload by synthesizing as two
f9cc092a
RE
5872 byte stores. Take care not to clobber the input values until after we
5873 have moved them somewhere safe. This code assumes that if the DImode
5874 scratch in operands[2] overlaps either the input value or output address
5875 in some way, then that value must die in this insn (we absolutely need
5876 two scratch registers for some corner cases). */
f3bb6135 5877void
e32bac5b 5878arm_reload_out_hi (rtx *operands)
af48348a 5879{
f9cc092a
RE
5880 rtx ref = operands[0];
5881 rtx outval = operands[1];
5882 rtx base, scratch;
5883 HOST_WIDE_INT offset = 0;
5884
5885 if (GET_CODE (ref) == SUBREG)
5886 {
ddef6bc7 5887 offset = SUBREG_BYTE (ref);
f9cc092a
RE
5888 ref = SUBREG_REG (ref);
5889 }
5890
f9cc092a
RE
5891 if (GET_CODE (ref) == REG)
5892 {
5893 /* We have a pseudo which has been spilt onto the stack; there
5894 are two cases here: the first where there is a simple
5895 stack-slot replacement and a second where the stack-slot is
5896 out of range, or is used as a subreg. */
5897 if (reg_equiv_mem[REGNO (ref)])
5898 {
5899 ref = reg_equiv_mem[REGNO (ref)];
5900 base = find_replacement (&XEXP (ref, 0));
5901 }
5902 else
6354dc9b 5903 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
5904 base = reg_equiv_address[REGNO (ref)];
5905 }
5906 else
5907 base = find_replacement (&XEXP (ref, 0));
5908
5909 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5910
5911 /* Handle the case where the address is too complex to be offset by 1. */
5912 if (GET_CODE (base) == MINUS
5913 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5914 {
5915 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5916
5917 /* Be careful not to destroy OUTVAL. */
5918 if (reg_overlap_mentioned_p (base_plus, outval))
5919 {
5920 /* Updating base_plus might destroy outval, see if we can
5921 swap the scratch and base_plus. */
5895f793 5922 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
5923 {
5924 rtx tmp = scratch;
5925 scratch = base_plus;
5926 base_plus = tmp;
5927 }
5928 else
5929 {
5930 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5931
5932 /* Be conservative and copy OUTVAL into the scratch now,
5933 this should only be necessary if outval is a subreg
5934 of something larger than a word. */
5935 /* XXX Might this clobber base? I can't see how it can,
5936 since scratch is known to overlap with OUTVAL, and
5937 must be wider than a word. */
5938 emit_insn (gen_movhi (scratch_hi, outval));
5939 outval = scratch_hi;
5940 }
5941 }
5942
5943 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5944 base = base_plus;
5945 }
5946 else if (GET_CODE (base) == PLUS)
5947 {
6354dc9b 5948 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
5949 HOST_WIDE_INT hi, lo;
5950
5951 offset += INTVAL (XEXP (base, 1));
5952 base = XEXP (base, 0);
5953
6354dc9b 5954 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
5955 /* Valid range for lo is -4095 -> 4095 */
5956 lo = (offset >= 0
5957 ? (offset & 0xfff)
5958 : -((-offset) & 0xfff));
5959
5960 /* Corner case, if lo is the max offset then we would be out of range
5961 once we have added the additional 1 below, so bump the msb into the
5962 pre-loading insn(s). */
5963 if (lo == 4095)
5964 lo &= 0x7ff;
5965
30cf4896
KG
5966 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5967 ^ (HOST_WIDE_INT) 0x80000000)
5968 - (HOST_WIDE_INT) 0x80000000);
f9cc092a
RE
5969
5970 if (hi + lo != offset)
5971 abort ();
5972
5973 if (hi != 0)
5974 {
5975 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5976
5977 /* Be careful not to destroy OUTVAL. */
5978 if (reg_overlap_mentioned_p (base_plus, outval))
5979 {
5980 /* Updating base_plus might destroy outval, see if we
5981 can swap the scratch and base_plus. */
5895f793 5982 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
5983 {
5984 rtx tmp = scratch;
5985 scratch = base_plus;
5986 base_plus = tmp;
5987 }
5988 else
5989 {
5990 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5991
5992 /* Be conservative and copy outval into scratch now,
5993 this should only be necessary if outval is a
5994 subreg of something larger than a word. */
5995 /* XXX Might this clobber base? I can't see how it
5996 can, since scratch is known to overlap with
5997 outval. */
5998 emit_insn (gen_movhi (scratch_hi, outval));
5999 outval = scratch_hi;
6000 }
6001 }
6002
6003 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 6004 that require more than one insn. */
f9cc092a
RE
6005 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
6006 base = base_plus;
6007 offset = lo;
6008 }
6009 }
af48348a 6010
b5cc037f
RE
6011 if (BYTES_BIG_ENDIAN)
6012 {
f9cc092a
RE
6013 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6014 plus_constant (base, offset + 1)),
5d5603e2 6015 gen_lowpart (QImode, outval)));
f9cc092a
RE
6016 emit_insn (gen_lshrsi3 (scratch,
6017 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 6018 GEN_INT (8)));
f9cc092a 6019 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5d5603e2 6020 gen_lowpart (QImode, scratch)));
b5cc037f
RE
6021 }
6022 else
6023 {
f9cc092a 6024 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5d5603e2 6025 gen_lowpart (QImode, outval)));
f9cc092a
RE
6026 emit_insn (gen_lshrsi3 (scratch,
6027 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 6028 GEN_INT (8)));
f9cc092a
RE
6029 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6030 plus_constant (base, offset + 1)),
5d5603e2 6031 gen_lowpart (QImode, scratch)));
b5cc037f 6032 }
af48348a 6033}
2b835d68 6034\f
d5b7b3ae
RE
6035/* Print a symbolic form of X to the debug file, F. */
6036static void
e32bac5b 6037arm_print_value (FILE *f, rtx x)
d5b7b3ae
RE
6038{
6039 switch (GET_CODE (x))
6040 {
6041 case CONST_INT:
6042 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
6043 return;
6044
6045 case CONST_DOUBLE:
6046 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
6047 return;
6048
5a9335ef
NC
6049 case CONST_VECTOR:
6050 {
6051 int i;
6052
6053 fprintf (f, "<");
6054 for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
6055 {
6056 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (CONST_VECTOR_ELT (x, i)));
6057 if (i < (CONST_VECTOR_NUNITS (x) - 1))
6058 fputc (',', f);
6059 }
6060 fprintf (f, ">");
6061 }
6062 return;
6063
d5b7b3ae
RE
6064 case CONST_STRING:
6065 fprintf (f, "\"%s\"", XSTR (x, 0));
6066 return;
6067
6068 case SYMBOL_REF:
6069 fprintf (f, "`%s'", XSTR (x, 0));
6070 return;
6071
6072 case LABEL_REF:
6073 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
6074 return;
6075
6076 case CONST:
6077 arm_print_value (f, XEXP (x, 0));
6078 return;
6079
6080 case PLUS:
6081 arm_print_value (f, XEXP (x, 0));
6082 fprintf (f, "+");
6083 arm_print_value (f, XEXP (x, 1));
6084 return;
6085
6086 case PC:
6087 fprintf (f, "pc");
6088 return;
6089
6090 default:
6091 fprintf (f, "????");
6092 return;
6093 }
6094}
6095\f
2b835d68 6096/* Routines for manipulation of the constant pool. */
2b835d68 6097
949d79eb
RE
6098/* Arm instructions cannot load a large constant directly into a
6099 register; they have to come from a pc relative load. The constant
6100 must therefore be placed in the addressable range of the pc
6101 relative load. Depending on the precise pc relative load
6102 instruction the range is somewhere between 256 bytes and 4k. This
6103 means that we often have to dump a constant inside a function, and
2b835d68
RE
6104 generate code to branch around it.
6105
949d79eb
RE
6106 It is important to minimize this, since the branches will slow
6107 things down and make the code larger.
2b835d68 6108
949d79eb
RE
6109 Normally we can hide the table after an existing unconditional
6110 branch so that there is no interruption of the flow, but in the
6111 worst case the code looks like this:
2b835d68
RE
6112
6113 ldr rn, L1
949d79eb 6114 ...
2b835d68
RE
6115 b L2
6116 align
6117 L1: .long value
6118 L2:
949d79eb 6119 ...
2b835d68 6120
2b835d68 6121 ldr rn, L3
949d79eb 6122 ...
2b835d68
RE
6123 b L4
6124 align
2b835d68
RE
6125 L3: .long value
6126 L4:
949d79eb
RE
6127 ...
6128
6129 We fix this by performing a scan after scheduling, which notices
6130 which instructions need to have their operands fetched from the
6131 constant table and builds the table.
6132
6133 The algorithm starts by building a table of all the constants that
6134 need fixing up and all the natural barriers in the function (places
6135 where a constant table can be dropped without breaking the flow).
6136 For each fixup we note how far the pc-relative replacement will be
6137 able to reach and the offset of the instruction into the function.
6138
6139 Having built the table we then group the fixes together to form
6140 tables that are as large as possible (subject to addressing
6141 constraints) and emit each table of constants after the last
6142 barrier that is within range of all the instructions in the group.
6143 If a group does not contain a barrier, then we forcibly create one
6144 by inserting a jump instruction into the flow. Once the table has
6145 been inserted, the insns are then modified to reference the
6146 relevant entry in the pool.
6147
6354dc9b 6148 Possible enhancements to the algorithm (not implemented) are:
949d79eb 6149
d5b7b3ae 6150 1) For some processors and object formats, there may be benefit in
949d79eb
RE
6151 aligning the pools to the start of cache lines; this alignment
6152 would need to be taken into account when calculating addressability
6354dc9b 6153 of a pool. */
2b835d68 6154
d5b7b3ae
RE
6155/* These typedefs are located at the start of this file, so that
6156 they can be used in the prototypes there. This comment is to
6157 remind readers of that fact so that the following structures
6158 can be understood more easily.
6159
6160 typedef struct minipool_node Mnode;
6161 typedef struct minipool_fixup Mfix; */
6162
6163struct minipool_node
6164{
6165 /* Doubly linked chain of entries. */
6166 Mnode * next;
6167 Mnode * prev;
6168 /* The maximum offset into the code that this entry can be placed. While
6169 pushing fixes for forward references, all entries are sorted in order
6170 of increasing max_address. */
6171 HOST_WIDE_INT max_address;
5519a4f9 6172 /* Similarly for an entry inserted for a backwards ref. */
d5b7b3ae
RE
6173 HOST_WIDE_INT min_address;
6174 /* The number of fixes referencing this entry. This can become zero
6175 if we "unpush" an entry. In this case we ignore the entry when we
6176 come to emit the code. */
6177 int refcount;
6178 /* The offset from the start of the minipool. */
6179 HOST_WIDE_INT offset;
6180 /* The value in table. */
6181 rtx value;
6182 /* The mode of value. */
6183 enum machine_mode mode;
5a9335ef
NC
6184 /* The size of the value. With iWMMXt enabled
6185 sizes > 4 also imply an alignment of 8-bytes. */
d5b7b3ae
RE
6186 int fix_size;
6187};
6188
6189struct minipool_fixup
2b835d68 6190{
d5b7b3ae
RE
6191 Mfix * next;
6192 rtx insn;
6193 HOST_WIDE_INT address;
6194 rtx * loc;
6195 enum machine_mode mode;
6196 int fix_size;
6197 rtx value;
6198 Mnode * minipool;
6199 HOST_WIDE_INT forwards;
6200 HOST_WIDE_INT backwards;
6201};
2b835d68 6202
d5b7b3ae
RE
6203/* Fixes less than a word need padding out to a word boundary. */
6204#define MINIPOOL_FIX_SIZE(mode) \
6205 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
2b835d68 6206
d5b7b3ae
RE
6207static Mnode * minipool_vector_head;
6208static Mnode * minipool_vector_tail;
6209static rtx minipool_vector_label;
332072db 6210
d5b7b3ae
RE
6211/* The linked list of all minipool fixes required for this function. */
6212Mfix * minipool_fix_head;
6213Mfix * minipool_fix_tail;
6214/* The fix entry for the current minipool, once it has been placed. */
6215Mfix * minipool_barrier;
6216
6217/* Determines if INSN is the start of a jump table. Returns the end
6218 of the TABLE or NULL_RTX. */
6219static rtx
e32bac5b 6220is_jump_table (rtx insn)
2b835d68 6221{
d5b7b3ae 6222 rtx table;
da6558fd 6223
d5b7b3ae
RE
6224 if (GET_CODE (insn) == JUMP_INSN
6225 && JUMP_LABEL (insn) != NULL
6226 && ((table = next_real_insn (JUMP_LABEL (insn)))
6227 == next_real_insn (insn))
6228 && table != NULL
6229 && GET_CODE (table) == JUMP_INSN
6230 && (GET_CODE (PATTERN (table)) == ADDR_VEC
6231 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
6232 return table;
6233
6234 return NULL_RTX;
2b835d68
RE
6235}
6236
657d9449
RE
6237#ifndef JUMP_TABLES_IN_TEXT_SECTION
6238#define JUMP_TABLES_IN_TEXT_SECTION 0
6239#endif
6240
d5b7b3ae 6241static HOST_WIDE_INT
e32bac5b 6242get_jump_table_size (rtx insn)
2b835d68 6243{
657d9449
RE
6244 /* ADDR_VECs only take room if read-only data does into the text
6245 section. */
6246 if (JUMP_TABLES_IN_TEXT_SECTION
d48bc59a 6247#if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
657d9449
RE
6248 || 1
6249#endif
6250 )
6251 {
6252 rtx body = PATTERN (insn);
6253 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 6254
657d9449
RE
6255 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
6256 }
6257
6258 return 0;
d5b7b3ae 6259}
2b835d68 6260
d5b7b3ae
RE
6261/* Move a minipool fix MP from its current location to before MAX_MP.
6262 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
093354e0 6263 constraints may need updating. */
d5b7b3ae 6264static Mnode *
e32bac5b
RE
6265move_minipool_fix_forward_ref (Mnode *mp, Mnode *max_mp,
6266 HOST_WIDE_INT max_address)
d5b7b3ae
RE
6267{
6268 /* This should never be true and the code below assumes these are
6269 different. */
6270 if (mp == max_mp)
6271 abort ();
6272
6273 if (max_mp == NULL)
6274 {
6275 if (max_address < mp->max_address)
6276 mp->max_address = max_address;
6277 }
6278 else
2b835d68 6279 {
d5b7b3ae
RE
6280 if (max_address > max_mp->max_address - mp->fix_size)
6281 mp->max_address = max_mp->max_address - mp->fix_size;
6282 else
6283 mp->max_address = max_address;
2b835d68 6284
d5b7b3ae
RE
6285 /* Unlink MP from its current position. Since max_mp is non-null,
6286 mp->prev must be non-null. */
6287 mp->prev->next = mp->next;
6288 if (mp->next != NULL)
6289 mp->next->prev = mp->prev;
6290 else
6291 minipool_vector_tail = mp->prev;
2b835d68 6292
d5b7b3ae
RE
6293 /* Re-insert it before MAX_MP. */
6294 mp->next = max_mp;
6295 mp->prev = max_mp->prev;
6296 max_mp->prev = mp;
6297
6298 if (mp->prev != NULL)
6299 mp->prev->next = mp;
6300 else
6301 minipool_vector_head = mp;
6302 }
2b835d68 6303
d5b7b3ae
RE
6304 /* Save the new entry. */
6305 max_mp = mp;
6306
d6a7951f 6307 /* Scan over the preceding entries and adjust their addresses as
d5b7b3ae
RE
6308 required. */
6309 while (mp->prev != NULL
6310 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6311 {
6312 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6313 mp = mp->prev;
2b835d68
RE
6314 }
6315
d5b7b3ae 6316 return max_mp;
2b835d68
RE
6317}
6318
d5b7b3ae
RE
6319/* Add a constant to the minipool for a forward reference. Returns the
6320 node added or NULL if the constant will not fit in this pool. */
6321static Mnode *
e32bac5b 6322add_minipool_forward_ref (Mfix *fix)
d5b7b3ae
RE
6323{
6324 /* If set, max_mp is the first pool_entry that has a lower
6325 constraint than the one we are trying to add. */
6326 Mnode * max_mp = NULL;
6327 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6328 Mnode * mp;
6329
6330 /* If this fix's address is greater than the address of the first
6331 entry, then we can't put the fix in this pool. We subtract the
6332 size of the current fix to ensure that if the table is fully
6333 packed we still have enough room to insert this value by suffling
6334 the other fixes forwards. */
6335 if (minipool_vector_head &&
6336 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6337 return NULL;
2b835d68 6338
d5b7b3ae
RE
6339 /* Scan the pool to see if a constant with the same value has
6340 already been added. While we are doing this, also note the
6341 location where we must insert the constant if it doesn't already
6342 exist. */
6343 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6344 {
6345 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6346 && fix->mode == mp->mode
6347 && (GET_CODE (fix->value) != CODE_LABEL
6348 || (CODE_LABEL_NUMBER (fix->value)
6349 == CODE_LABEL_NUMBER (mp->value)))
6350 && rtx_equal_p (fix->value, mp->value))
6351 {
6352 /* More than one fix references this entry. */
6353 mp->refcount++;
6354 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6355 }
6356
6357 /* Note the insertion point if necessary. */
6358 if (max_mp == NULL
6359 && mp->max_address > max_address)
6360 max_mp = mp;
5a9335ef
NC
6361
6362 /* If we are inserting an 8-bytes aligned quantity and
6363 we have not already found an insertion point, then
6364 make sure that all such 8-byte aligned quantities are
6365 placed at the start of the pool. */
6366 if (TARGET_REALLY_IWMMXT
6367 && max_mp == NULL
6368 && fix->fix_size == 8
6369 && mp->fix_size != 8)
6370 {
6371 max_mp = mp;
6372 max_address = mp->max_address;
6373 }
d5b7b3ae
RE
6374 }
6375
6376 /* The value is not currently in the minipool, so we need to create
6377 a new entry for it. If MAX_MP is NULL, the entry will be put on
6378 the end of the list since the placement is less constrained than
6379 any existing entry. Otherwise, we insert the new fix before
6bc82793 6380 MAX_MP and, if necessary, adjust the constraints on the other
d5b7b3ae
RE
6381 entries. */
6382 mp = xmalloc (sizeof (* mp));
6383 mp->fix_size = fix->fix_size;
6384 mp->mode = fix->mode;
6385 mp->value = fix->value;
6386 mp->refcount = 1;
6387 /* Not yet required for a backwards ref. */
6388 mp->min_address = -65536;
6389
6390 if (max_mp == NULL)
6391 {
6392 mp->max_address = max_address;
6393 mp->next = NULL;
6394 mp->prev = minipool_vector_tail;
6395
6396 if (mp->prev == NULL)
6397 {
6398 minipool_vector_head = mp;
6399 minipool_vector_label = gen_label_rtx ();
7551cbc7 6400 }
2b835d68 6401 else
d5b7b3ae 6402 mp->prev->next = mp;
2b835d68 6403
d5b7b3ae
RE
6404 minipool_vector_tail = mp;
6405 }
6406 else
6407 {
6408 if (max_address > max_mp->max_address - mp->fix_size)
6409 mp->max_address = max_mp->max_address - mp->fix_size;
6410 else
6411 mp->max_address = max_address;
6412
6413 mp->next = max_mp;
6414 mp->prev = max_mp->prev;
6415 max_mp->prev = mp;
6416 if (mp->prev != NULL)
6417 mp->prev->next = mp;
6418 else
6419 minipool_vector_head = mp;
6420 }
6421
6422 /* Save the new entry. */
6423 max_mp = mp;
6424
d6a7951f 6425 /* Scan over the preceding entries and adjust their addresses as
d5b7b3ae
RE
6426 required. */
6427 while (mp->prev != NULL
6428 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6429 {
6430 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6431 mp = mp->prev;
2b835d68
RE
6432 }
6433
d5b7b3ae
RE
6434 return max_mp;
6435}
6436
6437static Mnode *
e32bac5b
RE
6438move_minipool_fix_backward_ref (Mnode *mp, Mnode *min_mp,
6439 HOST_WIDE_INT min_address)
d5b7b3ae
RE
6440{
6441 HOST_WIDE_INT offset;
6442
6443 /* This should never be true, and the code below assumes these are
6444 different. */
6445 if (mp == min_mp)
6446 abort ();
6447
6448 if (min_mp == NULL)
2b835d68 6449 {
d5b7b3ae
RE
6450 if (min_address > mp->min_address)
6451 mp->min_address = min_address;
6452 }
6453 else
6454 {
6455 /* We will adjust this below if it is too loose. */
6456 mp->min_address = min_address;
6457
6458 /* Unlink MP from its current position. Since min_mp is non-null,
6459 mp->next must be non-null. */
6460 mp->next->prev = mp->prev;
6461 if (mp->prev != NULL)
6462 mp->prev->next = mp->next;
6463 else
6464 minipool_vector_head = mp->next;
6465
6466 /* Reinsert it after MIN_MP. */
6467 mp->prev = min_mp;
6468 mp->next = min_mp->next;
6469 min_mp->next = mp;
6470 if (mp->next != NULL)
6471 mp->next->prev = mp;
2b835d68 6472 else
d5b7b3ae
RE
6473 minipool_vector_tail = mp;
6474 }
6475
6476 min_mp = mp;
6477
6478 offset = 0;
6479 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6480 {
6481 mp->offset = offset;
6482 if (mp->refcount > 0)
6483 offset += mp->fix_size;
6484
6485 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6486 mp->next->min_address = mp->min_address + mp->fix_size;
6487 }
6488
6489 return min_mp;
6490}
6491
6492/* Add a constant to the minipool for a backward reference. Returns the
6493 node added or NULL if the constant will not fit in this pool.
6494
6495 Note that the code for insertion for a backwards reference can be
6496 somewhat confusing because the calculated offsets for each fix do
6497 not take into account the size of the pool (which is still under
6498 construction. */
6499static Mnode *
e32bac5b 6500add_minipool_backward_ref (Mfix *fix)
d5b7b3ae
RE
6501{
6502 /* If set, min_mp is the last pool_entry that has a lower constraint
6503 than the one we are trying to add. */
e32bac5b 6504 Mnode *min_mp = NULL;
d5b7b3ae
RE
6505 /* This can be negative, since it is only a constraint. */
6506 HOST_WIDE_INT min_address = fix->address - fix->backwards;
e32bac5b 6507 Mnode *mp;
d5b7b3ae
RE
6508
6509 /* If we can't reach the current pool from this insn, or if we can't
6510 insert this entry at the end of the pool without pushing other
6511 fixes out of range, then we don't try. This ensures that we
6512 can't fail later on. */
6513 if (min_address >= minipool_barrier->address
6514 || (minipool_vector_tail->min_address + fix->fix_size
6515 >= minipool_barrier->address))
6516 return NULL;
6517
6518 /* Scan the pool to see if a constant with the same value has
6519 already been added. While we are doing this, also note the
6520 location where we must insert the constant if it doesn't already
6521 exist. */
6522 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6523 {
6524 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6525 && fix->mode == mp->mode
6526 && (GET_CODE (fix->value) != CODE_LABEL
6527 || (CODE_LABEL_NUMBER (fix->value)
6528 == CODE_LABEL_NUMBER (mp->value)))
6529 && rtx_equal_p (fix->value, mp->value)
6530 /* Check that there is enough slack to move this entry to the
6531 end of the table (this is conservative). */
6532 && (mp->max_address
6533 > (minipool_barrier->address
6534 + minipool_vector_tail->offset
6535 + minipool_vector_tail->fix_size)))
6536 {
6537 mp->refcount++;
6538 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6539 }
6540
6541 if (min_mp != NULL)
6542 mp->min_address += fix->fix_size;
6543 else
6544 {
6545 /* Note the insertion point if necessary. */
6546 if (mp->min_address < min_address)
5a9335ef
NC
6547 {
6548 /* For now, we do not allow the insertion of 8-byte alignment
6549 requiring nodes anywhere but at the start of the pool. */
6550 if (TARGET_REALLY_IWMMXT && fix->fix_size == 8 && mp->fix_size != 8)
6551 return NULL;
6552 else
6553 min_mp = mp;
6554 }
d5b7b3ae
RE
6555 else if (mp->max_address
6556 < minipool_barrier->address + mp->offset + fix->fix_size)
6557 {
6558 /* Inserting before this entry would push the fix beyond
6559 its maximum address (which can happen if we have
6560 re-located a forwards fix); force the new fix to come
6561 after it. */
6562 min_mp = mp;
6563 min_address = mp->min_address + fix->fix_size;
6564 }
5a9335ef
NC
6565 /* If we are inserting an 8-bytes aligned quantity and
6566 we have not already found an insertion point, then
6567 make sure that all such 8-byte aligned quantities are
6568 placed at the start of the pool. */
6569 else if (TARGET_REALLY_IWMMXT
6570 && min_mp == NULL
6571 && fix->fix_size == 8
6572 && mp->fix_size < 8)
6573 {
6574 min_mp = mp;
6575 min_address = mp->min_address + fix->fix_size;
6576 }
d5b7b3ae
RE
6577 }
6578 }
6579
6580 /* We need to create a new entry. */
6581 mp = xmalloc (sizeof (* mp));
6582 mp->fix_size = fix->fix_size;
6583 mp->mode = fix->mode;
6584 mp->value = fix->value;
6585 mp->refcount = 1;
6586 mp->max_address = minipool_barrier->address + 65536;
6587
6588 mp->min_address = min_address;
6589
6590 if (min_mp == NULL)
6591 {
6592 mp->prev = NULL;
6593 mp->next = minipool_vector_head;
6594
6595 if (mp->next == NULL)
6596 {
6597 minipool_vector_tail = mp;
6598 minipool_vector_label = gen_label_rtx ();
6599 }
6600 else
6601 mp->next->prev = mp;
6602
6603 minipool_vector_head = mp;
6604 }
6605 else
6606 {
6607 mp->next = min_mp->next;
6608 mp->prev = min_mp;
6609 min_mp->next = mp;
da6558fd 6610
d5b7b3ae
RE
6611 if (mp->next != NULL)
6612 mp->next->prev = mp;
6613 else
6614 minipool_vector_tail = mp;
6615 }
6616
6617 /* Save the new entry. */
6618 min_mp = mp;
6619
6620 if (mp->prev)
6621 mp = mp->prev;
6622 else
6623 mp->offset = 0;
6624
6625 /* Scan over the following entries and adjust their offsets. */
6626 while (mp->next != NULL)
6627 {
6628 if (mp->next->min_address < mp->min_address + mp->fix_size)
6629 mp->next->min_address = mp->min_address + mp->fix_size;
6630
6631 if (mp->refcount)
6632 mp->next->offset = mp->offset + mp->fix_size;
6633 else
6634 mp->next->offset = mp->offset;
6635
6636 mp = mp->next;
6637 }
6638
6639 return min_mp;
6640}
6641
6642static void
e32bac5b 6643assign_minipool_offsets (Mfix *barrier)
d5b7b3ae
RE
6644{
6645 HOST_WIDE_INT offset = 0;
e32bac5b 6646 Mnode *mp;
d5b7b3ae
RE
6647
6648 minipool_barrier = barrier;
6649
6650 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6651 {
6652 mp->offset = offset;
da6558fd 6653
d5b7b3ae
RE
6654 if (mp->refcount > 0)
6655 offset += mp->fix_size;
6656 }
6657}
6658
6659/* Output the literal table */
6660static void
e32bac5b 6661dump_minipool (rtx scan)
d5b7b3ae 6662{
5a9335ef
NC
6663 Mnode * mp;
6664 Mnode * nmp;
6665 int align64 = 0;
6666
6667 if (TARGET_REALLY_IWMMXT)
6668 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6669 if (mp->refcount > 0 && mp->fix_size == 8)
6670 {
6671 align64 = 1;
6672 break;
6673 }
d5b7b3ae
RE
6674
6675 if (rtl_dump_file)
6676 fprintf (rtl_dump_file,
5a9335ef
NC
6677 ";; Emitting minipool after insn %u; address %ld; align %d (bytes)\n",
6678 INSN_UID (scan), (unsigned long) minipool_barrier->address, align64 ? 8 : 4);
d5b7b3ae
RE
6679
6680 scan = emit_label_after (gen_label_rtx (), scan);
5a9335ef 6681 scan = emit_insn_after (align64 ? gen_align_8 () : gen_align_4 (), scan);
d5b7b3ae
RE
6682 scan = emit_label_after (minipool_vector_label, scan);
6683
6684 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6685 {
6686 if (mp->refcount > 0)
6687 {
6688 if (rtl_dump_file)
6689 {
6690 fprintf (rtl_dump_file,
6691 ";; Offset %u, min %ld, max %ld ",
6692 (unsigned) mp->offset, (unsigned long) mp->min_address,
6693 (unsigned long) mp->max_address);
6694 arm_print_value (rtl_dump_file, mp->value);
6695 fputc ('\n', rtl_dump_file);
6696 }
6697
6698 switch (mp->fix_size)
6699 {
6700#ifdef HAVE_consttable_1
6701 case 1:
6702 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6703 break;
6704
6705#endif
6706#ifdef HAVE_consttable_2
6707 case 2:
6708 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6709 break;
6710
6711#endif
6712#ifdef HAVE_consttable_4
6713 case 4:
6714 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6715 break;
6716
6717#endif
6718#ifdef HAVE_consttable_8
6719 case 8:
6720 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6721 break;
6722
6723#endif
6724 default:
6725 abort ();
6726 break;
6727 }
6728 }
6729
6730 nmp = mp->next;
6731 free (mp);
2b835d68
RE
6732 }
6733
d5b7b3ae
RE
6734 minipool_vector_head = minipool_vector_tail = NULL;
6735 scan = emit_insn_after (gen_consttable_end (), scan);
6736 scan = emit_barrier_after (scan);
2b835d68
RE
6737}
6738
d5b7b3ae
RE
6739/* Return the cost of forcibly inserting a barrier after INSN. */
6740static int
e32bac5b 6741arm_barrier_cost (rtx insn)
949d79eb 6742{
d5b7b3ae
RE
6743 /* Basing the location of the pool on the loop depth is preferable,
6744 but at the moment, the basic block information seems to be
6745 corrupt by this stage of the compilation. */
6746 int base_cost = 50;
6747 rtx next = next_nonnote_insn (insn);
6748
6749 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6750 base_cost -= 20;
6751
6752 switch (GET_CODE (insn))
6753 {
6754 case CODE_LABEL:
6755 /* It will always be better to place the table before the label, rather
6756 than after it. */
6757 return 50;
949d79eb 6758
d5b7b3ae
RE
6759 case INSN:
6760 case CALL_INSN:
6761 return base_cost;
6762
6763 case JUMP_INSN:
6764 return base_cost - 10;
6765
6766 default:
6767 return base_cost + 10;
6768 }
6769}
6770
6771/* Find the best place in the insn stream in the range
6772 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6773 Create the barrier by inserting a jump and add a new fix entry for
6774 it. */
6775static Mfix *
e32bac5b 6776create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
d5b7b3ae
RE
6777{
6778 HOST_WIDE_INT count = 0;
6779 rtx barrier;
6780 rtx from = fix->insn;
6781 rtx selected = from;
6782 int selected_cost;
6783 HOST_WIDE_INT selected_address;
6784 Mfix * new_fix;
6785 HOST_WIDE_INT max_count = max_address - fix->address;
6786 rtx label = gen_label_rtx ();
6787
6788 selected_cost = arm_barrier_cost (from);
6789 selected_address = fix->address;
6790
6791 while (from && count < max_count)
6792 {
6793 rtx tmp;
6794 int new_cost;
6795
6796 /* This code shouldn't have been called if there was a natural barrier
6797 within range. */
6798 if (GET_CODE (from) == BARRIER)
6799 abort ();
6800
6801 /* Count the length of this insn. */
6802 count += get_attr_length (from);
6803
6804 /* If there is a jump table, add its length. */
6805 tmp = is_jump_table (from);
6806 if (tmp != NULL)
6807 {
6808 count += get_jump_table_size (tmp);
6809
6810 /* Jump tables aren't in a basic block, so base the cost on
6811 the dispatch insn. If we select this location, we will
6812 still put the pool after the table. */
6813 new_cost = arm_barrier_cost (from);
6814
6815 if (count < max_count && new_cost <= selected_cost)
6816 {
6817 selected = tmp;
6818 selected_cost = new_cost;
6819 selected_address = fix->address + count;
6820 }
6821
6822 /* Continue after the dispatch table. */
6823 from = NEXT_INSN (tmp);
6824 continue;
6825 }
6826
6827 new_cost = arm_barrier_cost (from);
6828
6829 if (count < max_count && new_cost <= selected_cost)
6830 {
6831 selected = from;
6832 selected_cost = new_cost;
6833 selected_address = fix->address + count;
6834 }
6835
6836 from = NEXT_INSN (from);
6837 }
6838
6839 /* Create a new JUMP_INSN that branches around a barrier. */
6840 from = emit_jump_insn_after (gen_jump (label), selected);
6841 JUMP_LABEL (from) = label;
6842 barrier = emit_barrier_after (from);
6843 emit_label_after (label, barrier);
6844
6845 /* Create a minipool barrier entry for the new barrier. */
c7319d87 6846 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
d5b7b3ae
RE
6847 new_fix->insn = barrier;
6848 new_fix->address = selected_address;
6849 new_fix->next = fix->next;
6850 fix->next = new_fix;
6851
6852 return new_fix;
6853}
6854
6855/* Record that there is a natural barrier in the insn stream at
6856 ADDRESS. */
949d79eb 6857static void
e32bac5b 6858push_minipool_barrier (rtx insn, HOST_WIDE_INT address)
2b835d68 6859{
c7319d87 6860 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 6861
949d79eb
RE
6862 fix->insn = insn;
6863 fix->address = address;
2b835d68 6864
949d79eb
RE
6865 fix->next = NULL;
6866 if (minipool_fix_head != NULL)
6867 minipool_fix_tail->next = fix;
6868 else
6869 minipool_fix_head = fix;
6870
6871 minipool_fix_tail = fix;
6872}
2b835d68 6873
d5b7b3ae
RE
6874/* Record INSN, which will need fixing up to load a value from the
6875 minipool. ADDRESS is the offset of the insn since the start of the
6876 function; LOC is a pointer to the part of the insn which requires
6877 fixing; VALUE is the constant that must be loaded, which is of type
6878 MODE. */
949d79eb 6879static void
e32bac5b
RE
6880push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx *loc,
6881 enum machine_mode mode, rtx value)
949d79eb 6882{
c7319d87 6883 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
949d79eb
RE
6884
6885#ifdef AOF_ASSEMBLER
093354e0 6886 /* PIC symbol references need to be converted into offsets into the
949d79eb 6887 based area. */
d5b7b3ae
RE
6888 /* XXX This shouldn't be done here. */
6889 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
949d79eb
RE
6890 value = aof_pic_entry (value);
6891#endif /* AOF_ASSEMBLER */
6892
6893 fix->insn = insn;
6894 fix->address = address;
6895 fix->loc = loc;
6896 fix->mode = mode;
d5b7b3ae 6897 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 6898 fix->value = value;
d5b7b3ae
RE
6899 fix->forwards = get_attr_pool_range (insn);
6900 fix->backwards = get_attr_neg_pool_range (insn);
6901 fix->minipool = NULL;
949d79eb
RE
6902
6903 /* If an insn doesn't have a range defined for it, then it isn't
6904 expecting to be reworked by this code. Better to abort now than
6905 to generate duff assembly code. */
d5b7b3ae 6906 if (fix->forwards == 0 && fix->backwards == 0)
949d79eb
RE
6907 abort ();
6908
5a9335ef
NC
6909 /* With iWMMXt enabled, the pool is aligned to an 8-byte boundary.
6910 So there might be an empty word before the start of the pool.
6911 Hence we reduce the forward range by 4 to allow for this
6912 possibility. */
6913 if (TARGET_REALLY_IWMMXT && fix->fix_size == 8)
6914 fix->forwards -= 4;
6915
d5b7b3ae
RE
6916 if (rtl_dump_file)
6917 {
6918 fprintf (rtl_dump_file,
6919 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6920 GET_MODE_NAME (mode),
6921 INSN_UID (insn), (unsigned long) address,
6922 -1 * (long)fix->backwards, (long)fix->forwards);
6923 arm_print_value (rtl_dump_file, fix->value);
6924 fprintf (rtl_dump_file, "\n");
6925 }
6926
6354dc9b 6927 /* Add it to the chain of fixes. */
949d79eb 6928 fix->next = NULL;
d5b7b3ae 6929
949d79eb
RE
6930 if (minipool_fix_head != NULL)
6931 minipool_fix_tail->next = fix;
6932 else
6933 minipool_fix_head = fix;
6934
6935 minipool_fix_tail = fix;
6936}
6937
f0375c66
NC
6938/* Scan INSN and note any of its operands that need fixing.
6939 If DO_PUSHES is false we do not actually push any of the fixups
6940 needed. The function returns TRUE is any fixups were needed/pushed.
6941 This is used by arm_memory_load_p() which needs to know about loads
6942 of constants that will be converted into minipool loads. */
f0375c66 6943static bool
e32bac5b 6944note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
949d79eb 6945{
f0375c66 6946 bool result = false;
949d79eb
RE
6947 int opno;
6948
d5b7b3ae 6949 extract_insn (insn);
949d79eb 6950
5895f793 6951 if (!constrain_operands (1))
949d79eb
RE
6952 fatal_insn_not_found (insn);
6953
8c2a5582
RE
6954 if (recog_data.n_alternatives == 0)
6955 return false;
6956
f0375c66 6957 /* Fill in recog_op_alt with information about the constraints of this insn. */
949d79eb
RE
6958 preprocess_constraints ();
6959
1ccbefce 6960 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 6961 {
6354dc9b 6962 /* Things we need to fix can only occur in inputs. */
36ab44c7 6963 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
6964 continue;
6965
6966 /* If this alternative is a memory reference, then any mention
6967 of constants in this alternative is really to fool reload
6968 into allowing us to accept one there. We need to fix them up
6969 now so that we output the right code. */
6970 if (recog_op_alt[opno][which_alternative].memory_ok)
6971 {
1ccbefce 6972 rtx op = recog_data.operand[opno];
949d79eb
RE
6973
6974 if (CONSTANT_P (op))
f0375c66
NC
6975 {
6976 if (do_pushes)
6977 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6978 recog_data.operand_mode[opno], op);
6979 result = true;
6980 }
d5b7b3ae 6981 else if (GET_CODE (op) == MEM
949d79eb
RE
6982 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6983 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
f0375c66
NC
6984 {
6985 if (do_pushes)
244b1afb
RE
6986 {
6987 rtx cop = avoid_constant_pool_reference (op);
6988
6989 /* Casting the address of something to a mode narrower
6990 than a word can cause avoid_constant_pool_reference()
6991 to return the pool reference itself. That's no good to
6992 us here. Lets just hope that we can use the
6993 constant pool value directly. */
6994 if (op == cop)
c769a35d 6995 cop = get_pool_constant (XEXP (op, 0));
244b1afb
RE
6996
6997 push_minipool_fix (insn, address,
6998 recog_data.operand_loc[opno],
c769a35d 6999 recog_data.operand_mode[opno], cop);
244b1afb 7000 }
f0375c66
NC
7001
7002 result = true;
7003 }
949d79eb 7004 }
2b835d68 7005 }
f0375c66
NC
7006
7007 return result;
2b835d68
RE
7008}
7009
18dbd950
RS
7010/* Gcc puts the pool in the wrong place for ARM, since we can only
7011 load addresses a limited distance around the pc. We do some
7012 special munging to move the constant pool values to the correct
7013 point in the code. */
18dbd950 7014static void
e32bac5b 7015arm_reorg (void)
2b835d68
RE
7016{
7017 rtx insn;
d5b7b3ae
RE
7018 HOST_WIDE_INT address = 0;
7019 Mfix * fix;
ad076f4e 7020
949d79eb 7021 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 7022
949d79eb
RE
7023 /* The first insn must always be a note, or the code below won't
7024 scan it properly. */
18dbd950
RS
7025 insn = get_insns ();
7026 if (GET_CODE (insn) != NOTE)
949d79eb
RE
7027 abort ();
7028
7029 /* Scan all the insns and record the operands that will need fixing. */
18dbd950 7030 for (insn = next_nonnote_insn (insn); insn; insn = next_nonnote_insn (insn))
2b835d68 7031 {
9b6b54e2 7032 if (TARGET_CIRRUS_FIX_INVALID_INSNS
f0375c66 7033 && (arm_cirrus_insn_p (insn)
9b6b54e2 7034 || GET_CODE (insn) == JUMP_INSN
f0375c66 7035 || arm_memory_load_p (insn)))
9b6b54e2
NC
7036 cirrus_reorg (insn);
7037
949d79eb 7038 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 7039 push_minipool_barrier (insn, address);
f0375c66 7040 else if (INSN_P (insn))
949d79eb
RE
7041 {
7042 rtx table;
7043
f0375c66 7044 note_invalid_constants (insn, address, true);
949d79eb 7045 address += get_attr_length (insn);
d5b7b3ae 7046
949d79eb
RE
7047 /* If the insn is a vector jump, add the size of the table
7048 and skip the table. */
d5b7b3ae 7049 if ((table = is_jump_table (insn)) != NULL)
2b835d68 7050 {
d5b7b3ae 7051 address += get_jump_table_size (table);
949d79eb
RE
7052 insn = table;
7053 }
7054 }
7055 }
332072db 7056
d5b7b3ae
RE
7057 fix = minipool_fix_head;
7058
949d79eb 7059 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 7060 while (fix)
949d79eb 7061 {
d5b7b3ae
RE
7062 Mfix * ftmp;
7063 Mfix * fdel;
7064 Mfix * last_added_fix;
7065 Mfix * last_barrier = NULL;
7066 Mfix * this_fix;
949d79eb
RE
7067
7068 /* Skip any further barriers before the next fix. */
7069 while (fix && GET_CODE (fix->insn) == BARRIER)
7070 fix = fix->next;
7071
d5b7b3ae 7072 /* No more fixes. */
949d79eb
RE
7073 if (fix == NULL)
7074 break;
332072db 7075
d5b7b3ae 7076 last_added_fix = NULL;
2b835d68 7077
d5b7b3ae 7078 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 7079 {
949d79eb 7080 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 7081 {
d5b7b3ae
RE
7082 if (ftmp->address >= minipool_vector_head->max_address)
7083 break;
2b835d68 7084
d5b7b3ae 7085 last_barrier = ftmp;
2b835d68 7086 }
d5b7b3ae
RE
7087 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
7088 break;
7089
7090 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 7091 }
949d79eb 7092
d5b7b3ae
RE
7093 /* If we found a barrier, drop back to that; any fixes that we
7094 could have reached but come after the barrier will now go in
7095 the next mini-pool. */
949d79eb
RE
7096 if (last_barrier != NULL)
7097 {
d5b7b3ae
RE
7098 /* Reduce the refcount for those fixes that won't go into this
7099 pool after all. */
7100 for (fdel = last_barrier->next;
7101 fdel && fdel != ftmp;
7102 fdel = fdel->next)
7103 {
7104 fdel->minipool->refcount--;
7105 fdel->minipool = NULL;
7106 }
7107
949d79eb
RE
7108 ftmp = last_barrier;
7109 }
7110 else
2bfa88dc 7111 {
d5b7b3ae
RE
7112 /* ftmp is first fix that we can't fit into this pool and
7113 there no natural barriers that we could use. Insert a
7114 new barrier in the code somewhere between the previous
7115 fix and this one, and arrange to jump around it. */
7116 HOST_WIDE_INT max_address;
7117
7118 /* The last item on the list of fixes must be a barrier, so
7119 we can never run off the end of the list of fixes without
7120 last_barrier being set. */
7121 if (ftmp == NULL)
7122 abort ();
7123
7124 max_address = minipool_vector_head->max_address;
2bfa88dc
RE
7125 /* Check that there isn't another fix that is in range that
7126 we couldn't fit into this pool because the pool was
7127 already too large: we need to put the pool before such an
7128 instruction. */
d5b7b3ae
RE
7129 if (ftmp->address < max_address)
7130 max_address = ftmp->address;
7131
7132 last_barrier = create_fix_barrier (last_added_fix, max_address);
7133 }
7134
7135 assign_minipool_offsets (last_barrier);
7136
7137 while (ftmp)
7138 {
7139 if (GET_CODE (ftmp->insn) != BARRIER
7140 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
7141 == NULL))
7142 break;
2bfa88dc 7143
d5b7b3ae 7144 ftmp = ftmp->next;
2bfa88dc 7145 }
949d79eb
RE
7146
7147 /* Scan over the fixes we have identified for this pool, fixing them
7148 up and adding the constants to the pool itself. */
d5b7b3ae 7149 for (this_fix = fix; this_fix && ftmp != this_fix;
949d79eb
RE
7150 this_fix = this_fix->next)
7151 if (GET_CODE (this_fix->insn) != BARRIER)
7152 {
949d79eb
RE
7153 rtx addr
7154 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
7155 minipool_vector_label),
d5b7b3ae 7156 this_fix->minipool->offset);
949d79eb
RE
7157 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
7158 }
7159
d5b7b3ae 7160 dump_minipool (last_barrier->insn);
949d79eb 7161 fix = ftmp;
2b835d68 7162 }
4b632bf1 7163
949d79eb
RE
7164 /* From now on we must synthesize any constants that we can't handle
7165 directly. This can happen if the RTL gets split during final
7166 instruction generation. */
4b632bf1 7167 after_arm_reorg = 1;
c7319d87
RE
7168
7169 /* Free the minipool memory. */
7170 obstack_free (&minipool_obstack, minipool_startobj);
2b835d68 7171}
cce8749e
CH
7172\f
7173/* Routines to output assembly language. */
7174
f3bb6135 7175/* If the rtx is the correct value then return the string of the number.
ff9940b0 7176 In this way we can ensure that valid double constants are generated even
6354dc9b 7177 when cross compiling. */
cd2b33d0 7178const char *
e32bac5b 7179fp_immediate_constant (rtx x)
ff9940b0
RE
7180{
7181 REAL_VALUE_TYPE r;
7182 int i;
7183
7184 if (!fpa_consts_inited)
7185 init_fpa_table ();
7186
7187 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7188 for (i = 0; i < 8; i++)
7189 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
7190 return strings_fpa[i];
f3bb6135 7191
ff9940b0
RE
7192 abort ();
7193}
7194
9997d19d 7195/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 7196static const char *
e32bac5b 7197fp_const_from_val (REAL_VALUE_TYPE *r)
9997d19d
RE
7198{
7199 int i;
7200
5895f793 7201 if (!fpa_consts_inited)
9997d19d
RE
7202 init_fpa_table ();
7203
7204 for (i = 0; i < 8; i++)
7205 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
7206 return strings_fpa[i];
7207
7208 abort ();
7209}
ff9940b0 7210
cce8749e
CH
7211/* Output the operands of a LDM/STM instruction to STREAM.
7212 MASK is the ARM register set mask of which only bits 0-15 are important.
6d3d9133
NC
7213 REG is the base register, either the frame pointer or the stack pointer,
7214 INSTR is the possibly suffixed load or store instruction. */
d5b7b3ae 7215static void
e32bac5b 7216print_multi_reg (FILE *stream, const char *instr, int reg, int mask)
cce8749e
CH
7217{
7218 int i;
7219 int not_first = FALSE;
7220
1d5473cb 7221 fputc ('\t', stream);
dd18ae56 7222 asm_fprintf (stream, instr, reg);
1d5473cb 7223 fputs (", {", stream);
62b10bbc 7224
d5b7b3ae 7225 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
7226 if (mask & (1 << i))
7227 {
7228 if (not_first)
7229 fprintf (stream, ", ");
62b10bbc 7230
dd18ae56 7231 asm_fprintf (stream, "%r", i);
cce8749e
CH
7232 not_first = TRUE;
7233 }
f3bb6135 7234
b17fe233
NC
7235 fprintf (stream, "}");
7236
7237 /* Add a ^ character for the 26-bit ABI, but only if we were loading
1ce53769
NC
7238 the PC. Otherwise we would generate an UNPREDICTABLE instruction.
7239 Strictly speaking the instruction would be unpredicatble only if
7240 we were writing back the base register as well, but since we never
7241 want to generate an LDM type 2 instruction (register bank switching)
7242 which is what you get if the PC is not being loaded, we do not need
7243 to check for writeback. */
b17fe233 7244 if (! TARGET_APCS_32
1ce53769 7245 && ((mask & (1 << PC_REGNUM)) != 0))
b17fe233
NC
7246 fprintf (stream, "^");
7247
7248 fprintf (stream, "\n");
f3bb6135 7249}
cce8749e 7250
6354dc9b 7251/* Output a 'call' insn. */
cd2b33d0 7252const char *
e32bac5b 7253output_call (rtx *operands)
cce8749e 7254{
6354dc9b 7255 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 7256
62b10bbc 7257 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 7258 {
62b10bbc 7259 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 7260 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 7261 }
62b10bbc 7262
1d5473cb 7263 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 7264
6cfc7210 7265 if (TARGET_INTERWORK)
da6558fd
NC
7266 output_asm_insn ("bx%?\t%0", operands);
7267 else
7268 output_asm_insn ("mov%?\t%|pc, %0", operands);
7269
f3bb6135
RE
7270 return "";
7271}
cce8749e 7272
6354dc9b 7273/* Output a 'call' insn that is a reference in memory. */
cd2b33d0 7274const char *
e32bac5b 7275output_call_mem (rtx *operands)
ff9940b0 7276{
6cfc7210 7277 if (TARGET_INTERWORK)
da6558fd
NC
7278 {
7279 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7280 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7281 output_asm_insn ("bx%?\t%|ip", operands);
7282 }
6ab5da80
RE
7283 else if (regno_use_in (LR_REGNUM, operands[0]))
7284 {
7285 /* LR is used in the memory address. We load the address in the
7286 first instruction. It's safe to use IP as the target of the
7287 load since the call will kill it anyway. */
7288 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7289 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7290 output_asm_insn ("mov%?\t%|pc, %|ip", operands);
7291 }
da6558fd
NC
7292 else
7293 {
7294 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7295 output_asm_insn ("ldr%?\t%|pc, %0", operands);
7296 }
7297
f3bb6135
RE
7298 return "";
7299}
ff9940b0
RE
7300
7301
3b684012
RE
7302/* Output a move from arm registers to an fpa registers.
7303 OPERANDS[0] is an fpa register.
ff9940b0 7304 OPERANDS[1] is the first registers of an arm register pair. */
cd2b33d0 7305const char *
e32bac5b 7306output_mov_long_double_fpa_from_arm (rtx *operands)
ff9940b0
RE
7307{
7308 int arm_reg0 = REGNO (operands[1]);
7309 rtx ops[3];
7310
62b10bbc
NC
7311 if (arm_reg0 == IP_REGNUM)
7312 abort ();
f3bb6135 7313
43cffd11
RE
7314 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7315 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7316 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 7317
1d5473cb
RE
7318 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
7319 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 7320
f3bb6135
RE
7321 return "";
7322}
ff9940b0 7323
3b684012 7324/* Output a move from an fpa register to arm registers.
ff9940b0 7325 OPERANDS[0] is the first registers of an arm register pair.
3b684012 7326 OPERANDS[1] is an fpa register. */
cd2b33d0 7327const char *
e32bac5b 7328output_mov_long_double_arm_from_fpa (rtx *operands)
ff9940b0
RE
7329{
7330 int arm_reg0 = REGNO (operands[0]);
7331 rtx ops[3];
7332
62b10bbc
NC
7333 if (arm_reg0 == IP_REGNUM)
7334 abort ();
f3bb6135 7335
43cffd11
RE
7336 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7337 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7338 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 7339
1d5473cb
RE
7340 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7341 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
7342 return "";
7343}
ff9940b0
RE
7344
7345/* Output a move from arm registers to arm registers of a long double
7346 OPERANDS[0] is the destination.
7347 OPERANDS[1] is the source. */
cd2b33d0 7348const char *
e32bac5b 7349output_mov_long_double_arm_from_arm (rtx *operands)
ff9940b0 7350{
6354dc9b 7351 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
7352 int dest_start = REGNO (operands[0]);
7353 int src_start = REGNO (operands[1]);
7354 rtx ops[2];
7355 int i;
7356
7357 if (dest_start < src_start)
7358 {
7359 for (i = 0; i < 3; i++)
7360 {
43cffd11
RE
7361 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7362 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 7363 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
7364 }
7365 }
7366 else
7367 {
7368 for (i = 2; i >= 0; i--)
7369 {
43cffd11
RE
7370 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7371 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 7372 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
7373 }
7374 }
f3bb6135 7375
ff9940b0
RE
7376 return "";
7377}
7378
7379
3b684012
RE
7380/* Output a move from arm registers to an fpa registers.
7381 OPERANDS[0] is an fpa register.
cce8749e 7382 OPERANDS[1] is the first registers of an arm register pair. */
cd2b33d0 7383const char *
e32bac5b 7384output_mov_double_fpa_from_arm (rtx *operands)
cce8749e
CH
7385{
7386 int arm_reg0 = REGNO (operands[1]);
7387 rtx ops[2];
7388
62b10bbc
NC
7389 if (arm_reg0 == IP_REGNUM)
7390 abort ();
7391
43cffd11
RE
7392 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7393 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
7394 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7395 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
7396 return "";
7397}
cce8749e 7398
3b684012 7399/* Output a move from an fpa register to arm registers.
cce8749e 7400 OPERANDS[0] is the first registers of an arm register pair.
3b684012 7401 OPERANDS[1] is an fpa register. */
cd2b33d0 7402const char *
e32bac5b 7403output_mov_double_arm_from_fpa (rtx *operands)
cce8749e
CH
7404{
7405 int arm_reg0 = REGNO (operands[0]);
7406 rtx ops[2];
7407
62b10bbc
NC
7408 if (arm_reg0 == IP_REGNUM)
7409 abort ();
f3bb6135 7410
43cffd11
RE
7411 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7412 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
7413 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7414 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
7415 return "";
7416}
cce8749e
CH
7417
7418/* Output a move between double words.
7419 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7420 or MEM<-REG and all MEMs must be offsettable addresses. */
cd2b33d0 7421const char *
e32bac5b 7422output_move_double (rtx *operands)
cce8749e
CH
7423{
7424 enum rtx_code code0 = GET_CODE (operands[0]);
7425 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 7426 rtx otherops[3];
cce8749e
CH
7427
7428 if (code0 == REG)
7429 {
7430 int reg0 = REGNO (operands[0]);
7431
43cffd11 7432 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 7433
cce8749e
CH
7434 if (code1 == REG)
7435 {
7436 int reg1 = REGNO (operands[1]);
62b10bbc
NC
7437 if (reg1 == IP_REGNUM)
7438 abort ();
f3bb6135 7439
6354dc9b 7440 /* Ensure the second source is not overwritten. */
c1c2bc04 7441 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 7442 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 7443 else
6cfc7210 7444 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e 7445 }
5a9335ef
NC
7446 else if (code1 == CONST_VECTOR)
7447 {
7448 HOST_WIDE_INT hint = 0;
7449
7450 switch (GET_MODE (operands[1]))
7451 {
7452 case V2SImode:
7453 otherops[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 1)));
7454 operands[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)));
7455 break;
7456
7457 case V4HImode:
7458 if (BYTES_BIG_ENDIAN)
7459 {
7460 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7461 hint <<= 16;
7462 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7463 }
7464 else
7465 {
7466 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7467 hint <<= 16;
7468 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7469 }
7470
7471 otherops[1] = GEN_INT (hint);
7472 hint = 0;
7473
7474 if (BYTES_BIG_ENDIAN)
7475 {
7476 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7477 hint <<= 16;
7478 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7479 }
7480 else
7481 {
7482 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7483 hint <<= 16;
7484 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7485 }
7486
7487 operands[1] = GEN_INT (hint);
7488 break;
7489
7490 case V8QImode:
7491 if (BYTES_BIG_ENDIAN)
7492 {
7493 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
7494 hint <<= 8;
7495 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
7496 hint <<= 8;
7497 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
7498 hint <<= 8;
7499 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
7500 }
7501 else
7502 {
7503 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
7504 hint <<= 8;
7505 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
7506 hint <<= 8;
7507 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
7508 hint <<= 8;
7509 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
7510 }
7511
7512 otherops[1] = GEN_INT (hint);
7513 hint = 0;
7514
7515 if (BYTES_BIG_ENDIAN)
7516 {
7517 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7518 hint <<= 8;
7519 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7520 hint <<= 8;
7521 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7522 hint <<= 8;
7523 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7524 }
7525 else
7526 {
7527 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7528 hint <<= 8;
7529 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7530 hint <<= 8;
7531 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7532 hint <<= 8;
7533 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7534 }
7535
7536 operands[1] = GEN_INT (hint);
7537 break;
7538
7539 default:
7540 abort ();
7541 }
7542 output_mov_immediate (operands);
7543 output_mov_immediate (otherops);
7544 }
cce8749e
CH
7545 else if (code1 == CONST_DOUBLE)
7546 {
226a5051
RE
7547 if (GET_MODE (operands[1]) == DFmode)
7548 {
b216cd4a 7549 REAL_VALUE_TYPE r;
226a5051 7550 long l[2];
226a5051 7551
b216cd4a
ZW
7552 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7553 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
d5b7b3ae
RE
7554 otherops[1] = GEN_INT (l[1]);
7555 operands[1] = GEN_INT (l[0]);
226a5051 7556 }
c1c2bc04
RE
7557 else if (GET_MODE (operands[1]) != VOIDmode)
7558 abort ();
7559 else if (WORDS_BIG_ENDIAN)
7560 {
c1c2bc04
RE
7561 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7562 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7563 }
226a5051
RE
7564 else
7565 {
7566 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7567 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7568 }
6cfc7210 7569
c1c2bc04
RE
7570 output_mov_immediate (operands);
7571 output_mov_immediate (otherops);
cce8749e
CH
7572 }
7573 else if (code1 == CONST_INT)
7574 {
56636818
JL
7575#if HOST_BITS_PER_WIDE_INT > 32
7576 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7577 what the upper word is. */
7578 if (WORDS_BIG_ENDIAN)
7579 {
7580 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7581 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7582 }
7583 else
7584 {
7585 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7586 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7587 }
7588#else
6354dc9b 7589 /* Sign extend the intval into the high-order word. */
c1c2bc04
RE
7590 if (WORDS_BIG_ENDIAN)
7591 {
7592 otherops[1] = operands[1];
7593 operands[1] = (INTVAL (operands[1]) < 0
7594 ? constm1_rtx : const0_rtx);
7595 }
ff9940b0 7596 else
c1c2bc04 7597 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 7598#endif
c1c2bc04
RE
7599 output_mov_immediate (otherops);
7600 output_mov_immediate (operands);
cce8749e
CH
7601 }
7602 else if (code1 == MEM)
7603 {
ff9940b0 7604 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 7605 {
ff9940b0 7606 case REG:
9997d19d 7607 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 7608 break;
2b835d68 7609
ff9940b0 7610 case PRE_INC:
6354dc9b 7611 abort (); /* Should never happen now. */
ff9940b0 7612 break;
2b835d68 7613
ff9940b0 7614 case PRE_DEC:
2b835d68 7615 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 7616 break;
2b835d68 7617
ff9940b0 7618 case POST_INC:
9997d19d 7619 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 7620 break;
2b835d68 7621
ff9940b0 7622 case POST_DEC:
6354dc9b 7623 abort (); /* Should never happen now. */
ff9940b0 7624 break;
2b835d68
RE
7625
7626 case LABEL_REF:
7627 case CONST:
7628 output_asm_insn ("adr%?\t%0, %1", operands);
7629 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7630 break;
7631
ff9940b0 7632 default:
aec3cfba
NC
7633 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7634 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 7635 {
2b835d68
RE
7636 otherops[0] = operands[0];
7637 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7638 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
1d6e90ac 7639
2b835d68
RE
7640 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7641 {
7642 if (GET_CODE (otherops[2]) == CONST_INT)
7643 {
06bea5aa 7644 switch ((int) INTVAL (otherops[2]))
2b835d68
RE
7645 {
7646 case -8:
7647 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7648 return "";
7649 case -4:
7650 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7651 return "";
7652 case 4:
7653 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7654 return "";
7655 }
1d6e90ac 7656
2b835d68
RE
7657 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7658 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7659 else
7660 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7661 }
7662 else
7663 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7664 }
7665 else
7666 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 7667
2b835d68
RE
7668 return "ldm%?ia\t%0, %M0";
7669 }
7670 else
7671 {
a4a37b30 7672 otherops[1] = adjust_address (operands[1], SImode, 4);
2b835d68
RE
7673 /* Take care of overlapping base/data reg. */
7674 if (reg_mentioned_p (operands[0], operands[1]))
7675 {
7676 output_asm_insn ("ldr%?\t%0, %1", otherops);
7677 output_asm_insn ("ldr%?\t%0, %1", operands);
7678 }
7679 else
7680 {
7681 output_asm_insn ("ldr%?\t%0, %1", operands);
7682 output_asm_insn ("ldr%?\t%0, %1", otherops);
7683 }
cce8749e
CH
7684 }
7685 }
7686 }
2b835d68 7687 else
6354dc9b 7688 abort (); /* Constraints should prevent this. */
cce8749e
CH
7689 }
7690 else if (code0 == MEM && code1 == REG)
7691 {
62b10bbc
NC
7692 if (REGNO (operands[1]) == IP_REGNUM)
7693 abort ();
2b835d68 7694
ff9940b0
RE
7695 switch (GET_CODE (XEXP (operands[0], 0)))
7696 {
7697 case REG:
9997d19d 7698 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 7699 break;
2b835d68 7700
ff9940b0 7701 case PRE_INC:
6354dc9b 7702 abort (); /* Should never happen now. */
ff9940b0 7703 break;
2b835d68 7704
ff9940b0 7705 case PRE_DEC:
2b835d68 7706 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 7707 break;
2b835d68 7708
ff9940b0 7709 case POST_INC:
9997d19d 7710 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 7711 break;
2b835d68 7712
ff9940b0 7713 case POST_DEC:
6354dc9b 7714 abort (); /* Should never happen now. */
ff9940b0 7715 break;
2b835d68
RE
7716
7717 case PLUS:
7718 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7719 {
06bea5aa 7720 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
2b835d68
RE
7721 {
7722 case -8:
7723 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7724 return "";
7725
7726 case -4:
7727 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7728 return "";
7729
7730 case 4:
7731 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7732 return "";
7733 }
7734 }
7735 /* Fall through */
7736
ff9940b0 7737 default:
a4a37b30 7738 otherops[0] = adjust_address (operands[0], SImode, 4);
43cffd11 7739 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
7740 output_asm_insn ("str%?\t%1, %0", operands);
7741 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
7742 }
7743 }
2b835d68 7744 else
1d6e90ac
NC
7745 /* Constraints should prevent this. */
7746 abort ();
cce8749e 7747
9997d19d
RE
7748 return "";
7749}
cce8749e
CH
7750
7751
7752/* Output an arbitrary MOV reg, #n.
7753 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
cd2b33d0 7754const char *
e32bac5b 7755output_mov_immediate (rtx *operands)
cce8749e 7756{
f3bb6135 7757 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e 7758
1d6e90ac 7759 /* Try to use one MOV. */
cce8749e 7760 if (const_ok_for_arm (n))
1d6e90ac 7761 output_asm_insn ("mov%?\t%0, %1", operands);
cce8749e 7762
1d6e90ac
NC
7763 /* Try to use one MVN. */
7764 else if (const_ok_for_arm (~n))
cce8749e 7765 {
f3bb6135 7766 operands[1] = GEN_INT (~n);
9997d19d 7767 output_asm_insn ("mvn%?\t%0, %1", operands);
cce8749e 7768 }
1d6e90ac
NC
7769 else
7770 {
7771 int n_ones = 0;
7772 int i;
cce8749e 7773
1d6e90ac 7774 /* If all else fails, make it out of ORRs or BICs as appropriate. */
5a9335ef 7775 for (i = 0; i < 32; i++)
1d6e90ac 7776 if (n & 1 << i)
5a9335ef 7777 n_ones++;
cce8749e 7778
1d6e90ac
NC
7779 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7780 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7781 else
7782 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7783 }
f3bb6135
RE
7784
7785 return "";
7786}
cce8749e 7787
1d6e90ac
NC
7788/* Output an ADD r, s, #n where n may be too big for one instruction.
7789 If adding zero to one register, output nothing. */
cd2b33d0 7790const char *
e32bac5b 7791output_add_immediate (rtx *operands)
cce8749e 7792{
f3bb6135 7793 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
7794
7795 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7796 {
7797 if (n < 0)
7798 output_multi_immediate (operands,
9997d19d
RE
7799 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7800 -n);
cce8749e
CH
7801 else
7802 output_multi_immediate (operands,
9997d19d
RE
7803 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7804 n);
cce8749e 7805 }
f3bb6135
RE
7806
7807 return "";
7808}
cce8749e 7809
cce8749e
CH
7810/* Output a multiple immediate operation.
7811 OPERANDS is the vector of operands referred to in the output patterns.
7812 INSTR1 is the output pattern to use for the first constant.
7813 INSTR2 is the output pattern to use for subsequent constants.
7814 IMMED_OP is the index of the constant slot in OPERANDS.
7815 N is the constant value. */
cd2b33d0 7816static const char *
e32bac5b
RE
7817output_multi_immediate (rtx *operands, const char *instr1, const char *instr2,
7818 int immed_op, HOST_WIDE_INT n)
cce8749e 7819{
f3bb6135 7820#if HOST_BITS_PER_WIDE_INT > 32
30cf4896 7821 n &= 0xffffffff;
f3bb6135
RE
7822#endif
7823
cce8749e
CH
7824 if (n == 0)
7825 {
1d6e90ac 7826 /* Quick and easy output. */
cce8749e 7827 operands[immed_op] = const0_rtx;
1d6e90ac 7828 output_asm_insn (instr1, operands);
cce8749e
CH
7829 }
7830 else
7831 {
7832 int i;
cd2b33d0 7833 const char * instr = instr1;
cce8749e 7834
6354dc9b 7835 /* Note that n is never zero here (which would give no output). */
cce8749e
CH
7836 for (i = 0; i < 32; i += 2)
7837 {
7838 if (n & (3 << i))
7839 {
f3bb6135
RE
7840 operands[immed_op] = GEN_INT (n & (255 << i));
7841 output_asm_insn (instr, operands);
cce8749e
CH
7842 instr = instr2;
7843 i += 6;
7844 }
7845 }
7846 }
cd2b33d0 7847
f3bb6135 7848 return "";
9997d19d 7849}
cce8749e 7850
cce8749e
CH
7851/* Return the appropriate ARM instruction for the operation code.
7852 The returned result should not be overwritten. OP is the rtx of the
7853 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7854 was shifted. */
cd2b33d0 7855const char *
e32bac5b 7856arithmetic_instr (rtx op, int shift_first_arg)
cce8749e 7857{
9997d19d 7858 switch (GET_CODE (op))
cce8749e
CH
7859 {
7860 case PLUS:
f3bb6135
RE
7861 return "add";
7862
cce8749e 7863 case MINUS:
f3bb6135
RE
7864 return shift_first_arg ? "rsb" : "sub";
7865
cce8749e 7866 case IOR:
f3bb6135
RE
7867 return "orr";
7868
cce8749e 7869 case XOR:
f3bb6135
RE
7870 return "eor";
7871
cce8749e 7872 case AND:
f3bb6135
RE
7873 return "and";
7874
cce8749e 7875 default:
f3bb6135 7876 abort ();
cce8749e 7877 }
f3bb6135 7878}
cce8749e 7879
cce8749e
CH
7880/* Ensure valid constant shifts and return the appropriate shift mnemonic
7881 for the operation code. The returned result should not be overwritten.
7882 OP is the rtx code of the shift.
9997d19d 7883 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
6354dc9b 7884 shift. */
cd2b33d0 7885static const char *
e32bac5b 7886shift_op (rtx op, HOST_WIDE_INT *amountp)
cce8749e 7887{
cd2b33d0 7888 const char * mnem;
e2c671ba 7889 enum rtx_code code = GET_CODE (op);
cce8749e 7890
9997d19d
RE
7891 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7892 *amountp = -1;
7893 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7894 *amountp = INTVAL (XEXP (op, 1));
7895 else
7896 abort ();
7897
e2c671ba 7898 switch (code)
cce8749e
CH
7899 {
7900 case ASHIFT:
7901 mnem = "asl";
7902 break;
f3bb6135 7903
cce8749e
CH
7904 case ASHIFTRT:
7905 mnem = "asr";
cce8749e 7906 break;
f3bb6135 7907
cce8749e
CH
7908 case LSHIFTRT:
7909 mnem = "lsr";
cce8749e 7910 break;
f3bb6135 7911
9997d19d
RE
7912 case ROTATERT:
7913 mnem = "ror";
9997d19d
RE
7914 break;
7915
ff9940b0 7916 case MULT:
e2c671ba
RE
7917 /* We never have to worry about the amount being other than a
7918 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
7919 if (*amountp != -1)
7920 *amountp = int_log2 (*amountp);
7921 else
7922 abort ();
f3bb6135
RE
7923 return "asl";
7924
cce8749e 7925 default:
f3bb6135 7926 abort ();
cce8749e
CH
7927 }
7928
e2c671ba
RE
7929 if (*amountp != -1)
7930 {
7931 /* This is not 100% correct, but follows from the desire to merge
7932 multiplication by a power of 2 with the recognizer for a
7933 shift. >=32 is not a valid shift for "asl", so we must try and
7934 output a shift that produces the correct arithmetical result.
ddd5a7c1 7935 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
7936 is not set correctly if we set the flags; but we never use the
7937 carry bit from such an operation, so we can ignore that. */
7938 if (code == ROTATERT)
1d6e90ac
NC
7939 /* Rotate is just modulo 32. */
7940 *amountp &= 31;
e2c671ba
RE
7941 else if (*amountp != (*amountp & 31))
7942 {
7943 if (code == ASHIFT)
7944 mnem = "lsr";
7945 *amountp = 32;
7946 }
7947
7948 /* Shifts of 0 are no-ops. */
7949 if (*amountp == 0)
7950 return NULL;
7951 }
7952
9997d19d
RE
7953 return mnem;
7954}
cce8749e 7955
6354dc9b 7956/* Obtain the shift from the POWER of two. */
1d6e90ac 7957
18af7313 7958static HOST_WIDE_INT
e32bac5b 7959int_log2 (HOST_WIDE_INT power)
cce8749e 7960{
f3bb6135 7961 HOST_WIDE_INT shift = 0;
cce8749e 7962
30cf4896 7963 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
cce8749e
CH
7964 {
7965 if (shift > 31)
f3bb6135 7966 abort ();
e32bac5b 7967 shift++;
cce8749e 7968 }
f3bb6135
RE
7969
7970 return shift;
7971}
cce8749e 7972
cce8749e
CH
7973/* Output a .ascii pseudo-op, keeping track of lengths. This is because
7974 /bin/as is horribly restrictive. */
6cfc7210 7975#define MAX_ASCII_LEN 51
cce8749e
CH
7976
7977void
e32bac5b 7978output_ascii_pseudo_op (FILE *stream, const unsigned char *p, int len)
cce8749e
CH
7979{
7980 int i;
6cfc7210 7981 int len_so_far = 0;
cce8749e 7982
6cfc7210
NC
7983 fputs ("\t.ascii\t\"", stream);
7984
cce8749e
CH
7985 for (i = 0; i < len; i++)
7986 {
1d6e90ac 7987 int c = p[i];
cce8749e 7988
6cfc7210 7989 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 7990 {
6cfc7210 7991 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 7992 len_so_far = 0;
cce8749e
CH
7993 }
7994
6cfc7210 7995 switch (c)
cce8749e 7996 {
6cfc7210
NC
7997 case TARGET_TAB:
7998 fputs ("\\t", stream);
7999 len_so_far += 2;
8000 break;
8001
8002 case TARGET_FF:
8003 fputs ("\\f", stream);
8004 len_so_far += 2;
8005 break;
8006
8007 case TARGET_BS:
8008 fputs ("\\b", stream);
8009 len_so_far += 2;
8010 break;
8011
8012 case TARGET_CR:
8013 fputs ("\\r", stream);
8014 len_so_far += 2;
8015 break;
8016
8017 case TARGET_NEWLINE:
8018 fputs ("\\n", stream);
8019 c = p [i + 1];
8020 if ((c >= ' ' && c <= '~')
8021 || c == TARGET_TAB)
8022 /* This is a good place for a line break. */
8023 len_so_far = MAX_ASCII_LEN;
8024 else
8025 len_so_far += 2;
8026 break;
8027
8028 case '\"':
8029 case '\\':
8030 putc ('\\', stream);
5895f793 8031 len_so_far++;
d6b4baa4 8032 /* Drop through. */
f3bb6135 8033
6cfc7210
NC
8034 default:
8035 if (c >= ' ' && c <= '~')
8036 {
8037 putc (c, stream);
5895f793 8038 len_so_far++;
6cfc7210
NC
8039 }
8040 else
8041 {
8042 fprintf (stream, "\\%03o", c);
8043 len_so_far += 4;
8044 }
8045 break;
cce8749e 8046 }
cce8749e 8047 }
f3bb6135 8048
cce8749e 8049 fputs ("\"\n", stream);
f3bb6135 8050}
cce8749e 8051\f
121308d4
NC
8052/* Compute the register sabe mask for registers 0 through 12
8053 inclusive. This code is used by both arm_compute_save_reg_mask
8054 and arm_compute_initial_elimination_offset. */
6d3d9133 8055static unsigned long
e32bac5b 8056arm_compute_save_reg0_reg12_mask (void)
6d3d9133 8057{
121308d4 8058 unsigned long func_type = arm_current_func_type ();
6d3d9133
NC
8059 unsigned int save_reg_mask = 0;
8060 unsigned int reg;
6d3d9133 8061
7b8b8ade 8062 if (IS_INTERRUPT (func_type))
6d3d9133 8063 {
7b8b8ade 8064 unsigned int max_reg;
7b8b8ade
NC
8065 /* Interrupt functions must not corrupt any registers,
8066 even call clobbered ones. If this is a leaf function
8067 we can just examine the registers used by the RTL, but
8068 otherwise we have to assume that whatever function is
8069 called might clobber anything, and so we have to save
8070 all the call-clobbered registers as well. */
8071 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
8072 /* FIQ handlers have registers r8 - r12 banked, so
8073 we only need to check r0 - r7, Normal ISRs only
121308d4 8074 bank r14 and r15, so we must check up to r12.
7b8b8ade
NC
8075 r13 is the stack pointer which is always preserved,
8076 so we do not need to consider it here. */
8077 max_reg = 7;
8078 else
8079 max_reg = 12;
8080
8081 for (reg = 0; reg <= max_reg; reg++)
8082 if (regs_ever_live[reg]
8083 || (! current_function_is_leaf && call_used_regs [reg]))
6d3d9133
NC
8084 save_reg_mask |= (1 << reg);
8085 }
8086 else
8087 {
8088 /* In the normal case we only need to save those registers
8089 which are call saved and which are used by this function. */
8090 for (reg = 0; reg <= 10; reg++)
8091 if (regs_ever_live[reg] && ! call_used_regs [reg])
8092 save_reg_mask |= (1 << reg);
8093
8094 /* Handle the frame pointer as a special case. */
8095 if (! TARGET_APCS_FRAME
8096 && ! frame_pointer_needed
8097 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
8098 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
8099 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
8100
8101 /* If we aren't loading the PIC register,
8102 don't stack it even though it may be live. */
8103 if (flag_pic
8104 && ! TARGET_SINGLE_PIC_BASE
8105 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
8106 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
8107 }
8108
121308d4
NC
8109 return save_reg_mask;
8110}
8111
8112/* Compute a bit mask of which registers need to be
8113 saved on the stack for the current function. */
8114
8115static unsigned long
e32bac5b 8116arm_compute_save_reg_mask (void)
121308d4
NC
8117{
8118 unsigned int save_reg_mask = 0;
8119 unsigned long func_type = arm_current_func_type ();
8120
8121 if (IS_NAKED (func_type))
8122 /* This should never really happen. */
8123 return 0;
8124
8125 /* If we are creating a stack frame, then we must save the frame pointer,
8126 IP (which will hold the old stack pointer), LR and the PC. */
8127 if (frame_pointer_needed)
8128 save_reg_mask |=
8129 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
8130 | (1 << IP_REGNUM)
8131 | (1 << LR_REGNUM)
8132 | (1 << PC_REGNUM);
8133
8134 /* Volatile functions do not return, so there
8135 is no need to save any other registers. */
8136 if (IS_VOLATILE (func_type))
8137 return save_reg_mask;
8138
8139 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
8140
6d3d9133
NC
8141 /* Decide if we need to save the link register.
8142 Interrupt routines have their own banked link register,
8143 so they never need to save it.
1768c26f 8144 Otherwise if we do not use the link register we do not need to save
6d3d9133
NC
8145 it. If we are pushing other registers onto the stack however, we
8146 can save an instruction in the epilogue by pushing the link register
8147 now and then popping it back into the PC. This incurs extra memory
72ac76be 8148 accesses though, so we only do it when optimizing for size, and only
6d3d9133 8149 if we know that we will not need a fancy return sequence. */
3a7731fd 8150 if (regs_ever_live [LR_REGNUM]
6d3d9133
NC
8151 || (save_reg_mask
8152 && optimize_size
3a7731fd 8153 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
6d3d9133
NC
8154 save_reg_mask |= 1 << LR_REGNUM;
8155
6f7ebcbb
NC
8156 if (cfun->machine->lr_save_eliminated)
8157 save_reg_mask &= ~ (1 << LR_REGNUM);
8158
5a9335ef
NC
8159 if (TARGET_REALLY_IWMMXT
8160 && ((bit_count (save_reg_mask)
8161 + ARM_NUM_INTS (current_function_pretend_args_size)) % 2) != 0)
8162 {
8163 unsigned int reg;
8164
8165 /* The total number of registers that are going to be pushed
8166 onto the stack is odd. We need to ensure that the stack
8167 is 64-bit aligned before we start to save iWMMXt registers,
8168 and also before we start to create locals. (A local variable
8169 might be a double or long long which we will load/store using
8170 an iWMMXt instruction). Therefore we need to push another
8171 ARM register, so that the stack will be 64-bit aligned. We
8172 try to avoid using the arg registers (r0 -r3) as they might be
8173 used to pass values in a tail call. */
8174 for (reg = 4; reg <= 12; reg++)
8175 if ((save_reg_mask & (1 << reg)) == 0)
8176 break;
8177
8178 if (reg <= 12)
8179 save_reg_mask |= (1 << reg);
8180 else
8181 {
8182 cfun->machine->sibcall_blocked = 1;
8183 save_reg_mask |= (1 << 3);
8184 }
8185 }
8186
6d3d9133
NC
8187 return save_reg_mask;
8188}
8189
699a4925 8190/* Generate a function exit sequence. If REALLY_RETURN is false, then do
6d3d9133 8191 everything bar the final return instruction. */
cd2b33d0 8192const char *
e32bac5b 8193output_return_instruction (rtx operand, int really_return, int reverse)
ff9940b0 8194{
6d3d9133 8195 char conditional[10];
ff9940b0 8196 char instr[100];
6d3d9133
NC
8197 int reg;
8198 unsigned long live_regs_mask;
8199 unsigned long func_type;
e26053d1 8200
6d3d9133 8201 func_type = arm_current_func_type ();
e2c671ba 8202
6d3d9133 8203 if (IS_NAKED (func_type))
d5b7b3ae 8204 return "";
6d3d9133
NC
8205
8206 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 8207 {
699a4925
RE
8208 /* If this function was declared non-returning, and we have
8209 found a tail call, then we have to trust that the called
8210 function won't return. */
3a5a4282
PB
8211 if (really_return)
8212 {
8213 rtx ops[2];
8214
8215 /* Otherwise, trap an attempted return by aborting. */
8216 ops[0] = operand;
8217 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
8218 : "abort");
8219 assemble_external_libcall (ops[1]);
8220 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
8221 }
8222
e2c671ba
RE
8223 return "";
8224 }
6d3d9133 8225
5895f793 8226 if (current_function_calls_alloca && !really_return)
62b10bbc 8227 abort ();
ff9940b0 8228
c414f8a9 8229 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
d5b7b3ae 8230
6d3d9133 8231 return_used_this_function = 1;
ff9940b0 8232
6d3d9133 8233 live_regs_mask = arm_compute_save_reg_mask ();
ff9940b0 8234
1768c26f 8235 if (live_regs_mask)
6d3d9133 8236 {
1768c26f
PB
8237 const char * return_reg;
8238
8239 /* If we do not have any special requirements for function exit
8240 (eg interworking, or ISR) then we can load the return address
8241 directly into the PC. Otherwise we must load it into LR. */
8242 if (really_return
1768c26f
PB
8243 && ! TARGET_INTERWORK)
8244 return_reg = reg_names[PC_REGNUM];
6d3d9133 8245 else
1768c26f
PB
8246 return_reg = reg_names[LR_REGNUM];
8247
6d3d9133
NC
8248 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
8249 /* There are two possible reasons for the IP register being saved.
8250 Either a stack frame was created, in which case IP contains the
8251 old stack pointer, or an ISR routine corrupted it. If this in an
8252 ISR routine then just restore IP, otherwise restore IP into SP. */
8253 if (! IS_INTERRUPT (func_type))
8254 {
8255 live_regs_mask &= ~ (1 << IP_REGNUM);
8256 live_regs_mask |= (1 << SP_REGNUM);
8257 }
f3bb6135 8258
3a7731fd
PB
8259 /* On some ARM architectures it is faster to use LDR rather than
8260 LDM to load a single register. On other architectures, the
8261 cost is the same. In 26 bit mode, or for exception handlers,
8262 we have to use LDM to load the PC so that the CPSR is also
8263 restored. */
8264 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
6d3d9133 8265 {
3a7731fd
PB
8266 if (live_regs_mask == (unsigned int)(1 << reg))
8267 break;
8268 }
8269 if (reg <= LAST_ARM_REGNUM
8270 && (reg != LR_REGNUM
8271 || ! really_return
8272 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
8273 {
8274 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
8275 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
6d3d9133 8276 }
ff9940b0 8277 else
1d5473cb 8278 {
1768c26f
PB
8279 char *p;
8280 int first = 1;
6d3d9133 8281
699a4925
RE
8282 /* Generate the load multiple instruction to restore the
8283 registers. Note we can get here, even if
8284 frame_pointer_needed is true, but only if sp already
8285 points to the base of the saved core registers. */
8286 if (live_regs_mask & (1 << SP_REGNUM))
a72d4945
RE
8287 {
8288 unsigned HOST_WIDE_INT stack_adjust =
8289 arm_get_frame_size () + current_function_outgoing_args_size;
8290
8291 if (stack_adjust != 0 && stack_adjust != 4)
8292 abort ();
8293
8294 if (stack_adjust && arm_arch5)
8295 sprintf (instr, "ldm%sib\t%%|sp, {", conditional);
8296 else
8297 {
8298 /* If we can't use ldmib (SA110 bug), then try to pop r3
8299 instead. */
8300 if (stack_adjust)
8301 live_regs_mask |= 1 << 3;
8302 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
8303 }
8304 }
da6558fd 8305 else
1768c26f
PB
8306 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
8307
8308 p = instr + strlen (instr);
6d3d9133 8309
1768c26f
PB
8310 for (reg = 0; reg <= SP_REGNUM; reg++)
8311 if (live_regs_mask & (1 << reg))
8312 {
8313 int l = strlen (reg_names[reg]);
8314
8315 if (first)
8316 first = 0;
8317 else
8318 {
8319 memcpy (p, ", ", 2);
8320 p += 2;
8321 }
8322
8323 memcpy (p, "%|", 2);
8324 memcpy (p + 2, reg_names[reg], l);
8325 p += l + 2;
8326 }
8327
8328 if (live_regs_mask & (1 << LR_REGNUM))
8329 {
b17fe233
NC
8330 sprintf (p, "%s%%|%s}", first ? "" : ", ", return_reg);
8331 /* Decide if we need to add the ^ symbol to the end of the
8332 register list. This causes the saved condition codes
8333 register to be copied into the current condition codes
8334 register. We do the copy if we are conforming to the 32-bit
8335 ABI and this is an interrupt function, or if we are
8336 conforming to the 26-bit ABI. There is a special case for
8337 the 26-bit ABI however, which is if we are writing back the
8338 stack pointer but not loading the PC. In this case adding
8339 the ^ symbol would create a type 2 LDM instruction, where
8340 writeback is UNPREDICTABLE. We are safe in leaving the ^
8341 character off in this case however, since the actual return
8342 instruction will be a MOVS which will restore the CPSR. */
8343 if ((TARGET_APCS_32 && IS_INTERRUPT (func_type))
13eedc5a 8344 || (! TARGET_APCS_32 && really_return))
b17fe233 8345 strcat (p, "^");
1768c26f
PB
8346 }
8347 else
8348 strcpy (p, "}");
1d5473cb 8349 }
da6558fd 8350
1768c26f
PB
8351 output_asm_insn (instr, & operand);
8352
3a7731fd
PB
8353 /* See if we need to generate an extra instruction to
8354 perform the actual function return. */
8355 if (really_return
8356 && func_type != ARM_FT_INTERWORKED
8357 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
da6558fd 8358 {
3a7731fd
PB
8359 /* The return has already been handled
8360 by loading the LR into the PC. */
8361 really_return = 0;
da6558fd 8362 }
ff9940b0 8363 }
e26053d1 8364
1768c26f 8365 if (really_return)
ff9940b0 8366 {
6d3d9133
NC
8367 switch ((int) ARM_FUNC_TYPE (func_type))
8368 {
8369 case ARM_FT_ISR:
8370 case ARM_FT_FIQ:
8371 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
8372 break;
8373
8374 case ARM_FT_INTERWORKED:
8375 sprintf (instr, "bx%s\t%%|lr", conditional);
8376 break;
8377
8378 case ARM_FT_EXCEPTION:
8379 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
8380 break;
8381
8382 default:
1768c26f
PB
8383 /* ARMv5 implementations always provide BX, so interworking
8384 is the default unless APCS-26 is in use. */
8385 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
8386 sprintf (instr, "bx%s\t%%|lr", conditional);
8387 else
8388 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
8389 conditional, TARGET_APCS_32 ? "" : "s");
6d3d9133
NC
8390 break;
8391 }
1768c26f
PB
8392
8393 output_asm_insn (instr, & operand);
ff9940b0 8394 }
f3bb6135 8395
ff9940b0
RE
8396 return "";
8397}
8398
ef179a26
NC
8399/* Write the function name into the code section, directly preceding
8400 the function prologue.
8401
8402 Code will be output similar to this:
8403 t0
8404 .ascii "arm_poke_function_name", 0
8405 .align
8406 t1
8407 .word 0xff000000 + (t1 - t0)
8408 arm_poke_function_name
8409 mov ip, sp
8410 stmfd sp!, {fp, ip, lr, pc}
8411 sub fp, ip, #4
8412
8413 When performing a stack backtrace, code can inspect the value
8414 of 'pc' stored at 'fp' + 0. If the trace function then looks
8415 at location pc - 12 and the top 8 bits are set, then we know
8416 that there is a function name embedded immediately preceding this
8417 location and has length ((pc[-3]) & 0xff000000).
8418
8419 We assume that pc is declared as a pointer to an unsigned long.
8420
8421 It is of no benefit to output the function name if we are assembling
8422 a leaf function. These function types will not contain a stack
8423 backtrace structure, therefore it is not possible to determine the
8424 function name. */
ef179a26 8425void
e32bac5b 8426arm_poke_function_name (FILE *stream, const char *name)
ef179a26
NC
8427{
8428 unsigned long alignlength;
8429 unsigned long length;
8430 rtx x;
8431
d5b7b3ae 8432 length = strlen (name) + 1;
0c2ca901 8433 alignlength = ROUND_UP_WORD (length);
ef179a26 8434
949d79eb 8435 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 8436 ASM_OUTPUT_ALIGN (stream, 2);
30cf4896 8437 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
301d03af 8438 assemble_aligned_integer (UNITS_PER_WORD, x);
ef179a26
NC
8439}
8440
6d3d9133
NC
8441/* Place some comments into the assembler stream
8442 describing the current function. */
08c148a8 8443static void
e32bac5b 8444arm_output_function_prologue (FILE *f, HOST_WIDE_INT frame_size)
cce8749e 8445{
6d3d9133 8446 unsigned long func_type;
08c148a8
NB
8447
8448 if (!TARGET_ARM)
8449 {
8450 thumb_output_function_prologue (f, frame_size);
8451 return;
8452 }
6d3d9133
NC
8453
8454 /* Sanity check. */
abaa26e5 8455 if (arm_ccfsm_state || arm_target_insn)
6d3d9133 8456 abort ();
31fdb4d5 8457
6d3d9133
NC
8458 func_type = arm_current_func_type ();
8459
8460 switch ((int) ARM_FUNC_TYPE (func_type))
8461 {
8462 default:
8463 case ARM_FT_NORMAL:
8464 break;
8465 case ARM_FT_INTERWORKED:
8466 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8467 break;
8468 case ARM_FT_EXCEPTION_HANDLER:
8469 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8470 break;
8471 case ARM_FT_ISR:
8472 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8473 break;
8474 case ARM_FT_FIQ:
8475 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8476 break;
8477 case ARM_FT_EXCEPTION:
8478 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8479 break;
8480 }
ff9940b0 8481
6d3d9133
NC
8482 if (IS_NAKED (func_type))
8483 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8484
8485 if (IS_VOLATILE (func_type))
8486 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8487
8488 if (IS_NESTED (func_type))
8489 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8490
c53dddc2 8491 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %wd\n",
dd18ae56
NC
8492 current_function_args_size,
8493 current_function_pretend_args_size, frame_size);
6d3d9133 8494
3cb66fd7 8495 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
dd18ae56 8496 frame_pointer_needed,
3cb66fd7 8497 cfun->machine->uses_anonymous_args);
cce8749e 8498
6f7ebcbb
NC
8499 if (cfun->machine->lr_save_eliminated)
8500 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8501
32de079a
RE
8502#ifdef AOF_ASSEMBLER
8503 if (flag_pic)
dd18ae56 8504 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 8505#endif
6d3d9133
NC
8506
8507 return_used_this_function = 0;
f3bb6135 8508}
cce8749e 8509
cd2b33d0 8510const char *
a72d4945 8511arm_output_epilogue (rtx sibling)
cce8749e 8512{
949d79eb 8513 int reg;
6f7ebcbb 8514 unsigned long saved_regs_mask;
6d3d9133 8515 unsigned long func_type;
c882c7ac
RE
8516 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8517 frame that is $fp + 4 for a non-variadic function. */
8518 int floats_offset = 0;
cce8749e 8519 rtx operands[3];
0977774b 8520 int frame_size = arm_get_frame_size ();
d5b7b3ae 8521 FILE * f = asm_out_file;
6d3d9133 8522 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
5a9335ef 8523 unsigned int lrm_count = 0;
a72d4945 8524 int really_return = (sibling == NULL);
cce8749e 8525
6d3d9133
NC
8526 /* If we have already generated the return instruction
8527 then it is futile to generate anything else. */
a72d4945 8528 if (use_return_insn (FALSE, sibling) && return_used_this_function)
949d79eb 8529 return "";
cce8749e 8530
6d3d9133 8531 func_type = arm_current_func_type ();
d5b7b3ae 8532
6d3d9133
NC
8533 if (IS_NAKED (func_type))
8534 /* Naked functions don't have epilogues. */
8535 return "";
0616531f 8536
6d3d9133 8537 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 8538 {
86efdc8e 8539 rtx op;
6d3d9133
NC
8540
8541 /* A volatile function should never return. Call abort. */
ed0e6530 8542 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 8543 assemble_external_libcall (op);
e2c671ba 8544 output_asm_insn ("bl\t%a0", &op);
6d3d9133 8545
949d79eb 8546 return "";
e2c671ba
RE
8547 }
8548
6d3d9133
NC
8549 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8550 && ! really_return)
8551 /* If we are throwing an exception, then we really must
8552 be doing a return, so we can't tail-call. */
8553 abort ();
8554
6f7ebcbb 8555 saved_regs_mask = arm_compute_save_reg_mask ();
5a9335ef
NC
8556
8557 if (TARGET_IWMMXT)
8558 lrm_count = bit_count (saved_regs_mask);
8559
c882c7ac
RE
8560 /* XXX We should adjust floats_offset for any anonymous args, and then
8561 re-adjust vfp_offset below to compensate. */
8562
6d3d9133 8563 /* Compute how far away the floats will be. */
5a9335ef 8564 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
6f7ebcbb 8565 if (saved_regs_mask & (1 << reg))
6ed30148 8566 floats_offset += 4;
6d3d9133 8567
ff9940b0 8568 if (frame_pointer_needed)
cce8749e 8569 {
c882c7ac
RE
8570 int vfp_offset = 4;
8571
29ad9694 8572 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
b111229a 8573 {
d5b7b3ae 8574 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 8575 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
8576 {
8577 floats_offset += 12;
dd18ae56 8578 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
c882c7ac 8579 reg, FP_REGNUM, floats_offset - vfp_offset);
b111229a
RE
8580 }
8581 }
8582 else
8583 {
d5b7b3ae 8584 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 8585
d5b7b3ae 8586 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 8587 {
5895f793 8588 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
8589 {
8590 floats_offset += 12;
6cfc7210 8591
6354dc9b 8592 /* We can't unstack more than four registers at once. */
b111229a
RE
8593 if (start_reg - reg == 3)
8594 {
dd18ae56 8595 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
c882c7ac 8596 reg, FP_REGNUM, floats_offset - vfp_offset);
b111229a
RE
8597 start_reg = reg - 1;
8598 }
8599 }
8600 else
8601 {
8602 if (reg != start_reg)
dd18ae56
NC
8603 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8604 reg + 1, start_reg - reg,
c882c7ac 8605 FP_REGNUM, floats_offset - vfp_offset);
b111229a
RE
8606 start_reg = reg - 1;
8607 }
8608 }
8609
8610 /* Just in case the last register checked also needs unstacking. */
8611 if (reg != start_reg)
dd18ae56
NC
8612 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8613 reg + 1, start_reg - reg,
c882c7ac 8614 FP_REGNUM, floats_offset - vfp_offset);
b111229a 8615 }
6d3d9133 8616
5a9335ef
NC
8617 if (TARGET_IWMMXT)
8618 {
8619 /* The frame pointer is guaranteed to be non-double-word aligned.
8620 This is because it is set to (old_stack_pointer - 4) and the
8621 old_stack_pointer was double word aligned. Thus the offset to
8622 the iWMMXt registers to be loaded must also be non-double-word
8623 sized, so that the resultant address *is* double-word aligned.
8624 We can ignore floats_offset since that was already included in
8625 the live_regs_mask. */
8626 lrm_count += (lrm_count % 2 ? 2 : 1);
8627
8628 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8629 if (regs_ever_live[reg] && !call_used_regs[reg])
8630 {
8631 asm_fprintf (f, "\twldrd\t%r, [%r, #-%d]\n",
8632 reg, FP_REGNUM, lrm_count * 4);
8633 lrm_count += 2;
8634 }
8635 }
8636
6f7ebcbb 8637 /* saved_regs_mask should contain the IP, which at the time of stack
6d3d9133
NC
8638 frame generation actually contains the old stack pointer. So a
8639 quick way to unwind the stack is just pop the IP register directly
8640 into the stack pointer. */
6f7ebcbb 8641 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
6d3d9133 8642 abort ();
6f7ebcbb
NC
8643 saved_regs_mask &= ~ (1 << IP_REGNUM);
8644 saved_regs_mask |= (1 << SP_REGNUM);
6d3d9133 8645
6f7ebcbb 8646 /* There are two registers left in saved_regs_mask - LR and PC. We
6d3d9133
NC
8647 only need to restore the LR register (the return address), but to
8648 save time we can load it directly into the PC, unless we need a
8649 special function exit sequence, or we are not really returning. */
8650 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8651 /* Delete the LR from the register mask, so that the LR on
8652 the stack is loaded into the PC in the register mask. */
6f7ebcbb 8653 saved_regs_mask &= ~ (1 << LR_REGNUM);
b111229a 8654 else
6f7ebcbb 8655 saved_regs_mask &= ~ (1 << PC_REGNUM);
efc2515b
RE
8656
8657 /* We must use SP as the base register, because SP is one of the
8658 registers being restored. If an interrupt or page fault
8659 happens in the ldm instruction, the SP might or might not
8660 have been restored. That would be bad, as then SP will no
8661 longer indicate the safe area of stack, and we can get stack
8662 corruption. Using SP as the base register means that it will
8663 be reset correctly to the original value, should an interrupt
699a4925
RE
8664 occur. If the stack pointer already points at the right
8665 place, then omit the subtraction. */
8666 if (((frame_size + current_function_outgoing_args_size + floats_offset)
8667 != 4 * (1 + (int) bit_count (saved_regs_mask)))
8668 || current_function_calls_alloca)
8669 asm_fprintf (f, "\tsub\t%r, %r, #%d\n", SP_REGNUM, FP_REGNUM,
8670 4 * bit_count (saved_regs_mask));
efc2515b 8671 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
7b8b8ade
NC
8672
8673 if (IS_INTERRUPT (func_type))
8674 /* Interrupt handlers will have pushed the
8675 IP onto the stack, so restore it now. */
f55d7103 8676 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
cce8749e
CH
8677 }
8678 else
8679 {
d2288d8d 8680 /* Restore stack pointer if necessary. */
56636818 8681 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
8682 {
8683 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
8684 operands[2] = GEN_INT (frame_size
8685 + current_function_outgoing_args_size);
d2288d8d
TG
8686 output_add_immediate (operands);
8687 }
8688
29ad9694 8689 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
b111229a 8690 {
d5b7b3ae 8691 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 8692 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
8693 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8694 reg, SP_REGNUM);
b111229a
RE
8695 }
8696 else
8697 {
d5b7b3ae 8698 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 8699
d5b7b3ae 8700 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 8701 {
5895f793 8702 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
8703 {
8704 if (reg - start_reg == 3)
8705 {
dd18ae56
NC
8706 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8707 start_reg, SP_REGNUM);
b111229a
RE
8708 start_reg = reg + 1;
8709 }
8710 }
8711 else
8712 {
8713 if (reg != start_reg)
dd18ae56
NC
8714 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8715 start_reg, reg - start_reg,
8716 SP_REGNUM);
6cfc7210 8717
b111229a
RE
8718 start_reg = reg + 1;
8719 }
8720 }
8721
8722 /* Just in case the last register checked also needs unstacking. */
8723 if (reg != start_reg)
dd18ae56
NC
8724 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8725 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
8726 }
8727
5a9335ef
NC
8728 if (TARGET_IWMMXT)
8729 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8730 if (regs_ever_live[reg] && !call_used_regs[reg])
8731 asm_fprintf (f, "\twldrd\t%r, [%r, #+8]!\n", reg, SP_REGNUM);
8732
6d3d9133
NC
8733 /* If we can, restore the LR into the PC. */
8734 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8735 && really_return
8736 && current_function_pretend_args_size == 0
6f7ebcbb 8737 && saved_regs_mask & (1 << LR_REGNUM))
cce8749e 8738 {
6f7ebcbb
NC
8739 saved_regs_mask &= ~ (1 << LR_REGNUM);
8740 saved_regs_mask |= (1 << PC_REGNUM);
6d3d9133 8741 }
d5b7b3ae 8742
6d3d9133
NC
8743 /* Load the registers off the stack. If we only have one register
8744 to load use the LDR instruction - it is faster. */
6f7ebcbb 8745 if (saved_regs_mask == (1 << LR_REGNUM))
6d3d9133 8746 {
f4864588 8747 /* The exception handler ignores the LR, so we do
6d3d9133
NC
8748 not really need to load it off the stack. */
8749 if (eh_ofs)
8750 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
32de079a 8751 else
6d3d9133 8752 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
cce8749e 8753 }
6f7ebcbb 8754 else if (saved_regs_mask)
f1acdf8b
NC
8755 {
8756 if (saved_regs_mask & (1 << SP_REGNUM))
8757 /* Note - write back to the stack register is not enabled
8758 (ie "ldmfd sp!..."). We know that the stack pointer is
8759 in the list of registers and if we add writeback the
8760 instruction becomes UNPREDICTABLE. */
8761 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8762 else
8763 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8764 }
6d3d9133
NC
8765
8766 if (current_function_pretend_args_size)
cce8749e 8767 {
6d3d9133
NC
8768 /* Unwind the pre-pushed regs. */
8769 operands[0] = operands[1] = stack_pointer_rtx;
8770 operands[2] = GEN_INT (current_function_pretend_args_size);
8771 output_add_immediate (operands);
8772 }
8773 }
32de079a 8774
f4864588
PB
8775 if (! really_return
8776 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8777 && current_function_pretend_args_size == 0
8778 && saved_regs_mask & (1 << PC_REGNUM)))
6d3d9133 8779 return "";
d5b7b3ae 8780
6d3d9133
NC
8781 /* Generate the return instruction. */
8782 switch ((int) ARM_FUNC_TYPE (func_type))
8783 {
8784 case ARM_FT_EXCEPTION_HANDLER:
8785 /* Even in 26-bit mode we do a mov (rather than a movs)
8786 because we don't have the PSR bits set in the address. */
8787 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8788 break;
0616531f 8789
6d3d9133
NC
8790 case ARM_FT_ISR:
8791 case ARM_FT_FIQ:
8792 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8793 break;
8794
8795 case ARM_FT_EXCEPTION:
8796 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8797 break;
8798
8799 case ARM_FT_INTERWORKED:
8800 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8801 break;
8802
8803 default:
8804 if (frame_pointer_needed)
6bc82793 8805 /* If we used the frame pointer then the return address
6d3d9133
NC
8806 will have been loaded off the stack directly into the
8807 PC, so there is no need to issue a MOV instruction
8808 here. */
8809 ;
8810 else if (current_function_pretend_args_size == 0
6f7ebcbb 8811 && (saved_regs_mask & (1 << LR_REGNUM)))
6d3d9133
NC
8812 /* Similarly we may have been able to load LR into the PC
8813 even if we did not create a stack frame. */
8814 ;
8815 else if (TARGET_APCS_32)
8816 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8817 else
8818 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8819 break;
cce8749e 8820 }
f3bb6135 8821
949d79eb
RE
8822 return "";
8823}
8824
08c148a8 8825static void
e32bac5b
RE
8826arm_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
8827 HOST_WIDE_INT frame_size)
949d79eb 8828{
d5b7b3ae
RE
8829 if (TARGET_THUMB)
8830 {
8831 /* ??? Probably not safe to set this here, since it assumes that a
8832 function will be emitted as assembly immediately after we generate
8833 RTL for it. This does not happen for inline functions. */
8834 return_used_this_function = 0;
8835 }
8836 else
8837 {
0977774b
JT
8838 /* We need to take into account any stack-frame rounding. */
8839 frame_size = arm_get_frame_size ();
8840
a72d4945 8841 if (use_return_insn (FALSE, NULL)
d5b7b3ae
RE
8842 && return_used_this_function
8843 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 8844 && !frame_pointer_needed)
d5b7b3ae 8845 abort ();
f3bb6135 8846
d5b7b3ae 8847 /* Reset the ARM-specific per-function variables. */
d5b7b3ae
RE
8848 after_arm_reorg = 0;
8849 }
f3bb6135 8850}
e2c671ba 8851
2c849145
JM
8852/* Generate and emit an insn that we will recognize as a push_multi.
8853 Unfortunately, since this insn does not reflect very well the actual
8854 semantics of the operation, we need to annotate the insn for the benefit
8855 of DWARF2 frame unwind information. */
2c849145 8856static rtx
e32bac5b 8857emit_multi_reg_push (int mask)
e2c671ba
RE
8858{
8859 int num_regs = 0;
9b598fa0 8860 int num_dwarf_regs;
e2c671ba
RE
8861 int i, j;
8862 rtx par;
2c849145 8863 rtx dwarf;
87e27392 8864 int dwarf_par_index;
2c849145 8865 rtx tmp, reg;
e2c671ba 8866
d5b7b3ae 8867 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 8868 if (mask & (1 << i))
5895f793 8869 num_regs++;
e2c671ba
RE
8870
8871 if (num_regs == 0 || num_regs > 16)
8872 abort ();
8873
9b598fa0
RE
8874 /* We don't record the PC in the dwarf frame information. */
8875 num_dwarf_regs = num_regs;
8876 if (mask & (1 << PC_REGNUM))
8877 num_dwarf_regs--;
8878
87e27392 8879 /* For the body of the insn we are going to generate an UNSPEC in
05713b80 8880 parallel with several USEs. This allows the insn to be recognized
87e27392
NC
8881 by the push_multi pattern in the arm.md file. The insn looks
8882 something like this:
8883
8884 (parallel [
b15bca31
RE
8885 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8886 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
87e27392
NC
8887 (use (reg:SI 11 fp))
8888 (use (reg:SI 12 ip))
8889 (use (reg:SI 14 lr))
8890 (use (reg:SI 15 pc))
8891 ])
8892
8893 For the frame note however, we try to be more explicit and actually
8894 show each register being stored into the stack frame, plus a (single)
8895 decrement of the stack pointer. We do it this way in order to be
8896 friendly to the stack unwinding code, which only wants to see a single
8897 stack decrement per instruction. The RTL we generate for the note looks
8898 something like this:
8899
8900 (sequence [
8901 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8902 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8903 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8904 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8905 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
87e27392
NC
8906 ])
8907
8908 This sequence is used both by the code to support stack unwinding for
8909 exceptions handlers and the code to generate dwarf2 frame debugging. */
8910
43cffd11 8911 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
9b598fa0 8912 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
87e27392 8913 dwarf_par_index = 1;
e2c671ba 8914
d5b7b3ae 8915 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba
RE
8916 {
8917 if (mask & (1 << i))
8918 {
2c849145
JM
8919 reg = gen_rtx_REG (SImode, i);
8920
e2c671ba 8921 XVECEXP (par, 0, 0)
43cffd11
RE
8922 = gen_rtx_SET (VOIDmode,
8923 gen_rtx_MEM (BLKmode,
8924 gen_rtx_PRE_DEC (BLKmode,
8925 stack_pointer_rtx)),
8926 gen_rtx_UNSPEC (BLKmode,
2c849145 8927 gen_rtvec (1, reg),
9b598fa0 8928 UNSPEC_PUSH_MULT));
2c849145 8929
9b598fa0
RE
8930 if (i != PC_REGNUM)
8931 {
8932 tmp = gen_rtx_SET (VOIDmode,
8933 gen_rtx_MEM (SImode, stack_pointer_rtx),
8934 reg);
8935 RTX_FRAME_RELATED_P (tmp) = 1;
8936 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8937 dwarf_par_index++;
8938 }
2c849145 8939
e2c671ba
RE
8940 break;
8941 }
8942 }
8943
8944 for (j = 1, i++; j < num_regs; i++)
8945 {
8946 if (mask & (1 << i))
8947 {
2c849145
JM
8948 reg = gen_rtx_REG (SImode, i);
8949
8950 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8951
9b598fa0
RE
8952 if (i != PC_REGNUM)
8953 {
8954 tmp = gen_rtx_SET (VOIDmode,
8955 gen_rtx_MEM (SImode,
8956 plus_constant (stack_pointer_rtx,
8957 4 * j)),
8958 reg);
8959 RTX_FRAME_RELATED_P (tmp) = 1;
8960 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8961 }
8962
e2c671ba
RE
8963 j++;
8964 }
8965 }
b111229a 8966
2c849145 8967 par = emit_insn (par);
87e27392
NC
8968
8969 tmp = gen_rtx_SET (SImode,
8970 stack_pointer_rtx,
8971 gen_rtx_PLUS (SImode,
8972 stack_pointer_rtx,
8973 GEN_INT (-4 * num_regs)));
8974 RTX_FRAME_RELATED_P (tmp) = 1;
8975 XVECEXP (dwarf, 0, 0) = tmp;
8976
2c849145
JM
8977 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8978 REG_NOTES (par));
8979 return par;
b111229a
RE
8980}
8981
2c849145 8982static rtx
e32bac5b 8983emit_sfm (int base_reg, int count)
b111229a
RE
8984{
8985 rtx par;
2c849145
JM
8986 rtx dwarf;
8987 rtx tmp, reg;
b111229a
RE
8988 int i;
8989
43cffd11 8990 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145 8991 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
8992
8993 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
8994
8995 XVECEXP (par, 0, 0)
8996 = gen_rtx_SET (VOIDmode,
8997 gen_rtx_MEM (BLKmode,
8998 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8999 gen_rtx_UNSPEC (BLKmode,
2c849145 9000 gen_rtvec (1, reg),
b15bca31 9001 UNSPEC_PUSH_MULT));
2c849145
JM
9002 tmp
9003 = gen_rtx_SET (VOIDmode,
9004 gen_rtx_MEM (XFmode,
9005 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
9006 reg);
9007 RTX_FRAME_RELATED_P (tmp) = 1;
9008 XVECEXP (dwarf, 0, count - 1) = tmp;
9009
b111229a 9010 for (i = 1; i < count; i++)
2c849145
JM
9011 {
9012 reg = gen_rtx_REG (XFmode, base_reg++);
9013 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
9014
9015 tmp = gen_rtx_SET (VOIDmode,
9016 gen_rtx_MEM (XFmode,
9017 gen_rtx_PRE_DEC (BLKmode,
9018 stack_pointer_rtx)),
9019 reg);
9020 RTX_FRAME_RELATED_P (tmp) = 1;
9021 XVECEXP (dwarf, 0, count - i - 1) = tmp;
9022 }
b111229a 9023
2c849145
JM
9024 par = emit_insn (par);
9025 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
9026 REG_NOTES (par));
9027 return par;
e2c671ba
RE
9028}
9029
095bb276
NC
9030/* Compute the distance from register FROM to register TO.
9031 These can be the arg pointer (26), the soft frame pointer (25),
9032 the stack pointer (13) or the hard frame pointer (11).
9033 Typical stack layout looks like this:
9034
9035 old stack pointer -> | |
9036 ----
9037 | | \
9038 | | saved arguments for
9039 | | vararg functions
9040 | | /
9041 --
9042 hard FP & arg pointer -> | | \
9043 | | stack
9044 | | frame
9045 | | /
9046 --
9047 | | \
9048 | | call saved
9049 | | registers
9050 soft frame pointer -> | | /
9051 --
9052 | | \
9053 | | local
9054 | | variables
9055 | | /
9056 --
9057 | | \
9058 | | outgoing
9059 | | arguments
9060 current stack pointer -> | | /
9061 --
9062
43aa4e05 9063 For a given function some or all of these stack components
095bb276
NC
9064 may not be needed, giving rise to the possibility of
9065 eliminating some of the registers.
9066
825dda42 9067 The values returned by this function must reflect the behavior
095bb276
NC
9068 of arm_expand_prologue() and arm_compute_save_reg_mask().
9069
9070 The sign of the number returned reflects the direction of stack
9071 growth, so the values are positive for all eliminations except
9072 from the soft frame pointer to the hard frame pointer. */
095bb276 9073unsigned int
e32bac5b 9074arm_compute_initial_elimination_offset (unsigned int from, unsigned int to)
095bb276 9075{
0977774b 9076 unsigned int local_vars = arm_get_frame_size ();
095bb276
NC
9077 unsigned int outgoing_args = current_function_outgoing_args_size;
9078 unsigned int stack_frame;
9079 unsigned int call_saved_registers;
9080 unsigned long func_type;
9081
9082 func_type = arm_current_func_type ();
9083
9084 /* Volatile functions never return, so there is
9085 no need to save call saved registers. */
9086 call_saved_registers = 0;
9087 if (! IS_VOLATILE (func_type))
9088 {
121308d4 9089 unsigned int reg_mask;
095bb276
NC
9090 unsigned int reg;
9091
1d6e90ac 9092 /* Make sure that we compute which registers will be saved
121308d4 9093 on the stack using the same algorithm that is used by
5a9335ef
NC
9094 the prologue creation code. */
9095 reg_mask = arm_compute_save_reg_mask ();
095bb276 9096
121308d4 9097 /* Now count the number of bits set in save_reg_mask.
5a9335ef
NC
9098 If we have already counted the registers in the stack
9099 frame, do not count them again. Non call-saved registers
9100 might be saved in the call-save area of the stack, if
9101 doing so will preserve the stack's alignment. Hence we
9102 must count them here. For each set bit we need 4 bytes
9103 of stack space. */
9104 if (frame_pointer_needed)
9105 reg_mask &= 0x07ff;
9106 call_saved_registers += 4 * bit_count (reg_mask);
ef7112de
NC
9107
9108 /* If the hard floating point registers are going to be
9109 used then they must be saved on the stack as well.
9110 Each register occupies 12 bytes of stack space. */
5a9335ef 9111 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
ef7112de
NC
9112 if (regs_ever_live[reg] && ! call_used_regs[reg])
9113 call_saved_registers += 12;
5a9335ef
NC
9114
9115 if (TARGET_REALLY_IWMMXT)
9116 /* Check for the call-saved iWMMXt registers. */
9117 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
9118 if (regs_ever_live[reg] && ! call_used_regs [reg])
9119 call_saved_registers += 8;
095bb276
NC
9120 }
9121
9122 /* The stack frame contains 4 registers - the old frame pointer,
9123 the old stack pointer, the return address and PC of the start
9124 of the function. */
9125 stack_frame = frame_pointer_needed ? 16 : 0;
9126
095bb276
NC
9127 /* OK, now we have enough information to compute the distances.
9128 There must be an entry in these switch tables for each pair
9129 of registers in ELIMINABLE_REGS, even if some of the entries
9130 seem to be redundant or useless. */
9131 switch (from)
9132 {
9133 case ARG_POINTER_REGNUM:
9134 switch (to)
9135 {
9136 case THUMB_HARD_FRAME_POINTER_REGNUM:
9137 return 0;
9138
9139 case FRAME_POINTER_REGNUM:
9140 /* This is the reverse of the soft frame pointer
9141 to hard frame pointer elimination below. */
9142 if (call_saved_registers == 0 && stack_frame == 0)
9143 return 0;
9144 return (call_saved_registers + stack_frame - 4);
9145
9146 case ARM_HARD_FRAME_POINTER_REGNUM:
9147 /* If there is no stack frame then the hard
9148 frame pointer and the arg pointer coincide. */
9149 if (stack_frame == 0 && call_saved_registers != 0)
9150 return 0;
9151 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
9152 return (frame_pointer_needed
9153 && current_function_needs_context
3cb66fd7 9154 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
095bb276
NC
9155
9156 case STACK_POINTER_REGNUM:
9157 /* If nothing has been pushed on the stack at all
9158 then this will return -4. This *is* correct! */
9159 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
9160
9161 default:
9162 abort ();
9163 }
9164 break;
9165
9166 case FRAME_POINTER_REGNUM:
9167 switch (to)
9168 {
9169 case THUMB_HARD_FRAME_POINTER_REGNUM:
9170 return 0;
9171
9172 case ARM_HARD_FRAME_POINTER_REGNUM:
9173 /* The hard frame pointer points to the top entry in the
9174 stack frame. The soft frame pointer to the bottom entry
9175 in the stack frame. If there is no stack frame at all,
9176 then they are identical. */
9177 if (call_saved_registers == 0 && stack_frame == 0)
9178 return 0;
9179 return - (call_saved_registers + stack_frame - 4);
9180
9181 case STACK_POINTER_REGNUM:
9182 return local_vars + outgoing_args;
9183
9184 default:
9185 abort ();
9186 }
9187 break;
9188
9189 default:
9190 /* You cannot eliminate from the stack pointer.
9191 In theory you could eliminate from the hard frame
9192 pointer to the stack pointer, but this will never
9193 happen, since if a stack frame is not needed the
9194 hard frame pointer will never be used. */
9195 abort ();
9196 }
9197}
9198
0977774b
JT
9199/* Calculate the size of the stack frame, taking into account any
9200 padding that is required to ensure stack-alignment. */
0977774b 9201HOST_WIDE_INT
e32bac5b 9202arm_get_frame_size (void)
0977774b
JT
9203{
9204 int regno;
9205
0c2ca901 9206 int base_size = ROUND_UP_WORD (get_frame_size ());
0977774b
JT
9207 int entry_size = 0;
9208 unsigned long func_type = arm_current_func_type ();
c231c91e 9209 int leaf;
0977774b
JT
9210
9211 if (! TARGET_ARM)
9212 abort();
9213
9214 if (! TARGET_ATPCS)
9215 return base_size;
9216
c231c91e
RE
9217 /* We need to know if we are a leaf function. Unfortunately, it
9218 is possible to be called after start_sequence has been called,
9219 which causes get_insns to return the insns for the sequence,
9220 not the function, which will cause leaf_function_p to return
9221 the incorrect result.
9222
9223 To work around this, we cache the computed frame size. This
9224 works because we will only be calling RTL expanders that need
9225 to know about leaf functions once reload has completed, and the
9226 frame size cannot be changed after that time, so we can safely
9227 use the cached value. */
9228
9229 if (reload_completed)
9230 return cfun->machine->frame_size;
9231
9232 leaf = leaf_function_p ();
9233
9234 /* A leaf function does not need any stack alignment if it has nothing
9235 on the stack. */
9236 if (leaf && base_size == 0)
9237 {
9238 cfun->machine->frame_size = 0;
9239 return 0;
9240 }
9241
0977774b
JT
9242 /* We know that SP will be word aligned on entry, and we must
9243 preserve that condition at any subroutine call. But those are
9244 the only constraints. */
9245
9246 /* Space for variadic functions. */
9247 if (current_function_pretend_args_size)
9248 entry_size += current_function_pretend_args_size;
9249
9250 /* Space for saved registers. */
9251 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
9252
9253 /* Space for saved FPA registers. */
9254 if (! IS_VOLATILE (func_type))
9255 {
9256 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
9257 if (regs_ever_live[regno] && ! call_used_regs[regno])
9258 entry_size += 12;
9259 }
9260
5a9335ef
NC
9261 if (TARGET_REALLY_IWMMXT)
9262 {
9263 /* Check for the call-saved iWMMXt registers. */
9264 for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
9265 if (regs_ever_live [regno] && ! call_used_regs [regno])
9266 entry_size += 8;
9267 }
9268
0977774b
JT
9269 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9270 base_size += 4;
9271 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9272 abort ();
9273
c231c91e
RE
9274 cfun->machine->frame_size = base_size;
9275
0977774b
JT
9276 return base_size;
9277}
9278
6d3d9133 9279/* Generate the prologue instructions for entry into an ARM function. */
e2c671ba 9280void
e32bac5b 9281arm_expand_prologue (void)
e2c671ba
RE
9282{
9283 int reg;
6d3d9133 9284 rtx amount;
2c849145 9285 rtx insn;
68dfd979 9286 rtx ip_rtx;
6d3d9133
NC
9287 unsigned long live_regs_mask;
9288 unsigned long func_type;
68dfd979 9289 int fp_offset = 0;
095bb276
NC
9290 int saved_pretend_args = 0;
9291 unsigned int args_to_push;
d3236b4d 9292
6d3d9133 9293 func_type = arm_current_func_type ();
e2c671ba 9294
31fdb4d5 9295 /* Naked functions don't have prologues. */
6d3d9133 9296 if (IS_NAKED (func_type))
31fdb4d5
DE
9297 return;
9298
095bb276
NC
9299 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
9300 args_to_push = current_function_pretend_args_size;
9301
6d3d9133
NC
9302 /* Compute which register we will have to save onto the stack. */
9303 live_regs_mask = arm_compute_save_reg_mask ();
e2c671ba 9304
68dfd979 9305 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
d3236b4d 9306
e2c671ba
RE
9307 if (frame_pointer_needed)
9308 {
7b8b8ade
NC
9309 if (IS_INTERRUPT (func_type))
9310 {
9311 /* Interrupt functions must not corrupt any registers.
9312 Creating a frame pointer however, corrupts the IP
9313 register, so we must push it first. */
9314 insn = emit_multi_reg_push (1 << IP_REGNUM);
121308d4
NC
9315
9316 /* Do not set RTX_FRAME_RELATED_P on this insn.
9317 The dwarf stack unwinding code only wants to see one
9318 stack decrement per function, and this is not it. If
9319 this instruction is labeled as being part of the frame
9320 creation sequence then dwarf2out_frame_debug_expr will
9321 abort when it encounters the assignment of IP to FP
9322 later on, since the use of SP here establishes SP as
9323 the CFA register and not IP.
9324
9325 Anyway this instruction is not really part of the stack
9326 frame creation although it is part of the prologue. */
7b8b8ade
NC
9327 }
9328 else if (IS_NESTED (func_type))
68dfd979
NC
9329 {
9330 /* The Static chain register is the same as the IP register
9331 used as a scratch register during stack frame creation.
9332 To get around this need to find somewhere to store IP
9333 whilst the frame is being created. We try the following
9334 places in order:
9335
6d3d9133 9336 1. The last argument register.
68dfd979
NC
9337 2. A slot on the stack above the frame. (This only
9338 works if the function is not a varargs function).
095bb276
NC
9339 3. Register r3, after pushing the argument registers
9340 onto the stack.
6d3d9133 9341
34ce3d7b
JM
9342 Note - we only need to tell the dwarf2 backend about the SP
9343 adjustment in the second variant; the static chain register
9344 doesn't need to be unwound, as it doesn't contain a value
9345 inherited from the caller. */
d3236b4d 9346
68dfd979
NC
9347 if (regs_ever_live[3] == 0)
9348 {
9349 insn = gen_rtx_REG (SImode, 3);
9350 insn = gen_rtx_SET (SImode, insn, ip_rtx);
d3236b4d 9351 insn = emit_insn (insn);
68dfd979 9352 }
095bb276 9353 else if (args_to_push == 0)
68dfd979 9354 {
34ce3d7b 9355 rtx dwarf;
68dfd979
NC
9356 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
9357 insn = gen_rtx_MEM (SImode, insn);
9358 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
9359 insn = emit_insn (insn);
34ce3d7b 9360
68dfd979 9361 fp_offset = 4;
34ce3d7b
JM
9362
9363 /* Just tell the dwarf backend that we adjusted SP. */
9364 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
9365 gen_rtx_PLUS (SImode, stack_pointer_rtx,
9366 GEN_INT (-fp_offset)));
9367 RTX_FRAME_RELATED_P (insn) = 1;
9368 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9369 dwarf, REG_NOTES (insn));
68dfd979
NC
9370 }
9371 else
095bb276
NC
9372 {
9373 /* Store the args on the stack. */
3cb66fd7 9374 if (cfun->machine->uses_anonymous_args)
095bb276
NC
9375 insn = emit_multi_reg_push
9376 ((0xf0 >> (args_to_push / 4)) & 0xf);
9377 else
9378 insn = emit_insn
9379 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9380 GEN_INT (- args_to_push)));
9381
9382 RTX_FRAME_RELATED_P (insn) = 1;
9383
9384 saved_pretend_args = 1;
9385 fp_offset = args_to_push;
9386 args_to_push = 0;
9387
9388 /* Now reuse r3 to preserve IP. */
9389 insn = gen_rtx_REG (SImode, 3);
9390 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9391 (void) emit_insn (insn);
9392 }
68dfd979
NC
9393 }
9394
68dfd979
NC
9395 if (fp_offset)
9396 {
9397 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
9398 insn = gen_rtx_SET (SImode, ip_rtx, insn);
9399 }
9400 else
9401 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
9402
6d3d9133 9403 insn = emit_insn (insn);
8e56560e 9404 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
9405 }
9406
095bb276 9407 if (args_to_push)
e2c671ba 9408 {
6d3d9133 9409 /* Push the argument registers, or reserve space for them. */
3cb66fd7 9410 if (cfun->machine->uses_anonymous_args)
2c849145 9411 insn = emit_multi_reg_push
095bb276 9412 ((0xf0 >> (args_to_push / 4)) & 0xf);
e2c671ba 9413 else
2c849145
JM
9414 insn = emit_insn
9415 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
095bb276 9416 GEN_INT (- args_to_push)));
2c849145 9417 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
9418 }
9419
06bea5aa
NC
9420 /* If this is an interrupt service routine, and the link register
9421 is going to be pushed, and we are not creating a stack frame,
9422 (which would involve an extra push of IP and a pop in the epilogue)
9423 subtracting four from LR now will mean that the function return
9424 can be done with a single instruction. */
3a7731fd 9425 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
06bea5aa
NC
9426 && (live_regs_mask & (1 << LR_REGNUM)) != 0
9427 && ! frame_pointer_needed)
9428 emit_insn (gen_rtx_SET (SImode,
9429 gen_rtx_REG (SImode, LR_REGNUM),
9430 gen_rtx_PLUS (SImode,
9431 gen_rtx_REG (SImode, LR_REGNUM),
9432 GEN_INT (-4))));
3a7731fd 9433
e2c671ba
RE
9434 if (live_regs_mask)
9435 {
2c849145
JM
9436 insn = emit_multi_reg_push (live_regs_mask);
9437 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba 9438 }
d5b7b3ae 9439
5a9335ef
NC
9440 if (TARGET_IWMMXT)
9441 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
9442 if (regs_ever_live[reg] && ! call_used_regs [reg])
9443 {
9444 insn = gen_rtx_PRE_DEC (V2SImode, stack_pointer_rtx);
9445 insn = gen_rtx_MEM (V2SImode, insn);
9446 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9447 gen_rtx_REG (V2SImode, reg)));
9448 RTX_FRAME_RELATED_P (insn) = 1;
9449 }
9450
6d3d9133 9451 if (! IS_VOLATILE (func_type))
b111229a 9452 {
29ad9694
RE
9453 /* Save any floating point call-saved registers used by this
9454 function. */
9455 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
b111229a 9456 {
29ad9694 9457 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 9458 if (regs_ever_live[reg] && !call_used_regs[reg])
2c849145
JM
9459 {
9460 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
9461 insn = gen_rtx_MEM (XFmode, insn);
9462 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9463 gen_rtx_REG (XFmode, reg)));
9464 RTX_FRAME_RELATED_P (insn) = 1;
9465 }
b111229a
RE
9466 }
9467 else
9468 {
d5b7b3ae 9469 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 9470
29ad9694 9471 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 9472 {
5895f793 9473 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
9474 {
9475 if (start_reg - reg == 3)
9476 {
2c849145
JM
9477 insn = emit_sfm (reg, 4);
9478 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
9479 start_reg = reg - 1;
9480 }
9481 }
9482 else
9483 {
9484 if (start_reg != reg)
2c849145
JM
9485 {
9486 insn = emit_sfm (reg + 1, start_reg - reg);
9487 RTX_FRAME_RELATED_P (insn) = 1;
9488 }
b111229a
RE
9489 start_reg = reg - 1;
9490 }
9491 }
9492
9493 if (start_reg != reg)
2c849145
JM
9494 {
9495 insn = emit_sfm (reg + 1, start_reg - reg);
9496 RTX_FRAME_RELATED_P (insn) = 1;
9497 }
b111229a
RE
9498 }
9499 }
e2c671ba
RE
9500
9501 if (frame_pointer_needed)
2c849145 9502 {
6d3d9133 9503 /* Create the new frame pointer. */
095bb276 9504 insn = GEN_INT (-(4 + args_to_push + fp_offset));
68dfd979 9505 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
2c849145 9506 RTX_FRAME_RELATED_P (insn) = 1;
68dfd979 9507
6d3d9133 9508 if (IS_NESTED (func_type))
68dfd979
NC
9509 {
9510 /* Recover the static chain register. */
095bb276
NC
9511 if (regs_ever_live [3] == 0
9512 || saved_pretend_args)
1d6e90ac 9513 insn = gen_rtx_REG (SImode, 3);
68dfd979
NC
9514 else /* if (current_function_pretend_args_size == 0) */
9515 {
29ad9694
RE
9516 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx,
9517 GEN_INT (4));
68dfd979 9518 insn = gen_rtx_MEM (SImode, insn);
68dfd979 9519 }
1d6e90ac 9520
c14a3a45
NC
9521 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9522 /* Add a USE to stop propagate_one_insn() from barfing. */
6bacc7b0 9523 emit_insn (gen_prologue_use (ip_rtx));
68dfd979 9524 }
2c849145 9525 }
e2c671ba 9526
0977774b 9527 amount = GEN_INT (-(arm_get_frame_size ()
6d3d9133
NC
9528 + current_function_outgoing_args_size));
9529
e2c671ba
RE
9530 if (amount != const0_rtx)
9531 {
745b9093
JM
9532 /* This add can produce multiple insns for a large constant, so we
9533 need to get tricky. */
9534 rtx last = get_last_insn ();
2c849145
JM
9535 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9536 amount));
745b9093
JM
9537 do
9538 {
9539 last = last ? NEXT_INSN (last) : get_insns ();
9540 RTX_FRAME_RELATED_P (last) = 1;
9541 }
9542 while (last != insn);
e04c2d6c
RE
9543
9544 /* If the frame pointer is needed, emit a special barrier that
9545 will prevent the scheduler from moving stores to the frame
9546 before the stack adjustment. */
9547 if (frame_pointer_needed)
3894f59e
RE
9548 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9549 hard_frame_pointer_rtx));
e2c671ba
RE
9550 }
9551
9552 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
9553 the call to mcount. Similarly if the user has requested no
9554 scheduling in the prolog. */
70f4f91c 9555 if (current_function_profile || TARGET_NO_SCHED_PRO)
e2c671ba 9556 emit_insn (gen_blockage ());
6f7ebcbb
NC
9557
9558 /* If the link register is being kept alive, with the return address in it,
9559 then make sure that it does not get reused by the ce2 pass. */
9560 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9561 {
6bacc7b0 9562 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
6f7ebcbb
NC
9563 cfun->machine->lr_save_eliminated = 1;
9564 }
e2c671ba 9565}
cce8749e 9566\f
9997d19d
RE
9567/* If CODE is 'd', then the X is a condition operand and the instruction
9568 should only be executed if the condition is true.
ddd5a7c1 9569 if CODE is 'D', then the X is a condition operand and the instruction
9997d19d
RE
9570 should only be executed if the condition is false: however, if the mode
9571 of the comparison is CCFPEmode, then always execute the instruction -- we
9572 do this because in these circumstances !GE does not necessarily imply LT;
9573 in these cases the instruction pattern will take care to make sure that
9574 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 9575 doing this instruction unconditionally.
9997d19d
RE
9576 If CODE is 'N' then X is a floating point operand that must be negated
9577 before output.
9578 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9579 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
9997d19d 9580void
e32bac5b 9581arm_print_operand (FILE *stream, rtx x, int code)
9997d19d
RE
9582{
9583 switch (code)
9584 {
9585 case '@':
f3139301 9586 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
9587 return;
9588
d5b7b3ae
RE
9589 case '_':
9590 fputs (user_label_prefix, stream);
9591 return;
9592
9997d19d 9593 case '|':
f3139301 9594 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
9595 return;
9596
9597 case '?':
9598 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
9599 {
9600 if (TARGET_THUMB || current_insn_predicate != NULL)
9601 abort ();
9602
9603 fputs (arm_condition_codes[arm_current_cc], stream);
9604 }
9605 else if (current_insn_predicate)
9606 {
9607 enum arm_cond_code code;
9608
9609 if (TARGET_THUMB)
9610 abort ();
9611
9612 code = get_arm_condition_code (current_insn_predicate);
9613 fputs (arm_condition_codes[code], stream);
9614 }
9997d19d
RE
9615 return;
9616
9617 case 'N':
9618 {
9619 REAL_VALUE_TYPE r;
9620 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9621 r = REAL_VALUE_NEGATE (r);
9622 fprintf (stream, "%s", fp_const_from_val (&r));
9623 }
9624 return;
9625
9626 case 'B':
9627 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
9628 {
9629 HOST_WIDE_INT val;
5895f793 9630 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 9631 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 9632 }
9997d19d
RE
9633 else
9634 {
9635 putc ('~', stream);
9636 output_addr_const (stream, x);
9637 }
9638 return;
9639
9640 case 'i':
9641 fprintf (stream, "%s", arithmetic_instr (x, 1));
9642 return;
9643
9b6b54e2
NC
9644 /* Truncate Cirrus shift counts. */
9645 case 's':
9646 if (GET_CODE (x) == CONST_INT)
9647 {
9648 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
9649 return;
9650 }
9651 arm_print_operand (stream, x, 0);
9652 return;
9653
9997d19d
RE
9654 case 'I':
9655 fprintf (stream, "%s", arithmetic_instr (x, 0));
9656 return;
9657
9658 case 'S':
9659 {
9660 HOST_WIDE_INT val;
5895f793 9661 const char * shift = shift_op (x, &val);
9997d19d 9662
e2c671ba
RE
9663 if (shift)
9664 {
5895f793 9665 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
9666 if (val == -1)
9667 arm_print_operand (stream, XEXP (x, 1), 0);
9668 else
4a0a75dd 9669 fprintf (stream, "#" HOST_WIDE_INT_PRINT_DEC, val);
e2c671ba 9670 }
9997d19d
RE
9671 }
9672 return;
9673
d5b7b3ae
RE
9674 /* An explanation of the 'Q', 'R' and 'H' register operands:
9675
9676 In a pair of registers containing a DI or DF value the 'Q'
9677 operand returns the register number of the register containing
093354e0 9678 the least significant part of the value. The 'R' operand returns
d5b7b3ae
RE
9679 the register number of the register containing the most
9680 significant part of the value.
9681
9682 The 'H' operand returns the higher of the two register numbers.
9683 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
093354e0 9684 same as the 'Q' operand, since the most significant part of the
d5b7b3ae
RE
9685 value is held in the lower number register. The reverse is true
9686 on systems where WORDS_BIG_ENDIAN is false.
9687
9688 The purpose of these operands is to distinguish between cases
9689 where the endian-ness of the values is important (for example
9690 when they are added together), and cases where the endian-ness
9691 is irrelevant, but the order of register operations is important.
9692 For example when loading a value from memory into a register
9693 pair, the endian-ness does not matter. Provided that the value
9694 from the lower memory address is put into the lower numbered
9695 register, and the value from the higher address is put into the
9696 higher numbered register, the load will work regardless of whether
9697 the value being loaded is big-wordian or little-wordian. The
9698 order of the two register loads can matter however, if the address
9699 of the memory location is actually held in one of the registers
9700 being overwritten by the load. */
c1c2bc04 9701 case 'Q':
d5b7b3ae 9702 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 9703 abort ();
d5b7b3ae 9704 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
9705 return;
9706
9997d19d 9707 case 'R':
d5b7b3ae 9708 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 9709 abort ();
d5b7b3ae
RE
9710 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9711 return;
9712
9713 case 'H':
9714 if (REGNO (x) > LAST_ARM_REGNUM)
9715 abort ();
9716 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
9717 return;
9718
9719 case 'm':
d5b7b3ae
RE
9720 asm_fprintf (stream, "%r",
9721 GET_CODE (XEXP (x, 0)) == REG
9722 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
9723 return;
9724
9725 case 'M':
dd18ae56 9726 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae 9727 REGNO (x),
e9d7b180 9728 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
9729 return;
9730
9731 case 'd':
64e92a26
RE
9732 /* CONST_TRUE_RTX means always -- that's the default. */
9733 if (x == const_true_rtx)
d5b7b3ae
RE
9734 return;
9735
defc0463
RE
9736 fputs (arm_condition_codes[get_arm_condition_code (x)],
9737 stream);
9997d19d
RE
9738 return;
9739
9740 case 'D':
64e92a26
RE
9741 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9742 want to do that. */
9743 if (x == const_true_rtx)
9744 abort ();
d5b7b3ae 9745
defc0463
RE
9746 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9747 (get_arm_condition_code (x))],
9748 stream);
9997d19d
RE
9749 return;
9750
9b6b54e2
NC
9751 /* Cirrus registers can be accessed in a variety of ways:
9752 single floating point (f)
9753 double floating point (d)
9754 32bit integer (fx)
9755 64bit integer (dx). */
9756 case 'W': /* Cirrus register in F mode. */
9757 case 'X': /* Cirrus register in D mode. */
9758 case 'Y': /* Cirrus register in FX mode. */
9759 case 'Z': /* Cirrus register in DX mode. */
9760 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9761 abort ();
9762
9763 fprintf (stream, "mv%s%s",
9764 code == 'W' ? "f"
9765 : code == 'X' ? "d"
9766 : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
9767
9768 return;
9769
9770 /* Print cirrus register in the mode specified by the register's mode. */
9771 case 'V':
9772 {
9773 int mode = GET_MODE (x);
9774
9775 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9776 abort ();
9777
9778 fprintf (stream, "mv%s%s",
9779 mode == DFmode ? "d"
9780 : mode == SImode ? "fx"
9781 : mode == DImode ? "dx"
9782 : "f", reg_names[REGNO (x)] + 2);
9783
9784 return;
9785 }
9786
5a9335ef
NC
9787 case 'U':
9788 if (GET_CODE (x) != REG
9789 || REGNO (x) < FIRST_IWMMXT_GR_REGNUM
9790 || REGNO (x) > LAST_IWMMXT_GR_REGNUM)
9791 /* Bad value for wCG register number. */
9792 abort ();
9793 else
9794 fprintf (stream, "%d", REGNO (x) - FIRST_IWMMXT_GR_REGNUM);
9795 return;
9796
9797 /* Print an iWMMXt control register name. */
9798 case 'w':
9799 if (GET_CODE (x) != CONST_INT
9800 || INTVAL (x) < 0
9801 || INTVAL (x) >= 16)
9802 /* Bad value for wC register number. */
9803 abort ();
9804 else
9805 {
9806 static const char * wc_reg_names [16] =
9807 {
9808 "wCID", "wCon", "wCSSF", "wCASF",
9809 "wC4", "wC5", "wC6", "wC7",
9810 "wCGR0", "wCGR1", "wCGR2", "wCGR3",
9811 "wC12", "wC13", "wC14", "wC15"
9812 };
9813
9814 fprintf (stream, wc_reg_names [INTVAL (x)]);
9815 }
9816 return;
9817
9997d19d
RE
9818 default:
9819 if (x == 0)
9820 abort ();
9821
9822 if (GET_CODE (x) == REG)
d5b7b3ae 9823 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
9824 else if (GET_CODE (x) == MEM)
9825 {
9826 output_memory_reference_mode = GET_MODE (x);
9827 output_address (XEXP (x, 0));
9828 }
9829 else if (GET_CODE (x) == CONST_DOUBLE)
9830 fprintf (stream, "#%s", fp_immediate_constant (x));
9831 else if (GET_CODE (x) == NEG)
6354dc9b 9832 abort (); /* This should never happen now. */
9997d19d
RE
9833 else
9834 {
9835 fputc ('#', stream);
9836 output_addr_const (stream, x);
9837 }
9838 }
9839}
cce8749e 9840\f
301d03af
RS
9841#ifndef AOF_ASSEMBLER
9842/* Target hook for assembling integer objects. The ARM version needs to
9843 handle word-sized values specially. */
301d03af 9844static bool
e32bac5b 9845arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
301d03af
RS
9846{
9847 if (size == UNITS_PER_WORD && aligned_p)
9848 {
9849 fputs ("\t.word\t", asm_out_file);
9850 output_addr_const (asm_out_file, x);
9851
9852 /* Mark symbols as position independent. We only do this in the
d6b4baa4 9853 .text segment, not in the .data segment. */
301d03af
RS
9854 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9855 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9856 {
e26053d1 9857 if (GET_CODE (x) == SYMBOL_REF
14f583b8 9858 && (CONSTANT_POOL_ADDRESS_P (x)
94428622 9859 || SYMBOL_REF_LOCAL_P (x)))
301d03af
RS
9860 fputs ("(GOTOFF)", asm_out_file);
9861 else if (GET_CODE (x) == LABEL_REF)
9862 fputs ("(GOTOFF)", asm_out_file);
9863 else
9864 fputs ("(GOT)", asm_out_file);
9865 }
9866 fputc ('\n', asm_out_file);
9867 return true;
9868 }
1d6e90ac 9869
5a9335ef
NC
9870 if (VECTOR_MODE_SUPPORTED_P (GET_MODE (x)))
9871 {
9872 int i, units;
9873
9874 if (GET_CODE (x) != CONST_VECTOR)
9875 abort ();
9876
9877 units = CONST_VECTOR_NUNITS (x);
9878
9879 switch (GET_MODE (x))
9880 {
9881 case V2SImode: size = 4; break;
9882 case V4HImode: size = 2; break;
9883 case V8QImode: size = 1; break;
9884 default:
9885 abort ();
9886 }
9887
9888 for (i = 0; i < units; i++)
9889 {
9890 rtx elt;
9891
9892 elt = CONST_VECTOR_ELT (x, i);
9893 assemble_integer
9894 (elt, size, i == 0 ? BIGGEST_ALIGNMENT : size * BITS_PER_UNIT, 1);
9895 }
9896
9897 return true;
9898 }
9899
301d03af
RS
9900 return default_assemble_integer (x, size, aligned_p);
9901}
9902#endif
9903\f
cce8749e
CH
9904/* A finite state machine takes care of noticing whether or not instructions
9905 can be conditionally executed, and thus decrease execution time and code
9906 size by deleting branch instructions. The fsm is controlled by
9907 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9908
9909/* The state of the fsm controlling condition codes are:
9910 0: normal, do nothing special
9911 1: make ASM_OUTPUT_OPCODE not output this instruction
9912 2: make ASM_OUTPUT_OPCODE not output this instruction
9913 3: make instructions conditional
9914 4: make instructions conditional
9915
9916 State transitions (state->state by whom under condition):
9917 0 -> 1 final_prescan_insn if the `target' is a label
9918 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9919 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9920 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
4977bab6 9921 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
cce8749e
CH
9922 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9923 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9924 (the target insn is arm_target_insn).
9925
ff9940b0
RE
9926 If the jump clobbers the conditions then we use states 2 and 4.
9927
9928 A similar thing can be done with conditional return insns.
9929
cce8749e
CH
9930 XXX In case the `target' is an unconditional branch, this conditionalising
9931 of the instructions always reduces code size, but not always execution
9932 time. But then, I want to reduce the code size to somewhere near what
9933 /bin/cc produces. */
9934
cce8749e
CH
9935/* Returns the index of the ARM condition code string in
9936 `arm_condition_codes'. COMPARISON should be an rtx like
9937 `(eq (...) (...))'. */
84ed5e79 9938static enum arm_cond_code
e32bac5b 9939get_arm_condition_code (rtx comparison)
cce8749e 9940{
5165176d 9941 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
1d6e90ac
NC
9942 int code;
9943 enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
9944
9945 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 9946 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
9947 XEXP (comparison, 1));
9948
9949 switch (mode)
cce8749e 9950 {
84ed5e79
RE
9951 case CC_DNEmode: code = ARM_NE; goto dominance;
9952 case CC_DEQmode: code = ARM_EQ; goto dominance;
9953 case CC_DGEmode: code = ARM_GE; goto dominance;
9954 case CC_DGTmode: code = ARM_GT; goto dominance;
9955 case CC_DLEmode: code = ARM_LE; goto dominance;
9956 case CC_DLTmode: code = ARM_LT; goto dominance;
9957 case CC_DGEUmode: code = ARM_CS; goto dominance;
9958 case CC_DGTUmode: code = ARM_HI; goto dominance;
9959 case CC_DLEUmode: code = ARM_LS; goto dominance;
9960 case CC_DLTUmode: code = ARM_CC;
9961
9962 dominance:
9963 if (comp_code != EQ && comp_code != NE)
9964 abort ();
9965
9966 if (comp_code == EQ)
9967 return ARM_INVERSE_CONDITION_CODE (code);
9968 return code;
9969
5165176d 9970 case CC_NOOVmode:
84ed5e79 9971 switch (comp_code)
5165176d 9972 {
84ed5e79
RE
9973 case NE: return ARM_NE;
9974 case EQ: return ARM_EQ;
9975 case GE: return ARM_PL;
9976 case LT: return ARM_MI;
5165176d
RE
9977 default: abort ();
9978 }
9979
9980 case CC_Zmode:
84ed5e79 9981 switch (comp_code)
5165176d 9982 {
84ed5e79
RE
9983 case NE: return ARM_NE;
9984 case EQ: return ARM_EQ;
5165176d
RE
9985 default: abort ();
9986 }
9987
defc0463
RE
9988 case CC_Nmode:
9989 switch (comp_code)
9990 {
9991 case NE: return ARM_MI;
9992 case EQ: return ARM_PL;
9993 default: abort ();
9994 }
9995
5165176d 9996 case CCFPEmode:
e45b72c4
RE
9997 case CCFPmode:
9998 /* These encodings assume that AC=1 in the FPA system control
9999 byte. This allows us to handle all cases except UNEQ and
10000 LTGT. */
84ed5e79
RE
10001 switch (comp_code)
10002 {
10003 case GE: return ARM_GE;
10004 case GT: return ARM_GT;
10005 case LE: return ARM_LS;
10006 case LT: return ARM_MI;
e45b72c4
RE
10007 case NE: return ARM_NE;
10008 case EQ: return ARM_EQ;
10009 case ORDERED: return ARM_VC;
10010 case UNORDERED: return ARM_VS;
10011 case UNLT: return ARM_LT;
10012 case UNLE: return ARM_LE;
10013 case UNGT: return ARM_HI;
10014 case UNGE: return ARM_PL;
10015 /* UNEQ and LTGT do not have a representation. */
10016 case UNEQ: /* Fall through. */
10017 case LTGT: /* Fall through. */
84ed5e79
RE
10018 default: abort ();
10019 }
10020
10021 case CC_SWPmode:
10022 switch (comp_code)
10023 {
10024 case NE: return ARM_NE;
10025 case EQ: return ARM_EQ;
10026 case GE: return ARM_LE;
10027 case GT: return ARM_LT;
10028 case LE: return ARM_GE;
10029 case LT: return ARM_GT;
10030 case GEU: return ARM_LS;
10031 case GTU: return ARM_CC;
10032 case LEU: return ARM_CS;
10033 case LTU: return ARM_HI;
10034 default: abort ();
10035 }
10036
bd9c7e23
RE
10037 case CC_Cmode:
10038 switch (comp_code)
10039 {
10040 case LTU: return ARM_CS;
10041 case GEU: return ARM_CC;
10042 default: abort ();
10043 }
10044
5165176d 10045 case CCmode:
84ed5e79 10046 switch (comp_code)
5165176d 10047 {
84ed5e79
RE
10048 case NE: return ARM_NE;
10049 case EQ: return ARM_EQ;
10050 case GE: return ARM_GE;
10051 case GT: return ARM_GT;
10052 case LE: return ARM_LE;
10053 case LT: return ARM_LT;
10054 case GEU: return ARM_CS;
10055 case GTU: return ARM_HI;
10056 case LEU: return ARM_LS;
10057 case LTU: return ARM_CC;
5165176d
RE
10058 default: abort ();
10059 }
10060
cce8749e
CH
10061 default: abort ();
10062 }
84ed5e79
RE
10063
10064 abort ();
f3bb6135 10065}
cce8749e 10066
cce8749e 10067void
e32bac5b 10068arm_final_prescan_insn (rtx insn)
cce8749e
CH
10069{
10070 /* BODY will hold the body of INSN. */
1d6e90ac 10071 rtx body = PATTERN (insn);
cce8749e
CH
10072
10073 /* This will be 1 if trying to repeat the trick, and things need to be
10074 reversed if it appears to fail. */
10075 int reverse = 0;
10076
ff9940b0
RE
10077 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
10078 taken are clobbered, even if the rtl suggests otherwise. It also
10079 means that we have to grub around within the jump expression to find
10080 out what the conditions are when the jump isn't taken. */
10081 int jump_clobbers = 0;
10082
6354dc9b 10083 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
10084 int seeking_return = 0;
10085
cce8749e
CH
10086 /* START_INSN will hold the insn from where we start looking. This is the
10087 first insn after the following code_label if REVERSE is true. */
10088 rtx start_insn = insn;
10089
10090 /* If in state 4, check if the target branch is reached, in order to
10091 change back to state 0. */
10092 if (arm_ccfsm_state == 4)
10093 {
10094 if (insn == arm_target_insn)
f5a1b0d2
NC
10095 {
10096 arm_target_insn = NULL;
10097 arm_ccfsm_state = 0;
10098 }
cce8749e
CH
10099 return;
10100 }
10101
10102 /* If in state 3, it is possible to repeat the trick, if this insn is an
10103 unconditional branch to a label, and immediately following this branch
10104 is the previous target label which is only used once, and the label this
10105 branch jumps to is not too far off. */
10106 if (arm_ccfsm_state == 3)
10107 {
10108 if (simplejump_p (insn))
10109 {
10110 start_insn = next_nonnote_insn (start_insn);
10111 if (GET_CODE (start_insn) == BARRIER)
10112 {
10113 /* XXX Isn't this always a barrier? */
10114 start_insn = next_nonnote_insn (start_insn);
10115 }
10116 if (GET_CODE (start_insn) == CODE_LABEL
10117 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
10118 && LABEL_NUSES (start_insn) == 1)
10119 reverse = TRUE;
10120 else
10121 return;
10122 }
ff9940b0
RE
10123 else if (GET_CODE (body) == RETURN)
10124 {
10125 start_insn = next_nonnote_insn (start_insn);
10126 if (GET_CODE (start_insn) == BARRIER)
10127 start_insn = next_nonnote_insn (start_insn);
10128 if (GET_CODE (start_insn) == CODE_LABEL
10129 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
10130 && LABEL_NUSES (start_insn) == 1)
10131 {
10132 reverse = TRUE;
10133 seeking_return = 1;
10134 }
10135 else
10136 return;
10137 }
cce8749e
CH
10138 else
10139 return;
10140 }
10141
10142 if (arm_ccfsm_state != 0 && !reverse)
10143 abort ();
10144 if (GET_CODE (insn) != JUMP_INSN)
10145 return;
10146
ddd5a7c1 10147 /* This jump might be paralleled with a clobber of the condition codes
ff9940b0
RE
10148 the jump should always come first */
10149 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
10150 body = XVECEXP (body, 0, 0);
10151
cce8749e
CH
10152 if (reverse
10153 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
10154 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
10155 {
bd9c7e23
RE
10156 int insns_skipped;
10157 int fail = FALSE, succeed = FALSE;
cce8749e
CH
10158 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
10159 int then_not_else = TRUE;
ff9940b0 10160 rtx this_insn = start_insn, label = 0;
cce8749e 10161
e45b72c4
RE
10162 /* If the jump cannot be done with one instruction, we cannot
10163 conditionally execute the instruction in the inverse case. */
ff9940b0 10164 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 10165 {
5bbe2d40
RE
10166 jump_clobbers = 1;
10167 return;
10168 }
ff9940b0 10169
cce8749e
CH
10170 /* Register the insn jumped to. */
10171 if (reverse)
ff9940b0
RE
10172 {
10173 if (!seeking_return)
10174 label = XEXP (SET_SRC (body), 0);
10175 }
cce8749e
CH
10176 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
10177 label = XEXP (XEXP (SET_SRC (body), 1), 0);
10178 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
10179 {
10180 label = XEXP (XEXP (SET_SRC (body), 2), 0);
10181 then_not_else = FALSE;
10182 }
ff9940b0
RE
10183 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
10184 seeking_return = 1;
10185 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
10186 {
10187 seeking_return = 1;
10188 then_not_else = FALSE;
10189 }
cce8749e
CH
10190 else
10191 abort ();
10192
10193 /* See how many insns this branch skips, and what kind of insns. If all
10194 insns are okay, and the label or unconditional branch to the same
10195 label is not too far away, succeed. */
10196 for (insns_skipped = 0;
b36ba79f 10197 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
10198 {
10199 rtx scanbody;
10200
10201 this_insn = next_nonnote_insn (this_insn);
10202 if (!this_insn)
10203 break;
10204
cce8749e
CH
10205 switch (GET_CODE (this_insn))
10206 {
10207 case CODE_LABEL:
10208 /* Succeed if it is the target label, otherwise fail since
10209 control falls in from somewhere else. */
10210 if (this_insn == label)
10211 {
ff9940b0
RE
10212 if (jump_clobbers)
10213 {
10214 arm_ccfsm_state = 2;
10215 this_insn = next_nonnote_insn (this_insn);
10216 }
10217 else
10218 arm_ccfsm_state = 1;
cce8749e
CH
10219 succeed = TRUE;
10220 }
10221 else
10222 fail = TRUE;
10223 break;
10224
ff9940b0 10225 case BARRIER:
cce8749e 10226 /* Succeed if the following insn is the target label.
ff9940b0
RE
10227 Otherwise fail.
10228 If return insns are used then the last insn in a function
6354dc9b 10229 will be a barrier. */
cce8749e 10230 this_insn = next_nonnote_insn (this_insn);
ff9940b0 10231 if (this_insn && this_insn == label)
cce8749e 10232 {
ff9940b0
RE
10233 if (jump_clobbers)
10234 {
10235 arm_ccfsm_state = 2;
10236 this_insn = next_nonnote_insn (this_insn);
10237 }
10238 else
10239 arm_ccfsm_state = 1;
cce8749e
CH
10240 succeed = TRUE;
10241 }
10242 else
10243 fail = TRUE;
10244 break;
10245
ff9940b0 10246 case CALL_INSN:
2b835d68 10247 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 10248 calls. */
2b835d68 10249 if (TARGET_APCS_32)
bd9c7e23
RE
10250 {
10251 /* Succeed if the following insn is the target label,
10252 or if the following two insns are a barrier and
10253 the target label. */
10254 this_insn = next_nonnote_insn (this_insn);
10255 if (this_insn && GET_CODE (this_insn) == BARRIER)
10256 this_insn = next_nonnote_insn (this_insn);
10257
10258 if (this_insn && this_insn == label
b36ba79f 10259 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
10260 {
10261 if (jump_clobbers)
10262 {
10263 arm_ccfsm_state = 2;
10264 this_insn = next_nonnote_insn (this_insn);
10265 }
10266 else
10267 arm_ccfsm_state = 1;
10268 succeed = TRUE;
10269 }
10270 else
10271 fail = TRUE;
10272 }
ff9940b0 10273 break;
2b835d68 10274
cce8749e
CH
10275 case JUMP_INSN:
10276 /* If this is an unconditional branch to the same label, succeed.
10277 If it is to another label, do nothing. If it is conditional,
10278 fail. */
e32bac5b
RE
10279 /* XXX Probably, the tests for SET and the PC are
10280 unnecessary. */
cce8749e 10281
ed4c4348 10282 scanbody = PATTERN (this_insn);
ff9940b0
RE
10283 if (GET_CODE (scanbody) == SET
10284 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
10285 {
10286 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
10287 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
10288 {
10289 arm_ccfsm_state = 2;
10290 succeed = TRUE;
10291 }
10292 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
10293 fail = TRUE;
10294 }
b36ba79f
RE
10295 /* Fail if a conditional return is undesirable (eg on a
10296 StrongARM), but still allow this if optimizing for size. */
10297 else if (GET_CODE (scanbody) == RETURN
a72d4945 10298 && !use_return_insn (TRUE, NULL)
5895f793 10299 && !optimize_size)
b36ba79f 10300 fail = TRUE;
ff9940b0
RE
10301 else if (GET_CODE (scanbody) == RETURN
10302 && seeking_return)
10303 {
10304 arm_ccfsm_state = 2;
10305 succeed = TRUE;
10306 }
10307 else if (GET_CODE (scanbody) == PARALLEL)
10308 {
10309 switch (get_attr_conds (this_insn))
10310 {
10311 case CONDS_NOCOND:
10312 break;
10313 default:
10314 fail = TRUE;
10315 break;
10316 }
10317 }
4e67550b
RE
10318 else
10319 fail = TRUE; /* Unrecognized jump (eg epilogue). */
10320
cce8749e
CH
10321 break;
10322
10323 case INSN:
ff9940b0
RE
10324 /* Instructions using or affecting the condition codes make it
10325 fail. */
ed4c4348 10326 scanbody = PATTERN (this_insn);
5895f793
RE
10327 if (!(GET_CODE (scanbody) == SET
10328 || GET_CODE (scanbody) == PARALLEL)
74641843 10329 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e 10330 fail = TRUE;
9b6b54e2
NC
10331
10332 /* A conditional cirrus instruction must be followed by
10333 a non Cirrus instruction. However, since we
10334 conditionalize instructions in this function and by
10335 the time we get here we can't add instructions
10336 (nops), because shorten_branches() has already been
10337 called, we will disable conditionalizing Cirrus
10338 instructions to be safe. */
10339 if (GET_CODE (scanbody) != USE
10340 && GET_CODE (scanbody) != CLOBBER
f0375c66 10341 && get_attr_cirrus (this_insn) != CIRRUS_NOT)
9b6b54e2 10342 fail = TRUE;
cce8749e
CH
10343 break;
10344
10345 default:
10346 break;
10347 }
10348 }
10349 if (succeed)
10350 {
ff9940b0 10351 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 10352 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
10353 else if (seeking_return || arm_ccfsm_state == 2)
10354 {
10355 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
10356 {
10357 this_insn = next_nonnote_insn (this_insn);
10358 if (this_insn && (GET_CODE (this_insn) == BARRIER
10359 || GET_CODE (this_insn) == CODE_LABEL))
10360 abort ();
10361 }
10362 if (!this_insn)
10363 {
d6b4baa4 10364 /* Oh, dear! we ran off the end.. give up. */
df4ae160 10365 recog (PATTERN (insn), insn, NULL);
ff9940b0 10366 arm_ccfsm_state = 0;
abaa26e5 10367 arm_target_insn = NULL;
ff9940b0
RE
10368 return;
10369 }
10370 arm_target_insn = this_insn;
10371 }
cce8749e
CH
10372 else
10373 abort ();
ff9940b0
RE
10374 if (jump_clobbers)
10375 {
10376 if (reverse)
10377 abort ();
10378 arm_current_cc =
10379 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
10380 0), 0), 1));
10381 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
10382 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10383 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
10384 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10385 }
10386 else
10387 {
10388 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
10389 what it was. */
10390 if (!reverse)
10391 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
10392 0));
10393 }
cce8749e 10394
cce8749e
CH
10395 if (reverse || then_not_else)
10396 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10397 }
d5b7b3ae 10398
1ccbefce 10399 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 10400 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 10401 across this call; since the insn has been recognized already we
b020fd92 10402 call recog direct). */
df4ae160 10403 recog (PATTERN (insn), insn, NULL);
cce8749e 10404 }
f3bb6135 10405}
cce8749e 10406
4b02997f
NC
10407/* Returns true if REGNO is a valid register
10408 for holding a quantity of tyoe MODE. */
4b02997f 10409int
e32bac5b 10410arm_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
4b02997f
NC
10411{
10412 if (GET_MODE_CLASS (mode) == MODE_CC)
10413 return regno == CC_REGNUM;
10414
10415 if (TARGET_THUMB)
10416 /* For the Thumb we only allow values bigger than SImode in
10417 registers 0 - 6, so that there is always a second low
10418 register available to hold the upper part of the value.
10419 We probably we ought to ensure that the register is the
10420 start of an even numbered register pair. */
e9d7b180 10421 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
4b02997f 10422
9b6b54e2
NC
10423 if (IS_CIRRUS_REGNUM (regno))
10424 /* We have outlawed SI values in Cirrus registers because they
10425 reside in the lower 32 bits, but SF values reside in the
10426 upper 32 bits. This causes gcc all sorts of grief. We can't
10427 even split the registers into pairs because Cirrus SI values
10428 get sign extended to 64bits-- aldyh. */
10429 return (GET_MODE_CLASS (mode) == MODE_FLOAT) || (mode == DImode);
10430
5a9335ef
NC
10431 if (IS_IWMMXT_GR_REGNUM (regno))
10432 return mode == SImode;
10433
10434 if (IS_IWMMXT_REGNUM (regno))
10435 return VALID_IWMMXT_REG_MODE (mode);
10436
4b02997f 10437 if (regno <= LAST_ARM_REGNUM)
9a9f7594 10438 /* We allow any value to be stored in the general registers. */
3cb66fd7 10439 return 1;
4b02997f
NC
10440
10441 if ( regno == FRAME_POINTER_REGNUM
10442 || regno == ARG_POINTER_REGNUM)
10443 /* We only allow integers in the fake hard registers. */
10444 return GET_MODE_CLASS (mode) == MODE_INT;
10445
3b684012 10446 /* The only registers left are the FPA registers
4b02997f
NC
10447 which we only allow to hold FP values. */
10448 return GET_MODE_CLASS (mode) == MODE_FLOAT
10449 && regno >= FIRST_ARM_FP_REGNUM
10450 && regno <= LAST_ARM_FP_REGNUM;
10451}
10452
d5b7b3ae 10453int
e32bac5b 10454arm_regno_class (int regno)
d5b7b3ae
RE
10455{
10456 if (TARGET_THUMB)
10457 {
10458 if (regno == STACK_POINTER_REGNUM)
10459 return STACK_REG;
10460 if (regno == CC_REGNUM)
10461 return CC_REG;
10462 if (regno < 8)
10463 return LO_REGS;
10464 return HI_REGS;
10465 }
10466
10467 if ( regno <= LAST_ARM_REGNUM
10468 || regno == FRAME_POINTER_REGNUM
10469 || regno == ARG_POINTER_REGNUM)
10470 return GENERAL_REGS;
10471
10472 if (regno == CC_REGNUM)
10473 return NO_REGS;
10474
9b6b54e2
NC
10475 if (IS_CIRRUS_REGNUM (regno))
10476 return CIRRUS_REGS;
10477
5a9335ef
NC
10478 if (IS_IWMMXT_REGNUM (regno))
10479 return IWMMXT_REGS;
10480
e99faaaa
ILT
10481 if (IS_IWMMXT_GR_REGNUM (regno))
10482 return IWMMXT_GR_REGS;
10483
3b684012 10484 return FPA_REGS;
d5b7b3ae
RE
10485}
10486
10487/* Handle a special case when computing the offset
10488 of an argument from the frame pointer. */
10489int
e32bac5b 10490arm_debugger_arg_offset (int value, rtx addr)
d5b7b3ae
RE
10491{
10492 rtx insn;
10493
10494 /* We are only interested if dbxout_parms() failed to compute the offset. */
10495 if (value != 0)
10496 return 0;
10497
10498 /* We can only cope with the case where the address is held in a register. */
10499 if (GET_CODE (addr) != REG)
10500 return 0;
10501
10502 /* If we are using the frame pointer to point at the argument, then
10503 an offset of 0 is correct. */
cd2b33d0 10504 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
10505 return 0;
10506
10507 /* If we are using the stack pointer to point at the
10508 argument, then an offset of 0 is correct. */
5895f793 10509 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
10510 && REGNO (addr) == SP_REGNUM)
10511 return 0;
10512
10513 /* Oh dear. The argument is pointed to by a register rather
10514 than being held in a register, or being stored at a known
10515 offset from the frame pointer. Since GDB only understands
10516 those two kinds of argument we must translate the address
10517 held in the register into an offset from the frame pointer.
10518 We do this by searching through the insns for the function
10519 looking to see where this register gets its value. If the
4912a07c 10520 register is initialized from the frame pointer plus an offset
d5b7b3ae
RE
10521 then we are in luck and we can continue, otherwise we give up.
10522
10523 This code is exercised by producing debugging information
10524 for a function with arguments like this:
10525
10526 double func (double a, double b, int c, double d) {return d;}
10527
10528 Without this code the stab for parameter 'd' will be set to
10529 an offset of 0 from the frame pointer, rather than 8. */
10530
10531 /* The if() statement says:
10532
10533 If the insn is a normal instruction
10534 and if the insn is setting the value in a register
10535 and if the register being set is the register holding the address of the argument
10536 and if the address is computing by an addition
10537 that involves adding to a register
10538 which is the frame pointer
10539 a constant integer
10540
d6b4baa4 10541 then... */
d5b7b3ae
RE
10542
10543 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10544 {
10545 if ( GET_CODE (insn) == INSN
10546 && GET_CODE (PATTERN (insn)) == SET
10547 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
10548 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
10549 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 10550 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
10551 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
10552 )
10553 {
10554 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
10555
10556 break;
10557 }
10558 }
10559
10560 if (value == 0)
10561 {
10562 debug_rtx (addr);
c725bd79 10563 warning ("unable to compute real location of stacked parameter");
d5b7b3ae
RE
10564 value = 8; /* XXX magic hack */
10565 }
10566
10567 return value;
10568}
d5b7b3ae 10569\f
5a9335ef
NC
10570#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
10571 do \
10572 { \
10573 if ((MASK) & insn_flags) \
10574 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE); \
10575 } \
10576 while (0)
10577
10578struct builtin_description
10579{
10580 const unsigned int mask;
10581 const enum insn_code icode;
10582 const char * const name;
10583 const enum arm_builtins code;
10584 const enum rtx_code comparison;
10585 const unsigned int flag;
10586};
10587
10588static const struct builtin_description bdesc_2arg[] =
10589{
10590#define IWMMXT_BUILTIN(code, string, builtin) \
10591 { FL_IWMMXT, CODE_FOR_##code, "__builtin_arm_" string, \
10592 ARM_BUILTIN_##builtin, 0, 0 },
10593
10594 IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
10595 IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
10596 IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
10597 IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
10598 IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
10599 IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
10600 IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
10601 IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
10602 IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
10603 IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
10604 IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
10605 IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
10606 IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
10607 IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
10608 IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
10609 IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
10610 IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
10611 IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
10612 IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
10613 IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsh", WMULSH)
10614 IWMMXT_BUILTIN (umulv4hi3_highpart, "wmuluh", WMULUH)
10615 IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
10616 IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
10617 IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
10618 IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
10619 IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
10620 IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
10621 IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
10622 IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
10623 IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
10624 IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
10625 IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
10626 IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
10627 IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
10628 IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
10629 IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
10630 IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
10631 IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
10632 IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
10633 IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
10634 IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
10635 IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
10636 IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
10637 IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
10638 IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
10639 IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
10640 IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
10641 IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
10642 IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
10643 IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
10644 IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
10645 IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
10646 IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
10647 IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
10648 IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
10649 IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
10650 IWMMXT_BUILTIN (iwmmxt_wmadds, "wmadds", WMADDS)
10651 IWMMXT_BUILTIN (iwmmxt_wmaddu, "wmaddu", WMADDU)
10652
10653#define IWMMXT_BUILTIN2(code, builtin) \
10654 { FL_IWMMXT, CODE_FOR_##code, NULL, ARM_BUILTIN_##builtin, 0, 0 },
10655
10656 IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
10657 IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
10658 IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
10659 IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
10660 IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
10661 IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
10662 IWMMXT_BUILTIN2 (ashlv4hi3_di, WSLLH)
10663 IWMMXT_BUILTIN2 (ashlv4hi3, WSLLHI)
10664 IWMMXT_BUILTIN2 (ashlv2si3_di, WSLLW)
10665 IWMMXT_BUILTIN2 (ashlv2si3, WSLLWI)
10666 IWMMXT_BUILTIN2 (ashldi3_di, WSLLD)
10667 IWMMXT_BUILTIN2 (ashldi3_iwmmxt, WSLLDI)
10668 IWMMXT_BUILTIN2 (lshrv4hi3_di, WSRLH)
10669 IWMMXT_BUILTIN2 (lshrv4hi3, WSRLHI)
10670 IWMMXT_BUILTIN2 (lshrv2si3_di, WSRLW)
10671 IWMMXT_BUILTIN2 (lshrv2si3, WSRLWI)
10672 IWMMXT_BUILTIN2 (lshrdi3_di, WSRLD)
10673 IWMMXT_BUILTIN2 (lshrdi3, WSRLDI)
10674 IWMMXT_BUILTIN2 (ashrv4hi3_di, WSRAH)
10675 IWMMXT_BUILTIN2 (ashrv4hi3, WSRAHI)
10676 IWMMXT_BUILTIN2 (ashrv2si3_di, WSRAW)
10677 IWMMXT_BUILTIN2 (ashrv2si3, WSRAWI)
10678 IWMMXT_BUILTIN2 (ashrdi3_di, WSRAD)
10679 IWMMXT_BUILTIN2 (ashrdi3, WSRADI)
10680 IWMMXT_BUILTIN2 (rorv4hi3_di, WRORH)
10681 IWMMXT_BUILTIN2 (rorv4hi3, WRORHI)
10682 IWMMXT_BUILTIN2 (rorv2si3_di, WRORW)
10683 IWMMXT_BUILTIN2 (rorv2si3, WRORWI)
10684 IWMMXT_BUILTIN2 (rordi3_di, WRORD)
10685 IWMMXT_BUILTIN2 (rordi3, WRORDI)
10686 IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
10687 IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
10688};
10689
10690static const struct builtin_description bdesc_1arg[] =
10691{
10692 IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
10693 IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
10694 IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
10695 IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
10696 IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
10697 IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
10698 IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
10699 IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
10700 IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
10701 IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
10702 IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
10703 IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
10704 IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
10705 IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
10706 IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
10707 IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
10708 IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
10709 IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
10710};
10711
10712/* Set up all the iWMMXt builtins. This is
10713 not called if TARGET_IWMMXT is zero. */
10714
10715static void
10716arm_init_iwmmxt_builtins (void)
10717{
10718 const struct builtin_description * d;
10719 size_t i;
10720 tree endlink = void_list_node;
10721
10722 tree int_ftype_int
10723 = build_function_type (integer_type_node,
10724 tree_cons (NULL_TREE, integer_type_node, endlink));
10725 tree v8qi_ftype_v8qi_v8qi_int
10726 = build_function_type (V8QI_type_node,
10727 tree_cons (NULL_TREE, V8QI_type_node,
10728 tree_cons (NULL_TREE, V8QI_type_node,
10729 tree_cons (NULL_TREE,
10730 integer_type_node,
10731 endlink))));
10732 tree v4hi_ftype_v4hi_int
10733 = build_function_type (V4HI_type_node,
10734 tree_cons (NULL_TREE, V4HI_type_node,
10735 tree_cons (NULL_TREE, integer_type_node,
10736 endlink)));
10737 tree v2si_ftype_v2si_int
10738 = build_function_type (V2SI_type_node,
10739 tree_cons (NULL_TREE, V2SI_type_node,
10740 tree_cons (NULL_TREE, integer_type_node,
10741 endlink)));
10742 tree v2si_ftype_di_di
10743 = build_function_type (V2SI_type_node,
10744 tree_cons (NULL_TREE, long_long_integer_type_node,
10745 tree_cons (NULL_TREE, long_long_integer_type_node,
10746 endlink)));
10747 tree di_ftype_di_int
10748 = build_function_type (long_long_integer_type_node,
10749 tree_cons (NULL_TREE, long_long_integer_type_node,
10750 tree_cons (NULL_TREE, integer_type_node,
10751 endlink)));
10752 tree di_ftype_di_int_int
10753 = build_function_type (long_long_integer_type_node,
10754 tree_cons (NULL_TREE, long_long_integer_type_node,
10755 tree_cons (NULL_TREE, integer_type_node,
10756 tree_cons (NULL_TREE,
10757 integer_type_node,
10758 endlink))));
10759 tree int_ftype_v8qi
10760 = build_function_type (integer_type_node,
10761 tree_cons (NULL_TREE, V8QI_type_node,
10762 endlink));
10763 tree int_ftype_v4hi
10764 = build_function_type (integer_type_node,
10765 tree_cons (NULL_TREE, V4HI_type_node,
10766 endlink));
10767 tree int_ftype_v2si
10768 = build_function_type (integer_type_node,
10769 tree_cons (NULL_TREE, V2SI_type_node,
10770 endlink));
10771 tree int_ftype_v8qi_int
10772 = build_function_type (integer_type_node,
10773 tree_cons (NULL_TREE, V8QI_type_node,
10774 tree_cons (NULL_TREE, integer_type_node,
10775 endlink)));
10776 tree int_ftype_v4hi_int
10777 = build_function_type (integer_type_node,
10778 tree_cons (NULL_TREE, V4HI_type_node,
10779 tree_cons (NULL_TREE, integer_type_node,
10780 endlink)));
10781 tree int_ftype_v2si_int
10782 = build_function_type (integer_type_node,
10783 tree_cons (NULL_TREE, V2SI_type_node,
10784 tree_cons (NULL_TREE, integer_type_node,
10785 endlink)));
10786 tree v8qi_ftype_v8qi_int_int
10787 = build_function_type (V8QI_type_node,
10788 tree_cons (NULL_TREE, V8QI_type_node,
10789 tree_cons (NULL_TREE, integer_type_node,
10790 tree_cons (NULL_TREE,
10791 integer_type_node,
10792 endlink))));
10793 tree v4hi_ftype_v4hi_int_int
10794 = build_function_type (V4HI_type_node,
10795 tree_cons (NULL_TREE, V4HI_type_node,
10796 tree_cons (NULL_TREE, integer_type_node,
10797 tree_cons (NULL_TREE,
10798 integer_type_node,
10799 endlink))));
10800 tree v2si_ftype_v2si_int_int
10801 = build_function_type (V2SI_type_node,
10802 tree_cons (NULL_TREE, V2SI_type_node,
10803 tree_cons (NULL_TREE, integer_type_node,
10804 tree_cons (NULL_TREE,
10805 integer_type_node,
10806 endlink))));
10807 /* Miscellaneous. */
10808 tree v8qi_ftype_v4hi_v4hi
10809 = build_function_type (V8QI_type_node,
10810 tree_cons (NULL_TREE, V4HI_type_node,
10811 tree_cons (NULL_TREE, V4HI_type_node,
10812 endlink)));
10813 tree v4hi_ftype_v2si_v2si
10814 = build_function_type (V4HI_type_node,
10815 tree_cons (NULL_TREE, V2SI_type_node,
10816 tree_cons (NULL_TREE, V2SI_type_node,
10817 endlink)));
10818 tree v2si_ftype_v4hi_v4hi
10819 = build_function_type (V2SI_type_node,
10820 tree_cons (NULL_TREE, V4HI_type_node,
10821 tree_cons (NULL_TREE, V4HI_type_node,
10822 endlink)));
10823 tree v2si_ftype_v8qi_v8qi
10824 = build_function_type (V2SI_type_node,
10825 tree_cons (NULL_TREE, V8QI_type_node,
10826 tree_cons (NULL_TREE, V8QI_type_node,
10827 endlink)));
10828 tree v4hi_ftype_v4hi_di
10829 = build_function_type (V4HI_type_node,
10830 tree_cons (NULL_TREE, V4HI_type_node,
10831 tree_cons (NULL_TREE,
10832 long_long_integer_type_node,
10833 endlink)));
10834 tree v2si_ftype_v2si_di
10835 = build_function_type (V2SI_type_node,
10836 tree_cons (NULL_TREE, V2SI_type_node,
10837 tree_cons (NULL_TREE,
10838 long_long_integer_type_node,
10839 endlink)));
10840 tree void_ftype_int_int
10841 = build_function_type (void_type_node,
10842 tree_cons (NULL_TREE, integer_type_node,
10843 tree_cons (NULL_TREE, integer_type_node,
10844 endlink)));
10845 tree di_ftype_void
10846 = build_function_type (long_long_unsigned_type_node, endlink);
10847 tree di_ftype_v8qi
10848 = build_function_type (long_long_integer_type_node,
10849 tree_cons (NULL_TREE, V8QI_type_node,
10850 endlink));
10851 tree di_ftype_v4hi
10852 = build_function_type (long_long_integer_type_node,
10853 tree_cons (NULL_TREE, V4HI_type_node,
10854 endlink));
10855 tree di_ftype_v2si
10856 = build_function_type (long_long_integer_type_node,
10857 tree_cons (NULL_TREE, V2SI_type_node,
10858 endlink));
10859 tree v2si_ftype_v4hi
10860 = build_function_type (V2SI_type_node,
10861 tree_cons (NULL_TREE, V4HI_type_node,
10862 endlink));
10863 tree v4hi_ftype_v8qi
10864 = build_function_type (V4HI_type_node,
10865 tree_cons (NULL_TREE, V8QI_type_node,
10866 endlink));
10867
10868 tree di_ftype_di_v4hi_v4hi
10869 = build_function_type (long_long_unsigned_type_node,
10870 tree_cons (NULL_TREE,
10871 long_long_unsigned_type_node,
10872 tree_cons (NULL_TREE, V4HI_type_node,
10873 tree_cons (NULL_TREE,
10874 V4HI_type_node,
10875 endlink))));
10876
10877 tree di_ftype_v4hi_v4hi
10878 = build_function_type (long_long_unsigned_type_node,
10879 tree_cons (NULL_TREE, V4HI_type_node,
10880 tree_cons (NULL_TREE, V4HI_type_node,
10881 endlink)));
10882
10883 /* Normal vector binops. */
10884 tree v8qi_ftype_v8qi_v8qi
10885 = build_function_type (V8QI_type_node,
10886 tree_cons (NULL_TREE, V8QI_type_node,
10887 tree_cons (NULL_TREE, V8QI_type_node,
10888 endlink)));
10889 tree v4hi_ftype_v4hi_v4hi
10890 = build_function_type (V4HI_type_node,
10891 tree_cons (NULL_TREE, V4HI_type_node,
10892 tree_cons (NULL_TREE, V4HI_type_node,
10893 endlink)));
10894 tree v2si_ftype_v2si_v2si
10895 = build_function_type (V2SI_type_node,
10896 tree_cons (NULL_TREE, V2SI_type_node,
10897 tree_cons (NULL_TREE, V2SI_type_node,
10898 endlink)));
10899 tree di_ftype_di_di
10900 = build_function_type (long_long_unsigned_type_node,
10901 tree_cons (NULL_TREE, long_long_unsigned_type_node,
10902 tree_cons (NULL_TREE,
10903 long_long_unsigned_type_node,
10904 endlink)));
10905
10906 /* Add all builtins that are more or less simple operations on two
10907 operands. */
e97a46ce 10908 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5a9335ef
NC
10909 {
10910 /* Use one of the operands; the target can have a different mode for
10911 mask-generating compares. */
10912 enum machine_mode mode;
10913 tree type;
10914
10915 if (d->name == 0)
10916 continue;
10917
10918 mode = insn_data[d->icode].operand[1].mode;
10919
10920 switch (mode)
10921 {
10922 case V8QImode:
10923 type = v8qi_ftype_v8qi_v8qi;
10924 break;
10925 case V4HImode:
10926 type = v4hi_ftype_v4hi_v4hi;
10927 break;
10928 case V2SImode:
10929 type = v2si_ftype_v2si_v2si;
10930 break;
10931 case DImode:
10932 type = di_ftype_di_di;
10933 break;
10934
10935 default:
10936 abort ();
10937 }
10938
10939 def_mbuiltin (d->mask, d->name, type, d->code);
10940 }
10941
10942 /* Add the remaining MMX insns with somewhat more complicated types. */
10943 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wzero", di_ftype_void, ARM_BUILTIN_WZERO);
10944 def_mbuiltin (FL_IWMMXT, "__builtin_arm_setwcx", void_ftype_int_int, ARM_BUILTIN_SETWCX);
10945 def_mbuiltin (FL_IWMMXT, "__builtin_arm_getwcx", int_ftype_int, ARM_BUILTIN_GETWCX);
10946
10947 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSLLH);
10948 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllw", v2si_ftype_v2si_di, ARM_BUILTIN_WSLLW);
10949 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslld", di_ftype_di_di, ARM_BUILTIN_WSLLD);
10950 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSLLHI);
10951 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSLLWI);
10952 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslldi", di_ftype_di_int, ARM_BUILTIN_WSLLDI);
10953
10954 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRLH);
10955 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRLW);
10956 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrld", di_ftype_di_di, ARM_BUILTIN_WSRLD);
10957 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRLHI);
10958 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRLWI);
10959 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrldi", di_ftype_di_int, ARM_BUILTIN_WSRLDI);
10960
10961 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrah", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRAH);
10962 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsraw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRAW);
10963 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrad", di_ftype_di_di, ARM_BUILTIN_WSRAD);
10964 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrahi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRAHI);
10965 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrawi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRAWI);
10966 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsradi", di_ftype_di_int, ARM_BUILTIN_WSRADI);
10967
10968 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WRORH);
10969 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorw", v2si_ftype_v2si_di, ARM_BUILTIN_WRORW);
10970 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrord", di_ftype_di_di, ARM_BUILTIN_WRORD);
10971 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WRORHI);
10972 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorwi", v2si_ftype_v2si_int, ARM_BUILTIN_WRORWI);
10973 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrordi", di_ftype_di_int, ARM_BUILTIN_WRORDI);
10974
10975 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wshufh", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSHUFH);
10976
10977 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadb", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADB);
10978 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadh", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADH);
10979 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadbz", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADBZ);
10980 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadhz", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADHZ);
10981
10982 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsb", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMSB);
10983 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMSH);
10984 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMSW);
10985 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmub", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMUB);
10986 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMUH);
10987 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMUW);
10988 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrb", v8qi_ftype_v8qi_int_int, ARM_BUILTIN_TINSRB);
10989 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrh", v4hi_ftype_v4hi_int_int, ARM_BUILTIN_TINSRH);
10990 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrw", v2si_ftype_v2si_int_int, ARM_BUILTIN_TINSRW);
10991
10992 def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccb", di_ftype_v8qi, ARM_BUILTIN_WACCB);
10993 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wacch", di_ftype_v4hi, ARM_BUILTIN_WACCH);
10994 def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccw", di_ftype_v2si, ARM_BUILTIN_WACCW);
10995
10996 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskb", int_ftype_v8qi, ARM_BUILTIN_TMOVMSKB);
10997 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskh", int_ftype_v4hi, ARM_BUILTIN_TMOVMSKH);
10998 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskw", int_ftype_v2si, ARM_BUILTIN_TMOVMSKW);
10999
11000 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhss", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHSS);
11001 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhus", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHUS);
11002 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwus", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWUS);
11003 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwss", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWSS);
11004 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdus", v2si_ftype_di_di, ARM_BUILTIN_WPACKDUS);
11005 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdss", v2si_ftype_di_di, ARM_BUILTIN_WPACKDSS);
11006
11007 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHUB);
11008 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHUH);
11009 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHUW);
11010 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHSB);
11011 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHSH);
11012 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHSW);
11013 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELUB);
11014 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELUH);
11015 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELUW);
11016 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELSB);
11017 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELSH);
11018 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELSW);
11019
11020 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacs", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACS);
11021 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacsz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACSZ);
11022 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacu", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACU);
11023 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacuz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACUZ);
11024
11025 def_mbuiltin (FL_IWMMXT, "__builtin_arm_walign", v8qi_ftype_v8qi_v8qi_int, ARM_BUILTIN_WALIGN);
11026 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmia", di_ftype_di_int_int, ARM_BUILTIN_TMIA);
11027 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiaph", di_ftype_di_int_int, ARM_BUILTIN_TMIAPH);
11028 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabb", di_ftype_di_int_int, ARM_BUILTIN_TMIABB);
11029 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabt", di_ftype_di_int_int, ARM_BUILTIN_TMIABT);
11030 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatb", di_ftype_di_int_int, ARM_BUILTIN_TMIATB);
11031 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatt", di_ftype_di_int_int, ARM_BUILTIN_TMIATT);
11032}
11033
11034static void
11035arm_init_builtins (void)
11036{
11037 if (TARGET_REALLY_IWMMXT)
11038 arm_init_iwmmxt_builtins ();
11039}
11040
11041/* Errors in the source file can cause expand_expr to return const0_rtx
11042 where we expect a vector. To avoid crashing, use one of the vector
11043 clear instructions. */
11044
11045static rtx
11046safe_vector_operand (rtx x, enum machine_mode mode)
11047{
11048 if (x != const0_rtx)
11049 return x;
11050 x = gen_reg_rtx (mode);
11051
11052 emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
11053 : gen_rtx_SUBREG (DImode, x, 0)));
11054 return x;
11055}
11056
11057/* Subroutine of arm_expand_builtin to take care of binop insns. */
11058
11059static rtx
11060arm_expand_binop_builtin (enum insn_code icode,
11061 tree arglist, rtx target)
11062{
11063 rtx pat;
11064 tree arg0 = TREE_VALUE (arglist);
11065 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11066 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11067 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11068 enum machine_mode tmode = insn_data[icode].operand[0].mode;
11069 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
11070 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
11071
11072 if (VECTOR_MODE_P (mode0))
11073 op0 = safe_vector_operand (op0, mode0);
11074 if (VECTOR_MODE_P (mode1))
11075 op1 = safe_vector_operand (op1, mode1);
11076
11077 if (! target
11078 || GET_MODE (target) != tmode
11079 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11080 target = gen_reg_rtx (tmode);
11081
11082 /* In case the insn wants input operands in modes different from
11083 the result, abort. */
11084 if (GET_MODE (op0) != mode0 || GET_MODE (op1) != mode1)
11085 abort ();
11086
11087 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11088 op0 = copy_to_mode_reg (mode0, op0);
11089 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11090 op1 = copy_to_mode_reg (mode1, op1);
11091
11092 pat = GEN_FCN (icode) (target, op0, op1);
11093 if (! pat)
11094 return 0;
11095 emit_insn (pat);
11096 return target;
11097}
11098
11099/* Subroutine of arm_expand_builtin to take care of unop insns. */
11100
11101static rtx
11102arm_expand_unop_builtin (enum insn_code icode,
11103 tree arglist, rtx target, int do_load)
11104{
11105 rtx pat;
11106 tree arg0 = TREE_VALUE (arglist);
11107 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11108 enum machine_mode tmode = insn_data[icode].operand[0].mode;
11109 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
11110
11111 if (! target
11112 || GET_MODE (target) != tmode
11113 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11114 target = gen_reg_rtx (tmode);
11115 if (do_load)
11116 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
11117 else
11118 {
11119 if (VECTOR_MODE_P (mode0))
11120 op0 = safe_vector_operand (op0, mode0);
11121
11122 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11123 op0 = copy_to_mode_reg (mode0, op0);
11124 }
11125
11126 pat = GEN_FCN (icode) (target, op0);
11127 if (! pat)
11128 return 0;
11129 emit_insn (pat);
11130 return target;
11131}
11132
11133/* Expand an expression EXP that calls a built-in function,
11134 with result going to TARGET if that's convenient
11135 (and in mode MODE if that's convenient).
11136 SUBTARGET may be used as the target for computing one of EXP's operands.
11137 IGNORE is nonzero if the value is to be ignored. */
11138
11139static rtx
11140arm_expand_builtin (tree exp,
11141 rtx target,
11142 rtx subtarget ATTRIBUTE_UNUSED,
11143 enum machine_mode mode ATTRIBUTE_UNUSED,
11144 int ignore ATTRIBUTE_UNUSED)
11145{
11146 const struct builtin_description * d;
11147 enum insn_code icode;
11148 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
11149 tree arglist = TREE_OPERAND (exp, 1);
11150 tree arg0;
11151 tree arg1;
11152 tree arg2;
11153 rtx op0;
11154 rtx op1;
11155 rtx op2;
11156 rtx pat;
11157 int fcode = DECL_FUNCTION_CODE (fndecl);
11158 size_t i;
11159 enum machine_mode tmode;
11160 enum machine_mode mode0;
11161 enum machine_mode mode1;
11162 enum machine_mode mode2;
11163
11164 switch (fcode)
11165 {
11166 case ARM_BUILTIN_TEXTRMSB:
11167 case ARM_BUILTIN_TEXTRMUB:
11168 case ARM_BUILTIN_TEXTRMSH:
11169 case ARM_BUILTIN_TEXTRMUH:
11170 case ARM_BUILTIN_TEXTRMSW:
11171 case ARM_BUILTIN_TEXTRMUW:
11172 icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
11173 : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
11174 : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
11175 : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
11176 : CODE_FOR_iwmmxt_textrmw);
11177
11178 arg0 = TREE_VALUE (arglist);
11179 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11180 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11181 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11182 tmode = insn_data[icode].operand[0].mode;
11183 mode0 = insn_data[icode].operand[1].mode;
11184 mode1 = insn_data[icode].operand[2].mode;
11185
11186 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11187 op0 = copy_to_mode_reg (mode0, op0);
11188 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11189 {
11190 /* @@@ better error message */
11191 error ("selector must be an immediate");
11192 return gen_reg_rtx (tmode);
11193 }
11194 if (target == 0
11195 || GET_MODE (target) != tmode
11196 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11197 target = gen_reg_rtx (tmode);
11198 pat = GEN_FCN (icode) (target, op0, op1);
11199 if (! pat)
11200 return 0;
11201 emit_insn (pat);
11202 return target;
11203
11204 case ARM_BUILTIN_TINSRB:
11205 case ARM_BUILTIN_TINSRH:
11206 case ARM_BUILTIN_TINSRW:
11207 icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
11208 : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
11209 : CODE_FOR_iwmmxt_tinsrw);
11210 arg0 = TREE_VALUE (arglist);
11211 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11212 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
11213 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11214 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11215 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
11216 tmode = insn_data[icode].operand[0].mode;
11217 mode0 = insn_data[icode].operand[1].mode;
11218 mode1 = insn_data[icode].operand[2].mode;
11219 mode2 = insn_data[icode].operand[3].mode;
11220
11221 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11222 op0 = copy_to_mode_reg (mode0, op0);
11223 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11224 op1 = copy_to_mode_reg (mode1, op1);
11225 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
11226 {
11227 /* @@@ better error message */
11228 error ("selector must be an immediate");
11229 return const0_rtx;
11230 }
11231 if (target == 0
11232 || GET_MODE (target) != tmode
11233 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11234 target = gen_reg_rtx (tmode);
11235 pat = GEN_FCN (icode) (target, op0, op1, op2);
11236 if (! pat)
11237 return 0;
11238 emit_insn (pat);
11239 return target;
11240
11241 case ARM_BUILTIN_SETWCX:
11242 arg0 = TREE_VALUE (arglist);
11243 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11244 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11245 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11246 emit_insn (gen_iwmmxt_tmcr (op0, op1));
11247 return 0;
11248
11249 case ARM_BUILTIN_GETWCX:
11250 arg0 = TREE_VALUE (arglist);
11251 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11252 target = gen_reg_rtx (SImode);
11253 emit_insn (gen_iwmmxt_tmrc (target, op0));
11254 return target;
11255
11256 case ARM_BUILTIN_WSHUFH:
11257 icode = CODE_FOR_iwmmxt_wshufh;
11258 arg0 = TREE_VALUE (arglist);
11259 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11260 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11261 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11262 tmode = insn_data[icode].operand[0].mode;
11263 mode1 = insn_data[icode].operand[1].mode;
11264 mode2 = insn_data[icode].operand[2].mode;
11265
11266 if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
11267 op0 = copy_to_mode_reg (mode1, op0);
11268 if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
11269 {
11270 /* @@@ better error message */
11271 error ("mask must be an immediate");
11272 return const0_rtx;
11273 }
11274 if (target == 0
11275 || GET_MODE (target) != tmode
11276 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11277 target = gen_reg_rtx (tmode);
11278 pat = GEN_FCN (icode) (target, op0, op1);
11279 if (! pat)
11280 return 0;
11281 emit_insn (pat);
11282 return target;
11283
11284 case ARM_BUILTIN_WSADB:
11285 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadb, arglist, target);
11286 case ARM_BUILTIN_WSADH:
11287 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadh, arglist, target);
11288 case ARM_BUILTIN_WSADBZ:
11289 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, arglist, target);
11290 case ARM_BUILTIN_WSADHZ:
11291 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, arglist, target);
11292
11293 /* Several three-argument builtins. */
11294 case ARM_BUILTIN_WMACS:
11295 case ARM_BUILTIN_WMACU:
11296 case ARM_BUILTIN_WALIGN:
11297 case ARM_BUILTIN_TMIA:
11298 case ARM_BUILTIN_TMIAPH:
11299 case ARM_BUILTIN_TMIATT:
11300 case ARM_BUILTIN_TMIATB:
11301 case ARM_BUILTIN_TMIABT:
11302 case ARM_BUILTIN_TMIABB:
11303 icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
11304 : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
11305 : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
11306 : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
11307 : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
11308 : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
11309 : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
11310 : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
11311 : CODE_FOR_iwmmxt_walign);
11312 arg0 = TREE_VALUE (arglist);
11313 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11314 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
11315 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11316 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11317 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
11318 tmode = insn_data[icode].operand[0].mode;
11319 mode0 = insn_data[icode].operand[1].mode;
11320 mode1 = insn_data[icode].operand[2].mode;
11321 mode2 = insn_data[icode].operand[3].mode;
11322
11323 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11324 op0 = copy_to_mode_reg (mode0, op0);
11325 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11326 op1 = copy_to_mode_reg (mode1, op1);
11327 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
11328 op2 = copy_to_mode_reg (mode2, op2);
11329 if (target == 0
11330 || GET_MODE (target) != tmode
11331 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11332 target = gen_reg_rtx (tmode);
11333 pat = GEN_FCN (icode) (target, op0, op1, op2);
11334 if (! pat)
11335 return 0;
11336 emit_insn (pat);
11337 return target;
11338
11339 case ARM_BUILTIN_WZERO:
11340 target = gen_reg_rtx (DImode);
11341 emit_insn (gen_iwmmxt_clrdi (target));
11342 return target;
11343
11344 default:
11345 break;
11346 }
11347
e97a46ce 11348 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5a9335ef
NC
11349 if (d->code == (const enum arm_builtins) fcode)
11350 return arm_expand_binop_builtin (d->icode, arglist, target);
11351
e97a46ce 11352 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5a9335ef
NC
11353 if (d->code == (const enum arm_builtins) fcode)
11354 return arm_expand_unop_builtin (d->icode, arglist, target, 0);
11355
11356 /* @@@ Should really do something sensible here. */
11357 return NULL_RTX;
11358}
11359\f
d5b7b3ae
RE
11360/* Recursively search through all of the blocks in a function
11361 checking to see if any of the variables created in that
11362 function match the RTX called 'orig'. If they do then
11363 replace them with the RTX called 'new'. */
d5b7b3ae 11364static void
e32bac5b 11365replace_symbols_in_block (tree block, rtx orig, rtx new)
d5b7b3ae
RE
11366{
11367 for (; block; block = BLOCK_CHAIN (block))
11368 {
11369 tree sym;
11370
5895f793 11371 if (!TREE_USED (block))
d5b7b3ae
RE
11372 continue;
11373
11374 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
11375 {
11376 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
11377 || DECL_IGNORED_P (sym)
11378 || TREE_CODE (sym) != VAR_DECL
11379 || DECL_EXTERNAL (sym)
5895f793 11380 || !rtx_equal_p (DECL_RTL (sym), orig)
d5b7b3ae
RE
11381 )
11382 continue;
11383
7b8b8ade 11384 SET_DECL_RTL (sym, new);
d5b7b3ae
RE
11385 }
11386
11387 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
11388 }
11389}
11390
1d6e90ac
NC
11391/* Return the number (counting from 0) of
11392 the least significant set bit in MASK. */
11393
e32bac5b
RE
11394inline static int
11395number_of_first_bit_set (int mask)
d5b7b3ae
RE
11396{
11397 int bit;
11398
11399 for (bit = 0;
11400 (mask & (1 << bit)) == 0;
5895f793 11401 ++bit)
d5b7b3ae
RE
11402 continue;
11403
11404 return bit;
11405}
11406
11407/* Generate code to return from a thumb function.
11408 If 'reg_containing_return_addr' is -1, then the return address is
11409 actually on the stack, at the stack pointer. */
11410static void
e32bac5b 11411thumb_exit (FILE *f, int reg_containing_return_addr, rtx eh_ofs)
d5b7b3ae
RE
11412{
11413 unsigned regs_available_for_popping;
11414 unsigned regs_to_pop;
11415 int pops_needed;
11416 unsigned available;
11417 unsigned required;
11418 int mode;
11419 int size;
11420 int restore_a4 = FALSE;
11421
11422 /* Compute the registers we need to pop. */
11423 regs_to_pop = 0;
11424 pops_needed = 0;
11425
11426 /* There is an assumption here, that if eh_ofs is not NULL, the
11427 normal return address will have been pushed. */
11428 if (reg_containing_return_addr == -1 || eh_ofs)
11429 {
11430 /* When we are generating a return for __builtin_eh_return,
11431 reg_containing_return_addr must specify the return regno. */
11432 if (eh_ofs && reg_containing_return_addr == -1)
11433 abort ();
11434
11435 regs_to_pop |= 1 << LR_REGNUM;
5895f793 11436 ++pops_needed;
d5b7b3ae
RE
11437 }
11438
11439 if (TARGET_BACKTRACE)
11440 {
11441 /* Restore the (ARM) frame pointer and stack pointer. */
11442 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
11443 pops_needed += 2;
11444 }
11445
11446 /* If there is nothing to pop then just emit the BX instruction and
11447 return. */
11448 if (pops_needed == 0)
11449 {
11450 if (eh_ofs)
11451 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11452
11453 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11454 return;
11455 }
11456 /* Otherwise if we are not supporting interworking and we have not created
11457 a backtrace structure and the function was not entered in ARM mode then
11458 just pop the return address straight into the PC. */
5895f793
RE
11459 else if (!TARGET_INTERWORK
11460 && !TARGET_BACKTRACE
11461 && !is_called_in_ARM_mode (current_function_decl))
d5b7b3ae
RE
11462 {
11463 if (eh_ofs)
11464 {
11465 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
11466 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11467 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11468 }
11469 else
11470 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
11471
11472 return;
11473 }
11474
11475 /* Find out how many of the (return) argument registers we can corrupt. */
11476 regs_available_for_popping = 0;
11477
11478 /* If returning via __builtin_eh_return, the bottom three registers
11479 all contain information needed for the return. */
11480 if (eh_ofs)
11481 size = 12;
11482 else
11483 {
11484#ifdef RTX_CODE
11485 /* If we can deduce the registers used from the function's
11486 return value. This is more reliable that examining
11487 regs_ever_live[] because that will be set if the register is
11488 ever used in the function, not just if the register is used
11489 to hold a return value. */
11490
11491 if (current_function_return_rtx != 0)
11492 mode = GET_MODE (current_function_return_rtx);
11493 else
11494#endif
11495 mode = DECL_MODE (DECL_RESULT (current_function_decl));
11496
11497 size = GET_MODE_SIZE (mode);
11498
11499 if (size == 0)
11500 {
11501 /* In a void function we can use any argument register.
11502 In a function that returns a structure on the stack
11503 we can use the second and third argument registers. */
11504 if (mode == VOIDmode)
11505 regs_available_for_popping =
11506 (1 << ARG_REGISTER (1))
11507 | (1 << ARG_REGISTER (2))
11508 | (1 << ARG_REGISTER (3));
11509 else
11510 regs_available_for_popping =
11511 (1 << ARG_REGISTER (2))
11512 | (1 << ARG_REGISTER (3));
11513 }
11514 else if (size <= 4)
11515 regs_available_for_popping =
11516 (1 << ARG_REGISTER (2))
11517 | (1 << ARG_REGISTER (3));
11518 else if (size <= 8)
11519 regs_available_for_popping =
11520 (1 << ARG_REGISTER (3));
11521 }
11522
11523 /* Match registers to be popped with registers into which we pop them. */
11524 for (available = regs_available_for_popping,
11525 required = regs_to_pop;
11526 required != 0 && available != 0;
11527 available &= ~(available & - available),
11528 required &= ~(required & - required))
11529 -- pops_needed;
11530
11531 /* If we have any popping registers left over, remove them. */
11532 if (available > 0)
5895f793 11533 regs_available_for_popping &= ~available;
d5b7b3ae
RE
11534
11535 /* Otherwise if we need another popping register we can use
11536 the fourth argument register. */
11537 else if (pops_needed)
11538 {
11539 /* If we have not found any free argument registers and
11540 reg a4 contains the return address, we must move it. */
11541 if (regs_available_for_popping == 0
11542 && reg_containing_return_addr == LAST_ARG_REGNUM)
11543 {
11544 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
11545 reg_containing_return_addr = LR_REGNUM;
11546 }
11547 else if (size > 12)
11548 {
11549 /* Register a4 is being used to hold part of the return value,
11550 but we have dire need of a free, low register. */
11551 restore_a4 = TRUE;
11552
11553 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
11554 }
11555
11556 if (reg_containing_return_addr != LAST_ARG_REGNUM)
11557 {
11558 /* The fourth argument register is available. */
11559 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
11560
5895f793 11561 --pops_needed;
d5b7b3ae
RE
11562 }
11563 }
11564
11565 /* Pop as many registers as we can. */
980e61bb
DJ
11566 thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
11567 regs_available_for_popping);
d5b7b3ae
RE
11568
11569 /* Process the registers we popped. */
11570 if (reg_containing_return_addr == -1)
11571 {
11572 /* The return address was popped into the lowest numbered register. */
5895f793 11573 regs_to_pop &= ~(1 << LR_REGNUM);
d5b7b3ae
RE
11574
11575 reg_containing_return_addr =
11576 number_of_first_bit_set (regs_available_for_popping);
11577
11578 /* Remove this register for the mask of available registers, so that
6bc82793 11579 the return address will not be corrupted by further pops. */
5895f793 11580 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
d5b7b3ae
RE
11581 }
11582
11583 /* If we popped other registers then handle them here. */
11584 if (regs_available_for_popping)
11585 {
11586 int frame_pointer;
11587
11588 /* Work out which register currently contains the frame pointer. */
11589 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
11590
11591 /* Move it into the correct place. */
11592 asm_fprintf (f, "\tmov\t%r, %r\n",
11593 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
11594
11595 /* (Temporarily) remove it from the mask of popped registers. */
5895f793
RE
11596 regs_available_for_popping &= ~(1 << frame_pointer);
11597 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
d5b7b3ae
RE
11598
11599 if (regs_available_for_popping)
11600 {
11601 int stack_pointer;
11602
11603 /* We popped the stack pointer as well,
11604 find the register that contains it. */
11605 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
11606
11607 /* Move it into the stack register. */
11608 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
11609
11610 /* At this point we have popped all necessary registers, so
11611 do not worry about restoring regs_available_for_popping
11612 to its correct value:
11613
11614 assert (pops_needed == 0)
11615 assert (regs_available_for_popping == (1 << frame_pointer))
11616 assert (regs_to_pop == (1 << STACK_POINTER)) */
11617 }
11618 else
11619 {
11620 /* Since we have just move the popped value into the frame
11621 pointer, the popping register is available for reuse, and
11622 we know that we still have the stack pointer left to pop. */
11623 regs_available_for_popping |= (1 << frame_pointer);
11624 }
11625 }
11626
11627 /* If we still have registers left on the stack, but we no longer have
11628 any registers into which we can pop them, then we must move the return
11629 address into the link register and make available the register that
11630 contained it. */
11631 if (regs_available_for_popping == 0 && pops_needed > 0)
11632 {
11633 regs_available_for_popping |= 1 << reg_containing_return_addr;
11634
11635 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
11636 reg_containing_return_addr);
11637
11638 reg_containing_return_addr = LR_REGNUM;
11639 }
11640
11641 /* If we have registers left on the stack then pop some more.
11642 We know that at most we will want to pop FP and SP. */
11643 if (pops_needed > 0)
11644 {
11645 int popped_into;
11646 int move_to;
11647
980e61bb
DJ
11648 thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
11649 regs_available_for_popping);
d5b7b3ae
RE
11650
11651 /* We have popped either FP or SP.
11652 Move whichever one it is into the correct register. */
11653 popped_into = number_of_first_bit_set (regs_available_for_popping);
11654 move_to = number_of_first_bit_set (regs_to_pop);
11655
11656 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
11657
5895f793 11658 regs_to_pop &= ~(1 << move_to);
d5b7b3ae 11659
5895f793 11660 --pops_needed;
d5b7b3ae
RE
11661 }
11662
11663 /* If we still have not popped everything then we must have only
11664 had one register available to us and we are now popping the SP. */
11665 if (pops_needed > 0)
11666 {
11667 int popped_into;
11668
980e61bb
DJ
11669 thumb_pushpop (f, regs_available_for_popping, FALSE, NULL,
11670 regs_available_for_popping);
d5b7b3ae
RE
11671
11672 popped_into = number_of_first_bit_set (regs_available_for_popping);
11673
11674 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
11675 /*
11676 assert (regs_to_pop == (1 << STACK_POINTER))
11677 assert (pops_needed == 1)
11678 */
11679 }
11680
11681 /* If necessary restore the a4 register. */
11682 if (restore_a4)
11683 {
11684 if (reg_containing_return_addr != LR_REGNUM)
11685 {
11686 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
11687 reg_containing_return_addr = LR_REGNUM;
11688 }
11689
11690 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11691 }
11692
11693 if (eh_ofs)
11694 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11695
11696 /* Return to caller. */
11697 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11698}
11699
980e61bb
DJ
11700/* Emit code to push or pop registers to or from the stack. F is the
11701 assembly file. MASK is the registers to push or pop. PUSH is
11702 non-zero if we should push, and zero if we should pop. For debugging
11703 output, if pushing, adjust CFA_OFFSET by the amount of space added
11704 to the stack. REAL_REGS should have the same number of bits set as
11705 MASK, and will be used instead (in the same order) to describe which
11706 registers were saved - this is used to mark the save slots when we
11707 push high registers after moving them to low registers. */
d5b7b3ae 11708static void
980e61bb 11709thumb_pushpop (FILE *f, int mask, int push, int *cfa_offset, int real_regs)
d5b7b3ae
RE
11710{
11711 int regno;
11712 int lo_mask = mask & 0xFF;
980e61bb 11713 int pushed_words = 0;
d5b7b3ae 11714
5895f793 11715 if (lo_mask == 0 && !push && (mask & (1 << 15)))
d5b7b3ae
RE
11716 {
11717 /* Special case. Do not generate a POP PC statement here, do it in
11718 thumb_exit() */
11719 thumb_exit (f, -1, NULL_RTX);
11720 return;
11721 }
11722
11723 fprintf (f, "\t%s\t{", push ? "push" : "pop");
11724
11725 /* Look at the low registers first. */
5895f793 11726 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
d5b7b3ae
RE
11727 {
11728 if (lo_mask & 1)
11729 {
11730 asm_fprintf (f, "%r", regno);
11731
11732 if ((lo_mask & ~1) != 0)
11733 fprintf (f, ", ");
980e61bb
DJ
11734
11735 pushed_words++;
d5b7b3ae
RE
11736 }
11737 }
11738
11739 if (push && (mask & (1 << LR_REGNUM)))
11740 {
11741 /* Catch pushing the LR. */
11742 if (mask & 0xFF)
11743 fprintf (f, ", ");
11744
11745 asm_fprintf (f, "%r", LR_REGNUM);
980e61bb
DJ
11746
11747 pushed_words++;
d5b7b3ae
RE
11748 }
11749 else if (!push && (mask & (1 << PC_REGNUM)))
11750 {
11751 /* Catch popping the PC. */
11752 if (TARGET_INTERWORK || TARGET_BACKTRACE)
11753 {
11754 /* The PC is never poped directly, instead
11755 it is popped into r3 and then BX is used. */
11756 fprintf (f, "}\n");
11757
11758 thumb_exit (f, -1, NULL_RTX);
11759
11760 return;
11761 }
11762 else
11763 {
11764 if (mask & 0xFF)
11765 fprintf (f, ", ");
11766
11767 asm_fprintf (f, "%r", PC_REGNUM);
11768 }
11769 }
11770
11771 fprintf (f, "}\n");
980e61bb
DJ
11772
11773 if (push && pushed_words && dwarf2out_do_frame ())
11774 {
11775 char *l = dwarf2out_cfi_label ();
11776 int pushed_mask = real_regs;
11777
11778 *cfa_offset += pushed_words * 4;
11779 dwarf2out_def_cfa (l, SP_REGNUM, *cfa_offset);
11780
11781 pushed_words = 0;
11782 pushed_mask = real_regs;
11783 for (regno = 0; regno <= 14; regno++, pushed_mask >>= 1)
11784 {
11785 if (pushed_mask & 1)
11786 dwarf2out_reg_save (l, regno, 4 * pushed_words++ - *cfa_offset);
11787 }
11788 }
d5b7b3ae
RE
11789}
11790\f
11791void
e32bac5b 11792thumb_final_prescan_insn (rtx insn)
d5b7b3ae 11793{
d5b7b3ae 11794 if (flag_print_asm_name)
9d98a694
AO
11795 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
11796 INSN_ADDRESSES (INSN_UID (insn)));
d5b7b3ae
RE
11797}
11798
11799int
e32bac5b 11800thumb_shiftable_const (unsigned HOST_WIDE_INT val)
d5b7b3ae
RE
11801{
11802 unsigned HOST_WIDE_INT mask = 0xff;
11803 int i;
11804
11805 if (val == 0) /* XXX */
11806 return 0;
11807
11808 for (i = 0; i < 25; i++)
11809 if ((val & (mask << i)) == val)
11810 return 1;
11811
11812 return 0;
11813}
11814
825dda42 11815/* Returns nonzero if the current function contains,
d5b7b3ae
RE
11816 or might contain a far jump. */
11817int
e32bac5b 11818thumb_far_jump_used_p (int in_prologue)
d5b7b3ae
RE
11819{
11820 rtx insn;
11821
11822 /* This test is only important for leaf functions. */
5895f793 11823 /* assert (!leaf_function_p ()); */
d5b7b3ae
RE
11824
11825 /* If we have already decided that far jumps may be used,
11826 do not bother checking again, and always return true even if
11827 it turns out that they are not being used. Once we have made
11828 the decision that far jumps are present (and that hence the link
11829 register will be pushed onto the stack) we cannot go back on it. */
11830 if (cfun->machine->far_jump_used)
11831 return 1;
11832
11833 /* If this function is not being called from the prologue/epilogue
11834 generation code then it must be being called from the
11835 INITIAL_ELIMINATION_OFFSET macro. */
5895f793 11836 if (!in_prologue)
d5b7b3ae
RE
11837 {
11838 /* In this case we know that we are being asked about the elimination
11839 of the arg pointer register. If that register is not being used,
11840 then there are no arguments on the stack, and we do not have to
11841 worry that a far jump might force the prologue to push the link
11842 register, changing the stack offsets. In this case we can just
11843 return false, since the presence of far jumps in the function will
11844 not affect stack offsets.
11845
11846 If the arg pointer is live (or if it was live, but has now been
11847 eliminated and so set to dead) then we do have to test to see if
11848 the function might contain a far jump. This test can lead to some
11849 false negatives, since before reload is completed, then length of
11850 branch instructions is not known, so gcc defaults to returning their
11851 longest length, which in turn sets the far jump attribute to true.
11852
11853 A false negative will not result in bad code being generated, but it
11854 will result in a needless push and pop of the link register. We
11855 hope that this does not occur too often. */
11856 if (regs_ever_live [ARG_POINTER_REGNUM])
11857 cfun->machine->arg_pointer_live = 1;
5895f793 11858 else if (!cfun->machine->arg_pointer_live)
d5b7b3ae
RE
11859 return 0;
11860 }
11861
11862 /* Check to see if the function contains a branch
11863 insn with the far jump attribute set. */
11864 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11865 {
11866 if (GET_CODE (insn) == JUMP_INSN
11867 /* Ignore tablejump patterns. */
11868 && GET_CODE (PATTERN (insn)) != ADDR_VEC
11869 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
11870 && get_attr_far_jump (insn) == FAR_JUMP_YES
11871 )
11872 {
9a9f7594 11873 /* Record the fact that we have decided that
d5b7b3ae
RE
11874 the function does use far jumps. */
11875 cfun->machine->far_jump_used = 1;
11876 return 1;
11877 }
11878 }
11879
11880 return 0;
11881}
11882
825dda42 11883/* Return nonzero if FUNC must be entered in ARM mode. */
d5b7b3ae 11884int
e32bac5b 11885is_called_in_ARM_mode (tree func)
d5b7b3ae
RE
11886{
11887 if (TREE_CODE (func) != FUNCTION_DECL)
11888 abort ();
11889
11890 /* Ignore the problem about functions whoes address is taken. */
11891 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
11892 return TRUE;
11893
11894#ifdef ARM_PE
91d231cb 11895 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
d5b7b3ae
RE
11896#else
11897 return FALSE;
11898#endif
11899}
11900
d6b4baa4 11901/* The bits which aren't usefully expanded as rtl. */
cd2b33d0 11902const char *
e32bac5b 11903thumb_unexpanded_epilogue (void)
d5b7b3ae
RE
11904{
11905 int regno;
11906 int live_regs_mask = 0;
11907 int high_regs_pushed = 0;
11908 int leaf_function = leaf_function_p ();
11909 int had_to_push_lr;
11910 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
11911
11912 if (return_used_this_function)
11913 return "";
11914
58e60158
AN
11915 if (IS_NAKED (arm_current_func_type ()))
11916 return "";
11917
d5b7b3ae 11918 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
aeaf4d25 11919 if (THUMB_REG_PUSHED_P (regno))
d5b7b3ae
RE
11920 live_regs_mask |= 1 << regno;
11921
11922 for (regno = 8; regno < 13; regno++)
aeaf4d25
AN
11923 if (THUMB_REG_PUSHED_P (regno))
11924 high_regs_pushed++;
d5b7b3ae
RE
11925
11926 /* The prolog may have pushed some high registers to use as
093354e0 11927 work registers. eg the testsuite file:
d5b7b3ae
RE
11928 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
11929 compiles to produce:
11930 push {r4, r5, r6, r7, lr}
11931 mov r7, r9
11932 mov r6, r8
11933 push {r6, r7}
11934 as part of the prolog. We have to undo that pushing here. */
11935
11936 if (high_regs_pushed)
11937 {
11938 int mask = live_regs_mask;
11939 int next_hi_reg;
11940 int size;
11941 int mode;
11942
11943#ifdef RTX_CODE
11944 /* If we can deduce the registers used from the function's return value.
11945 This is more reliable that examining regs_ever_live[] because that
11946 will be set if the register is ever used in the function, not just if
11947 the register is used to hold a return value. */
11948
11949 if (current_function_return_rtx != 0)
11950 mode = GET_MODE (current_function_return_rtx);
11951 else
11952#endif
11953 mode = DECL_MODE (DECL_RESULT (current_function_decl));
11954
11955 size = GET_MODE_SIZE (mode);
11956
11957 /* Unless we are returning a type of size > 12 register r3 is
11958 available. */
11959 if (size < 13)
11960 mask |= 1 << 3;
11961
11962 if (mask == 0)
11963 /* Oh dear! We have no low registers into which we can pop
11964 high registers! */
400500c4
RK
11965 internal_error
11966 ("no low registers available for popping high registers");
d5b7b3ae
RE
11967
11968 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
aeaf4d25 11969 if (THUMB_REG_PUSHED_P (next_hi_reg))
d5b7b3ae
RE
11970 break;
11971
11972 while (high_regs_pushed)
11973 {
11974 /* Find lo register(s) into which the high register(s) can
11975 be popped. */
11976 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11977 {
11978 if (mask & (1 << regno))
11979 high_regs_pushed--;
11980 if (high_regs_pushed == 0)
11981 break;
11982 }
11983
11984 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
11985
d6b4baa4 11986 /* Pop the values into the low register(s). */
980e61bb 11987 thumb_pushpop (asm_out_file, mask, 0, NULL, mask);
d5b7b3ae
RE
11988
11989 /* Move the value(s) into the high registers. */
11990 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11991 {
11992 if (mask & (1 << regno))
11993 {
11994 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
11995 regno);
11996
11997 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
aeaf4d25 11998 if (THUMB_REG_PUSHED_P (next_hi_reg))
d5b7b3ae
RE
11999 break;
12000 }
12001 }
12002 }
12003 }
12004
5895f793 12005 had_to_push_lr = (live_regs_mask || !leaf_function
d5b7b3ae
RE
12006 || thumb_far_jump_used_p (1));
12007
12008 if (TARGET_BACKTRACE
12009 && ((live_regs_mask & 0xFF) == 0)
12010 && regs_ever_live [LAST_ARG_REGNUM] != 0)
12011 {
12012 /* The stack backtrace structure creation code had to
12013 push R7 in order to get a work register, so we pop
d6b4baa4 12014 it now. */
d5b7b3ae
RE
12015 live_regs_mask |= (1 << LAST_LO_REGNUM);
12016 }
12017
12018 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
12019 {
12020 if (had_to_push_lr
5895f793
RE
12021 && !is_called_in_ARM_mode (current_function_decl)
12022 && !eh_ofs)
d5b7b3ae
RE
12023 live_regs_mask |= 1 << PC_REGNUM;
12024
12025 /* Either no argument registers were pushed or a backtrace
12026 structure was created which includes an adjusted stack
12027 pointer, so just pop everything. */
12028 if (live_regs_mask)
980e61bb
DJ
12029 thumb_pushpop (asm_out_file, live_regs_mask, FALSE, NULL,
12030 live_regs_mask);
d5b7b3ae
RE
12031
12032 if (eh_ofs)
12033 thumb_exit (asm_out_file, 2, eh_ofs);
12034 /* We have either just popped the return address into the
12035 PC or it is was kept in LR for the entire function or
12036 it is still on the stack because we do not want to
12037 return by doing a pop {pc}. */
12038 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
12039 thumb_exit (asm_out_file,
12040 (had_to_push_lr
12041 && is_called_in_ARM_mode (current_function_decl)) ?
12042 -1 : LR_REGNUM, NULL_RTX);
12043 }
12044 else
12045 {
12046 /* Pop everything but the return address. */
5895f793 12047 live_regs_mask &= ~(1 << PC_REGNUM);
d5b7b3ae
RE
12048
12049 if (live_regs_mask)
980e61bb
DJ
12050 thumb_pushpop (asm_out_file, live_regs_mask, FALSE, NULL,
12051 live_regs_mask);
d5b7b3ae
RE
12052
12053 if (had_to_push_lr)
12054 /* Get the return address into a temporary register. */
980e61bb
DJ
12055 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0, NULL,
12056 1 << LAST_ARG_REGNUM);
d5b7b3ae
RE
12057
12058 /* Remove the argument registers that were pushed onto the stack. */
12059 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
12060 SP_REGNUM, SP_REGNUM,
12061 current_function_pretend_args_size);
12062
12063 if (eh_ofs)
12064 thumb_exit (asm_out_file, 2, eh_ofs);
12065 else
12066 thumb_exit (asm_out_file,
12067 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
12068 }
12069
12070 return "";
12071}
12072
12073/* Functions to save and restore machine-specific function data. */
e2500fed 12074static struct machine_function *
e32bac5b 12075arm_init_machine_status (void)
d5b7b3ae 12076{
e2500fed
GK
12077 struct machine_function *machine;
12078 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
6d3d9133 12079
e2500fed
GK
12080#if ARM_FT_UNKNOWN != 0
12081 machine->func_type = ARM_FT_UNKNOWN;
6d3d9133 12082#endif
e2500fed 12083 return machine;
f7a80099
NC
12084}
12085
d5b7b3ae
RE
12086/* Return an RTX indicating where the return address to the
12087 calling function can be found. */
12088rtx
e32bac5b 12089arm_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
d5b7b3ae 12090{
d5b7b3ae
RE
12091 if (count != 0)
12092 return NULL_RTX;
12093
9e2f7ec7
DD
12094 if (TARGET_APCS_32)
12095 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
12096 else
d5b7b3ae 12097 {
9e2f7ec7 12098 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
d5b7b3ae 12099 GEN_INT (RETURN_ADDR_MASK26));
9e2f7ec7 12100 return get_func_hard_reg_initial_val (cfun, lr);
d5b7b3ae 12101 }
d5b7b3ae
RE
12102}
12103
12104/* Do anything needed before RTL is emitted for each function. */
12105void
e32bac5b 12106arm_init_expanders (void)
d5b7b3ae
RE
12107{
12108 /* Arrange to initialize and mark the machine per-function status. */
12109 init_machine_status = arm_init_machine_status;
d5b7b3ae
RE
12110}
12111
0977774b 12112HOST_WIDE_INT
e32bac5b 12113thumb_get_frame_size (void)
0977774b
JT
12114{
12115 int regno;
12116
0c2ca901 12117 int base_size = ROUND_UP_WORD (get_frame_size ());
0977774b
JT
12118 int count_regs = 0;
12119 int entry_size = 0;
c231c91e 12120 int leaf;
0977774b
JT
12121
12122 if (! TARGET_THUMB)
12123 abort ();
12124
12125 if (! TARGET_ATPCS)
12126 return base_size;
12127
12128 /* We need to know if we are a leaf function. Unfortunately, it
12129 is possible to be called after start_sequence has been called,
12130 which causes get_insns to return the insns for the sequence,
12131 not the function, which will cause leaf_function_p to return
12132 the incorrect result.
12133
12134 To work around this, we cache the computed frame size. This
12135 works because we will only be calling RTL expanders that need
12136 to know about leaf functions once reload has completed, and the
12137 frame size cannot be changed after that time, so we can safely
12138 use the cached value. */
12139
12140 if (reload_completed)
12141 return cfun->machine->frame_size;
12142
c231c91e
RE
12143 leaf = leaf_function_p ();
12144
12145 /* A leaf function does not need any stack alignment if it has nothing
12146 on the stack. */
12147 if (leaf && base_size == 0)
12148 {
12149 cfun->machine->frame_size = 0;
12150 return 0;
12151 }
12152
0977774b
JT
12153 /* We know that SP will be word aligned on entry, and we must
12154 preserve that condition at any subroutine call. But those are
12155 the only constraints. */
12156
12157 /* Space for variadic functions. */
12158 if (current_function_pretend_args_size)
12159 entry_size += current_function_pretend_args_size;
12160
12161 /* Space for pushed lo registers. */
12162 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
12163 if (THUMB_REG_PUSHED_P (regno))
12164 count_regs++;
12165
12166 /* Space for backtrace structure. */
12167 if (TARGET_BACKTRACE)
12168 {
12169 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
12170 entry_size += 20;
12171 else
12172 entry_size += 16;
12173 }
12174
c231c91e 12175 if (count_regs || !leaf || thumb_far_jump_used_p (1))
0977774b
JT
12176 count_regs++; /* LR */
12177
12178 entry_size += count_regs * 4;
12179 count_regs = 0;
12180
12181 /* Space for pushed hi regs. */
12182 for (regno = 8; regno < 13; regno++)
12183 if (THUMB_REG_PUSHED_P (regno))
12184 count_regs++;
12185
12186 entry_size += count_regs * 4;
12187
12188 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
12189 base_size += 4;
12190 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
12191 abort ();
12192
12193 cfun->machine->frame_size = base_size;
12194
12195 return base_size;
12196}
12197
d5b7b3ae
RE
12198/* Generate the rest of a function's prologue. */
12199void
e32bac5b 12200thumb_expand_prologue (void)
d5b7b3ae 12201{
980e61bb
DJ
12202 rtx insn, dwarf;
12203
0977774b 12204 HOST_WIDE_INT amount = (thumb_get_frame_size ()
d5b7b3ae 12205 + current_function_outgoing_args_size);
6d3d9133
NC
12206 unsigned long func_type;
12207
12208 func_type = arm_current_func_type ();
d5b7b3ae
RE
12209
12210 /* Naked functions don't have prologues. */
6d3d9133 12211 if (IS_NAKED (func_type))
d5b7b3ae
RE
12212 return;
12213
6d3d9133
NC
12214 if (IS_INTERRUPT (func_type))
12215 {
c725bd79 12216 error ("interrupt Service Routines cannot be coded in Thumb mode");
6d3d9133
NC
12217 return;
12218 }
12219
d5b7b3ae 12220 if (frame_pointer_needed)
980e61bb
DJ
12221 {
12222 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
12223 RTX_FRAME_RELATED_P (insn) = 1;
12224 }
d5b7b3ae
RE
12225
12226 if (amount)
12227 {
0c2ca901 12228 amount = ROUND_UP_WORD (amount);
d5b7b3ae
RE
12229
12230 if (amount < 512)
980e61bb
DJ
12231 {
12232 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12233 GEN_INT (- amount)));
12234 RTX_FRAME_RELATED_P (insn) = 1;
12235 }
d5b7b3ae
RE
12236 else
12237 {
12238 int regno;
12239 rtx reg;
12240
12241 /* The stack decrement is too big for an immediate value in a single
12242 insn. In theory we could issue multiple subtracts, but after
12243 three of them it becomes more space efficient to place the full
12244 value in the constant pool and load into a register. (Also the
12245 ARM debugger really likes to see only one stack decrement per
12246 function). So instead we look for a scratch register into which
12247 we can load the decrement, and then we subtract this from the
12248 stack pointer. Unfortunately on the thumb the only available
12249 scratch registers are the argument registers, and we cannot use
12250 these as they may hold arguments to the function. Instead we
12251 attempt to locate a call preserved register which is used by this
12252 function. If we can find one, then we know that it will have
12253 been pushed at the start of the prologue and so we can corrupt
12254 it now. */
12255 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
aeaf4d25 12256 if (THUMB_REG_PUSHED_P (regno)
5895f793
RE
12257 && !(frame_pointer_needed
12258 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
d5b7b3ae
RE
12259 break;
12260
aeaf4d25 12261 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
d5b7b3ae
RE
12262 {
12263 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
12264
6bc82793 12265 /* Choose an arbitrary, non-argument low register. */
d5b7b3ae
RE
12266 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
12267
12268 /* Save it by copying it into a high, scratch register. */
c14a3a45
NC
12269 emit_insn (gen_movsi (spare, reg));
12270 /* Add a USE to stop propagate_one_insn() from barfing. */
6bacc7b0 12271 emit_insn (gen_prologue_use (spare));
d5b7b3ae
RE
12272
12273 /* Decrement the stack. */
1d6e90ac 12274 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
980e61bb
DJ
12275 insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
12276 stack_pointer_rtx, reg));
12277 RTX_FRAME_RELATED_P (insn) = 1;
12278 dwarf = gen_rtx_SET (SImode, stack_pointer_rtx,
12279 plus_constant (stack_pointer_rtx,
12280 GEN_INT (- amount)));
12281 RTX_FRAME_RELATED_P (dwarf) = 1;
12282 REG_NOTES (insn)
12283 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
12284 REG_NOTES (insn));
d5b7b3ae
RE
12285
12286 /* Restore the low register's original value. */
12287 emit_insn (gen_movsi (reg, spare));
12288
12289 /* Emit a USE of the restored scratch register, so that flow
12290 analysis will not consider the restore redundant. The
12291 register won't be used again in this function and isn't
12292 restored by the epilogue. */
6bacc7b0 12293 emit_insn (gen_prologue_use (reg));
d5b7b3ae
RE
12294 }
12295 else
12296 {
12297 reg = gen_rtx (REG, SImode, regno);
12298
1d6e90ac 12299 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
980e61bb
DJ
12300
12301 insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
12302 stack_pointer_rtx, reg));
12303 RTX_FRAME_RELATED_P (insn) = 1;
12304 dwarf = gen_rtx_SET (SImode, stack_pointer_rtx,
12305 plus_constant (stack_pointer_rtx,
12306 GEN_INT (- amount)));
12307 RTX_FRAME_RELATED_P (dwarf) = 1;
12308 REG_NOTES (insn)
12309 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
12310 REG_NOTES (insn));
d5b7b3ae
RE
12311 }
12312 }
12313 }
12314
70f4f91c 12315 if (current_function_profile || TARGET_NO_SCHED_PRO)
d5b7b3ae
RE
12316 emit_insn (gen_blockage ());
12317}
12318
12319void
e32bac5b 12320thumb_expand_epilogue (void)
d5b7b3ae 12321{
0977774b 12322 HOST_WIDE_INT amount = (thumb_get_frame_size ()
d5b7b3ae 12323 + current_function_outgoing_args_size);
defc0463
RE
12324 int regno;
12325
6d3d9133
NC
12326 /* Naked functions don't have prologues. */
12327 if (IS_NAKED (arm_current_func_type ()))
d5b7b3ae
RE
12328 return;
12329
12330 if (frame_pointer_needed)
12331 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
12332 else if (amount)
12333 {
0c2ca901 12334 amount = ROUND_UP_WORD (amount);
d5b7b3ae
RE
12335
12336 if (amount < 512)
12337 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12338 GEN_INT (amount)));
12339 else
12340 {
12341 /* r3 is always free in the epilogue. */
12342 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
12343
12344 emit_insn (gen_movsi (reg, GEN_INT (amount)));
12345 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
12346 }
12347 }
12348
12349 /* Emit a USE (stack_pointer_rtx), so that
12350 the stack adjustment will not be deleted. */
6bacc7b0 12351 emit_insn (gen_prologue_use (stack_pointer_rtx));
d5b7b3ae 12352
70f4f91c 12353 if (current_function_profile || TARGET_NO_SCHED_PRO)
d5b7b3ae 12354 emit_insn (gen_blockage ());
defc0463
RE
12355
12356 /* Emit a clobber for each insn that will be restored in the epilogue,
12357 so that flow2 will get register lifetimes correct. */
12358 for (regno = 0; regno < 13; regno++)
12359 if (regs_ever_live[regno] && !call_used_regs[regno])
12360 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, regno)));
12361
12362 if (! regs_ever_live[LR_REGNUM])
12363 emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
d5b7b3ae
RE
12364}
12365
08c148a8 12366static void
e32bac5b 12367thumb_output_function_prologue (FILE *f, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
d5b7b3ae
RE
12368{
12369 int live_regs_mask = 0;
12370 int high_regs_pushed = 0;
980e61bb 12371 int cfa_offset = 0;
d5b7b3ae
RE
12372 int regno;
12373
6d3d9133 12374 if (IS_NAKED (arm_current_func_type ()))
d5b7b3ae
RE
12375 return;
12376
12377 if (is_called_in_ARM_mode (current_function_decl))
12378 {
12379 const char * name;
12380
12381 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
12382 abort ();
12383 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
12384 abort ();
12385 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12386
12387 /* Generate code sequence to switch us into Thumb mode. */
12388 /* The .code 32 directive has already been emitted by
6d77b53e 12389 ASM_DECLARE_FUNCTION_NAME. */
d5b7b3ae
RE
12390 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
12391 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
12392
12393 /* Generate a label, so that the debugger will notice the
12394 change in instruction sets. This label is also used by
12395 the assembler to bypass the ARM code when this function
12396 is called from a Thumb encoded function elsewhere in the
12397 same file. Hence the definition of STUB_NAME here must
d6b4baa4 12398 agree with the definition in gas/config/tc-arm.c. */
d5b7b3ae
RE
12399
12400#define STUB_NAME ".real_start_of"
12401
761c70aa 12402 fprintf (f, "\t.code\t16\n");
d5b7b3ae
RE
12403#ifdef ARM_PE
12404 if (arm_dllexport_name_p (name))
e5951263 12405 name = arm_strip_name_encoding (name);
d5b7b3ae
RE
12406#endif
12407 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
761c70aa 12408 fprintf (f, "\t.thumb_func\n");
d5b7b3ae
RE
12409 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
12410 }
12411
d5b7b3ae
RE
12412 if (current_function_pretend_args_size)
12413 {
3cb66fd7 12414 if (cfun->machine->uses_anonymous_args)
d5b7b3ae
RE
12415 {
12416 int num_pushes;
12417
761c70aa 12418 fprintf (f, "\tpush\t{");
d5b7b3ae 12419
e9d7b180 12420 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
d5b7b3ae
RE
12421
12422 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
12423 regno <= LAST_ARG_REGNUM;
5895f793 12424 regno++)
d5b7b3ae
RE
12425 asm_fprintf (f, "%r%s", regno,
12426 regno == LAST_ARG_REGNUM ? "" : ", ");
12427
761c70aa 12428 fprintf (f, "}\n");
d5b7b3ae
RE
12429 }
12430 else
12431 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
12432 SP_REGNUM, SP_REGNUM,
12433 current_function_pretend_args_size);
980e61bb
DJ
12434
12435 /* We don't need to record the stores for unwinding (would it
12436 help the debugger any if we did?), but record the change in
12437 the stack pointer. */
12438 if (dwarf2out_do_frame ())
12439 {
12440 char *l = dwarf2out_cfi_label ();
12441 cfa_offset = cfa_offset + current_function_pretend_args_size;
12442 dwarf2out_def_cfa (l, SP_REGNUM, cfa_offset);
12443 }
d5b7b3ae
RE
12444 }
12445
5895f793 12446 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
aeaf4d25 12447 if (THUMB_REG_PUSHED_P (regno))
d5b7b3ae
RE
12448 live_regs_mask |= 1 << regno;
12449
5895f793 12450 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
d5b7b3ae
RE
12451 live_regs_mask |= 1 << LR_REGNUM;
12452
12453 if (TARGET_BACKTRACE)
12454 {
12455 int offset;
12456 int work_register = 0;
12457 int wr;
12458
12459 /* We have been asked to create a stack backtrace structure.
12460 The code looks like this:
12461
12462 0 .align 2
12463 0 func:
12464 0 sub SP, #16 Reserve space for 4 registers.
12465 2 push {R7} Get a work register.
12466 4 add R7, SP, #20 Get the stack pointer before the push.
12467 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
12468 8 mov R7, PC Get hold of the start of this code plus 12.
12469 10 str R7, [SP, #16] Store it.
12470 12 mov R7, FP Get hold of the current frame pointer.
12471 14 str R7, [SP, #4] Store it.
12472 16 mov R7, LR Get hold of the current return address.
12473 18 str R7, [SP, #12] Store it.
12474 20 add R7, SP, #16 Point at the start of the backtrace structure.
12475 22 mov FP, R7 Put this value into the frame pointer. */
12476
12477 if ((live_regs_mask & 0xFF) == 0)
12478 {
12479 /* See if the a4 register is free. */
12480
12481 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
12482 work_register = LAST_ARG_REGNUM;
d6b4baa4 12483 else /* We must push a register of our own. */
d5b7b3ae
RE
12484 live_regs_mask |= (1 << LAST_LO_REGNUM);
12485 }
12486
12487 if (work_register == 0)
12488 {
12489 /* Select a register from the list that will be pushed to
12490 use as our work register. */
12491 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
12492 if ((1 << work_register) & live_regs_mask)
12493 break;
12494 }
12495
12496 asm_fprintf
12497 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
12498 SP_REGNUM, SP_REGNUM);
980e61bb
DJ
12499
12500 if (dwarf2out_do_frame ())
12501 {
12502 char *l = dwarf2out_cfi_label ();
12503 cfa_offset = cfa_offset + 16;
12504 dwarf2out_def_cfa (l, SP_REGNUM, cfa_offset);
12505 }
12506
d5b7b3ae 12507 if (live_regs_mask)
980e61bb 12508 thumb_pushpop (f, live_regs_mask, 1, &cfa_offset, live_regs_mask);
d5b7b3ae
RE
12509
12510 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
12511 if (wr & live_regs_mask)
12512 offset += 4;
12513
12514 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
12515 offset + 16 + current_function_pretend_args_size);
12516
12517 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12518 offset + 4);
12519
12520 /* Make sure that the instruction fetching the PC is in the right place
12521 to calculate "start of backtrace creation code + 12". */
12522 if (live_regs_mask)
12523 {
12524 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
12525 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12526 offset + 12);
12527 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
12528 ARM_HARD_FRAME_POINTER_REGNUM);
12529 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12530 offset);
12531 }
12532 else
12533 {
12534 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
12535 ARM_HARD_FRAME_POINTER_REGNUM);
12536 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12537 offset);
12538 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
12539 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12540 offset + 12);
12541 }
12542
12543 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
12544 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12545 offset + 8);
12546 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
12547 offset + 12);
12548 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
12549 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
12550 }
12551 else if (live_regs_mask)
980e61bb 12552 thumb_pushpop (f, live_regs_mask, 1, &cfa_offset, live_regs_mask);
d5b7b3ae
RE
12553
12554 for (regno = 8; regno < 13; regno++)
e26053d1
NC
12555 if (THUMB_REG_PUSHED_P (regno))
12556 high_regs_pushed++;
d5b7b3ae
RE
12557
12558 if (high_regs_pushed)
12559 {
12560 int pushable_regs = 0;
12561 int mask = live_regs_mask & 0xff;
12562 int next_hi_reg;
12563
12564 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
e26053d1
NC
12565 if (THUMB_REG_PUSHED_P (next_hi_reg))
12566 break;
d5b7b3ae
RE
12567
12568 pushable_regs = mask;
12569
12570 if (pushable_regs == 0)
12571 {
12572 /* Desperation time -- this probably will never happen. */
aeaf4d25 12573 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
d5b7b3ae
RE
12574 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
12575 mask = 1 << LAST_ARG_REGNUM;
12576 }
12577
12578 while (high_regs_pushed > 0)
12579 {
980e61bb
DJ
12580 int real_regs_mask = 0;
12581
d5b7b3ae
RE
12582 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
12583 {
12584 if (mask & (1 << regno))
12585 {
12586 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
12587
5895f793 12588 high_regs_pushed--;
980e61bb 12589 real_regs_mask |= (1 << next_hi_reg);
d5b7b3ae
RE
12590
12591 if (high_regs_pushed)
aeaf4d25
AN
12592 {
12593 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
12594 next_hi_reg--)
12595 if (THUMB_REG_PUSHED_P (next_hi_reg))
d5b7b3ae 12596 break;
aeaf4d25 12597 }
d5b7b3ae
RE
12598 else
12599 {
5895f793 12600 mask &= ~((1 << regno) - 1);
d5b7b3ae
RE
12601 break;
12602 }
12603 }
12604 }
980e61bb
DJ
12605
12606 thumb_pushpop (f, mask, 1, &cfa_offset, real_regs_mask);
d5b7b3ae
RE
12607 }
12608
12609 if (pushable_regs == 0
aeaf4d25 12610 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
d5b7b3ae
RE
12611 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
12612 }
12613}
12614
12615/* Handle the case of a double word load into a low register from
12616 a computed memory address. The computed address may involve a
12617 register which is overwritten by the load. */
cd2b33d0 12618const char *
e32bac5b 12619thumb_load_double_from_address (rtx *operands)
d5b7b3ae
RE
12620{
12621 rtx addr;
12622 rtx base;
12623 rtx offset;
12624 rtx arg1;
12625 rtx arg2;
12626
12627 if (GET_CODE (operands[0]) != REG)
400500c4 12628 abort ();
d5b7b3ae
RE
12629
12630 if (GET_CODE (operands[1]) != MEM)
400500c4 12631 abort ();
d5b7b3ae
RE
12632
12633 /* Get the memory address. */
12634 addr = XEXP (operands[1], 0);
12635
12636 /* Work out how the memory address is computed. */
12637 switch (GET_CODE (addr))
12638 {
12639 case REG:
12640 operands[2] = gen_rtx (MEM, SImode,
12641 plus_constant (XEXP (operands[1], 0), 4));
12642
12643 if (REGNO (operands[0]) == REGNO (addr))
12644 {
12645 output_asm_insn ("ldr\t%H0, %2", operands);
12646 output_asm_insn ("ldr\t%0, %1", operands);
12647 }
12648 else
12649 {
12650 output_asm_insn ("ldr\t%0, %1", operands);
12651 output_asm_insn ("ldr\t%H0, %2", operands);
12652 }
12653 break;
12654
12655 case CONST:
12656 /* Compute <address> + 4 for the high order load. */
12657 operands[2] = gen_rtx (MEM, SImode,
12658 plus_constant (XEXP (operands[1], 0), 4));
12659
12660 output_asm_insn ("ldr\t%0, %1", operands);
12661 output_asm_insn ("ldr\t%H0, %2", operands);
12662 break;
12663
12664 case PLUS:
12665 arg1 = XEXP (addr, 0);
12666 arg2 = XEXP (addr, 1);
12667
12668 if (CONSTANT_P (arg1))
12669 base = arg2, offset = arg1;
12670 else
12671 base = arg1, offset = arg2;
12672
12673 if (GET_CODE (base) != REG)
400500c4 12674 abort ();
d5b7b3ae
RE
12675
12676 /* Catch the case of <address> = <reg> + <reg> */
12677 if (GET_CODE (offset) == REG)
12678 {
12679 int reg_offset = REGNO (offset);
12680 int reg_base = REGNO (base);
12681 int reg_dest = REGNO (operands[0]);
12682
12683 /* Add the base and offset registers together into the
12684 higher destination register. */
12685 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
12686 reg_dest + 1, reg_base, reg_offset);
12687
12688 /* Load the lower destination register from the address in
12689 the higher destination register. */
12690 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
12691 reg_dest, reg_dest + 1);
12692
12693 /* Load the higher destination register from its own address
12694 plus 4. */
12695 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
12696 reg_dest + 1, reg_dest + 1);
12697 }
12698 else
12699 {
12700 /* Compute <address> + 4 for the high order load. */
12701 operands[2] = gen_rtx (MEM, SImode,
12702 plus_constant (XEXP (operands[1], 0), 4));
12703
12704 /* If the computed address is held in the low order register
12705 then load the high order register first, otherwise always
12706 load the low order register first. */
12707 if (REGNO (operands[0]) == REGNO (base))
12708 {
12709 output_asm_insn ("ldr\t%H0, %2", operands);
12710 output_asm_insn ("ldr\t%0, %1", operands);
12711 }
12712 else
12713 {
12714 output_asm_insn ("ldr\t%0, %1", operands);
12715 output_asm_insn ("ldr\t%H0, %2", operands);
12716 }
12717 }
12718 break;
12719
12720 case LABEL_REF:
12721 /* With no registers to worry about we can just load the value
12722 directly. */
12723 operands[2] = gen_rtx (MEM, SImode,
12724 plus_constant (XEXP (operands[1], 0), 4));
12725
12726 output_asm_insn ("ldr\t%H0, %2", operands);
12727 output_asm_insn ("ldr\t%0, %1", operands);
12728 break;
12729
12730 default:
400500c4 12731 abort ();
d5b7b3ae
RE
12732 break;
12733 }
12734
12735 return "";
12736}
12737
cd2b33d0 12738const char *
e32bac5b 12739thumb_output_move_mem_multiple (int n, rtx *operands)
d5b7b3ae
RE
12740{
12741 rtx tmp;
12742
12743 switch (n)
12744 {
12745 case 2:
ca356f3a 12746 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 12747 {
ca356f3a
RE
12748 tmp = operands[4];
12749 operands[4] = operands[5];
12750 operands[5] = tmp;
d5b7b3ae 12751 }
ca356f3a
RE
12752 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
12753 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
d5b7b3ae
RE
12754 break;
12755
12756 case 3:
ca356f3a 12757 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 12758 {
ca356f3a
RE
12759 tmp = operands[4];
12760 operands[4] = operands[5];
12761 operands[5] = tmp;
d5b7b3ae 12762 }
ca356f3a 12763 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 12764 {
ca356f3a
RE
12765 tmp = operands[5];
12766 operands[5] = operands[6];
12767 operands[6] = tmp;
d5b7b3ae 12768 }
ca356f3a 12769 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 12770 {
ca356f3a
RE
12771 tmp = operands[4];
12772 operands[4] = operands[5];
12773 operands[5] = tmp;
d5b7b3ae
RE
12774 }
12775
ca356f3a
RE
12776 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
12777 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
d5b7b3ae
RE
12778 break;
12779
12780 default:
12781 abort ();
12782 }
12783
12784 return "";
12785}
12786
1d6e90ac 12787/* Routines for generating rtl. */
d5b7b3ae 12788void
e32bac5b 12789thumb_expand_movstrqi (rtx *operands)
d5b7b3ae
RE
12790{
12791 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
12792 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
12793 HOST_WIDE_INT len = INTVAL (operands[2]);
12794 HOST_WIDE_INT offset = 0;
12795
12796 while (len >= 12)
12797 {
ca356f3a 12798 emit_insn (gen_movmem12b (out, in, out, in));
d5b7b3ae
RE
12799 len -= 12;
12800 }
12801
12802 if (len >= 8)
12803 {
ca356f3a 12804 emit_insn (gen_movmem8b (out, in, out, in));
d5b7b3ae
RE
12805 len -= 8;
12806 }
12807
12808 if (len >= 4)
12809 {
12810 rtx reg = gen_reg_rtx (SImode);
12811 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
12812 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
12813 len -= 4;
12814 offset += 4;
12815 }
12816
12817 if (len >= 2)
12818 {
12819 rtx reg = gen_reg_rtx (HImode);
12820 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
12821 plus_constant (in, offset))));
12822 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
12823 reg));
12824 len -= 2;
12825 offset += 2;
12826 }
12827
12828 if (len)
12829 {
12830 rtx reg = gen_reg_rtx (QImode);
12831 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
12832 plus_constant (in, offset))));
12833 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
12834 reg));
12835 }
12836}
12837
12838int
e32bac5b 12839thumb_cmp_operand (rtx op, enum machine_mode mode)
d5b7b3ae
RE
12840{
12841 return ((GET_CODE (op) == CONST_INT
c769a35d
RE
12842 && INTVAL (op) < 256
12843 && INTVAL (op) >= 0)
defc0463 12844 || s_register_operand (op, mode));
d5b7b3ae
RE
12845}
12846
c769a35d
RE
12847int
12848thumb_cmpneg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
12849{
12850 return (GET_CODE (op) == CONST_INT
12851 && INTVAL (op) < 0
12852 && INTVAL (op) > -256);
12853}
12854
defc0463
RE
12855/* Return TRUE if a result can be stored in OP without clobbering the
12856 condition code register. Prior to reload we only accept a
12857 register. After reload we have to be able to handle memory as
12858 well, since a pseudo may not get a hard reg and reload cannot
12859 handle output-reloads on jump insns.
d5b7b3ae 12860
defc0463
RE
12861 We could possibly handle mem before reload as well, but that might
12862 complicate things with the need to handle increment
12863 side-effects. */
d5b7b3ae 12864
defc0463
RE
12865int
12866thumb_cbrch_target_operand (rtx op, enum machine_mode mode)
12867{
12868 return (s_register_operand (op, mode)
12869 || ((reload_in_progress || reload_completed)
12870 && memory_operand (op, mode)));
d5b7b3ae
RE
12871}
12872
12873/* Handle storing a half-word to memory during reload. */
12874void
e32bac5b 12875thumb_reload_out_hi (rtx *operands)
d5b7b3ae
RE
12876{
12877 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
12878}
12879
e32bac5b 12880/* Handle reading a half-word from memory during reload. */
d5b7b3ae 12881void
e32bac5b 12882thumb_reload_in_hi (rtx *operands ATTRIBUTE_UNUSED)
d5b7b3ae
RE
12883{
12884 abort ();
12885}
12886
c27ba912
DM
12887/* Return the length of a function name prefix
12888 that starts with the character 'c'. */
12889static int
e32bac5b 12890arm_get_strip_length (int c)
c27ba912
DM
12891{
12892 switch (c)
12893 {
12894 ARM_NAME_ENCODING_LENGTHS
12895 default: return 0;
12896 }
12897}
12898
12899/* Return a pointer to a function's name with any
12900 and all prefix encodings stripped from it. */
12901const char *
e32bac5b 12902arm_strip_name_encoding (const char *name)
c27ba912
DM
12903{
12904 int skip;
12905
12906 while ((skip = arm_get_strip_length (* name)))
12907 name += skip;
12908
12909 return name;
12910}
12911
e1944073
KW
12912/* If there is a '*' anywhere in the name's prefix, then
12913 emit the stripped name verbatim, otherwise prepend an
12914 underscore if leading underscores are being used. */
e1944073 12915void
e32bac5b 12916arm_asm_output_labelref (FILE *stream, const char *name)
e1944073
KW
12917{
12918 int skip;
12919 int verbatim = 0;
12920
12921 while ((skip = arm_get_strip_length (* name)))
12922 {
12923 verbatim |= (*name == '*');
12924 name += skip;
12925 }
12926
12927 if (verbatim)
12928 fputs (name, stream);
12929 else
12930 asm_fprintf (stream, "%U%s", name);
12931}
12932
e2500fed
GK
12933rtx aof_pic_label;
12934
2b835d68 12935#ifdef AOF_ASSEMBLER
6354dc9b 12936/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 12937
32de079a
RE
12938struct pic_chain
12939{
62b10bbc 12940 struct pic_chain * next;
5f37d07c 12941 const char * symname;
32de079a
RE
12942};
12943
62b10bbc 12944static struct pic_chain * aof_pic_chain = NULL;
32de079a
RE
12945
12946rtx
e32bac5b 12947aof_pic_entry (rtx x)
32de079a 12948{
62b10bbc 12949 struct pic_chain ** chainp;
32de079a
RE
12950 int offset;
12951
12952 if (aof_pic_label == NULL_RTX)
12953 {
43cffd11 12954 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
12955 }
12956
12957 for (offset = 0, chainp = &aof_pic_chain; *chainp;
12958 offset += 4, chainp = &(*chainp)->next)
12959 if ((*chainp)->symname == XSTR (x, 0))
12960 return plus_constant (aof_pic_label, offset);
12961
12962 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
12963 (*chainp)->next = NULL;
12964 (*chainp)->symname = XSTR (x, 0);
12965 return plus_constant (aof_pic_label, offset);
12966}
12967
12968void
e32bac5b 12969aof_dump_pic_table (FILE *f)
32de079a 12970{
62b10bbc 12971 struct pic_chain * chain;
32de079a
RE
12972
12973 if (aof_pic_chain == NULL)
12974 return;
12975
dd18ae56
NC
12976 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
12977 PIC_OFFSET_TABLE_REGNUM,
12978 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
12979 fputs ("|x$adcons|\n", f);
12980
12981 for (chain = aof_pic_chain; chain; chain = chain->next)
12982 {
12983 fputs ("\tDCD\t", f);
12984 assemble_name (f, chain->symname);
12985 fputs ("\n", f);
12986 }
12987}
12988
2b835d68
RE
12989int arm_text_section_count = 1;
12990
12991char *
e32bac5b 12992aof_text_section (void )
2b835d68
RE
12993{
12994 static char buf[100];
2b835d68
RE
12995 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
12996 arm_text_section_count++);
12997 if (flag_pic)
12998 strcat (buf, ", PIC, REENTRANT");
12999 return buf;
13000}
13001
13002static int arm_data_section_count = 1;
13003
13004char *
e32bac5b 13005aof_data_section (void)
2b835d68
RE
13006{
13007 static char buf[100];
13008 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
13009 return buf;
13010}
13011
13012/* The AOF assembler is religiously strict about declarations of
13013 imported and exported symbols, so that it is impossible to declare
956d6950 13014 a function as imported near the beginning of the file, and then to
2b835d68
RE
13015 export it later on. It is, however, possible to delay the decision
13016 until all the functions in the file have been compiled. To get
13017 around this, we maintain a list of the imports and exports, and
13018 delete from it any that are subsequently defined. At the end of
13019 compilation we spit the remainder of the list out before the END
13020 directive. */
13021
13022struct import
13023{
62b10bbc 13024 struct import * next;
5f37d07c 13025 const char * name;
2b835d68
RE
13026};
13027
62b10bbc 13028static struct import * imports_list = NULL;
2b835d68
RE
13029
13030void
e32bac5b 13031aof_add_import (const char *name)
2b835d68 13032{
62b10bbc 13033 struct import * new;
2b835d68
RE
13034
13035 for (new = imports_list; new; new = new->next)
13036 if (new->name == name)
13037 return;
13038
13039 new = (struct import *) xmalloc (sizeof (struct import));
13040 new->next = imports_list;
13041 imports_list = new;
13042 new->name = name;
13043}
13044
13045void
e32bac5b 13046aof_delete_import (const char *name)
2b835d68 13047{
62b10bbc 13048 struct import ** old;
2b835d68
RE
13049
13050 for (old = &imports_list; *old; old = & (*old)->next)
13051 {
13052 if ((*old)->name == name)
13053 {
13054 *old = (*old)->next;
13055 return;
13056 }
13057 }
13058}
13059
13060int arm_main_function = 0;
13061
a5fe455b 13062static void
e32bac5b 13063aof_dump_imports (FILE *f)
2b835d68
RE
13064{
13065 /* The AOF assembler needs this to cause the startup code to be extracted
13066 from the library. Brining in __main causes the whole thing to work
13067 automagically. */
13068 if (arm_main_function)
13069 {
13070 text_section ();
13071 fputs ("\tIMPORT __main\n", f);
13072 fputs ("\tDCD __main\n", f);
13073 }
13074
13075 /* Now dump the remaining imports. */
13076 while (imports_list)
13077 {
13078 fprintf (f, "\tIMPORT\t");
13079 assemble_name (f, imports_list->name);
13080 fputc ('\n', f);
13081 imports_list = imports_list->next;
13082 }
13083}
5eb99654
KG
13084
13085static void
e32bac5b 13086aof_globalize_label (FILE *stream, const char *name)
5eb99654
KG
13087{
13088 default_globalize_label (stream, name);
13089 if (! strcmp (name, "main"))
13090 arm_main_function = 1;
13091}
a5fe455b 13092
1bc7c5b6 13093static void
f1777882 13094aof_file_start (void)
1bc7c5b6
ZW
13095{
13096 fputs ("__r0\tRN\t0\n", asm_out_file);
13097 fputs ("__a1\tRN\t0\n", asm_out_file);
13098 fputs ("__a2\tRN\t1\n", asm_out_file);
13099 fputs ("__a3\tRN\t2\n", asm_out_file);
13100 fputs ("__a4\tRN\t3\n", asm_out_file);
13101 fputs ("__v1\tRN\t4\n", asm_out_file);
13102 fputs ("__v2\tRN\t5\n", asm_out_file);
13103 fputs ("__v3\tRN\t6\n", asm_out_file);
13104 fputs ("__v4\tRN\t7\n", asm_out_file);
13105 fputs ("__v5\tRN\t8\n", asm_out_file);
13106 fputs ("__v6\tRN\t9\n", asm_out_file);
13107 fputs ("__sl\tRN\t10\n", asm_out_file);
13108 fputs ("__fp\tRN\t11\n", asm_out_file);
13109 fputs ("__ip\tRN\t12\n", asm_out_file);
13110 fputs ("__sp\tRN\t13\n", asm_out_file);
13111 fputs ("__lr\tRN\t14\n", asm_out_file);
13112 fputs ("__pc\tRN\t15\n", asm_out_file);
13113 fputs ("__f0\tFN\t0\n", asm_out_file);
13114 fputs ("__f1\tFN\t1\n", asm_out_file);
13115 fputs ("__f2\tFN\t2\n", asm_out_file);
13116 fputs ("__f3\tFN\t3\n", asm_out_file);
13117 fputs ("__f4\tFN\t4\n", asm_out_file);
13118 fputs ("__f5\tFN\t5\n", asm_out_file);
13119 fputs ("__f6\tFN\t6\n", asm_out_file);
13120 fputs ("__f7\tFN\t7\n", asm_out_file);
13121 text_section ();
13122}
13123
a5fe455b 13124static void
e32bac5b 13125aof_file_end (void)
a5fe455b
ZW
13126{
13127 if (flag_pic)
13128 aof_dump_pic_table (asm_out_file);
13129 aof_dump_imports (asm_out_file);
13130 fputs ("\tEND\n", asm_out_file);
13131}
2b835d68 13132#endif /* AOF_ASSEMBLER */
7c262518 13133
ebe413e5 13134#ifdef OBJECT_FORMAT_ELF
7c262518
RH
13135/* Switch to an arbitrary section NAME with attributes as specified
13136 by FLAGS. ALIGN specifies any known alignment requirements for
13137 the section; 0 if the default should be used.
13138
13139 Differs from the default elf version only in the prefix character
13140 used before the section type. */
13141
13142static void
e32bac5b 13143arm_elf_asm_named_section (const char *name, unsigned int flags)
7c262518 13144{
6a0a6ac4
AM
13145 char flagchars[10], *f = flagchars;
13146
13147 if (! named_section_first_declaration (name))
13148 {
13149 fprintf (asm_out_file, "\t.section\t%s\n", name);
13150 return;
13151 }
7c262518
RH
13152
13153 if (!(flags & SECTION_DEBUG))
13154 *f++ = 'a';
13155 if (flags & SECTION_WRITE)
13156 *f++ = 'w';
13157 if (flags & SECTION_CODE)
13158 *f++ = 'x';
13159 if (flags & SECTION_SMALL)
13160 *f++ = 's';
201556f0
JJ
13161 if (flags & SECTION_MERGE)
13162 *f++ = 'M';
13163 if (flags & SECTION_STRINGS)
13164 *f++ = 'S';
6a0a6ac4
AM
13165 if (flags & SECTION_TLS)
13166 *f++ = 'T';
7c262518
RH
13167 *f = '\0';
13168
6a0a6ac4 13169 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
7c262518 13170
6a0a6ac4
AM
13171 if (!(flags & SECTION_NOTYPE))
13172 {
13173 const char *type;
13174
13175 if (flags & SECTION_BSS)
13176 type = "nobits";
13177 else
13178 type = "progbits";
13179
13180 fprintf (asm_out_file, ",%%%s", type);
13181
13182 if (flags & SECTION_ENTSIZE)
13183 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
13184 }
13185
13186 putc ('\n', asm_out_file);
7c262518 13187}
ebe413e5 13188#endif
fb49053f
RH
13189
13190#ifndef ARM_PE
13191/* Symbols in the text segment can be accessed without indirecting via the
13192 constant pool; it may take an extra binary operation, but this is still
13193 faster than indirecting via memory. Don't do this when not optimizing,
13194 since we won't be calculating al of the offsets necessary to do this
13195 simplification. */
13196
13197static void
e32bac5b 13198arm_encode_section_info (tree decl, rtx rtl, int first)
fb49053f
RH
13199{
13200 /* This doesn't work with AOF syntax, since the string table may be in
13201 a different AREA. */
13202#ifndef AOF_ASSEMBLER
13203 if (optimize > 0 && TREE_CONSTANT (decl)
13204 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
c6a2438a 13205 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
fb49053f
RH
13206#endif
13207
13208 /* If we are referencing a function that is weak then encode a long call
13209 flag in the function name, otherwise if the function is static or
13210 or known to be defined in this file then encode a short call flag. */
13211 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
13212 {
13213 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
13214 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
13215 else if (! TREE_PUBLIC (decl))
13216 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
13217 }
13218}
13219#endif /* !ARM_PE */
483ab821 13220
4977bab6 13221static void
e32bac5b 13222arm_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
4977bab6
ZW
13223{
13224 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
13225 && !strcmp (prefix, "L"))
13226 {
13227 arm_ccfsm_state = 0;
13228 arm_target_insn = NULL;
13229 }
13230 default_internal_label (stream, prefix, labelno);
13231}
13232
c590b625
RH
13233/* Output code to add DELTA to the first argument, and then jump
13234 to FUNCTION. Used for C++ multiple inheritance. */
c590b625 13235static void
e32bac5b
RE
13236arm_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
13237 HOST_WIDE_INT delta,
13238 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
13239 tree function)
483ab821
MM
13240{
13241 int mi_delta = delta;
13242 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
13243 int shift = 0;
61f71b34 13244 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function)
483ab821
MM
13245 ? 1 : 0);
13246 if (mi_delta < 0)
13247 mi_delta = - mi_delta;
13248 while (mi_delta != 0)
13249 {
13250 if ((mi_delta & (3 << shift)) == 0)
13251 shift += 2;
13252 else
13253 {
13254 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
13255 mi_op, this_regno, this_regno,
13256 mi_delta & (0xff << shift));
13257 mi_delta &= ~(0xff << shift);
13258 shift += 8;
13259 }
13260 }
13261 fputs ("\tb\t", file);
13262 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
13263 if (NEED_PLT_RELOC)
13264 fputs ("(PLT)", file);
13265 fputc ('\n', file);
13266}
5a9335ef
NC
13267
13268int
6f5f2481 13269arm_emit_vector_const (FILE *file, rtx x)
5a9335ef
NC
13270{
13271 int i;
13272 const char * pattern;
13273
13274 if (GET_CODE (x) != CONST_VECTOR)
13275 abort ();
13276
13277 switch (GET_MODE (x))
13278 {
13279 case V2SImode: pattern = "%08x"; break;
13280 case V4HImode: pattern = "%04x"; break;
13281 case V8QImode: pattern = "%02x"; break;
13282 default: abort ();
13283 }
13284
13285 fprintf (file, "0x");
13286 for (i = CONST_VECTOR_NUNITS (x); i--;)
13287 {
13288 rtx element;
13289
13290 element = CONST_VECTOR_ELT (x, i);
13291 fprintf (file, pattern, INTVAL (element));
13292 }
13293
13294 return 1;
13295}
13296
13297const char *
6f5f2481 13298arm_output_load_gr (rtx *operands)
5a9335ef
NC
13299{
13300 rtx reg;
13301 rtx offset;
13302 rtx wcgr;
13303 rtx sum;
13304
13305 if (GET_CODE (operands [1]) != MEM
13306 || GET_CODE (sum = XEXP (operands [1], 0)) != PLUS
13307 || GET_CODE (reg = XEXP (sum, 0)) != REG
13308 || GET_CODE (offset = XEXP (sum, 1)) != CONST_INT
13309 || ((INTVAL (offset) < 1024) && (INTVAL (offset) > -1024)))
13310 return "wldrw%?\t%0, %1";
13311
13312 /* Fix up an out-of-range load of a GR register. */
13313 output_asm_insn ("str%?\t%0, [sp, #-4]!\t@ Start of GR load expansion", & reg);
13314 wcgr = operands[0];
13315 operands[0] = reg;
13316 output_asm_insn ("ldr%?\t%0, %1", operands);
13317
13318 operands[0] = wcgr;
13319 operands[1] = reg;
13320 output_asm_insn ("tmcr%?\t%0, %1", operands);
13321 output_asm_insn ("ldr%?\t%0, [sp], #4\t@ End of GR load expansion", & reg);
13322
13323 return "";
13324}
This page took 3.559233 seconds and 5 git commands to generate.