]> gcc.gnu.org Git - gcc.git/blame - gcc/config/arm/arm.c
decl2.c (build_call_from_tree): Fix uninitialized variable.
[gcc.git] / gcc / config / arm / arm.c
CommitLineData
b36ba79f 1/* Output routines for GCC for ARM.
1d6e90ac 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
2398fb2a 3 Free Software Foundation, Inc.
cce8749e 4 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 5 and Martin Simmons (@harleqn.co.uk).
b36ba79f 6 More major hacks by Richard Earnshaw (rearnsha@arm.com).
cce8749e
CH
7
8This file is part of GNU CC.
9
10GNU CC is free software; you can redistribute it and/or modify
11it under the terms of the GNU General Public License as published by
12the Free Software Foundation; either version 2, or (at your option)
13any later version.
14
15GNU CC is distributed in the hope that it will be useful,
16but WITHOUT ANY WARRANTY; without even the implied warranty of
17MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18GNU General Public License for more details.
19
20You should have received a copy of the GNU General Public License
21along with GNU CC; see the file COPYING. If not, write to
8fb289e7
RK
22the Free Software Foundation, 59 Temple Place - Suite 330,
23Boston, MA 02111-1307, USA. */
ff9940b0 24
56636818 25#include "config.h"
43cffd11 26#include "system.h"
cce8749e 27#include "rtl.h"
d5b7b3ae 28#include "tree.h"
c7319d87 29#include "obstack.h"
cce8749e
CH
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "real.h"
33#include "insn-config.h"
34#include "conditions.h"
cce8749e
CH
35#include "output.h"
36#include "insn-attr.h"
37#include "flags.h"
af48348a 38#include "reload.h"
49ad7cfa 39#include "function.h"
bee06f3d 40#include "expr.h"
e78d8e51 41#include "optabs.h"
ad076f4e 42#include "toplev.h"
aec3cfba 43#include "recog.h"
92a432f4 44#include "ggc.h"
d5b7b3ae 45#include "except.h"
8b97c5f8 46#include "c-pragma.h"
7b8b8ade 47#include "integrate.h"
c27ba912 48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
cce8749e 51
d5b7b3ae
RE
52/* Forward definitions of types. */
53typedef struct minipool_node Mnode;
54typedef struct minipool_fixup Mfix;
55
56/* In order to improve the layout of the prototypes below
57 some short type abbreviations are defined here. */
58#define Hint HOST_WIDE_INT
59#define Mmode enum machine_mode
60#define Ulong unsigned long
6d3d9133 61#define Ccstar const char *
d5b7b3ae 62
1d6e90ac
NC
63const struct attribute_spec arm_attribute_table[];
64
d5b7b3ae
RE
65/* Forward function declarations. */
66static void arm_add_gc_roots PARAMS ((void));
67static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
d5b7b3ae
RE
68static Ulong bit_count PARAMS ((signed int));
69static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
70static int eliminate_lr2ip PARAMS ((rtx *));
71static rtx emit_multi_reg_push PARAMS ((int));
72static rtx emit_sfm PARAMS ((int, int));
301d03af 73#ifndef AOF_ASSEMBLER
1d6e90ac 74static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
301d03af 75#endif
6d3d9133 76static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
d5b7b3ae
RE
77static arm_cc get_arm_condition_code PARAMS ((rtx));
78static void init_fpa_table PARAMS ((void));
79static Hint int_log2 PARAMS ((Hint));
80static rtx is_jump_table PARAMS ((rtx));
6d3d9133
NC
81static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
82static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
d5b7b3ae 83static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
6d3d9133 84static Ccstar shift_op PARAMS ((rtx, Hint *));
e2500fed 85static struct machine_function * arm_init_machine_status PARAMS ((void));
d5b7b3ae
RE
86static int number_of_first_bit_set PARAMS ((int));
87static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
88static void thumb_exit PARAMS ((FILE *, int, rtx));
89static void thumb_pushpop PARAMS ((FILE *, int, int));
6d3d9133 90static Ccstar thumb_condition_code PARAMS ((rtx, int));
d5b7b3ae
RE
91static rtx is_jump_table PARAMS ((rtx));
92static Hint get_jump_table_size PARAMS ((rtx));
93static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
94static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
95static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
96static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
97static void assign_minipool_offsets PARAMS ((Mfix *));
98static void arm_print_value PARAMS ((FILE *, rtx));
99static void dump_minipool PARAMS ((rtx));
100static int arm_barrier_cost PARAMS ((rtx));
101static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
102static void push_minipool_barrier PARAMS ((rtx, Hint));
103static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
104static void note_invalid_constants PARAMS ((rtx, Hint));
87e27392 105static int current_file_function_operand PARAMS ((rtx));
1d6e90ac 106static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
6d3d9133
NC
107static Ulong arm_compute_save_reg_mask PARAMS ((void));
108static Ulong arm_isr_value PARAMS ((tree));
109static Ulong arm_compute_func_type PARAMS ((void));
1d6e90ac
NC
110static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
111static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
112static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
113static void arm_output_function_prologue PARAMS ((FILE *, Hint));
114static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
8d8e52be 115static int arm_comp_type_attributes PARAMS ((tree, tree));
1d6e90ac
NC
116static void arm_set_default_type_attributes PARAMS ((tree));
117static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
ebe413e5 118#ifdef OBJECT_FORMAT_ELF
1d6e90ac 119static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
ebe413e5 120#endif
fb49053f
RH
121#ifndef ARM_PE
122static void arm_encode_section_info PARAMS ((tree, int));
123#endif
c237e94a 124
d5b7b3ae
RE
125#undef Hint
126#undef Mmode
127#undef Ulong
6d3d9133 128#undef Ccstar
672a6f42
NB
129\f
/* Initialize the GCC target structure.
   Each #undef/#define pair below overrides one hook in the generic
   TARGET_INITIALIZER with an ARM-specific implementation; the result is
   collected into the global `targetm' vector at the end of this block.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

/* The AOF assembler uses its own data directives; everywhere else we
   emit integers through arm_assemble_integer.  */
#ifdef AOF_ASSEMBLER
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#else
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

/* ARM_PE (Windows CE / PE object format) has its own section-info
   encoding; the generic ELF/AOUT variant is used otherwise.  */
#undef TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO  arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO  arm_encode_section_info
#endif

#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 185\f
c7319d87
RE
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
/* First object allocated on minipool_obstack; used as the release
   point when the obstack is reset.  */
static char * minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in? 26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  These feed the
   `flags' fields of the all_cores/all_architectures tables below and
   the insn_flags/tune_flags masks.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  They are all set up once by
   arm_override_options.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.
   Indexed by enum arm_cond_code.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

/* Convenience wrapper: true iff the two strings are equal.  */
#define streq(string1, string2) (strcmp (string1, string2) == 0)
2b835d68 306\f
6354dc9b 307/* Initialization code. */
2b835d68 308
2b835d68
RE
/* Association of a CPU or architecture name with its FL_* capability
   bits.  Used both for -mcpu=/-march=/-mtune= parsing (via arm_select)
   and for default-CPU selection in arm_override_options.  */
struct processors
{
  const char *const name;
  const unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores.  Terminated by a NULL name entry.  */

  {"arm2",      FL_CO_PROC | FL_MODE26 },
  {"arm250",    FL_CO_PROC | FL_MODE26 },
  {"arm3",      FL_CO_PROC | FL_MODE26 },
  {"arm6",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                 FL_MODE26 | FL_MODE32 },
  {"arm620",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                 FL_MODE26 | FL_MODE32 },
  {"arm710t",                FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                 FL_MODE26 | FL_MODE32 },
  {"arm720t",                FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                FL_MODE26 | FL_MODE32 },
  {"arm7100",                FL_MODE26 | FL_MODE32 },
  {"arm7500",                FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",  FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                   FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                 FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                            FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                            FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                           FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",           FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",                          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",                           FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};
369
/* Capability bits for each architecture level accepted by -march=.
   Same format and terminator convention as all_cores above.  */
static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};
387
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name         processors  */
  /* NOTE: the entry order is significant - arm_override_options treats
     index 2 specially as the -mtune= entry.  */
  { NULL,       "-mcpu=",     all_cores  },
  { NULL,       "-march=",    all_architectures },
  { NULL,       "-mtune=",    all_cores }
};
399
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  /* Work on the unsigned bit pattern so the clearing step below is
     well defined for every input, including negative values.  */
  unsigned int bits = (unsigned int) value;
  unsigned long count;

  /* Kernighan's method: `bits & (bits - 1)' clears the least
     significant set bit, so the loop runs once per set bit.  */
  for (count = 0; bits != 0; count++)
    bits &= bits - 1;

  return count;
}
415
2b835d68
RE
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.

   Reads the -mcpu=/-march=/-mtune= strings captured in arm_select,
   derives insn_flags (what we may generate) and tune_flags (what we
   schedule for), resolves conflicts between APCS-26/APCS-32, Thumb and
   interworking options, and finally initializes the file-scope flag
   variables (arm_arch4, arm_is_strong, ...) used by arm.md.
   NOTE(review): presumably invoked once from the OVERRIDE_OPTIONS
   target macro after command-line parsing - confirm against arm.h.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.
     Iterate backwards so -mcpu= (index 0) is processed last.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
	{
	  const struct processors * sel;

	  for (sel = ptr->processors; sel->name != NULL; sel++)
	    if (streq (ptr->string, sel->name))
	      {
		/* Index 2 is the -mtune= entry; the others feed
		   insn_flags.  */
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int        sought;
      static const struct cpu_default
      {
	const int cpu;
	const char *const name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2,      "arm2" },
	{ TARGET_CPU_arm6,      "arm6" },
	{ TARGET_CPU_arm610,    "arm610" },
	{ TARGET_CPU_arm710,    "arm710" },
	{ TARGET_CPU_arm7m,     "arm7m" },
	{ TARGET_CPU_arm7500fe, "arm7500fe" },
	{ TARGET_CPU_arm7tdmi,  "arm7tdmi" },
	{ TARGET_CPU_arm8,      "arm8" },
	{ TARGET_CPU_arm810,    "arm810" },
	{ TARGET_CPU_arm9,      "arm9" },
	{ TARGET_CPU_strongarm, "strongarm" },
	{ TARGET_CPU_xscale,    "xscale" },
	{ TARGET_CPU_generic,   "arm" },
	{ 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
	 switch that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There are no ARM processors that support both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~FL_MODE26;
	}
      else if (!TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.
	     NOTE(review): the comparison against (sought | insn_flags)
	     can only match when insn_flags is a subset of sought, since
	     the left side is masked by sought; looks intentional as a
	     fast path but verify it is not meant to be plain `sought'.  */
	  for (sel = all_cores; sel->name != NULL; sel++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned int current_bit_count = 0;
	      const struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned int count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we loose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32" );
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
		       && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FP_SOFT3;
      else
	error ("invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  /* Parse -mstructure_size_boundary=; only 8 and 32 are accepted.  */
  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
	       || pic_register == HARD_FRAME_POINTER_REGNUM
	       || pic_register == STACK_POINTER_REGNUM
	       || pic_register >= PC_REGNUM)
	error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
766
/* One-time initialization run at the end of arm_override_options.
   Despite the name, the visible work here is setting up the obstack
   used for minipool constant handling; minipool_startobj records the
   initial allocation point.  NOTE(review): presumably GC root
   registration lived here historically - confirm against callers.  */
static void
arm_add_gc_roots ()
{
  gcc_obstack_init(&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
cce8749e 773\f
6d3d9133
NC
774/* A table of known ARM exception types.
775 For use with the interrupt function attribute. */
776
777typedef struct
778{
8b60264b
KG
779 const char *const arg;
780 const unsigned long return_value;
6d3d9133
NC
781}
782isr_attribute_arg;
783
8b60264b 784static const isr_attribute_arg isr_attribute_args [] =
6d3d9133
NC
785{
786 { "IRQ", ARM_FT_ISR },
787 { "irq", ARM_FT_ISR },
788 { "FIQ", ARM_FT_FIQ },
789 { "fiq", ARM_FT_FIQ },
790 { "ABORT", ARM_FT_ISR },
791 { "abort", ARM_FT_ISR },
792 { "ABORT", ARM_FT_ISR },
793 { "abort", ARM_FT_ISR },
794 { "UNDEF", ARM_FT_EXCEPTION },
795 { "undef", ARM_FT_EXCEPTION },
796 { "SWI", ARM_FT_EXCEPTION },
797 { "swi", ARM_FT_EXCEPTION },
798 { NULL, ARM_FT_NORMAL }
799};
800
801/* Returns the (interrupt) function type of the current
802 function, or ARM_FT_UNKNOWN if the type cannot be determined. */
803
804static unsigned long
805arm_isr_value (argument)
806 tree argument;
807{
8b60264b 808 const isr_attribute_arg * ptr;
1d6e90ac 809 const char * arg;
6d3d9133
NC
810
811 /* No argument - default to IRQ. */
812 if (argument == NULL_TREE)
813 return ARM_FT_ISR;
814
815 /* Get the value of the argument. */
816 if (TREE_VALUE (argument) == NULL_TREE
817 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
818 return ARM_FT_UNKNOWN;
819
820 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
821
822 /* Check it against the list of known arguments. */
823 for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
1d6e90ac
NC
824 if (streq (arg, ptr->arg))
825 return ptr->return_value;
6d3d9133
NC
826
827 /* An unrecognised interrupt type. */
828 return ARM_FT_UNKNOWN;
829}
830
/* Computes the type of the current function.  Combines ARM_FT_* flag
   bits derived from optimization state, nesting, and the function's
   "naked"/"isr"/"interrupt" attributes.  Only called (via
   arm_current_func_type) when the cached value is ARM_FT_UNKNOWN.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  /* Nested functions receive a static chain pointer.  */
  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  /* An exception-handler epilogue takes precedence over any
     interrupt attribute.  */
  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      /* "isr" and "interrupt" are synonyms here.  */
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
	a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
	type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
	type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
877
878/* Returns the type of the current function. */
879
880unsigned long
881arm_current_func_type ()
882{
883 if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
884 cfun->machine->func_type = arm_compute_func_type ();
885
886 return cfun->machine->func_type;
887}
888\f
/* Return 1 if it is possible to return using a single instruction.
   ISCOND is nonzero if the return would be conditional.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
	  && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  /* A conditional return on StrongARM, or any interworking return,
     is only a win if no core registers were saved.  */
  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
	if (regs_ever_live[regno] && !call_used_regs[regno])
	  return 0;

      /* The PIC register counts as a saved register here too.  */
      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  return 1;
}
945
cce8749e
CH
946/* Return TRUE if int I is a valid immediate ARM constant. */
947
948int
949const_ok_for_arm (i)
ff9940b0 950 HOST_WIDE_INT i;
cce8749e 951{
30cf4896 952 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 953
56636818
JL
954 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
955 be all zero, or all one. */
30cf4896
KG
956 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
957 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
958 != ((~(unsigned HOST_WIDE_INT) 0)
959 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
56636818
JL
960 return FALSE;
961
e2c671ba
RE
962 /* Fast return for 0 and powers of 2 */
963 if ((i & (i - 1)) == 0)
964 return TRUE;
965
cce8749e
CH
966 do
967 {
30cf4896 968 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
f3bb6135 969 return TRUE;
abaa26e5 970 mask =
30cf4896
KG
971 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
972 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
ebe413e5
NC
973 }
974 while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 975
f3bb6135
RE
976 return FALSE;
977}
cce8749e 978
6354dc9b 979/* Return true if I is a valid constant for the operation CODE. */
74bbc178
NC
980static int
981const_ok_for_op (i, code)
e2c671ba
RE
982 HOST_WIDE_INT i;
983 enum rtx_code code;
e2c671ba
RE
984{
985 if (const_ok_for_arm (i))
986 return 1;
987
988 switch (code)
989 {
990 case PLUS:
991 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
992
993 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
994 case XOR:
995 case IOR:
996 return 0;
997
998 case AND:
999 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
1000
1001 default:
1002 abort ();
1003 }
1004}
1005
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesise
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      /* First make a dry-run cost estimate (GENERATE == 0); fall back
	 to load-from-pool only if synthesis would be too expensive.  */
      if (!after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are diadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      /* Load the constant into a temporary, then apply the
		 operation to it.  */
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx_MINUS (mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  /* Otherwise synthesise the constant in-line.  */
  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
1069
ceebdb09
PB
1070static int
1071count_insns_for_constant (HOST_WIDE_INT remainder, int i)
1072{
1073 HOST_WIDE_INT temp1;
1074 int num_insns = 0;
1075 do
1076 {
1077 int end;
1078
1079 if (i <= 0)
1080 i += 32;
1081 if (remainder & (3 << (i - 2)))
1082 {
1083 end = i - 8;
1084 if (end < 0)
1085 end += 32;
1086 temp1 = remainder & ((0x0ff << end)
1087 | ((i < end) ? (0xff >> (32 - end)) : 0));
1088 remainder &= ~temp1;
1089 num_insns++;
1090 i -= 6;
1091 }
1092 i -= 2;
1093 } while (remainder);
1094 return num_insns;
1095}
1096
2b835d68
RE
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation (so the function only counts the insns needed).
   Returns the number of insns required (and, when GENERATE is set,
   emits them).  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      /* x | -1 is just -1.  */
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      /* x | 0 is a plain copy (or nothing at all).  */
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      /* x & 0 is 0; x & -1 is a plain copy.  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      /* x ^ 0 is a plain copy; x ^ -1 is NOT.  */
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  /* Count of leading zero bits.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  /* Count of leading one bits.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  /* Count of trailing zero bits.  */
  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  /* Count of trailing one bits.  */
  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good, way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && !const_ok_for_arm (temp2))
		{
		  /* Synthesise the low half, then OR in the same value
		     shifted left by I.  */
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && !const_ok_for_arm (temp1))
		{
		  /* Synthesise the high half, then OR in the same value
		     shifted right by I.  */
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0 */
	  || (reload_completed && !reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      /* The MVN-shift tricks below are only valid for IOR.  */
      if (code == XOR)
	break;

      /* A run of leading ones: MVN of the shifted-up source, then MVN
	 of the result shifted back down, sets those top bits.  */
      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      /* Likewise for a run of trailing ones, with the shifts swapped.  */
      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}

      /* x | C == ~(~x & ~C); useful when ~C is a valid immediate.  */
      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  /* If clearing the top bits is not the whole job, recurse for
	     the rest of the mask first.  */
	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  /* Shift up then back down to clear the top bits.  */
	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  /* Shift down then back up to clear the bottom bits.  */
	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  /* If more than half the bits are set, working with the complement
     (or, for PLUS, the negation) needs fewer chunks.  */
  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (!(remainder & (3 << i)))
	  {
	    while ((i < 32) && !(remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

	       *((volatile int *)0xe0000100) = 1;
	       *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

		mov rA, #0xe0000000
		mov rB, #1
		str rB, [rA, #0x100]
		mov rB, #2
		str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (ie with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
	&& ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
	&& (count_insns_for_constant (remainder, 0) <=
	    count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    /* Peel off an 8-bit (2-bit-aligned, possibly wrapped) chunk
	       and emit one insn for it.  */
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src, temp1_rtx;

		if (code == SET || code == MINUS)
		  {
		    new_src = (subtargets ? gen_reg_rtx (mode) : target);
		    if (can_invert && code != MINUS)
		      temp1 = ~temp1;
		  }
		else
		  {
		    if (remainder && subtargets)
		      new_src = gen_reg_rtx (mode);
		    else
		      new_src = target;
		    if (can_invert)
		      temp1 = ~temp1;
		    else if (can_negate)
		      temp1 = -temp1;
		  }

		temp1 = trunc_int_for_mode (temp1, mode);
		temp1_rtx = GEN_INT (temp1);

		if (code == SET)
		  ;
		else if (code == MINUS)
		  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
		else
		  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

		emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
		source = new_src;
	      }

	    /* After the first chunk, subsequent chunks combine with the
	       partial result, so SET and MINUS degrade to PLUS.  */
	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      }
    while (remainder);
  }

  return insns;
}
1677
bd9c7e23
RE
1678/* Canonicalize a comparison so that we are more likely to recognize it.
1679 This can be done for a few constant compares, where we can make the
1680 immediate value easier to load. */
1d6e90ac 1681
bd9c7e23
RE
1682enum rtx_code
1683arm_canonicalize_comparison (code, op1)
1684 enum rtx_code code;
62b10bbc 1685 rtx * op1;
bd9c7e23 1686{
ad076f4e 1687 unsigned HOST_WIDE_INT i = INTVAL (*op1);
bd9c7e23
RE
1688
1689 switch (code)
1690 {
1691 case EQ:
1692 case NE:
1693 return code;
1694
1695 case GT:
1696 case LE:
30cf4896 1697 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
5895f793 1698 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
bd9c7e23 1699 {
5895f793 1700 *op1 = GEN_INT (i + 1);
bd9c7e23
RE
1701 return code == GT ? GE : LT;
1702 }
1703 break;
1704
1705 case GE:
1706 case LT:
30cf4896 1707 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
5895f793 1708 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
bd9c7e23 1709 {
5895f793 1710 *op1 = GEN_INT (i - 1);
bd9c7e23
RE
1711 return code == GE ? GT : LE;
1712 }
1713 break;
1714
1715 case GTU:
1716 case LEU:
30cf4896 1717 if (i != ~((unsigned HOST_WIDE_INT) 0)
5895f793 1718 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
bd9c7e23
RE
1719 {
1720 *op1 = GEN_INT (i + 1);
1721 return code == GTU ? GEU : LTU;
1722 }
1723 break;
1724
1725 case GEU:
1726 case LTU:
1727 if (i != 0
5895f793 1728 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
bd9c7e23
RE
1729 {
1730 *op1 = GEN_INT (i - 1);
1731 return code == GEU ? GTU : LEU;
1732 }
1733 break;
1734
1735 default:
1736 abort ();
1737 }
1738
1739 return code;
1740}
bd9c7e23 1741
f5a1b0d2
NC
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  Implements the APCS "integer-like" rules for
   small structures and unions.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  /* The unsigned cast turns -1 into a huge value, so it falls into the
     "return in memory" branch too.  */
  if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non bit-field element
	 and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (!DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
1838
82e9d970
PB
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  If the return value is an
     aggregate returned in memory, r0 is reserved for the hidden
     return-slot pointer, so argument passing starts at register 1.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
  
  /* Default calling convention; -mlong-calls upgrades it to CALL_LONG.  */
  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_LONG;
    }
}
1867
1868/* Determine where to put an argument to a function.
1869 Value is zero to push the argument on the stack,
1870 or a hard register in which to store the argument.
1871
1872 MODE is the argument's machine mode.
1873 TYPE is the data type of the argument (as a tree).
1874 This is null for libcalls where that information may
1875 not be available.
1876 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1877 the preceding args and about the function being called.
1878 NAMED is nonzero if this argument is a named parameter
1879 (otherwise it is an extra parameter matching an ellipsis). */
1d6e90ac 1880
82e9d970
PB
1881rtx
1882arm_function_arg (pcum, mode, type, named)
1883 CUMULATIVE_ARGS * pcum;
1884 enum machine_mode mode;
1885 tree type ATTRIBUTE_UNUSED;
1886 int named;
1887{
1888 if (mode == VOIDmode)
1889 /* Compute operand 2 of the call insn. */
1890 return GEN_INT (pcum->call_cookie);
1891
5895f793 1892 if (!named || pcum->nregs >= NUM_ARG_REGS)
82e9d970
PB
1893 return NULL_RTX;
1894
1895 return gen_rtx_REG (mode, pcum->nregs);
1896}
82e9d970 1897\f
c27ba912
DM
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,		/* No #pragma [no_]long_calls is in effect.  */
  LONG,		/* #pragma long_calls is in effect.  */
  SHORT		/* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

/* The pragma state picked up by arm_set_default_type_attributes for
   function types declared while a pragma is active.  */
static arm_pragma_enum arm_pragma_long_calls = OFF;

/* Handler for "#pragma long_calls".  */
void
arm_pr_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

/* Handler for "#pragma no_long_calls".  */
void
arm_pr_no_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

/* Handler for "#pragma long_calls_off": revert to the default.  */
void
arm_pr_long_calls_off (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}
1928\f
91d231cb
JM
/* Table of machine attributes.  */
const struct attribute_spec arm_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  { "long_call",    0, 0, false, true,  true,  NULL },
  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  { "short_call",   0, 0, false, true,  true,  NULL },
  /* Interrupt Service Routines have special prologue and epilogue requirements.  */
  /* "isr"/"interrupt" take an optional argument naming the interrupt kind;
     see arm_handle_isr_attribute.  */
  { "isr",          0, 1, false, false, false, arm_handle_isr_attribute },
  { "interrupt",    0, 1, false, false, false, arm_handle_isr_attribute },
  /* "naked" applies to a FUNCTION_DECL only (decl_req == true).  */
  { "naked",        0, 0, true,  false, false, arm_handle_fndecl_attribute },
#ifdef ARM_PE
  /* ARM/PE has three new attributes:
     interfacearm - ?
     dllexport - for exporting a function/variable that will live in a dll
     dllimport - for importing a function/variable from a dll

     Microsoft allows multiple declspecs in one __declspec, separating
     them with spaces.  We do NOT support this.  Instead, use __declspec
     multiple times.
  */
  { "dllimport",    0, 0, true,  false, false, NULL },
  { "dllexport",    0, 0, true,  false, false, NULL },
  { "interfacearm", 0, 0, true,  false, false, arm_handle_fndecl_attribute },
#endif
  /* Sentinel terminating the table.  */
  { NULL,           0, 0, false, false, false, NULL }
};
6d3d9133 1960
91d231cb
JM
1961/* Handle an attribute requiring a FUNCTION_DECL;
1962 arguments as in struct attribute_spec.handler. */
1d6e90ac 1963
91d231cb
JM
1964static tree
1965arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
1d6e90ac
NC
1966 tree * node;
1967 tree name;
1968 tree args ATTRIBUTE_UNUSED;
1969 int flags ATTRIBUTE_UNUSED;
1970 bool * no_add_attrs;
91d231cb
JM
1971{
1972 if (TREE_CODE (*node) != FUNCTION_DECL)
1973 {
1974 warning ("`%s' attribute only applies to functions",
1975 IDENTIFIER_POINTER (name));
1976 *no_add_attrs = true;
1977 }
1978
1979 return NULL_TREE;
1980}
1981
/* Handle an "interrupt" or "isr" attribute;
   arguments as in struct attribute_spec.handler.  NODE is the tree
   node the attribute is being applied to; on return *NO_ADD_ATTRS
   tells the caller whether to suppress adding the attribute.  */

static tree
arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
     tree * node;
     tree name;
     tree args;
     int flags;
     bool * no_add_attrs;
{
  if (DECL_P (*node))
    {
      /* On a declaration, only a FUNCTION_DECL makes sense.  */
      if (TREE_CODE (*node) != FUNCTION_DECL)
	{
	  warning ("`%s' attribute only applies to functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
      /* FIXME: the argument if any is checked for type attributes;
	 should it be checked for decl ones?  */
    }
  else
    {
      if (TREE_CODE (*node) == FUNCTION_TYPE
	  || TREE_CODE (*node) == METHOD_TYPE)
	{
	  /* Reject an unrecognized interrupt kind argument.  */
	  if (arm_isr_value (args) == ARM_FT_UNKNOWN)
	    {
	      warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
	      *no_add_attrs = true;
	    }
	}
      else if (TREE_CODE (*node) == POINTER_TYPE
	       && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
		   || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
	       && arm_isr_value (args) != ARM_FT_UNKNOWN)
	{
	  /* Pointer-to-function: attach the attribute to the pointed-to
	     function type on a fresh copy of the pointer type, and tell
	     the caller not to add it to the pointer itself.  */
	  *node = build_type_copy (*node);
	  TREE_TYPE (*node) = build_type_attribute_variant
	    (TREE_TYPE (*node),
	     tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
	  *no_add_attrs = true;
	}
      else
	{
	  /* Possibly pass this attribute on from the type to a decl.  */
	  if (flags & ((int) ATTR_FLAG_DECL_NEXT
		       | (int) ATTR_FLAG_FUNCTION_NEXT
		       | (int) ATTR_FLAG_ARRAY_NEXT))
	    {
	      *no_add_attrs = true;
	      return tree_cons (name, args, NULL_TREE);
	    }
	  else
	    {
	      warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
	    }
	}
    }

  return NULL_TREE;
}
2045
2046/* Return 0 if the attributes for two types are incompatible, 1 if they
2047 are compatible, and 2 if they are nearly compatible (which causes a
2048 warning to be generated). */
1d6e90ac 2049
8d8e52be 2050static int
82e9d970
PB
2051arm_comp_type_attributes (type1, type2)
2052 tree type1;
2053 tree type2;
2054{
1cb8d58a 2055 int l1, l2, s1, s2;
bd7fc26f 2056
82e9d970
PB
2057 /* Check for mismatch of non-default calling convention. */
2058 if (TREE_CODE (type1) != FUNCTION_TYPE)
2059 return 1;
2060
2061 /* Check for mismatched call attributes. */
1cb8d58a
NC
2062 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2063 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2064 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2065 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
2066
2067 /* Only bother to check if an attribute is defined. */
2068 if (l1 | l2 | s1 | s2)
2069 {
2070 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 2071 if ((l1 != l2) || (s1 != s2))
bd7fc26f 2072 return 0;
82e9d970 2073
bd7fc26f
NC
2074 /* Disallow mixed attributes. */
2075 if ((l1 & s2) || (l2 & s1))
2076 return 0;
2077 }
2078
6d3d9133
NC
2079 /* Check for mismatched ISR attribute. */
2080 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2081 if (! l1)
2082 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2083 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2084 if (! l2)
2085 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2086 if (l1 != l2)
2087 return 0;
2088
bd7fc26f 2089 return 1;
82e9d970
PB
2090}
2091
c27ba912
DM
2092/* Encode long_call or short_call attribute by prefixing
2093 symbol name in DECL with a special character FLAG. */
1d6e90ac 2094
c27ba912
DM
2095void
2096arm_encode_call_attribute (decl, flag)
2097 tree decl;
cd2b33d0 2098 int flag;
c27ba912 2099{
3cce094d 2100 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b 2101 int len = strlen (str);
d19fb8e3 2102 char * newstr;
c27ba912 2103
c27ba912
DM
2104 /* Do not allow weak functions to be treated as short call. */
2105 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2106 return;
c27ba912 2107
520a57c8
ZW
2108 newstr = alloca (len + 2);
2109 newstr[0] = flag;
2110 strcpy (newstr + 1, str);
c27ba912 2111
6d3d9133 2112 newstr = (char *) ggc_alloc_string (newstr, len + 1);
c27ba912
DM
2113 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2114}
2115
2116/* Assigns default attributes to newly defined type. This is used to
2117 set short_call/long_call attributes for function types of
2118 functions defined inside corresponding #pragma scopes. */
1d6e90ac 2119
8d8e52be 2120static void
c27ba912
DM
2121arm_set_default_type_attributes (type)
2122 tree type;
2123{
2124 /* Add __attribute__ ((long_call)) to all functions, when
2125 inside #pragma long_calls or __attribute__ ((short_call)),
2126 when inside #pragma no_long_calls. */
2127 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2128 {
2129 tree type_attr_list, attr_name;
2130 type_attr_list = TYPE_ATTRIBUTES (type);
2131
2132 if (arm_pragma_long_calls == LONG)
2133 attr_name = get_identifier ("long_call");
2134 else if (arm_pragma_long_calls == SHORT)
2135 attr_name = get_identifier ("short_call");
2136 else
2137 return;
2138
2139 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2140 TYPE_ATTRIBUTES (type) = type_attr_list;
2141 }
2142}
2143\f
/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */

static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition however, then this may not be the real
     definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && !DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}
2168
2169/* Return non-zero if a 32 bit "long_call" should be generated for
2170 this call. We generate a long_call if the function:
2171
2172 a. has an __attribute__((long call))
2173 or b. is within the scope of a #pragma long_calls
2174 or c. the -mlong-calls command line switch has been specified
2175
2176 However we do not generate a long call if the function:
2177
2178 d. has an __attribute__ ((short_call))
2179 or e. is inside the scope of a #pragma no_long_calls
2180 or f. has an __attribute__ ((section))
2181 or g. is defined within the current compilation unit.
2182
2183 This function will be called by C fragments contained in the machine
2184 description file. CALL_REF and CALL_COOKIE correspond to the matched
2185 rtl operands. CALL_SYMBOL is used to distinguish between
2186 two different callers of the function. It is set to 1 in the
2187 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2188 and "call_value" patterns. This is because of the difference in the
2189 SYM_REFs passed by these patterns. */
1d6e90ac 2190
c27ba912
DM
2191int
2192arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2193 rtx sym_ref;
2194 int call_cookie;
2195 int call_symbol;
2196{
5895f793 2197 if (!call_symbol)
c27ba912
DM
2198 {
2199 if (GET_CODE (sym_ref) != MEM)
2200 return 0;
2201
2202 sym_ref = XEXP (sym_ref, 0);
2203 }
2204
2205 if (GET_CODE (sym_ref) != SYMBOL_REF)
2206 return 0;
2207
2208 if (call_cookie & CALL_SHORT)
2209 return 0;
2210
2211 if (TARGET_LONG_CALLS && flag_function_sections)
2212 return 1;
2213
87e27392 2214 if (current_file_function_operand (sym_ref))
c27ba912
DM
2215 return 0;
2216
2217 return (call_cookie & CALL_LONG)
2218 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2219 || TARGET_LONG_CALLS;
2220}
f99fce0c
RE
2221
2222/* Return non-zero if it is ok to make a tail-call to DECL. */
1d6e90ac 2223
f99fce0c
RE
2224int
2225arm_function_ok_for_sibcall (decl)
2226 tree decl;
2227{
2228 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2229
2230 /* Never tailcall something for which we have no decl, or if we
2231 are in Thumb mode. */
2232 if (decl == NULL || TARGET_THUMB)
2233 return 0;
2234
2235 /* Get the calling method. */
2236 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2237 call_type = CALL_SHORT;
2238 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2239 call_type = CALL_LONG;
2240
2241 /* Cannot tail-call to long calls, since these are out of range of
2242 a branch instruction. However, if not compiling PIC, we know
2243 we can reach the symbol if it is in this compilation unit. */
5895f793 2244 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
f99fce0c
RE
2245 return 0;
2246
2247 /* If we are interworking and the function is not declared static
2248 then we can't tail-call it unless we know that it exists in this
2249 compilation unit (since it might be a Thumb routine). */
5895f793 2250 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
f99fce0c
RE
2251 return 0;
2252
6d3d9133
NC
2253 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2254 if (IS_INTERRUPT (arm_current_func_type ()))
2255 return 0;
2256
f99fce0c
RE
2257 /* Everything else is ok. */
2258 return 1;
2259}
2260
82e9d970 2261\f
32de079a
RE
2262int
2263legitimate_pic_operand_p (x)
2264 rtx x;
2265{
d5b7b3ae
RE
2266 if (CONSTANT_P (x)
2267 && flag_pic
32de079a
RE
2268 && (GET_CODE (x) == SYMBOL_REF
2269 || (GET_CODE (x) == CONST
2270 && GET_CODE (XEXP (x, 0)) == PLUS
2271 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2272 return 0;
2273
2274 return 1;
2275}
2276
/* Convert ORIG, a constant address, into a form that is legitimate
   when generating PIC, emitting any insns needed.  MODE is the mode
   of the value being addressed; REG, if non-null, is a register the
   result may be built in (when null a fresh pseudo is allocated,
   which requires no_new_pseudos to be clear).  Returns the
   legitimized address.  */
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
#ifndef AOF_ASSEMBLER
      rtx pic_ref, address;
#endif
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  /* Cannot create new pseudos after reload.  */
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      /* Load the GOT offset of the symbol into ADDRESS.  */
      if (TARGET_ARM)
	emit_insn (gen_pic_load_addr_arm (address, orig));
      else
	emit_insn (gen_pic_load_addr_thumb (address, orig));

      /* Labels, and symbols known to be local (short-call encoded), can
	 be addressed pc-relative to the GOT base; others need a load
	 through the GOT entry.  */
      if ((GET_CODE (orig) == LABEL_REF
	   || (GET_CODE (orig) == SYMBOL_REF &&
	       ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
	  && NEED_GOT_RELOC)
	pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
      else
	{
	  pic_ref = gen_rtx_MEM (Pmode,
				 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					       address));
	  /* GOT entries do not change during execution.  */
	  RTX_UNCHANGING_P (pic_ref) = 1;
	}

      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already legitimized: (const (plus (pic reg) ...)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize the two halves of (const (plus X Y)) recursively.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  */
	  /* Jumps to "win" below when the constant is a valid index.  */
	  ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (!no_new_pseudos)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant (base, INTVAL (offset));
	}

      /* Multi-word integer (or soft-float) values cannot use a PLUS
	 address directly; add the parts into REG instead.  */
      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}
2393
c1163e75
PB
/* Generate code to load the PIC register.  PROLOGUE is true if
   called from arm_expand_prologue (in which case we want the
   generated insns at the start of the function); false if called
   by an exception receiver that needs the PIC register reloaded
   (in which case the insns are just dumped at the current location).  */

void
arm_finalize_pic (prologue)
     int prologue ATTRIBUTE_UNUSED;
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
  rtx global_offset_table;

  /* Nothing to do unless this function uses the PIC register, and
     -msingle-pic-base means it is set up elsewhere.  */
  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (!flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition, on the Thumb it is 'dot + 4'.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
			    gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  /* The constant loaded is GOT-base minus the pc-relative label
     address, so that adding the pc yields the GOT base.  */
  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  if (TARGET_ARM)
    {
      emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
    }
  else
    {
      emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
    }

  seq = get_insns ();
  end_sequence ();
  if (prologue)
    emit_insn_after (seq, get_insns ());
  else
    emit_insn (seq);

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
2452
e2c671ba
RE
/* Non-zero if X is a REG, or a SUBREG of a REG.  */
#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

/* The underlying REG of X, which must satisfy REG_OR_SUBREG_REG.  */
#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

/* Fallback cost scale when the target headers do not define it.  */
#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif
e2c671ba
RE
2463
/* Compute the cost of the rtx X, whose top code is CODE and which
   appears as an operand of an expression with code OUTER.  Thumb and
   ARM use entirely separate cost tables; most entries are rough
   hand-tuned estimates.  */
int
arm_rtx_costs (x, code, outer)
     rtx x;
     enum rtx_code code;
     enum rtx_code outer;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  if (TARGET_THUMB)
    {
      switch (code)
	{
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATERT:
	case PLUS:
	case MINUS:
	case COMPARE:
	case NEG:
	case NOT:
	  return COSTS_N_INSNS (1);

	case MULT:
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      int cycles = 0;
	      unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

	      /* One cycle per two bits of the constant multiplier.  */
	      while (i)
		{
		  i >>= 2;
		  cycles++;
		}
	      return COSTS_N_INSNS (2) + cycles;
	    }
	  return COSTS_N_INSNS (1) + 16;

	case SET:
	  /* NOTE(review): `==' binds tighter than intended here — the
	     expression parses as ((src==MEM) + dest-code) == MEM rather
	     than summing two booleans; confirm intent before changing.  */
	  return (COSTS_N_INSNS (1)
		  + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
			 + GET_CODE (SET_DEST (x)) == MEM));

	case CONST_INT:
	  if (outer == SET)
	    {
	      /* Small constants load in one insn; others need a shift
		 trick or a literal-pool load.  */
	      if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
		return 0;
	      if (thumb_shiftable_const (INTVAL (x)))
		return COSTS_N_INSNS (2);
	      return COSTS_N_INSNS (3);
	    }
	  else if (outer == PLUS
		   && INTVAL (x) < 256 && INTVAL (x) > -256)
	    return 0;
	  else if (outer == COMPARE
		   && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
	    return 0;
	  else if (outer == ASHIFT || outer == ASHIFTRT
		   || outer == LSHIFTRT)
	    return 0;
	  return COSTS_N_INSNS (2);

	case CONST:
	case CONST_DOUBLE:
	case LABEL_REF:
	case SYMBOL_REF:
	  return COSTS_N_INSNS (3);

	case UDIV:
	case UMOD:
	case DIV:
	case MOD:
	  /* No hardware divide; a library call.  */
	  return 100;

	case TRUNCATE:
	  return 99;

	case AND:
	case XOR:
	case IOR:
	  /* XXX guess.  */
	  return 8;

	case ADDRESSOF:
	case MEM:
	  /* XXX another guess.  */
	  /* Memory costs quite a lot for the first word, but subsequent words
	     load at the equivalent of a single insn each.  */
	  return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
		  + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
		     ? 4 : 0));

	case IF_THEN_ELSE:
	  /* XXX a guess.  */
	  if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	    return 14;
	  return 2;

	case ZERO_EXTEND:
	  /* XXX still guessing.  */
	  switch (GET_MODE (XEXP (x, 0)))
	    {
	    case QImode:
	      return (1 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case HImode:
	      return (4 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case SImode:
	      return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    default:
	      return 99;
	    }

	default:
	  return 99;
#if 0
	case FFS:
	case FLOAT:
	case FIX:
	case UNSIGNED_FIX:
	  /* XXX guess */
	  fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
		   rtx_name[code]);
	  abort ();
#endif
	}
    }

  /* ARM-mode costs.  */
  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (GET_CODE (x) == SYMBOL_REF
		 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
		       && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      /* A reverse-subtract with immediate, or a subtract of a shifted
	 operand, folds into a single insn.  */
      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			     (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;

	  /* Tune as appropriate.  */
	  /* Booth multiplier retires this many bits per step.  */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      /* Recognize the smul/umul high-part pattern, which maps to a
	 single fast-multiply insn.  */
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    case CONST_INT:
      /* Negative return values tell the caller the constant can be
	 folded into the surrounding operation for free.  */
      if (const_ok_for_arm (INTVAL (x)))
	return outer == SET ? 2 : -1;
      else if (outer == AND
	       && const_ok_for_arm (~INTVAL (x)))
	return -1;
      else if ((outer == COMPARE
		|| outer == PLUS || outer == MINUS)
	       && const_ok_for_arm (-INTVAL (x)))
	return -1;
      else
	return 5;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 6;

    case CONST_DOUBLE:
      if (const_double_rtx_ok_for_fpu (x))
	return outer == SET ? 2 : -1;
      else if ((outer == COMPARE || outer == PLUS)
	       && neg_const_double_rtx_ok_for_fpu (x))
	return -1;
      return 7;

    default:
      return 99;
    }
}
32de079a 2848
c237e94a 2849static int
32de079a
RE
2850arm_adjust_cost (insn, link, dep, cost)
2851 rtx insn;
2852 rtx link;
2853 rtx dep;
2854 int cost;
2855{
2856 rtx i_pat, d_pat;
2857
d19fb8e3
NC
2858 /* Some true dependencies can have a higher cost depending
2859 on precisely how certain input operands are used. */
2860 if (arm_is_xscale
2861 && REG_NOTE_KIND (link) == 0
2862 && recog_memoized (insn) < 0
2863 && recog_memoized (dep) < 0)
2864 {
2865 int shift_opnum = get_attr_shift (insn);
2866 enum attr_type attr_type = get_attr_type (dep);
2867
2868 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2869 operand for INSN. If we have a shifted input operand and the
2870 instruction we depend on is another ALU instruction, then we may
2871 have to account for an additional stall. */
2872 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2873 {
2874 rtx shifted_operand;
2875 int opno;
2876
2877 /* Get the shifted operand. */
2878 extract_insn (insn);
2879 shifted_operand = recog_data.operand[shift_opnum];
2880
2881 /* Iterate over all the operands in DEP. If we write an operand
2882 that overlaps with SHIFTED_OPERAND, then we have increase the
2883 cost of this dependency. */
2884 extract_insn (dep);
2885 preprocess_constraints ();
2886 for (opno = 0; opno < recog_data.n_operands; opno++)
2887 {
2888 /* We can ignore strict inputs. */
2889 if (recog_data.operand_type[opno] == OP_IN)
2890 continue;
2891
2892 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2893 shifted_operand))
2894 return 2;
2895 }
2896 }
2897 }
2898
6354dc9b 2899 /* XXX This is not strictly true for the FPA. */
d5b7b3ae
RE
2900 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2901 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
b36ba79f
RE
2902 return 0;
2903
d5b7b3ae
RE
2904 /* Call insns don't incur a stall, even if they follow a load. */
2905 if (REG_NOTE_KIND (link) == 0
2906 && GET_CODE (insn) == CALL_INSN)
2907 return 1;
2908
32de079a
RE
2909 if ((i_pat = single_set (insn)) != NULL
2910 && GET_CODE (SET_SRC (i_pat)) == MEM
2911 && (d_pat = single_set (dep)) != NULL
2912 && GET_CODE (SET_DEST (d_pat)) == MEM)
2913 {
48f6efae 2914 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
32de079a
RE
2915 /* This is a load after a store, there is no conflict if the load reads
2916 from a cached area. Assume that loads from the stack, and from the
2917 constant pool are cached, and that others will miss. This is a
6354dc9b 2918 hack. */
32de079a 2919
48f6efae
NC
2920 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
2921 || reg_mentioned_p (stack_pointer_rtx, src_mem)
2922 || reg_mentioned_p (frame_pointer_rtx, src_mem)
2923 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
949d79eb 2924 return 1;
32de079a
RE
2925 }
2926
2927 return cost;
2928}
2929
/* This code has been fixed for cross compilation.  */

/* Nonzero once VALUES_FPA has been filled in by init_fpa_table.  */
static int fpa_consts_inited = 0;

/* Decimal spellings of the eight constants that the FPA co-processor
   can encode directly in an instruction immediate field.  */
static const char * const strings_fpa[8] =
{
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

/* STRINGS_FPA parsed into real values; see init_fpa_table.  */
static REAL_VALUE_TYPE values_fpa[8];
2941
2942static void
2943init_fpa_table ()
2944{
2945 int i;
2946 REAL_VALUE_TYPE r;
2947
2948 for (i = 0; i < 8; i++)
2949 {
2950 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2951 values_fpa[i] = r;
2952 }
f3bb6135 2953
ff9940b0
RE
2954 fpa_consts_inited = 1;
2955}
2956
6354dc9b 2957/* Return TRUE if rtx X is a valid immediate FPU constant. */
cce8749e
CH
2958
2959int
2960const_double_rtx_ok_for_fpu (x)
2961 rtx x;
2962{
ff9940b0
RE
2963 REAL_VALUE_TYPE r;
2964 int i;
2965
2966 if (!fpa_consts_inited)
2967 init_fpa_table ();
2968
2969 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2970 if (REAL_VALUE_MINUS_ZERO (r))
2971 return 0;
f3bb6135 2972
ff9940b0
RE
2973 for (i = 0; i < 8; i++)
2974 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2975 return 1;
f3bb6135 2976
ff9940b0 2977 return 0;
f3bb6135 2978}
ff9940b0 2979
6354dc9b 2980/* Return TRUE if rtx X is a valid immediate FPU constant. */
ff9940b0
RE
2981
2982int
2983neg_const_double_rtx_ok_for_fpu (x)
2984 rtx x;
2985{
2986 REAL_VALUE_TYPE r;
2987 int i;
2988
2989 if (!fpa_consts_inited)
2990 init_fpa_table ();
2991
2992 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2993 r = REAL_VALUE_NEGATE (r);
2994 if (REAL_VALUE_MINUS_ZERO (r))
2995 return 0;
f3bb6135 2996
ff9940b0
RE
2997 for (i = 0; i < 8; i++)
2998 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2999 return 1;
f3bb6135 3000
ff9940b0 3001 return 0;
f3bb6135 3002}
cce8749e
CH
3003\f
3004/* Predicates for `match_operand' and `match_operator'. */
3005
ff9940b0 3006/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
3007 (SUBREG (MEM)...).
3008
3009 This function exists because at the time it was put in it led to better
3010 code. SUBREG(MEM) always needs a reload in the places where
3011 s_register_operand is used, and this seemed to lead to excessive
3012 reloading. */
ff9940b0
RE
3013
3014int
3015s_register_operand (op, mode)
1d6e90ac 3016 rtx op;
ff9940b0
RE
3017 enum machine_mode mode;
3018{
3019 if (GET_MODE (op) != mode && mode != VOIDmode)
3020 return 0;
3021
3022 if (GET_CODE (op) == SUBREG)
f3bb6135 3023 op = SUBREG_REG (op);
ff9940b0
RE
3024
3025 /* We don't consider registers whose class is NO_REGS
3026 to be a register operand. */
d5b7b3ae 3027 /* XXX might have to check for lo regs only for thumb ??? */
ff9940b0
RE
3028 return (GET_CODE (op) == REG
3029 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3030 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3031}
3032
b0888988 3033/* A hard register operand (even before reload. */
1d6e90ac 3034
b0888988
RE
3035int
3036arm_hard_register_operand (op, mode)
1d6e90ac 3037 rtx op;
b0888988
RE
3038 enum machine_mode mode;
3039{
3040 if (GET_MODE (op) != mode && mode != VOIDmode)
3041 return 0;
3042
3043 return (GET_CODE (op) == REG
3044 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3045}
3046
e2c671ba
RE
3047/* Only accept reg, subreg(reg), const_int. */
3048
3049int
3050reg_or_int_operand (op, mode)
1d6e90ac 3051 rtx op;
e2c671ba
RE
3052 enum machine_mode mode;
3053{
3054 if (GET_CODE (op) == CONST_INT)
3055 return 1;
3056
3057 if (GET_MODE (op) != mode && mode != VOIDmode)
3058 return 0;
3059
3060 if (GET_CODE (op) == SUBREG)
3061 op = SUBREG_REG (op);
3062
3063 /* We don't consider registers whose class is NO_REGS
3064 to be a register operand. */
3065 return (GET_CODE (op) == REG
3066 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3067 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3068}
3069
ff9940b0
RE
3070/* Return 1 if OP is an item in memory, given that we are in reload. */
3071
3072int
d5b7b3ae 3073arm_reload_memory_operand (op, mode)
ff9940b0 3074 rtx op;
74bbc178 3075 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
3076{
3077 int regno = true_regnum (op);
3078
5895f793 3079 return (!CONSTANT_P (op)
ff9940b0
RE
3080 && (regno == -1
3081 || (GET_CODE (op) == REG
3082 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3083}
3084
/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4).
   MODE is QImode if called when computing constraints, or VOIDmode when
   emitting patterns.  In this latter case we cannot use memory_operand()
   because it will fail on badly formed MEMs, which is precisely what we are
   trying to catch.  */

int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if 0
  if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
    return 0;
#endif
  if (GET_CODE (op) != MEM)
    return 0;

  /* Examine the address itself.  */
  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad.  */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && (!s_register_operand (XEXP (op, 0), VOIDmode)
	  || (!s_register_operand (XEXP (op, 1), VOIDmode)
	      && GET_CODE (XEXP (op, 1)) != CONST_INT)))
    return 1;

  /* Big constants are also bad: the V4 ldrsb insn only has an 8-bit
     offset field.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
	  || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so.  */
  return 0;
}
3122
cce8749e
CH
3123/* Return TRUE for valid operands for the rhs of an ARM instruction. */
3124
3125int
3126arm_rhs_operand (op, mode)
3127 rtx op;
3128 enum machine_mode mode;
3129{
ff9940b0 3130 return (s_register_operand (op, mode)
cce8749e 3131 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 3132}
cce8749e 3133
1d6e90ac
NC
3134/* Return TRUE for valid operands for the
3135 rhs of an ARM instruction, or a load. */
ff9940b0
RE
3136
3137int
3138arm_rhsm_operand (op, mode)
3139 rtx op;
3140 enum machine_mode mode;
3141{
3142 return (s_register_operand (op, mode)
3143 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3144 || memory_operand (op, mode));
f3bb6135 3145}
ff9940b0
RE
3146
3147/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3148 constant that is valid when negated. */
3149
3150int
3151arm_add_operand (op, mode)
3152 rtx op;
3153 enum machine_mode mode;
3154{
d5b7b3ae
RE
3155 if (TARGET_THUMB)
3156 return thumb_cmp_operand (op, mode);
3157
ff9940b0
RE
3158 return (s_register_operand (op, mode)
3159 || (GET_CODE (op) == CONST_INT
3160 && (const_ok_for_arm (INTVAL (op))
3161 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 3162}
ff9940b0
RE
3163
3164int
3165arm_not_operand (op, mode)
3166 rtx op;
3167 enum machine_mode mode;
3168{
3169 return (s_register_operand (op, mode)
3170 || (GET_CODE (op) == CONST_INT
3171 && (const_ok_for_arm (INTVAL (op))
3172 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 3173}
ff9940b0 3174
5165176d
RE
3175/* Return TRUE if the operand is a memory reference which contains an
3176 offsettable address. */
1d6e90ac 3177
5165176d
RE
3178int
3179offsettable_memory_operand (op, mode)
1d6e90ac 3180 rtx op;
5165176d
RE
3181 enum machine_mode mode;
3182{
3183 if (mode == VOIDmode)
3184 mode = GET_MODE (op);
3185
3186 return (mode == GET_MODE (op)
3187 && GET_CODE (op) == MEM
3188 && offsettable_address_p (reload_completed | reload_in_progress,
3189 mode, XEXP (op, 0)));
3190}
3191
3192/* Return TRUE if the operand is a memory reference which is, or can be
3193 made word aligned by adjusting the offset. */
1d6e90ac 3194
5165176d
RE
3195int
3196alignable_memory_operand (op, mode)
1d6e90ac 3197 rtx op;
5165176d
RE
3198 enum machine_mode mode;
3199{
3200 rtx reg;
3201
3202 if (mode == VOIDmode)
3203 mode = GET_MODE (op);
3204
3205 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3206 return 0;
3207
3208 op = XEXP (op, 0);
3209
3210 return ((GET_CODE (reg = op) == REG
3211 || (GET_CODE (op) == SUBREG
3212 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3213 || (GET_CODE (op) == PLUS
3214 && GET_CODE (XEXP (op, 1)) == CONST_INT
3215 && (GET_CODE (reg = XEXP (op, 0)) == REG
3216 || (GET_CODE (XEXP (op, 0)) == SUBREG
3217 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
bdb429a5 3218 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
5165176d
RE
3219}
3220
b111229a
RE
3221/* Similar to s_register_operand, but does not allow hard integer
3222 registers. */
1d6e90ac 3223
b111229a
RE
3224int
3225f_register_operand (op, mode)
1d6e90ac 3226 rtx op;
b111229a
RE
3227 enum machine_mode mode;
3228{
3229 if (GET_MODE (op) != mode && mode != VOIDmode)
3230 return 0;
3231
3232 if (GET_CODE (op) == SUBREG)
3233 op = SUBREG_REG (op);
3234
3235 /* We don't consider registers whose class is NO_REGS
3236 to be a register operand. */
3237 return (GET_CODE (op) == REG
3238 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3239 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3240}
3241
cce8749e
CH
3242/* Return TRUE for valid operands for the rhs of an FPU instruction. */
3243
3244int
3245fpu_rhs_operand (op, mode)
3246 rtx op;
3247 enum machine_mode mode;
3248{
ff9940b0 3249 if (s_register_operand (op, mode))
f3bb6135 3250 return TRUE;
9ce71c6f
BS
3251
3252 if (GET_MODE (op) != mode && mode != VOIDmode)
3253 return FALSE;
3254
3255 if (GET_CODE (op) == CONST_DOUBLE)
3256 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
3257
3258 return FALSE;
3259}
cce8749e 3260
ff9940b0
RE
3261int
3262fpu_add_operand (op, mode)
3263 rtx op;
3264 enum machine_mode mode;
3265{
3266 if (s_register_operand (op, mode))
f3bb6135 3267 return TRUE;
9ce71c6f
BS
3268
3269 if (GET_MODE (op) != mode && mode != VOIDmode)
3270 return FALSE;
3271
3272 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
3273 return (const_double_rtx_ok_for_fpu (op)
3274 || neg_const_double_rtx_ok_for_fpu (op));
3275
3276 return FALSE;
ff9940b0
RE
3277}
3278
cce8749e
CH
3279/* Return nonzero if OP is a constant power of two. */
3280
3281int
3282power_of_two_operand (op, mode)
3283 rtx op;
74bbc178 3284 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
3285{
3286 if (GET_CODE (op) == CONST_INT)
3287 {
d5b7b3ae 3288 HOST_WIDE_INT value = INTVAL (op);
1d6e90ac 3289
f3bb6135 3290 return value != 0 && (value & (value - 1)) == 0;
cce8749e 3291 }
1d6e90ac 3292
f3bb6135
RE
3293 return FALSE;
3294}
cce8749e
CH
3295
3296/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 3297 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
3298 Note that this disallows MEM(REG+REG), but allows
3299 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
3300
3301int
3302di_operand (op, mode)
3303 rtx op;
3304 enum machine_mode mode;
3305{
ff9940b0 3306 if (s_register_operand (op, mode))
f3bb6135 3307 return TRUE;
cce8749e 3308
9ce71c6f
BS
3309 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3310 return FALSE;
3311
e9c6b69b
NC
3312 if (GET_CODE (op) == SUBREG)
3313 op = SUBREG_REG (op);
3314
cce8749e
CH
3315 switch (GET_CODE (op))
3316 {
3317 case CONST_DOUBLE:
3318 case CONST_INT:
f3bb6135
RE
3319 return TRUE;
3320
cce8749e 3321 case MEM:
f3bb6135
RE
3322 return memory_address_p (DImode, XEXP (op, 0));
3323
cce8749e 3324 default:
f3bb6135 3325 return FALSE;
cce8749e 3326 }
f3bb6135 3327}
cce8749e 3328
d5b7b3ae 3329/* Like di_operand, but don't accept constants. */
1d6e90ac 3330
d5b7b3ae
RE
3331int
3332nonimmediate_di_operand (op, mode)
3333 rtx op;
3334 enum machine_mode mode;
3335{
3336 if (s_register_operand (op, mode))
3337 return TRUE;
3338
3339 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3340 return FALSE;
3341
3342 if (GET_CODE (op) == SUBREG)
3343 op = SUBREG_REG (op);
3344
3345 if (GET_CODE (op) == MEM)
3346 return memory_address_p (DImode, XEXP (op, 0));
3347
3348 return FALSE;
3349}
3350
f3139301 3351/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 3352 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
3353 Note that this disallows MEM(REG+REG), but allows
3354 MEM(PRE/POST_INC/DEC(REG)). */
3355
3356int
3357soft_df_operand (op, mode)
3358 rtx op;
3359 enum machine_mode mode;
3360{
3361 if (s_register_operand (op, mode))
4b02997f 3362 return TRUE;
f3139301 3363
9ce71c6f
BS
3364 if (mode != VOIDmode && GET_MODE (op) != mode)
3365 return FALSE;
3366
37b80d2e
BS
3367 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3368 return FALSE;
3369
e9c6b69b
NC
3370 if (GET_CODE (op) == SUBREG)
3371 op = SUBREG_REG (op);
9ce71c6f 3372
f3139301
DE
3373 switch (GET_CODE (op))
3374 {
3375 case CONST_DOUBLE:
3376 return TRUE;
3377
3378 case MEM:
3379 return memory_address_p (DFmode, XEXP (op, 0));
3380
3381 default:
3382 return FALSE;
3383 }
3384}
3385
d5b7b3ae 3386/* Like soft_df_operand, but don't accept constants. */
1d6e90ac 3387
d5b7b3ae
RE
3388int
3389nonimmediate_soft_df_operand (op, mode)
3390 rtx op;
3391 enum machine_mode mode;
3392{
3393 if (s_register_operand (op, mode))
4b02997f 3394 return TRUE;
d5b7b3ae
RE
3395
3396 if (mode != VOIDmode && GET_MODE (op) != mode)
3397 return FALSE;
3398
3399 if (GET_CODE (op) == SUBREG)
3400 op = SUBREG_REG (op);
3401
3402 if (GET_CODE (op) == MEM)
3403 return memory_address_p (DFmode, XEXP (op, 0));
3404 return FALSE;
3405}
cce8749e 3406
d5b7b3ae 3407/* Return TRUE for valid index operands. */
1d6e90ac 3408
cce8749e
CH
3409int
3410index_operand (op, mode)
3411 rtx op;
3412 enum machine_mode mode;
3413{
d5b7b3ae 3414 return (s_register_operand (op, mode)
ff9940b0 3415 || (immediate_operand (op, mode)
d5b7b3ae
RE
3416 && (GET_CODE (op) != CONST_INT
3417 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 3418}
cce8749e 3419
ff9940b0
RE
3420/* Return TRUE for valid shifts by a constant. This also accepts any
3421 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 3422 shift operator in this case was a mult. */
ff9940b0
RE
3423
3424int
3425const_shift_operand (op, mode)
3426 rtx op;
3427 enum machine_mode mode;
3428{
3429 return (power_of_two_operand (op, mode)
3430 || (immediate_operand (op, mode)
d5b7b3ae
RE
3431 && (GET_CODE (op) != CONST_INT
3432 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 3433}
ff9940b0 3434
cce8749e
CH
3435/* Return TRUE for arithmetic operators which can be combined with a multiply
3436 (shift). */
3437
3438int
3439shiftable_operator (x, mode)
3440 rtx x;
3441 enum machine_mode mode;
3442{
1d6e90ac
NC
3443 enum rtx_code code;
3444
cce8749e
CH
3445 if (GET_MODE (x) != mode)
3446 return FALSE;
cce8749e 3447
1d6e90ac
NC
3448 code = GET_CODE (x);
3449
3450 return (code == PLUS || code == MINUS
3451 || code == IOR || code == XOR || code == AND);
f3bb6135 3452}
cce8749e 3453
6ab589e0
JL
3454/* Return TRUE for binary logical operators. */
3455
3456int
3457logical_binary_operator (x, mode)
3458 rtx x;
3459 enum machine_mode mode;
3460{
1d6e90ac
NC
3461 enum rtx_code code;
3462
6ab589e0
JL
3463 if (GET_MODE (x) != mode)
3464 return FALSE;
6ab589e0 3465
1d6e90ac
NC
3466 code = GET_CODE (x);
3467
3468 return (code == IOR || code == XOR || code == AND);
6ab589e0
JL
3469}
3470
6354dc9b 3471/* Return TRUE for shift operators. */
cce8749e
CH
3472
3473int
3474shift_operator (x, mode)
3475 rtx x;
3476 enum machine_mode mode;
3477{
1d6e90ac
NC
3478 enum rtx_code code;
3479
cce8749e
CH
3480 if (GET_MODE (x) != mode)
3481 return FALSE;
cce8749e 3482
1d6e90ac 3483 code = GET_CODE (x);
f3bb6135 3484
1d6e90ac
NC
3485 if (code == MULT)
3486 return power_of_two_operand (XEXP (x, 1), mode);
3487
3488 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3489 || code == ROTATERT);
f3bb6135 3490}
ff9940b0 3491
6354dc9b 3492/* Return TRUE if x is EQ or NE. */
1d6e90ac 3493
6354dc9b
NC
3494int
3495equality_operator (x, mode)
f3bb6135 3496 rtx x;
74bbc178 3497 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3498{
f3bb6135 3499 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
3500}
3501
e45b72c4 3502/* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
1d6e90ac 3503
e45b72c4
RE
3504int
3505arm_comparison_operator (x, mode)
3506 rtx x;
3507 enum machine_mode mode;
3508{
3509 return (comparison_operator (x, mode)
3510 && GET_CODE (x) != LTGT
3511 && GET_CODE (x) != UNEQ);
3512}
3513
6354dc9b 3514/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
1d6e90ac 3515
ff9940b0
RE
3516int
3517minmax_operator (x, mode)
3518 rtx x;
3519 enum machine_mode mode;
3520{
3521 enum rtx_code code = GET_CODE (x);
3522
3523 if (GET_MODE (x) != mode)
3524 return FALSE;
f3bb6135 3525
ff9940b0 3526 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 3527}
ff9940b0 3528
ff9940b0 3529/* Return TRUE if this is the condition code register, if we aren't given
6354dc9b 3530 a mode, accept any class CCmode register. */
1d6e90ac 3531
ff9940b0
RE
3532int
3533cc_register (x, mode)
f3bb6135
RE
3534 rtx x;
3535 enum machine_mode mode;
ff9940b0
RE
3536{
3537 if (mode == VOIDmode)
3538 {
3539 mode = GET_MODE (x);
d5b7b3ae 3540
ff9940b0
RE
3541 if (GET_MODE_CLASS (mode) != MODE_CC)
3542 return FALSE;
3543 }
f3bb6135 3544
d5b7b3ae
RE
3545 if ( GET_MODE (x) == mode
3546 && GET_CODE (x) == REG
3547 && REGNO (x) == CC_REGNUM)
ff9940b0 3548 return TRUE;
f3bb6135 3549
ff9940b0
RE
3550 return FALSE;
3551}
5bbe2d40
RE
3552
3553/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
3554 a mode, accept any class CCmode register which indicates a dominance
3555 expression. */
1d6e90ac 3556
5bbe2d40 3557int
84ed5e79 3558dominant_cc_register (x, mode)
5bbe2d40
RE
3559 rtx x;
3560 enum machine_mode mode;
3561{
3562 if (mode == VOIDmode)
3563 {
3564 mode = GET_MODE (x);
d5b7b3ae 3565
84ed5e79 3566 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
3567 return FALSE;
3568 }
3569
d5b7b3ae 3570 if ( mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
3571 && mode != CC_DLEmode && mode != CC_DLTmode
3572 && mode != CC_DGEmode && mode != CC_DGTmode
3573 && mode != CC_DLEUmode && mode != CC_DLTUmode
3574 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3575 return FALSE;
3576
d5b7b3ae 3577 return cc_register (x, mode);
5bbe2d40
RE
3578}
3579
2b835d68 3580/* Return TRUE if X references a SYMBOL_REF. */
1d6e90ac 3581
2b835d68
RE
3582int
3583symbol_mentioned_p (x)
3584 rtx x;
3585{
1d6e90ac
NC
3586 const char * fmt;
3587 int i;
2b835d68
RE
3588
3589 if (GET_CODE (x) == SYMBOL_REF)
3590 return 1;
3591
3592 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 3593
2b835d68
RE
3594 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3595 {
3596 if (fmt[i] == 'E')
3597 {
1d6e90ac 3598 int j;
2b835d68
RE
3599
3600 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3601 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3602 return 1;
3603 }
3604 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3605 return 1;
3606 }
3607
3608 return 0;
3609}
3610
3611/* Return TRUE if X references a LABEL_REF. */
1d6e90ac 3612
2b835d68
RE
3613int
3614label_mentioned_p (x)
3615 rtx x;
3616{
1d6e90ac
NC
3617 const char * fmt;
3618 int i;
2b835d68
RE
3619
3620 if (GET_CODE (x) == LABEL_REF)
3621 return 1;
3622
3623 fmt = GET_RTX_FORMAT (GET_CODE (x));
3624 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3625 {
3626 if (fmt[i] == 'E')
3627 {
1d6e90ac 3628 int j;
2b835d68
RE
3629
3630 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3631 if (label_mentioned_p (XVECEXP (x, i, j)))
3632 return 1;
3633 }
3634 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3635 return 1;
3636 }
3637
3638 return 0;
3639}
3640
ff9940b0
RE
3641enum rtx_code
3642minmax_code (x)
f3bb6135 3643 rtx x;
ff9940b0
RE
3644{
3645 enum rtx_code code = GET_CODE (x);
3646
3647 if (code == SMAX)
3648 return GE;
f3bb6135 3649 else if (code == SMIN)
ff9940b0 3650 return LE;
f3bb6135 3651 else if (code == UMIN)
ff9940b0 3652 return LEU;
f3bb6135 3653 else if (code == UMAX)
ff9940b0 3654 return GEU;
f3bb6135 3655
ff9940b0
RE
3656 abort ();
3657}
3658
/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  /* Only handle addresses of the form REG or PLUS (REG, CONST_INT);
     anything else is conservatively treated as non-adjacent.  */
  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      int val0 = 0, val1 = 0;
      int reg0, reg1;

      /* Decompose A's address into base register and offset.  */
      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = REGNO (XEXP (a, 0));

      /* Likewise for B.  */
      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = REGNO (XEXP (b, 0));

      /* Adjacent means: same base register, offsets exactly one word
	 (4 bytes) apart, in either order.  */
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
3695
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  /* A load-multiple needs at least two elements and must start with
     a SET.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the write-back element must be
	 base_reg = base_reg + 4 * (number of loaded registers).  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  /* First destination register and the common base address.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  /* Every remaining element must load the next consecutive SImode
     register from the next consecutive word offset off SRC_ADDR.  */
  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
3758
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  /* A store-multiple needs at least two elements and must start with
     a SET.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the write-back element must be
	 base_reg = base_reg + 4 * (number of stored registers).  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  /* First source register and the common base address.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  /* Every remaining element must store the next consecutive SImode
     register to the next consecutive word offset off DEST_ADDR.  */
  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
e2c671ba 3821
84ed5e79
RE
3822int
3823load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3824 rtx * operands;
84ed5e79 3825 int nops;
62b10bbc
NC
3826 int * regs;
3827 int * base;
3828 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3829{
3830 int unsorted_regs[4];
3831 HOST_WIDE_INT unsorted_offsets[4];
3832 int order[4];
ad076f4e 3833 int base_reg = -1;
84ed5e79
RE
3834 int i;
3835
1d6e90ac
NC
3836 /* Can only handle 2, 3, or 4 insns at present,
3837 though could be easily extended if required. */
84ed5e79
RE
3838 if (nops < 2 || nops > 4)
3839 abort ();
3840
3841 /* Loop over the operands and check that the memory references are
3842 suitable (ie immediate offsets from the same base register). At
3843 the same time, extract the target register, and the memory
3844 offsets. */
3845 for (i = 0; i < nops; i++)
3846 {
3847 rtx reg;
3848 rtx offset;
3849
56636818
JL
3850 /* Convert a subreg of a mem into the mem itself. */
3851 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 3852 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 3853
84ed5e79
RE
3854 if (GET_CODE (operands[nops + i]) != MEM)
3855 abort ();
3856
3857 /* Don't reorder volatile memory references; it doesn't seem worth
3858 looking for the case where the order is ok anyway. */
3859 if (MEM_VOLATILE_P (operands[nops + i]))
3860 return 0;
3861
3862 offset = const0_rtx;
3863
3864 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3865 || (GET_CODE (reg) == SUBREG
3866 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3867 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3868 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3869 == REG)
3870 || (GET_CODE (reg) == SUBREG
3871 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3872 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3873 == CONST_INT)))
3874 {
3875 if (i == 0)
3876 {
d5b7b3ae 3877 base_reg = REGNO (reg);
84ed5e79
RE
3878 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3879 ? REGNO (operands[i])
3880 : REGNO (SUBREG_REG (operands[i])));
3881 order[0] = 0;
3882 }
3883 else
3884 {
6354dc9b 3885 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3886 /* Not addressed from the same base register. */
3887 return 0;
3888
3889 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3890 ? REGNO (operands[i])
3891 : REGNO (SUBREG_REG (operands[i])));
3892 if (unsorted_regs[i] < unsorted_regs[order[0]])
3893 order[0] = i;
3894 }
3895
3896 /* If it isn't an integer register, or if it overwrites the
3897 base register but isn't the last insn in the list, then
3898 we can't do this. */
3899 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3900 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3901 return 0;
3902
3903 unsorted_offsets[i] = INTVAL (offset);
3904 }
3905 else
3906 /* Not a suitable memory address. */
3907 return 0;
3908 }
3909
3910 /* All the useful information has now been extracted from the
3911 operands into unsorted_regs and unsorted_offsets; additionally,
3912 order[0] has been set to the lowest numbered register in the
3913 list. Sort the registers into order, and check that the memory
3914 offsets are ascending and adjacent. */
3915
3916 for (i = 1; i < nops; i++)
3917 {
3918 int j;
3919
3920 order[i] = order[i - 1];
3921 for (j = 0; j < nops; j++)
3922 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3923 && (order[i] == order[i - 1]
3924 || unsorted_regs[j] < unsorted_regs[order[i]]))
3925 order[i] = j;
3926
3927 /* Have we found a suitable register? if not, one must be used more
3928 than once. */
3929 if (order[i] == order[i - 1])
3930 return 0;
3931
3932 /* Is the memory address adjacent and ascending? */
3933 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3934 return 0;
3935 }
3936
3937 if (base)
3938 {
3939 *base = base_reg;
3940
3941 for (i = 0; i < nops; i++)
3942 regs[i] = unsorted_regs[order[i]];
3943
3944 *load_offset = unsorted_offsets[order[0]];
3945 }
3946
3947 if (unsorted_offsets[order[0]] == 0)
3948 return 1; /* ldmia */
3949
3950 if (unsorted_offsets[order[0]] == 4)
3951 return 2; /* ldmib */
3952
3953 if (unsorted_offsets[order[nops - 1]] == 0)
3954 return 3; /* ldmda */
3955
3956 if (unsorted_offsets[order[nops - 1]] == -4)
3957 return 4; /* ldmdb */
3958
949d79eb
RE
3959 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3960 if the offset isn't small enough. The reason 2 ldrs are faster
3961 is because these ARMs are able to do more than one cache access
3962 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3963 whilst the ARM8 has a double bandwidth cache. This means that
3964 these cores can do both an instruction fetch and a data fetch in
3965 a single cycle, so the trick of calculating the address into a
3966 scratch register (one of the result regs) and then doing a load
3967 multiple actually becomes slower (and no smaller in code size).
3968 That is the transformation
6cc8c0b3
NC
3969
3970 ldr rd1, [rbase + offset]
3971 ldr rd2, [rbase + offset + 4]
3972
3973 to
3974
3975 add rd1, rbase, offset
3976 ldmia rd1, {rd1, rd2}
3977
949d79eb
RE
3978 produces worse code -- '3 cycles + any stalls on rd2' instead of
3979 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3980 access per cycle, the first sequence could never complete in less
3981 than 6 cycles, whereas the ldm sequence would only take 5 and
3982 would make better use of sequential accesses if not hitting the
3983 cache.
3984
3985 We cheat here and test 'arm_ld_sched' which we currently know to
3986 only be true for the ARM8, ARM9 and StrongARM. If this ever
3987 changes, then the test below needs to be reworked. */
f5a1b0d2 3988 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
3989 return 0;
3990
84ed5e79
RE
3991 /* Can't do it without setting up the offset, only do this if it takes
3992 no more than one insn. */
3993 return (const_ok_for_arm (unsorted_offsets[order[0]])
3994 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3995}
3996
cd2b33d0 3997const char *
84ed5e79 3998emit_ldm_seq (operands, nops)
62b10bbc 3999 rtx * operands;
84ed5e79
RE
4000 int nops;
4001{
4002 int regs[4];
4003 int base_reg;
4004 HOST_WIDE_INT offset;
4005 char buf[100];
4006 int i;
4007
4008 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4009 {
4010 case 1:
4011 strcpy (buf, "ldm%?ia\t");
4012 break;
4013
4014 case 2:
4015 strcpy (buf, "ldm%?ib\t");
4016 break;
4017
4018 case 3:
4019 strcpy (buf, "ldm%?da\t");
4020 break;
4021
4022 case 4:
4023 strcpy (buf, "ldm%?db\t");
4024 break;
4025
4026 case 5:
4027 if (offset >= 0)
4028 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4029 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4030 (long) offset);
4031 else
4032 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4033 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4034 (long) -offset);
4035 output_asm_insn (buf, operands);
4036 base_reg = regs[0];
4037 strcpy (buf, "ldm%?ia\t");
4038 break;
4039
4040 default:
4041 abort ();
4042 }
4043
4044 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4045 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4046
4047 for (i = 1; i < nops; i++)
4048 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4049 reg_names[regs[i]]);
4050
4051 strcat (buf, "}\t%@ phole ldm");
4052
4053 output_asm_insn (buf, operands);
4054 return "";
4055}
4056
4057int
4058store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 4059 rtx * operands;
84ed5e79 4060 int nops;
62b10bbc
NC
4061 int * regs;
4062 int * base;
4063 HOST_WIDE_INT * load_offset;
84ed5e79
RE
4064{
4065 int unsorted_regs[4];
4066 HOST_WIDE_INT unsorted_offsets[4];
4067 int order[4];
ad076f4e 4068 int base_reg = -1;
84ed5e79
RE
4069 int i;
4070
4071 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4072 extended if required. */
4073 if (nops < 2 || nops > 4)
4074 abort ();
4075
4076 /* Loop over the operands and check that the memory references are
4077 suitable (ie immediate offsets from the same base register). At
4078 the same time, extract the target register, and the memory
4079 offsets. */
4080 for (i = 0; i < nops; i++)
4081 {
4082 rtx reg;
4083 rtx offset;
4084
56636818
JL
4085 /* Convert a subreg of a mem into the mem itself. */
4086 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 4087 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 4088
84ed5e79
RE
4089 if (GET_CODE (operands[nops + i]) != MEM)
4090 abort ();
4091
4092 /* Don't reorder volatile memory references; it doesn't seem worth
4093 looking for the case where the order is ok anyway. */
4094 if (MEM_VOLATILE_P (operands[nops + i]))
4095 return 0;
4096
4097 offset = const0_rtx;
4098
4099 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4100 || (GET_CODE (reg) == SUBREG
4101 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4102 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4103 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4104 == REG)
4105 || (GET_CODE (reg) == SUBREG
4106 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4107 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4108 == CONST_INT)))
4109 {
4110 if (i == 0)
4111 {
62b10bbc 4112 base_reg = REGNO (reg);
84ed5e79
RE
4113 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4114 ? REGNO (operands[i])
4115 : REGNO (SUBREG_REG (operands[i])));
4116 order[0] = 0;
4117 }
4118 else
4119 {
6354dc9b 4120 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
4121 /* Not addressed from the same base register. */
4122 return 0;
4123
4124 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4125 ? REGNO (operands[i])
4126 : REGNO (SUBREG_REG (operands[i])));
4127 if (unsorted_regs[i] < unsorted_regs[order[0]])
4128 order[0] = i;
4129 }
4130
4131 /* If it isn't an integer register, then we can't do this. */
4132 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4133 return 0;
4134
4135 unsorted_offsets[i] = INTVAL (offset);
4136 }
4137 else
4138 /* Not a suitable memory address. */
4139 return 0;
4140 }
4141
4142 /* All the useful information has now been extracted from the
4143 operands into unsorted_regs and unsorted_offsets; additionally,
4144 order[0] has been set to the lowest numbered register in the
4145 list. Sort the registers into order, and check that the memory
4146 offsets are ascending and adjacent. */
4147
4148 for (i = 1; i < nops; i++)
4149 {
4150 int j;
4151
4152 order[i] = order[i - 1];
4153 for (j = 0; j < nops; j++)
4154 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4155 && (order[i] == order[i - 1]
4156 || unsorted_regs[j] < unsorted_regs[order[i]]))
4157 order[i] = j;
4158
4159 /* Have we found a suitable register? if not, one must be used more
4160 than once. */
4161 if (order[i] == order[i - 1])
4162 return 0;
4163
4164 /* Is the memory address adjacent and ascending? */
4165 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4166 return 0;
4167 }
4168
4169 if (base)
4170 {
4171 *base = base_reg;
4172
4173 for (i = 0; i < nops; i++)
4174 regs[i] = unsorted_regs[order[i]];
4175
4176 *load_offset = unsorted_offsets[order[0]];
4177 }
4178
4179 if (unsorted_offsets[order[0]] == 0)
4180 return 1; /* stmia */
4181
4182 if (unsorted_offsets[order[0]] == 4)
4183 return 2; /* stmib */
4184
4185 if (unsorted_offsets[order[nops - 1]] == 0)
4186 return 3; /* stmda */
4187
4188 if (unsorted_offsets[order[nops - 1]] == -4)
4189 return 4; /* stmdb */
4190
4191 return 0;
4192}
4193
cd2b33d0 4194const char *
84ed5e79 4195emit_stm_seq (operands, nops)
62b10bbc 4196 rtx * operands;
84ed5e79
RE
4197 int nops;
4198{
4199 int regs[4];
4200 int base_reg;
4201 HOST_WIDE_INT offset;
4202 char buf[100];
4203 int i;
4204
4205 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4206 {
4207 case 1:
4208 strcpy (buf, "stm%?ia\t");
4209 break;
4210
4211 case 2:
4212 strcpy (buf, "stm%?ib\t");
4213 break;
4214
4215 case 3:
4216 strcpy (buf, "stm%?da\t");
4217 break;
4218
4219 case 4:
4220 strcpy (buf, "stm%?db\t");
4221 break;
4222
4223 default:
4224 abort ();
4225 }
4226
4227 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4228 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4229
4230 for (i = 1; i < nops; i++)
4231 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4232 reg_names[regs[i]]);
4233
4234 strcat (buf, "}\t%@ phole stm");
4235
4236 output_asm_insn (buf, operands);
4237 return "";
4238}
4239
e2c671ba
RE
4240int
4241multi_register_push (op, mode)
0a81f500 4242 rtx op;
74bbc178 4243 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
4244{
4245 if (GET_CODE (op) != PARALLEL
4246 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4247 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
b15bca31 4248 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
e2c671ba
RE
4249 return 0;
4250
4251 return 1;
4252}
ff9940b0 4253\f
6354dc9b 4254/* Routines for use in generating RTL. */
1d6e90ac 4255
f3bb6135 4256rtx
56636818 4257arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 4258 in_struct_p, scalar_p)
ff9940b0
RE
4259 int base_regno;
4260 int count;
4261 rtx from;
4262 int up;
4263 int write_back;
56636818
JL
4264 int unchanging_p;
4265 int in_struct_p;
c6df88cb 4266 int scalar_p;
ff9940b0
RE
4267{
4268 int i = 0, j;
4269 rtx result;
4270 int sign = up ? 1 : -1;
56636818 4271 rtx mem;
ff9940b0 4272
d19fb8e3
NC
4273 /* XScale has load-store double instructions, but they have stricter
4274 alignment requirements than load-store multiple, so we can not
4275 use them.
4276
4277 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4278 the pipeline until completion.
4279
4280 NREGS CYCLES
4281 1 3
4282 2 4
4283 3 5
4284 4 6
4285
4286 An ldr instruction takes 1-3 cycles, but does not block the
4287 pipeline.
4288
4289 NREGS CYCLES
4290 1 1-3
4291 2 2-6
4292 3 3-9
4293 4 4-12
4294
4295 Best case ldr will always win. However, the more ldr instructions
4296 we issue, the less likely we are to be able to schedule them well.
4297 Using ldr instructions also increases code size.
4298
4299 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4300 for counts of 3 or 4 regs. */
4301 if (arm_is_xscale && count <= 2 && ! optimize_size)
4302 {
4303 rtx seq;
4304
4305 start_sequence ();
4306
4307 for (i = 0; i < count; i++)
4308 {
4309 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4310 RTX_UNCHANGING_P (mem) = unchanging_p;
4311 MEM_IN_STRUCT_P (mem) = in_struct_p;
4312 MEM_SCALAR_P (mem) = scalar_p;
4313 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4314 }
4315
4316 if (write_back)
4317 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4318
2f937369 4319 seq = get_insns ();
d19fb8e3
NC
4320 end_sequence ();
4321
4322 return seq;
4323 }
4324
43cffd11 4325 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 4326 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 4327 if (write_back)
f3bb6135 4328 {
ff9940b0 4329 XVECEXP (result, 0, 0)
43cffd11
RE
4330 = gen_rtx_SET (GET_MODE (from), from,
4331 plus_constant (from, count * 4 * sign));
ff9940b0
RE
4332 i = 1;
4333 count++;
f3bb6135
RE
4334 }
4335
ff9940b0 4336 for (j = 0; i < count; i++, j++)
f3bb6135 4337 {
43cffd11 4338 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
4339 RTX_UNCHANGING_P (mem) = unchanging_p;
4340 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 4341 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
4342 XVECEXP (result, 0, i)
4343 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
4344 }
4345
ff9940b0
RE
4346 return result;
4347}
4348
f3bb6135 4349rtx
56636818 4350arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 4351 in_struct_p, scalar_p)
ff9940b0
RE
4352 int base_regno;
4353 int count;
4354 rtx to;
4355 int up;
4356 int write_back;
56636818
JL
4357 int unchanging_p;
4358 int in_struct_p;
c6df88cb 4359 int scalar_p;
ff9940b0
RE
4360{
4361 int i = 0, j;
4362 rtx result;
4363 int sign = up ? 1 : -1;
56636818 4364 rtx mem;
ff9940b0 4365
d19fb8e3
NC
4366 /* See arm_gen_load_multiple for discussion of
4367 the pros/cons of ldm/stm usage for XScale. */
4368 if (arm_is_xscale && count <= 2 && ! optimize_size)
4369 {
4370 rtx seq;
4371
4372 start_sequence ();
4373
4374 for (i = 0; i < count; i++)
4375 {
4376 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4377 RTX_UNCHANGING_P (mem) = unchanging_p;
4378 MEM_IN_STRUCT_P (mem) = in_struct_p;
4379 MEM_SCALAR_P (mem) = scalar_p;
4380 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4381 }
4382
4383 if (write_back)
4384 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4385
2f937369 4386 seq = get_insns ();
d19fb8e3
NC
4387 end_sequence ();
4388
4389 return seq;
4390 }
4391
43cffd11 4392 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 4393 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 4394 if (write_back)
f3bb6135 4395 {
ff9940b0 4396 XVECEXP (result, 0, 0)
43cffd11
RE
4397 = gen_rtx_SET (GET_MODE (to), to,
4398 plus_constant (to, count * 4 * sign));
ff9940b0
RE
4399 i = 1;
4400 count++;
f3bb6135
RE
4401 }
4402
ff9940b0 4403 for (j = 0; i < count; i++, j++)
f3bb6135 4404 {
43cffd11 4405 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
4406 RTX_UNCHANGING_P (mem) = unchanging_p;
4407 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 4408 MEM_SCALAR_P (mem) = scalar_p;
56636818 4409
43cffd11
RE
4410 XVECEXP (result, 0, i)
4411 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
4412 }
4413
ff9940b0
RE
4414 return result;
4415}
4416
880e2516
RE
4417int
4418arm_gen_movstrqi (operands)
62b10bbc 4419 rtx * operands;
880e2516
RE
4420{
4421 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 4422 int i;
880e2516 4423 rtx src, dst;
ad076f4e 4424 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 4425 rtx part_bytes_reg = NULL;
56636818
JL
4426 rtx mem;
4427 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 4428 int dst_scalar_p, src_scalar_p;
880e2516
RE
4429
4430 if (GET_CODE (operands[2]) != CONST_INT
4431 || GET_CODE (operands[3]) != CONST_INT
4432 || INTVAL (operands[2]) > 64
4433 || INTVAL (operands[3]) & 3)
4434 return 0;
4435
4436 st_dst = XEXP (operands[0], 0);
4437 st_src = XEXP (operands[1], 0);
56636818
JL
4438
4439 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4440 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 4441 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
4442 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4443 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 4444 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 4445
880e2516
RE
4446 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4447 fin_src = src = copy_to_mode_reg (SImode, st_src);
4448
e9d7b180 4449 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
880e2516
RE
4450 out_words_to_go = INTVAL (operands[2]) / 4;
4451 last_bytes = INTVAL (operands[2]) & 3;
4452
4453 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 4454 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
4455
4456 for (i = 0; in_words_to_go >= 2; i+=4)
4457 {
bd9c7e23 4458 if (in_words_to_go > 4)
56636818 4459 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
4460 src_unchanging_p,
4461 src_in_struct_p,
4462 src_scalar_p));
bd9c7e23
RE
4463 else
4464 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 4465 FALSE, src_unchanging_p,
c6df88cb 4466 src_in_struct_p, src_scalar_p));
bd9c7e23 4467
880e2516
RE
4468 if (out_words_to_go)
4469 {
bd9c7e23 4470 if (out_words_to_go > 4)
56636818
JL
4471 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4472 dst_unchanging_p,
c6df88cb
MM
4473 dst_in_struct_p,
4474 dst_scalar_p));
bd9c7e23
RE
4475 else if (out_words_to_go != 1)
4476 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4477 dst, TRUE,
4478 (last_bytes == 0
56636818
JL
4479 ? FALSE : TRUE),
4480 dst_unchanging_p,
c6df88cb
MM
4481 dst_in_struct_p,
4482 dst_scalar_p));
880e2516
RE
4483 else
4484 {
43cffd11 4485 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
4486 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4487 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4488 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4489 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
4490 if (last_bytes != 0)
4491 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
4492 }
4493 }
4494
4495 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4496 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4497 }
4498
4499 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4500 if (out_words_to_go)
62b10bbc
NC
4501 {
4502 rtx sreg;
4503
4504 mem = gen_rtx_MEM (SImode, src);
4505 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4506 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4507 MEM_SCALAR_P (mem) = src_scalar_p;
4508 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4509 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4510
4511 mem = gen_rtx_MEM (SImode, dst);
4512 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4513 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4514 MEM_SCALAR_P (mem) = dst_scalar_p;
4515 emit_move_insn (mem, sreg);
4516 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4517 in_words_to_go--;
4518
4519 if (in_words_to_go) /* Sanity check */
4520 abort ();
4521 }
880e2516
RE
4522
4523 if (in_words_to_go)
4524 {
4525 if (in_words_to_go < 0)
4526 abort ();
4527
43cffd11 4528 mem = gen_rtx_MEM (SImode, src);
56636818
JL
4529 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4530 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 4531 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 4532 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
4533 }
4534
d5b7b3ae
RE
4535 if (last_bytes && part_bytes_reg == NULL)
4536 abort ();
4537
880e2516
RE
4538 if (BYTES_BIG_ENDIAN && last_bytes)
4539 {
4540 rtx tmp = gen_reg_rtx (SImode);
4541
6354dc9b 4542 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
4543 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4544 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
4545 part_bytes_reg = tmp;
4546
4547 while (last_bytes)
4548 {
43cffd11 4549 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
4550 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4551 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4552 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2
BS
4553 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
4554
880e2516
RE
4555 if (--last_bytes)
4556 {
4557 tmp = gen_reg_rtx (SImode);
4558 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4559 part_bytes_reg = tmp;
4560 }
4561 }
4562
4563 }
4564 else
4565 {
d5b7b3ae 4566 if (last_bytes > 1)
880e2516 4567 {
d5b7b3ae 4568 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
4569 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4570 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4571 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 4572 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
d5b7b3ae
RE
4573 last_bytes -= 2;
4574 if (last_bytes)
880e2516
RE
4575 {
4576 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 4577
d5b7b3ae
RE
4578 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4579 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
4580 part_bytes_reg = tmp;
4581 }
4582 }
d5b7b3ae
RE
4583
4584 if (last_bytes)
4585 {
4586 mem = gen_rtx_MEM (QImode, dst);
4587 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4588 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4589 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 4590 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
d5b7b3ae 4591 }
880e2516
RE
4592 }
4593
4594 return 1;
4595}
4596
5165176d
RE
4597/* Generate a memory reference for a half word, such that it will be loaded
4598 into the top 16 bits of the word. We can assume that the address is
4599 known to be alignable and of the form reg, or plus (reg, const). */
1d6e90ac 4600
5165176d 4601rtx
d5b7b3ae 4602arm_gen_rotated_half_load (memref)
5165176d
RE
4603 rtx memref;
4604{
4605 HOST_WIDE_INT offset = 0;
4606 rtx base = XEXP (memref, 0);
4607
4608 if (GET_CODE (base) == PLUS)
4609 {
4610 offset = INTVAL (XEXP (base, 1));
4611 base = XEXP (base, 0);
4612 }
4613
956d6950 4614 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 4615 if (TARGET_MMU_TRAPS
5165176d
RE
4616 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4617 return NULL;
4618
43cffd11 4619 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
4620
4621 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4622 return base;
4623
43cffd11 4624 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
4625}
4626
1646cf41
RE
4627/* Select a dominance comparison mode if possible. We support three forms.
4628 COND_OR == 0 => (X && Y)
4629 COND_OR == 1 => ((! X( || Y)
4630 COND_OR == 2 => (X || Y)
4631 If we are unable to support a dominance comparsison we return CC mode.
4632 This will then fail to match for the RTL expressions that generate this
4633 call. */
d19fb8e3 4634
84ed5e79 4635static enum machine_mode
74bbc178 4636select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
4637 rtx x;
4638 rtx y;
4639 HOST_WIDE_INT cond_or;
4640{
4641 enum rtx_code cond1, cond2;
4642 int swapped = 0;
4643
4644 /* Currently we will probably get the wrong result if the individual
4645 comparisons are not simple. This also ensures that it is safe to
956d6950 4646 reverse a comparison if necessary. */
84ed5e79
RE
4647 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4648 != CCmode)
4649 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4650 != CCmode))
4651 return CCmode;
4652
1646cf41
RE
4653 /* The if_then_else variant of this tests the second condition if the
4654 first passes, but is true if the first fails. Reverse the first
4655 condition to get a true "inclusive-or" expression. */
4656 if (cond_or == 1)
84ed5e79
RE
4657 cond1 = reverse_condition (cond1);
4658
4659 /* If the comparisons are not equal, and one doesn't dominate the other,
4660 then we can't do this. */
4661 if (cond1 != cond2
5895f793
RE
4662 && !comparison_dominates_p (cond1, cond2)
4663 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
4664 return CCmode;
4665
4666 if (swapped)
4667 {
4668 enum rtx_code temp = cond1;
4669 cond1 = cond2;
4670 cond2 = temp;
4671 }
4672
4673 switch (cond1)
4674 {
4675 case EQ:
5895f793 4676 if (cond2 == EQ || !cond_or)
84ed5e79
RE
4677 return CC_DEQmode;
4678
4679 switch (cond2)
4680 {
4681 case LE: return CC_DLEmode;
4682 case LEU: return CC_DLEUmode;
4683 case GE: return CC_DGEmode;
4684 case GEU: return CC_DGEUmode;
ad076f4e 4685 default: break;
84ed5e79
RE
4686 }
4687
4688 break;
4689
4690 case LT:
5895f793 4691 if (cond2 == LT || !cond_or)
84ed5e79
RE
4692 return CC_DLTmode;
4693 if (cond2 == LE)
4694 return CC_DLEmode;
4695 if (cond2 == NE)
4696 return CC_DNEmode;
4697 break;
4698
4699 case GT:
5895f793 4700 if (cond2 == GT || !cond_or)
84ed5e79
RE
4701 return CC_DGTmode;
4702 if (cond2 == GE)
4703 return CC_DGEmode;
4704 if (cond2 == NE)
4705 return CC_DNEmode;
4706 break;
4707
4708 case LTU:
5895f793 4709 if (cond2 == LTU || !cond_or)
84ed5e79
RE
4710 return CC_DLTUmode;
4711 if (cond2 == LEU)
4712 return CC_DLEUmode;
4713 if (cond2 == NE)
4714 return CC_DNEmode;
4715 break;
4716
4717 case GTU:
5895f793 4718 if (cond2 == GTU || !cond_or)
84ed5e79
RE
4719 return CC_DGTUmode;
4720 if (cond2 == GEU)
4721 return CC_DGEUmode;
4722 if (cond2 == NE)
4723 return CC_DNEmode;
4724 break;
4725
4726 /* The remaining cases only occur when both comparisons are the
4727 same. */
4728 case NE:
4729 return CC_DNEmode;
4730
4731 case LE:
4732 return CC_DLEmode;
4733
4734 case GE:
4735 return CC_DGEmode;
4736
4737 case LEU:
4738 return CC_DLEUmode;
4739
4740 case GEU:
4741 return CC_DGEUmode;
ad076f4e
RE
4742
4743 default:
4744 break;
84ed5e79
RE
4745 }
4746
4747 abort ();
4748}
4749
4750enum machine_mode
4751arm_select_cc_mode (op, x, y)
4752 enum rtx_code op;
4753 rtx x;
4754 rtx y;
4755{
4756 /* All floating point compares return CCFP if it is an equality
4757 comparison, and CCFPE otherwise. */
4758 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
4759 {
4760 switch (op)
4761 {
4762 case EQ:
4763 case NE:
4764 case UNORDERED:
4765 case ORDERED:
4766 case UNLT:
4767 case UNLE:
4768 case UNGT:
4769 case UNGE:
4770 case UNEQ:
4771 case LTGT:
4772 return CCFPmode;
4773
4774 case LT:
4775 case LE:
4776 case GT:
4777 case GE:
4778 return CCFPEmode;
4779
4780 default:
4781 abort ();
4782 }
4783 }
84ed5e79
RE
4784
4785 /* A compare with a shifted operand. Because of canonicalization, the
4786 comparison will have to be swapped when we emit the assembler. */
4787 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4788 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4789 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4790 || GET_CODE (x) == ROTATERT))
4791 return CC_SWPmode;
4792
956d6950
JL
4793 /* This is a special case that is used by combine to allow a
4794 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 4795 followed by a comparison of the shifted integer (only valid for
956d6950 4796 equalities and unsigned inequalities). */
84ed5e79
RE
4797 if (GET_MODE (x) == SImode
4798 && GET_CODE (x) == ASHIFT
4799 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4800 && GET_CODE (XEXP (x, 0)) == SUBREG
4801 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4802 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4803 && (op == EQ || op == NE
4804 || op == GEU || op == GTU || op == LTU || op == LEU)
4805 && GET_CODE (y) == CONST_INT)
4806 return CC_Zmode;
4807
1646cf41
RE
4808 /* A construct for a conditional compare, if the false arm contains
4809 0, then both conditions must be true, otherwise either condition
4810 must be true. Not all conditions are possible, so CCmode is
4811 returned if it can't be done. */
4812 if (GET_CODE (x) == IF_THEN_ELSE
4813 && (XEXP (x, 2) == const0_rtx
4814 || XEXP (x, 2) == const1_rtx)
4815 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4816 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4817 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4818 INTVAL (XEXP (x, 2)));
4819
4820 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4821 if (GET_CODE (x) == AND
4822 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4823 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4824 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4825
4826 if (GET_CODE (x) == IOR
4827 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4828 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4829 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4830
84ed5e79
RE
4831 /* An operation that sets the condition codes as a side-effect, the
4832 V flag is not set correctly, so we can only use comparisons where
4833 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4834 instead. */
4835 if (GET_MODE (x) == SImode
4836 && y == const0_rtx
4837 && (op == EQ || op == NE || op == LT || op == GE)
4838 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4839 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4840 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4841 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4842 || GET_CODE (x) == LSHIFTRT
4843 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4844 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4845 return CC_NOOVmode;
4846
84ed5e79
RE
4847 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4848 return CC_Zmode;
4849
bd9c7e23
RE
4850 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4851 && GET_CODE (x) == PLUS
4852 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4853 return CC_Cmode;
4854
84ed5e79
RE
4855 return CCmode;
4856}
4857
ff9940b0
RE
4858/* X and Y are two things to compare using CODE. Emit the compare insn and
4859 return the rtx for register 0 in the proper mode. FP means this is a
4860 floating point compare: I don't think that it is needed on the arm. */
4861
4862rtx
d5b7b3ae 4863arm_gen_compare_reg (code, x, y)
ff9940b0
RE
4864 enum rtx_code code;
4865 rtx x, y;
4866{
4867 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 4868 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 4869
43cffd11
RE
4870 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4871 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4872
4873 return cc_reg;
4874}
4875
0a81f500
RE
/* Handle loading a half-word from memory during reload.  operands[0]
   is the destination register, operands[1] the (possibly spilt)
   half-word source, and operands[2] a scratch register; both
   REGNO (operands[2]) and the following hard register are used, so the
   scratch is presumably a DImode register pair (matching
   arm_reload_out_hi) -- TODO confirm against the reload pattern.  The
   load is synthesised as two zero-extended byte loads combined with a
   shift and an IOR.  */
void
arm_reload_in_hi (operands)
     rtx * operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Strip a SUBREG, remembering the byte offset it implies.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Materialize the whole address in the second scratch register.  */
      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend the low 32 bits of (offset - lo): the mask/xor/
	 subtract trick avoids relying on HOST_WIDE_INT being exactly
	 32 bits wide.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      /* The split must be exact, by construction of LO and HI.  */
      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Load the two bytes: the one at OFFSET into the scratch, the one at
     OFFSET + 1 into the destination (viewed as SImode).  */
  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  /* Combine the two bytes; which byte is shifted up depends on the
     target's byte order.  */
  if (!BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			gen_rtx_IOR (SImode,
				     gen_rtx_ASHIFT
				     (SImode,
				      gen_rtx_SUBREG (SImode, operands[0], 0),
				      GEN_INT (8)),
				     scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT (SImode, scratch,
							 GEN_INT (8)),
					 gen_rtx_SUBREG (SImode, operands[0],
							 0))));
}
4981
f9cc092a
RE
/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */

void
arm_reload_out_hi (operands)
     rtx * operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Strip a SUBREG, remembering the byte offset it implies.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Updating base_plus might destroy outval, see if we can
	     swap the scratch and base_plus.  */
	  if (!reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      /* Be conservative and copy OUTVAL into the scratch now,
		 this should only be necessary if outval is a subreg
		 of something larger than a word.  */
	      /* XXX Might this clobber base?  I can't see how it can,
		 since scratch is known to overlap with OUTVAL, and
		 must be wider than a word.  */
	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      /* Materialize the whole address in the second scratch register.  */
      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend the low 32 bits of (offset - lo); see the identical
	 computation in arm_reload_in_hi.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Be careful not to destroy OUTVAL.  */
	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Updating base_plus might destroy outval, see if we
		 can swap the scratch and base_plus.  */
	      if (!reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  /* Be conservative and copy outval into scratch now,
		     this should only be necessary if outval is a
		     subreg of something larger than a word.  */
		  /* XXX Might this clobber base?  I can't see how it
		     can, since scratch is known to overlap with
		     outval.  */
		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  if (BYTES_BIG_ENDIAN)
    {
      /* Big-endian: the low byte of OUTVAL goes at the higher address,
	 and bits 8-15 (extracted via the shift into SCRATCH) go at the
	 lower address.  */
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, scratch)));
    }
  else
    {
      /* Little-endian: low byte at the lower address, bits 8-15 at the
	 higher address.  */
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, scratch)));
    }
}
2b835d68 5147\f
d5b7b3ae 5148/* Print a symbolic form of X to the debug file, F. */
1d6e90ac 5149
d5b7b3ae
RE
5150static void
5151arm_print_value (f, x)
5152 FILE * f;
5153 rtx x;
5154{
5155 switch (GET_CODE (x))
5156 {
5157 case CONST_INT:
5158 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5159 return;
5160
5161 case CONST_DOUBLE:
5162 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5163 return;
5164
5165 case CONST_STRING:
5166 fprintf (f, "\"%s\"", XSTR (x, 0));
5167 return;
5168
5169 case SYMBOL_REF:
5170 fprintf (f, "`%s'", XSTR (x, 0));
5171 return;
5172
5173 case LABEL_REF:
5174 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5175 return;
5176
5177 case CONST:
5178 arm_print_value (f, XEXP (x, 0));
5179 return;
5180
5181 case PLUS:
5182 arm_print_value (f, XEXP (x, 0));
5183 fprintf (f, "+");
5184 arm_print_value (f, XEXP (x, 1));
5185 return;
5186
5187 case PC:
5188 fprintf (f, "pc");
5189 return;
5190
5191 default:
5192 fprintf (f, "????");
5193 return;
5194 }
5195}
5196\f
2b835d68 5197/* Routines for manipulation of the constant pool. */
2b835d68 5198
949d79eb
RE
5199/* Arm instructions cannot load a large constant directly into a
5200 register; they have to come from a pc relative load. The constant
5201 must therefore be placed in the addressable range of the pc
5202 relative load. Depending on the precise pc relative load
5203 instruction the range is somewhere between 256 bytes and 4k. This
5204 means that we often have to dump a constant inside a function, and
2b835d68
RE
5205 generate code to branch around it.
5206
949d79eb
RE
5207 It is important to minimize this, since the branches will slow
5208 things down and make the code larger.
2b835d68 5209
949d79eb
RE
5210 Normally we can hide the table after an existing unconditional
5211 branch so that there is no interruption of the flow, but in the
5212 worst case the code looks like this:
2b835d68
RE
5213
5214 ldr rn, L1
949d79eb 5215 ...
2b835d68
RE
5216 b L2
5217 align
5218 L1: .long value
5219 L2:
949d79eb 5220 ...
2b835d68 5221
2b835d68 5222 ldr rn, L3
949d79eb 5223 ...
2b835d68
RE
5224 b L4
5225 align
2b835d68
RE
5226 L3: .long value
5227 L4:
949d79eb
RE
5228 ...
5229
5230 We fix this by performing a scan after scheduling, which notices
5231 which instructions need to have their operands fetched from the
5232 constant table and builds the table.
5233
5234 The algorithm starts by building a table of all the constants that
5235 need fixing up and all the natural barriers in the function (places
5236 where a constant table can be dropped without breaking the flow).
5237 For each fixup we note how far the pc-relative replacement will be
5238 able to reach and the offset of the instruction into the function.
5239
5240 Having built the table we then group the fixes together to form
5241 tables that are as large as possible (subject to addressing
5242 constraints) and emit each table of constants after the last
5243 barrier that is within range of all the instructions in the group.
5244 If a group does not contain a barrier, then we forcibly create one
5245 by inserting a jump instruction into the flow. Once the table has
5246 been inserted, the insns are then modified to reference the
5247 relevant entry in the pool.
5248
6354dc9b 5249 Possible enhancements to the algorithm (not implemented) are:
949d79eb 5250
d5b7b3ae 5251 1) For some processors and object formats, there may be benefit in
949d79eb
RE
5252 aligning the pools to the start of cache lines; this alignment
5253 would need to be taken into account when calculating addressability
6354dc9b 5254 of a pool. */
2b835d68 5255
d5b7b3ae
RE
/* These typedefs are located at the start of this file, so that
   they can be used in the prototypes there.  This comment is to
   remind readers of that fact so that the following structures
   can be understood more easily.

     typedef struct minipool_node    Mnode;
     typedef struct minipool_fixup   Mfix;  */

/* An entry (one constant) in a minipool under construction.  */
struct minipool_node
{
  /* Doubly linked chain of entries.  */
  Mnode * next;
  Mnode * prev;
  /* The maximum offset into the code that this entry can be placed.  While
     pushing fixes for forward references, all entries are sorted in order
     of increasing max_address.  */
  HOST_WIDE_INT max_address;
  /* Similarly for an entry inserted for a backwards ref.  */
  HOST_WIDE_INT min_address;
  /* The number of fixes referencing this entry.  This can become zero
     if we "unpush" an entry.  In this case we ignore the entry when we
     come to emit the code.  */
  int refcount;
  /* The offset from the start of the minipool.  */
  HOST_WIDE_INT offset;
  /* The value in table.  */
  rtx value;
  /* The mode of value.  */
  enum machine_mode mode;
  /* The number of bytes the value occupies in the pool; fixes smaller
     than a word are padded up (see MINIPOOL_FIX_SIZE).  */
  int fix_size;
};
5287
/* A fixup: one insn that needs its constant operand replaced by a
   reference into a minipool, or (for a barrier entry) a point where a
   pool may be dumped.  */
struct minipool_fixup
{
  /* Singly linked chain of fixes, in order of increasing address.  */
  Mfix * next;
  /* The insn requiring the fixup (or the barrier insn itself).  */
  rtx insn;
  /* The offset of INSN from the start of the function.  */
  HOST_WIDE_INT address;
  /* Pointer to the operand within INSN that must be redirected at the
     pool entry.  */
  rtx * loc;
  /* The machine mode of the value to be loaded.  */
  enum machine_mode mode;
  /* Bytes the value occupies in the pool (see MINIPOOL_FIX_SIZE).  */
  int fix_size;
  /* The constant that must be placed in the pool.  */
  rtx value;
  /* The pool entry servicing this fix, once one has been allocated.  */
  Mnode * minipool;
  /* How far forwards/backwards (in bytes) the pc-relative load can
     reach; taken from the insn's pool_range attributes at push time.  */
  HOST_WIDE_INT forwards;
  HOST_WIDE_INT backwards;
};
2b835d68 5301
d5b7b3ae
RE
/* Fixes less than a word need padding out to a word boundary.  */
#define MINIPOOL_FIX_SIZE(mode) \
  (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)

/* Head and tail of the doubly-linked list of entries in the minipool
   currently under construction, and the label that will address it.  */
static Mnode * minipool_vector_head;
static Mnode * minipool_vector_tail;
static rtx minipool_vector_label;

/* The linked list of all minipool fixes required for this function.  */
Mfix * minipool_fix_head;
Mfix * minipool_fix_tail;
/* The fix entry for the current minipool, once it has been placed.  */
Mfix * minipool_barrier;
5315
5316/* Determines if INSN is the start of a jump table. Returns the end
5317 of the TABLE or NULL_RTX. */
1d6e90ac 5318
d5b7b3ae
RE
5319static rtx
5320is_jump_table (insn)
5321 rtx insn;
2b835d68 5322{
d5b7b3ae 5323 rtx table;
da6558fd 5324
d5b7b3ae
RE
5325 if (GET_CODE (insn) == JUMP_INSN
5326 && JUMP_LABEL (insn) != NULL
5327 && ((table = next_real_insn (JUMP_LABEL (insn)))
5328 == next_real_insn (insn))
5329 && table != NULL
5330 && GET_CODE (table) == JUMP_INSN
5331 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5332 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5333 return table;
5334
5335 return NULL_RTX;
2b835d68
RE
5336}
5337
657d9449
RE
5338#ifndef JUMP_TABLES_IN_TEXT_SECTION
5339#define JUMP_TABLES_IN_TEXT_SECTION 0
5340#endif
5341
d5b7b3ae
RE
/* Return the number of bytes the dispatch-table insn INSN (an
   ADDR_VEC or ADDR_DIFF_VEC) contributes to the code stream, or 0 if
   jump tables are not emitted into the text section.  */

static HOST_WIDE_INT
get_jump_table_size (insn)
     rtx insn;
{
  /* ADDR_VECs only take room if read-only data goes into the text
     section.  */
  if (JUMP_TABLES_IN_TEXT_SECTION
#if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
      /* Without a read-only data section, the table necessarily lands
	 in text.  */
      || 1
#endif
      )
    {
      rtx body = PATTERN (insn);
      /* ADDR_DIFF_VEC holds its label vector in operand 1, ADDR_VEC in
	 operand 0.  */
      int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;

      return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
    }

  return 0;
}
2b835d68 5362
d5b7b3ae
RE
/* Move a minipool fix MP from its current location to before MAX_MP.
   If MAX_MP is NULL, then MP doesn't need moving, but the addressing
   constraints may need updating.  Returns MP, which becomes the new
   MAX_MP for the caller's scan.  */

static Mnode *
move_minipool_fix_forward_ref (mp, max_mp, max_address)
     Mnode * mp;
     Mnode * max_mp;
     HOST_WIDE_INT max_address;
{
  /* This should never be true and the code below assumes these are
     different.  */
  if (mp == max_mp)
    abort ();

  if (max_mp == NULL)
    {
      /* No move required; just tighten the placement constraint.  */
      if (max_address < mp->max_address)
	mp->max_address = max_address;
    }
  else
    {
      /* MP must sit far enough before MAX_MP to leave room for itself.  */
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      /* Unlink MP from its current position.  Since max_mp is non-null,
	 mp->prev must be non-null.  */
      mp->prev->next = mp->next;
      if (mp->next != NULL)
	mp->next->prev = mp->prev;
      else
	minipool_vector_tail = mp->prev;

      /* Re-insert it before MAX_MP.  */
      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;

      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
5423
d5b7b3ae
RE
/* Add a constant to the minipool for a forward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.  */

static Mnode *
add_minipool_forward_ref (fix)
     Mfix * fix;
{
  /* If set, max_mp is the first pool_entry that has a lower
     constraint than the one we are trying to add.  */
  Mnode * max_mp = NULL;
  HOST_WIDE_INT max_address = fix->address + fix->forwards;
  Mnode * mp;

  /* If this fix's address is greater than the address of the first
     entry, then we can't put the fix in this pool.  We subtract the
     size of the current fix to ensure that if the table is fully
     packed we still have enough room to insert this value by shuffling
     the other fixes forwards.  */
  if (minipool_vector_head &&
      fix->address >= minipool_vector_head->max_address - fix->fix_size)
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value))
	{
	  /* More than one fix references this entry.  */
	  mp->refcount++;
	  return move_minipool_fix_forward_ref (mp, max_mp, max_address);
	}

      /* Note the insertion point if necessary.  */
      if (max_mp == NULL
	  && mp->max_address > max_address)
	max_mp = mp;
    }

  /* The value is not currently in the minipool, so we need to create
     a new entry for it.  If MAX_MP is NULL, the entry will be put on
     the end of the list since the placement is less constrained than
     any existing entry.  Otherwise, we insert the new fix before
     MAX_MP and, if necessary, adjust the constraints on the other
     entries.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a backwards ref.  */
  mp->min_address = -65536;

  if (max_mp == NULL)
    {
      /* Append at the tail; create the pool label if this is the very
	 first entry.  */
      mp->max_address = max_address;
      mp->next = NULL;
      mp->prev = minipool_vector_tail;

      if (mp->prev == NULL)
	{
	  minipool_vector_head = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->prev->next = mp;

      minipool_vector_tail = mp;
    }
  else
    {
      /* Insert before MAX_MP, leaving room for this entry.  */
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
5530
/* Move a minipool fix MP to come after MIN_MP; if MIN_MP is NULL, MP
   stays put and only its min_address constraint is tightened.  The
   offsets of all entries and the forward-propagated min_address
   constraints are then recomputed.  Returns MP as the new MIN_MP.  */

static Mnode *
move_minipool_fix_backward_ref (mp, min_mp, min_address)
     Mnode * mp;
     Mnode * min_mp;
     HOST_WIDE_INT min_address;
{
  HOST_WIDE_INT offset;

  /* This should never be true, and the code below assumes these are
     different.  */
  if (mp == min_mp)
    abort ();

  if (min_mp == NULL)
    {
      if (min_address > mp->min_address)
	mp->min_address = min_address;
    }
  else
    {
      /* We will adjust this below if it is too loose.  */
      mp->min_address = min_address;

      /* Unlink MP from its current position.  Since min_mp is non-null,
	 mp->next must be non-null.  */
      mp->next->prev = mp->prev;
      if (mp->prev != NULL)
	mp->prev->next = mp->next;
      else
	minipool_vector_head = mp->next;

      /* Reinsert it after MIN_MP.  */
      mp->prev = min_mp;
      mp->next = min_mp->next;
      min_mp->next = mp;
      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  min_mp = mp;

  /* Recompute the offset of every live entry, and push each entry's
     min_address constraint onto its successor.  */
  offset = 0;
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;
      if (mp->refcount > 0)
	offset += mp->fix_size;

      if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;
    }

  return min_mp;
}
5587
/* Add a constant to the minipool for a backward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.

   Note that the code for insertion for a backwards reference can be
   somewhat confusing because the calculated offsets for each fix do
   not take into account the size of the pool (which is still under
   construction).  */

static Mnode *
add_minipool_backward_ref (fix)
     Mfix * fix;
{
  /* If set, min_mp is the last pool_entry that has a lower constraint
     than the one we are trying to add.  */
  Mnode * min_mp = NULL;
  /* This can be negative, since it is only a constraint.  */
  HOST_WIDE_INT min_address = fix->address - fix->backwards;
  Mnode * mp;

  /* If we can't reach the current pool from this insn, or if we can't
     insert this entry at the end of the pool without pushing other
     fixes out of range, then we don't try.  This ensures that we
     can't fail later on.  */
  if (min_address >= minipool_barrier->address
      || (minipool_vector_tail->min_address + fix->fix_size
	  >= minipool_barrier->address))
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value)
	  /* Check that there is enough slack to move this entry to the
	     end of the table (this is conservative).  */
	  && (mp->max_address
	      > (minipool_barrier->address
		 + minipool_vector_tail->offset
		 + minipool_vector_tail->fix_size)))
	{
	  mp->refcount++;
	  return move_minipool_fix_backward_ref (mp, min_mp, min_address);
	}

      if (min_mp != NULL)
	/* An insertion point has already been found; every entry we
	   pass after it gains this fix's size as extra slack.  */
	mp->min_address += fix->fix_size;
      else
	{
	  /* Note the insertion point if necessary.  */
	  if (mp->min_address < min_address)
	    min_mp = mp;
	  else if (mp->max_address
		   < minipool_barrier->address + mp->offset + fix->fix_size)
	    {
	      /* Inserting before this entry would push the fix beyond
		 its maximum address (which can happen if we have
		 re-located a forwards fix); force the new fix to come
		 after it.  */
	      min_mp = mp;
	      min_address = mp->min_address + fix->fix_size;
	    }
	}
    }

  /* We need to create a new entry.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet constrained in the forward direction.  */
  mp->max_address = minipool_barrier->address + 65536;

  mp->min_address = min_address;

  if (min_mp == NULL)
    {
      /* Prepend at the head; create the pool label if this is the very
	 first entry.  */
      mp->prev = NULL;
      mp->next = minipool_vector_head;

      if (mp->next == NULL)
	{
	  minipool_vector_tail = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->next->prev = mp;

      minipool_vector_head = mp;
    }
  else
    {
      /* Insert just after MIN_MP.  */
      mp->next = min_mp->next;
      mp->prev = min_mp;
      min_mp->next = mp;

      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  /* Save the new entry.  */
  min_mp = mp;

  if (mp->prev)
    mp = mp->prev;
  else
    mp->offset = 0;

  /* Scan over the following entries and adjust their offsets.  */
  while (mp->next != NULL)
    {
      if (mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;

      if (mp->refcount)
	mp->next->offset = mp->offset + mp->fix_size;
      else
	mp->next->offset = mp->offset;

      mp = mp->next;
    }

  return min_mp;
}
5720
5721static void
5722assign_minipool_offsets (barrier)
5723 Mfix * barrier;
5724{
5725 HOST_WIDE_INT offset = 0;
5726 Mnode * mp;
5727
5728 minipool_barrier = barrier;
5729
5730 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5731 {
5732 mp->offset = offset;
da6558fd 5733
d5b7b3ae
RE
5734 if (mp->refcount > 0)
5735 offset += mp->fix_size;
5736 }
5737}
5738
5739/* Output the literal table */
5740static void
5741dump_minipool (scan)
5742 rtx scan;
5743{
5744 Mnode * mp;
5745 Mnode * nmp;
5746
5747 if (rtl_dump_file)
5748 fprintf (rtl_dump_file,
5749 ";; Emitting minipool after insn %u; address %ld\n",
5750 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5751
5752 scan = emit_label_after (gen_label_rtx (), scan);
5753 scan = emit_insn_after (gen_align_4 (), scan);
5754 scan = emit_label_after (minipool_vector_label, scan);
5755
5756 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5757 {
5758 if (mp->refcount > 0)
5759 {
5760 if (rtl_dump_file)
5761 {
5762 fprintf (rtl_dump_file,
5763 ";; Offset %u, min %ld, max %ld ",
5764 (unsigned) mp->offset, (unsigned long) mp->min_address,
5765 (unsigned long) mp->max_address);
5766 arm_print_value (rtl_dump_file, mp->value);
5767 fputc ('\n', rtl_dump_file);
5768 }
5769
5770 switch (mp->fix_size)
5771 {
5772#ifdef HAVE_consttable_1
5773 case 1:
5774 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5775 break;
5776
5777#endif
5778#ifdef HAVE_consttable_2
5779 case 2:
5780 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5781 break;
5782
5783#endif
5784#ifdef HAVE_consttable_4
5785 case 4:
5786 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5787 break;
5788
5789#endif
5790#ifdef HAVE_consttable_8
5791 case 8:
5792 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5793 break;
5794
5795#endif
5796 default:
5797 abort ();
5798 break;
5799 }
5800 }
5801
5802 nmp = mp->next;
5803 free (mp);
2b835d68
RE
5804 }
5805
d5b7b3ae
RE
5806 minipool_vector_head = minipool_vector_tail = NULL;
5807 scan = emit_insn_after (gen_consttable_end (), scan);
5808 scan = emit_barrier_after (scan);
2b835d68
RE
5809}
5810
d5b7b3ae 5811/* Return the cost of forcibly inserting a barrier after INSN. */
1d6e90ac 5812
d5b7b3ae
RE
5813static int
5814arm_barrier_cost (insn)
5815 rtx insn;
949d79eb 5816{
d5b7b3ae
RE
5817 /* Basing the location of the pool on the loop depth is preferable,
5818 but at the moment, the basic block information seems to be
5819 corrupt by this stage of the compilation. */
5820 int base_cost = 50;
5821 rtx next = next_nonnote_insn (insn);
5822
5823 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5824 base_cost -= 20;
5825
5826 switch (GET_CODE (insn))
5827 {
5828 case CODE_LABEL:
5829 /* It will always be better to place the table before the label, rather
5830 than after it. */
5831 return 50;
949d79eb 5832
d5b7b3ae
RE
5833 case INSN:
5834 case CALL_INSN:
5835 return base_cost;
5836
5837 case JUMP_INSN:
5838 return base_cost - 10;
5839
5840 default:
5841 return base_cost + 10;
5842 }
5843}
5844
/* Find the best place in the insn stream in the range
   (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
   Create the barrier by inserting a jump and add a new fix entry for
   it.  */

static Mfix *
create_fix_barrier (fix, max_address)
     Mfix * fix;
     HOST_WIDE_INT max_address;
{
  HOST_WIDE_INT count = 0;
  rtx barrier;
  rtx from = fix->insn;
  rtx selected = from;
  int selected_cost;
  HOST_WIDE_INT selected_address;
  Mfix * new_fix;
  HOST_WIDE_INT max_count = max_address - fix->address;
  rtx label = gen_label_rtx ();

  /* Start with the insn carrying the fix as the fallback location.  */
  selected_cost = arm_barrier_cost (from);
  selected_address = fix->address;

  /* Walk forwards, tracking the byte count, and keep the cheapest
     location seen (ties go to the later insn, via <=).  */
  while (from && count < max_count)
    {
      rtx tmp;
      int new_cost;

      /* This code shouldn't have been called if there was a natural barrier
	 within range.  */
      if (GET_CODE (from) == BARRIER)
	abort ();

      /* Count the length of this insn.  */
      count += get_attr_length (from);

      /* If there is a jump table, add its length.  */
      tmp = is_jump_table (from);
      if (tmp != NULL)
	{
	  count += get_jump_table_size (tmp);

	  /* Jump tables aren't in a basic block, so base the cost on
	     the dispatch insn.  If we select this location, we will
	     still put the pool after the table.  */
	  new_cost = arm_barrier_cost (from);

	  if (count < max_count && new_cost <= selected_cost)
	    {
	      selected = tmp;
	      selected_cost = new_cost;
	      selected_address = fix->address + count;
	    }

	  /* Continue after the dispatch table.  */
	  from = NEXT_INSN (tmp);
	  continue;
	}

      new_cost = arm_barrier_cost (from);

      if (count < max_count && new_cost <= selected_cost)
	{
	  selected = from;
	  selected_cost = new_cost;
	  selected_address = fix->address + count;
	}

      from = NEXT_INSN (from);
    }

  /* Create a new JUMP_INSN that branches around a barrier.  */
  from = emit_jump_insn_after (gen_jump (label), selected);
  JUMP_LABEL (from) = label;
  barrier = emit_barrier_after (from);
  emit_label_after (label, barrier);

  /* Create a minipool barrier entry for the new barrier, linked into
     the fix chain immediately after FIX.  */
  new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
  new_fix->insn = barrier;
  new_fix->address = selected_address;
  new_fix->next = fix->next;
  fix->next = new_fix;

  return new_fix;
}
5931
5932/* Record that there is a natural barrier in the insn stream at
5933 ADDRESS. */
949d79eb
RE
5934static void
5935push_minipool_barrier (insn, address)
2b835d68 5936 rtx insn;
d5b7b3ae 5937 HOST_WIDE_INT address;
2b835d68 5938{
c7319d87 5939 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 5940
949d79eb
RE
5941 fix->insn = insn;
5942 fix->address = address;
2b835d68 5943
949d79eb
RE
5944 fix->next = NULL;
5945 if (minipool_fix_head != NULL)
5946 minipool_fix_tail->next = fix;
5947 else
5948 minipool_fix_head = fix;
5949
5950 minipool_fix_tail = fix;
5951}
2b835d68 5952
d5b7b3ae
RE
/* Record INSN, which will need fixing up to load a value from the
   minipool.  ADDRESS is the offset of the insn since the start of the
   function; LOC is a pointer to the part of the insn which requires
   fixing; VALUE is the constant that must be loaded, which is of type
   MODE.  */
static void
push_minipool_fix (insn, address, loc, mode, value)
     rtx insn;
     HOST_WIDE_INT address;
     rtx * loc;
     enum machine_mode mode;
     rtx value;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

#ifdef AOF_ASSEMBLER
  /* PIC symbol references need to be converted into offsets into the
     based area.  */
  /* XXX This shouldn't be done here.  */
  if (flag_pic && GET_CODE (value) == SYMBOL_REF)
    value = aof_pic_entry (value);
#endif /* AOF_ASSEMBLER */

  fix->insn = insn;
  fix->address = address;
  fix->loc = loc;
  fix->mode = mode;
  fix->fix_size = MINIPOOL_FIX_SIZE (mode);
  fix->value = value;
  /* Reachable range of the pc-relative load, from the insn's
     pool_range/neg_pool_range attributes.  */
  fix->forwards = get_attr_pool_range (insn);
  fix->backwards = get_attr_neg_pool_range (insn);
  fix->minipool = NULL;

  /* If an insn doesn't have a range defined for it, then it isn't
     expecting to be reworked by this code.  Better to abort now than
     to generate duff assembly code.  */
  if (fix->forwards == 0 && fix->backwards == 0)
    abort ();

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file,
	       ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
	       GET_MODE_NAME (mode),
	       INSN_UID (insn), (unsigned long) address,
	       -1 * (long)fix->backwards, (long)fix->forwards);
      arm_print_value (rtl_dump_file, fix->value);
      fprintf (rtl_dump_file, "\n");
    }

  /* Add it to the chain of fixes.  */
  fix->next = NULL;

  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
6013
/* Scan INSN and note any of its operands that need fixing.
   ADDRESS is the offset of INSN from the start of the function; any
   constant operand that must come from the minipool is recorded via
   push_minipool_fix.  */

static void
note_invalid_constants (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  int opno;

  extract_insn (insn);

  if (!constrain_operands (1))
    fatal_insn_not_found (insn);

  /* Fill in recog_op_alt with information about the constraints of this
     insn.  */
  preprocess_constraints ();

  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things we need to fix can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
	continue;

      /* If this alternative is a memory reference, then any mention
	 of constants in this alternative is really to fool reload
	 into allowing us to accept one there.  We need to fix them up
	 now so that we output the right code.  */
      if (recog_op_alt[opno][which_alternative].memory_ok)
	{
	  rtx op = recog_data.operand[opno];

	  /* A bare constant operand gets pushed into the minipool.  */
	  if (CONSTANT_P (op))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno], op);
#if 0
	  /* RWE: Now we look correctly at the operands for the insn,
	     this shouldn't be needed any more.  */
#ifndef AOF_ASSEMBLER
	  /* XXX Is this still needed?  */
	  else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       XVECEXP (op, 0, 0));
#endif
#endif
	  /* A reference into the constant pool is redirected to the
	     minipool, using the pooled constant itself as the value.  */
	  else if (GET_CODE (op) == MEM
		   && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       get_pool_constant (XEXP (op, 0)));
	}
    }
}
6069
/* Machine-dependent reorg pass: scan the function starting at FIRST,
   record every constant operand that must be materialized from a
   minipool, then place minipools (dumping them after barriers,
   creating new jump-around barriers where necessary) and rewrite the
   fixed-up operands to reference the pool entries.  */

void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  HOST_WIDE_INT address = 0;
  Mfix * fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  if (GET_CODE (first) != NOTE)
    abort ();

  /* Scan all the insns and record the operands that will need fixing.  */
  for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
    {
      if (GET_CODE (insn) == BARRIER)
	push_minipool_barrier (insn, address);
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  rtx table;

	  note_invalid_constants (insn, address);
	  address += get_attr_length (insn);

	  /* If the insn is a vector jump, add the size of the table
	     and skip the table.  */
	  if ((table = is_jump_table (insn)) != NULL)
	    {
	      address += get_jump_table_size (table);
	      insn = table;
	    }
	}
    }

  fix = minipool_fix_head;

  /* Now scan the fixups and perform the required changes.  */
  while (fix)
    {
      Mfix * ftmp;
      Mfix * fdel;
      Mfix * last_added_fix;
      Mfix * last_barrier = NULL;
      Mfix * this_fix;

      /* Skip any further barriers before the next fix.  */
      while (fix && GET_CODE (fix->insn) == BARRIER)
	fix = fix->next;

      /* No more fixes.  */
      if (fix == NULL)
	break;

      last_added_fix = NULL;

      /* Gather as many fixes as can share one pool, remembering the
	 last natural barrier seen within range.  */
      for (ftmp = fix; ftmp; ftmp = ftmp->next)
	{
	  if (GET_CODE (ftmp->insn) == BARRIER)
	    {
	      /* A barrier beyond the pool's maximum address ends the
		 group.  */
	      if (ftmp->address >= minipool_vector_head->max_address)
		break;

	      last_barrier = ftmp;
	    }
	  else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
	    break;

	  last_added_fix = ftmp;  /* Keep track of the last fix added.  */
	}

      /* If we found a barrier, drop back to that; any fixes that we
	 could have reached but come after the barrier will now go in
	 the next mini-pool.  */
      if (last_barrier != NULL)
	{
	  /* Reduce the refcount for those fixes that won't go into this
	     pool after all.  */
	  for (fdel = last_barrier->next;
	       fdel && fdel != ftmp;
	       fdel = fdel->next)
	    {
	      fdel->minipool->refcount--;
	      fdel->minipool = NULL;
	    }

	  ftmp = last_barrier;
	}
      else
	{
	  /* ftmp is first fix that we can't fit into this pool and
	     there no natural barriers that we could use.  Insert a
	     new barrier in the code somewhere between the previous
	     fix and this one, and arrange to jump around it.  */
	  HOST_WIDE_INT max_address;

	  /* The last item on the list of fixes must be a barrier, so
	     we can never run off the end of the list of fixes without
	     last_barrier being set.  */
	  if (ftmp == NULL)
	    abort ();

	  max_address = minipool_vector_head->max_address;
	  /* Check that there isn't another fix that is in range that
	     we couldn't fit into this pool because the pool was
	     already too large: we need to put the pool before such an
	     instruction.  */
	  if (ftmp->address < max_address)
	    max_address = ftmp->address;

	  last_barrier = create_fix_barrier (last_added_fix, max_address);
	}

      assign_minipool_offsets (last_barrier);

      /* Fixes after the pool may still be able to reach it backwards;
	 claim as many of those as possible.  */
      while (ftmp)
	{
	  if (GET_CODE (ftmp->insn) != BARRIER
	      && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
		  == NULL))
	    break;

	  ftmp = ftmp->next;
	}

      /* Scan over the fixes we have identified for this pool, fixing them
	 up and adding the constants to the pool itself.  */
      for (this_fix = fix; this_fix && ftmp != this_fix;
	   this_fix = this_fix->next)
	if (GET_CODE (this_fix->insn) != BARRIER)
	  {
	    rtx addr
	      = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
						  minipool_vector_label),
			       this_fix->minipool->offset);
	    *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
	  }

      dump_minipool (last_barrier->insn);
      fix = ftmp;
    }

  /* From now on we must synthesize any constants that we can't handle
     directly.  This can happen if the RTL gets split during final
     instruction generation.  */
  after_arm_reorg = 1;

  /* Free the minipool memory.  */
  obstack_free (&minipool_obstack, minipool_startobj);
}
cce8749e
CH
6223\f
6224/* Routines to output assembly language. */
6225
f3bb6135 6226/* If the rtx is the correct value then return the string of the number.
ff9940b0 6227 In this way we can ensure that valid double constants are generated even
6354dc9b 6228 when cross compiling. */
1d6e90ac 6229
cd2b33d0 6230const char *
ff9940b0 6231fp_immediate_constant (x)
b5cc037f 6232 rtx x;
ff9940b0
RE
6233{
6234 REAL_VALUE_TYPE r;
6235 int i;
6236
6237 if (!fpa_consts_inited)
6238 init_fpa_table ();
6239
6240 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6241 for (i = 0; i < 8; i++)
6242 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6243 return strings_fpa[i];
f3bb6135 6244
ff9940b0
RE
6245 abort ();
6246}
6247
9997d19d 6248/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
1d6e90ac 6249
cd2b33d0 6250static const char *
9997d19d 6251fp_const_from_val (r)
62b10bbc 6252 REAL_VALUE_TYPE * r;
9997d19d
RE
6253{
6254 int i;
6255
5895f793 6256 if (!fpa_consts_inited)
9997d19d
RE
6257 init_fpa_table ();
6258
6259 for (i = 0; i < 8; i++)
6260 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6261 return strings_fpa[i];
6262
6263 abort ();
6264}
ff9940b0 6265
cce8749e
CH
6266/* Output the operands of a LDM/STM instruction to STREAM.
6267 MASK is the ARM register set mask of which only bits 0-15 are important.
6d3d9133
NC
6268 REG is the base register, either the frame pointer or the stack pointer,
6269 INSTR is the possibly suffixed load or store instruction. */
cce8749e 6270
d5b7b3ae 6271static void
6d3d9133 6272print_multi_reg (stream, instr, reg, mask)
62b10bbc 6273 FILE * stream;
cd2b33d0 6274 const char * instr;
dd18ae56
NC
6275 int reg;
6276 int mask;
cce8749e
CH
6277{
6278 int i;
6279 int not_first = FALSE;
6280
1d5473cb 6281 fputc ('\t', stream);
dd18ae56 6282 asm_fprintf (stream, instr, reg);
1d5473cb 6283 fputs (", {", stream);
62b10bbc 6284
d5b7b3ae 6285 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
6286 if (mask & (1 << i))
6287 {
6288 if (not_first)
6289 fprintf (stream, ", ");
62b10bbc 6290
dd18ae56 6291 asm_fprintf (stream, "%r", i);
cce8749e
CH
6292 not_first = TRUE;
6293 }
f3bb6135 6294
6d3d9133 6295 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
f3bb6135 6296}
cce8749e 6297
6354dc9b 6298/* Output a 'call' insn. */
cce8749e 6299
cd2b33d0 6300const char *
cce8749e 6301output_call (operands)
62b10bbc 6302 rtx * operands;
cce8749e 6303{
6354dc9b 6304 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 6305
62b10bbc 6306 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 6307 {
62b10bbc 6308 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 6309 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 6310 }
62b10bbc 6311
1d5473cb 6312 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 6313
6cfc7210 6314 if (TARGET_INTERWORK)
da6558fd
NC
6315 output_asm_insn ("bx%?\t%0", operands);
6316 else
6317 output_asm_insn ("mov%?\t%|pc, %0", operands);
6318
f3bb6135
RE
6319 return "";
6320}
cce8749e 6321
ff9940b0
RE
/* Replace every reference to lr inside *X with ip, rewriting in
   place.  Returns nonzero if any replacement was made.  Used by
   output_call_mem on a memory operand whose address mentions lr.  */

static int
eliminate_lr2ip (x)
     rtx * x;
{
  int something_changed = 0;
  rtx x0 = * x;
  int code = GET_CODE (x0);
  int i, j;
  const char * fmt;

  switch (code)
    {
    case REG:
      /* Direct register reference: swap lr for ip.  */
      if (REGNO (x0) == LR_REGNUM)
	{
	  *x = gen_rtx_REG (SImode, IP_REGNUM);
	  return 1;
	}
      return 0;
    default:
      /* Scan through the sub-elements and change any references there.  */
      fmt = GET_RTX_FORMAT (code);

      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (fmt[i] == 'e')
	  something_changed |= eliminate_lr2ip (&XEXP (x0, i));
	else if (fmt[i] == 'E')
	  /* 'E' is a vector of rtx; recurse into each element.  */
	  for (j = 0; j < XVECLEN (x0, i); j++)
	    something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));

      return something_changed;
    }
}
6355
/* Output a 'call' insn that is a reference in memory.  */

const char *
output_call_mem (operands)
     rtx * operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful.  */
  /* Handle calls using lr by using ip (which may be clobbered in subr anyway).  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  if (TARGET_INTERWORK)
    {
      /* Load the target into ip first, then branch via bx for
	 interworking.  */
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else
    {
      /* Set up the return address, then load the target into pc.  */
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}
ff9940b0
RE
6381
6382
6383/* Output a move from arm registers to an fpu registers.
6384 OPERANDS[0] is an fpu register.
6385 OPERANDS[1] is the first registers of an arm register pair. */
6386
cd2b33d0 6387const char *
ff9940b0 6388output_mov_long_double_fpu_from_arm (operands)
62b10bbc 6389 rtx * operands;
ff9940b0
RE
6390{
6391 int arm_reg0 = REGNO (operands[1]);
6392 rtx ops[3];
6393
62b10bbc
NC
6394 if (arm_reg0 == IP_REGNUM)
6395 abort ();
f3bb6135 6396
43cffd11
RE
6397 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6398 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6399 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6400
1d5473cb
RE
6401 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6402 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 6403
f3bb6135
RE
6404 return "";
6405}
ff9940b0
RE
6406
6407/* Output a move from an fpu register to arm registers.
6408 OPERANDS[0] is the first registers of an arm register pair.
6409 OPERANDS[1] is an fpu register. */
6410
cd2b33d0 6411const char *
ff9940b0 6412output_mov_long_double_arm_from_fpu (operands)
62b10bbc 6413 rtx * operands;
ff9940b0
RE
6414{
6415 int arm_reg0 = REGNO (operands[0]);
6416 rtx ops[3];
6417
62b10bbc
NC
6418 if (arm_reg0 == IP_REGNUM)
6419 abort ();
f3bb6135 6420
43cffd11
RE
6421 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6422 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6423 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6424
1d5473cb
RE
6425 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6426 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
6427 return "";
6428}
ff9940b0
RE
6429
6430/* Output a move from arm registers to arm registers of a long double
6431 OPERANDS[0] is the destination.
6432 OPERANDS[1] is the source. */
1d6e90ac 6433
cd2b33d0 6434const char *
ff9940b0 6435output_mov_long_double_arm_from_arm (operands)
62b10bbc 6436 rtx * operands;
ff9940b0 6437{
6354dc9b 6438 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
6439 int dest_start = REGNO (operands[0]);
6440 int src_start = REGNO (operands[1]);
6441 rtx ops[2];
6442 int i;
6443
6444 if (dest_start < src_start)
6445 {
6446 for (i = 0; i < 3; i++)
6447 {
43cffd11
RE
6448 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6449 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6450 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6451 }
6452 }
6453 else
6454 {
6455 for (i = 2; i >= 0; i--)
6456 {
43cffd11
RE
6457 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6458 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6459 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6460 }
6461 }
f3bb6135 6462
ff9940b0
RE
6463 return "";
6464}
6465
6466
cce8749e
CH
6467/* Output a move from arm registers to an fpu registers.
6468 OPERANDS[0] is an fpu register.
6469 OPERANDS[1] is the first registers of an arm register pair. */
6470
cd2b33d0 6471const char *
cce8749e 6472output_mov_double_fpu_from_arm (operands)
62b10bbc 6473 rtx * operands;
cce8749e
CH
6474{
6475 int arm_reg0 = REGNO (operands[1]);
6476 rtx ops[2];
6477
62b10bbc
NC
6478 if (arm_reg0 == IP_REGNUM)
6479 abort ();
6480
43cffd11
RE
6481 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6482 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6483 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6484 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
6485 return "";
6486}
cce8749e
CH
6487
6488/* Output a move from an fpu register to arm registers.
6489 OPERANDS[0] is the first registers of an arm register pair.
6490 OPERANDS[1] is an fpu register. */
6491
cd2b33d0 6492const char *
cce8749e 6493output_mov_double_arm_from_fpu (operands)
62b10bbc 6494 rtx * operands;
cce8749e
CH
6495{
6496 int arm_reg0 = REGNO (operands[0]);
6497 rtx ops[2];
6498
62b10bbc
NC
6499 if (arm_reg0 == IP_REGNUM)
6500 abort ();
f3bb6135 6501
43cffd11
RE
6502 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6503 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6504 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6505 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
6506 return "";
6507}
cce8749e
CH
6508
/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

const char *
output_move_double (operands)
     rtx * operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      /* The register holding the second (high-numbered) word.  */
      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  if (reg1 == IP_REGNUM)
	    abort ();

	  /* Ensure the second source is not overwritten.  */
	  if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
	    output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
	  else
	    output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
	}
      else if (code1 == CONST_DOUBLE)
	{
	  if (GET_MODE (operands[1]) == DFmode)
	    {
	      /* Split a double constant into its two target words.  */
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);
	      otherops[1] = GEN_INT (l[1]);
	      operands[1] = GEN_INT (l[0]);
	    }
	  else if (GET_MODE (operands[1]) != VOIDmode)
	    abort ();
	  else if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	    }
	  else
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	    }

	  /* Emit an immediate move for each half.  */
	  output_mov_immediate (operands);
	  output_mov_immediate (otherops);
	}
      else if (code1 == CONST_INT)
	{
#if HOST_BITS_PER_WIDE_INT > 32
	  /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
	     what the upper word is.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	      operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	    }
	  else
	    {
	      otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	      operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	    }
#else
	  /* Sign extend the intval into the high-order word.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = operands[1];
	      operands[1] = (INTVAL (operands[1]) < 0
			     ? constm1_rtx : const0_rtx);
	    }
	  else
	    otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
	  output_mov_immediate (otherops);
	  output_mov_immediate (operands);
	}
      else if (code1 == MEM)
	{
	  /* Loading a double word: dispatch on the address form.  */
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;

	    case PRE_INC:
	      abort (); /* Should never happen now.  */
	      break;

	    case PRE_DEC:
	      output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	      break;

	    case POST_INC:
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;

	    case POST_DEC:
	      abort (); /* Should never happen now.  */
	      break;

	    case LABEL_REF:
	    case CONST:
	      /* Form the address with adr, then load both words.  */
	      output_asm_insn ("adr%?\t%0, %1", operands);
	      output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	      break;

	    default:
	      if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
				   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
		{
		  otherops[0] = operands[0];
		  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
		  otherops[2] = XEXP (XEXP (operands[1], 0), 1);

		  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		    {
		      if (GET_CODE (otherops[2]) == CONST_INT)
			{
			  /* Small offsets map onto ldm addressing
			     modes directly.  */
			  switch (INTVAL (otherops[2]))
			    {
			    case -8:
			      output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			      return "";
			    case -4:
			      output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			      return "";
			    case 4:
			      output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			      return "";
			    }

			  if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			    output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
			  else
			    output_asm_insn ("add%?\t%0, %1, %2", otherops);
			}
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("sub%?\t%0, %1, %2", otherops);

		  /* The address is now in the destination register.  */
		  return "ldm%?ia\t%0, %M0";
		}
	      else
		{
		  otherops[1] = adjust_address (operands[1], VOIDmode, 4);
		  /* Take care of overlapping base/data reg.  */
		  if (reg_mentioned_p (operands[0], operands[1]))
		    {
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		    }
		  else
		    {
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		    }
		}
	    }
	}
      else
	abort (); /* Constraints should prevent this.  */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == IP_REGNUM)
	abort ();

      /* Storing a double word: dispatch on the address form.  */
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  abort (); /* Should never happen now.  */
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  abort (); /* Should never happen now.  */
	  break;

	case PLUS:
	  if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
	    {
	      /* Small offsets map onto stm addressing modes directly.  */
	      switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  /* Fall through */

	default:
	  /* Store word-by-word using offsettable addresses.  */
	  otherops[0] = adjust_address (operands[0], VOIDmode, 4);
	  otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else
    /* Constraints should prevent this.  */
    abort ();

  return "";
}
cce8749e
CH
6744
6745
6746/* Output an arbitrary MOV reg, #n.
6747 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6748
cd2b33d0 6749const char *
cce8749e 6750output_mov_immediate (operands)
62b10bbc 6751 rtx * operands;
cce8749e 6752{
f3bb6135 6753 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e 6754
1d6e90ac 6755 /* Try to use one MOV. */
cce8749e 6756 if (const_ok_for_arm (n))
1d6e90ac 6757 output_asm_insn ("mov%?\t%0, %1", operands);
cce8749e 6758
1d6e90ac
NC
6759 /* Try to use one MVN. */
6760 else if (const_ok_for_arm (~n))
cce8749e 6761 {
f3bb6135 6762 operands[1] = GEN_INT (~n);
9997d19d 6763 output_asm_insn ("mvn%?\t%0, %1", operands);
cce8749e 6764 }
1d6e90ac
NC
6765 else
6766 {
6767 int n_ones = 0;
6768 int i;
cce8749e 6769
1d6e90ac
NC
6770 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6771 for (i = 0; i < 32; i ++)
6772 if (n & 1 << i)
6773 n_ones ++;
cce8749e 6774
1d6e90ac
NC
6775 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6776 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
6777 else
6778 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6779 }
f3bb6135
RE
6780
6781 return "";
6782}
cce8749e 6783
1d6e90ac
NC
6784/* Output an ADD r, s, #n where n may be too big for one instruction.
6785 If adding zero to one register, output nothing. */
cce8749e 6786
cd2b33d0 6787const char *
cce8749e 6788output_add_immediate (operands)
62b10bbc 6789 rtx * operands;
cce8749e 6790{
f3bb6135 6791 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
6792
6793 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6794 {
6795 if (n < 0)
6796 output_multi_immediate (operands,
9997d19d
RE
6797 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6798 -n);
cce8749e
CH
6799 else
6800 output_multi_immediate (operands,
9997d19d
RE
6801 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6802 n);
cce8749e 6803 }
f3bb6135
RE
6804
6805 return "";
6806}
cce8749e 6807
cce8749e
CH
6808/* Output a multiple immediate operation.
6809 OPERANDS is the vector of operands referred to in the output patterns.
6810 INSTR1 is the output pattern to use for the first constant.
6811 INSTR2 is the output pattern to use for subsequent constants.
6812 IMMED_OP is the index of the constant slot in OPERANDS.
6813 N is the constant value. */
6814
static const char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx * operands;
     const char * instr1;
     const char * instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* Only the low 32 bits of the constant matter on the target.  */
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      /* Quick and easy output.  */
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands);
    }
  else
    {
      int i;
      const char * instr = instr1;

      /* Note that n is never zero here (which would give no output).  */
      /* Walk the constant two bits at a time; on the first set bit
	 found, emit the whole 8-bit chunk starting at that (even)
	 position — this matches the ARM 8-bit-rotated immediate
	 encoding — then skip past the chunk.  After the first chunk,
	 switch from INSTR1 to INSTR2 so later chunks combine with the
	 partial result already in the register.  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      instr = instr2;
	      i += 6;  /* Plus the loop's 2 = advance one full byte.  */
	    }
	}
    }

  return "";
}
cce8749e 6853
cce8749e
CH
6854/* Return the appropriate ARM instruction for the operation code.
6855 The returned result should not be overwritten. OP is the rtx of the
6856 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6857 was shifted. */
6858
cd2b33d0 6859const char *
cce8749e
CH
6860arithmetic_instr (op, shift_first_arg)
6861 rtx op;
f3bb6135 6862 int shift_first_arg;
cce8749e 6863{
9997d19d 6864 switch (GET_CODE (op))
cce8749e
CH
6865 {
6866 case PLUS:
f3bb6135
RE
6867 return "add";
6868
cce8749e 6869 case MINUS:
f3bb6135
RE
6870 return shift_first_arg ? "rsb" : "sub";
6871
cce8749e 6872 case IOR:
f3bb6135
RE
6873 return "orr";
6874
cce8749e 6875 case XOR:
f3bb6135
RE
6876 return "eor";
6877
cce8749e 6878 case AND:
f3bb6135
RE
6879 return "and";
6880
cce8749e 6881 default:
f3bb6135 6882 abort ();
cce8749e 6883 }
f3bb6135 6884}
cce8749e 6885
cce8749e
CH
6886/* Ensure valid constant shifts and return the appropriate shift mnemonic
6887 for the operation code. The returned result should not be overwritten.
6888 OP is the rtx code of the shift.
9997d19d 6889 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
6354dc9b 6890 shift. */
cce8749e 6891
static const char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  const char * mnem;
  enum rtx_code code = GET_CODE (op);

  /* Extract the shift amount: -1 means the amount is in a register.  */
  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      /* A multiply by a power of two is emitted as a left shift.  */
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	/* Rotate is just modulo 32.  */
	*amountp &= 31;
      else if (*amountp != (*amountp & 31))
	{
	  /* Out-of-range amounts: clamp to 32, swapping asl for lsr
	     so the arithmetic result (zero) is still produced.  */
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
cce8749e 6964
6354dc9b 6965/* Obtain the shift from the POWER of two. */
1d6e90ac 6966
18af7313 6967static HOST_WIDE_INT
cce8749e 6968int_log2 (power)
f3bb6135 6969 HOST_WIDE_INT power;
cce8749e 6970{
f3bb6135 6971 HOST_WIDE_INT shift = 0;
cce8749e 6972
30cf4896 6973 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
cce8749e
CH
6974 {
6975 if (shift > 31)
f3bb6135 6976 abort ();
1d6e90ac 6977 shift ++;
cce8749e 6978 }
f3bb6135
RE
6979
6980 return shift;
6981}
cce8749e 6982
cce8749e
CH
6983/* Output a .ascii pseudo-op, keeping track of lengths. This is because
6984 /bin/as is horribly restrictive. */
6cfc7210 6985#define MAX_ASCII_LEN 51
cce8749e
CH
6986
6987void
6988output_ascii_pseudo_op (stream, p, len)
62b10bbc 6989 FILE * stream;
3cce094d 6990 const unsigned char * p;
cce8749e
CH
6991 int len;
6992{
6993 int i;
6cfc7210 6994 int len_so_far = 0;
cce8749e 6995
6cfc7210
NC
6996 fputs ("\t.ascii\t\"", stream);
6997
cce8749e
CH
6998 for (i = 0; i < len; i++)
6999 {
1d6e90ac 7000 int c = p[i];
cce8749e 7001
6cfc7210 7002 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 7003 {
6cfc7210 7004 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 7005 len_so_far = 0;
cce8749e
CH
7006 }
7007
6cfc7210 7008 switch (c)
cce8749e 7009 {
6cfc7210
NC
7010 case TARGET_TAB:
7011 fputs ("\\t", stream);
7012 len_so_far += 2;
7013 break;
7014
7015 case TARGET_FF:
7016 fputs ("\\f", stream);
7017 len_so_far += 2;
7018 break;
7019
7020 case TARGET_BS:
7021 fputs ("\\b", stream);
7022 len_so_far += 2;
7023 break;
7024
7025 case TARGET_CR:
7026 fputs ("\\r", stream);
7027 len_so_far += 2;
7028 break;
7029
7030 case TARGET_NEWLINE:
7031 fputs ("\\n", stream);
7032 c = p [i + 1];
7033 if ((c >= ' ' && c <= '~')
7034 || c == TARGET_TAB)
7035 /* This is a good place for a line break. */
7036 len_so_far = MAX_ASCII_LEN;
7037 else
7038 len_so_far += 2;
7039 break;
7040
7041 case '\"':
7042 case '\\':
7043 putc ('\\', stream);
5895f793 7044 len_so_far++;
6cfc7210 7045 /* drop through. */
f3bb6135 7046
6cfc7210
NC
7047 default:
7048 if (c >= ' ' && c <= '~')
7049 {
7050 putc (c, stream);
5895f793 7051 len_so_far++;
6cfc7210
NC
7052 }
7053 else
7054 {
7055 fprintf (stream, "\\%03o", c);
7056 len_so_far += 4;
7057 }
7058 break;
cce8749e 7059 }
cce8749e 7060 }
f3bb6135 7061
cce8749e 7062 fputs ("\"\n", stream);
f3bb6135 7063}
cce8749e 7064\f
/* Compute the register save mask for registers 0 through 12
   inclusive.  This code is used by both arm_compute_save_reg_mask
   and arm_compute_initial_elimination_offset.  */

static unsigned long
arm_compute_save_reg0_reg12_mask ()
{
  unsigned long func_type = arm_current_func_type ();
  unsigned int save_reg_mask = 0;
  unsigned int reg;

  if (IS_INTERRUPT (func_type))
    {
      unsigned int max_reg;
      /* Interrupt functions must not corrupt any registers,
	 even call clobbered ones.  If this is a leaf function
	 we can just examine the registers used by the RTL, but
	 otherwise we have to assume that whatever function is
	 called might clobber anything, and so we have to save
	 all the call-clobbered registers as well.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
	/* FIQ handlers have registers r8 - r12 banked, so
	   we only need to check r0 - r7, Normal ISRs only
	   bank r14 and r15, so we must check up to r12.
	   r13 is the stack pointer which is always preserved,
	   so we do not need to consider it here.  */
	max_reg = 7;
      else
	max_reg = 12;

      for (reg = 0; reg <= max_reg; reg++)
	if (regs_ever_live[reg]
	    || (! current_function_is_leaf && call_used_regs [reg]))
	  save_reg_mask |= (1 << reg);
    }
  else
    {
      /* In the normal case we only need to save those registers
	 which are call saved and which are used by this function.  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs [reg])
	  save_reg_mask |= (1 << reg);

      /* Handle the frame pointer as a special case.  */
      if (! TARGET_APCS_FRAME
	  && ! frame_pointer_needed
	  && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
	  && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
	save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;

      /* If we aren't loading the PIC register,
	 don't stack it even though it may be live.
	 NOTE(review): the condition below saves the PIC register
	 precisely when PIC is in use without a single PIC base and
	 the register is live — i.e. when it IS (re)loaded.  */
      if (flag_pic
	  && ! TARGET_SINGLE_PIC_BASE
	  && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
    }

  return save_reg_mask;
}
7125
/* Compute a bit mask of which registers need to be
   saved on the stack for the current function.  The mask covers
   r0-r12 (via arm_compute_save_reg0_reg12_mask) plus, as needed,
   the frame pointer, IP, LR and PC.  */

static unsigned long
arm_compute_save_reg_mask ()
{
  unsigned int save_reg_mask = 0;
  unsigned long func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* This should never really happen.  */
    return 0;

  /* If we are creating a stack frame, then we must save the frame pointer,
     IP (which will hold the old stack pointer), LR and the PC.  */
  if (frame_pointer_needed)
    save_reg_mask |=
      (1 << ARM_HARD_FRAME_POINTER_REGNUM)
      | (1 << IP_REGNUM)
      | (1 << LR_REGNUM)
      | (1 << PC_REGNUM);

  /* Volatile functions do not return, so there
     is no need to save any other registers.  */
  if (IS_VOLATILE (func_type))
    return save_reg_mask;

  save_reg_mask |= arm_compute_save_reg0_reg12_mask ();

  /* Decide if we need to save the link register.
     Interrupt routines have their own banked link register,
     so they never need to save it.
     Otherwise if we do not use the link register we do not need to save
     it.  If we are pushing other registers onto the stack however, we
     can save an instruction in the epilogue by pushing the link register
     now and then popping it back into the PC.  This incurs extra memory
     accesses though, so we only do it when optimising for size, and only
     if we know that we will not need a fancy return sequence.  */
  if (regs_ever_live [LR_REGNUM]
      || (save_reg_mask
	  && optimize_size
	  && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
    save_reg_mask |= 1 << LR_REGNUM;

  /* The reorg pass may have proven the LR save redundant.  */
  if (cfun->machine->lr_save_eliminated)
    save_reg_mask &= ~ (1 << LR_REGNUM);

  return save_reg_mask;
}
7175
/* Generate a function exit sequence.  If REALLY_RETURN is true, then do
   everything bar the final return instruction.
   OPERAND is the condition operand for the %d/%D assembler escapes;
   REVERSE non-zero selects the reversed condition.  Returns "" — all
   output is emitted directly via output_asm_insn.  */

const char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char conditional[10];
  char instr[100];
  int reg;
  unsigned long live_regs_mask;
  unsigned long func_type;

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* Naked functions: the programmer supplies the epilogue.  */
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return.  */
      if (really_return)
	{
	  rtx ops[2];

	  /* Otherwise, trap an attempted return by aborting.  */
	  ops[0] = operand;
	  ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
				       : "abort");
	  assemble_external_libcall (ops[1]);
	  output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
	}

      return "";
    }

  if (current_function_calls_alloca && !really_return)
    abort ();

  /* Build the "%?%d0" / "%?%D0" condition string used in every
     instruction template below.  */
  sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');

  return_used_this_function = 1;

  live_regs_mask = arm_compute_save_reg_mask ();

  if (live_regs_mask)
    {
      const char * return_reg;

      /* If we do not have any special requirements for function exit
	 (eg interworking, or ISR) then we can load the return address
	 directly into the PC.  Otherwise we must load it into LR.  */
      if (really_return
	  && ! TARGET_INTERWORK)
	return_reg = reg_names[PC_REGNUM];
      else
	return_reg = reg_names[LR_REGNUM];

      if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
	/* There are two possible reasons for the IP register being saved.
	   Either a stack frame was created, in which case IP contains the
	   old stack pointer, or an ISR routine corrupted it.  If this is an
	   ISR routine then just restore IP, otherwise restore IP into SP.  */
	if (! IS_INTERRUPT (func_type))
	  {
	    live_regs_mask &= ~ (1 << IP_REGNUM);
	    live_regs_mask |= (1 << SP_REGNUM);
	  }

      /* On some ARM architectures it is faster to use LDR rather than
	 LDM to load a single register.  On other architectures, the
	 cost is the same.  In 26 bit mode, or for exception handlers,
	 we have to use LDM to load the PC so that the CPSR is also
	 restored.  */
      for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
	{
	  /* Find the single register, if the mask has exactly one bit.  */
	  if (live_regs_mask == (unsigned int)(1 << reg))
	    break;
	}
      if (reg <= LAST_ARM_REGNUM
	  && (reg != LR_REGNUM
	      || ! really_return
	      || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
	{
	  /* Single-register pop: use LDR with post-increment.  */
	  sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
		   (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
	}
      else
	{
	  char *p;
	  int first = 1;

	  /* Generate the load multiple instruction to restore the registers.  */
	  if (frame_pointer_needed)
	    sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
	  else
	    sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);

	  p = instr + strlen (instr);

	  /* Append each live register r0..sp to the register list.  */
	  for (reg = 0; reg <= SP_REGNUM; reg++)
	    if (live_regs_mask & (1 << reg))
	      {
		int l = strlen (reg_names[reg]);

		if (first)
		  first = 0;
		else
		  {
		    memcpy (p, ", ", 2);
		    p += 2;
		  }

		memcpy (p, "%|", 2);
		memcpy (p + 2, reg_names[reg], l);
		p += l + 2;
	      }

	  if (live_regs_mask & (1 << LR_REGNUM))
	    {
	      int l = strlen (return_reg);

	      if (! first)
		{
		  memcpy (p, ", ", 2);
		  p += 2;
		}

	      memcpy (p, "%|", 2);
	      memcpy (p + 2, return_reg, l);
	      /* "}^" restores CPSR from SPSR — needed in 26-bit mode
		 or when returning from an interrupt.  */
	      strcpy (p + 2 + l, ((TARGET_APCS_32
				   && !IS_INTERRUPT (func_type))
				  || !really_return)
		      ? "}" : "}^");
	    }
	  else
	    strcpy (p, "}");
	}

      output_asm_insn (instr, & operand);

      /* See if we need to generate an extra instruction to
	 perform the actual function return.  */
      if (really_return
	  && func_type != ARM_FT_INTERWORKED
	  && (live_regs_mask & (1 << LR_REGNUM)) != 0)
	{
	  /* The return has already been handled
	     by loading the LR into the PC.  */
	  really_return = 0;
	}
    }

  if (really_return)
    {
      switch ((int) ARM_FUNC_TYPE (func_type))
	{
	case ARM_FT_ISR:
	case ARM_FT_FIQ:
	  /* Interrupt return: LR holds return address + 4.  */
	  sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
	  break;

	case ARM_FT_INTERWORKED:
	  sprintf (instr, "bx%s\t%%|lr", conditional);
	  break;

	case ARM_FT_EXCEPTION:
	  sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
	  break;

	default:
	  /* ARMv5 implementations always provide BX, so interworking
	     is the default unless APCS-26 is in use.  */
	  if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
	    sprintf (instr, "bx%s\t%%|lr", conditional);
	  else
	    sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
		     conditional, TARGET_APCS_32 ? "" : "s");
	  break;
	}

      output_asm_insn (instr, & operand);
    }

  return "";
}
7365
/* Write the function name into the code section, directly preceding
   the function prologue.

   Code will be output similar to this:
     t0
	 .ascii "arm_poke_function_name", 0
	 .align
     t1
	 .word 0xff000000 + (t1 - t0)
     arm_poke_function_name
	 mov     ip, sp
	 stmfd   sp!, {fp, ip, lr, pc}
	 sub     fp, ip, #4

   When performing a stack backtrace, code can inspect the value
   of 'pc' stored at 'fp' + 0.  If the trace function then looks
   at location pc - 12 and the top 8 bits are set, then we know
   that there is a function name embedded immediately preceding this
   location and has length ((pc[-3]) & 0xff000000).

   We assume that pc is declared as a pointer to an unsigned long.

   It is of no benefit to output the function name if we are assembling
   a leaf function.  These function types will not contain a stack
   backtrace structure, therefore it is not possible to determine the
   function name.  */

void
arm_poke_function_name (stream, name)
     FILE * stream;
     const char * name;
{
  unsigned long alignlength;
  unsigned long length;
  rtx x;

  /* Length includes the trailing NUL; round up to a word boundary.  */
  length = strlen (name) + 1;
  alignlength = ROUND_UP (length);

  ASM_OUTPUT_ASCII (stream, name, length);
  ASM_OUTPUT_ALIGN (stream, 2);
  /* Marker word: top byte 0xff flags the embedded name, low bits
     give its (aligned) length.  */
  x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
  assemble_aligned_integer (UNITS_PER_WORD, x);
}
7410
/* Place some comments into the assembler stream
   describing the current function.  F is the output stream and
   FRAME_SIZE the size of the local frame; Thumb functions are
   delegated to thumb_output_function_prologue.  */

static void
arm_output_function_prologue (f, frame_size)
     FILE * f;
     HOST_WIDE_INT frame_size;
{
  unsigned long func_type;

  if (!TARGET_ARM)
    {
      thumb_output_function_prologue (f, frame_size);
      return;
    }

  /* Sanity check.  */
  if (arm_ccfsm_state || arm_target_insn)
    abort ();

  func_type = arm_current_func_type ();

  /* Announce any special function type as an assembler comment.  */
  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    default:
    case ARM_FT_NORMAL:
      break;
    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\t%@ Function supports interworking.\n");
      break;
    case ARM_FT_EXCEPTION_HANDLER:
      asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
      break;
    case ARM_FT_ISR:
      asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
      break;
    case ARM_FT_FIQ:
      asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
      break;
    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
      break;
    }

  if (IS_NAKED (func_type))
    asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");

  if (IS_VOLATILE (func_type))
    asm_fprintf (f, "\t%@ Volatile: function does not return.\n");

  if (IS_NESTED (func_type))
    asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");

  asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
	       current_function_args_size,
	       current_function_pretend_args_size, frame_size);

  asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
	       frame_pointer_needed,
	       cfun->machine->uses_anonymous_args);

  if (cfun->machine->lr_save_eliminated)
    asm_fprintf (f, "\t%@ link register save eliminated.\n");

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Reset for the epilogue's sanity check.  */
  return_used_this_function = 0;
}
cce8749e 7482
/* Emit the text of the epilogue for the current function.
   REALLY_RETURN is false for a tail call (the epilogue must unwind
   but not return).  Returns "" — all output goes straight to
   asm_out_file.  */

const char *
arm_output_epilogue (really_return)
     int really_return;
{
  int reg;
  unsigned long saved_regs_mask;
  unsigned long func_type;
  /* Floats_offset is the offset from the "virtual" frame.  In an APCS
     frame that is $fp + 4 for a non-variadic function.  */
  int floats_offset = 0;
  rtx operands[3];
  int frame_size = get_frame_size ();
  FILE * f = asm_out_file;
  rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;

  /* If we have already generated the return instruction
     then it is futile to generate anything else.  */
  if (use_return_insn (FALSE) && return_used_this_function)
    return "";

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* Naked functions don't have epilogues.  */
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      rtx op;

      /* A volatile function should never return.  Call abort.  */
      op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
      assemble_external_libcall (op);
      output_asm_insn ("bl\t%a0", &op);

      return "";
    }

  if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      && ! really_return)
    /* If we are throwing an exception, then we really must
       be doing a return, so we can't tail-call.  */
    abort ();

  saved_regs_mask = arm_compute_save_reg_mask ();

  /* XXX We should adjust floats_offset for any anonymous args, and then
     re-adjust vfp_offset below to compensate.  */

  /* Compute how far away the floats will be.  */
  for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
    if (saved_regs_mask & (1 << reg))
      floats_offset += 4;

  if (frame_pointer_needed)
    {
      int vfp_offset = 4;

      if (arm_fpu_arch == FP_SOFT2)
	{
	  /* Software FP: restore each FP register with a single LDFE,
	     addressed FP-relative.  */
	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		floats_offset += 12;
		asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
			     reg, FP_REGNUM, floats_offset - vfp_offset);
	      }
	}
      else
	{
	  /* Hardware FPA: batch consecutive registers into LFM
	     instructions.  */
	  int start_reg = LAST_ARM_FP_REGNUM;

	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  floats_offset += 12;

		  /* We can't unstack more than four registers at once.  */
		  if (start_reg - reg == 3)
		    {
		      asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
				   reg, FP_REGNUM, floats_offset - vfp_offset);
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
				 reg + 1, start_reg - reg,
				 FP_REGNUM, floats_offset - vfp_offset);
		  start_reg = reg - 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
			 reg + 1, start_reg - reg,
			 FP_REGNUM, floats_offset - vfp_offset);
	}

      /* saved_regs_mask should contain the IP, which at the time of stack
	 frame generation actually contains the old stack pointer.  So a
	 quick way to unwind the stack is just pop the IP register directly
	 into the stack pointer.  */
      if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
	abort ();
      saved_regs_mask &= ~ (1 << IP_REGNUM);
      saved_regs_mask |= (1 << SP_REGNUM);

      /* There are two registers left in saved_regs_mask - LR and PC.  We
	 only need to restore the LR register (the return address), but to
	 save time we can load it directly into the PC, unless we need a
	 special function exit sequence, or we are not really returning.  */
      if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
	/* Delete the LR from the register mask, so that the LR on
	   the stack is loaded into the PC in the register mask.  */
	saved_regs_mask &= ~ (1 << LR_REGNUM);
      else
	saved_regs_mask &= ~ (1 << PC_REGNUM);

      print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);

      if (IS_INTERRUPT (func_type))
	/* Interrupt handlers will have pushed the
	   IP onto the stack, so restore it now.  */
	print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
    }
  else
    {
      /* Restore stack pointer if necessary.  */
      if (frame_size + current_function_outgoing_args_size != 0)
	{
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (frame_size
				 + current_function_outgoing_args_size);
	  output_add_immediate (operands);
	}

      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
			   reg, SP_REGNUM);
	}
      else
	{
	  int start_reg = FIRST_ARM_FP_REGNUM;

	  /* Batch consecutive FP registers into LFMFD pops of up to
	     four registers.  */
	  for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  if (reg - start_reg == 3)
		    {
		      asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
				   start_reg, SP_REGNUM);
		      start_reg = reg + 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
				 start_reg, reg - start_reg,
				 SP_REGNUM);

		  start_reg = reg + 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
			 start_reg, reg - start_reg, SP_REGNUM);
	}

      /* If we can, restore the LR into the PC.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && really_return
	  && current_function_pretend_args_size == 0
	  && saved_regs_mask & (1 << LR_REGNUM))
	{
	  saved_regs_mask &= ~ (1 << LR_REGNUM);
	  saved_regs_mask |= (1 << PC_REGNUM);
	}

      /* Load the registers off the stack.  If we only have one register
	 to load use the LDR instruction - it is faster.  */
      if (saved_regs_mask == (1 << LR_REGNUM))
	{
	  /* The exception handler ignores the LR, so we do
	     not really need to load it off the stack.  */
	  if (eh_ofs)
	    asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
	  else
	    asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
	}
      else if (saved_regs_mask)
	print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);

      if (current_function_pretend_args_size)
	{
	  /* Unwind the pre-pushed regs.  */
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (current_function_pretend_args_size);
	  output_add_immediate (operands);
	}
    }

#if 0
  if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
    /* Adjust the stack to remove the exception handler stuff.  */
    asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
		 REGNO (eh_ofs));
#endif

  /* Nothing more to do if we are not returning, or if the return has
     already been folded into the register pop above.  */
  if (! really_return
      || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && current_function_pretend_args_size == 0
	  && saved_regs_mask & (1 << PC_REGNUM)))
    return "";

  /* Generate the return instruction.  */
  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    case ARM_FT_EXCEPTION_HANDLER:
      /* Even in 26-bit mode we do a mov (rather than a movs)
	 because we don't have the PSR bits set in the address.  */
      asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
      break;

    case ARM_FT_ISR:
    case ARM_FT_FIQ:
      asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
      break;

    default:
      if (frame_pointer_needed)
	/* If we used the frame pointer then the return address
	   will have been loaded off the stack directly into the
	   PC, so there is no need to issue a MOV instruction
	   here.  */
	;
      else if (current_function_pretend_args_size == 0
	       && (saved_regs_mask & (1 << LR_REGNUM)))
	/* Similarly we may have been able to load LR into the PC
	   even if we did not create a stack frame.  */
	;
      else if (TARGET_APCS_32)
	asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      else
	asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;
    }

  return "";
}
7752
08c148a8
NB
7753static void
7754arm_output_function_epilogue (file, frame_size)
7755 FILE *file ATTRIBUTE_UNUSED;
7756 HOST_WIDE_INT frame_size;
949d79eb 7757{
d5b7b3ae
RE
7758 if (TARGET_THUMB)
7759 {
7760 /* ??? Probably not safe to set this here, since it assumes that a
7761 function will be emitted as assembly immediately after we generate
7762 RTL for it. This does not happen for inline functions. */
7763 return_used_this_function = 0;
7764 }
7765 else
7766 {
7767 if (use_return_insn (FALSE)
7768 && return_used_this_function
7769 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 7770 && !frame_pointer_needed)
d5b7b3ae 7771 abort ();
f3bb6135 7772
d5b7b3ae 7773 /* Reset the ARM-specific per-function variables. */
d5b7b3ae
RE
7774 after_arm_reorg = 0;
7775 }
f3bb6135 7776}
e2c671ba 7777
/* Generate and emit an insn that we will recognize as a push_multi.
   Unfortunately, since this insn does not reflect very well the actual
   semantics of the operation, we need to annotate the insn for the benefit
   of DWARF2 frame unwind information.
   MASK is the bit mask of ARM registers (r0..r15) to push; between 1
   and 16 bits must be set.  Returns the emitted insn.  */

static rtx
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int num_dwarf_regs;
  int i, j;
  rtx par;
  rtx dwarf;
  int dwarf_par_index;
  rtx tmp, reg;

  /* Count the registers being pushed.  */
  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  /* We don't record the PC in the dwarf frame information.  */
  num_dwarf_regs = num_regs;
  if (mask & (1 << PC_REGNUM))
    num_dwarf_regs--;

  /* For the body of the insn we are going to generate an UNSPEC in
     parallel with several USEs.  This allows the insn to be recognised
     by the push_multi pattern in the arm.md file.  The insn looks
     something like this:

       (parallel [
           (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
	        (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
           (use (reg:SI 11 fp))
           (use (reg:SI 12 ip))
           (use (reg:SI 14 lr))
           (use (reg:SI 15 pc))
        ])

     For the frame note however, we try to be more explicit and actually
     show each register being stored into the stack frame, plus a (single)
     decrement of the stack pointer.  We do it this way in order to be
     friendly to the stack unwinding code, which only wants to see a single
     stack decrement per instruction.  The RTL we generate for the note looks
     something like this:

      (sequence [
           (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
           (set (mem:SI (reg:SI sp)) (reg:SI r4))
           (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
           (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
           (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
        ])

      This sequence is used both by the code to support stack unwinding for
      exceptions handlers and the code to generate dwarf2 frame debugging.  */

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
  dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
  dwarf_par_index = 1;

  /* Find the first pushed register; it supplies the SET/UNSPEC body
     of the parallel.  */
  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   gen_rtx_MEM (BLKmode,
					gen_rtx_PRE_DEC (BLKmode,
							 stack_pointer_rtx)),
			   gen_rtx_UNSPEC (BLKmode,
					   gen_rtvec (1, reg),
					   UNSPEC_PUSH_MULT));

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode, stack_pointer_rtx),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
	      dwarf_par_index++;
	    }

	  break;
	}
    }

  /* The remaining registers become USEs in the parallel and explicit
     stores in the dwarf note.  */
  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode,
					      plus_constant (stack_pointer_rtx,
							     4 * j)),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
	    }

	  j++;
	}
    }

  par = emit_insn (par);

  /* Element 0 of the dwarf note is the single stack decrement.
     NOTE(review): this gen_rtx_SET passes SImode where the other SETs
     in this file pass VOIDmode — looks inconsistent; confirm against
     the gen_rtx_SET convention before changing.  */
  tmp = gen_rtx_SET (SImode,
		     stack_pointer_rtx,
		     gen_rtx_PLUS (SImode,
				   stack_pointer_rtx,
				   GEN_INT (-4 * num_regs)));
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 0) = tmp;

  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
7909
/* Emit an insn pushing COUNT FPA registers starting at BASE_REG onto
   the stack (an SFM store-multiple), annotated with a DWARF2 frame
   note that records each XFmode store individually.  Returns the
   emitted insn.  */

static rtx
emit_sfm (base_reg, count)
     int base_reg;
     int count;
{
  rtx par;
  rtx dwarf;
  rtx tmp, reg;
  int i;

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  reg = gen_rtx_REG (XFmode, base_reg++);

  /* The first register supplies the SET/UNSPEC body recognised by the
     matching pattern in arm.md.  */
  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (BLKmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   gen_rtx_UNSPEC (BLKmode,
				   gen_rtvec (1, reg),
				   UNSPEC_PUSH_MULT));
  tmp
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (XFmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   reg);
  RTX_FRAME_RELATED_P (tmp) = 1;
  /* Dwarf entries are filled from the end since the registers are
     stored at descending addresses.  */
  XVECEXP (dwarf, 0, count - 1) = tmp;

  for (i = 1; i < count; i++)
    {
      reg = gen_rtx_REG (XFmode, base_reg++);
      XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);

      tmp = gen_rtx_SET (VOIDmode,
			 gen_rtx_MEM (XFmode,
				      gen_rtx_PRE_DEC (BLKmode,
						       stack_pointer_rtx)),
			 reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (dwarf, 0, count - i - 1) = tmp;
    }

  par = emit_insn (par);
  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
7959
095bb276
NC
7960/* Compute the distance from register FROM to register TO.
7961 These can be the arg pointer (26), the soft frame pointer (25),
7962 the stack pointer (13) or the hard frame pointer (11).
7963 Typical stack layout looks like this:
7964
7965 old stack pointer -> | |
7966 ----
7967 | | \
7968 | | saved arguments for
7969 | | vararg functions
7970 | | /
7971 --
7972 hard FP & arg pointer -> | | \
7973 | | stack
7974 | | frame
7975 | | /
7976 --
7977 | | \
7978 | | call saved
7979 | | registers
7980 soft frame pointer -> | | /
7981 --
7982 | | \
7983 | | local
7984 | | variables
7985 | | /
7986 --
7987 | | \
7988 | | outgoing
7989 | | arguments
7990 current stack pointer -> | | /
7991 --
7992
7993 For a given funciton some or all of these stack compomnents
7994 may not be needed, giving rise to the possibility of
7995 eliminating some of the registers.
7996
7997 The values returned by this function must reflect the behaviour
7998 of arm_expand_prologue() and arm_compute_save_reg_mask().
7999
8000 The sign of the number returned reflects the direction of stack
8001 growth, so the values are positive for all eliminations except
8002 from the soft frame pointer to the hard frame pointer. */
8003
8004unsigned int
8005arm_compute_initial_elimination_offset (from, to)
8006 unsigned int from;
8007 unsigned int to;
8008{
8009 unsigned int local_vars = (get_frame_size () + 3) & ~3;
8010 unsigned int outgoing_args = current_function_outgoing_args_size;
8011 unsigned int stack_frame;
8012 unsigned int call_saved_registers;
8013 unsigned long func_type;
8014
8015 func_type = arm_current_func_type ();
8016
8017 /* Volatile functions never return, so there is
8018 no need to save call saved registers. */
8019 call_saved_registers = 0;
8020 if (! IS_VOLATILE (func_type))
8021 {
121308d4 8022 unsigned int reg_mask;
095bb276
NC
8023 unsigned int reg;
8024
1d6e90ac 8025 /* Make sure that we compute which registers will be saved
121308d4
NC
8026 on the stack using the same algorithm that is used by
8027 arm_compute_save_reg_mask(). */
8028 reg_mask = arm_compute_save_reg0_reg12_mask ();
095bb276 8029
121308d4
NC
8030 /* Now count the number of bits set in save_reg_mask.
8031 For each set bit we need 4 bytes of stack space. */
121308d4
NC
8032 while (reg_mask)
8033 {
8034 call_saved_registers += 4;
8035 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8036 }
095bb276
NC
8037
8038 if (regs_ever_live[LR_REGNUM]
8039 /* If a stack frame is going to be created, the LR will
8040 be saved as part of that, so we do not need to allow
8041 for it here. */
8042 && ! frame_pointer_needed)
8043 call_saved_registers += 4;
ef7112de
NC
8044
8045 /* If the hard floating point registers are going to be
8046 used then they must be saved on the stack as well.
8047 Each register occupies 12 bytes of stack space. */
8048 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8049 if (regs_ever_live[reg] && ! call_used_regs[reg])
8050 call_saved_registers += 12;
095bb276
NC
8051 }
8052
8053 /* The stack frame contains 4 registers - the old frame pointer,
8054 the old stack pointer, the return address and PC of the start
8055 of the function. */
8056 stack_frame = frame_pointer_needed ? 16 : 0;
8057
095bb276
NC
8058 /* OK, now we have enough information to compute the distances.
8059 There must be an entry in these switch tables for each pair
8060 of registers in ELIMINABLE_REGS, even if some of the entries
8061 seem to be redundant or useless. */
8062 switch (from)
8063 {
8064 case ARG_POINTER_REGNUM:
8065 switch (to)
8066 {
8067 case THUMB_HARD_FRAME_POINTER_REGNUM:
8068 return 0;
8069
8070 case FRAME_POINTER_REGNUM:
8071 /* This is the reverse of the soft frame pointer
8072 to hard frame pointer elimination below. */
8073 if (call_saved_registers == 0 && stack_frame == 0)
8074 return 0;
8075 return (call_saved_registers + stack_frame - 4);
8076
8077 case ARM_HARD_FRAME_POINTER_REGNUM:
8078 /* If there is no stack frame then the hard
8079 frame pointer and the arg pointer coincide. */
8080 if (stack_frame == 0 && call_saved_registers != 0)
8081 return 0;
8082 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8083 return (frame_pointer_needed
8084 && current_function_needs_context
3cb66fd7 8085 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
095bb276
NC
8086
8087 case STACK_POINTER_REGNUM:
8088 /* If nothing has been pushed on the stack at all
8089 then this will return -4. This *is* correct! */
8090 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8091
8092 default:
8093 abort ();
8094 }
8095 break;
8096
8097 case FRAME_POINTER_REGNUM:
8098 switch (to)
8099 {
8100 case THUMB_HARD_FRAME_POINTER_REGNUM:
8101 return 0;
8102
8103 case ARM_HARD_FRAME_POINTER_REGNUM:
8104 /* The hard frame pointer points to the top entry in the
8105 stack frame. The soft frame pointer to the bottom entry
8106 in the stack frame. If there is no stack frame at all,
8107 then they are identical. */
8108 if (call_saved_registers == 0 && stack_frame == 0)
8109 return 0;
8110 return - (call_saved_registers + stack_frame - 4);
8111
8112 case STACK_POINTER_REGNUM:
8113 return local_vars + outgoing_args;
8114
8115 default:
8116 abort ();
8117 }
8118 break;
8119
8120 default:
8121 /* You cannot eliminate from the stack pointer.
8122 In theory you could eliminate from the hard frame
8123 pointer to the stack pointer, but this will never
8124 happen, since if a stack frame is not needed the
8125 hard frame pointer will never be used. */
8126 abort ();
8127 }
8128}
8129
/* Generate the prologue instructions for entry into an ARM function.
   Emits (in order): any IP/static-chain preservation needed when a
   frame pointer is created, the push of the pretend (vararg) argument
   registers, the multi-register push of the call-saved core registers,
   saves of the call-saved FPA registers, the frame pointer setup, and
   finally the stack adjustment for locals and outgoing arguments.
   Insns that move the stack pointer are flagged RTX_FRAME_RELATED_P
   so the Dwarf unwinder can describe the frame.  */

void
arm_expand_prologue ()
{
  int reg;
  rtx amount;
  rtx insn;
  rtx ip_rtx;
  unsigned long live_regs_mask;
  unsigned long func_type;
  int fp_offset = 0;
  /* Non-zero if the pretend args were pushed early to free r3 (case 3
     of the static chain handling below).  */
  int saved_pretend_args = 0;
  unsigned int args_to_push;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  /* Make a copy of c_f_p_a_s as we may need to modify it locally.  */
  args_to_push = current_function_pretend_args_size;

  /* Compute which register we will have to save onto the stack.  */
  live_regs_mask = arm_compute_save_reg_mask ();

  ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);

  if (frame_pointer_needed)
    {
      if (IS_INTERRUPT (func_type))
        {
          /* Interrupt functions must not corrupt any registers.
             Creating a frame pointer however, corrupts the IP
             register, so we must push it first.  */
          insn = emit_multi_reg_push (1 << IP_REGNUM);

          /* Do not set RTX_FRAME_RELATED_P on this insn.
             The dwarf stack unwinding code only wants to see one
             stack decrement per function, and this is not it.  If
             this instruction is labeled as being part of the frame
             creation sequence then dwarf2out_frame_debug_expr will
             abort when it encounters the assignment of IP to FP
             later on, since the use of SP here establishes SP as
             the CFA register and not IP.

             Anyway this instruction is not really part of the stack
             frame creation although it is part of the prologue.  */
        }
      else if (IS_NESTED (func_type))
        {
          /* The Static chain register is the same as the IP register
             used as a scratch register during stack frame creation.
             To get around this need to find somewhere to store IP
             whilst the frame is being created.  We try the following
             places in order:

               1. The last argument register.
               2. A slot on the stack above the frame.  (This only
                  works if the function is not a varargs function).
               3. Register r3, after pushing the argument registers
                  onto the stack.

             Note - we only need to tell the dwarf2 backend about the SP
             adjustment in the second variant; the static chain register
             doesn't need to be unwound, as it doesn't contain a value
             inherited from the caller.  */

          if (regs_ever_live[3] == 0)
            {
              /* Case 1: r3 is free, park IP there.  */
              insn = gen_rtx_REG (SImode, 3);
              insn = gen_rtx_SET (SImode, insn, ip_rtx);
              insn = emit_insn (insn);
            }
          else if (args_to_push == 0)
            {
              /* Case 2: spill IP to a stack slot above the frame.  */
              rtx dwarf;
              insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
              insn = gen_rtx_MEM (SImode, insn);
              insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
              insn = emit_insn (insn);

              fp_offset = 4;

              /* Just tell the dwarf backend that we adjusted SP.  */
              dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
                                   gen_rtx_PLUS (SImode, stack_pointer_rtx,
                                                 GEN_INT (-fp_offset)));
              RTX_FRAME_RELATED_P (insn) = 1;
              REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                                    dwarf, REG_NOTES (insn));
            }
          else
            {
              /* Case 3: push the pretend args now so that r3 becomes
                 available to hold IP.  */
              /* Store the args on the stack.  */
              if (cfun->machine->uses_anonymous_args)
                insn = emit_multi_reg_push
                  ((0xf0 >> (args_to_push / 4)) & 0xf);
              else
                insn = emit_insn
                  (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                               GEN_INT (- args_to_push)));

              RTX_FRAME_RELATED_P (insn) = 1;

              saved_pretend_args = 1;
              fp_offset = args_to_push;
              args_to_push = 0;

              /* Now reuse r3 to preserve IP.  */
              insn = gen_rtx_REG (SImode, 3);
              insn = gen_rtx_SET (SImode, insn, ip_rtx);
              (void) emit_insn (insn);
            }
        }

      /* Point IP at where the frame pointer will eventually point,
         compensating for anything already pushed above it.  */
      if (fp_offset)
        {
          insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
          insn = gen_rtx_SET (SImode, ip_rtx, insn);
        }
      else
        insn = gen_movsi (ip_rtx, stack_pointer_rtx);

      insn = emit_insn (insn);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (args_to_push)
    {
      /* Push the argument registers, or reserve space for them.  */
      if (cfun->machine->uses_anonymous_args)
        insn = emit_multi_reg_push
          ((0xf0 >> (args_to_push / 4)) & 0xf);
      else
        insn = emit_insn
          (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                       GEN_INT (- args_to_push)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If this is an interrupt service routine, and the link register is
     going to be pushed, subtracting four now will mean that the
     function return can be done with a single instruction.  */
  if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
      && (live_regs_mask & (1 << LR_REGNUM)) != 0)
    {
      emit_insn (gen_rtx_SET (SImode,
                              gen_rtx_REG (SImode, LR_REGNUM),
                              gen_rtx_PLUS (SImode,
                                            gen_rtx_REG (SImode, LR_REGNUM),
                                            GEN_INT (-4))));
    }

  if (live_regs_mask)
    {
      insn = emit_multi_reg_push (live_regs_mask);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (! IS_VOLATILE (func_type))
    {
      /* Save any floating point call-saved registers used by this function.  */
      if (arm_fpu_arch == FP_SOFT2)
        {
          /* One store per register; no multi-register store available.  */
          for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
            if (regs_ever_live[reg] && !call_used_regs[reg])
              {
                insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
                insn = gen_rtx_MEM (XFmode, insn);
                insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
                                               gen_rtx_REG (XFmode, reg)));
                RTX_FRAME_RELATED_P (insn) = 1;
              }
        }
      else
        {
          /* Group runs of consecutive live registers (at most 4 per
             store, since start_reg is reset when a run reaches 4)
             into multi-register stores via emit_sfm.  */
          int start_reg = LAST_ARM_FP_REGNUM;

          for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
            {
              if (regs_ever_live[reg] && !call_used_regs[reg])
                {
                  if (start_reg - reg == 3)
                    {
                      insn = emit_sfm (reg, 4);
                      RTX_FRAME_RELATED_P (insn) = 1;
                      start_reg = reg - 1;
                    }
                }
              else
                {
                  if (start_reg != reg)
                    {
                      insn = emit_sfm (reg + 1, start_reg - reg);
                      RTX_FRAME_RELATED_P (insn) = 1;
                    }
                  start_reg = reg - 1;
                }
            }

          /* Flush any trailing run.  */
          if (start_reg != reg)
            {
              insn = emit_sfm (reg + 1, start_reg - reg);
              RTX_FRAME_RELATED_P (insn) = 1;
            }
        }
    }

  if (frame_pointer_needed)
    {
      /* Create the new frame pointer.  */
      insn = GEN_INT (-(4 + args_to_push + fp_offset));
      insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
      RTX_FRAME_RELATED_P (insn) = 1;

      if (IS_NESTED (func_type))
        {
          /* Recover the static chain register.  */
          if (regs_ever_live [3] == 0
              || saved_pretend_args)
            insn = gen_rtx_REG (SImode, 3);
          else /* if (current_function_pretend_args_size == 0) */
            {
              /* IP was spilled to the slot just above the new frame.  */
              insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
              insn = gen_rtx_MEM (SImode, insn);
            }

          emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
          /* Add a USE to stop propagate_one_insn() from barfing.  */
          emit_insn (gen_prologue_use (ip_rtx));
        }
    }

  amount = GEN_INT (-(get_frame_size ()
                      + current_function_outgoing_args_size));

  if (amount != const0_rtx)
    {
      /* This add can produce multiple insns for a large constant, so we
         need to get tricky.  */
      rtx last = get_last_insn ();
      insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                                    amount));
      /* Flag every insn emitted for the adjustment as frame-related.  */
      do
        {
          last = last ? NEXT_INSN (last) : get_insns ();
          RTX_FRAME_RELATED_P (last) = 1;
        }
      while (last != insn);

      /* If the frame pointer is needed, emit a special barrier that
         will prevent the scheduler from moving stores to the frame
         before the stack adjustment.  */
      if (frame_pointer_needed)
        insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
                                         hard_frame_pointer_rtx));
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  Similarly if the user has requested no
     scheduling in the prolog.  */
  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());

  /* If the link register is being kept alive, with the return address in it,
     then make sure that it does not get reused by the ce2 pass.  */
  if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
    {
      emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
      cfun->machine->lr_save_eliminated = 1;
    }
}
cce8749e 8404\f
9997d19d
RE
8405/* If CODE is 'd', then the X is a condition operand and the instruction
8406 should only be executed if the condition is true.
ddd5a7c1 8407 if CODE is 'D', then the X is a condition operand and the instruction
9997d19d
RE
8408 should only be executed if the condition is false: however, if the mode
8409 of the comparison is CCFPEmode, then always execute the instruction -- we
8410 do this because in these circumstances !GE does not necessarily imply LT;
8411 in these cases the instruction pattern will take care to make sure that
8412 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 8413 doing this instruction unconditionally.
9997d19d
RE
8414 If CODE is 'N' then X is a floating point operand that must be negated
8415 before output.
8416 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8417 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
8418
8419void
8420arm_print_operand (stream, x, code)
62b10bbc 8421 FILE * stream;
9997d19d
RE
8422 rtx x;
8423 int code;
8424{
8425 switch (code)
8426 {
8427 case '@':
f3139301 8428 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
8429 return;
8430
d5b7b3ae
RE
8431 case '_':
8432 fputs (user_label_prefix, stream);
8433 return;
8434
9997d19d 8435 case '|':
f3139301 8436 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
8437 return;
8438
8439 case '?':
8440 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
8441 {
8442 if (TARGET_THUMB || current_insn_predicate != NULL)
8443 abort ();
8444
8445 fputs (arm_condition_codes[arm_current_cc], stream);
8446 }
8447 else if (current_insn_predicate)
8448 {
8449 enum arm_cond_code code;
8450
8451 if (TARGET_THUMB)
8452 abort ();
8453
8454 code = get_arm_condition_code (current_insn_predicate);
8455 fputs (arm_condition_codes[code], stream);
8456 }
9997d19d
RE
8457 return;
8458
8459 case 'N':
8460 {
8461 REAL_VALUE_TYPE r;
8462 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8463 r = REAL_VALUE_NEGATE (r);
8464 fprintf (stream, "%s", fp_const_from_val (&r));
8465 }
8466 return;
8467
8468 case 'B':
8469 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
8470 {
8471 HOST_WIDE_INT val;
5895f793 8472 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 8473 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8474 }
9997d19d
RE
8475 else
8476 {
8477 putc ('~', stream);
8478 output_addr_const (stream, x);
8479 }
8480 return;
8481
8482 case 'i':
8483 fprintf (stream, "%s", arithmetic_instr (x, 1));
8484 return;
8485
8486 case 'I':
8487 fprintf (stream, "%s", arithmetic_instr (x, 0));
8488 return;
8489
8490 case 'S':
8491 {
8492 HOST_WIDE_INT val;
5895f793 8493 const char * shift = shift_op (x, &val);
9997d19d 8494
e2c671ba
RE
8495 if (shift)
8496 {
5895f793 8497 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
8498 if (val == -1)
8499 arm_print_operand (stream, XEXP (x, 1), 0);
8500 else
4bc74ece
NC
8501 {
8502 fputc ('#', stream);
36ba9cb8 8503 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8504 }
e2c671ba 8505 }
9997d19d
RE
8506 }
8507 return;
8508
d5b7b3ae
RE
8509 /* An explanation of the 'Q', 'R' and 'H' register operands:
8510
8511 In a pair of registers containing a DI or DF value the 'Q'
8512 operand returns the register number of the register containing
8513 the least signficant part of the value. The 'R' operand returns
8514 the register number of the register containing the most
8515 significant part of the value.
8516
8517 The 'H' operand returns the higher of the two register numbers.
8518 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8519 same as the 'Q' operand, since the most signficant part of the
8520 value is held in the lower number register. The reverse is true
8521 on systems where WORDS_BIG_ENDIAN is false.
8522
8523 The purpose of these operands is to distinguish between cases
8524 where the endian-ness of the values is important (for example
8525 when they are added together), and cases where the endian-ness
8526 is irrelevant, but the order of register operations is important.
8527 For example when loading a value from memory into a register
8528 pair, the endian-ness does not matter. Provided that the value
8529 from the lower memory address is put into the lower numbered
8530 register, and the value from the higher address is put into the
8531 higher numbered register, the load will work regardless of whether
8532 the value being loaded is big-wordian or little-wordian. The
8533 order of the two register loads can matter however, if the address
8534 of the memory location is actually held in one of the registers
8535 being overwritten by the load. */
c1c2bc04 8536 case 'Q':
d5b7b3ae 8537 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 8538 abort ();
d5b7b3ae 8539 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
8540 return;
8541
9997d19d 8542 case 'R':
d5b7b3ae 8543 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 8544 abort ();
d5b7b3ae
RE
8545 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8546 return;
8547
8548 case 'H':
8549 if (REGNO (x) > LAST_ARM_REGNUM)
8550 abort ();
8551 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
8552 return;
8553
8554 case 'm':
d5b7b3ae
RE
8555 asm_fprintf (stream, "%r",
8556 GET_CODE (XEXP (x, 0)) == REG
8557 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
8558 return;
8559
8560 case 'M':
dd18ae56 8561 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae 8562 REGNO (x),
e9d7b180 8563 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
8564 return;
8565
8566 case 'd':
64e92a26
RE
8567 /* CONST_TRUE_RTX means always -- that's the default. */
8568 if (x == const_true_rtx)
d5b7b3ae
RE
8569 return;
8570
8571 if (TARGET_ARM)
9997d19d
RE
8572 fputs (arm_condition_codes[get_arm_condition_code (x)],
8573 stream);
d5b7b3ae
RE
8574 else
8575 fputs (thumb_condition_code (x, 0), stream);
9997d19d
RE
8576 return;
8577
8578 case 'D':
64e92a26
RE
8579 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
8580 want to do that. */
8581 if (x == const_true_rtx)
8582 abort ();
d5b7b3ae
RE
8583
8584 if (TARGET_ARM)
8585 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8586 (get_arm_condition_code (x))],
9997d19d 8587 stream);
d5b7b3ae
RE
8588 else
8589 fputs (thumb_condition_code (x, 1), stream);
9997d19d
RE
8590 return;
8591
8592 default:
8593 if (x == 0)
8594 abort ();
8595
8596 if (GET_CODE (x) == REG)
d5b7b3ae 8597 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
8598 else if (GET_CODE (x) == MEM)
8599 {
8600 output_memory_reference_mode = GET_MODE (x);
8601 output_address (XEXP (x, 0));
8602 }
8603 else if (GET_CODE (x) == CONST_DOUBLE)
8604 fprintf (stream, "#%s", fp_immediate_constant (x));
8605 else if (GET_CODE (x) == NEG)
6354dc9b 8606 abort (); /* This should never happen now. */
9997d19d
RE
8607 else
8608 {
8609 fputc ('#', stream);
8610 output_addr_const (stream, x);
8611 }
8612 }
8613}
cce8749e 8614\f
301d03af
RS
8615#ifndef AOF_ASSEMBLER
8616/* Target hook for assembling integer objects. The ARM version needs to
8617 handle word-sized values specially. */
8618
8619static bool
8620arm_assemble_integer (x, size, aligned_p)
8621 rtx x;
8622 unsigned int size;
8623 int aligned_p;
8624{
8625 if (size == UNITS_PER_WORD && aligned_p)
8626 {
8627 fputs ("\t.word\t", asm_out_file);
8628 output_addr_const (asm_out_file, x);
8629
8630 /* Mark symbols as position independent. We only do this in the
8631 .text segment, not in the .data segment. */
8632 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
8633 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
8634 {
e26053d1 8635 if (GET_CODE (x) == SYMBOL_REF
14f583b8
PB
8636 && (CONSTANT_POOL_ADDRESS_P (x)
8637 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
301d03af
RS
8638 fputs ("(GOTOFF)", asm_out_file);
8639 else if (GET_CODE (x) == LABEL_REF)
8640 fputs ("(GOTOFF)", asm_out_file);
8641 else
8642 fputs ("(GOT)", asm_out_file);
8643 }
8644 fputc ('\n', asm_out_file);
8645 return true;
8646 }
1d6e90ac 8647
301d03af
RS
8648 return default_assemble_integer (x, size, aligned_p);
8649}
8650#endif
8651\f
cce8749e
CH
8652/* A finite state machine takes care of noticing whether or not instructions
8653 can be conditionally executed, and thus decrease execution time and code
8654 size by deleting branch instructions. The fsm is controlled by
8655 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8656
8657/* The state of the fsm controlling condition codes are:
8658 0: normal, do nothing special
8659 1: make ASM_OUTPUT_OPCODE not output this instruction
8660 2: make ASM_OUTPUT_OPCODE not output this instruction
8661 3: make instructions conditional
8662 4: make instructions conditional
8663
8664 State transitions (state->state by whom under condition):
8665 0 -> 1 final_prescan_insn if the `target' is a label
8666 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8667 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8668 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8669 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8670 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8671 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8672 (the target insn is arm_target_insn).
8673
ff9940b0
RE
8674 If the jump clobbers the conditions then we use states 2 and 4.
8675
8676 A similar thing can be done with conditional return insns.
8677
cce8749e
CH
8678 XXX In case the `target' is an unconditional branch, this conditionalising
8679 of the instructions always reduces code size, but not always execution
8680 time. But then, I want to reduce the code size to somewhere near what
8681 /bin/cc produces. */
8682
cce8749e
CH
/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static enum arm_cond_code
get_arm_condition_code (comparison)
     rtx comparison;
{
  enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
  int code;
  enum rtx_code comp_code = GET_CODE (comparison);

  /* If the first operand does not already carry a CC mode, derive the
     mode the comparison would use.  */
  if (GET_MODE_CLASS (mode) != MODE_CC)
    mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
                           XEXP (comparison, 1));

  switch (mode)
    {
      /* The CC_Dxx "dominating comparison" modes all share the code at
         the `dominance' label; each case fixes the condition that holds
         when the combined comparison is non-zero, and CC_DLTUmode falls
         through into the label directly.  */
    case CC_DNEmode: code = ARM_NE; goto dominance;
    case CC_DEQmode: code = ARM_EQ; goto dominance;
    case CC_DGEmode: code = ARM_GE; goto dominance;
    case CC_DGTmode: code = ARM_GT; goto dominance;
    case CC_DLEmode: code = ARM_LE; goto dominance;
    case CC_DLTmode: code = ARM_LT; goto dominance;
    case CC_DGEUmode: code = ARM_CS; goto dominance;
    case CC_DGTUmode: code = ARM_HI; goto dominance;
    case CC_DLEUmode: code = ARM_LS; goto dominance;
    case CC_DLTUmode: code = ARM_CC;

    dominance:
      /* Only (eq ...) and (ne ...) make sense against a dominance mode.  */
      if (comp_code != EQ && comp_code != NE)
        abort ();

      if (comp_code == EQ)
        return ARM_INVERSE_CONDITION_CODE (code);
      return code;

    case CC_NOOVmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        case GE: return ARM_PL;
        case LT: return ARM_MI;
        default: abort ();
        }

    case CC_Zmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        default: abort ();
        }

    case CCFPEmode:
    case CCFPmode:
      /* These encodings assume that AC=1 in the FPA system control
         byte.  This allows us to handle all cases except UNEQ and
         LTGT.  */
      switch (comp_code)
        {
        case GE: return ARM_GE;
        case GT: return ARM_GT;
        case LE: return ARM_LS;
        case LT: return ARM_MI;
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        case ORDERED: return ARM_VC;
        case UNORDERED: return ARM_VS;
        case UNLT: return ARM_LT;
        case UNLE: return ARM_LE;
        case UNGT: return ARM_HI;
        case UNGE: return ARM_PL;
          /* UNEQ and LTGT do not have a representation.  */
        case UNEQ: /* Fall through.  */
        case LTGT: /* Fall through.  */
        default: abort ();
        }

      /* The operands of the comparison were swapped, so the sense of
         every ordering condition is reversed.  */
    case CC_SWPmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        case GE: return ARM_LE;
        case GT: return ARM_LT;
        case LE: return ARM_GE;
        case LT: return ARM_GT;
        case GEU: return ARM_LS;
        case GTU: return ARM_CC;
        case LEU: return ARM_CS;
        case LTU: return ARM_HI;
        default: abort ();
        }

    case CC_Cmode:
      switch (comp_code)
        {
        case LTU: return ARM_CS;
        case GEU: return ARM_CC;
        default: abort ();
        }

    case CCmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        case GE: return ARM_GE;
        case GT: return ARM_GT;
        case LE: return ARM_LE;
        case LT: return ARM_LT;
        case GEU: return ARM_CS;
        case GTU: return ARM_HI;
        case LEU: return ARM_LS;
        case LTU: return ARM_CC;
        default: abort ();
        }

    default: abort ();
    }

  /* Not reached; every case above returns or aborts.  */
  abort ();
}
cce8749e
CH
8808
8809
8810void
74bbc178 8811arm_final_prescan_insn (insn)
cce8749e 8812 rtx insn;
cce8749e
CH
8813{
8814 /* BODY will hold the body of INSN. */
1d6e90ac 8815 rtx body = PATTERN (insn);
cce8749e
CH
8816
8817 /* This will be 1 if trying to repeat the trick, and things need to be
8818 reversed if it appears to fail. */
8819 int reverse = 0;
8820
ff9940b0
RE
8821 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
8822 taken are clobbered, even if the rtl suggests otherwise. It also
8823 means that we have to grub around within the jump expression to find
8824 out what the conditions are when the jump isn't taken. */
8825 int jump_clobbers = 0;
8826
6354dc9b 8827 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
8828 int seeking_return = 0;
8829
cce8749e
CH
8830 /* START_INSN will hold the insn from where we start looking. This is the
8831 first insn after the following code_label if REVERSE is true. */
8832 rtx start_insn = insn;
8833
8834 /* If in state 4, check if the target branch is reached, in order to
8835 change back to state 0. */
8836 if (arm_ccfsm_state == 4)
8837 {
8838 if (insn == arm_target_insn)
f5a1b0d2
NC
8839 {
8840 arm_target_insn = NULL;
8841 arm_ccfsm_state = 0;
8842 }
cce8749e
CH
8843 return;
8844 }
8845
8846 /* If in state 3, it is possible to repeat the trick, if this insn is an
8847 unconditional branch to a label, and immediately following this branch
8848 is the previous target label which is only used once, and the label this
8849 branch jumps to is not too far off. */
8850 if (arm_ccfsm_state == 3)
8851 {
8852 if (simplejump_p (insn))
8853 {
8854 start_insn = next_nonnote_insn (start_insn);
8855 if (GET_CODE (start_insn) == BARRIER)
8856 {
8857 /* XXX Isn't this always a barrier? */
8858 start_insn = next_nonnote_insn (start_insn);
8859 }
8860 if (GET_CODE (start_insn) == CODE_LABEL
8861 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8862 && LABEL_NUSES (start_insn) == 1)
8863 reverse = TRUE;
8864 else
8865 return;
8866 }
ff9940b0
RE
8867 else if (GET_CODE (body) == RETURN)
8868 {
8869 start_insn = next_nonnote_insn (start_insn);
8870 if (GET_CODE (start_insn) == BARRIER)
8871 start_insn = next_nonnote_insn (start_insn);
8872 if (GET_CODE (start_insn) == CODE_LABEL
8873 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8874 && LABEL_NUSES (start_insn) == 1)
8875 {
8876 reverse = TRUE;
8877 seeking_return = 1;
8878 }
8879 else
8880 return;
8881 }
cce8749e
CH
8882 else
8883 return;
8884 }
8885
8886 if (arm_ccfsm_state != 0 && !reverse)
8887 abort ();
8888 if (GET_CODE (insn) != JUMP_INSN)
8889 return;
8890
ddd5a7c1 8891 /* This jump might be paralleled with a clobber of the condition codes
ff9940b0
RE
8892 the jump should always come first */
8893 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8894 body = XVECEXP (body, 0, 0);
8895
8896#if 0
8897 /* If this is a conditional return then we don't want to know */
8898 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8899 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8900 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8901 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8902 return;
8903#endif
8904
cce8749e
CH
8905 if (reverse
8906 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8907 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8908 {
bd9c7e23
RE
8909 int insns_skipped;
8910 int fail = FALSE, succeed = FALSE;
cce8749e
CH
8911 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8912 int then_not_else = TRUE;
ff9940b0 8913 rtx this_insn = start_insn, label = 0;
cce8749e 8914
e45b72c4
RE
8915 /* If the jump cannot be done with one instruction, we cannot
8916 conditionally execute the instruction in the inverse case. */
ff9940b0 8917 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 8918 {
5bbe2d40
RE
8919 jump_clobbers = 1;
8920 return;
8921 }
ff9940b0 8922
cce8749e
CH
8923 /* Register the insn jumped to. */
8924 if (reverse)
ff9940b0
RE
8925 {
8926 if (!seeking_return)
8927 label = XEXP (SET_SRC (body), 0);
8928 }
cce8749e
CH
8929 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8930 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8931 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8932 {
8933 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8934 then_not_else = FALSE;
8935 }
ff9940b0
RE
8936 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8937 seeking_return = 1;
8938 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8939 {
8940 seeking_return = 1;
8941 then_not_else = FALSE;
8942 }
cce8749e
CH
8943 else
8944 abort ();
8945
8946 /* See how many insns this branch skips, and what kind of insns. If all
8947 insns are okay, and the label or unconditional branch to the same
8948 label is not too far away, succeed. */
8949 for (insns_skipped = 0;
b36ba79f 8950 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
8951 {
8952 rtx scanbody;
8953
8954 this_insn = next_nonnote_insn (this_insn);
8955 if (!this_insn)
8956 break;
8957
cce8749e
CH
8958 switch (GET_CODE (this_insn))
8959 {
8960 case CODE_LABEL:
8961 /* Succeed if it is the target label, otherwise fail since
8962 control falls in from somewhere else. */
8963 if (this_insn == label)
8964 {
ff9940b0
RE
8965 if (jump_clobbers)
8966 {
8967 arm_ccfsm_state = 2;
8968 this_insn = next_nonnote_insn (this_insn);
8969 }
8970 else
8971 arm_ccfsm_state = 1;
cce8749e
CH
8972 succeed = TRUE;
8973 }
8974 else
8975 fail = TRUE;
8976 break;
8977
ff9940b0 8978 case BARRIER:
cce8749e 8979 /* Succeed if the following insn is the target label.
ff9940b0
RE
8980 Otherwise fail.
8981 If return insns are used then the last insn in a function
6354dc9b 8982 will be a barrier. */
cce8749e 8983 this_insn = next_nonnote_insn (this_insn);
ff9940b0 8984 if (this_insn && this_insn == label)
cce8749e 8985 {
ff9940b0
RE
8986 if (jump_clobbers)
8987 {
8988 arm_ccfsm_state = 2;
8989 this_insn = next_nonnote_insn (this_insn);
8990 }
8991 else
8992 arm_ccfsm_state = 1;
cce8749e
CH
8993 succeed = TRUE;
8994 }
8995 else
8996 fail = TRUE;
8997 break;
8998
ff9940b0 8999 case CALL_INSN:
2b835d68 9000 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 9001 calls. */
2b835d68 9002 if (TARGET_APCS_32)
bd9c7e23
RE
9003 {
9004 /* Succeed if the following insn is the target label,
9005 or if the following two insns are a barrier and
9006 the target label. */
9007 this_insn = next_nonnote_insn (this_insn);
9008 if (this_insn && GET_CODE (this_insn) == BARRIER)
9009 this_insn = next_nonnote_insn (this_insn);
9010
9011 if (this_insn && this_insn == label
b36ba79f 9012 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
9013 {
9014 if (jump_clobbers)
9015 {
9016 arm_ccfsm_state = 2;
9017 this_insn = next_nonnote_insn (this_insn);
9018 }
9019 else
9020 arm_ccfsm_state = 1;
9021 succeed = TRUE;
9022 }
9023 else
9024 fail = TRUE;
9025 }
ff9940b0 9026 break;
2b835d68 9027
cce8749e
CH
9028 case JUMP_INSN:
9029 /* If this is an unconditional branch to the same label, succeed.
9030 If it is to another label, do nothing. If it is conditional,
9031 fail. */
914a3b8c 9032 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 9033
ed4c4348 9034 scanbody = PATTERN (this_insn);
ff9940b0
RE
9035 if (GET_CODE (scanbody) == SET
9036 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
9037 {
9038 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9039 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9040 {
9041 arm_ccfsm_state = 2;
9042 succeed = TRUE;
9043 }
9044 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9045 fail = TRUE;
9046 }
b36ba79f
RE
9047 /* Fail if a conditional return is undesirable (eg on a
9048 StrongARM), but still allow this if optimizing for size. */
9049 else if (GET_CODE (scanbody) == RETURN
5895f793
RE
9050 && !use_return_insn (TRUE)
9051 && !optimize_size)
b36ba79f 9052 fail = TRUE;
ff9940b0
RE
9053 else if (GET_CODE (scanbody) == RETURN
9054 && seeking_return)
9055 {
9056 arm_ccfsm_state = 2;
9057 succeed = TRUE;
9058 }
9059 else if (GET_CODE (scanbody) == PARALLEL)
9060 {
9061 switch (get_attr_conds (this_insn))
9062 {
9063 case CONDS_NOCOND:
9064 break;
9065 default:
9066 fail = TRUE;
9067 break;
9068 }
9069 }
4e67550b
RE
9070 else
9071 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9072
cce8749e
CH
9073 break;
9074
9075 case INSN:
ff9940b0
RE
9076 /* Instructions using or affecting the condition codes make it
9077 fail. */
ed4c4348 9078 scanbody = PATTERN (this_insn);
5895f793
RE
9079 if (!(GET_CODE (scanbody) == SET
9080 || GET_CODE (scanbody) == PARALLEL)
74641843 9081 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
9082 fail = TRUE;
9083 break;
9084
9085 default:
9086 break;
9087 }
9088 }
9089 if (succeed)
9090 {
ff9940b0 9091 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 9092 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
9093 else if (seeking_return || arm_ccfsm_state == 2)
9094 {
9095 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9096 {
9097 this_insn = next_nonnote_insn (this_insn);
9098 if (this_insn && (GET_CODE (this_insn) == BARRIER
9099 || GET_CODE (this_insn) == CODE_LABEL))
9100 abort ();
9101 }
9102 if (!this_insn)
9103 {
9104 /* Oh, dear! we ran off the end.. give up */
df4ae160 9105 recog (PATTERN (insn), insn, NULL);
ff9940b0 9106 arm_ccfsm_state = 0;
abaa26e5 9107 arm_target_insn = NULL;
ff9940b0
RE
9108 return;
9109 }
9110 arm_target_insn = this_insn;
9111 }
cce8749e
CH
9112 else
9113 abort ();
ff9940b0
RE
9114 if (jump_clobbers)
9115 {
9116 if (reverse)
9117 abort ();
9118 arm_current_cc =
9119 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9120 0), 0), 1));
9121 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9122 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9123 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9124 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9125 }
9126 else
9127 {
9128 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9129 what it was. */
9130 if (!reverse)
9131 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9132 0));
9133 }
cce8749e 9134
cce8749e
CH
9135 if (reverse || then_not_else)
9136 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9137 }
d5b7b3ae 9138
1ccbefce 9139 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 9140 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 9141 across this call; since the insn has been recognized already we
b020fd92 9142 call recog direct). */
df4ae160 9143 recog (PATTERN (insn), insn, NULL);
cce8749e 9144 }
f3bb6135 9145}
cce8749e 9146
/* Returns true if REGNO is a valid register
   for holding a quantity of type MODE.  */

int
arm_hard_regno_mode_ok (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  /* Condition-code values may only live in the dedicated CC register.  */
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return regno == CC_REGNUM;

  if (TARGET_THUMB)
    /* For the Thumb we only allow values bigger than SImode in
       registers 0 - 6, so that there is always a second low
       register available to hold the upper part of the value.
       We probably ought to ensure that the register is the
       start of an even numbered register pair.  */
    return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);

  if (regno <= LAST_ARM_REGNUM)
    /* We allow any value to be stored in the general registers.  */
    return 1;

  if (   regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    /* We only allow integers in the fake hard registers.  */
    return GET_MODE_CLASS (mode) == MODE_INT;

  /* The only registers left are the FPU registers
     which we only allow to hold FP values.  */
  return GET_MODE_CLASS (mode) == MODE_FLOAT
    && regno >= FIRST_ARM_FP_REGNUM
    && regno <= LAST_ARM_FP_REGNUM;
}
9181
d5b7b3ae
RE
9182int
9183arm_regno_class (regno)
9184 int regno;
9185{
9186 if (TARGET_THUMB)
9187 {
9188 if (regno == STACK_POINTER_REGNUM)
9189 return STACK_REG;
9190 if (regno == CC_REGNUM)
9191 return CC_REG;
9192 if (regno < 8)
9193 return LO_REGS;
9194 return HI_REGS;
9195 }
9196
9197 if ( regno <= LAST_ARM_REGNUM
9198 || regno == FRAME_POINTER_REGNUM
9199 || regno == ARG_POINTER_REGNUM)
9200 return GENERAL_REGS;
9201
9202 if (regno == CC_REGNUM)
9203 return NO_REGS;
9204
9205 return FPU_REGS;
9206}
9207
9208/* Handle a special case when computing the offset
9209 of an argument from the frame pointer. */
1d6e90ac 9210
d5b7b3ae
RE
9211int
9212arm_debugger_arg_offset (value, addr)
9213 int value;
9214 rtx addr;
9215{
9216 rtx insn;
9217
9218 /* We are only interested if dbxout_parms() failed to compute the offset. */
9219 if (value != 0)
9220 return 0;
9221
9222 /* We can only cope with the case where the address is held in a register. */
9223 if (GET_CODE (addr) != REG)
9224 return 0;
9225
9226 /* If we are using the frame pointer to point at the argument, then
9227 an offset of 0 is correct. */
cd2b33d0 9228 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
9229 return 0;
9230
9231 /* If we are using the stack pointer to point at the
9232 argument, then an offset of 0 is correct. */
5895f793 9233 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
9234 && REGNO (addr) == SP_REGNUM)
9235 return 0;
9236
9237 /* Oh dear. The argument is pointed to by a register rather
9238 than being held in a register, or being stored at a known
9239 offset from the frame pointer. Since GDB only understands
9240 those two kinds of argument we must translate the address
9241 held in the register into an offset from the frame pointer.
9242 We do this by searching through the insns for the function
9243 looking to see where this register gets its value. If the
9244 register is initialised from the frame pointer plus an offset
9245 then we are in luck and we can continue, otherwise we give up.
9246
9247 This code is exercised by producing debugging information
9248 for a function with arguments like this:
9249
9250 double func (double a, double b, int c, double d) {return d;}
9251
9252 Without this code the stab for parameter 'd' will be set to
9253 an offset of 0 from the frame pointer, rather than 8. */
9254
9255 /* The if() statement says:
9256
9257 If the insn is a normal instruction
9258 and if the insn is setting the value in a register
9259 and if the register being set is the register holding the address of the argument
9260 and if the address is computing by an addition
9261 that involves adding to a register
9262 which is the frame pointer
9263 a constant integer
9264
9265 then... */
9266
9267 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9268 {
9269 if ( GET_CODE (insn) == INSN
9270 && GET_CODE (PATTERN (insn)) == SET
9271 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9272 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9273 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 9274 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
9275 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9276 )
9277 {
9278 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9279
9280 break;
9281 }
9282 }
9283
9284 if (value == 0)
9285 {
9286 debug_rtx (addr);
c725bd79 9287 warning ("unable to compute real location of stacked parameter");
d5b7b3ae
RE
9288 value = 8; /* XXX magic hack */
9289 }
9290
9291 return value;
9292}
9293
d19fb8e3 9294#define def_builtin(NAME, TYPE, CODE) \
6a2dd09a 9295 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE)
d19fb8e3
NC
9296
9297void
9298arm_init_builtins ()
9299{
cbd5937a 9300 tree endlink = void_list_node;
d19fb8e3
NC
9301 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9302 tree pchar_type_node = build_pointer_type (char_type_node);
9303
9304 tree int_ftype_int, void_ftype_pchar;
9305
b4de2f7d 9306 /* void func (char *) */
d19fb8e3 9307 void_ftype_pchar
b4de2f7d 9308 = build_function_type_list (void_type_node, pchar_type_node, NULL_TREE);
d19fb8e3
NC
9309
9310 /* int func (int) */
9311 int_ftype_int
9312 = build_function_type (integer_type_node, int_endlink);
9313
9314 /* Initialize arm V5 builtins. */
9315 if (arm_arch5)
eab4abeb 9316 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
d19fb8e3
NC
9317}
9318
9319/* Expand an expression EXP that calls a built-in function,
9320 with result going to TARGET if that's convenient
9321 (and in mode MODE if that's convenient).
9322 SUBTARGET may be used as the target for computing one of EXP's operands.
9323 IGNORE is nonzero if the value is to be ignored. */
9324
9325rtx
9326arm_expand_builtin (exp, target, subtarget, mode, ignore)
9327 tree exp;
9328 rtx target;
9329 rtx subtarget ATTRIBUTE_UNUSED;
9330 enum machine_mode mode ATTRIBUTE_UNUSED;
9331 int ignore ATTRIBUTE_UNUSED;
9332{
9333 enum insn_code icode;
9334 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9335 tree arglist = TREE_OPERAND (exp, 1);
9336 tree arg0;
9337 rtx op0, pat;
9338 enum machine_mode tmode, mode0;
9339 int fcode = DECL_FUNCTION_CODE (fndecl);
9340
9341 switch (fcode)
9342 {
9343 default:
9344 break;
9345
9346 case ARM_BUILTIN_CLZ:
9347 icode = CODE_FOR_clz;
9348 arg0 = TREE_VALUE (arglist);
9349 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9350 tmode = insn_data[icode].operand[0].mode;
9351 mode0 = insn_data[icode].operand[1].mode;
9352
9353 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9354 op0 = copy_to_mode_reg (mode0, op0);
9355 if (target == 0
9356 || GET_MODE (target) != tmode
9357 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9358 target = gen_reg_rtx (tmode);
9359 pat = GEN_FCN (icode) (target, op0);
9360 if (! pat)
9361 return 0;
9362 emit_insn (pat);
9363 return target;
d19fb8e3 9364 }
e26053d1 9365
d19fb8e3
NC
9366 /* @@@ Should really do something sensible here. */
9367 return NULL_RTX;
9368}
d5b7b3ae
RE
9369\f
9370/* Recursively search through all of the blocks in a function
9371 checking to see if any of the variables created in that
9372 function match the RTX called 'orig'. If they do then
9373 replace them with the RTX called 'new'. */
9374
9375static void
9376replace_symbols_in_block (block, orig, new)
9377 tree block;
9378 rtx orig;
9379 rtx new;
9380{
9381 for (; block; block = BLOCK_CHAIN (block))
9382 {
9383 tree sym;
9384
5895f793 9385 if (!TREE_USED (block))
d5b7b3ae
RE
9386 continue;
9387
9388 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9389 {
9390 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9391 || DECL_IGNORED_P (sym)
9392 || TREE_CODE (sym) != VAR_DECL
9393 || DECL_EXTERNAL (sym)
5895f793 9394 || !rtx_equal_p (DECL_RTL (sym), orig)
d5b7b3ae
RE
9395 )
9396 continue;
9397
7b8b8ade 9398 SET_DECL_RTL (sym, new);
d5b7b3ae
RE
9399 }
9400
9401 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9402 }
9403}
9404
/* Return the number (counting from 0) of
   the least significant set bit in MASK.
   MASK must have at least one bit set.  */

#ifdef __GNUC__
inline
#endif
static int
number_of_first_bit_set (mask)
     int mask;
{
  /* Work on an unsigned copy so the right shift is well defined
     even for negative MASK values.  */
  unsigned int bits = (unsigned int) mask;
  int index = 0;

  while ((bits & 1u) == 0)
    {
      bits >>= 1;
      index++;
    }

  return index;
}
9424
/* Generate code to return from a thumb function.
   If 'reg_containing_return_addr' is -1, then the return address is
   actually on the stack, at the stack pointer.
   F is the assembly file to write to.  EH_OFS, if non-NULL, is the
   register holding the stack adjustment for __builtin_eh_return.  */
static void
thumb_exit (f, reg_containing_return_addr, eh_ofs)
     FILE * f;
     int reg_containing_return_addr;
     rtx eh_ofs;
{
  unsigned regs_available_for_popping;
  unsigned regs_to_pop;
  int pops_needed;
  unsigned available;
  unsigned required;
  int mode;
  int size;
  int restore_a4 = FALSE;

  /* Compute the registers we need to pop.  */
  regs_to_pop = 0;
  pops_needed = 0;

  /* There is an assumption here, that if eh_ofs is not NULL, the
     normal return address will have been pushed.  */
  if (reg_containing_return_addr == -1 || eh_ofs)
    {
      /* When we are generating a return for __builtin_eh_return,
	 reg_containing_return_addr must specify the return regno.  */
      if (eh_ofs && reg_containing_return_addr == -1)
	abort ();

      regs_to_pop |= 1 << LR_REGNUM;
      ++pops_needed;
    }

  if (TARGET_BACKTRACE)
    {
      /* Restore the (ARM) frame pointer and stack pointer.  */
      regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
      pops_needed += 2;
    }

  /* If there is nothing to pop then just emit the BX instruction and
     return.  */
  if (pops_needed == 0)
    {
      if (eh_ofs)
	asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

      asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
      return;
    }
  /* Otherwise if we are not supporting interworking and we have not created
     a backtrace structure and the function was not entered in ARM mode then
     just pop the return address straight into the PC.  */
  else if (!TARGET_INTERWORK
	   && !TARGET_BACKTRACE
	   && !is_called_in_ARM_mode (current_function_decl))
    {
      if (eh_ofs)
	{
	  /* Skip the pushed return address, apply the EH adjustment,
	     then branch to the return address held in a register.  */
	  asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
	  asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
	  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
	}
      else
	asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);

      return;
    }

  /* Find out how many of the (return) argument registers we can corrupt.  */
  regs_available_for_popping = 0;

  /* If returning via __builtin_eh_return, the bottom three registers
     all contain information needed for the return.  */
  if (eh_ofs)
    size = 12;
  else
    {
#ifdef RTX_CODE
      /* If we can deduce the registers used from the function's
	 return value.  This is more reliable than examining
	 regs_ever_live[] because that will be set if the register is
	 ever used in the function, not just if the register is used
	 to hold a return value.  */

      if (current_function_return_rtx != 0)
	mode = GET_MODE (current_function_return_rtx);
      else
#endif
	mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      if (size == 0)
	{
	  /* In a void function we can use any argument register.
	     In a function that returns a structure on the stack
	     we can use the second and third argument registers.  */
	  if (mode == VOIDmode)
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (1))
	      | (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	  else
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	}
      else if (size <= 4)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (2))
	  | (1 << ARG_REGISTER (3));
      else if (size <= 8)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (3));
    }

  /* Match registers to be popped with registers into which we pop them.
     Each iteration pairs off the lowest remaining bit of each mask.  */
  for (available = regs_available_for_popping,
       required  = regs_to_pop;
       required != 0 && available != 0;
       available &= ~(available & - available),
       required  &= ~(required  & - required))
    -- pops_needed;

  /* If we have any popping registers left over, remove them.  */
  if (available > 0)
    regs_available_for_popping &= ~available;

  /* Otherwise if we need another popping register we can use
     the fourth argument register.  */
  else if (pops_needed)
    {
      /* If we have not found any free argument registers and
	 reg a4 contains the return address, we must move it.  */
      if (regs_available_for_popping == 0
	  && reg_containing_return_addr == LAST_ARG_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}
      else if (size > 12)
	{
	  /* Register a4 is being used to hold part of the return value,
	     but we have dire need of a free, low register.  Save a4 in
	     IP so it can be restored just before the final BX.  */
	  restore_a4 = TRUE;

	  asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
	}

      if (reg_containing_return_addr != LAST_ARG_REGNUM)
	{
	  /* The fourth argument register is available.  */
	  regs_available_for_popping |= 1 << LAST_ARG_REGNUM;

	  --pops_needed;
	}
    }

  /* Pop as many registers as we can.  */
  thumb_pushpop (f, regs_available_for_popping, FALSE);

  /* Process the registers we popped.  */
  if (reg_containing_return_addr == -1)
    {
      /* The return address was popped into the lowest numbered register.  */
      regs_to_pop &= ~(1 << LR_REGNUM);

      reg_containing_return_addr =
	number_of_first_bit_set (regs_available_for_popping);

      /* Remove this register from the mask of available registers, so that
	 the return address will not be corrupted by further pops.  */
      regs_available_for_popping &= ~(1 << reg_containing_return_addr);
    }

  /* If we popped other registers then handle them here.  */
  if (regs_available_for_popping)
    {
      int frame_pointer;

      /* Work out which register currently contains the frame pointer.  */
      frame_pointer = number_of_first_bit_set (regs_available_for_popping);

      /* Move it into the correct place.  */
      asm_fprintf (f, "\tmov\t%r, %r\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);

      /* (Temporarily) remove it from the mask of popped registers.  */
      regs_available_for_popping &= ~(1 << frame_pointer);
      regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);

      if (regs_available_for_popping)
	{
	  int stack_pointer;

	  /* We popped the stack pointer as well,
	     find the register that contains it.  */
	  stack_pointer = number_of_first_bit_set (regs_available_for_popping);

	  /* Move it into the stack register.  */
	  asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);

	  /* At this point we have popped all necessary registers, so
	     do not worry about restoring regs_available_for_popping
	     to its correct value:

	     assert (pops_needed == 0)
	     assert (regs_available_for_popping == (1 << frame_pointer))
	     assert (regs_to_pop == (1 << STACK_POINTER))  */
	}
      else
	{
	  /* Since we have just moved the popped value into the frame
	     pointer, the popping register is available for reuse, and
	     we know that we still have the stack pointer left to pop.  */
	  regs_available_for_popping |= (1 << frame_pointer);
	}
    }

  /* If we still have registers left on the stack, but we no longer have
     any registers into which we can pop them, then we must move the return
     address into the link register and make available the register that
     contained it.  */
  if (regs_available_for_popping == 0 && pops_needed > 0)
    {
      regs_available_for_popping |= 1 << reg_containing_return_addr;

      asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
		   reg_containing_return_addr);

      reg_containing_return_addr = LR_REGNUM;
    }

  /* If we have registers left on the stack then pop some more.
     We know that at most we will want to pop FP and SP.  */
  if (pops_needed > 0)
    {
      int  popped_into;
      int  move_to;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      /* We have popped either FP or SP.
	 Move whichever one it is into the correct register.  */
      popped_into = number_of_first_bit_set (regs_available_for_popping);
      move_to     = number_of_first_bit_set (regs_to_pop);

      asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);

      regs_to_pop &= ~(1 << move_to);

      --pops_needed;
    }

  /* If we still have not popped everything then we must have only
     had one register available to us and we are now popping the SP.  */
  if (pops_needed > 0)
    {
      int  popped_into;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      popped_into = number_of_first_bit_set (regs_available_for_popping);

      asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
      /*
	assert (regs_to_pop == (1 << STACK_POINTER))
	assert (pops_needed == 1)
      */
    }

  /* If necessary restore the a4 register.  */
  if (restore_a4)
    {
      if (reg_containing_return_addr != LR_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }

  if (eh_ofs)
    asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

  /* Return to caller.  */
  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
}
9717
/* Emit code to push or pop registers to or from the stack.
   F is the assembly file to write to.  MASK is the bit mask of
   registers to push or pop.  PUSH is non-zero to push, zero to pop.  */

static void
thumb_pushpop (f, mask, push)
     FILE * f;
     int mask;
     int push;
{
  int regno;
  int lo_mask = mask & 0xFF;

  if (lo_mask == 0 && !push && (mask & (1 << 15)))
    {
      /* Special case.  Do not generate a POP PC statement here, do it in
	 thumb_exit() */
      thumb_exit (f, -1, NULL_RTX);
      return;
    }

  fprintf (f, "\t%s\t{", push ? "push" : "pop");

  /* Look at the low registers first.  */
  for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
    {
      if (lo_mask & 1)
	{
	  asm_fprintf (f, "%r", regno);

	  /* Emit a separator if more low registers follow.  */
	  if ((lo_mask & ~1) != 0)
	    fprintf (f, ", ");
	}
    }

  if (push && (mask & (1 << LR_REGNUM)))
    {
      /* Catch pushing the LR.  */
      if (mask & 0xFF)
	fprintf (f, ", ");

      asm_fprintf (f, "%r", LR_REGNUM);
    }
  else if (!push && (mask & (1 << PC_REGNUM)))
    {
      /* Catch popping the PC.  */
      if (TARGET_INTERWORK || TARGET_BACKTRACE)
	{
	  /* The PC is never popped directly, instead
	     it is popped into r3 and then BX is used.  */
	  fprintf (f, "}\n");

	  thumb_exit (f, -1, NULL_RTX);

	  return;
	}
      else
	{
	  if (mask & 0xFF)
	    fprintf (f, ", ");

	  asm_fprintf (f, "%r", PC_REGNUM);
	}
    }

  fprintf (f, "}\n");
}
9783\f
9784void
9785thumb_final_prescan_insn (insn)
9786 rtx insn;
9787{
d5b7b3ae 9788 if (flag_print_asm_name)
9d98a694
AO
9789 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9790 INSN_ADDRESSES (INSN_UID (insn)));
d5b7b3ae
RE
9791}
9792
9793int
9794thumb_shiftable_const (val)
9795 unsigned HOST_WIDE_INT val;
9796{
9797 unsigned HOST_WIDE_INT mask = 0xff;
9798 int i;
9799
9800 if (val == 0) /* XXX */
9801 return 0;
9802
9803 for (i = 0; i < 25; i++)
9804 if ((val & (mask << i)) == val)
9805 return 1;
9806
9807 return 0;
9808}
9809
/* Returns non-zero if the current function contains,
   or might contain a far jump.  IN_PROLOGUE is non-zero when we are
   called from the prologue/epilogue generation code.  */

int
thumb_far_jump_used_p (int in_prologue)
{
  rtx insn;

  /* This test is only important for leaf functions.  */
  /* assert (!leaf_function_p ()); */

  /* If we have already decided that far jumps may be used,
     do not bother checking again, and always return true even if
     it turns out that they are not being used.  Once we have made
     the decision that far jumps are present (and that hence the link
     register will be pushed onto the stack) we cannot go back on it.  */
  if (cfun->machine->far_jump_used)
    return 1;

  /* If this function is not being called from the prologue/epilogue
     generation code then it must be being called from the
     INITIAL_ELIMINATION_OFFSET macro.  */
  if (!in_prologue)
    {
      /* In this case we know that we are being asked about the elimination
	 of the arg pointer register.  If that register is not being used,
	 then there are no arguments on the stack, and we do not have to
	 worry that a far jump might force the prologue to push the link
	 register, changing the stack offsets.  In this case we can just
	 return false, since the presence of far jumps in the function will
	 not affect stack offsets.

	 If the arg pointer is live (or if it was live, but has now been
	 eliminated and so set to dead) then we do have to test to see if
	 the function might contain a far jump.  This test can lead to some
	 false negatives, since before reload is completed, the length of
	 branch instructions is not known, so gcc defaults to returning their
	 longest length, which in turn sets the far jump attribute to true.

	 A false negative will not result in bad code being generated, but it
	 will result in a needless push and pop of the link register.  We
	 hope that this does not occur too often.  */
      if (regs_ever_live [ARG_POINTER_REGNUM])
	cfun->machine->arg_pointer_live = 1;
      else if (!cfun->machine->arg_pointer_live)
	return 0;
    }

  /* Check to see if the function contains a branch
     insn with the far jump attribute set.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN
	  /* Ignore tablejump patterns.  */
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
	  && get_attr_far_jump (insn) == FAR_JUMP_YES
	  )
	{
	  /* Record the fact that we have decided that
	     the function does use far jumps.  */
	  cfun->machine->far_jump_used = 1;
	  return 1;
	}
    }

  return 0;
}
9878
/* Return non-zero if FUNC must be entered in ARM mode.  */

int
is_called_in_ARM_mode (func)
     tree func;
{
  if (TREE_CODE (func) != FUNCTION_DECL)
    abort ();

  /* Ignore the problem about functions whose address is taken.  */
  if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
    return TRUE;

#ifdef ARM_PE
  /* On ARM_PE targets the attribute is explicit on the declaration.  */
  return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
#else
  return FALSE;
#endif
}
9898
9899/* The bits which aren't usefully expanded as rtl. */
400500c4 9900
cd2b33d0 9901const char *
d5b7b3ae
RE
9902thumb_unexpanded_epilogue ()
9903{
9904 int regno;
9905 int live_regs_mask = 0;
9906 int high_regs_pushed = 0;
9907 int leaf_function = leaf_function_p ();
9908 int had_to_push_lr;
9909 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9910
9911 if (return_used_this_function)
9912 return "";
9913
9914 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
aeaf4d25 9915 if (THUMB_REG_PUSHED_P (regno))
d5b7b3ae
RE
9916 live_regs_mask |= 1 << regno;
9917
9918 for (regno = 8; regno < 13; regno++)
aeaf4d25
AN
9919 if (THUMB_REG_PUSHED_P (regno))
9920 high_regs_pushed++;
d5b7b3ae
RE
9921
9922 /* The prolog may have pushed some high registers to use as
9923 work registers. eg the testuite file:
9924 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9925 compiles to produce:
9926 push {r4, r5, r6, r7, lr}
9927 mov r7, r9
9928 mov r6, r8
9929 push {r6, r7}
9930 as part of the prolog. We have to undo that pushing here. */
9931
9932 if (high_regs_pushed)
9933 {
9934 int mask = live_regs_mask;
9935 int next_hi_reg;
9936 int size;
9937 int mode;
9938
9939#ifdef RTX_CODE
9940 /* If we can deduce the registers used from the function's return value.
9941 This is more reliable that examining regs_ever_live[] because that
9942 will be set if the register is ever used in the function, not just if
9943 the register is used to hold a return value. */
9944
9945 if (current_function_return_rtx != 0)
9946 mode = GET_MODE (current_function_return_rtx);
9947 else
9948#endif
9949 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9950
9951 size = GET_MODE_SIZE (mode);
9952
9953 /* Unless we are returning a type of size > 12 register r3 is
9954 available. */
9955 if (size < 13)
9956 mask |= 1 << 3;
9957
9958 if (mask == 0)
9959 /* Oh dear! We have no low registers into which we can pop
9960 high registers! */
400500c4
RK
9961 internal_error
9962 ("no low registers available for popping high registers");
d5b7b3ae
RE
9963
9964 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
aeaf4d25 9965 if (THUMB_REG_PUSHED_P (next_hi_reg))
d5b7b3ae
RE
9966 break;
9967
9968 while (high_regs_pushed)
9969 {
9970 /* Find lo register(s) into which the high register(s) can
9971 be popped. */
9972 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9973 {
9974 if (mask & (1 << regno))
9975 high_regs_pushed--;
9976 if (high_regs_pushed == 0)
9977 break;
9978 }
9979
9980 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
9981
9982 /* Pop the values into the low register(s). */
9983 thumb_pushpop (asm_out_file, mask, 0);
9984
9985 /* Move the value(s) into the high registers. */
9986 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9987 {
9988 if (mask & (1 << regno))
9989 {
9990 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9991 regno);
9992
9993 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
aeaf4d25 9994 if (THUMB_REG_PUSHED_P (next_hi_reg))
d5b7b3ae
RE
9995 break;
9996 }
9997 }
9998 }
9999 }
10000
5895f793 10001 had_to_push_lr = (live_regs_mask || !leaf_function
d5b7b3ae
RE
10002 || thumb_far_jump_used_p (1));
10003
10004 if (TARGET_BACKTRACE
10005 && ((live_regs_mask & 0xFF) == 0)
10006 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10007 {
10008 /* The stack backtrace structure creation code had to
10009 push R7 in order to get a work register, so we pop
10010 it now. */
10011 live_regs_mask |= (1 << LAST_LO_REGNUM);
10012 }
10013
10014 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10015 {
10016 if (had_to_push_lr
5895f793
RE
10017 && !is_called_in_ARM_mode (current_function_decl)
10018 && !eh_ofs)
d5b7b3ae
RE
10019 live_regs_mask |= 1 << PC_REGNUM;
10020
10021 /* Either no argument registers were pushed or a backtrace
10022 structure was created which includes an adjusted stack
10023 pointer, so just pop everything. */
10024 if (live_regs_mask)
10025 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10026
10027 if (eh_ofs)
10028 thumb_exit (asm_out_file, 2, eh_ofs);
10029 /* We have either just popped the return address into the
10030 PC or it is was kept in LR for the entire function or
10031 it is still on the stack because we do not want to
10032 return by doing a pop {pc}. */
10033 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10034 thumb_exit (asm_out_file,
10035 (had_to_push_lr
10036 && is_called_in_ARM_mode (current_function_decl)) ?
10037 -1 : LR_REGNUM, NULL_RTX);
10038 }
10039 else
10040 {
10041 /* Pop everything but the return address. */
5895f793 10042 live_regs_mask &= ~(1 << PC_REGNUM);
d5b7b3ae
RE
10043
10044 if (live_regs_mask)
10045 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10046
10047 if (had_to_push_lr)
10048 /* Get the return address into a temporary register. */
10049 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10050
10051 /* Remove the argument registers that were pushed onto the stack. */
10052 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10053 SP_REGNUM, SP_REGNUM,
10054 current_function_pretend_args_size);
10055
10056 if (eh_ofs)
10057 thumb_exit (asm_out_file, 2, eh_ofs);
10058 else
10059 thumb_exit (asm_out_file,
10060 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10061 }
10062
10063 return "";
10064}
10065
10066/* Functions to save and restore machine-specific function data. */
10067
e2500fed
GK
10068static struct machine_function *
10069arm_init_machine_status ()
d5b7b3ae 10070{
e2500fed
GK
10071 struct machine_function *machine;
10072 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
6d3d9133 10073
e2500fed
GK
10074#if ARM_FT_UNKNOWN != 0
10075 machine->func_type = ARM_FT_UNKNOWN;
6d3d9133 10076#endif
e2500fed 10077 return machine;
f7a80099
NC
10078}
10079
d5b7b3ae
RE
10080/* Return an RTX indicating where the return address to the
10081 calling function can be found. */
1d6e90ac 10082
d5b7b3ae
RE
10083rtx
10084arm_return_addr (count, frame)
10085 int count;
10086 rtx frame ATTRIBUTE_UNUSED;
10087{
d5b7b3ae
RE
10088 if (count != 0)
10089 return NULL_RTX;
10090
9e2f7ec7
DD
10091 if (TARGET_APCS_32)
10092 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10093 else
d5b7b3ae 10094 {
9e2f7ec7 10095 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
d5b7b3ae 10096 GEN_INT (RETURN_ADDR_MASK26));
9e2f7ec7 10097 return get_func_hard_reg_initial_val (cfun, lr);
d5b7b3ae 10098 }
d5b7b3ae
RE
10099}
10100
10101/* Do anything needed before RTL is emitted for each function. */
1d6e90ac 10102
d5b7b3ae
RE
10103void
10104arm_init_expanders ()
10105{
10106 /* Arrange to initialize and mark the machine per-function status. */
10107 init_machine_status = arm_init_machine_status;
d5b7b3ae
RE
10108}
10109
10110/* Generate the rest of a function's prologue. */
1d6e90ac 10111
d5b7b3ae
RE
/* Generate the rest of a function's prologue: the parts expanded as
   RTL, i.e. setting up the frame pointer and allocating the local
   frame.  Register pushes are emitted as text separately by
   thumb_output_function_prologue.  */
void
thumb_expand_prologue ()
{
  /* Space needed for locals plus outgoing arguments.  */
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);
  unsigned long func_type;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  if (IS_INTERRUPT (func_type))
    {
      error ("interrupt Service Routines cannot be coded in Thumb mode");
      return;
    }

  if (frame_pointer_needed)
    emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));

  if (amount)
    {
      amount = ROUND_UP (amount);

      /* Small adjustments fit in an immediate; larger amounts must be
	 loaded into a register first.  */
      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (- amount)));
      else
	{
	  int regno;
	  rtx reg;

	  /* The stack decrement is too big for an immediate value in a single
	     insn.  In theory we could issue multiple subtracts, but after
	     three of them it becomes more space efficient to place the full
	     value in the constant pool and load into a register.  (Also the
	     ARM debugger really likes to see only one stack decrement per
	     function).  So instead we look for a scratch register into which
	     we can load the decrement, and then we subtract this from the
	     stack pointer.  Unfortunately on the thumb the only available
	     scratch registers are the argument registers, and we cannot use
	     these as they may hold arguments to the function.  Instead we
	     attempt to locate a call preserved register which is used by this
	     function.  If we can find one, then we know that it will have
	     been pushed at the start of the prologue and so we can corrupt
	     it now.  */
	  for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
	    if (THUMB_REG_PUSHED_P (regno)
		&& !(frame_pointer_needed
		     && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
	      break;

	  if (regno > LAST_LO_REGNUM) /* Very unlikely.  */
	    {
	      rtx spare = gen_rtx (REG, SImode, IP_REGNUM);

	      /* Choose an arbitrary, non-argument low register.  */
	      reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);

	      /* Save it by copying it into a high, scratch register.  */
	      emit_insn (gen_movsi (spare, reg));
	      /* Add a USE to stop propagate_one_insn() from barfing.  */
	      emit_insn (gen_prologue_use (spare));

	      /* Decrement the stack.  */
	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));

	      /* Restore the low register's original value.  */
	      emit_insn (gen_movsi (reg, spare));

	      /* Emit a USE of the restored scratch register, so that flow
		 analysis will not consider the restore redundant.  The
		 register won't be used again in this function and isn't
		 restored by the epilogue.  */
	      emit_insn (gen_prologue_use (reg));
	    }
	  else
	    {
	      reg = gen_rtx (REG, SImode, regno);

	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));
	    }
	}
    }

  /* Prevent the scheduler from moving insns across the prologue when
     profiling or when prologue scheduling is disabled.  */
  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
10206
/* Generate the RTL-expanded part of a function's epilogue:
   deallocating the local frame.  Register pops and the return
   sequence itself are emitted as text by thumb_unexpanded_epilogue.  */
void
thumb_expand_epilogue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);

  /* Naked functions don't have epilogues.  */
  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (frame_pointer_needed)
    emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
  else if (amount)
    {
      amount = ROUND_UP (amount);

      /* Small adjustments fit in an immediate; larger ones must go
	 through a register.  */
      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (amount)));
      else
	{
	  /* r3 is always free in the epilogue.  */
	  rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);

	  emit_insn (gen_movsi (reg, GEN_INT (amount)));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }

  /* Emit a USE (stack_pointer_rtx), so that
     the stack adjustment will not be deleted.  */
  emit_insn (gen_prologue_use (stack_pointer_rtx));

  /* Keep the scheduler from reordering insns across the epilogue when
     profiling or when prologue/epilogue scheduling is disabled.  */
  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
10243
08c148a8
NB
/* Output as text the parts of the Thumb prologue not expanded as RTL:
   the ARM-to-Thumb entry stub (when the function must be entered in
   ARM mode), pushing of anonymous argument registers, pushing of the
   live low registers and LR, creation of the optional stack backtrace
   structure, and spilling of live high registers through low
   registers.  SIZE is unused.  */
static void
thumb_output_function_prologue (f, size)
     FILE * f;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  int live_regs_mask = 0;	/* Low registers (plus LR) to push.  */
  int high_regs_pushed = 0;	/* Count of live high registers (r8-r12).  */
  int regno;

  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
	abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
	abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate code sequence to switch us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
	 ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
	 change in instruction sets.  This label is also used by
	 the assembler to bypass the ARM code when this function
	 is called from a Thumb encoded function elsewhere in the
	 same file.  Hence the definition of STUB_NAME here must
	 agree with the definition in gas/config/tc-arm.c  */

#define STUB_NAME ".real_start_of"

      asm_fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
	name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      asm_fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }

  if (current_function_pretend_args_size)
    {
      /* Anonymous (vararg) arguments are pushed so they live
	 contiguously with the named ones; otherwise just reserve
	 the space.  */
      if (cfun->machine->uses_anonymous_args)
	{
	  int num_pushes;

	  asm_fprintf (f, "\tpush\t{");

	  num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);

	  for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
	       regno <= LAST_ARG_REGNUM;
	       regno++)
	    asm_fprintf (f, "%r%s", regno,
			 regno == LAST_ARG_REGNUM ? "" : ", ");

	  asm_fprintf (f, "}\n");
	}
      else
	asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
		     SP_REGNUM, SP_REGNUM,
		     current_function_pretend_args_size);
    }

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (THUMB_REG_PUSHED_P (regno))
      live_regs_mask |= 1 << regno;

  /* LR must be saved if anything else is, if we are not a leaf, or if
     a far jump might need it as a scratch register.  */
  if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
    live_regs_mask |= 1 << LR_REGNUM;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register = 0;
      int wr;

      /* We have been asked to create a stack backtrace structure.
	 The code looks like this:

	 0   .align 2
	 0   func:
	 0     sub   SP, #16         Reserve space for 4 registers.
	 2     push  {R7}            Get a work register.
	 4     add   R7, SP, #20     Get the stack pointer before the push.
	 6     str   R7, [SP, #8]    Store the stack pointer (before reserving the space).
	 8     mov   R7, PC          Get hold of the start of this code plus 12.
	10     str   R7, [SP, #16]   Store it.
	12     mov   R7, FP          Get hold of the current frame pointer.
	14     str   R7, [SP, #4]    Store it.
	16     mov   R7, LR          Get hold of the current return address.
	18     str   R7, [SP, #12]   Store it.
	20     add   R7, SP, #16     Point at the start of the backtrace structure.
	22     mov   FP, R7          Put this value into the frame pointer.  */

      if ((live_regs_mask & 0xFF) == 0)
	{
	  /* See if the a4 register is free.  */

	  if (regs_ever_live [LAST_ARG_REGNUM] == 0)
	    work_register = LAST_ARG_REGNUM;
	  else	  /* We must push a register of our own.  */
	    live_regs_mask |= (1 << LAST_LO_REGNUM);
	}

      if (work_register == 0)
	{
	  /* Select a register from the list that will be pushed to
	     use as our work register.  */
	  for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
	    if ((1 << work_register) & live_regs_mask)
	      break;
	}

      asm_fprintf
	(f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
	 SP_REGNUM, SP_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (f, live_regs_mask, 1);

      /* OFFSET is the size of the block just pushed, i.e. the distance
	 from SP back to the reserved 16-byte structure.  */
      for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
	if (wr & live_regs_mask)
	  offset += 4;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
	 to calculate "start of backtrace creation code + 12".  */
      if (live_regs_mask)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	}
      else
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (live_regs_mask)
    thumb_pushpop (f, live_regs_mask, 1);

  for (regno = 8; regno < 13; regno++)
    if (THUMB_REG_PUSHED_P (regno))
      high_regs_pushed++;

  if (high_regs_pushed)
    {
      /* High registers cannot be pushed directly; copy them through
	 low registers (batches of the pushable low regs) instead.  */
      int pushable_regs = 0;
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
	if (THUMB_REG_PUSHED_P (next_hi_reg))
	  break;

      pushable_regs = mask;

      if (pushable_regs == 0)
	{
	  /* Desperation time -- this probably will never happen.  */
	  if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
	    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	  mask = 1 << LAST_ARG_REGNUM;
	}

      while (high_regs_pushed > 0)
	{
	  for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

		  high_regs_pushed--;

		  if (high_regs_pushed)
		    {
		      /* Find the next high register to transfer.  */
		      for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
			   next_hi_reg--)
			if (THUMB_REG_PUSHED_P (next_hi_reg))
			  break;
		    }
		  else
		    {
		      /* Trim the mask to just the regs actually used
			 in this final batch.  */
		      mask &= ~((1 << regno) - 1);
		      break;
		    }
		}
	    }

	  thumb_pushpop (f, mask, 1);
	}

      if (pushable_regs == 0
	  && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
	asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }
}
10473
10474/* Handle the case of a double word load into a low register from
10475 a computed memory address. The computed address may involve a
10476 register which is overwritten by the load. */
10477
/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  The two single-word
   loads are ordered so that any address register that is also the
   destination is read before it is clobbered.  Returns "" because the
   instructions are printed directly.  */
const char *
thumb_load_double_from_address (operands)
     rtx *operands;
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    abort ();

  if (GET_CODE (operands[1]) != MEM)
    abort ();

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      /* operands[2] addresses the second (high) word, 4 bytes on.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
	{
	  /* The base register is the low destination: load the high
	     word first so the address is not clobbered early.  */
	  output_asm_insn ("ldr\t%H0, %2", operands);
	  output_asm_insn ("ldr\t%0, %1", operands);
	}
      else
	{
	  output_asm_insn ("ldr\t%0, %1", operands);
	  output_asm_insn ("ldr\t%H0, %2", operands);
	}
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
	base = arg2, offset = arg1;
      else
	base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
	abort ();

      /* Catch the case of <address> = <reg> + <reg> */
      if (GET_CODE (offset) == REG)
	{
	  int reg_offset = REGNO (offset);
	  int reg_base = REGNO (base);
	  int reg_dest = REGNO (operands[0]);

	  /* Add the base and offset registers together into the
	     higher destination register.  */
	  asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
		       reg_dest + 1, reg_base, reg_offset);

	  /* Load the lower destination register from the address in
	     the higher destination register.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
		       reg_dest, reg_dest + 1);

	  /* Load the higher destination register from its own address
	     plus 4.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
		       reg_dest + 1, reg_dest + 1);
	}
      else
	{
	  /* Compute <address> + 4 for the high order load.  */
	  operands[2] = gen_rtx (MEM, SImode,
				 plus_constant (XEXP (operands[1], 0), 4));

	  /* If the computed address is held in the low order register
	     then load the high order register first, otherwise always
	     load the low order register first.  */
	  if (REGNO (operands[0]) == REGNO (base))
	    {
	      output_asm_insn ("ldr\t%H0, %2", operands);
	      output_asm_insn ("ldr\t%0, %1", operands);
	    }
	  else
	    {
	      output_asm_insn ("ldr\t%0, %1", operands);
	      output_asm_insn ("ldr\t%H0, %2", operands);
	    }
	}
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
	 directly.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      abort ();
      break;
    }

  return "";
}
10598
10599
cd2b33d0 10600const char *
d5b7b3ae
RE
10601thumb_output_move_mem_multiple (n, operands)
10602 int n;
10603 rtx * operands;
10604{
10605 rtx tmp;
10606
10607 switch (n)
10608 {
10609 case 2:
ca356f3a 10610 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10611 {
ca356f3a
RE
10612 tmp = operands[4];
10613 operands[4] = operands[5];
10614 operands[5] = tmp;
d5b7b3ae 10615 }
ca356f3a
RE
10616 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10617 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
d5b7b3ae
RE
10618 break;
10619
10620 case 3:
ca356f3a 10621 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10622 {
ca356f3a
RE
10623 tmp = operands[4];
10624 operands[4] = operands[5];
10625 operands[5] = tmp;
d5b7b3ae 10626 }
ca356f3a 10627 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 10628 {
ca356f3a
RE
10629 tmp = operands[5];
10630 operands[5] = operands[6];
10631 operands[6] = tmp;
d5b7b3ae 10632 }
ca356f3a 10633 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10634 {
ca356f3a
RE
10635 tmp = operands[4];
10636 operands[4] = operands[5];
10637 operands[5] = tmp;
d5b7b3ae
RE
10638 }
10639
ca356f3a
RE
10640 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10641 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
d5b7b3ae
RE
10642 break;
10643
10644 default:
10645 abort ();
10646 }
10647
10648 return "";
10649}
10650
1d6e90ac 10651/* Routines for generating rtl. */
d5b7b3ae
RE
10652
10653void
10654thumb_expand_movstrqi (operands)
10655 rtx * operands;
10656{
10657 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10658 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10659 HOST_WIDE_INT len = INTVAL (operands[2]);
10660 HOST_WIDE_INT offset = 0;
10661
10662 while (len >= 12)
10663 {
ca356f3a 10664 emit_insn (gen_movmem12b (out, in, out, in));
d5b7b3ae
RE
10665 len -= 12;
10666 }
10667
10668 if (len >= 8)
10669 {
ca356f3a 10670 emit_insn (gen_movmem8b (out, in, out, in));
d5b7b3ae
RE
10671 len -= 8;
10672 }
10673
10674 if (len >= 4)
10675 {
10676 rtx reg = gen_reg_rtx (SImode);
10677 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10678 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10679 len -= 4;
10680 offset += 4;
10681 }
10682
10683 if (len >= 2)
10684 {
10685 rtx reg = gen_reg_rtx (HImode);
10686 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10687 plus_constant (in, offset))));
10688 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10689 reg));
10690 len -= 2;
10691 offset += 2;
10692 }
10693
10694 if (len)
10695 {
10696 rtx reg = gen_reg_rtx (QImode);
10697 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10698 plus_constant (in, offset))));
10699 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10700 reg));
10701 }
10702}
10703
10704int
10705thumb_cmp_operand (op, mode)
10706 rtx op;
10707 enum machine_mode mode;
10708{
10709 return ((GET_CODE (op) == CONST_INT
10710 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10711 || register_operand (op, mode));
10712}
10713
cd2b33d0 10714static const char *
d5b7b3ae
RE
10715thumb_condition_code (x, invert)
10716 rtx x;
10717 int invert;
10718{
1d6e90ac 10719 static const char * const conds[] =
d5b7b3ae
RE
10720 {
10721 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10722 "hi", "ls", "ge", "lt", "gt", "le"
10723 };
10724 int val;
10725
10726 switch (GET_CODE (x))
10727 {
10728 case EQ: val = 0; break;
10729 case NE: val = 1; break;
10730 case GEU: val = 2; break;
10731 case LTU: val = 3; break;
10732 case GTU: val = 8; break;
10733 case LEU: val = 9; break;
10734 case GE: val = 10; break;
10735 case LT: val = 11; break;
10736 case GT: val = 12; break;
10737 case LE: val = 13; break;
10738 default:
10739 abort ();
10740 }
10741
10742 return conds[val ^ invert];
10743}
10744
10745/* Handle storing a half-word to memory during reload. */
1d6e90ac 10746
d5b7b3ae
RE
/* Handle storing a half-word to memory during reload: defer to the
   thumb_movhi_clobber pattern, which has a scratch register
   (operands[2]) available for forming the address.  */
void
thumb_reload_out_hi (operands)
     rtx * operands;
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
}
10753
10754/* Handle storing a half-word to memory during reload. */
1d6e90ac 10755
d5b7b3ae
RE
/* Handle loading a half-word from memory during reload.  This case is
   never expected to arise for Thumb, so simply abort if it is ever
   reached.  */
void
thumb_reload_in_hi (operands)
     rtx * operands ATTRIBUTE_UNUSED;
{
  abort ();
}
10762
c27ba912
DM
10763/* Return the length of a function name prefix
10764 that starts with the character 'c'. */
1d6e90ac 10765
c27ba912
DM
/* Return the length of a function name prefix
   that starts with the character 'c'.  The target-specific macro
   ARM_NAME_ENCODING_LENGTHS expands to a series of case labels giving
   the length of each recognized encoding prefix; any other character
   yields 0 (not a prefix).  */
static int
arm_get_strip_length (char c)
{
  switch (c)
    {
    ARM_NAME_ENCODING_LENGTHS
      default: return 0;
    }
}
10775
10776/* Return a pointer to a function's name with any
10777 and all prefix encodings stripped from it. */
1d6e90ac 10778
c27ba912
DM
/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */
const char *
arm_strip_name_encoding (const char * name)
{
  const char * p = name;
  int len;

  /* Keep dropping prefixes until the first character is no longer an
     encoding character.  */
  for (len = arm_get_strip_length (*p); len != 0;
       len = arm_get_strip_length (*p))
    p += len;

  return p;
}
10789
e2500fed
GK
10790rtx aof_pic_label;
10791
2b835d68 10792#ifdef AOF_ASSEMBLER
6354dc9b 10793/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 10794
32de079a
RE
10795struct pic_chain
10796{
62b10bbc 10797 struct pic_chain * next;
5f37d07c 10798 const char * symname;
32de079a
RE
10799};
10800
62b10bbc 10801static struct pic_chain * aof_pic_chain = NULL;
32de079a
RE
10802
/* Return an address expression (aof_pic_label plus a byte offset) for
   the PIC constant-pool slot holding symbol X, appending a new slot
   to the chain if the symbol has not been seen before.  */
rtx
aof_pic_entry (x)
     rtx x;
{
  struct pic_chain ** chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    {
      aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
    }

  /* Scan for an existing entry.  Symbol name strings appear to be
     pooled, so pointer comparison of symname is used -- NOTE(review):
     relies on XSTR returning a shared string for identical symbols.  */
  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  /* Not found: CHAINP now points at the terminating null link and
     OFFSET is the offset of the new slot; append an entry.  */
  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}
10825
10826void
10827aof_dump_pic_table (f)
62b10bbc 10828 FILE * f;
32de079a 10829{
62b10bbc 10830 struct pic_chain * chain;
32de079a
RE
10831
10832 if (aof_pic_chain == NULL)
10833 return;
10834
dd18ae56
NC
10835 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10836 PIC_OFFSET_TABLE_REGNUM,
10837 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
10838 fputs ("|x$adcons|\n", f);
10839
10840 for (chain = aof_pic_chain; chain; chain = chain->next)
10841 {
10842 fputs ("\tDCD\t", f);
10843 assemble_name (f, chain->symname);
10844 fputs ("\n", f);
10845 }
10846}
10847
2b835d68
RE
10848int arm_text_section_count = 1;
10849
10850char *
84ed5e79 10851aof_text_section ()
2b835d68
RE
10852{
10853 static char buf[100];
2b835d68
RE
10854 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10855 arm_text_section_count++);
10856 if (flag_pic)
10857 strcat (buf, ", PIC, REENTRANT");
10858 return buf;
10859}
10860
10861static int arm_data_section_count = 1;
10862
/* Return the AREA directive that opens a fresh, uniquely numbered
   data section.  The string lives in a static buffer overwritten on
   each call.  */
char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}
10870
10871/* The AOF assembler is religiously strict about declarations of
10872 imported and exported symbols, so that it is impossible to declare
956d6950 10873 a function as imported near the beginning of the file, and then to
2b835d68
RE
10874 export it later on. It is, however, possible to delay the decision
10875 until all the functions in the file have been compiled. To get
10876 around this, we maintain a list of the imports and exports, and
10877 delete from it any that are subsequently defined. At the end of
10878 compilation we spit the remainder of the list out before the END
10879 directive. */
10880
10881struct import
10882{
62b10bbc 10883 struct import * next;
5f37d07c 10884 const char * name;
2b835d68
RE
10885};
10886
62b10bbc 10887static struct import * imports_list = NULL;
2b835d68
RE
10888
10889void
10890aof_add_import (name)
5f37d07c 10891 const char * name;
2b835d68 10892{
62b10bbc 10893 struct import * new;
2b835d68
RE
10894
10895 for (new = imports_list; new; new = new->next)
10896 if (new->name == name)
10897 return;
10898
10899 new = (struct import *) xmalloc (sizeof (struct import));
10900 new->next = imports_list;
10901 imports_list = new;
10902 new->name = name;
10903}
10904
10905void
10906aof_delete_import (name)
5f37d07c 10907 const char * name;
2b835d68 10908{
62b10bbc 10909 struct import ** old;
2b835d68
RE
10910
10911 for (old = &imports_list; *old; old = & (*old)->next)
10912 {
10913 if ((*old)->name == name)
10914 {
10915 *old = (*old)->next;
10916 return;
10917 }
10918 }
10919}
10920
10921int arm_main_function = 0;
10922
10923void
10924aof_dump_imports (f)
62b10bbc 10925 FILE * f;
2b835d68
RE
10926{
10927 /* The AOF assembler needs this to cause the startup code to be extracted
10928 from the library. Brining in __main causes the whole thing to work
10929 automagically. */
10930 if (arm_main_function)
10931 {
10932 text_section ();
10933 fputs ("\tIMPORT __main\n", f);
10934 fputs ("\tDCD __main\n", f);
10935 }
10936
10937 /* Now dump the remaining imports. */
10938 while (imports_list)
10939 {
10940 fprintf (f, "\tIMPORT\t");
10941 assemble_name (f, imports_list->name);
10942 fputc ('\n', f);
10943 imports_list = imports_list->next;
10944 }
10945}
10946#endif /* AOF_ASSEMBLER */
7c262518 10947
#ifdef OBJECT_FORMAT_ELF
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the default elf version only in the prefix character
   used before the section type.  */

static void
arm_elf_asm_named_section (name, flags)
     const char *name;
     unsigned int flags;
{
  /* Room for all six possible flag characters plus the terminating
     NUL, with headroom for future flags.  */
  char flagchars[10], *f = flagchars;
  const char *type;

  /* Translate the SECTION_* bits into ELF section flag characters.  */
  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MERGE)
    *f++ = 'M';
  if (flags & SECTION_STRINGS)
    *f++ = 'S';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  /* FLAGS is unsigned, so the entity-size argument is unsigned int;
     use %u rather than %d to keep the conversion specifier matched
     to the argument type.  */
  if (flags & SECTION_ENTSIZE)
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%u\n",
	     name, flagchars, type, flags & SECTION_ENTSIZE);
  else
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
	     name, flagchars, type);
}
#endif
fb49053f
RH
10991
#ifndef ARM_PE
/* Symbols in the text segment can be accessed without indirecting via the
   constant pool; it may take an extra binary operation, but this is still
   faster than indirecting via memory.  Don't do this when not optimizing,
   since we won't be calculating all of the offsets necessary to do this
   simplification.  */

static void
arm_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* This doesn't work with AOF syntax, since the string table may be in
     a different AREA.  */
#ifndef AOF_ASSEMBLER
  /* Flag the symbol of a read-only constant so later code can address
     it PC-relatively instead of through the constant pool.  */
  if (optimize > 0 && TREE_CONSTANT (decl)
      && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
    {
      /* Declarations carry their RTL in DECL_RTL; other constant
	 nodes carry it in TREE_CST_RTL.  */
      rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
                 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
      SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
    }
#endif

  /* If we are referencing a function that is weak then encode a long call
     flag in the function name, otherwise if the function is static or
     known to be defined in this file then encode a short call flag.  */
  if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
    {
      if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
        arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
      else if (! TREE_PUBLIC (decl))
        arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
    }
}
#endif /* !ARM_PE */
This page took 2.838944 seconds and 5 git commands to generate.