]> gcc.gnu.org Git - gcc.git/blame - gcc/config/arm/arm.c
backport: MarshalledObject.java (equals): Check hashcode first.
[gcc.git] / gcc / config / arm / arm.c
CommitLineData
b36ba79f 1/* Output routines for GCC for ARM.
1d6e90ac 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
2398fb2a 3 Free Software Foundation, Inc.
cce8749e 4 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 5 and Martin Simmons (@harleqn.co.uk).
b36ba79f 6 More major hacks by Richard Earnshaw (rearnsha@arm.com).
cce8749e
CH
7
8This file is part of GNU CC.
9
10GNU CC is free software; you can redistribute it and/or modify
11it under the terms of the GNU General Public License as published by
12the Free Software Foundation; either version 2, or (at your option)
13any later version.
14
15GNU CC is distributed in the hope that it will be useful,
16but WITHOUT ANY WARRANTY; without even the implied warranty of
17MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18GNU General Public License for more details.
19
20You should have received a copy of the GNU General Public License
21along with GNU CC; see the file COPYING. If not, write to
8fb289e7
RK
22the Free Software Foundation, 59 Temple Place - Suite 330,
23Boston, MA 02111-1307, USA. */
ff9940b0 24
56636818 25#include "config.h"
43cffd11 26#include "system.h"
cce8749e 27#include "rtl.h"
d5b7b3ae 28#include "tree.h"
c7319d87 29#include "obstack.h"
cce8749e
CH
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "real.h"
33#include "insn-config.h"
34#include "conditions.h"
cce8749e
CH
35#include "output.h"
36#include "insn-attr.h"
37#include "flags.h"
af48348a 38#include "reload.h"
49ad7cfa 39#include "function.h"
bee06f3d 40#include "expr.h"
e78d8e51 41#include "optabs.h"
ad076f4e 42#include "toplev.h"
aec3cfba 43#include "recog.h"
92a432f4 44#include "ggc.h"
d5b7b3ae 45#include "except.h"
8b97c5f8 46#include "c-pragma.h"
7b8b8ade 47#include "integrate.h"
c27ba912 48#include "tm_p.h"
672a6f42
NB
49#include "target.h"
50#include "target-def.h"
cce8749e 51
d5b7b3ae
RE
52/* Forward definitions of types. */
53typedef struct minipool_node Mnode;
54typedef struct minipool_fixup Mfix;
55
56/* In order to improve the layout of the prototypes below
57 some short type abbreviations are defined here. */
58#define Hint HOST_WIDE_INT
59#define Mmode enum machine_mode
60#define Ulong unsigned long
6d3d9133 61#define Ccstar const char *
d5b7b3ae 62
1d6e90ac
NC
63const struct attribute_spec arm_attribute_table[];
64
d5b7b3ae
RE
65/* Forward function declarations. */
66static void arm_add_gc_roots PARAMS ((void));
67static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
d5b7b3ae
RE
68static Ulong bit_count PARAMS ((signed int));
69static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
70static int eliminate_lr2ip PARAMS ((rtx *));
71static rtx emit_multi_reg_push PARAMS ((int));
72static rtx emit_sfm PARAMS ((int, int));
301d03af 73#ifndef AOF_ASSEMBLER
1d6e90ac 74static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
301d03af 75#endif
6d3d9133 76static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
d5b7b3ae
RE
77static arm_cc get_arm_condition_code PARAMS ((rtx));
78static void init_fpa_table PARAMS ((void));
79static Hint int_log2 PARAMS ((Hint));
80static rtx is_jump_table PARAMS ((rtx));
6d3d9133
NC
81static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
82static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
d5b7b3ae 83static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
6d3d9133 84static Ccstar shift_op PARAMS ((rtx, Hint *));
e2500fed 85static struct machine_function * arm_init_machine_status PARAMS ((void));
d5b7b3ae
RE
86static int number_of_first_bit_set PARAMS ((int));
87static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
88static void thumb_exit PARAMS ((FILE *, int, rtx));
89static void thumb_pushpop PARAMS ((FILE *, int, int));
6d3d9133 90static Ccstar thumb_condition_code PARAMS ((rtx, int));
d5b7b3ae
RE
91static rtx is_jump_table PARAMS ((rtx));
92static Hint get_jump_table_size PARAMS ((rtx));
93static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
94static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
95static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
96static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
97static void assign_minipool_offsets PARAMS ((Mfix *));
98static void arm_print_value PARAMS ((FILE *, rtx));
99static void dump_minipool PARAMS ((rtx));
100static int arm_barrier_cost PARAMS ((rtx));
101static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
102static void push_minipool_barrier PARAMS ((rtx, Hint));
103static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
104static void note_invalid_constants PARAMS ((rtx, Hint));
87e27392 105static int current_file_function_operand PARAMS ((rtx));
1d6e90ac 106static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
6d3d9133
NC
107static Ulong arm_compute_save_reg_mask PARAMS ((void));
108static Ulong arm_isr_value PARAMS ((tree));
109static Ulong arm_compute_func_type PARAMS ((void));
1d6e90ac
NC
110static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
111static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
112static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
113static void arm_output_function_prologue PARAMS ((FILE *, Hint));
114static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
8d8e52be 115static int arm_comp_type_attributes PARAMS ((tree, tree));
1d6e90ac
NC
116static void arm_set_default_type_attributes PARAMS ((tree));
117static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
ab2877a3
KG
118static int count_insns_for_constant PARAMS ((HOST_WIDE_INT, int));
119static int arm_get_strip_length PARAMS ((int));
ebe413e5 120#ifdef OBJECT_FORMAT_ELF
1d6e90ac 121static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
ebe413e5 122#endif
fb49053f
RH
123#ifndef ARM_PE
124static void arm_encode_section_info PARAMS ((tree, int));
125#endif
5eb99654
KG
126#ifdef AOF_ASSEMBLER
127static void aof_globalize_label PARAMS ((FILE *, const char *));
128#endif
c590b625 129static void arm_output_mi_thunk PARAMS ((FILE *, tree,
3961e8fe 130 HOST_WIDE_INT,
c590b625 131 HOST_WIDE_INT, tree));
c237e94a 132
d5b7b3ae
RE
133#undef Hint
134#undef Mmode
135#undef Ulong
6d3d9133 136#undef Ccstar
672a6f42
NB
137\f
138/* Initialize the GCC target structure. */
139#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
1d6e90ac 140#undef TARGET_MERGE_DECL_ATTRIBUTES
672a6f42
NB
141#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
142#endif
f3bb6135 143
1d6e90ac 144#undef TARGET_ATTRIBUTE_TABLE
91d231cb 145#define TARGET_ATTRIBUTE_TABLE arm_attribute_table
672a6f42 146
301d03af 147#ifdef AOF_ASSEMBLER
1d6e90ac 148#undef TARGET_ASM_BYTE_OP
301d03af 149#define TARGET_ASM_BYTE_OP "\tDCB\t"
1d6e90ac 150#undef TARGET_ASM_ALIGNED_HI_OP
301d03af 151#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
1d6e90ac 152#undef TARGET_ASM_ALIGNED_SI_OP
301d03af 153#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
5eb99654
KG
154#undef TARGET_ASM_GLOBALIZE_LABEL
155#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
301d03af 156#else
1d6e90ac 157#undef TARGET_ASM_ALIGNED_SI_OP
301d03af 158#define TARGET_ASM_ALIGNED_SI_OP NULL
1d6e90ac 159#undef TARGET_ASM_INTEGER
301d03af
RS
160#define TARGET_ASM_INTEGER arm_assemble_integer
161#endif
162
1d6e90ac 163#undef TARGET_ASM_FUNCTION_PROLOGUE
08c148a8
NB
164#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue
165
1d6e90ac 166#undef TARGET_ASM_FUNCTION_EPILOGUE
08c148a8
NB
167#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue
168
1d6e90ac 169#undef TARGET_COMP_TYPE_ATTRIBUTES
8d8e52be
JM
170#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes
171
1d6e90ac 172#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
8d8e52be
JM
173#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes
174
1d6e90ac 175#undef TARGET_INIT_BUILTINS
f6155fda
SS
176#define TARGET_INIT_BUILTINS arm_init_builtins
177
1d6e90ac 178#undef TARGET_EXPAND_BUILTIN
f6155fda
SS
179#define TARGET_EXPAND_BUILTIN arm_expand_builtin
180
1d6e90ac 181#undef TARGET_SCHED_ADJUST_COST
c237e94a
ZW
182#define TARGET_SCHED_ADJUST_COST arm_adjust_cost
183
fb49053f
RH
184#undef TARGET_ENCODE_SECTION_INFO
185#ifdef ARM_PE
186#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
187#else
188#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
189#endif
190
772c5265
RH
191#undef TARGET_STRIP_NAME_ENCODING
192#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding
193
c590b625
RH
194#undef TARGET_ASM_OUTPUT_MI_THUNK
195#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
3961e8fe
RH
196#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
197#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
c590b625 198
f6897b10 199struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 200\f
c7319d87
RE
201/* Obstack for minipool constant handling. */
202static struct obstack minipool_obstack;
1d6e90ac 203static char * minipool_startobj;
c7319d87 204
1d6e90ac
NC
205/* The maximum number of insns skipped which
206 will be conditionalised if possible. */
c27ba912
DM
207static int max_insns_skipped = 5;
208
209extern FILE * asm_out_file;
210
6354dc9b 211/* True if we are currently building a constant table. */
13bd191d
PB
212int making_const_table;
213
60d0536b 214/* Define the information needed to generate branch insns. This is
6354dc9b 215 stored from the compare operation. */
ff9940b0 216rtx arm_compare_op0, arm_compare_op1;
ff9940b0 217
6354dc9b 218/* What type of floating point are we tuning for? */
bee06f3d
RE
219enum floating_point_type arm_fpu;
220
6354dc9b 221/* What type of floating point instructions are available? */
b111229a
RE
222enum floating_point_type arm_fpu_arch;
223
6354dc9b 224/* What program mode is the cpu running in? 26-bit mode or 32-bit mode. */
2b835d68
RE
225enum prog_mode_type arm_prgmode;
226
6354dc9b 227/* Set by the -mfp=... option. */
f9cc092a 228const char * target_fp_name = NULL;
2b835d68 229
b355a481 230/* Used to parse -mstructure_size_boundary command line option. */
f9cc092a 231const char * structure_size_string = NULL;
723ae7c1 232int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
b355a481 233
aec3cfba 234/* Bit values used to identify processor capabilities. */
62b10bbc
NC
235#define FL_CO_PROC (1 << 0) /* Has external co-processor bus */
236#define FL_FAST_MULT (1 << 1) /* Fast multiply */
237#define FL_MODE26 (1 << 2) /* 26-bit mode support */
238#define FL_MODE32 (1 << 3) /* 32-bit mode support */
239#define FL_ARCH4 (1 << 4) /* Architecture rel 4 */
240#define FL_ARCH5 (1 << 5) /* Architecture rel 5 */
241#define FL_THUMB (1 << 6) /* Thumb aware */
242#define FL_LDSCHED (1 << 7) /* Load scheduling necessary */
243#define FL_STRONG (1 << 8) /* StrongARM */
b15bca31 244#define FL_ARCH5E (1 << 9) /* DSP extenstions to v5 */
d19fb8e3 245#define FL_XSCALE (1 << 10) /* XScale */
aec3cfba 246
1d6e90ac
NC
247/* The bits in this mask specify which
248 instructions we are allowed to generate. */
aec3cfba 249static int insn_flags = 0;
d5b7b3ae 250
aec3cfba
NC
251/* The bits in this mask specify which instruction scheduling options should
252 be used. Note - there is an overlap with the FL_FAST_MULT. For some
253 hardware we want to be able to generate the multiply instructions, but to
254 tune as if they were not present in the architecture. */
255static int tune_flags = 0;
256
257/* The following are used in the arm.md file as equivalents to bits
258 in the above two flag variables. */
259
2b835d68
RE
260/* Nonzero if this is an "M" variant of the processor. */
261int arm_fast_multiply = 0;
262
6354dc9b 263/* Nonzero if this chip supports the ARM Architecture 4 extensions. */
2b835d68
RE
264int arm_arch4 = 0;
265
6354dc9b 266/* Nonzero if this chip supports the ARM Architecture 5 extensions. */
62b10bbc
NC
267int arm_arch5 = 0;
268
b15bca31
RE
269/* Nonzero if this chip supports the ARM Architecture 5E extensions. */
270int arm_arch5e = 0;
271
aec3cfba 272/* Nonzero if this chip can benefit from load scheduling. */
f5a1b0d2
NC
273int arm_ld_sched = 0;
274
275/* Nonzero if this chip is a StrongARM. */
276int arm_is_strong = 0;
277
d19fb8e3
NC
278/* Nonzero if this chip is an XScale. */
279int arm_is_xscale = 0;
280
3569057d 281/* Nonzero if this chip is an ARM6 or an ARM7. */
f5a1b0d2 282int arm_is_6_or_7 = 0;
b111229a 283
0616531f
RE
284/* Nonzero if generating Thumb instructions. */
285int thumb_code = 0;
286
cce8749e
CH
287/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
288 must report the mode of the memory reference from PRINT_OPERAND to
289 PRINT_OPERAND_ADDRESS. */
f3bb6135 290enum machine_mode output_memory_reference_mode;
cce8749e 291
32de079a 292/* The register number to be used for the PIC offset register. */
ed0e6530 293const char * arm_pic_register_string = NULL;
5b43fed1 294int arm_pic_register = INVALID_REGNUM;
32de079a 295
ff9940b0 296/* Set to 1 when a return insn is output, this means that the epilogue
6354dc9b 297 is not needed. */
d5b7b3ae 298int return_used_this_function;
ff9940b0 299
aec3cfba
NC
300/* Set to 1 after arm_reorg has started. Reset to start at the start of
301 the next function. */
4b632bf1
RE
302static int after_arm_reorg = 0;
303
aec3cfba 304/* The maximum number of insns to be used when loading a constant. */
2b835d68
RE
305static int arm_constant_limit = 3;
306
cce8749e
CH
307/* For an explanation of these variables, see final_prescan_insn below. */
308int arm_ccfsm_state;
84ed5e79 309enum arm_cond_code arm_current_cc;
cce8749e
CH
310rtx arm_target_insn;
311int arm_target_label;
9997d19d
RE
312
/* The condition codes of the ARM, and the inverse function.
   Indexed by enum arm_cond_code; the table is laid out so that the
   inverse of code N is code N ^ 1.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};
319
f5a1b0d2 320#define streq(string1, string2) (strcmp (string1, string2) == 0)
2b835d68 321\f
6354dc9b 322/* Initialization code. */
2b835d68 323
2b835d68
RE
324struct processors
325{
8b60264b
KG
326 const char *const name;
327 const unsigned int flags;
2b835d68
RE
328};
329
330/* Not all of these give usefully different compilation alternatives,
331 but there is no simple way of generalizing them. */
8b60264b 332static const struct processors all_cores[] =
f5a1b0d2
NC
333{
334 /* ARM Cores */
335
336 {"arm2", FL_CO_PROC | FL_MODE26 },
337 {"arm250", FL_CO_PROC | FL_MODE26 },
338 {"arm3", FL_CO_PROC | FL_MODE26 },
339 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
340 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
341 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
342 {"arm610", FL_MODE26 | FL_MODE32 },
343 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
949d79eb
RE
344 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
345 /* arm7m doesn't exist on its own, but only with D, (and I), but
d5b7b3ae 346 those don't alter the code, so arm7m is sometimes used. */
949d79eb
RE
347 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
348 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
349 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
350 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
f5a1b0d2
NC
351 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
352 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
353 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
354 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
355 {"arm710", FL_MODE26 | FL_MODE32 },
eab4abeb 356 {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
a120a3bd 357 {"arm720", FL_MODE26 | FL_MODE32 },
eab4abeb
NC
358 {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
359 {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
f5a1b0d2
NC
360 {"arm710c", FL_MODE26 | FL_MODE32 },
361 {"arm7100", FL_MODE26 | FL_MODE32 },
362 {"arm7500", FL_MODE26 | FL_MODE32 },
949d79eb
RE
363 /* Doesn't have an external co-proc, but does have embedded fpu. */
364 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
f5a1b0d2
NC
365 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
366 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
367 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
368 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
6cf32035
NC
369 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
370 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
eab4abeb 371 {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
f5a1b0d2 372 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
eab4abeb 373 {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
f5a1b0d2
NC
374 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
375 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
376 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
eab4abeb
NC
377 {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
378 {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
379 {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
380 {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
e26053d1 381
f5a1b0d2
NC
382 {NULL, 0}
383};
384
8b60264b 385static const struct processors all_architectures[] =
2b835d68 386{
f5a1b0d2
NC
387 /* ARM Architectures */
388
62b10bbc
NC
389 { "armv2", FL_CO_PROC | FL_MODE26 },
390 { "armv2a", FL_CO_PROC | FL_MODE26 },
391 { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
392 { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
949d79eb 393 { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
b111229a
RE
394 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
395 implementations that support it, so we will leave it out for now. */
62b10bbc
NC
396 { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
397 { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
d19fb8e3
NC
398 { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
399 { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
62b10bbc 400 { NULL, 0 }
f5a1b0d2
NC
401};
402
403/* This is a magic stucture. The 'string' field is magically filled in
404 with a pointer to the value specified by the user on the command line
405 assuming that the user has specified such a value. */
406
407struct arm_cpu_select arm_select[] =
408{
409 /* string name processors */
410 { NULL, "-mcpu=", all_cores },
411 { NULL, "-march=", all_architectures },
412 { NULL, "-mtune=", all_cores }
2b835d68
RE
413};
414
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;
  /* Work on an unsigned copy: the old "value & -value" trick negated a
     signed int, which is undefined behavior when value == INT_MIN.
     Unsigned arithmetic wraps, so "v & (v - 1)" is always well-defined
     and clears the least significant set bit each iteration.  */
  unsigned int v = (unsigned int) value;

  while (v)
    {
      v &= v - 1;
      ++count;
    }

  return count;
}
430
2b835d68
RE
431/* Fix up any incompatible options that the user has specified.
432 This has now turned into a maze. */
433void
434arm_override_options ()
435{
ed4c4348 436 unsigned i;
f5a1b0d2
NC
437
438 /* Set up the flags based on the cpu/architecture selected by the user. */
b6a1cbae 439 for (i = ARRAY_SIZE (arm_select); i--;)
bd9c7e23 440 {
f5a1b0d2
NC
441 struct arm_cpu_select * ptr = arm_select + i;
442
443 if (ptr->string != NULL && ptr->string[0] != '\0')
bd9c7e23 444 {
13bd191d 445 const struct processors * sel;
bd9c7e23 446
5895f793 447 for (sel = ptr->processors; sel->name != NULL; sel++)
f5a1b0d2 448 if (streq (ptr->string, sel->name))
bd9c7e23 449 {
aec3cfba
NC
450 if (i == 2)
451 tune_flags = sel->flags;
452 else
b111229a 453 {
aec3cfba
NC
454 /* If we have been given an architecture and a processor
455 make sure that they are compatible. We only generate
456 a warning though, and we prefer the CPU over the
6354dc9b 457 architecture. */
aec3cfba 458 if (insn_flags != 0 && (insn_flags ^ sel->flags))
6cf32035 459 warning ("switch -mcpu=%s conflicts with -march= switch",
aec3cfba
NC
460 ptr->string);
461
462 insn_flags = sel->flags;
b111229a 463 }
f5a1b0d2 464
bd9c7e23
RE
465 break;
466 }
467
468 if (sel->name == NULL)
469 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
470 }
471 }
aec3cfba 472
f5a1b0d2 473 /* If the user did not specify a processor, choose one for them. */
aec3cfba 474 if (insn_flags == 0)
f5a1b0d2 475 {
8b60264b 476 const struct processors * sel;
aec3cfba 477 unsigned int sought;
8b60264b 478 static const struct cpu_default
aec3cfba 479 {
8b60264b
KG
480 const int cpu;
481 const char *const name;
aec3cfba
NC
482 }
483 cpu_defaults[] =
484 {
485 { TARGET_CPU_arm2, "arm2" },
486 { TARGET_CPU_arm6, "arm6" },
487 { TARGET_CPU_arm610, "arm610" },
2aa0c933 488 { TARGET_CPU_arm710, "arm710" },
aec3cfba
NC
489 { TARGET_CPU_arm7m, "arm7m" },
490 { TARGET_CPU_arm7500fe, "arm7500fe" },
491 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
492 { TARGET_CPU_arm8, "arm8" },
493 { TARGET_CPU_arm810, "arm810" },
494 { TARGET_CPU_arm9, "arm9" },
495 { TARGET_CPU_strongarm, "strongarm" },
d19fb8e3 496 { TARGET_CPU_xscale, "xscale" },
aec3cfba
NC
497 { TARGET_CPU_generic, "arm" },
498 { 0, 0 }
499 };
8b60264b 500 const struct cpu_default * def;
aec3cfba
NC
501
502 /* Find the default. */
5895f793 503 for (def = cpu_defaults; def->name; def++)
aec3cfba
NC
504 if (def->cpu == TARGET_CPU_DEFAULT)
505 break;
506
507 /* Make sure we found the default CPU. */
508 if (def->name == NULL)
509 abort ();
510
511 /* Find the default CPU's flags. */
5895f793 512 for (sel = all_cores; sel->name != NULL; sel++)
aec3cfba
NC
513 if (streq (def->name, sel->name))
514 break;
515
516 if (sel->name == NULL)
517 abort ();
518
519 insn_flags = sel->flags;
520
521 /* Now check to see if the user has specified some command line
522 switch that require certain abilities from the cpu. */
523 sought = 0;
f5a1b0d2 524
d5b7b3ae 525 if (TARGET_INTERWORK || TARGET_THUMB)
f5a1b0d2 526 {
aec3cfba
NC
527 sought |= (FL_THUMB | FL_MODE32);
528
529 /* Force apcs-32 to be used for interworking. */
f5a1b0d2 530 target_flags |= ARM_FLAG_APCS_32;
aec3cfba 531
d5b7b3ae 532 /* There are no ARM processors that support both APCS-26 and
aec3cfba
NC
533 interworking. Therefore we force FL_MODE26 to be removed
534 from insn_flags here (if it was set), so that the search
535 below will always be able to find a compatible processor. */
5895f793 536 insn_flags &= ~FL_MODE26;
f5a1b0d2 537 }
5895f793 538 else if (!TARGET_APCS_32)
f5a1b0d2 539 sought |= FL_MODE26;
d5b7b3ae 540
aec3cfba 541 if (sought != 0 && ((sought & insn_flags) != sought))
f5a1b0d2 542 {
aec3cfba
NC
543 /* Try to locate a CPU type that supports all of the abilities
544 of the default CPU, plus the extra abilities requested by
545 the user. */
5895f793 546 for (sel = all_cores; sel->name != NULL; sel++)
aec3cfba 547 if ((sel->flags & sought) == (sought | insn_flags))
f5a1b0d2
NC
548 break;
549
550 if (sel->name == NULL)
aec3cfba
NC
551 {
552 unsigned int current_bit_count = 0;
8b60264b 553 const struct processors * best_fit = NULL;
aec3cfba
NC
554
555 /* Ideally we would like to issue an error message here
556 saying that it was not possible to find a CPU compatible
557 with the default CPU, but which also supports the command
558 line options specified by the programmer, and so they
559 ought to use the -mcpu=<name> command line option to
560 override the default CPU type.
561
562 Unfortunately this does not work with multilibing. We
563 need to be able to support multilibs for -mapcs-26 and for
564 -mthumb-interwork and there is no CPU that can support both
565 options. Instead if we cannot find a cpu that has both the
566 characteristics of the default cpu and the given command line
567 options we scan the array again looking for a best match. */
5895f793 568 for (sel = all_cores; sel->name != NULL; sel++)
aec3cfba
NC
569 if ((sel->flags & sought) == sought)
570 {
571 unsigned int count;
572
573 count = bit_count (sel->flags & insn_flags);
574
575 if (count >= current_bit_count)
576 {
577 best_fit = sel;
578 current_bit_count = count;
579 }
580 }
f5a1b0d2 581
aec3cfba
NC
582 if (best_fit == NULL)
583 abort ();
584 else
585 sel = best_fit;
586 }
587
588 insn_flags = sel->flags;
f5a1b0d2
NC
589 }
590 }
aec3cfba
NC
591
592 /* If tuning has not been specified, tune for whichever processor or
593 architecture has been selected. */
594 if (tune_flags == 0)
595 tune_flags = insn_flags;
e26053d1 596
f5a1b0d2
NC
597 /* Make sure that the processor choice does not conflict with any of the
598 other command line choices. */
aec3cfba 599 if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
f5a1b0d2 600 {
aec3cfba
NC
601 /* If APCS-32 was not the default then it must have been set by the
602 user, so issue a warning message. If the user has specified
603 "-mapcs-32 -mcpu=arm2" then we loose here. */
604 if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
605 warning ("target CPU does not support APCS-32" );
5895f793 606 target_flags &= ~ARM_FLAG_APCS_32;
f5a1b0d2 607 }
5895f793 608 else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
f5a1b0d2
NC
609 {
610 warning ("target CPU does not support APCS-26" );
611 target_flags |= ARM_FLAG_APCS_32;
612 }
613
6cfc7210 614 if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
f5a1b0d2
NC
615 {
616 warning ("target CPU does not support interworking" );
6cfc7210 617 target_flags &= ~ARM_FLAG_INTERWORK;
f5a1b0d2
NC
618 }
619
d5b7b3ae
RE
620 if (TARGET_THUMB && !(insn_flags & FL_THUMB))
621 {
c725bd79 622 warning ("target CPU does not support THUMB instructions");
d5b7b3ae
RE
623 target_flags &= ~ARM_FLAG_THUMB;
624 }
625
626 if (TARGET_APCS_FRAME && TARGET_THUMB)
627 {
c725bd79 628 /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
d5b7b3ae
RE
629 target_flags &= ~ARM_FLAG_APCS_FRAME;
630 }
d19fb8e3 631
d5b7b3ae
RE
632 /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
633 from here where no function is being compiled currently. */
634 if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
635 && TARGET_ARM)
c725bd79 636 warning ("enabling backtrace support is only meaningful when compiling for the Thumb");
d5b7b3ae
RE
637
638 if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
c725bd79 639 warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");
d5b7b3ae
RE
640
641 if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
c725bd79 642 warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");
d5b7b3ae 643
f5a1b0d2 644 /* If interworking is enabled then APCS-32 must be selected as well. */
6cfc7210 645 if (TARGET_INTERWORK)
f5a1b0d2 646 {
5895f793 647 if (!TARGET_APCS_32)
f5a1b0d2
NC
648 warning ("interworking forces APCS-32 to be used" );
649 target_flags |= ARM_FLAG_APCS_32;
650 }
651
5895f793 652 if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
f5a1b0d2
NC
653 {
654 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
655 target_flags |= ARM_FLAG_APCS_FRAME;
656 }
aec3cfba 657
2b835d68
RE
658 if (TARGET_POKE_FUNCTION_NAME)
659 target_flags |= ARM_FLAG_APCS_FRAME;
aec3cfba 660
2b835d68 661 if (TARGET_APCS_REENT && flag_pic)
400500c4 662 error ("-fpic and -mapcs-reent are incompatible");
aec3cfba 663
2b835d68 664 if (TARGET_APCS_REENT)
f5a1b0d2 665 warning ("APCS reentrant code not supported. Ignored");
aec3cfba 666
d5b7b3ae
RE
667 /* If this target is normally configured to use APCS frames, warn if they
668 are turned off and debugging is turned on. */
669 if (TARGET_ARM
670 && write_symbols != NO_DEBUG
5895f793 671 && !TARGET_APCS_FRAME
d5b7b3ae
RE
672 && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
673 warning ("-g with -mno-apcs-frame may not give sensible debugging");
6cfc7210 674
32de079a
RE
675 /* If stack checking is disabled, we can use r10 as the PIC register,
676 which keeps r9 available. */
5b43fed1
RH
677 if (flag_pic)
678 arm_pic_register = TARGET_APCS_STACK ? 9 : 10;
aec3cfba 679
2b835d68 680 if (TARGET_APCS_FLOAT)
c725bd79 681 warning ("passing floating point arguments in fp regs not yet supported");
f5a1b0d2 682
4912a07c 683 /* Initialize boolean versions of the flags, for use in the arm.md file. */
2ca12935
JL
684 arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
685 arm_arch4 = (insn_flags & FL_ARCH4) != 0;
686 arm_arch5 = (insn_flags & FL_ARCH5) != 0;
b15bca31 687 arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
a67ded0f 688 arm_is_xscale = (insn_flags & FL_XSCALE) != 0;
6f7ebcbb 689
2ca12935
JL
690 arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
691 arm_is_strong = (tune_flags & FL_STRONG) != 0;
0616531f 692 thumb_code = (TARGET_ARM == 0);
d5b7b3ae
RE
693 arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
694 && !(tune_flags & FL_ARCH4))) != 0;
6f7ebcbb 695
bd9c7e23
RE
696 /* Default value for floating point code... if no co-processor
697 bus, then schedule for emulated floating point. Otherwise,
b111229a
RE
698 assume the user has an FPA.
699 Note: this does not prevent use of floating point instructions,
700 -msoft-float does that. */
aec3cfba 701 arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;
f5a1b0d2 702
b111229a 703 if (target_fp_name)
2b835d68 704 {
f5a1b0d2 705 if (streq (target_fp_name, "2"))
b111229a 706 arm_fpu_arch = FP_SOFT2;
f5a1b0d2
NC
707 else if (streq (target_fp_name, "3"))
708 arm_fpu_arch = FP_SOFT3;
2b835d68 709 else
c725bd79 710 error ("invalid floating point emulation option: -mfpe-%s",
b111229a 711 target_fp_name);
2b835d68 712 }
b111229a
RE
713 else
714 arm_fpu_arch = FP_DEFAULT;
f5a1b0d2
NC
715
716 if (TARGET_FPE && arm_fpu != FP_HARD)
717 arm_fpu = FP_SOFT2;
aec3cfba 718
f5a1b0d2
NC
719 /* For arm2/3 there is no need to do any scheduling if there is only
720 a floating point emulator, or we are doing software floating-point. */
ed0e6530
PB
721 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
722 && (tune_flags & FL_MODE32) == 0)
f5a1b0d2 723 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
aec3cfba 724
cd2b33d0 725 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
b355a481
NC
726
727 if (structure_size_string != NULL)
728 {
729 int size = strtol (structure_size_string, NULL, 0);
730
731 if (size == 8 || size == 32)
732 arm_structure_size_boundary = size;
733 else
c725bd79 734 warning ("structure size boundary can only be set to 8 or 32");
b355a481 735 }
ed0e6530
PB
736
737 if (arm_pic_register_string != NULL)
738 {
5b43fed1 739 int pic_register = decode_reg_name (arm_pic_register_string);
e26053d1 740
5895f793 741 if (!flag_pic)
ed0e6530
PB
742 warning ("-mpic-register= is useless without -fpic");
743
ed0e6530 744 /* Prevent the user from choosing an obviously stupid PIC register. */
5b43fed1
RH
745 else if (pic_register < 0 || call_used_regs[pic_register]
746 || pic_register == HARD_FRAME_POINTER_REGNUM
747 || pic_register == STACK_POINTER_REGNUM
748 || pic_register >= PC_REGNUM)
c725bd79 749 error ("unable to use '%s' for PIC register", arm_pic_register_string);
ed0e6530
PB
750 else
751 arm_pic_register = pic_register;
752 }
d5b7b3ae
RE
753
754 if (TARGET_THUMB && flag_schedule_insns)
755 {
756 /* Don't warn since it's on by default in -O2. */
757 flag_schedule_insns = 0;
758 }
759
f5a1b0d2
NC
760 /* If optimizing for space, don't synthesize constants.
761 For processors with load scheduling, it never costs more than 2 cycles
762 to load a constant, and the load scheduler may well reduce that to 1. */
aec3cfba 763 if (optimize_size || (tune_flags & FL_LDSCHED))
f5a1b0d2 764 arm_constant_limit = 1;
aec3cfba 765
d19fb8e3
NC
766 if (arm_is_xscale)
767 arm_constant_limit = 2;
768
f5a1b0d2
NC
769 /* If optimizing for size, bump the number of instructions that we
770 are prepared to conditionally execute (even on a StrongARM).
771 Otherwise for the StrongARM, which has early execution of branches,
772 a sequence that is worth skipping is shorter. */
773 if (optimize_size)
774 max_insns_skipped = 6;
775 else if (arm_is_strong)
776 max_insns_skipped = 3;
92a432f4
RE
777
778 /* Register global variables with the garbage collector. */
779 arm_add_gc_roots ();
780}
781
782static void
783arm_add_gc_roots ()
784{
c7319d87
RE
785 gcc_obstack_init(&minipool_obstack);
786 minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
2b835d68 787}
cce8749e 788\f
6d3d9133
NC
789/* A table of known ARM exception types.
790 For use with the interrupt function attribute. */
791
792typedef struct
793{
8b60264b
KG
794 const char *const arg;
795 const unsigned long return_value;
6d3d9133
NC
796}
797isr_attribute_arg;
798
8b60264b 799static const isr_attribute_arg isr_attribute_args [] =
6d3d9133
NC
800{
801 { "IRQ", ARM_FT_ISR },
802 { "irq", ARM_FT_ISR },
803 { "FIQ", ARM_FT_FIQ },
804 { "fiq", ARM_FT_FIQ },
805 { "ABORT", ARM_FT_ISR },
806 { "abort", ARM_FT_ISR },
807 { "ABORT", ARM_FT_ISR },
808 { "abort", ARM_FT_ISR },
809 { "UNDEF", ARM_FT_EXCEPTION },
810 { "undef", ARM_FT_EXCEPTION },
811 { "SWI", ARM_FT_EXCEPTION },
812 { "swi", ARM_FT_EXCEPTION },
813 { NULL, ARM_FT_NORMAL }
814};
815
816/* Returns the (interrupt) function type of the current
817 function, or ARM_FT_UNKNOWN if the type cannot be determined. */
818
819static unsigned long
820arm_isr_value (argument)
821 tree argument;
822{
8b60264b 823 const isr_attribute_arg * ptr;
1d6e90ac 824 const char * arg;
6d3d9133
NC
825
826 /* No argument - default to IRQ. */
827 if (argument == NULL_TREE)
828 return ARM_FT_ISR;
829
830 /* Get the value of the argument. */
831 if (TREE_VALUE (argument) == NULL_TREE
832 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
833 return ARM_FT_UNKNOWN;
834
835 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
836
837 /* Check it against the list of known arguments. */
838 for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
1d6e90ac
NC
839 if (streq (arg, ptr->arg))
840 return ptr->return_value;
6d3d9133 841
05713b80 842 /* An unrecognized interrupt type. */
6d3d9133
NC
843 return ARM_FT_UNKNOWN;
844}
845
846/* Computes the type of the current function. */
847
848static unsigned long
849arm_compute_func_type ()
850{
851 unsigned long type = ARM_FT_UNKNOWN;
852 tree a;
853 tree attr;
854
855 if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
856 abort ();
857
858 /* Decide if the current function is volatile. Such functions
859 never return, and many memory cycles can be saved by not storing
860 register values that will never be needed again. This optimization
861 was added to speed up context switching in a kernel application. */
862 if (optimize > 0
863 && current_function_nothrow
864 && TREE_THIS_VOLATILE (current_function_decl))
865 type |= ARM_FT_VOLATILE;
866
867 if (current_function_needs_context)
868 type |= ARM_FT_NESTED;
869
91d231cb 870 attr = DECL_ATTRIBUTES (current_function_decl);
6d3d9133
NC
871
872 a = lookup_attribute ("naked", attr);
873 if (a != NULL_TREE)
874 type |= ARM_FT_NAKED;
875
876 if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
877 type |= ARM_FT_EXCEPTION_HANDLER;
878 else
879 {
880 a = lookup_attribute ("isr", attr);
881 if (a == NULL_TREE)
882 a = lookup_attribute ("interrupt", attr);
883
884 if (a == NULL_TREE)
885 type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
886 else
887 type |= arm_isr_value (TREE_VALUE (a));
888 }
889
890 return type;
891}
892
893/* Returns the type of the current function. */
894
895unsigned long
896arm_current_func_type ()
897{
898 if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
899 cfun->machine->func_type = arm_compute_func_type ();
900
901 return cfun->machine->func_type;
902}
903\f
6354dc9b 904/* Return 1 if it is possible to return using a single instruction. */
6d3d9133 905
ff9940b0 906int
b36ba79f
RE
907use_return_insn (iscond)
908 int iscond;
ff9940b0
RE
909{
910 int regno;
9b598fa0 911 unsigned int func_type;
d5db54a1 912 unsigned long saved_int_regs;
ff9940b0 913
d5b7b3ae 914 /* Never use a return instruction before reload has run. */
6d3d9133
NC
915 if (!reload_completed)
916 return 0;
917
9b598fa0
RE
918 func_type = arm_current_func_type ();
919
3a7731fd
PB
920 /* Naked functions and volatile functions need special
921 consideration. */
922 if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
6d3d9133
NC
923 return 0;
924
925 /* As do variadic functions. */
926 if (current_function_pretend_args_size
3cb66fd7 927 || cfun->machine->uses_anonymous_args
d5b7b3ae 928 /* Of if the function calls __builtin_eh_return () */
6d3d9133 929 || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
d5b7b3ae 930 /* Or if there is no frame pointer and there is a stack adjustment. */
56636818 931 || ((get_frame_size () + current_function_outgoing_args_size != 0)
5895f793 932 && !frame_pointer_needed))
ff9940b0
RE
933 return 0;
934
d5db54a1
RE
935 saved_int_regs = arm_compute_save_reg_mask ();
936
b111229a 937 /* Can't be done if interworking with Thumb, and any registers have been
d5db54a1
RE
938 stacked. */
939 if (TARGET_INTERWORK && saved_int_regs != 0)
b36ba79f 940 return 0;
d5db54a1
RE
941
942 /* On StrongARM, conditional returns are expensive if they aren't
943 taken and multiple registers have been stacked. */
944 if (iscond && arm_is_strong)
6ed30148 945 {
d5db54a1
RE
946 /* Conditional return when just the LR is stored is a simple
947 conditional-load instruction, that's not expensive. */
948 if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
949 return 0;
6ed30148
RE
950
951 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
b111229a 952 return 0;
6ed30148 953 }
d5db54a1
RE
954
955 /* If there are saved registers but the LR isn't saved, then we need
956 two instructions for the return. */
957 if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
958 return 0;
959
6d3d9133
NC
960 /* Can't be done if any of the FPU regs are pushed,
961 since this also requires an insn. */
d5b7b3ae
RE
962 if (TARGET_HARD_FLOAT)
963 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
5895f793 964 if (regs_ever_live[regno] && !call_used_regs[regno])
d5b7b3ae 965 return 0;
ff9940b0
RE
966
967 return 1;
968}
969
cce8749e
CH
970/* Return TRUE if int I is a valid immediate ARM constant. */
971
972int
973const_ok_for_arm (i)
ff9940b0 974 HOST_WIDE_INT i;
cce8749e 975{
30cf4896 976 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 977
56636818
JL
978 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
979 be all zero, or all one. */
30cf4896
KG
980 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
981 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
982 != ((~(unsigned HOST_WIDE_INT) 0)
983 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
56636818
JL
984 return FALSE;
985
e2c671ba
RE
986 /* Fast return for 0 and powers of 2 */
987 if ((i & (i - 1)) == 0)
988 return TRUE;
989
cce8749e
CH
990 do
991 {
30cf4896 992 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
f3bb6135 993 return TRUE;
abaa26e5 994 mask =
30cf4896
KG
995 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
996 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
ebe413e5
NC
997 }
998 while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 999
f3bb6135
RE
1000 return FALSE;
1001}
cce8749e 1002
6354dc9b 1003/* Return true if I is a valid constant for the operation CODE. */
74bbc178
NC
1004static int
1005const_ok_for_op (i, code)
e2c671ba
RE
1006 HOST_WIDE_INT i;
1007 enum rtx_code code;
e2c671ba
RE
1008{
1009 if (const_ok_for_arm (i))
1010 return 1;
1011
1012 switch (code)
1013 {
1014 case PLUS:
1015 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
1016
1017 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
1018 case XOR:
1019 case IOR:
1020 return 0;
1021
1022 case AND:
1023 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
1024
1025 default:
1026 abort ();
1027 }
1028}
1029
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  /* Only consider pushing the constant into the constant pool when we
     are allowed scratch registers, or when synthesizing in-line could
     not reuse TARGET as an intermediate (SET, or distinct hard regs).  */
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesize
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      /* First count the cost (a dry run with GENERATE clear); only fall
	 back to a constant-pool load when the in-line sequence would
	 exceed the per-CPU limit.  */
      if (!after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are diadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      /* Load the constant (from the pool) into a temporary,
		 then apply the operation.  */
	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx_MINUS (mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  /* Synthesize the constant in-line.  */
  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
1093
ceebdb09 1094static int
ab2877a3
KG
1095count_insns_for_constant (remainder, i)
1096 HOST_WIDE_INT remainder;
1097 int i;
ceebdb09
PB
1098{
1099 HOST_WIDE_INT temp1;
1100 int num_insns = 0;
1101 do
1102 {
1103 int end;
1104
1105 if (i <= 0)
1106 i += 32;
1107 if (remainder & (3 << (i - 2)))
1108 {
1109 end = i - 8;
1110 if (end < 0)
1111 end += 32;
1112 temp1 = remainder & ((0x0ff << end)
1113 | ((i < end) ? (0xff >> (32 - end)) : 0));
1114 remainder &= ~temp1;
1115 num_insns++;
1116 i -= 6;
1117 }
1118 i -= 2;
1119 } while (remainder);
1120 return num_insns;
1121}
1122
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  This allows the routine to be used purely as a cost
   estimator (see arm_split_constant).  The return value is always the
   number of instructions the synthesis would take.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  /* Only the low 32 bits participate in the synthesis.  */
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      /* IOR with all-ones is just a load of all-ones.  */
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      /* IOR with zero is a plain move (or nothing at all).  */
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      /* AND with zero is a load of zero.  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      /* AND with all-ones is a plain move (or nothing at all).  */
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      /* XOR with zero is a plain move (or nothing at all).  */
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      /* XOR with all-ones is a bitwise NOT.  */
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  /* Count the number of leading zero bits.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  /* Count the number of leading one bits.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  /* Count the number of trailing zero bits.  */
  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  /* Count the number of trailing one bits.  */
  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good, way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      /* Does the low half, shifted left by I and IORed with
		 itself, reproduce the full constant?  */
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && !const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      /* Likewise, with the high half shifted right.  */
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && !const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0 */
	  || (reload_completed && !reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      /* IOR of a constant that is all ones in the top bits: NOT the
	 shifted-up source, then NOT the result shifted back down.  */
      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      /* Likewise for a constant that is all ones in the bottom bits.  */
      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}

      /* IOR with an invertible constant: NOT, AND with the inverted
	 constant, NOT again — three instructions.  */
      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      /* Clear the top bits by shifting up and back down.  */
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      /* Clear the bottom bits by shifting down and back up.  */
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }

  /* Count the bits that are set in the constant.  */
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  /* If most bits are set, it is cheaper to synthesize the complement
     (or, for PLUS, the negation) and account for it in the emitted
     instructions (BIC/MVN forms, or SUBs).  */
  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (!(remainder & (3 << i)))
	  {
	    while ((i < 32) && !(remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

	       *((volatile int *)0xe0000100) = 1;
	       *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

		mov rA, #0xe0000000
		mov rB, #1
		str rB, [rA, #0x100]
		mov rB, #2
		str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (ie with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
	&& ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
	&& (count_insns_for_constant (remainder, 0) <=
	    count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    /* Extract the (possibly wrapping) eight-bit chunk ending
	       at bit I and remove it from the remaining work.  */
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src, temp1_rtx;

		if (code == SET || code == MINUS)
		  {
		    new_src = (subtargets ? gen_reg_rtx (mode) : target);
		    if (can_invert && code != MINUS)
		      temp1 = ~temp1;
		  }
		else
		  {
		    if (remainder && subtargets)
		      new_src = gen_reg_rtx (mode);
		    else
		      new_src = target;
		    if (can_invert)
		      temp1 = ~temp1;
		    else if (can_negate)
		      temp1 = -temp1;
		  }

		temp1 = trunc_int_for_mode (temp1, mode);
		temp1_rtx = GEN_INT (temp1);

		if (code == SET)
		  ;
		else if (code == MINUS)
		  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
		else
		  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

		emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
		source = new_src;
	      }

	    /* After the first chunk, subsequent chunks combine with
	       the intermediate result using PLUS.  */
	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      }
    while (remainder);
  }

  return insns;
}
1703
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  CODE is the comparison and *OP1 the
   constant operand; *OP1 may be replaced by an adjacent value and the
   corresponding adjusted comparison code is returned.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      /* Equality comparisons cannot be usefully adjusted.  */
      return code;

    case GT:
    case LE:
      /* (GT x C) == (GE x C+1); valid unless C+1 would overflow the
	 signed maximum, and only worthwhile if C+1 (or its negation)
	 is directly loadable.  */
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      /* (GE x C) == (GT x C-1); valid unless C-1 would underflow the
	 signed minimum.  */
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      /* (GTU x C) == (GEU x C+1); valid unless C is the unsigned
	 maximum.  */
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      /* (GEU x C) == (GTU x C-1); valid unless C is zero.  */
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
bd9c7e23 1767
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
	 larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non bit-field element
	 and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (!DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
1875
3717da94
JT
1876/* Indicate whether or not words of a double are in big-endian order. */
1877
1878int
1879arm_float_words_big_endian ()
1880{
1881
1882 /* For FPA, float words are always big-endian. For VFP, floats words
1883 follow the memory system mode. */
1884
1885 if (TARGET_HARD_FLOAT)
1886 {
1887 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
1888 return 1;
1889 }
1890
1891 if (TARGET_VFP)
1892 return (TARGET_BIG_END ? 1 : 0);
1893
1894 return 1;
1895}
1896
82e9d970
PB
1897/* Initialize a variable CUM of type CUMULATIVE_ARGS
1898 for a call to a function whose data type is FNTYPE.
1899 For a library call, FNTYPE is NULL. */
1900void
1901arm_init_cumulative_args (pcum, fntype, libname, indirect)
1902 CUMULATIVE_ARGS * pcum;
1903 tree fntype;
1904 rtx libname ATTRIBUTE_UNUSED;
1905 int indirect ATTRIBUTE_UNUSED;
1906{
1907 /* On the ARM, the offset starts at 0. */
c27ba912
DM
1908 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1909
82e9d970
PB
1910 pcum->call_cookie = CALL_NORMAL;
1911
1912 if (TARGET_LONG_CALLS)
1913 pcum->call_cookie = CALL_LONG;
1914
1915 /* Check for long call/short call attributes. The attributes
1916 override any command line option. */
1917 if (fntype)
1918 {
1919 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1920 pcum->call_cookie = CALL_SHORT;
1921 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1922 pcum->call_cookie = CALL_LONG;
1923 }
1924}
1925
1926/* Determine where to put an argument to a function.
1927 Value is zero to push the argument on the stack,
1928 or a hard register in which to store the argument.
1929
1930 MODE is the argument's machine mode.
1931 TYPE is the data type of the argument (as a tree).
1932 This is null for libcalls where that information may
1933 not be available.
1934 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1935 the preceding args and about the function being called.
1936 NAMED is nonzero if this argument is a named parameter
1937 (otherwise it is an extra parameter matching an ellipsis). */
1d6e90ac 1938
82e9d970
PB
1939rtx
1940arm_function_arg (pcum, mode, type, named)
1941 CUMULATIVE_ARGS * pcum;
1942 enum machine_mode mode;
1943 tree type ATTRIBUTE_UNUSED;
1944 int named;
1945{
1946 if (mode == VOIDmode)
1947 /* Compute operand 2 of the call insn. */
1948 return GEN_INT (pcum->call_cookie);
1949
5895f793 1950 if (!named || pcum->nregs >= NUM_ARG_REGS)
82e9d970
PB
1951 return NULL_RTX;
1952
1953 return gen_rtx_REG (mode, pcum->nregs);
1954}
82e9d970 1955\f
c27ba912
DM
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,		/* No #pragma [no_]long_calls is in effect.  */
  LONG,		/* #pragma long_calls is in effect.  */
  SHORT		/* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;
82e9d970 1965
8b97c5f8
ZW
1966void
1967arm_pr_long_calls (pfile)
1d6e90ac 1968 cpp_reader * pfile ATTRIBUTE_UNUSED;
82e9d970 1969{
8b97c5f8
ZW
1970 arm_pragma_long_calls = LONG;
1971}
1972
1973void
1974arm_pr_no_long_calls (pfile)
1d6e90ac 1975 cpp_reader * pfile ATTRIBUTE_UNUSED;
8b97c5f8
ZW
1976{
1977 arm_pragma_long_calls = SHORT;
1978}
1979
1980void
1981arm_pr_long_calls_off (pfile)
1d6e90ac 1982 cpp_reader * pfile ATTRIBUTE_UNUSED;
8b97c5f8
ZW
1983{
1984 arm_pragma_long_calls = OFF;
82e9d970
PB
1985}
1986\f
91d231cb
JM
1987/* Table of machine attributes. */
1988const struct attribute_spec arm_attribute_table[] =
82e9d970 1989{
91d231cb 1990 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
82e9d970
PB
1991 /* Function calls made to this symbol must be done indirectly, because
1992 it may lie outside of the 26 bit addressing range of a normal function
1993 call. */
91d231cb 1994 { "long_call", 0, 0, false, true, true, NULL },
82e9d970
PB
1995 /* Whereas these functions are always known to reside within the 26 bit
1996 addressing range. */
91d231cb 1997 { "short_call", 0, 0, false, true, true, NULL },
6d3d9133 1998 /* Interrupt Service Routines have special prologue and epilogue requirements. */
91d231cb
JM
1999 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2000 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2001 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2002#ifdef ARM_PE
2003 /* ARM/PE has three new attributes:
2004 interfacearm - ?
2005 dllexport - for exporting a function/variable that will live in a dll
2006 dllimport - for importing a function/variable from a dll
2007
2008 Microsoft allows multiple declspecs in one __declspec, separating
2009 them with spaces. We do NOT support this. Instead, use __declspec
2010 multiple times.
2011 */
2012 { "dllimport", 0, 0, true, false, false, NULL },
2013 { "dllexport", 0, 0, true, false, false, NULL },
2014 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2015#endif
2016 { NULL, 0, 0, false, false, false, NULL }
2017};
6d3d9133 2018
91d231cb
JM
2019/* Handle an attribute requiring a FUNCTION_DECL;
2020 arguments as in struct attribute_spec.handler. */
1d6e90ac 2021
91d231cb
JM
2022static tree
2023arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
1d6e90ac
NC
2024 tree * node;
2025 tree name;
2026 tree args ATTRIBUTE_UNUSED;
2027 int flags ATTRIBUTE_UNUSED;
2028 bool * no_add_attrs;
91d231cb
JM
2029{
2030 if (TREE_CODE (*node) != FUNCTION_DECL)
2031 {
2032 warning ("`%s' attribute only applies to functions",
2033 IDENTIFIER_POINTER (name));
2034 *no_add_attrs = true;
2035 }
2036
2037 return NULL_TREE;
2038}
2039
2040/* Handle an "interrupt" or "isr" attribute;
2041 arguments as in struct attribute_spec.handler. */
1d6e90ac 2042
91d231cb
JM
2043static tree
2044arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
1d6e90ac
NC
2045 tree * node;
2046 tree name;
2047 tree args;
2048 int flags;
2049 bool * no_add_attrs;
91d231cb
JM
2050{
2051 if (DECL_P (*node))
2052 {
2053 if (TREE_CODE (*node) != FUNCTION_DECL)
2054 {
2055 warning ("`%s' attribute only applies to functions",
2056 IDENTIFIER_POINTER (name));
2057 *no_add_attrs = true;
2058 }
2059 /* FIXME: the argument if any is checked for type attributes;
2060 should it be checked for decl ones? */
2061 }
2062 else
2063 {
2064 if (TREE_CODE (*node) == FUNCTION_TYPE
2065 || TREE_CODE (*node) == METHOD_TYPE)
2066 {
2067 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2068 {
2069 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2070 *no_add_attrs = true;
2071 }
2072 }
2073 else if (TREE_CODE (*node) == POINTER_TYPE
2074 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2075 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2076 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2077 {
2078 *node = build_type_copy (*node);
1d6e90ac
NC
2079 TREE_TYPE (*node) = build_type_attribute_variant
2080 (TREE_TYPE (*node),
2081 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
91d231cb
JM
2082 *no_add_attrs = true;
2083 }
2084 else
2085 {
2086 /* Possibly pass this attribute on from the type to a decl. */
2087 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2088 | (int) ATTR_FLAG_FUNCTION_NEXT
2089 | (int) ATTR_FLAG_ARRAY_NEXT))
2090 {
2091 *no_add_attrs = true;
2092 return tree_cons (name, args, NULL_TREE);
2093 }
2094 else
2095 {
2096 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2097 }
2098 }
2099 }
2100
2101 return NULL_TREE;
82e9d970
PB
2102}
2103
2104/* Return 0 if the attributes for two types are incompatible, 1 if they
2105 are compatible, and 2 if they are nearly compatible (which causes a
2106 warning to be generated). */
1d6e90ac 2107
8d8e52be 2108static int
82e9d970
PB
2109arm_comp_type_attributes (type1, type2)
2110 tree type1;
2111 tree type2;
2112{
1cb8d58a 2113 int l1, l2, s1, s2;
bd7fc26f 2114
82e9d970
PB
2115 /* Check for mismatch of non-default calling convention. */
2116 if (TREE_CODE (type1) != FUNCTION_TYPE)
2117 return 1;
2118
2119 /* Check for mismatched call attributes. */
1cb8d58a
NC
2120 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2121 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2122 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2123 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
2124
2125 /* Only bother to check if an attribute is defined. */
2126 if (l1 | l2 | s1 | s2)
2127 {
2128 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 2129 if ((l1 != l2) || (s1 != s2))
bd7fc26f 2130 return 0;
82e9d970 2131
bd7fc26f
NC
2132 /* Disallow mixed attributes. */
2133 if ((l1 & s2) || (l2 & s1))
2134 return 0;
2135 }
2136
6d3d9133
NC
2137 /* Check for mismatched ISR attribute. */
2138 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2139 if (! l1)
2140 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2141 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2142 if (! l2)
2143 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2144 if (l1 != l2)
2145 return 0;
2146
bd7fc26f 2147 return 1;
82e9d970
PB
2148}
2149
c27ba912
DM
2150/* Encode long_call or short_call attribute by prefixing
2151 symbol name in DECL with a special character FLAG. */
1d6e90ac 2152
c27ba912
DM
2153void
2154arm_encode_call_attribute (decl, flag)
2155 tree decl;
cd2b33d0 2156 int flag;
c27ba912 2157{
3cce094d 2158 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b 2159 int len = strlen (str);
d19fb8e3 2160 char * newstr;
c27ba912 2161
c27ba912
DM
2162 /* Do not allow weak functions to be treated as short call. */
2163 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2164 return;
c27ba912 2165
520a57c8
ZW
2166 newstr = alloca (len + 2);
2167 newstr[0] = flag;
2168 strcpy (newstr + 1, str);
c27ba912 2169
6d3d9133 2170 newstr = (char *) ggc_alloc_string (newstr, len + 1);
c27ba912
DM
2171 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2172}
2173
2174/* Assigns default attributes to newly defined type. This is used to
2175 set short_call/long_call attributes for function types of
2176 functions defined inside corresponding #pragma scopes. */
1d6e90ac 2177
8d8e52be 2178static void
c27ba912
DM
2179arm_set_default_type_attributes (type)
2180 tree type;
2181{
2182 /* Add __attribute__ ((long_call)) to all functions, when
2183 inside #pragma long_calls or __attribute__ ((short_call)),
2184 when inside #pragma no_long_calls. */
2185 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2186 {
2187 tree type_attr_list, attr_name;
2188 type_attr_list = TYPE_ATTRIBUTES (type);
2189
2190 if (arm_pragma_long_calls == LONG)
2191 attr_name = get_identifier ("long_call");
2192 else if (arm_pragma_long_calls == SHORT)
2193 attr_name = get_identifier ("short_call");
2194 else
2195 return;
2196
2197 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2198 TYPE_ATTRIBUTES (type) = type_attr_list;
2199 }
2200}
2201\f
2202/* Return 1 if the operand is a SYMBOL_REF for a function known to be
2203 defined within the current compilation unit. If this caanot be
2204 determined, then 0 is returned. */
1d6e90ac 2205
c27ba912
DM
2206static int
2207current_file_function_operand (sym_ref)
2208 rtx sym_ref;
2209{
2210 /* This is a bit of a fib. A function will have a short call flag
2211 applied to its name if it has the short call attribute, or it has
2212 already been defined within the current compilation unit. */
2213 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2214 return 1;
2215
6d77b53e 2216 /* The current function is always defined within the current compilation
d6a7951f
JM
2217 unit. if it s a weak definition however, then this may not be the real
2218 definition of the function, and so we have to say no. */
c27ba912 2219 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
5895f793 2220 && !DECL_WEAK (current_function_decl))
c27ba912
DM
2221 return 1;
2222
2223 /* We cannot make the determination - default to returning 0. */
2224 return 0;
2225}
2226
825dda42 2227/* Return nonzero if a 32 bit "long_call" should be generated for
c27ba912
DM
2228 this call. We generate a long_call if the function:
2229
2230 a. has an __attribute__((long call))
2231 or b. is within the scope of a #pragma long_calls
2232 or c. the -mlong-calls command line switch has been specified
2233
2234 However we do not generate a long call if the function:
2235
2236 d. has an __attribute__ ((short_call))
2237 or e. is inside the scope of a #pragma no_long_calls
2238 or f. has an __attribute__ ((section))
2239 or g. is defined within the current compilation unit.
2240
2241 This function will be called by C fragments contained in the machine
2242 description file. CALL_REF and CALL_COOKIE correspond to the matched
2243 rtl operands. CALL_SYMBOL is used to distinguish between
2244 two different callers of the function. It is set to 1 in the
2245 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2246 and "call_value" patterns. This is because of the difference in the
2247 SYM_REFs passed by these patterns. */
1d6e90ac 2248
c27ba912
DM
2249int
2250arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2251 rtx sym_ref;
2252 int call_cookie;
2253 int call_symbol;
2254{
5895f793 2255 if (!call_symbol)
c27ba912
DM
2256 {
2257 if (GET_CODE (sym_ref) != MEM)
2258 return 0;
2259
2260 sym_ref = XEXP (sym_ref, 0);
2261 }
2262
2263 if (GET_CODE (sym_ref) != SYMBOL_REF)
2264 return 0;
2265
2266 if (call_cookie & CALL_SHORT)
2267 return 0;
2268
2269 if (TARGET_LONG_CALLS && flag_function_sections)
2270 return 1;
2271
87e27392 2272 if (current_file_function_operand (sym_ref))
c27ba912
DM
2273 return 0;
2274
2275 return (call_cookie & CALL_LONG)
2276 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2277 || TARGET_LONG_CALLS;
2278}
f99fce0c 2279
825dda42 2280/* Return nonzero if it is ok to make a tail-call to DECL. */
1d6e90ac 2281
f99fce0c
RE
2282int
2283arm_function_ok_for_sibcall (decl)
2284 tree decl;
2285{
2286 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2287
2288 /* Never tailcall something for which we have no decl, or if we
2289 are in Thumb mode. */
2290 if (decl == NULL || TARGET_THUMB)
2291 return 0;
2292
2293 /* Get the calling method. */
2294 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2295 call_type = CALL_SHORT;
2296 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2297 call_type = CALL_LONG;
2298
2299 /* Cannot tail-call to long calls, since these are out of range of
2300 a branch instruction. However, if not compiling PIC, we know
2301 we can reach the symbol if it is in this compilation unit. */
5895f793 2302 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
f99fce0c
RE
2303 return 0;
2304
2305 /* If we are interworking and the function is not declared static
2306 then we can't tail-call it unless we know that it exists in this
2307 compilation unit (since it might be a Thumb routine). */
5895f793 2308 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
f99fce0c
RE
2309 return 0;
2310
6d3d9133
NC
2311 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2312 if (IS_INTERRUPT (arm_current_func_type ()))
2313 return 0;
2314
f99fce0c
RE
2315 /* Everything else is ok. */
2316 return 1;
2317}
2318
82e9d970 2319\f
32de079a
RE
2320int
2321legitimate_pic_operand_p (x)
2322 rtx x;
2323{
d5b7b3ae
RE
2324 if (CONSTANT_P (x)
2325 && flag_pic
32de079a
RE
2326 && (GET_CODE (x) == SYMBOL_REF
2327 || (GET_CODE (x) == CONST
2328 && GET_CODE (XEXP (x, 0)) == PLUS
2329 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2330 return 0;
2331
2332 return 1;
2333}
2334
2335rtx
2336legitimize_pic_address (orig, mode, reg)
2337 rtx orig;
2338 enum machine_mode mode;
2339 rtx reg;
2340{
a3c48721
RE
2341 if (GET_CODE (orig) == SYMBOL_REF
2342 || GET_CODE (orig) == LABEL_REF)
32de079a 2343 {
5f37d07c 2344#ifndef AOF_ASSEMBLER
32de079a 2345 rtx pic_ref, address;
5f37d07c 2346#endif
32de079a
RE
2347 rtx insn;
2348 int subregs = 0;
2349
2350 if (reg == 0)
2351 {
893f3d5b 2352 if (no_new_pseudos)
32de079a
RE
2353 abort ();
2354 else
2355 reg = gen_reg_rtx (Pmode);
2356
2357 subregs = 1;
2358 }
2359
2360#ifdef AOF_ASSEMBLER
2361 /* The AOF assembler can generate relocations for these directly, and
6354dc9b 2362 understands that the PIC register has to be added into the offset. */
32de079a
RE
2363 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2364#else
2365 if (subregs)
2366 address = gen_reg_rtx (Pmode);
2367 else
2368 address = reg;
2369
4bec9f7d
NC
2370 if (TARGET_ARM)
2371 emit_insn (gen_pic_load_addr_arm (address, orig));
2372 else
2373 emit_insn (gen_pic_load_addr_thumb (address, orig));
32de079a 2374
14f583b8
PB
2375 if ((GET_CODE (orig) == LABEL_REF
2376 || (GET_CODE (orig) == SYMBOL_REF &&
2377 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2378 && NEED_GOT_RELOC)
a3c48721
RE
2379 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2380 else
2381 {
2382 pic_ref = gen_rtx_MEM (Pmode,
2383 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2384 address));
2385 RTX_UNCHANGING_P (pic_ref) = 1;
2386 }
2387
32de079a
RE
2388 insn = emit_move_insn (reg, pic_ref);
2389#endif
2390 current_function_uses_pic_offset_table = 1;
2391 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2392 by loop. */
43cffd11
RE
2393 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2394 REG_NOTES (insn));
32de079a
RE
2395 return reg;
2396 }
2397 else if (GET_CODE (orig) == CONST)
2398 {
2399 rtx base, offset;
2400
2401 if (GET_CODE (XEXP (orig, 0)) == PLUS
2402 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2403 return orig;
2404
2405 if (reg == 0)
2406 {
893f3d5b 2407 if (no_new_pseudos)
32de079a
RE
2408 abort ();
2409 else
2410 reg = gen_reg_rtx (Pmode);
2411 }
2412
2413 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2414 {
2415 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2416 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2417 base == reg ? 0 : reg);
2418 }
2419 else
2420 abort ();
2421
2422 if (GET_CODE (offset) == CONST_INT)
2423 {
2424 /* The base register doesn't really matter, we only want to
2425 test the index for the appropriate mode. */
f1008e52 2426 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
32de079a 2427
5895f793 2428 if (!no_new_pseudos)
32de079a
RE
2429 offset = force_reg (Pmode, offset);
2430 else
2431 abort ();
2432
2433 win:
2434 if (GET_CODE (offset) == CONST_INT)
ed8908e7 2435 return plus_constant (base, INTVAL (offset));
32de079a
RE
2436 }
2437
2438 if (GET_MODE_SIZE (mode) > 4
2439 && (GET_MODE_CLASS (mode) == MODE_INT
2440 || TARGET_SOFT_FLOAT))
2441 {
2442 emit_insn (gen_addsi3 (reg, base, offset));
2443 return reg;
2444 }
2445
43cffd11 2446 return gen_rtx_PLUS (Pmode, base, offset);
32de079a 2447 }
32de079a
RE
2448
2449 return orig;
2450}
2451
c1163e75
PB
2452/* Generate code to load the PIC register. PROLOGUE is true if
2453 called from arm_expand_prologue (in which case we want the
2454 generated insns at the start of the function); false if called
2455 by an exception receiver that needs the PIC register reloaded
2456 (in which case the insns are just dumped at the current location). */
eab4abeb 2457
32de079a 2458void
eab4abeb 2459arm_finalize_pic (prologue)
5f37d07c 2460 int prologue ATTRIBUTE_UNUSED;
32de079a
RE
2461{
2462#ifndef AOF_ASSEMBLER
c1163e75 2463 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
32de079a
RE
2464 rtx global_offset_table;
2465
ed0e6530 2466 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
2467 return;
2468
5895f793 2469 if (!flag_pic)
32de079a
RE
2470 abort ();
2471
2472 start_sequence ();
2473 l1 = gen_label_rtx ();
2474
43cffd11 2475 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768 2476 /* On the ARM the PC register contains 'dot + 8' at the time of the
d5b7b3ae
RE
2477 addition, on the Thumb it is 'dot + 4'. */
2478 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
84306176
PB
2479 if (GOT_PCREL)
2480 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 2481 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
2482 else
2483 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
2484
2485 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 2486
d5b7b3ae 2487 if (TARGET_ARM)
4bec9f7d
NC
2488 {
2489 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2490 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2491 }
d5b7b3ae 2492 else
4bec9f7d
NC
2493 {
2494 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2495 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2496 }
32de079a 2497
2f937369 2498 seq = get_insns ();
32de079a 2499 end_sequence ();
c1163e75
PB
2500 if (prologue)
2501 emit_insn_after (seq, get_insns ());
2502 else
2503 emit_insn (seq);
32de079a
RE
2504
2505 /* Need to emit this whether or not we obey regdecls,
2506 since setjmp/longjmp can cause life info to screw up. */
43cffd11 2507 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
2508#endif /* AOF_ASSEMBLER */
2509}
2510
e2c671ba
RE
/* Nonzero if X is a hard or pseudo register, possibly viewed through
   a SUBREG.  */
#define REG_OR_SUBREG_REG(X) \
  (GET_CODE (X) == REG \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

/* The underlying register of an X satisfying REG_OR_SUBREG_REG.  */
#define REG_OR_SUBREG_RTX(X) \
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

/* Fallback cost scale when the target headers do not provide one.  */
#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif
e2c671ba
RE
2521
2522int
d5b7b3ae 2523arm_rtx_costs (x, code, outer)
e2c671ba 2524 rtx x;
74bbc178 2525 enum rtx_code code;
d5b7b3ae 2526 enum rtx_code outer;
e2c671ba
RE
2527{
2528 enum machine_mode mode = GET_MODE (x);
2529 enum rtx_code subcode;
2530 int extra_cost;
2531
d5b7b3ae
RE
2532 if (TARGET_THUMB)
2533 {
2534 switch (code)
2535 {
2536 case ASHIFT:
2537 case ASHIFTRT:
2538 case LSHIFTRT:
2539 case ROTATERT:
2540 case PLUS:
2541 case MINUS:
2542 case COMPARE:
2543 case NEG:
2544 case NOT:
2545 return COSTS_N_INSNS (1);
2546
2547 case MULT:
2548 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2549 {
2550 int cycles = 0;
2551 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2552
2553 while (i)
2554 {
2555 i >>= 2;
5895f793 2556 cycles++;
d5b7b3ae
RE
2557 }
2558 return COSTS_N_INSNS (2) + cycles;
2559 }
2560 return COSTS_N_INSNS (1) + 16;
2561
2562 case SET:
2563 return (COSTS_N_INSNS (1)
2564 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2565 + GET_CODE (SET_DEST (x)) == MEM));
2566
2567 case CONST_INT:
2568 if (outer == SET)
2569 {
2570 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2571 return 0;
2572 if (thumb_shiftable_const (INTVAL (x)))
2573 return COSTS_N_INSNS (2);
2574 return COSTS_N_INSNS (3);
2575 }
2576 else if (outer == PLUS
2577 && INTVAL (x) < 256 && INTVAL (x) > -256)
2578 return 0;
2579 else if (outer == COMPARE
2580 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2581 return 0;
2582 else if (outer == ASHIFT || outer == ASHIFTRT
2583 || outer == LSHIFTRT)
2584 return 0;
2585 return COSTS_N_INSNS (2);
2586
2587 case CONST:
2588 case CONST_DOUBLE:
2589 case LABEL_REF:
2590 case SYMBOL_REF:
2591 return COSTS_N_INSNS (3);
2592
2593 case UDIV:
2594 case UMOD:
2595 case DIV:
2596 case MOD:
2597 return 100;
2598
2599 case TRUNCATE:
2600 return 99;
2601
2602 case AND:
2603 case XOR:
2604 case IOR:
2605 /* XXX guess. */
2606 return 8;
2607
2608 case ADDRESSOF:
2609 case MEM:
2610 /* XXX another guess. */
2611 /* Memory costs quite a lot for the first word, but subsequent words
2612 load at the equivalent of a single insn each. */
2613 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
48f6efae
NC
2614 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2615 ? 4 : 0));
d5b7b3ae
RE
2616
2617 case IF_THEN_ELSE:
2618 /* XXX a guess. */
2619 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2620 return 14;
2621 return 2;
2622
2623 case ZERO_EXTEND:
2624 /* XXX still guessing. */
2625 switch (GET_MODE (XEXP (x, 0)))
2626 {
2627 case QImode:
2628 return (1 + (mode == DImode ? 4 : 0)
2629 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2630
2631 case HImode:
2632 return (4 + (mode == DImode ? 4 : 0)
2633 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2634
2635 case SImode:
2636 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2637
2638 default:
2639 return 99;
2640 }
2641
2642 default:
2643 return 99;
2644#if 0
2645 case FFS:
2646 case FLOAT:
2647 case FIX:
2648 case UNSIGNED_FIX:
2649 /* XXX guess */
2650 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2651 rtx_name[code]);
2652 abort ();
2653#endif
2654 }
2655 }
2656
e2c671ba
RE
2657 switch (code)
2658 {
2659 case MEM:
2660 /* Memory costs quite a lot for the first word, but subsequent words
2661 load at the equivalent of a single insn each. */
2662 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
48f6efae
NC
2663 + (GET_CODE (x) == SYMBOL_REF
2664 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
e2c671ba
RE
2665
2666 case DIV:
2667 case MOD:
2668 return 100;
2669
2670 case ROTATE:
2671 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2672 return 4;
2673 /* Fall through */
2674 case ROTATERT:
2675 if (mode != SImode)
2676 return 8;
2677 /* Fall through */
2678 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2679 if (mode == DImode)
2680 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2681 + ((GET_CODE (XEXP (x, 0)) == REG
2682 || (GET_CODE (XEXP (x, 0)) == SUBREG
2683 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2684 ? 0 : 8));
2685 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2686 || (GET_CODE (XEXP (x, 0)) == SUBREG
2687 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2688 ? 0 : 4)
2689 + ((GET_CODE (XEXP (x, 1)) == REG
2690 || (GET_CODE (XEXP (x, 1)) == SUBREG
2691 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2692 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2693 ? 0 : 4));
2694
2695 case MINUS:
2696 if (mode == DImode)
2697 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2698 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2699 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2700 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2701 ? 0 : 8));
2702
2703 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2704 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2705 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2706 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2707 ? 0 : 8)
2708 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2709 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2710 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2711 ? 0 : 8));
2712
2713 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2714 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2715 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2716 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2717 || subcode == ASHIFTRT || subcode == LSHIFTRT
2718 || subcode == ROTATE || subcode == ROTATERT
2719 || (subcode == MULT
2720 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2721 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2722 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2723 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2724 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2725 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2726 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2727 return 1;
2728 /* Fall through */
2729
2730 case PLUS:
2731 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2732 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2733 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2734 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2735 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2736 ? 0 : 8));
2737
2738 /* Fall through */
2739 case AND: case XOR: case IOR:
2740 extra_cost = 0;
2741
2742 /* Normally the frame registers will be spilt into reg+const during
2743 reload, so it is a bad idea to combine them with other instructions,
2744 since then they might not be moved outside of loops. As a compromise
2745 we allow integration with ops that have a constant as their second
2746 operand. */
2747 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2748 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2749 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2750 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2751 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2752 extra_cost = 4;
2753
2754 if (mode == DImode)
2755 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2756 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2757 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2758 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2759 ? 0 : 8));
2760
2761 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2762 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2763 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2764 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2765 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2766 ? 0 : 4));
2767
2768 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2769 return (1 + extra_cost
2770 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2771 || subcode == LSHIFTRT || subcode == ASHIFTRT
2772 || subcode == ROTATE || subcode == ROTATERT
2773 || (subcode == MULT
2774 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2775 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 2776 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
2777 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2778 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 2779 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
2780 ? 0 : 4));
2781
2782 return 8;
2783
2784 case MULT:
b111229a 2785 /* There is no point basing this on the tuning, since it is always the
6354dc9b 2786 fast variant if it exists at all. */
2b835d68
RE
2787 if (arm_fast_multiply && mode == DImode
2788 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2789 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2790 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2791 return 8;
2792
e2c671ba
RE
2793 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2794 || mode == DImode)
2795 return 30;
2796
2797 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2798 {
2b835d68 2799 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
30cf4896 2800 & (unsigned HOST_WIDE_INT) 0xffffffff);
e2c671ba
RE
2801 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2802 int j;
6354dc9b
NC
2803
2804 /* Tune as appropriate. */
aec3cfba 2805 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 2806
2b835d68 2807 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 2808 {
2b835d68 2809 i >>= booth_unit_size;
e2c671ba
RE
2810 add_cost += 2;
2811 }
2812
2813 return add_cost;
2814 }
2815
aec3cfba 2816 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 2817 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
2818 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2819
56636818
JL
2820 case TRUNCATE:
2821 if (arm_fast_multiply && mode == SImode
2822 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2823 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2824 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2825 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2826 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2827 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2828 return 8;
2829 return 99;
2830
e2c671ba
RE
2831 case NEG:
2832 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2833 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2834 /* Fall through */
2835 case NOT:
2836 if (mode == DImode)
2837 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2838
2839 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2840
2841 case IF_THEN_ELSE:
2842 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2843 return 14;
2844 return 2;
2845
2846 case COMPARE:
2847 return 1;
2848
2849 case ABS:
2850 return 4 + (mode == DImode ? 4 : 0);
2851
2852 case SIGN_EXTEND:
2853 if (GET_MODE (XEXP (x, 0)) == QImode)
2854 return (4 + (mode == DImode ? 4 : 0)
2855 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2856 /* Fall through */
2857 case ZERO_EXTEND:
2858 switch (GET_MODE (XEXP (x, 0)))
2859 {
2860 case QImode:
2861 return (1 + (mode == DImode ? 4 : 0)
2862 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2863
2864 case HImode:
2865 return (4 + (mode == DImode ? 4 : 0)
2866 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2867
2868 case SImode:
2869 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
2870
2871 default:
2872 break;
e2c671ba
RE
2873 }
2874 abort ();
2875
d5b7b3ae
RE
2876 case CONST_INT:
2877 if (const_ok_for_arm (INTVAL (x)))
2878 return outer == SET ? 2 : -1;
2879 else if (outer == AND
5895f793 2880 && const_ok_for_arm (~INTVAL (x)))
d5b7b3ae
RE
2881 return -1;
2882 else if ((outer == COMPARE
2883 || outer == PLUS || outer == MINUS)
5895f793 2884 && const_ok_for_arm (-INTVAL (x)))
d5b7b3ae
RE
2885 return -1;
2886 else
2887 return 5;
2888
2889 case CONST:
2890 case LABEL_REF:
2891 case SYMBOL_REF:
2892 return 6;
2893
2894 case CONST_DOUBLE:
2895 if (const_double_rtx_ok_for_fpu (x))
2896 return outer == SET ? 2 : -1;
2897 else if ((outer == COMPARE || outer == PLUS)
2898 && neg_const_double_rtx_ok_for_fpu (x))
2899 return -1;
2900 return 7;
2901
e2c671ba
RE
2902 default:
2903 return 99;
2904 }
2905}
32de079a 2906
c237e94a 2907static int
32de079a
RE
2908arm_adjust_cost (insn, link, dep, cost)
2909 rtx insn;
2910 rtx link;
2911 rtx dep;
2912 int cost;
2913{
2914 rtx i_pat, d_pat;
2915
d19fb8e3
NC
2916 /* Some true dependencies can have a higher cost depending
2917 on precisely how certain input operands are used. */
2918 if (arm_is_xscale
2919 && REG_NOTE_KIND (link) == 0
2920 && recog_memoized (insn) < 0
2921 && recog_memoized (dep) < 0)
2922 {
2923 int shift_opnum = get_attr_shift (insn);
2924 enum attr_type attr_type = get_attr_type (dep);
2925
2926 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2927 operand for INSN. If we have a shifted input operand and the
2928 instruction we depend on is another ALU instruction, then we may
2929 have to account for an additional stall. */
2930 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2931 {
2932 rtx shifted_operand;
2933 int opno;
2934
2935 /* Get the shifted operand. */
2936 extract_insn (insn);
2937 shifted_operand = recog_data.operand[shift_opnum];
2938
2939 /* Iterate over all the operands in DEP. If we write an operand
2940 that overlaps with SHIFTED_OPERAND, then we have increase the
2941 cost of this dependency. */
2942 extract_insn (dep);
2943 preprocess_constraints ();
2944 for (opno = 0; opno < recog_data.n_operands; opno++)
2945 {
2946 /* We can ignore strict inputs. */
2947 if (recog_data.operand_type[opno] == OP_IN)
2948 continue;
2949
2950 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2951 shifted_operand))
2952 return 2;
2953 }
2954 }
2955 }
2956
6354dc9b 2957 /* XXX This is not strictly true for the FPA. */
d5b7b3ae
RE
2958 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2959 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
b36ba79f
RE
2960 return 0;
2961
d5b7b3ae
RE
2962 /* Call insns don't incur a stall, even if they follow a load. */
2963 if (REG_NOTE_KIND (link) == 0
2964 && GET_CODE (insn) == CALL_INSN)
2965 return 1;
2966
32de079a
RE
2967 if ((i_pat = single_set (insn)) != NULL
2968 && GET_CODE (SET_SRC (i_pat)) == MEM
2969 && (d_pat = single_set (dep)) != NULL
2970 && GET_CODE (SET_DEST (d_pat)) == MEM)
2971 {
48f6efae 2972 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
32de079a
RE
2973 /* This is a load after a store, there is no conflict if the load reads
2974 from a cached area. Assume that loads from the stack, and from the
2975 constant pool are cached, and that others will miss. This is a
6354dc9b 2976 hack. */
32de079a 2977
48f6efae
NC
2978 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
2979 || reg_mentioned_p (stack_pointer_rtx, src_mem)
2980 || reg_mentioned_p (frame_pointer_rtx, src_mem)
2981 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
949d79eb 2982 return 1;
32de079a
RE
2983 }
2984
2985 return cost;
2986}
2987
6354dc9b 2988/* This code has been fixed for cross compilation. */
ff9940b0
RE
2989
2990static int fpa_consts_inited = 0;
2991
1d6e90ac 2992static const char * const strings_fpa[8] =
62b10bbc 2993{
2b835d68
RE
2994 "0", "1", "2", "3",
2995 "4", "5", "0.5", "10"
2996};
ff9940b0
RE
2997
2998static REAL_VALUE_TYPE values_fpa[8];
2999
3000static void
3001init_fpa_table ()
3002{
3003 int i;
3004 REAL_VALUE_TYPE r;
3005
3006 for (i = 0; i < 8; i++)
3007 {
3008 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3009 values_fpa[i] = r;
3010 }
f3bb6135 3011
ff9940b0
RE
3012 fpa_consts_inited = 1;
3013}
3014
6354dc9b 3015/* Return TRUE if rtx X is a valid immediate FPU constant. */
cce8749e
CH
3016
3017int
3018const_double_rtx_ok_for_fpu (x)
3019 rtx x;
3020{
ff9940b0
RE
3021 REAL_VALUE_TYPE r;
3022 int i;
3023
3024 if (!fpa_consts_inited)
3025 init_fpa_table ();
3026
3027 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3028 if (REAL_VALUE_MINUS_ZERO (r))
3029 return 0;
f3bb6135 3030
ff9940b0
RE
3031 for (i = 0; i < 8; i++)
3032 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3033 return 1;
f3bb6135 3034
ff9940b0 3035 return 0;
f3bb6135 3036}
ff9940b0 3037
6354dc9b 3038/* Return TRUE if the negation of rtx X is a valid immediate FPU constant. */
ff9940b0
RE
3039
3040int
3041neg_const_double_rtx_ok_for_fpu (x)
3042 rtx x;
3043{
3044 REAL_VALUE_TYPE r;
3045 int i;
3046
3047 if (!fpa_consts_inited)
3048 init_fpa_table ();
3049
3050 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3051 r = REAL_VALUE_NEGATE (r);
3052 if (REAL_VALUE_MINUS_ZERO (r))
3053 return 0;
f3bb6135 3054
ff9940b0
RE
3055 for (i = 0; i < 8; i++)
3056 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3057 return 1;
f3bb6135 3058
ff9940b0 3059 return 0;
f3bb6135 3060}
cce8749e
CH
3061\f
3062/* Predicates for `match_operand' and `match_operator'. */
3063
ff9940b0 3064/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
3065 (SUBREG (MEM)...).
3066
3067 This function exists because at the time it was put in it led to better
3068 code. SUBREG(MEM) always needs a reload in the places where
3069 s_register_operand is used, and this seemed to lead to excessive
3070 reloading. */
ff9940b0
RE
3071
3072int
3073s_register_operand (op, mode)
1d6e90ac 3074 rtx op;
ff9940b0
RE
3075 enum machine_mode mode;
3076{
3077 if (GET_MODE (op) != mode && mode != VOIDmode)
3078 return 0;
3079
3080 if (GET_CODE (op) == SUBREG)
f3bb6135 3081 op = SUBREG_REG (op);
ff9940b0
RE
3082
3083 /* We don't consider registers whose class is NO_REGS
3084 to be a register operand. */
d5b7b3ae 3085 /* XXX might have to check for lo regs only for thumb ??? */
ff9940b0
RE
3086 return (GET_CODE (op) == REG
3087 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3088 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3089}
3090
b0888988 3091/* A hard register operand (even before reload). */
1d6e90ac 3092
b0888988
RE
3093int
3094arm_hard_register_operand (op, mode)
1d6e90ac 3095 rtx op;
b0888988
RE
3096 enum machine_mode mode;
3097{
3098 if (GET_MODE (op) != mode && mode != VOIDmode)
3099 return 0;
3100
3101 return (GET_CODE (op) == REG
3102 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3103}
3104
e2c671ba
RE
3105/* Only accept reg, subreg(reg), const_int. */
3106
3107int
3108reg_or_int_operand (op, mode)
1d6e90ac 3109 rtx op;
e2c671ba
RE
3110 enum machine_mode mode;
3111{
3112 if (GET_CODE (op) == CONST_INT)
3113 return 1;
3114
3115 if (GET_MODE (op) != mode && mode != VOIDmode)
3116 return 0;
3117
3118 if (GET_CODE (op) == SUBREG)
3119 op = SUBREG_REG (op);
3120
3121 /* We don't consider registers whose class is NO_REGS
3122 to be a register operand. */
3123 return (GET_CODE (op) == REG
3124 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3125 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3126}
3127
ff9940b0
RE
3128/* Return 1 if OP is an item in memory, given that we are in reload. */
3129
3130int
d5b7b3ae 3131arm_reload_memory_operand (op, mode)
ff9940b0 3132 rtx op;
74bbc178 3133 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
3134{
3135 int regno = true_regnum (op);
3136
5895f793 3137 return (!CONSTANT_P (op)
ff9940b0
RE
3138 && (regno == -1
3139 || (GET_CODE (op) == REG
3140 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3141}
3142
4d818c85 3143/* Return 1 if OP is a valid memory address, but not valid for a signed byte
d5b7b3ae 3144 memory access (architecture V4).
f710504c 3145 MODE is QImode if called when computing constraints, or VOIDmode when
d5b7b3ae
RE
3146 emitting patterns. In this latter case we cannot use memory_operand()
3147 because it will fail on badly formed MEMs, which is precisely what we are
3148 trying to catch. */
1d6e90ac 3149
4d818c85
RE
3150int
3151bad_signed_byte_operand (op, mode)
3152 rtx op;
d5b7b3ae 3153 enum machine_mode mode ATTRIBUTE_UNUSED;
4d818c85 3154{
d5b7b3ae 3155#if 0
5895f793 3156 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
d5b7b3ae
RE
3157 return 0;
3158#endif
3159 if (GET_CODE (op) != MEM)
4d818c85
RE
3160 return 0;
3161
3162 op = XEXP (op, 0);
3163
6354dc9b 3164 /* A sum of anything more complex than reg + reg or reg + const is bad. */
4d818c85 3165 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
5895f793
RE
3166 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3167 || (!s_register_operand (XEXP (op, 1), VOIDmode)
9c8cc54f 3168 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
3169 return 1;
3170
6354dc9b 3171 /* Big constants are also bad. */
4d818c85
RE
3172 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3173 && (INTVAL (XEXP (op, 1)) > 0xff
3174 || -INTVAL (XEXP (op, 1)) > 0xff))
3175 return 1;
3176
6354dc9b 3177 /* Everything else is good, or can will automatically be made so. */
4d818c85
RE
3178 return 0;
3179}
3180
cce8749e
CH
3181/* Return TRUE for valid operands for the rhs of an ARM instruction. */
3182
3183int
3184arm_rhs_operand (op, mode)
3185 rtx op;
3186 enum machine_mode mode;
3187{
ff9940b0 3188 return (s_register_operand (op, mode)
cce8749e 3189 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 3190}
cce8749e 3191
1d6e90ac
NC
3192/* Return TRUE for valid operands for the
3193 rhs of an ARM instruction, or a load. */
ff9940b0
RE
3194
3195int
3196arm_rhsm_operand (op, mode)
3197 rtx op;
3198 enum machine_mode mode;
3199{
3200 return (s_register_operand (op, mode)
3201 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3202 || memory_operand (op, mode));
f3bb6135 3203}
ff9940b0
RE
3204
3205/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3206 constant that is valid when negated. */
3207
3208int
3209arm_add_operand (op, mode)
3210 rtx op;
3211 enum machine_mode mode;
3212{
d5b7b3ae
RE
3213 if (TARGET_THUMB)
3214 return thumb_cmp_operand (op, mode);
3215
ff9940b0
RE
3216 return (s_register_operand (op, mode)
3217 || (GET_CODE (op) == CONST_INT
3218 && (const_ok_for_arm (INTVAL (op))
3219 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 3220}
ff9940b0
RE
3221
3222int
3223arm_not_operand (op, mode)
3224 rtx op;
3225 enum machine_mode mode;
3226{
3227 return (s_register_operand (op, mode)
3228 || (GET_CODE (op) == CONST_INT
3229 && (const_ok_for_arm (INTVAL (op))
3230 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 3231}
ff9940b0 3232
5165176d
RE
3233/* Return TRUE if the operand is a memory reference which contains an
3234 offsettable address. */
1d6e90ac 3235
5165176d
RE
3236int
3237offsettable_memory_operand (op, mode)
1d6e90ac 3238 rtx op;
5165176d
RE
3239 enum machine_mode mode;
3240{
3241 if (mode == VOIDmode)
3242 mode = GET_MODE (op);
3243
3244 return (mode == GET_MODE (op)
3245 && GET_CODE (op) == MEM
3246 && offsettable_address_p (reload_completed | reload_in_progress,
3247 mode, XEXP (op, 0)));
3248}
3249
3250/* Return TRUE if the operand is a memory reference which is, or can be
3251 made word aligned by adjusting the offset. */
1d6e90ac 3252
5165176d
RE
3253int
3254alignable_memory_operand (op, mode)
1d6e90ac 3255 rtx op;
5165176d
RE
3256 enum machine_mode mode;
3257{
3258 rtx reg;
3259
3260 if (mode == VOIDmode)
3261 mode = GET_MODE (op);
3262
3263 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3264 return 0;
3265
3266 op = XEXP (op, 0);
3267
3268 return ((GET_CODE (reg = op) == REG
3269 || (GET_CODE (op) == SUBREG
3270 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3271 || (GET_CODE (op) == PLUS
3272 && GET_CODE (XEXP (op, 1)) == CONST_INT
3273 && (GET_CODE (reg = XEXP (op, 0)) == REG
3274 || (GET_CODE (XEXP (op, 0)) == SUBREG
3275 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
bdb429a5 3276 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
5165176d
RE
3277}
3278
b111229a
RE
3279/* Similar to s_register_operand, but does not allow hard integer
3280 registers. */
1d6e90ac 3281
b111229a
RE
3282int
3283f_register_operand (op, mode)
1d6e90ac 3284 rtx op;
b111229a
RE
3285 enum machine_mode mode;
3286{
3287 if (GET_MODE (op) != mode && mode != VOIDmode)
3288 return 0;
3289
3290 if (GET_CODE (op) == SUBREG)
3291 op = SUBREG_REG (op);
3292
3293 /* We don't consider registers whose class is NO_REGS
3294 to be a register operand. */
3295 return (GET_CODE (op) == REG
3296 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3297 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3298}
3299
cce8749e
CH
3300/* Return TRUE for valid operands for the rhs of an FPU instruction. */
3301
3302int
3303fpu_rhs_operand (op, mode)
3304 rtx op;
3305 enum machine_mode mode;
3306{
ff9940b0 3307 if (s_register_operand (op, mode))
f3bb6135 3308 return TRUE;
9ce71c6f
BS
3309
3310 if (GET_MODE (op) != mode && mode != VOIDmode)
3311 return FALSE;
3312
3313 if (GET_CODE (op) == CONST_DOUBLE)
3314 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
3315
3316 return FALSE;
3317}
cce8749e 3318
ff9940b0
RE
3319int
3320fpu_add_operand (op, mode)
3321 rtx op;
3322 enum machine_mode mode;
3323{
3324 if (s_register_operand (op, mode))
f3bb6135 3325 return TRUE;
9ce71c6f
BS
3326
3327 if (GET_MODE (op) != mode && mode != VOIDmode)
3328 return FALSE;
3329
3330 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
3331 return (const_double_rtx_ok_for_fpu (op)
3332 || neg_const_double_rtx_ok_for_fpu (op));
3333
3334 return FALSE;
ff9940b0
RE
3335}
3336
cce8749e
CH
3337/* Return nonzero if OP is a constant power of two. */
3338
3339int
3340power_of_two_operand (op, mode)
3341 rtx op;
74bbc178 3342 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
3343{
3344 if (GET_CODE (op) == CONST_INT)
3345 {
d5b7b3ae 3346 HOST_WIDE_INT value = INTVAL (op);
1d6e90ac 3347
f3bb6135 3348 return value != 0 && (value & (value - 1)) == 0;
cce8749e 3349 }
1d6e90ac 3350
f3bb6135
RE
3351 return FALSE;
3352}
cce8749e
CH
3353
3354/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 3355 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
3356 Note that this disallows MEM(REG+REG), but allows
3357 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
3358
3359int
3360di_operand (op, mode)
3361 rtx op;
3362 enum machine_mode mode;
3363{
ff9940b0 3364 if (s_register_operand (op, mode))
f3bb6135 3365 return TRUE;
cce8749e 3366
9ce71c6f
BS
3367 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3368 return FALSE;
3369
e9c6b69b
NC
3370 if (GET_CODE (op) == SUBREG)
3371 op = SUBREG_REG (op);
3372
cce8749e
CH
3373 switch (GET_CODE (op))
3374 {
3375 case CONST_DOUBLE:
3376 case CONST_INT:
f3bb6135
RE
3377 return TRUE;
3378
cce8749e 3379 case MEM:
f3bb6135
RE
3380 return memory_address_p (DImode, XEXP (op, 0));
3381
cce8749e 3382 default:
f3bb6135 3383 return FALSE;
cce8749e 3384 }
f3bb6135 3385}
cce8749e 3386
d5b7b3ae 3387/* Like di_operand, but don't accept constants. */
1d6e90ac 3388
d5b7b3ae
RE
3389int
3390nonimmediate_di_operand (op, mode)
3391 rtx op;
3392 enum machine_mode mode;
3393{
3394 if (s_register_operand (op, mode))
3395 return TRUE;
3396
3397 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3398 return FALSE;
3399
3400 if (GET_CODE (op) == SUBREG)
3401 op = SUBREG_REG (op);
3402
3403 if (GET_CODE (op) == MEM)
3404 return memory_address_p (DImode, XEXP (op, 0));
3405
3406 return FALSE;
3407}
3408
f3139301 3409/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 3410 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
3411 Note that this disallows MEM(REG+REG), but allows
3412 MEM(PRE/POST_INC/DEC(REG)). */
3413
3414int
3415soft_df_operand (op, mode)
3416 rtx op;
3417 enum machine_mode mode;
3418{
3419 if (s_register_operand (op, mode))
4b02997f 3420 return TRUE;
f3139301 3421
9ce71c6f
BS
3422 if (mode != VOIDmode && GET_MODE (op) != mode)
3423 return FALSE;
3424
37b80d2e
BS
3425 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3426 return FALSE;
3427
e9c6b69b
NC
3428 if (GET_CODE (op) == SUBREG)
3429 op = SUBREG_REG (op);
9ce71c6f 3430
f3139301
DE
3431 switch (GET_CODE (op))
3432 {
3433 case CONST_DOUBLE:
3434 return TRUE;
3435
3436 case MEM:
3437 return memory_address_p (DFmode, XEXP (op, 0));
3438
3439 default:
3440 return FALSE;
3441 }
3442}
3443
d5b7b3ae 3444/* Like soft_df_operand, but don't accept constants. */
1d6e90ac 3445
d5b7b3ae
RE
3446int
3447nonimmediate_soft_df_operand (op, mode)
3448 rtx op;
3449 enum machine_mode mode;
3450{
3451 if (s_register_operand (op, mode))
4b02997f 3452 return TRUE;
d5b7b3ae
RE
3453
3454 if (mode != VOIDmode && GET_MODE (op) != mode)
3455 return FALSE;
3456
3457 if (GET_CODE (op) == SUBREG)
3458 op = SUBREG_REG (op);
3459
3460 if (GET_CODE (op) == MEM)
3461 return memory_address_p (DFmode, XEXP (op, 0));
3462 return FALSE;
3463}
cce8749e 3464
d5b7b3ae 3465/* Return TRUE for valid index operands. */
1d6e90ac 3466
cce8749e
CH
3467int
3468index_operand (op, mode)
3469 rtx op;
3470 enum machine_mode mode;
3471{
d5b7b3ae 3472 return (s_register_operand (op, mode)
ff9940b0 3473 || (immediate_operand (op, mode)
d5b7b3ae
RE
3474 && (GET_CODE (op) != CONST_INT
3475 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 3476}
cce8749e 3477
ff9940b0
RE
3478/* Return TRUE for valid shifts by a constant. This also accepts any
3479 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 3480 shift operator in this case was a mult. */
ff9940b0
RE
3481
3482int
3483const_shift_operand (op, mode)
3484 rtx op;
3485 enum machine_mode mode;
3486{
3487 return (power_of_two_operand (op, mode)
3488 || (immediate_operand (op, mode)
d5b7b3ae
RE
3489 && (GET_CODE (op) != CONST_INT
3490 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 3491}
ff9940b0 3492
cce8749e
CH
3493/* Return TRUE for arithmetic operators which can be combined with a multiply
3494 (shift). */
3495
3496int
3497shiftable_operator (x, mode)
3498 rtx x;
3499 enum machine_mode mode;
3500{
1d6e90ac
NC
3501 enum rtx_code code;
3502
cce8749e
CH
3503 if (GET_MODE (x) != mode)
3504 return FALSE;
cce8749e 3505
1d6e90ac
NC
3506 code = GET_CODE (x);
3507
3508 return (code == PLUS || code == MINUS
3509 || code == IOR || code == XOR || code == AND);
f3bb6135 3510}
cce8749e 3511
6ab589e0
JL
3512/* Return TRUE for binary logical operators. */
3513
3514int
3515logical_binary_operator (x, mode)
3516 rtx x;
3517 enum machine_mode mode;
3518{
1d6e90ac
NC
3519 enum rtx_code code;
3520
6ab589e0
JL
3521 if (GET_MODE (x) != mode)
3522 return FALSE;
6ab589e0 3523
1d6e90ac
NC
3524 code = GET_CODE (x);
3525
3526 return (code == IOR || code == XOR || code == AND);
6ab589e0
JL
3527}
3528
6354dc9b 3529/* Return TRUE for shift operators. */
cce8749e
CH
3530
3531int
3532shift_operator (x, mode)
3533 rtx x;
3534 enum machine_mode mode;
3535{
1d6e90ac
NC
3536 enum rtx_code code;
3537
cce8749e
CH
3538 if (GET_MODE (x) != mode)
3539 return FALSE;
cce8749e 3540
1d6e90ac 3541 code = GET_CODE (x);
f3bb6135 3542
1d6e90ac
NC
3543 if (code == MULT)
3544 return power_of_two_operand (XEXP (x, 1), mode);
3545
3546 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3547 || code == ROTATERT);
f3bb6135 3548}
ff9940b0 3549
6354dc9b 3550/* Return TRUE if x is EQ or NE. */
1d6e90ac 3551
6354dc9b
NC
3552int
3553equality_operator (x, mode)
f3bb6135 3554 rtx x;
74bbc178 3555 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3556{
f3bb6135 3557 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
3558}
3559
e45b72c4 3560/* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
1d6e90ac 3561
e45b72c4
RE
3562int
3563arm_comparison_operator (x, mode)
3564 rtx x;
3565 enum machine_mode mode;
3566{
3567 return (comparison_operator (x, mode)
3568 && GET_CODE (x) != LTGT
3569 && GET_CODE (x) != UNEQ);
3570}
3571
6354dc9b 3572/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
1d6e90ac 3573
ff9940b0
RE
3574int
3575minmax_operator (x, mode)
3576 rtx x;
3577 enum machine_mode mode;
3578{
3579 enum rtx_code code = GET_CODE (x);
3580
3581 if (GET_MODE (x) != mode)
3582 return FALSE;
f3bb6135 3583
ff9940b0 3584 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 3585}
ff9940b0 3586
ff9940b0 3587/* Return TRUE if this is the condition code register, if we aren't given
6354dc9b 3588 a mode, accept any class CCmode register. */
1d6e90ac 3589
ff9940b0
RE
3590int
3591cc_register (x, mode)
f3bb6135
RE
3592 rtx x;
3593 enum machine_mode mode;
ff9940b0
RE
3594{
3595 if (mode == VOIDmode)
3596 {
3597 mode = GET_MODE (x);
d5b7b3ae 3598
ff9940b0
RE
3599 if (GET_MODE_CLASS (mode) != MODE_CC)
3600 return FALSE;
3601 }
f3bb6135 3602
d5b7b3ae
RE
3603 if ( GET_MODE (x) == mode
3604 && GET_CODE (x) == REG
3605 && REGNO (x) == CC_REGNUM)
ff9940b0 3606 return TRUE;
f3bb6135 3607
ff9940b0
RE
3608 return FALSE;
3609}
5bbe2d40
RE
3610
3611/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
3612 a mode, accept any class CCmode register which indicates a dominance
3613 expression. */
1d6e90ac 3614
5bbe2d40 3615int
84ed5e79 3616dominant_cc_register (x, mode)
5bbe2d40
RE
3617 rtx x;
3618 enum machine_mode mode;
3619{
3620 if (mode == VOIDmode)
3621 {
3622 mode = GET_MODE (x);
d5b7b3ae 3623
84ed5e79 3624 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
3625 return FALSE;
3626 }
3627
d5b7b3ae 3628 if ( mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
3629 && mode != CC_DLEmode && mode != CC_DLTmode
3630 && mode != CC_DGEmode && mode != CC_DGTmode
3631 && mode != CC_DLEUmode && mode != CC_DLTUmode
3632 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3633 return FALSE;
3634
d5b7b3ae 3635 return cc_register (x, mode);
5bbe2d40
RE
3636}
3637
2b835d68 3638/* Return TRUE if X references a SYMBOL_REF. */
1d6e90ac 3639
2b835d68
RE
3640int
3641symbol_mentioned_p (x)
3642 rtx x;
3643{
1d6e90ac
NC
3644 const char * fmt;
3645 int i;
2b835d68
RE
3646
3647 if (GET_CODE (x) == SYMBOL_REF)
3648 return 1;
3649
3650 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 3651
2b835d68
RE
3652 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3653 {
3654 if (fmt[i] == 'E')
3655 {
1d6e90ac 3656 int j;
2b835d68
RE
3657
3658 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3659 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3660 return 1;
3661 }
3662 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3663 return 1;
3664 }
3665
3666 return 0;
3667}
3668
3669/* Return TRUE if X references a LABEL_REF. */
1d6e90ac 3670
2b835d68
RE
3671int
3672label_mentioned_p (x)
3673 rtx x;
3674{
1d6e90ac
NC
3675 const char * fmt;
3676 int i;
2b835d68
RE
3677
3678 if (GET_CODE (x) == LABEL_REF)
3679 return 1;
3680
3681 fmt = GET_RTX_FORMAT (GET_CODE (x));
3682 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3683 {
3684 if (fmt[i] == 'E')
3685 {
1d6e90ac 3686 int j;
2b835d68
RE
3687
3688 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3689 if (label_mentioned_p (XVECEXP (x, i, j)))
3690 return 1;
3691 }
3692 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3693 return 1;
3694 }
3695
3696 return 0;
3697}
3698
ff9940b0
RE
3699enum rtx_code
3700minmax_code (x)
f3bb6135 3701 rtx x;
ff9940b0
RE
3702{
3703 enum rtx_code code = GET_CODE (x);
3704
3705 if (code == SMAX)
3706 return GE;
f3bb6135 3707 else if (code == SMIN)
ff9940b0 3708 return LE;
f3bb6135 3709 else if (code == UMIN)
ff9940b0 3710 return LEU;
f3bb6135 3711 else if (code == UMAX)
ff9940b0 3712 return GEU;
f3bb6135 3713
ff9940b0
RE
3714 abort ();
3715}
3716
6354dc9b 3717/* Return 1 if memory locations are adjacent. */
1d6e90ac 3718
f3bb6135 3719int
ff9940b0
RE
3720adjacent_mem_locations (a, b)
3721 rtx a, b;
3722{
ff9940b0
RE
3723 if ((GET_CODE (XEXP (a, 0)) == REG
3724 || (GET_CODE (XEXP (a, 0)) == PLUS
3725 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3726 && (GET_CODE (XEXP (b, 0)) == REG
3727 || (GET_CODE (XEXP (b, 0)) == PLUS
3728 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3729 {
1d6e90ac
NC
3730 int val0 = 0, val1 = 0;
3731 int reg0, reg1;
3732
ff9940b0
RE
3733 if (GET_CODE (XEXP (a, 0)) == PLUS)
3734 {
1d6e90ac 3735 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
ff9940b0
RE
3736 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3737 }
3738 else
3739 reg0 = REGNO (XEXP (a, 0));
1d6e90ac 3740
ff9940b0
RE
3741 if (GET_CODE (XEXP (b, 0)) == PLUS)
3742 {
1d6e90ac 3743 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
ff9940b0
RE
3744 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3745 }
3746 else
3747 reg1 = REGNO (XEXP (b, 0));
1d6e90ac 3748
ff9940b0
RE
3749 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3750 }
3751 return 0;
3752}
3753
3754/* Return 1 if OP is a load multiple operation. It is known to be
6354dc9b 3755 parallel and the first section will be tested. */
1d6e90ac 3756
f3bb6135 3757int
ff9940b0
RE
3758load_multiple_operation (op, mode)
3759 rtx op;
74bbc178 3760 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3761{
f3bb6135 3762 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3763 int dest_regno;
3764 rtx src_addr;
f3bb6135 3765 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3766 rtx elt;
3767
3768 if (count <= 1
3769 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3770 return 0;
3771
6354dc9b 3772 /* Check to see if this might be a write-back. */
ff9940b0
RE
3773 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3774 {
3775 i++;
3776 base = 1;
3777
6354dc9b 3778 /* Now check it more carefully. */
ff9940b0
RE
3779 if (GET_CODE (SET_DEST (elt)) != REG
3780 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3781 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3782 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3783 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3784 return 0;
ff9940b0
RE
3785 }
3786
3787 /* Perform a quick check so we don't blow up below. */
3788 if (count <= i
3789 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3790 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3791 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3792 return 0;
3793
3794 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3795 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3796
3797 for (; i < count; i++)
3798 {
ed4c4348 3799 elt = XVECEXP (op, 0, i);
ff9940b0
RE
3800
3801 if (GET_CODE (elt) != SET
3802 || GET_CODE (SET_DEST (elt)) != REG
3803 || GET_MODE (SET_DEST (elt)) != SImode
6354dc9b 3804 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
ff9940b0
RE
3805 || GET_CODE (SET_SRC (elt)) != MEM
3806 || GET_MODE (SET_SRC (elt)) != SImode
3807 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5895f793 3808 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
ff9940b0
RE
3809 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3810 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3811 return 0;
3812 }
3813
3814 return 1;
3815}
3816
3817/* Return 1 if OP is a store multiple operation. It is known to be
6354dc9b 3818 parallel and the first section will be tested. */
1d6e90ac 3819
f3bb6135 3820int
ff9940b0
RE
3821store_multiple_operation (op, mode)
3822 rtx op;
74bbc178 3823 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3824{
f3bb6135 3825 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3826 int src_regno;
3827 rtx dest_addr;
f3bb6135 3828 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3829 rtx elt;
3830
3831 if (count <= 1
3832 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3833 return 0;
3834
6354dc9b 3835 /* Check to see if this might be a write-back. */
ff9940b0
RE
3836 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3837 {
3838 i++;
3839 base = 1;
3840
6354dc9b 3841 /* Now check it more carefully. */
ff9940b0
RE
3842 if (GET_CODE (SET_DEST (elt)) != REG
3843 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3844 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3845 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3846 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3847 return 0;
ff9940b0
RE
3848 }
3849
3850 /* Perform a quick check so we don't blow up below. */
3851 if (count <= i
3852 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3853 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3854 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3855 return 0;
3856
3857 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3858 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3859
3860 for (; i < count; i++)
3861 {
3862 elt = XVECEXP (op, 0, i);
3863
3864 if (GET_CODE (elt) != SET
3865 || GET_CODE (SET_SRC (elt)) != REG
3866 || GET_MODE (SET_SRC (elt)) != SImode
6354dc9b 3867 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
ff9940b0
RE
3868 || GET_CODE (SET_DEST (elt)) != MEM
3869 || GET_MODE (SET_DEST (elt)) != SImode
3870 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5895f793 3871 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
ff9940b0
RE
3872 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3873 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3874 return 0;
3875 }
3876
3877 return 1;
3878}
e2c671ba 3879
84ed5e79
RE
3880int
3881load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3882 rtx * operands;
84ed5e79 3883 int nops;
62b10bbc
NC
3884 int * regs;
3885 int * base;
3886 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3887{
3888 int unsorted_regs[4];
3889 HOST_WIDE_INT unsorted_offsets[4];
3890 int order[4];
ad076f4e 3891 int base_reg = -1;
84ed5e79
RE
3892 int i;
3893
1d6e90ac
NC
3894 /* Can only handle 2, 3, or 4 insns at present,
3895 though could be easily extended if required. */
84ed5e79
RE
3896 if (nops < 2 || nops > 4)
3897 abort ();
3898
3899 /* Loop over the operands and check that the memory references are
3900 suitable (ie immediate offsets from the same base register). At
3901 the same time, extract the target register, and the memory
3902 offsets. */
3903 for (i = 0; i < nops; i++)
3904 {
3905 rtx reg;
3906 rtx offset;
3907
56636818
JL
3908 /* Convert a subreg of a mem into the mem itself. */
3909 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 3910 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 3911
84ed5e79
RE
3912 if (GET_CODE (operands[nops + i]) != MEM)
3913 abort ();
3914
3915 /* Don't reorder volatile memory references; it doesn't seem worth
3916 looking for the case where the order is ok anyway. */
3917 if (MEM_VOLATILE_P (operands[nops + i]))
3918 return 0;
3919
3920 offset = const0_rtx;
3921
3922 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3923 || (GET_CODE (reg) == SUBREG
3924 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3925 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3926 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3927 == REG)
3928 || (GET_CODE (reg) == SUBREG
3929 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3930 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3931 == CONST_INT)))
3932 {
3933 if (i == 0)
3934 {
d5b7b3ae 3935 base_reg = REGNO (reg);
84ed5e79
RE
3936 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3937 ? REGNO (operands[i])
3938 : REGNO (SUBREG_REG (operands[i])));
3939 order[0] = 0;
3940 }
3941 else
3942 {
6354dc9b 3943 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3944 /* Not addressed from the same base register. */
3945 return 0;
3946
3947 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3948 ? REGNO (operands[i])
3949 : REGNO (SUBREG_REG (operands[i])));
3950 if (unsorted_regs[i] < unsorted_regs[order[0]])
3951 order[0] = i;
3952 }
3953
3954 /* If it isn't an integer register, or if it overwrites the
3955 base register but isn't the last insn in the list, then
3956 we can't do this. */
3957 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3958 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3959 return 0;
3960
3961 unsorted_offsets[i] = INTVAL (offset);
3962 }
3963 else
3964 /* Not a suitable memory address. */
3965 return 0;
3966 }
3967
3968 /* All the useful information has now been extracted from the
3969 operands into unsorted_regs and unsorted_offsets; additionally,
3970 order[0] has been set to the lowest numbered register in the
3971 list. Sort the registers into order, and check that the memory
3972 offsets are ascending and adjacent. */
3973
3974 for (i = 1; i < nops; i++)
3975 {
3976 int j;
3977
3978 order[i] = order[i - 1];
3979 for (j = 0; j < nops; j++)
3980 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3981 && (order[i] == order[i - 1]
3982 || unsorted_regs[j] < unsorted_regs[order[i]]))
3983 order[i] = j;
3984
3985 /* Have we found a suitable register? if not, one must be used more
3986 than once. */
3987 if (order[i] == order[i - 1])
3988 return 0;
3989
3990 /* Is the memory address adjacent and ascending? */
3991 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3992 return 0;
3993 }
3994
3995 if (base)
3996 {
3997 *base = base_reg;
3998
3999 for (i = 0; i < nops; i++)
4000 regs[i] = unsorted_regs[order[i]];
4001
4002 *load_offset = unsorted_offsets[order[0]];
4003 }
4004
4005 if (unsorted_offsets[order[0]] == 0)
4006 return 1; /* ldmia */
4007
4008 if (unsorted_offsets[order[0]] == 4)
4009 return 2; /* ldmib */
4010
4011 if (unsorted_offsets[order[nops - 1]] == 0)
4012 return 3; /* ldmda */
4013
4014 if (unsorted_offsets[order[nops - 1]] == -4)
4015 return 4; /* ldmdb */
4016
949d79eb
RE
4017 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4018 if the offset isn't small enough. The reason 2 ldrs are faster
4019 is because these ARMs are able to do more than one cache access
4020 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4021 whilst the ARM8 has a double bandwidth cache. This means that
4022 these cores can do both an instruction fetch and a data fetch in
4023 a single cycle, so the trick of calculating the address into a
4024 scratch register (one of the result regs) and then doing a load
4025 multiple actually becomes slower (and no smaller in code size).
4026 That is the transformation
6cc8c0b3
NC
4027
4028 ldr rd1, [rbase + offset]
4029 ldr rd2, [rbase + offset + 4]
4030
4031 to
4032
4033 add rd1, rbase, offset
4034 ldmia rd1, {rd1, rd2}
4035
949d79eb
RE
4036 produces worse code -- '3 cycles + any stalls on rd2' instead of
4037 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4038 access per cycle, the first sequence could never complete in less
4039 than 6 cycles, whereas the ldm sequence would only take 5 and
4040 would make better use of sequential accesses if not hitting the
4041 cache.
4042
4043 We cheat here and test 'arm_ld_sched' which we currently know to
4044 only be true for the ARM8, ARM9 and StrongARM. If this ever
4045 changes, then the test below needs to be reworked. */
f5a1b0d2 4046 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
4047 return 0;
4048
84ed5e79
RE
4049 /* Can't do it without setting up the offset, only do this if it takes
4050 no more than one insn. */
4051 return (const_ok_for_arm (unsorted_offsets[order[0]])
4052 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
4053}
4054
cd2b33d0 4055const char *
84ed5e79 4056emit_ldm_seq (operands, nops)
62b10bbc 4057 rtx * operands;
84ed5e79
RE
4058 int nops;
4059{
4060 int regs[4];
4061 int base_reg;
4062 HOST_WIDE_INT offset;
4063 char buf[100];
4064 int i;
4065
4066 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4067 {
4068 case 1:
4069 strcpy (buf, "ldm%?ia\t");
4070 break;
4071
4072 case 2:
4073 strcpy (buf, "ldm%?ib\t");
4074 break;
4075
4076 case 3:
4077 strcpy (buf, "ldm%?da\t");
4078 break;
4079
4080 case 4:
4081 strcpy (buf, "ldm%?db\t");
4082 break;
4083
4084 case 5:
4085 if (offset >= 0)
4086 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4087 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4088 (long) offset);
4089 else
4090 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4091 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4092 (long) -offset);
4093 output_asm_insn (buf, operands);
4094 base_reg = regs[0];
4095 strcpy (buf, "ldm%?ia\t");
4096 break;
4097
4098 default:
4099 abort ();
4100 }
4101
4102 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4103 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4104
4105 for (i = 1; i < nops; i++)
4106 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4107 reg_names[regs[i]]);
4108
4109 strcat (buf, "}\t%@ phole ldm");
4110
4111 output_asm_insn (buf, operands);
4112 return "";
4113}
4114
4115int
4116store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 4117 rtx * operands;
84ed5e79 4118 int nops;
62b10bbc
NC
4119 int * regs;
4120 int * base;
4121 HOST_WIDE_INT * load_offset;
84ed5e79
RE
4122{
4123 int unsorted_regs[4];
4124 HOST_WIDE_INT unsorted_offsets[4];
4125 int order[4];
ad076f4e 4126 int base_reg = -1;
84ed5e79
RE
4127 int i;
4128
4129 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4130 extended if required. */
4131 if (nops < 2 || nops > 4)
4132 abort ();
4133
4134 /* Loop over the operands and check that the memory references are
4135 suitable (ie immediate offsets from the same base register). At
4136 the same time, extract the target register, and the memory
4137 offsets. */
4138 for (i = 0; i < nops; i++)
4139 {
4140 rtx reg;
4141 rtx offset;
4142
56636818
JL
4143 /* Convert a subreg of a mem into the mem itself. */
4144 if (GET_CODE (operands[nops + i]) == SUBREG)
4e26a7af 4145 operands[nops + i] = alter_subreg (operands + (nops + i));
56636818 4146
84ed5e79
RE
4147 if (GET_CODE (operands[nops + i]) != MEM)
4148 abort ();
4149
4150 /* Don't reorder volatile memory references; it doesn't seem worth
4151 looking for the case where the order is ok anyway. */
4152 if (MEM_VOLATILE_P (operands[nops + i]))
4153 return 0;
4154
4155 offset = const0_rtx;
4156
4157 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4158 || (GET_CODE (reg) == SUBREG
4159 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4160 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4161 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4162 == REG)
4163 || (GET_CODE (reg) == SUBREG
4164 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4165 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4166 == CONST_INT)))
4167 {
4168 if (i == 0)
4169 {
62b10bbc 4170 base_reg = REGNO (reg);
84ed5e79
RE
4171 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4172 ? REGNO (operands[i])
4173 : REGNO (SUBREG_REG (operands[i])));
4174 order[0] = 0;
4175 }
4176 else
4177 {
6354dc9b 4178 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
4179 /* Not addressed from the same base register. */
4180 return 0;
4181
4182 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4183 ? REGNO (operands[i])
4184 : REGNO (SUBREG_REG (operands[i])));
4185 if (unsorted_regs[i] < unsorted_regs[order[0]])
4186 order[0] = i;
4187 }
4188
4189 /* If it isn't an integer register, then we can't do this. */
4190 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4191 return 0;
4192
4193 unsorted_offsets[i] = INTVAL (offset);
4194 }
4195 else
4196 /* Not a suitable memory address. */
4197 return 0;
4198 }
4199
4200 /* All the useful information has now been extracted from the
4201 operands into unsorted_regs and unsorted_offsets; additionally,
4202 order[0] has been set to the lowest numbered register in the
4203 list. Sort the registers into order, and check that the memory
4204 offsets are ascending and adjacent. */
4205
4206 for (i = 1; i < nops; i++)
4207 {
4208 int j;
4209
4210 order[i] = order[i - 1];
4211 for (j = 0; j < nops; j++)
4212 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4213 && (order[i] == order[i - 1]
4214 || unsorted_regs[j] < unsorted_regs[order[i]]))
4215 order[i] = j;
4216
4217 /* Have we found a suitable register? if not, one must be used more
4218 than once. */
4219 if (order[i] == order[i - 1])
4220 return 0;
4221
4222 /* Is the memory address adjacent and ascending? */
4223 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4224 return 0;
4225 }
4226
4227 if (base)
4228 {
4229 *base = base_reg;
4230
4231 for (i = 0; i < nops; i++)
4232 regs[i] = unsorted_regs[order[i]];
4233
4234 *load_offset = unsorted_offsets[order[0]];
4235 }
4236
4237 if (unsorted_offsets[order[0]] == 0)
4238 return 1; /* stmia */
4239
4240 if (unsorted_offsets[order[0]] == 4)
4241 return 2; /* stmib */
4242
4243 if (unsorted_offsets[order[nops - 1]] == 0)
4244 return 3; /* stmda */
4245
4246 if (unsorted_offsets[order[nops - 1]] == -4)
4247 return 4; /* stmdb */
4248
4249 return 0;
4250}
4251
cd2b33d0 4252const char *
84ed5e79 4253emit_stm_seq (operands, nops)
62b10bbc 4254 rtx * operands;
84ed5e79
RE
4255 int nops;
4256{
4257 int regs[4];
4258 int base_reg;
4259 HOST_WIDE_INT offset;
4260 char buf[100];
4261 int i;
4262
4263 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4264 {
4265 case 1:
4266 strcpy (buf, "stm%?ia\t");
4267 break;
4268
4269 case 2:
4270 strcpy (buf, "stm%?ib\t");
4271 break;
4272
4273 case 3:
4274 strcpy (buf, "stm%?da\t");
4275 break;
4276
4277 case 4:
4278 strcpy (buf, "stm%?db\t");
4279 break;
4280
4281 default:
4282 abort ();
4283 }
4284
4285 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4286 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4287
4288 for (i = 1; i < nops; i++)
4289 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4290 reg_names[regs[i]]);
4291
4292 strcat (buf, "}\t%@ phole stm");
4293
4294 output_asm_insn (buf, operands);
4295 return "";
4296}
4297
e2c671ba
RE
4298int
4299multi_register_push (op, mode)
0a81f500 4300 rtx op;
74bbc178 4301 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
4302{
4303 if (GET_CODE (op) != PARALLEL
4304 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4305 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
b15bca31 4306 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
e2c671ba
RE
4307 return 0;
4308
4309 return 1;
4310}
ff9940b0 4311\f
6354dc9b 4312/* Routines for use in generating RTL. */
1d6e90ac 4313
f3bb6135 4314rtx
56636818 4315arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 4316 in_struct_p, scalar_p)
ff9940b0
RE
4317 int base_regno;
4318 int count;
4319 rtx from;
4320 int up;
4321 int write_back;
56636818
JL
4322 int unchanging_p;
4323 int in_struct_p;
c6df88cb 4324 int scalar_p;
ff9940b0
RE
4325{
4326 int i = 0, j;
4327 rtx result;
4328 int sign = up ? 1 : -1;
56636818 4329 rtx mem;
ff9940b0 4330
d19fb8e3
NC
4331 /* XScale has load-store double instructions, but they have stricter
4332 alignment requirements than load-store multiple, so we can not
4333 use them.
4334
4335 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4336 the pipeline until completion.
4337
4338 NREGS CYCLES
4339 1 3
4340 2 4
4341 3 5
4342 4 6
4343
4344 An ldr instruction takes 1-3 cycles, but does not block the
4345 pipeline.
4346
4347 NREGS CYCLES
4348 1 1-3
4349 2 2-6
4350 3 3-9
4351 4 4-12
4352
4353 Best case ldr will always win. However, the more ldr instructions
4354 we issue, the less likely we are to be able to schedule them well.
4355 Using ldr instructions also increases code size.
4356
4357 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4358 for counts of 3 or 4 regs. */
4359 if (arm_is_xscale && count <= 2 && ! optimize_size)
4360 {
4361 rtx seq;
4362
4363 start_sequence ();
4364
4365 for (i = 0; i < count; i++)
4366 {
4367 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4368 RTX_UNCHANGING_P (mem) = unchanging_p;
4369 MEM_IN_STRUCT_P (mem) = in_struct_p;
4370 MEM_SCALAR_P (mem) = scalar_p;
4371 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4372 }
4373
4374 if (write_back)
4375 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4376
2f937369 4377 seq = get_insns ();
d19fb8e3
NC
4378 end_sequence ();
4379
4380 return seq;
4381 }
4382
43cffd11 4383 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 4384 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 4385 if (write_back)
f3bb6135 4386 {
ff9940b0 4387 XVECEXP (result, 0, 0)
43cffd11
RE
4388 = gen_rtx_SET (GET_MODE (from), from,
4389 plus_constant (from, count * 4 * sign));
ff9940b0
RE
4390 i = 1;
4391 count++;
f3bb6135
RE
4392 }
4393
ff9940b0 4394 for (j = 0; i < count; i++, j++)
f3bb6135 4395 {
43cffd11 4396 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
4397 RTX_UNCHANGING_P (mem) = unchanging_p;
4398 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 4399 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
4400 XVECEXP (result, 0, i)
4401 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
4402 }
4403
ff9940b0
RE
4404 return result;
4405}
4406
f3bb6135 4407rtx
56636818 4408arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 4409 in_struct_p, scalar_p)
ff9940b0
RE
4410 int base_regno;
4411 int count;
4412 rtx to;
4413 int up;
4414 int write_back;
56636818
JL
4415 int unchanging_p;
4416 int in_struct_p;
c6df88cb 4417 int scalar_p;
ff9940b0
RE
4418{
4419 int i = 0, j;
4420 rtx result;
4421 int sign = up ? 1 : -1;
56636818 4422 rtx mem;
ff9940b0 4423
d19fb8e3
NC
4424 /* See arm_gen_load_multiple for discussion of
4425 the pros/cons of ldm/stm usage for XScale. */
4426 if (arm_is_xscale && count <= 2 && ! optimize_size)
4427 {
4428 rtx seq;
4429
4430 start_sequence ();
4431
4432 for (i = 0; i < count; i++)
4433 {
4434 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4435 RTX_UNCHANGING_P (mem) = unchanging_p;
4436 MEM_IN_STRUCT_P (mem) = in_struct_p;
4437 MEM_SCALAR_P (mem) = scalar_p;
4438 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4439 }
4440
4441 if (write_back)
4442 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4443
2f937369 4444 seq = get_insns ();
d19fb8e3
NC
4445 end_sequence ();
4446
4447 return seq;
4448 }
4449
43cffd11 4450 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 4451 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 4452 if (write_back)
f3bb6135 4453 {
ff9940b0 4454 XVECEXP (result, 0, 0)
43cffd11
RE
4455 = gen_rtx_SET (GET_MODE (to), to,
4456 plus_constant (to, count * 4 * sign));
ff9940b0
RE
4457 i = 1;
4458 count++;
f3bb6135
RE
4459 }
4460
ff9940b0 4461 for (j = 0; i < count; i++, j++)
f3bb6135 4462 {
43cffd11 4463 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
4464 RTX_UNCHANGING_P (mem) = unchanging_p;
4465 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 4466 MEM_SCALAR_P (mem) = scalar_p;
56636818 4467
43cffd11
RE
4468 XVECEXP (result, 0, i)
4469 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
4470 }
4471
ff9940b0
RE
4472 return result;
4473}
4474
880e2516
RE
4475int
4476arm_gen_movstrqi (operands)
62b10bbc 4477 rtx * operands;
880e2516
RE
4478{
4479 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 4480 int i;
880e2516 4481 rtx src, dst;
ad076f4e 4482 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 4483 rtx part_bytes_reg = NULL;
56636818
JL
4484 rtx mem;
4485 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 4486 int dst_scalar_p, src_scalar_p;
880e2516
RE
4487
4488 if (GET_CODE (operands[2]) != CONST_INT
4489 || GET_CODE (operands[3]) != CONST_INT
4490 || INTVAL (operands[2]) > 64
4491 || INTVAL (operands[3]) & 3)
4492 return 0;
4493
4494 st_dst = XEXP (operands[0], 0);
4495 st_src = XEXP (operands[1], 0);
56636818
JL
4496
4497 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4498 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 4499 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
4500 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4501 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 4502 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 4503
880e2516
RE
4504 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4505 fin_src = src = copy_to_mode_reg (SImode, st_src);
4506
e9d7b180 4507 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
880e2516
RE
4508 out_words_to_go = INTVAL (operands[2]) / 4;
4509 last_bytes = INTVAL (operands[2]) & 3;
4510
4511 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 4512 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
4513
4514 for (i = 0; in_words_to_go >= 2; i+=4)
4515 {
bd9c7e23 4516 if (in_words_to_go > 4)
56636818 4517 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
4518 src_unchanging_p,
4519 src_in_struct_p,
4520 src_scalar_p));
bd9c7e23
RE
4521 else
4522 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 4523 FALSE, src_unchanging_p,
c6df88cb 4524 src_in_struct_p, src_scalar_p));
bd9c7e23 4525
880e2516
RE
4526 if (out_words_to_go)
4527 {
bd9c7e23 4528 if (out_words_to_go > 4)
56636818
JL
4529 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4530 dst_unchanging_p,
c6df88cb
MM
4531 dst_in_struct_p,
4532 dst_scalar_p));
bd9c7e23
RE
4533 else if (out_words_to_go != 1)
4534 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4535 dst, TRUE,
4536 (last_bytes == 0
56636818
JL
4537 ? FALSE : TRUE),
4538 dst_unchanging_p,
c6df88cb
MM
4539 dst_in_struct_p,
4540 dst_scalar_p));
880e2516
RE
4541 else
4542 {
43cffd11 4543 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
4544 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4545 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4546 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4547 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
4548 if (last_bytes != 0)
4549 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
4550 }
4551 }
4552
4553 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4554 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4555 }
4556
4557 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4558 if (out_words_to_go)
62b10bbc
NC
4559 {
4560 rtx sreg;
4561
4562 mem = gen_rtx_MEM (SImode, src);
4563 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4564 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4565 MEM_SCALAR_P (mem) = src_scalar_p;
4566 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4567 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4568
4569 mem = gen_rtx_MEM (SImode, dst);
4570 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4571 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4572 MEM_SCALAR_P (mem) = dst_scalar_p;
4573 emit_move_insn (mem, sreg);
4574 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4575 in_words_to_go--;
4576
4577 if (in_words_to_go) /* Sanity check */
4578 abort ();
4579 }
880e2516
RE
4580
4581 if (in_words_to_go)
4582 {
4583 if (in_words_to_go < 0)
4584 abort ();
4585
43cffd11 4586 mem = gen_rtx_MEM (SImode, src);
56636818
JL
4587 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4588 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 4589 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 4590 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
4591 }
4592
d5b7b3ae
RE
4593 if (last_bytes && part_bytes_reg == NULL)
4594 abort ();
4595
880e2516
RE
4596 if (BYTES_BIG_ENDIAN && last_bytes)
4597 {
4598 rtx tmp = gen_reg_rtx (SImode);
4599
6354dc9b 4600 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
4601 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4602 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
4603 part_bytes_reg = tmp;
4604
4605 while (last_bytes)
4606 {
43cffd11 4607 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
4608 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4609 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4610 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2
BS
4611 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
4612
880e2516
RE
4613 if (--last_bytes)
4614 {
4615 tmp = gen_reg_rtx (SImode);
4616 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4617 part_bytes_reg = tmp;
4618 }
4619 }
4620
4621 }
4622 else
4623 {
d5b7b3ae 4624 if (last_bytes > 1)
880e2516 4625 {
d5b7b3ae 4626 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
4627 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4628 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4629 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 4630 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
d5b7b3ae
RE
4631 last_bytes -= 2;
4632 if (last_bytes)
880e2516
RE
4633 {
4634 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 4635
d5b7b3ae
RE
4636 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4637 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
4638 part_bytes_reg = tmp;
4639 }
4640 }
d5b7b3ae
RE
4641
4642 if (last_bytes)
4643 {
4644 mem = gen_rtx_MEM (QImode, dst);
4645 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4646 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4647 MEM_SCALAR_P (mem) = dst_scalar_p;
5d5603e2 4648 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
d5b7b3ae 4649 }
880e2516
RE
4650 }
4651
4652 return 1;
4653}
4654
5165176d
RE
4655/* Generate a memory reference for a half word, such that it will be loaded
4656 into the top 16 bits of the word. We can assume that the address is
4657 known to be alignable and of the form reg, or plus (reg, const). */
1d6e90ac 4658
5165176d 4659rtx
d5b7b3ae 4660arm_gen_rotated_half_load (memref)
5165176d
RE
4661 rtx memref;
4662{
4663 HOST_WIDE_INT offset = 0;
4664 rtx base = XEXP (memref, 0);
4665
4666 if (GET_CODE (base) == PLUS)
4667 {
4668 offset = INTVAL (XEXP (base, 1));
4669 base = XEXP (base, 0);
4670 }
4671
956d6950 4672 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 4673 if (TARGET_MMU_TRAPS
5165176d
RE
4674 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4675 return NULL;
4676
43cffd11 4677 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
4678
4679 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4680 return base;
4681
43cffd11 4682 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
4683}
4684
1646cf41
RE
4685/* Select a dominance comparison mode if possible. We support three forms.
4686 COND_OR == 0 => (X && Y)
4687 COND_OR == 1 => ((! X( || Y)
4688 COND_OR == 2 => (X || Y)
4689 If we are unable to support a dominance comparsison we return CC mode.
4690 This will then fail to match for the RTL expressions that generate this
4691 call. */
d19fb8e3 4692
84ed5e79 4693static enum machine_mode
74bbc178 4694select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
4695 rtx x;
4696 rtx y;
4697 HOST_WIDE_INT cond_or;
4698{
4699 enum rtx_code cond1, cond2;
4700 int swapped = 0;
4701
4702 /* Currently we will probably get the wrong result if the individual
4703 comparisons are not simple. This also ensures that it is safe to
956d6950 4704 reverse a comparison if necessary. */
84ed5e79
RE
4705 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4706 != CCmode)
4707 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4708 != CCmode))
4709 return CCmode;
4710
1646cf41
RE
4711 /* The if_then_else variant of this tests the second condition if the
4712 first passes, but is true if the first fails. Reverse the first
4713 condition to get a true "inclusive-or" expression. */
4714 if (cond_or == 1)
84ed5e79
RE
4715 cond1 = reverse_condition (cond1);
4716
4717 /* If the comparisons are not equal, and one doesn't dominate the other,
4718 then we can't do this. */
4719 if (cond1 != cond2
5895f793
RE
4720 && !comparison_dominates_p (cond1, cond2)
4721 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
4722 return CCmode;
4723
4724 if (swapped)
4725 {
4726 enum rtx_code temp = cond1;
4727 cond1 = cond2;
4728 cond2 = temp;
4729 }
4730
4731 switch (cond1)
4732 {
4733 case EQ:
5895f793 4734 if (cond2 == EQ || !cond_or)
84ed5e79
RE
4735 return CC_DEQmode;
4736
4737 switch (cond2)
4738 {
4739 case LE: return CC_DLEmode;
4740 case LEU: return CC_DLEUmode;
4741 case GE: return CC_DGEmode;
4742 case GEU: return CC_DGEUmode;
ad076f4e 4743 default: break;
84ed5e79
RE
4744 }
4745
4746 break;
4747
4748 case LT:
5895f793 4749 if (cond2 == LT || !cond_or)
84ed5e79
RE
4750 return CC_DLTmode;
4751 if (cond2 == LE)
4752 return CC_DLEmode;
4753 if (cond2 == NE)
4754 return CC_DNEmode;
4755 break;
4756
4757 case GT:
5895f793 4758 if (cond2 == GT || !cond_or)
84ed5e79
RE
4759 return CC_DGTmode;
4760 if (cond2 == GE)
4761 return CC_DGEmode;
4762 if (cond2 == NE)
4763 return CC_DNEmode;
4764 break;
4765
4766 case LTU:
5895f793 4767 if (cond2 == LTU || !cond_or)
84ed5e79
RE
4768 return CC_DLTUmode;
4769 if (cond2 == LEU)
4770 return CC_DLEUmode;
4771 if (cond2 == NE)
4772 return CC_DNEmode;
4773 break;
4774
4775 case GTU:
5895f793 4776 if (cond2 == GTU || !cond_or)
84ed5e79
RE
4777 return CC_DGTUmode;
4778 if (cond2 == GEU)
4779 return CC_DGEUmode;
4780 if (cond2 == NE)
4781 return CC_DNEmode;
4782 break;
4783
4784 /* The remaining cases only occur when both comparisons are the
4785 same. */
4786 case NE:
4787 return CC_DNEmode;
4788
4789 case LE:
4790 return CC_DLEmode;
4791
4792 case GE:
4793 return CC_DGEmode;
4794
4795 case LEU:
4796 return CC_DLEUmode;
4797
4798 case GEU:
4799 return CC_DGEUmode;
ad076f4e
RE
4800
4801 default:
4802 break;
84ed5e79
RE
4803 }
4804
4805 abort ();
4806}
4807
4808enum machine_mode
4809arm_select_cc_mode (op, x, y)
4810 enum rtx_code op;
4811 rtx x;
4812 rtx y;
4813{
4814 /* All floating point compares return CCFP if it is an equality
4815 comparison, and CCFPE otherwise. */
4816 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
4817 {
4818 switch (op)
4819 {
4820 case EQ:
4821 case NE:
4822 case UNORDERED:
4823 case ORDERED:
4824 case UNLT:
4825 case UNLE:
4826 case UNGT:
4827 case UNGE:
4828 case UNEQ:
4829 case LTGT:
4830 return CCFPmode;
4831
4832 case LT:
4833 case LE:
4834 case GT:
4835 case GE:
4836 return CCFPEmode;
4837
4838 default:
4839 abort ();
4840 }
4841 }
84ed5e79
RE
4842
4843 /* A compare with a shifted operand. Because of canonicalization, the
4844 comparison will have to be swapped when we emit the assembler. */
4845 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4846 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4847 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4848 || GET_CODE (x) == ROTATERT))
4849 return CC_SWPmode;
4850
956d6950
JL
4851 /* This is a special case that is used by combine to allow a
4852 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 4853 followed by a comparison of the shifted integer (only valid for
956d6950 4854 equalities and unsigned inequalities). */
84ed5e79
RE
4855 if (GET_MODE (x) == SImode
4856 && GET_CODE (x) == ASHIFT
4857 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4858 && GET_CODE (XEXP (x, 0)) == SUBREG
4859 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4860 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4861 && (op == EQ || op == NE
4862 || op == GEU || op == GTU || op == LTU || op == LEU)
4863 && GET_CODE (y) == CONST_INT)
4864 return CC_Zmode;
4865
1646cf41
RE
4866 /* A construct for a conditional compare, if the false arm contains
4867 0, then both conditions must be true, otherwise either condition
4868 must be true. Not all conditions are possible, so CCmode is
4869 returned if it can't be done. */
4870 if (GET_CODE (x) == IF_THEN_ELSE
4871 && (XEXP (x, 2) == const0_rtx
4872 || XEXP (x, 2) == const1_rtx)
4873 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4874 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4875 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4876 INTVAL (XEXP (x, 2)));
4877
4878 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4879 if (GET_CODE (x) == AND
4880 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4881 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4882 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4883
4884 if (GET_CODE (x) == IOR
4885 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4886 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4887 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4888
84ed5e79
RE
4889 /* An operation that sets the condition codes as a side-effect, the
4890 V flag is not set correctly, so we can only use comparisons where
4891 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4892 instead. */
4893 if (GET_MODE (x) == SImode
4894 && y == const0_rtx
4895 && (op == EQ || op == NE || op == LT || op == GE)
4896 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4897 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4898 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4899 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4900 || GET_CODE (x) == LSHIFTRT
4901 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4902 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4903 return CC_NOOVmode;
4904
84ed5e79
RE
4905 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4906 return CC_Zmode;
4907
bd9c7e23
RE
4908 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4909 && GET_CODE (x) == PLUS
4910 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4911 return CC_Cmode;
4912
84ed5e79
RE
4913 return CCmode;
4914}
4915
ff9940b0
RE
4916/* X and Y are two things to compare using CODE. Emit the compare insn and
4917 return the rtx for register 0 in the proper mode. FP means this is a
4918 floating point compare: I don't think that it is needed on the arm. */
4919
4920rtx
d5b7b3ae 4921arm_gen_compare_reg (code, x, y)
ff9940b0
RE
4922 enum rtx_code code;
4923 rtx x, y;
4924{
4925 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 4926 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 4927
43cffd11
RE
4928 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4929 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4930
4931 return cc_reg;
4932}
4933
fcd53748
JT
4934/* Generate a sequence of insns that will generate the correct return
4935 address mask depending on the physical architecture that the program
4936 is running on. */
4937
4938rtx
4939arm_gen_return_addr_mask ()
4940{
4941 rtx reg = gen_reg_rtx (Pmode);
4942
4943 emit_insn (gen_return_addr_mask (reg));
4944 return reg;
4945}
4946
0a81f500
RE
/* Reload a half-word from memory during reload by synthesising it as
   two zero-extended byte loads combined with a shift/IOR.
   OPERANDS[0] is the destination, OPERANDS[1] the (possibly spilt)
   memory source, and OPERANDS[2] a scratch whose register number and
   the following register are used for address computation (presumably
   a DImode scratch, as in arm_reload_out_hi — confirm against the
   reload_inhi pattern).  */

void
arm_reload_in_hi (operands)
     rtx * operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Strip a SUBREG wrapper, remembering the byte offset it implied.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      /* Compute the full address into the second scratch register and
	 use that as the base instead.  */
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* hi is offset - lo, sign-extended to HOST_WIDE_INT through the
	 32-bit mask/xor/subtract trick.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      /* The split must reassemble exactly, or something is badly wrong.  */
      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Load the two bytes separately, each zero-extended to SImode.  */
  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  /* Combine the two bytes; which byte supplies the high half depends
     on the target endianness.  */
  if (!BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			gen_rtx_IOR (SImode,
				     gen_rtx_ASHIFT
				     (SImode,
				      gen_rtx_SUBREG (SImode, operands[0], 0),
				      GEN_INT (8)),
				     scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT (SImode, scratch,
							 GEN_INT (8)),
					 gen_rtx_SUBREG (SImode, operands[0],
							 0))));
}
5052
f9cc092a
RE
/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).

   OPERANDS[0] is the (possibly spilt) memory destination, OPERANDS[1]
   the value to store, OPERANDS[2] the DImode scratch.  */

void
arm_reload_out_hi (operands)
     rtx * operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Strip a SUBREG wrapper, remembering the byte offset it implied.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Updating base_plus might destroy outval, see if we can
	     swap the scratch and base_plus.  */
	  if (!reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      /* Be conservative and copy OUTVAL into the scratch now,
		 this should only be necessary if outval is a subreg
		 of something larger than a word.  */
	      /* XXX Might this clobber base?  I can't see how it can,
		 since scratch is known to overlap with OUTVAL, and
		 must be wider than a word.  */
	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* hi is offset - lo, sign-extended to HOST_WIDE_INT through the
	 32-bit mask/xor/subtract trick.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      /* The split must reassemble exactly, or something is badly wrong.  */
      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Be careful not to destroy OUTVAL.  */
	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Updating base_plus might destroy outval, see if we
		 can swap the scratch and base_plus.  */
	      if (!reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  /* Be conservative and copy outval into scratch now,
		     this should only be necessary if outval is a
		     subreg of something larger than a word.  */
		  /* XXX Might this clobber base?  I can't see how it
		     can, since scratch is known to overlap with
		     outval.  */
		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Store the two bytes; the byte order depends on target endianness.
     The high byte is obtained by shifting the value right by 8.  */
  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, scratch)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, scratch)));
    }
}
2b835d68 5218\f
d5b7b3ae 5219/* Print a symbolic form of X to the debug file, F. */
1d6e90ac 5220
d5b7b3ae
RE
5221static void
5222arm_print_value (f, x)
5223 FILE * f;
5224 rtx x;
5225{
5226 switch (GET_CODE (x))
5227 {
5228 case CONST_INT:
5229 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5230 return;
5231
5232 case CONST_DOUBLE:
5233 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5234 return;
5235
5236 case CONST_STRING:
5237 fprintf (f, "\"%s\"", XSTR (x, 0));
5238 return;
5239
5240 case SYMBOL_REF:
5241 fprintf (f, "`%s'", XSTR (x, 0));
5242 return;
5243
5244 case LABEL_REF:
5245 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5246 return;
5247
5248 case CONST:
5249 arm_print_value (f, XEXP (x, 0));
5250 return;
5251
5252 case PLUS:
5253 arm_print_value (f, XEXP (x, 0));
5254 fprintf (f, "+");
5255 arm_print_value (f, XEXP (x, 1));
5256 return;
5257
5258 case PC:
5259 fprintf (f, "pc");
5260 return;
5261
5262 default:
5263 fprintf (f, "????");
5264 return;
5265 }
5266}
5267\f
2b835d68 5268/* Routines for manipulation of the constant pool. */
2b835d68 5269
949d79eb
RE
5270/* Arm instructions cannot load a large constant directly into a
5271 register; they have to come from a pc relative load. The constant
5272 must therefore be placed in the addressable range of the pc
5273 relative load. Depending on the precise pc relative load
5274 instruction the range is somewhere between 256 bytes and 4k. This
5275 means that we often have to dump a constant inside a function, and
2b835d68
RE
5276 generate code to branch around it.
5277
949d79eb
RE
5278 It is important to minimize this, since the branches will slow
5279 things down and make the code larger.
2b835d68 5280
949d79eb
RE
5281 Normally we can hide the table after an existing unconditional
5282 branch so that there is no interruption of the flow, but in the
5283 worst case the code looks like this:
2b835d68
RE
5284
5285 ldr rn, L1
949d79eb 5286 ...
2b835d68
RE
5287 b L2
5288 align
5289 L1: .long value
5290 L2:
949d79eb 5291 ...
2b835d68 5292
2b835d68 5293 ldr rn, L3
949d79eb 5294 ...
2b835d68
RE
5295 b L4
5296 align
2b835d68
RE
5297 L3: .long value
5298 L4:
949d79eb
RE
5299 ...
5300
5301 We fix this by performing a scan after scheduling, which notices
5302 which instructions need to have their operands fetched from the
5303 constant table and builds the table.
5304
5305 The algorithm starts by building a table of all the constants that
5306 need fixing up and all the natural barriers in the function (places
5307 where a constant table can be dropped without breaking the flow).
5308 For each fixup we note how far the pc-relative replacement will be
5309 able to reach and the offset of the instruction into the function.
5310
5311 Having built the table we then group the fixes together to form
5312 tables that are as large as possible (subject to addressing
5313 constraints) and emit each table of constants after the last
5314 barrier that is within range of all the instructions in the group.
5315 If a group does not contain a barrier, then we forcibly create one
5316 by inserting a jump instruction into the flow. Once the table has
5317 been inserted, the insns are then modified to reference the
5318 relevant entry in the pool.
5319
6354dc9b 5320 Possible enhancements to the algorithm (not implemented) are:
949d79eb 5321
d5b7b3ae 5322 1) For some processors and object formats, there may be benefit in
949d79eb
RE
5323 aligning the pools to the start of cache lines; this alignment
5324 would need to be taken into account when calculating addressability
6354dc9b 5325 of a pool. */
2b835d68 5326
d5b7b3ae
RE
5327/* These typedefs are located at the start of this file, so that
5328 they can be used in the prototypes there. This comment is to
5329 remind readers of that fact so that the following structures
5330 can be understood more easily.
5331
5332 typedef struct minipool_node Mnode;
5333 typedef struct minipool_fixup Mfix; */
5334
/* An entry (constant) in the minipool, kept on a doubly-linked list
   sorted by placement constraint.  */
struct minipool_node
{
  /* Doubly linked chain of entries.  */
  Mnode * next;
  Mnode * prev;
  /* The maximum offset into the code that this entry can be placed.  While
     pushing fixes for forward references, all entries are sorted in order
     of increasing max_address.  */
  HOST_WIDE_INT max_address;
  /* Similarly for an entry inserted for a backwards ref.  */
  HOST_WIDE_INT min_address;
  /* The number of fixes referencing this entry.  This can become zero
     if we "unpush" an entry.  In this case we ignore the entry when we
     come to emit the code.  */
  int refcount;
  /* The offset from the start of the minipool.  */
  HOST_WIDE_INT offset;
  /* The value in table.  */
  rtx value;
  /* The mode of value.  */
  enum machine_mode mode;
  /* The size in bytes that the value occupies in the pool
     (1, 2, 4 or 8 — see the switch in dump_minipool).  */
  int fix_size;
};
5358
/* A "fix": an insn that needs a constant placed into a minipool,
   kept on a singly-linked list in insn order.  */
struct minipool_fixup
{
  /* The next fix in the list.  */
  Mfix * next;
  /* The insn requiring the constant (or, for a barrier entry, the
     barrier insn itself — see push_minipool_barrier).  */
  rtx insn;
  /* The address (offset into the function) of INSN.  */
  HOST_WIDE_INT address;
  /* NOTE(review): presumably the location within INSN of the operand
     to be redirected at the pool entry — confirm against the code
     that creates value fixes (not visible in this chunk).  */
  rtx * loc;
  /* The mode of the constant.  */
  enum machine_mode mode;
  /* The size in bytes the constant occupies in the pool.  */
  int fix_size;
  /* The constant itself.  */
  rtx value;
  /* The pool entry allocated for this fix.  */
  Mnode * minipool;
  /* How far forwards the pc-relative reference can reach
     (max_address = address + forwards).  */
  HOST_WIDE_INT forwards;
  /* How far backwards it can reach (min_address = address - backwards).  */
  HOST_WIDE_INT backwards;
};
2b835d68 5372
d5b7b3ae
RE
/* Fixes less than a word need padding out to a word boundary.  */
#define MINIPOOL_FIX_SIZE(mode) \
  (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)

/* The doubly-linked list of entries in the minipool currently being
   built, and the label that will mark its start when it is emitted.  */
static Mnode *	minipool_vector_head;
static Mnode *	minipool_vector_tail;
static rtx	minipool_vector_label;

/* The linked list of all minipool fixes required for this function.  */
Mfix * 		minipool_fix_head;
Mfix * 		minipool_fix_tail;
/* The fix entry for the current minipool, once it has been placed.  */
Mfix *		minipool_barrier;
5386
5387/* Determines if INSN is the start of a jump table. Returns the end
5388 of the TABLE or NULL_RTX. */
1d6e90ac 5389
d5b7b3ae
RE
5390static rtx
5391is_jump_table (insn)
5392 rtx insn;
2b835d68 5393{
d5b7b3ae 5394 rtx table;
da6558fd 5395
d5b7b3ae
RE
5396 if (GET_CODE (insn) == JUMP_INSN
5397 && JUMP_LABEL (insn) != NULL
5398 && ((table = next_real_insn (JUMP_LABEL (insn)))
5399 == next_real_insn (insn))
5400 && table != NULL
5401 && GET_CODE (table) == JUMP_INSN
5402 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5403 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5404 return table;
5405
5406 return NULL_RTX;
2b835d68
RE
5407}
5408
657d9449
RE
#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

/* Return the number of bytes that the jump-table insn INSN occupies
   in the text section, or 0 if the table is placed in a data section
   and so takes no room in the code.  */

static HOST_WIDE_INT
get_jump_table_size (insn)
     rtx insn;
{
  /* ADDR_VECs only take room if read-only data goes into the text
     section.  */
  if (JUMP_TABLES_IN_TEXT_SECTION
#if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
      || 1
#endif
      )
    {
      rtx body = PATTERN (insn);
      /* ADDR_DIFF_VECs keep their label vector in operand 1,
	 ADDR_VECs in operand 0.  */
      int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;

      /* Size = element size times number of elements.  */
      return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
    }

  return 0;
}
2b835d68 5433
d5b7b3ae
RE
/* Move a minipool fix MP from its current location to before MAX_MP.
   If MAX_MP is NULL, then MP doesn't need moving, but the addressing
   constraints may need updating.  Returns the new "most constrained"
   entry (MP itself).  */

static Mnode *
move_minipool_fix_forward_ref (mp, max_mp, max_address)
     Mnode * mp;
     Mnode * max_mp;
     HOST_WIDE_INT max_address;
{
  /* This should never be true and the code below assumes these are
     different.  */
  if (mp == max_mp)
    abort ();

  if (max_mp == NULL)
    {
      /* No move needed — just tighten MP's constraint if the new
	 reference is more restrictive.  */
      if (max_address < mp->max_address)
	mp->max_address = max_address;
    }
  else
    {
      /* MP must fit before MAX_MP, so its constraint is bounded by
	 MAX_MP's constraint less MP's own size.  */
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      /* Unlink MP from its current position.  Since max_mp is non-null,
	 mp->prev must be non-null.  */
      mp->prev->next = mp->next;
      if (mp->next != NULL)
	mp->next->prev = mp->prev;
      else
	minipool_vector_tail = mp->prev;

      /* Re-insert it before MAX_MP.  */
      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;

      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
5494
d5b7b3ae
RE
/* Add a constant to the minipool for a forward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.  */

static Mnode *
add_minipool_forward_ref (fix)
     Mfix * fix;
{
  /* If set, max_mp is the first pool_entry that has a lower
     constraint than the one we are trying to add.  */
  Mnode *       max_mp = NULL;
  /* The furthest address the pc-relative reference in FIX can reach.  */
  HOST_WIDE_INT max_address = fix->address + fix->forwards;
  Mnode *       mp;

  /* If this fix's address is greater than the address of the first
     entry, then we can't put the fix in this pool.  We subtract the
     size of the current fix to ensure that if the table is fully
     packed we still have enough room to insert this value by shuffling
     the other fixes forwards.  */
  if (minipool_vector_head &&
      fix->address >= minipool_vector_head->max_address - fix->fix_size)
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value))
	{
	  /* More than one fix references this entry.  */
	  mp->refcount++;
	  return move_minipool_fix_forward_ref (mp, max_mp, max_address);
	}

      /* Note the insertion point if necessary.  */
      if (max_mp == NULL
	  && mp->max_address > max_address)
	max_mp = mp;
    }

  /* The value is not currently in the minipool, so we need to create
     a new entry for it.  If MAX_MP is NULL, the entry will be put on
     the end of the list since the placement is less constrained than
     any existing entry.  Otherwise, we insert the new fix before
     MAX_MP and, if necessary, adjust the constraints on the other
     entries.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a backwards ref.  */
  mp->min_address = -65536;

  if (max_mp == NULL)
    {
      /* Append at the tail; if the pool was empty this also creates
	 the label that will mark the pool's start.  */
      mp->max_address = max_address;
      mp->next = NULL;
      mp->prev = minipool_vector_tail;

      if (mp->prev == NULL)
	{
	  minipool_vector_head = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->prev->next = mp;

      minipool_vector_tail = mp;
    }
  else
    {
      /* Insert before MAX_MP, bounding the constraint by MAX_MP's
	 constraint less our own size.  */
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
5601
/* Mirror of move_minipool_fix_forward_ref for a backwards reference:
   move pool entry MP to after MIN_MP (or just update its minimum
   address constraint if MIN_MP is NULL), then recompute the offsets
   and minimum-address constraints of the whole pool.  Returns the new
   "most constrained" entry (MP itself).  */

static Mnode *
move_minipool_fix_backward_ref (mp, min_mp, min_address)
     Mnode * mp;
     Mnode * min_mp;
     HOST_WIDE_INT min_address;
{
  HOST_WIDE_INT offset;

  /* This should never be true, and the code below assumes these are
     different.  */
  if (mp == min_mp)
    abort ();

  if (min_mp == NULL)
    {
      /* No move needed — just tighten the constraint if the new
	 reference is more restrictive.  */
      if (min_address > mp->min_address)
	mp->min_address = min_address;
    }
  else
    {
      /* We will adjust this below if it is too loose.  */
      mp->min_address = min_address;

      /* Unlink MP from its current position.  Since min_mp is non-null,
	 mp->next must be non-null.  */
      mp->next->prev = mp->prev;
      if (mp->prev != NULL)
	mp->prev->next = mp->next;
      else
	minipool_vector_head = mp->next;

      /* Reinsert it after MIN_MP.  */
      mp->prev = min_mp;
      mp->next = min_mp->next;
      min_mp->next = mp;
      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  min_mp = mp;

  /* Walk the whole pool re-assigning offsets (entries with a zero
     refcount take no room) and propagating min_address constraints
     forwards.  */
  offset = 0;
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;
      if (mp->refcount > 0)
	offset += mp->fix_size;

      if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;
    }

  return min_mp;
}
5658
/* Add a constant to the minipool for a backward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.

   Note that the code for insertion for a backwards reference can be
   somewhat confusing because the calculated offsets for each fix do
   not take into account the size of the pool (which is still under
   construction).  */

static Mnode *
add_minipool_backward_ref (fix)
     Mfix * fix;
{
  /* If set, min_mp is the last pool_entry that has a lower constraint
     than the one we are trying to add.  */
  Mnode *        min_mp = NULL;
  /* This can be negative, since it is only a constraint.  */
  HOST_WIDE_INT  min_address = fix->address - fix->backwards;
  Mnode *        mp;

  /* If we can't reach the current pool from this insn, or if we can't
     insert this entry at the end of the pool without pushing other
     fixes out of range, then we don't try.  This ensures that we
     can't fail later on.  */
  if (min_address >= minipool_barrier->address
      || (minipool_vector_tail->min_address + fix->fix_size
	  >= minipool_barrier->address))
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value)
	  /* Check that there is enough slack to move this entry to the
	     end of the table (this is conservative).  */
	  && (mp->max_address
	      > (minipool_barrier->address
		 + minipool_vector_tail->offset
		 + minipool_vector_tail->fix_size)))
	{
	  mp->refcount++;
	  return move_minipool_fix_backward_ref (mp, min_mp, min_address);
	}

      if (min_mp != NULL)
	/* Once the insertion point is found, every earlier entry's
	   constraint shifts by the size we are about to insert.  */
	mp->min_address += fix->fix_size;
      else
	{
	  /* Note the insertion point if necessary.  */
	  if (mp->min_address < min_address)
	    min_mp = mp;
	  else if (mp->max_address
		   < minipool_barrier->address + mp->offset + fix->fix_size)
	    {
	      /* Inserting before this entry would push the fix beyond
		 its maximum address (which can happen if we have
		 re-located a forwards fix); force the new fix to come
		 after it.  */
	      min_mp = mp;
	      min_address = mp->min_address + fix->fix_size;
	    }
	}
    }

  /* We need to create a new entry.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a forwards ref.  */
  mp->max_address = minipool_barrier->address + 65536;

  mp->min_address = min_address;

  if (min_mp == NULL)
    {
      /* Prepend at the head; if the pool was empty this also creates
	 the label that will mark the pool's start.  */
      mp->prev = NULL;
      mp->next = minipool_vector_head;

      if (mp->next == NULL)
	{
	  minipool_vector_tail = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->next->prev = mp;

      minipool_vector_head = mp;
    }
  else
    {
      /* Insert after MIN_MP.  */
      mp->next = min_mp->next;
      mp->prev = min_mp;
      min_mp->next = mp;

      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  /* Save the new entry.  */
  min_mp = mp;

  if (mp->prev)
    mp = mp->prev;
  else
    mp->offset = 0;

  /* Scan over the following entries and adjust their offsets.  */
  while (mp->next != NULL)
    {
      if (mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;

      if (mp->refcount)
	mp->next->offset = mp->offset + mp->fix_size;
      else
	mp->next->offset = mp->offset;

      mp = mp->next;
    }

  return min_mp;
}
5791
5792static void
5793assign_minipool_offsets (barrier)
5794 Mfix * barrier;
5795{
5796 HOST_WIDE_INT offset = 0;
5797 Mnode * mp;
5798
5799 minipool_barrier = barrier;
5800
5801 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5802 {
5803 mp->offset = offset;
da6558fd 5804
d5b7b3ae
RE
5805 if (mp->refcount > 0)
5806 offset += mp->fix_size;
5807 }
5808}
5809
/* Output the literal table: emit the current minipool (a label, an
   alignment directive, and one consttable entry per live node) into
   the insn stream after SCAN, then free the pool and reset it.  */
static void
dump_minipool (scan)
     rtx scan;
{
  Mnode * mp;
  Mnode * nmp;

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     ";; Emitting minipool after insn %u; address %ld\n",
	     INSN_UID (scan), (unsigned long) minipool_barrier->address);

  /* Emit the pool prologue: an anonymous label, 4-byte alignment, and
     the label the fixed-up insns refer to.  */
  scan = emit_label_after (gen_label_rtx (), scan);
  scan = emit_insn_after (gen_align_4 (), scan);
  scan = emit_label_after (minipool_vector_label, scan);

  for (mp = minipool_vector_head; mp != NULL; mp = nmp)
    {
      /* Entries whose refcount dropped to zero are not emitted.  */
      if (mp->refcount > 0)
	{
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       ";; Offset %u, min %ld, max %ld ",
		       (unsigned) mp->offset, (unsigned long) mp->min_address,
		       (unsigned long) mp->max_address);
	      arm_print_value (rtl_dump_file, mp->value);
	      fputc ('\n', rtl_dump_file);
	    }

	  /* Emit the consttable pattern matching the entry's size.
	     Each size is only available if the MD file defines it.  */
	  switch (mp->fix_size)
	    {
#ifdef HAVE_consttable_1
	    case 1:
	      scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
	      break;

#endif
#ifdef HAVE_consttable_2
	    case 2:
	      scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
	      break;

#endif
#ifdef HAVE_consttable_4
	    case 4:
	      scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
	      break;

#endif
#ifdef HAVE_consttable_8
	    case 8:
	      scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
	      break;

#endif
	    default:
	      abort ();
	      break;
	    }
	}

      nmp = mp->next;
      free (mp);
    }

  /* The pool has been emitted; reset it for the next group of fixes.  */
  minipool_vector_head = minipool_vector_tail = NULL;
  scan = emit_insn_after (gen_consttable_end (), scan);
  scan = emit_barrier_after (scan);
}
5881
d5b7b3ae 5882/* Return the cost of forcibly inserting a barrier after INSN. */
1d6e90ac 5883
d5b7b3ae
RE
5884static int
5885arm_barrier_cost (insn)
5886 rtx insn;
949d79eb 5887{
d5b7b3ae
RE
5888 /* Basing the location of the pool on the loop depth is preferable,
5889 but at the moment, the basic block information seems to be
5890 corrupt by this stage of the compilation. */
5891 int base_cost = 50;
5892 rtx next = next_nonnote_insn (insn);
5893
5894 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5895 base_cost -= 20;
5896
5897 switch (GET_CODE (insn))
5898 {
5899 case CODE_LABEL:
5900 /* It will always be better to place the table before the label, rather
5901 than after it. */
5902 return 50;
949d79eb 5903
d5b7b3ae
RE
5904 case INSN:
5905 case CALL_INSN:
5906 return base_cost;
5907
5908 case JUMP_INSN:
5909 return base_cost - 10;
5910
5911 default:
5912 return base_cost + 10;
5913 }
5914}
5915
/* Find the best place in the insn stream in the range
   (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
   Create the barrier by inserting a jump and add a new fix entry for
   it.  Returns the newly created barrier fix.  */

static Mfix *
create_fix_barrier (fix, max_address)
     Mfix * fix;
     HOST_WIDE_INT max_address;
{
  HOST_WIDE_INT count = 0;
  rtx barrier;
  rtx from = fix->insn;
  rtx selected = from;
  int selected_cost;
  HOST_WIDE_INT selected_address;
  Mfix * new_fix;
  HOST_WIDE_INT max_count = max_address - fix->address;
  rtx label = gen_label_rtx ();

  /* Start with the fix's own insn as the candidate position.  */
  selected_cost = arm_barrier_cost (from);
  selected_address = fix->address;

  /* Scan forwards, tracking the byte offset from FIX, and remember
     the cheapest position found within range (ties go to the later
     position, since new_cost <= selected_cost accepts equal costs).  */
  while (from && count < max_count)
    {
      rtx tmp;
      int new_cost;

      /* This code shouldn't have been called if there was a natural barrier
	 within range.  */
      if (GET_CODE (from) == BARRIER)
	abort ();

      /* Count the length of this insn.  */
      count += get_attr_length (from);

      /* If there is a jump table, add its length.  */
      tmp = is_jump_table (from);
      if (tmp != NULL)
	{
	  count += get_jump_table_size (tmp);

	  /* Jump tables aren't in a basic block, so base the cost on
	     the dispatch insn.  If we select this location, we will
	     still put the pool after the table.  */
	  new_cost = arm_barrier_cost (from);

	  if (count < max_count && new_cost <= selected_cost)
	    {
	      selected = tmp;
	      selected_cost = new_cost;
	      selected_address = fix->address + count;
	    }

	  /* Continue after the dispatch table.  */
	  from = NEXT_INSN (tmp);
	  continue;
	}

      new_cost = arm_barrier_cost (from);

      if (count < max_count && new_cost <= selected_cost)
	{
	  selected = from;
	  selected_cost = new_cost;
	  selected_address = fix->address + count;
	}

      from = NEXT_INSN (from);
    }

  /* Create a new JUMP_INSN that branches around a barrier.  */
  from = emit_jump_insn_after (gen_jump (label), selected);
  JUMP_LABEL (from) = label;
  barrier = emit_barrier_after (from);
  emit_label_after (label, barrier);

  /* Create a minipool barrier entry for the new barrier, chained
     into the fix list directly after FIX.  */
  new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
  new_fix->insn = barrier;
  new_fix->address = selected_address;
  new_fix->next = fix->next;
  fix->next = new_fix;

  return new_fix;
}
6002
6003/* Record that there is a natural barrier in the insn stream at
6004 ADDRESS. */
949d79eb
RE
6005static void
6006push_minipool_barrier (insn, address)
2b835d68 6007 rtx insn;
d5b7b3ae 6008 HOST_WIDE_INT address;
2b835d68 6009{
c7319d87 6010 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 6011
949d79eb
RE
6012 fix->insn = insn;
6013 fix->address = address;
2b835d68 6014
949d79eb
RE
6015 fix->next = NULL;
6016 if (minipool_fix_head != NULL)
6017 minipool_fix_tail->next = fix;
6018 else
6019 minipool_fix_head = fix;
6020
6021 minipool_fix_tail = fix;
6022}
2b835d68 6023
d5b7b3ae
RE
/* Record INSN, which will need fixing up to load a value from the
   minipool.  ADDRESS is the offset of the insn since the start of the
   function; LOC is a pointer to the part of the insn which requires
   fixing; VALUE is the constant that must be loaded, which is of type
   MODE.  */

static void
push_minipool_fix (insn, address, loc, mode, value)
     rtx insn;
     HOST_WIDE_INT address;
     rtx * loc;
     enum machine_mode mode;
     rtx value;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

#ifdef AOF_ASSEMBLER
  /* PIC symbol references need to be converted into offsets into the
     based area.  */
  /* XXX This shouldn't be done here.  */
  if (flag_pic && GET_CODE (value) == SYMBOL_REF)
    value = aof_pic_entry (value);
#endif /* AOF_ASSEMBLER */

  fix->insn = insn;
  fix->address = address;
  fix->loc = loc;
  fix->mode = mode;
  fix->fix_size = MINIPOOL_FIX_SIZE (mode);
  fix->value = value;
  /* Reach of the load, in each direction, taken from the insn's
     pool_range / neg_pool_range attributes.  */
  fix->forwards = get_attr_pool_range (insn);
  fix->backwards = get_attr_neg_pool_range (insn);
  fix->minipool = NULL;

  /* If an insn doesn't have a range defined for it, then it isn't
     expecting to be reworked by this code.  Better to abort now than
     to generate duff assembly code.  */
  if (fix->forwards == 0 && fix->backwards == 0)
    abort ();

  /* Dump a description of the fix when -dr style debugging is enabled.  */
  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file,
	       ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
	       GET_MODE_NAME (mode),
	       INSN_UID (insn), (unsigned long) address,
	       -1 * (long)fix->backwards, (long)fix->forwards);
      arm_print_value (rtl_dump_file, fix->value);
      fprintf (rtl_dump_file, "\n");
    }

  /* Add it to the chain of fixes.  */
  fix->next = NULL;

  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
6084
/* Scan INSN and note any of its operands that need fixing.
   ADDRESS is the offset of INSN from the start of the function;
   each offending operand is recorded via push_minipool_fix.  */

static void
note_invalid_constants (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  int opno;

  extract_insn (insn);

  if (!constrain_operands (1))
    fatal_insn_not_found (insn);

  /* Fill in recog_op_alt with information about the constraints of this
     insn.  */
  preprocess_constraints ();

  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things we need to fix can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
	continue;

      /* If this alternative is a memory reference, then any mention
	 of constants in this alternative is really to fool reload
	 into allowing us to accept one there.  We need to fix them up
	 now so that we output the right code.  */
      if (recog_op_alt[opno][which_alternative].memory_ok)
	{
	  rtx op = recog_data.operand[opno];

	  if (CONSTANT_P (op))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno], op);
#if 0
	  /* RWE: Now we look correctly at the operands for the insn,
	     this shouldn't be needed any more.  */
#ifndef AOF_ASSEMBLER
	  /* XXX Is this still needed?  */
	  else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       XVECEXP (op, 0, 0));
#endif
#endif
	  /* References into the constant pool are rewritten to load
	     the constant itself from the minipool instead.  */
	  else if (GET_CODE (op) == MEM
		   && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       get_pool_constant (XEXP (op, 0)));
	}
    }
}
6140
/* Machine-dependent reorganization pass: scan the whole function for
   constants that must be placed into minipools, then partition the
   fixups into pools, place each pool at a barrier (creating one if no
   natural barrier is in range), and rewrite the offending operands to
   load from the pool.  FIRST is the first insn of the function.  */

void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  HOST_WIDE_INT address = 0;
  Mfix * fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  if (GET_CODE (first) != NOTE)
    abort ();

  /* Scan all the insns and record the operands that will need fixing.  */
  for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
    {
      if (GET_CODE (insn) == BARRIER)
	push_minipool_barrier (insn, address);
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  rtx table;

	  note_invalid_constants (insn, address);
	  address += get_attr_length (insn);

	  /* If the insn is a vector jump, add the size of the table
	     and skip the table.  */
	  if ((table = is_jump_table (insn)) != NULL)
	    {
	      address += get_jump_table_size (table);
	      insn = table;
	    }
	}
    }

  fix = minipool_fix_head;

  /* Now scan the fixups and perform the required changes.  */
  while (fix)
    {
      Mfix * ftmp;
      Mfix * fdel;
      Mfix * last_added_fix;
      Mfix * last_barrier = NULL;
      Mfix * this_fix;

      /* Skip any further barriers before the next fix.  */
      while (fix && GET_CODE (fix->insn) == BARRIER)
	fix = fix->next;

      /* No more fixes.  */
      if (fix == NULL)
	break;

      last_added_fix = NULL;

      /* Greedily add fixes to the current pool until one no longer
	 fits (add_minipool_forward_ref fails) or a barrier is out of
	 reach of the pool's max_address.  */
      for (ftmp = fix; ftmp; ftmp = ftmp->next)
	{
	  if (GET_CODE (ftmp->insn) == BARRIER)
	    {
	      if (ftmp->address >= minipool_vector_head->max_address)
		break;

	      last_barrier = ftmp;
	    }
	  else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
	    break;

	  last_added_fix = ftmp;  /* Keep track of the last fix added.  */
	}

      /* If we found a barrier, drop back to that; any fixes that we
	 could have reached but come after the barrier will now go in
	 the next mini-pool.  */
      if (last_barrier != NULL)
	{
	  /* Reduce the refcount for those fixes that won't go into this
	     pool after all.  */
	  for (fdel = last_barrier->next;
	       fdel && fdel != ftmp;
	       fdel = fdel->next)
	    {
	      fdel->minipool->refcount--;
	      fdel->minipool = NULL;
	    }

	  ftmp = last_barrier;
	}
      else
	{
	  /* ftmp is first fix that we can't fit into this pool and
	     there are no natural barriers that we could use.  Insert a
	     new barrier in the code somewhere between the previous
	     fix and this one, and arrange to jump around it.  */
	  HOST_WIDE_INT max_address;

	  /* The last item on the list of fixes must be a barrier, so
	     we can never run off the end of the list of fixes without
	     last_barrier being set.  */
	  if (ftmp == NULL)
	    abort ();

	  max_address = minipool_vector_head->max_address;
	  /* Check that there isn't another fix that is in range that
	     we couldn't fit into this pool because the pool was
	     already too large: we need to put the pool before such an
	     instruction.  */
	  if (ftmp->address < max_address)
	    max_address = ftmp->address;

	  last_barrier = create_fix_barrier (last_added_fix, max_address);
	}

      assign_minipool_offsets (last_barrier);

      /* Fixes after the barrier may still be able to reach the pool
	 backwards; claim as many as possible.  */
      while (ftmp)
	{
	  if (GET_CODE (ftmp->insn) != BARRIER
	      && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
		  == NULL))
	    break;

	  ftmp = ftmp->next;
	}

      /* Scan over the fixes we have identified for this pool, fixing them
	 up and adding the constants to the pool itself.  */
      for (this_fix = fix; this_fix && ftmp != this_fix;
	   this_fix = this_fix->next)
	if (GET_CODE (this_fix->insn) != BARRIER)
	  {
	    /* Replace the fixed-up operand with a pc-relative load
	       from its slot in the pool.  */
	    rtx addr
	      = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
						  minipool_vector_label),
			       this_fix->minipool->offset);
	    *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
	  }

      dump_minipool (last_barrier->insn);
      fix = ftmp;
    }

  /* From now on we must synthesize any constants that we can't handle
     directly.  This can happen if the RTL gets split during final
     instruction generation.  */
  after_arm_reorg = 1;

  /* Free the minipool memory.  */
  obstack_free (&minipool_obstack, minipool_startobj);
}
cce8749e
CH
6294\f
6295/* Routines to output assembly language. */
6296
f3bb6135 6297/* If the rtx is the correct value then return the string of the number.
ff9940b0 6298 In this way we can ensure that valid double constants are generated even
6354dc9b 6299 when cross compiling. */
1d6e90ac 6300
cd2b33d0 6301const char *
ff9940b0 6302fp_immediate_constant (x)
b5cc037f 6303 rtx x;
ff9940b0
RE
6304{
6305 REAL_VALUE_TYPE r;
6306 int i;
6307
6308 if (!fpa_consts_inited)
6309 init_fpa_table ();
6310
6311 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6312 for (i = 0; i < 8; i++)
6313 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6314 return strings_fpa[i];
f3bb6135 6315
ff9940b0
RE
6316 abort ();
6317}
6318
9997d19d 6319/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
1d6e90ac 6320
cd2b33d0 6321static const char *
9997d19d 6322fp_const_from_val (r)
62b10bbc 6323 REAL_VALUE_TYPE * r;
9997d19d
RE
6324{
6325 int i;
6326
5895f793 6327 if (!fpa_consts_inited)
9997d19d
RE
6328 init_fpa_table ();
6329
6330 for (i = 0; i < 8; i++)
6331 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6332 return strings_fpa[i];
6333
6334 abort ();
6335}
ff9940b0 6336
cce8749e
CH
6337/* Output the operands of a LDM/STM instruction to STREAM.
6338 MASK is the ARM register set mask of which only bits 0-15 are important.
6d3d9133
NC
6339 REG is the base register, either the frame pointer or the stack pointer,
6340 INSTR is the possibly suffixed load or store instruction. */
cce8749e 6341
d5b7b3ae 6342static void
6d3d9133 6343print_multi_reg (stream, instr, reg, mask)
62b10bbc 6344 FILE * stream;
cd2b33d0 6345 const char * instr;
dd18ae56
NC
6346 int reg;
6347 int mask;
cce8749e
CH
6348{
6349 int i;
6350 int not_first = FALSE;
6351
1d5473cb 6352 fputc ('\t', stream);
dd18ae56 6353 asm_fprintf (stream, instr, reg);
1d5473cb 6354 fputs (", {", stream);
62b10bbc 6355
d5b7b3ae 6356 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
6357 if (mask & (1 << i))
6358 {
6359 if (not_first)
6360 fprintf (stream, ", ");
62b10bbc 6361
dd18ae56 6362 asm_fprintf (stream, "%r", i);
cce8749e
CH
6363 not_first = TRUE;
6364 }
f3bb6135 6365
6d3d9133 6366 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
f3bb6135 6367}
cce8749e 6368
6354dc9b 6369/* Output a 'call' insn. */
cce8749e 6370
cd2b33d0 6371const char *
cce8749e 6372output_call (operands)
62b10bbc 6373 rtx * operands;
cce8749e 6374{
6354dc9b 6375 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 6376
62b10bbc 6377 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 6378 {
62b10bbc 6379 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 6380 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 6381 }
62b10bbc 6382
1d5473cb 6383 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 6384
6cfc7210 6385 if (TARGET_INTERWORK)
da6558fd
NC
6386 output_asm_insn ("bx%?\t%0", operands);
6387 else
6388 output_asm_insn ("mov%?\t%|pc, %0", operands);
6389
f3bb6135
RE
6390 return "";
6391}
cce8749e 6392
ff9940b0
RE
6393static int
6394eliminate_lr2ip (x)
62b10bbc 6395 rtx * x;
ff9940b0
RE
6396{
6397 int something_changed = 0;
62b10bbc 6398 rtx x0 = * x;
ff9940b0 6399 int code = GET_CODE (x0);
1d6e90ac
NC
6400 int i, j;
6401 const char * fmt;
ff9940b0
RE
6402
6403 switch (code)
6404 {
6405 case REG:
62b10bbc 6406 if (REGNO (x0) == LR_REGNUM)
ff9940b0 6407 {
62b10bbc 6408 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
6409 return 1;
6410 }
6411 return 0;
6412 default:
6354dc9b 6413 /* Scan through the sub-elements and change any references there. */
ff9940b0 6414 fmt = GET_RTX_FORMAT (code);
62b10bbc 6415
ff9940b0
RE
6416 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6417 if (fmt[i] == 'e')
6418 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6419 else if (fmt[i] == 'E')
6420 for (j = 0; j < XVECLEN (x0, i); j++)
6421 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 6422
ff9940b0
RE
6423 return something_changed;
6424 }
6425}
6426
6354dc9b 6427/* Output a 'call' insn that is a reference in memory. */
ff9940b0 6428
cd2b33d0 6429const char *
ff9940b0 6430output_call_mem (operands)
62b10bbc 6431 rtx * operands;
ff9940b0 6432{
6354dc9b
NC
6433 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6434 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
ff9940b0 6435 if (eliminate_lr2ip (&operands[0]))
1d5473cb 6436 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 6437
6cfc7210 6438 if (TARGET_INTERWORK)
da6558fd
NC
6439 {
6440 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6441 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6442 output_asm_insn ("bx%?\t%|ip", operands);
6443 }
6444 else
6445 {
6446 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6447 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6448 }
6449
f3bb6135
RE
6450 return "";
6451}
ff9940b0
RE
6452
6453
6454/* Output a move from arm registers to an fpu registers.
6455 OPERANDS[0] is an fpu register.
6456 OPERANDS[1] is the first registers of an arm register pair. */
6457
cd2b33d0 6458const char *
ff9940b0 6459output_mov_long_double_fpu_from_arm (operands)
62b10bbc 6460 rtx * operands;
ff9940b0
RE
6461{
6462 int arm_reg0 = REGNO (operands[1]);
6463 rtx ops[3];
6464
62b10bbc
NC
6465 if (arm_reg0 == IP_REGNUM)
6466 abort ();
f3bb6135 6467
43cffd11
RE
6468 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6469 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6470 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6471
1d5473cb
RE
6472 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6473 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 6474
f3bb6135
RE
6475 return "";
6476}
ff9940b0
RE
6477
6478/* Output a move from an fpu register to arm registers.
6479 OPERANDS[0] is the first registers of an arm register pair.
6480 OPERANDS[1] is an fpu register. */
6481
cd2b33d0 6482const char *
ff9940b0 6483output_mov_long_double_arm_from_fpu (operands)
62b10bbc 6484 rtx * operands;
ff9940b0
RE
6485{
6486 int arm_reg0 = REGNO (operands[0]);
6487 rtx ops[3];
6488
62b10bbc
NC
6489 if (arm_reg0 == IP_REGNUM)
6490 abort ();
f3bb6135 6491
43cffd11
RE
6492 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6493 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6494 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6495
1d5473cb
RE
6496 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6497 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
6498 return "";
6499}
ff9940b0
RE
6500
6501/* Output a move from arm registers to arm registers of a long double
6502 OPERANDS[0] is the destination.
6503 OPERANDS[1] is the source. */
1d6e90ac 6504
cd2b33d0 6505const char *
ff9940b0 6506output_mov_long_double_arm_from_arm (operands)
62b10bbc 6507 rtx * operands;
ff9940b0 6508{
6354dc9b 6509 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
6510 int dest_start = REGNO (operands[0]);
6511 int src_start = REGNO (operands[1]);
6512 rtx ops[2];
6513 int i;
6514
6515 if (dest_start < src_start)
6516 {
6517 for (i = 0; i < 3; i++)
6518 {
43cffd11
RE
6519 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6520 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6521 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6522 }
6523 }
6524 else
6525 {
6526 for (i = 2; i >= 0; i--)
6527 {
43cffd11
RE
6528 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6529 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6530 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6531 }
6532 }
f3bb6135 6533
ff9940b0
RE
6534 return "";
6535}
6536
6537
cce8749e
CH
6538/* Output a move from arm registers to an fpu registers.
6539 OPERANDS[0] is an fpu register.
6540 OPERANDS[1] is the first registers of an arm register pair. */
6541
cd2b33d0 6542const char *
cce8749e 6543output_mov_double_fpu_from_arm (operands)
62b10bbc 6544 rtx * operands;
cce8749e
CH
6545{
6546 int arm_reg0 = REGNO (operands[1]);
6547 rtx ops[2];
6548
62b10bbc
NC
6549 if (arm_reg0 == IP_REGNUM)
6550 abort ();
6551
43cffd11
RE
6552 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6553 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6554 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6555 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
6556 return "";
6557}
cce8749e
CH
6558
6559/* Output a move from an fpu register to arm registers.
6560 OPERANDS[0] is the first registers of an arm register pair.
6561 OPERANDS[1] is an fpu register. */
6562
cd2b33d0 6563const char *
cce8749e 6564output_mov_double_arm_from_fpu (operands)
62b10bbc 6565 rtx * operands;
cce8749e
CH
6566{
6567 int arm_reg0 = REGNO (operands[0]);
6568 rtx ops[2];
6569
62b10bbc
NC
6570 if (arm_reg0 == IP_REGNUM)
6571 abort ();
f3bb6135 6572
43cffd11
RE
6573 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6574 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6575 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6576 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
6577 return "";
6578}
cce8749e
CH
6579
/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

const char *
output_move_double (operands)
     rtx * operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      /* The second register of the destination pair.  */
      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  if (reg1 == IP_REGNUM)
	    abort ();

	  /* Ensure the second source is not overwritten.  */
	  if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
	    output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
	  else
	    output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
	}
      else if (code1 == CONST_DOUBLE)
	{
	  if (GET_MODE (operands[1]) == DFmode)
	    {
	      /* Split the double constant into its two target words.  */
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);
	      otherops[1] = GEN_INT (l[1]);
	      operands[1] = GEN_INT (l[0]);
	    }
	  else if (GET_MODE (operands[1]) != VOIDmode)
	    abort ();
	  else if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	    }
	  else
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	    }

	  output_mov_immediate (operands);
	  output_mov_immediate (otherops);
	}
      else if (code1 == CONST_INT)
	{
#if HOST_BITS_PER_WIDE_INT > 32
	  /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
	     what the upper word is.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	      operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	    }
	  else
	    {
	      otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	      operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	    }
#else
	  /* Sign extend the intval into the high-order word.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = operands[1];
	      operands[1] = (INTVAL (operands[1]) < 0
			     ? constm1_rtx : const0_rtx);
	    }
	  else
	    otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
	  output_mov_immediate (otherops);
	  output_mov_immediate (operands);
	}
      else if (code1 == MEM)
	{
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;

	    case PRE_INC:
	      abort (); /* Should never happen now.  */
	      break;

	    case PRE_DEC:
	      output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	      break;

	    case POST_INC:
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;

	    case POST_DEC:
	      abort (); /* Should never happen now.  */
	      break;

	    case LABEL_REF:
	    case CONST:
	      /* Materialize the address first, then load through it.  */
	      output_asm_insn ("adr%?\t%0, %1", operands);
	      output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	      break;

	    default:
	      if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
				   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
		{
		  otherops[0] = operands[0];
		  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
		  otherops[2] = XEXP (XEXP (operands[1], 0), 1);

		  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		    {
		      if (GET_CODE (otherops[2]) == CONST_INT)
			{
			  /* Small offsets map directly onto LDM
			     addressing modes.  */
			  switch (INTVAL (otherops[2]))
			    {
			    case -8:
			      output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			      return "";
			    case -4:
			      output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			      return "";
			    case 4:
			      output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			      return "";
			    }

			  if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			    output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
			  else
			    output_asm_insn ("add%?\t%0, %1, %2", otherops);
			}
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("sub%?\t%0, %1, %2", otherops);

		  return "ldm%?ia\t%0, %M0";
		}
	      else
		{
		  otherops[1] = adjust_address (operands[1], VOIDmode, 4);
		  /* Take care of overlapping base/data reg.  */
		  if (reg_mentioned_p (operands[0], operands[1]))
		    {
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		    }
		  else
		    {
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		    }
		}
	    }
	}
      else
	abort (); /* Constraints should prevent this.  */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == IP_REGNUM)
	abort ();

      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  abort (); /* Should never happen now.  */
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  abort (); /* Should never happen now.  */
	  break;

	case PLUS:
	  if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
	    {
	      /* Small offsets map directly onto STM addressing modes.  */
	      switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  /* Fall through */

	default:
	  /* Store the two words with a pair of STRs.  */
	  otherops[0] = adjust_address (operands[0], VOIDmode, 4);
	  otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else
    /* Constraints should prevent this.  */
    abort ();

  return "";
}
cce8749e
CH
6815
6816
6817/* Output an arbitrary MOV reg, #n.
6818 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6819
cd2b33d0 6820const char *
cce8749e 6821output_mov_immediate (operands)
62b10bbc 6822 rtx * operands;
cce8749e 6823{
f3bb6135 6824 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e 6825
1d6e90ac 6826 /* Try to use one MOV. */
cce8749e 6827 if (const_ok_for_arm (n))
1d6e90ac 6828 output_asm_insn ("mov%?\t%0, %1", operands);
cce8749e 6829
1d6e90ac
NC
6830 /* Try to use one MVN. */
6831 else if (const_ok_for_arm (~n))
cce8749e 6832 {
f3bb6135 6833 operands[1] = GEN_INT (~n);
9997d19d 6834 output_asm_insn ("mvn%?\t%0, %1", operands);
cce8749e 6835 }
1d6e90ac
NC
6836 else
6837 {
6838 int n_ones = 0;
6839 int i;
cce8749e 6840
1d6e90ac
NC
6841 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6842 for (i = 0; i < 32; i ++)
6843 if (n & 1 << i)
6844 n_ones ++;
cce8749e 6845
1d6e90ac
NC
6846 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6847 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
6848 else
6849 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6850 }
f3bb6135
RE
6851
6852 return "";
6853}
cce8749e 6854
1d6e90ac
NC
6855/* Output an ADD r, s, #n where n may be too big for one instruction.
6856 If adding zero to one register, output nothing. */
cce8749e 6857
cd2b33d0 6858const char *
cce8749e 6859output_add_immediate (operands)
62b10bbc 6860 rtx * operands;
cce8749e 6861{
f3bb6135 6862 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
6863
6864 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6865 {
6866 if (n < 0)
6867 output_multi_immediate (operands,
9997d19d
RE
6868 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6869 -n);
cce8749e
CH
6870 else
6871 output_multi_immediate (operands,
9997d19d
RE
6872 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6873 n);
cce8749e 6874 }
f3bb6135
RE
6875
6876 return "";
6877}
cce8749e 6878
cce8749e
CH
6879/* Output a multiple immediate operation.
6880 OPERANDS is the vector of operands referred to in the output patterns.
6881 INSTR1 is the output pattern to use for the first constant.
6882 INSTR2 is the output pattern to use for subsequent constants.
6883 IMMED_OP is the index of the constant slot in OPERANDS.
6884 N is the constant value. */
6885
cd2b33d0 6886static const char *
cce8749e 6887output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc 6888 rtx * operands;
cd2b33d0
NC
6889 const char * instr1;
6890 const char * instr2;
f3bb6135
RE
6891 int immed_op;
6892 HOST_WIDE_INT n;
cce8749e 6893{
f3bb6135 6894#if HOST_BITS_PER_WIDE_INT > 32
30cf4896 6895 n &= 0xffffffff;
f3bb6135
RE
6896#endif
6897
cce8749e
CH
6898 if (n == 0)
6899 {
1d6e90ac 6900 /* Quick and easy output. */
cce8749e 6901 operands[immed_op] = const0_rtx;
1d6e90ac 6902 output_asm_insn (instr1, operands);
cce8749e
CH
6903 }
6904 else
6905 {
6906 int i;
cd2b33d0 6907 const char * instr = instr1;
cce8749e 6908
6354dc9b 6909 /* Note that n is never zero here (which would give no output). */
cce8749e
CH
6910 for (i = 0; i < 32; i += 2)
6911 {
6912 if (n & (3 << i))
6913 {
f3bb6135
RE
6914 operands[immed_op] = GEN_INT (n & (255 << i));
6915 output_asm_insn (instr, operands);
cce8749e
CH
6916 instr = instr2;
6917 i += 6;
6918 }
6919 }
6920 }
cd2b33d0 6921
f3bb6135 6922 return "";
9997d19d 6923}
cce8749e 6924
cce8749e
CH
6925/* Return the appropriate ARM instruction for the operation code.
6926 The returned result should not be overwritten. OP is the rtx of the
6927 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6928 was shifted. */
6929
cd2b33d0 6930const char *
cce8749e
CH
6931arithmetic_instr (op, shift_first_arg)
6932 rtx op;
f3bb6135 6933 int shift_first_arg;
cce8749e 6934{
9997d19d 6935 switch (GET_CODE (op))
cce8749e
CH
6936 {
6937 case PLUS:
f3bb6135
RE
6938 return "add";
6939
cce8749e 6940 case MINUS:
f3bb6135
RE
6941 return shift_first_arg ? "rsb" : "sub";
6942
cce8749e 6943 case IOR:
f3bb6135
RE
6944 return "orr";
6945
cce8749e 6946 case XOR:
f3bb6135
RE
6947 return "eor";
6948
cce8749e 6949 case AND:
f3bb6135
RE
6950 return "and";
6951
cce8749e 6952 default:
f3bb6135 6953 abort ();
cce8749e 6954 }
f3bb6135 6955}
cce8749e 6956
cce8749e
CH
6957/* Ensure valid constant shifts and return the appropriate shift mnemonic
6958 for the operation code. The returned result should not be overwritten.
6959 OP is the rtx code of the shift.
9997d19d 6960 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
6354dc9b 6961 shift. */
cce8749e 6962
cd2b33d0 6963static const char *
9997d19d
RE
6964shift_op (op, amountp)
6965 rtx op;
6966 HOST_WIDE_INT *amountp;
cce8749e 6967{
cd2b33d0 6968 const char * mnem;
e2c671ba 6969 enum rtx_code code = GET_CODE (op);
cce8749e 6970
9997d19d
RE
6971 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6972 *amountp = -1;
6973 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6974 *amountp = INTVAL (XEXP (op, 1));
6975 else
6976 abort ();
6977
e2c671ba 6978 switch (code)
cce8749e
CH
6979 {
6980 case ASHIFT:
6981 mnem = "asl";
6982 break;
f3bb6135 6983
cce8749e
CH
6984 case ASHIFTRT:
6985 mnem = "asr";
cce8749e 6986 break;
f3bb6135 6987
cce8749e
CH
6988 case LSHIFTRT:
6989 mnem = "lsr";
cce8749e 6990 break;
f3bb6135 6991
9997d19d
RE
6992 case ROTATERT:
6993 mnem = "ror";
9997d19d
RE
6994 break;
6995
ff9940b0 6996 case MULT:
e2c671ba
RE
6997 /* We never have to worry about the amount being other than a
6998 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
6999 if (*amountp != -1)
7000 *amountp = int_log2 (*amountp);
7001 else
7002 abort ();
f3bb6135
RE
7003 return "asl";
7004
cce8749e 7005 default:
f3bb6135 7006 abort ();
cce8749e
CH
7007 }
7008
e2c671ba
RE
7009 if (*amountp != -1)
7010 {
7011 /* This is not 100% correct, but follows from the desire to merge
7012 multiplication by a power of 2 with the recognizer for a
7013 shift. >=32 is not a valid shift for "asl", so we must try and
7014 output a shift that produces the correct arithmetical result.
ddd5a7c1 7015 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
7016 is not set correctly if we set the flags; but we never use the
7017 carry bit from such an operation, so we can ignore that. */
7018 if (code == ROTATERT)
1d6e90ac
NC
7019 /* Rotate is just modulo 32. */
7020 *amountp &= 31;
e2c671ba
RE
7021 else if (*amountp != (*amountp & 31))
7022 {
7023 if (code == ASHIFT)
7024 mnem = "lsr";
7025 *amountp = 32;
7026 }
7027
7028 /* Shifts of 0 are no-ops. */
7029 if (*amountp == 0)
7030 return NULL;
7031 }
7032
9997d19d
RE
7033 return mnem;
7034}
cce8749e 7035
6354dc9b 7036/* Obtain the shift from the POWER of two. */
1d6e90ac 7037
18af7313 7038static HOST_WIDE_INT
cce8749e 7039int_log2 (power)
f3bb6135 7040 HOST_WIDE_INT power;
cce8749e 7041{
f3bb6135 7042 HOST_WIDE_INT shift = 0;
cce8749e 7043
30cf4896 7044 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
cce8749e
CH
7045 {
7046 if (shift > 31)
f3bb6135 7047 abort ();
1d6e90ac 7048 shift ++;
cce8749e 7049 }
f3bb6135
RE
7050
7051 return shift;
7052}
cce8749e 7053
cce8749e
CH
7054/* Output a .ascii pseudo-op, keeping track of lengths. This is because
7055 /bin/as is horribly restrictive. */
6cfc7210 7056#define MAX_ASCII_LEN 51
cce8749e
CH
7057
7058void
7059output_ascii_pseudo_op (stream, p, len)
62b10bbc 7060 FILE * stream;
3cce094d 7061 const unsigned char * p;
cce8749e
CH
7062 int len;
7063{
7064 int i;
6cfc7210 7065 int len_so_far = 0;
cce8749e 7066
6cfc7210
NC
7067 fputs ("\t.ascii\t\"", stream);
7068
cce8749e
CH
7069 for (i = 0; i < len; i++)
7070 {
1d6e90ac 7071 int c = p[i];
cce8749e 7072
6cfc7210 7073 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 7074 {
6cfc7210 7075 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 7076 len_so_far = 0;
cce8749e
CH
7077 }
7078
6cfc7210 7079 switch (c)
cce8749e 7080 {
6cfc7210
NC
7081 case TARGET_TAB:
7082 fputs ("\\t", stream);
7083 len_so_far += 2;
7084 break;
7085
7086 case TARGET_FF:
7087 fputs ("\\f", stream);
7088 len_so_far += 2;
7089 break;
7090
7091 case TARGET_BS:
7092 fputs ("\\b", stream);
7093 len_so_far += 2;
7094 break;
7095
7096 case TARGET_CR:
7097 fputs ("\\r", stream);
7098 len_so_far += 2;
7099 break;
7100
7101 case TARGET_NEWLINE:
7102 fputs ("\\n", stream);
7103 c = p [i + 1];
7104 if ((c >= ' ' && c <= '~')
7105 || c == TARGET_TAB)
7106 /* This is a good place for a line break. */
7107 len_so_far = MAX_ASCII_LEN;
7108 else
7109 len_so_far += 2;
7110 break;
7111
7112 case '\"':
7113 case '\\':
7114 putc ('\\', stream);
5895f793 7115 len_so_far++;
6cfc7210 7116 /* drop through. */
f3bb6135 7117
6cfc7210
NC
7118 default:
7119 if (c >= ' ' && c <= '~')
7120 {
7121 putc (c, stream);
5895f793 7122 len_so_far++;
6cfc7210
NC
7123 }
7124 else
7125 {
7126 fprintf (stream, "\\%03o", c);
7127 len_so_far += 4;
7128 }
7129 break;
cce8749e 7130 }
cce8749e 7131 }
f3bb6135 7132
cce8749e 7133 fputs ("\"\n", stream);
f3bb6135 7134}
cce8749e 7135\f
121308d4
NC
7136/* Compute the register sabe mask for registers 0 through 12
7137 inclusive. This code is used by both arm_compute_save_reg_mask
7138 and arm_compute_initial_elimination_offset. */
6d3d9133
NC
7139
7140static unsigned long
121308d4 7141arm_compute_save_reg0_reg12_mask ()
6d3d9133 7142{
121308d4 7143 unsigned long func_type = arm_current_func_type ();
6d3d9133
NC
7144 unsigned int save_reg_mask = 0;
7145 unsigned int reg;
6d3d9133 7146
7b8b8ade 7147 if (IS_INTERRUPT (func_type))
6d3d9133 7148 {
7b8b8ade 7149 unsigned int max_reg;
7b8b8ade
NC
7150 /* Interrupt functions must not corrupt any registers,
7151 even call clobbered ones. If this is a leaf function
7152 we can just examine the registers used by the RTL, but
7153 otherwise we have to assume that whatever function is
7154 called might clobber anything, and so we have to save
7155 all the call-clobbered registers as well. */
7156 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7157 /* FIQ handlers have registers r8 - r12 banked, so
7158 we only need to check r0 - r7, Normal ISRs only
121308d4 7159 bank r14 and r15, so we must check up to r12.
7b8b8ade
NC
7160 r13 is the stack pointer which is always preserved,
7161 so we do not need to consider it here. */
7162 max_reg = 7;
7163 else
7164 max_reg = 12;
7165
7166 for (reg = 0; reg <= max_reg; reg++)
7167 if (regs_ever_live[reg]
7168 || (! current_function_is_leaf && call_used_regs [reg]))
6d3d9133
NC
7169 save_reg_mask |= (1 << reg);
7170 }
7171 else
7172 {
7173 /* In the normal case we only need to save those registers
7174 which are call saved and which are used by this function. */
7175 for (reg = 0; reg <= 10; reg++)
7176 if (regs_ever_live[reg] && ! call_used_regs [reg])
7177 save_reg_mask |= (1 << reg);
7178
7179 /* Handle the frame pointer as a special case. */
7180 if (! TARGET_APCS_FRAME
7181 && ! frame_pointer_needed
7182 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7183 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7184 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7185
7186 /* If we aren't loading the PIC register,
7187 don't stack it even though it may be live. */
7188 if (flag_pic
7189 && ! TARGET_SINGLE_PIC_BASE
7190 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7191 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7192 }
7193
121308d4
NC
7194 return save_reg_mask;
7195}
7196
7197/* Compute a bit mask of which registers need to be
7198 saved on the stack for the current function. */
7199
7200static unsigned long
7201arm_compute_save_reg_mask ()
7202{
7203 unsigned int save_reg_mask = 0;
7204 unsigned long func_type = arm_current_func_type ();
7205
7206 if (IS_NAKED (func_type))
7207 /* This should never really happen. */
7208 return 0;
7209
7210 /* If we are creating a stack frame, then we must save the frame pointer,
7211 IP (which will hold the old stack pointer), LR and the PC. */
7212 if (frame_pointer_needed)
7213 save_reg_mask |=
7214 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7215 | (1 << IP_REGNUM)
7216 | (1 << LR_REGNUM)
7217 | (1 << PC_REGNUM);
7218
7219 /* Volatile functions do not return, so there
7220 is no need to save any other registers. */
7221 if (IS_VOLATILE (func_type))
7222 return save_reg_mask;
7223
7224 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7225
6d3d9133
NC
7226 /* Decide if we need to save the link register.
7227 Interrupt routines have their own banked link register,
7228 so they never need to save it.
1768c26f 7229 Otherwise if we do not use the link register we do not need to save
6d3d9133
NC
7230 it. If we are pushing other registers onto the stack however, we
7231 can save an instruction in the epilogue by pushing the link register
7232 now and then popping it back into the PC. This incurs extra memory
7233 accesses though, so we only do it when optimising for size, and only
7234 if we know that we will not need a fancy return sequence. */
3a7731fd 7235 if (regs_ever_live [LR_REGNUM]
6d3d9133
NC
7236 || (save_reg_mask
7237 && optimize_size
3a7731fd 7238 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
6d3d9133
NC
7239 save_reg_mask |= 1 << LR_REGNUM;
7240
6f7ebcbb
NC
7241 if (cfun->machine->lr_save_eliminated)
7242 save_reg_mask &= ~ (1 << LR_REGNUM);
7243
6d3d9133
NC
7244 return save_reg_mask;
7245}
7246
7247/* Generate a function exit sequence. If REALLY_RETURN is true, then do
7248 everything bar the final return instruction. */
ff9940b0 7249
cd2b33d0 7250const char *
84ed5e79 7251output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
7252 rtx operand;
7253 int really_return;
84ed5e79 7254 int reverse;
ff9940b0 7255{
6d3d9133 7256 char conditional[10];
ff9940b0 7257 char instr[100];
6d3d9133
NC
7258 int reg;
7259 unsigned long live_regs_mask;
7260 unsigned long func_type;
e26053d1 7261
6d3d9133 7262 func_type = arm_current_func_type ();
e2c671ba 7263
6d3d9133 7264 if (IS_NAKED (func_type))
d5b7b3ae 7265 return "";
6d3d9133
NC
7266
7267 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 7268 {
e2c671ba 7269 /* If this function was declared non-returning, and we have found a tail
3a5a4282
PB
7270 call, then we have to trust that the called function won't return. */
7271 if (really_return)
7272 {
7273 rtx ops[2];
7274
7275 /* Otherwise, trap an attempted return by aborting. */
7276 ops[0] = operand;
7277 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7278 : "abort");
7279 assemble_external_libcall (ops[1]);
7280 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7281 }
7282
e2c671ba
RE
7283 return "";
7284 }
6d3d9133 7285
5895f793 7286 if (current_function_calls_alloca && !really_return)
62b10bbc 7287 abort ();
ff9940b0 7288
c414f8a9 7289 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
d5b7b3ae 7290
6d3d9133 7291 return_used_this_function = 1;
ff9940b0 7292
6d3d9133 7293 live_regs_mask = arm_compute_save_reg_mask ();
ff9940b0 7294
1768c26f 7295 if (live_regs_mask)
6d3d9133 7296 {
1768c26f
PB
7297 const char * return_reg;
7298
7299 /* If we do not have any special requirements for function exit
7300 (eg interworking, or ISR) then we can load the return address
7301 directly into the PC. Otherwise we must load it into LR. */
7302 if (really_return
1768c26f
PB
7303 && ! TARGET_INTERWORK)
7304 return_reg = reg_names[PC_REGNUM];
6d3d9133 7305 else
1768c26f
PB
7306 return_reg = reg_names[LR_REGNUM];
7307
6d3d9133
NC
7308 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7309 /* There are two possible reasons for the IP register being saved.
7310 Either a stack frame was created, in which case IP contains the
7311 old stack pointer, or an ISR routine corrupted it. If this in an
7312 ISR routine then just restore IP, otherwise restore IP into SP. */
7313 if (! IS_INTERRUPT (func_type))
7314 {
7315 live_regs_mask &= ~ (1 << IP_REGNUM);
7316 live_regs_mask |= (1 << SP_REGNUM);
7317 }
f3bb6135 7318
3a7731fd
PB
7319 /* On some ARM architectures it is faster to use LDR rather than
7320 LDM to load a single register. On other architectures, the
7321 cost is the same. In 26 bit mode, or for exception handlers,
7322 we have to use LDM to load the PC so that the CPSR is also
7323 restored. */
7324 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
6d3d9133 7325 {
3a7731fd
PB
7326 if (live_regs_mask == (unsigned int)(1 << reg))
7327 break;
7328 }
7329 if (reg <= LAST_ARM_REGNUM
7330 && (reg != LR_REGNUM
7331 || ! really_return
7332 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7333 {
7334 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7335 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
6d3d9133 7336 }
ff9940b0 7337 else
1d5473cb 7338 {
1768c26f
PB
7339 char *p;
7340 int first = 1;
6d3d9133 7341
1768c26f
PB
7342 /* Generate the load multiple instruction to restore the registers. */
7343 if (frame_pointer_needed)
7344 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
f1acdf8b
NC
7345 else if (live_regs_mask & (1 << SP_REGNUM))
7346 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
da6558fd 7347 else
1768c26f
PB
7348 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7349
7350 p = instr + strlen (instr);
6d3d9133 7351
1768c26f
PB
7352 for (reg = 0; reg <= SP_REGNUM; reg++)
7353 if (live_regs_mask & (1 << reg))
7354 {
7355 int l = strlen (reg_names[reg]);
7356
7357 if (first)
7358 first = 0;
7359 else
7360 {
7361 memcpy (p, ", ", 2);
7362 p += 2;
7363 }
7364
7365 memcpy (p, "%|", 2);
7366 memcpy (p + 2, reg_names[reg], l);
7367 p += l + 2;
7368 }
7369
7370 if (live_regs_mask & (1 << LR_REGNUM))
7371 {
7372 int l = strlen (return_reg);
7373
7374 if (! first)
7375 {
7376 memcpy (p, ", ", 2);
7377 p += 2;
7378 }
7379
7380 memcpy (p, "%|", 2);
7381 memcpy (p + 2, return_reg, l);
3a7731fd
PB
7382 strcpy (p + 2 + l, ((TARGET_APCS_32
7383 && !IS_INTERRUPT (func_type))
7384 || !really_return)
7385 ? "}" : "}^");
1768c26f
PB
7386 }
7387 else
7388 strcpy (p, "}");
1d5473cb 7389 }
da6558fd 7390
1768c26f
PB
7391 output_asm_insn (instr, & operand);
7392
3a7731fd
PB
7393 /* See if we need to generate an extra instruction to
7394 perform the actual function return. */
7395 if (really_return
7396 && func_type != ARM_FT_INTERWORKED
7397 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
da6558fd 7398 {
3a7731fd
PB
7399 /* The return has already been handled
7400 by loading the LR into the PC. */
7401 really_return = 0;
da6558fd 7402 }
ff9940b0 7403 }
e26053d1 7404
1768c26f 7405 if (really_return)
ff9940b0 7406 {
6d3d9133
NC
7407 switch ((int) ARM_FUNC_TYPE (func_type))
7408 {
7409 case ARM_FT_ISR:
7410 case ARM_FT_FIQ:
7411 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7412 break;
7413
7414 case ARM_FT_INTERWORKED:
7415 sprintf (instr, "bx%s\t%%|lr", conditional);
7416 break;
7417
7418 case ARM_FT_EXCEPTION:
7419 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7420 break;
7421
7422 default:
1768c26f
PB
7423 /* ARMv5 implementations always provide BX, so interworking
7424 is the default unless APCS-26 is in use. */
7425 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7426 sprintf (instr, "bx%s\t%%|lr", conditional);
7427 else
7428 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7429 conditional, TARGET_APCS_32 ? "" : "s");
6d3d9133
NC
7430 break;
7431 }
1768c26f
PB
7432
7433 output_asm_insn (instr, & operand);
ff9940b0 7434 }
f3bb6135 7435
ff9940b0
RE
7436 return "";
7437}
7438
ef179a26
NC
7439/* Write the function name into the code section, directly preceding
7440 the function prologue.
7441
7442 Code will be output similar to this:
7443 t0
7444 .ascii "arm_poke_function_name", 0
7445 .align
7446 t1
7447 .word 0xff000000 + (t1 - t0)
7448 arm_poke_function_name
7449 mov ip, sp
7450 stmfd sp!, {fp, ip, lr, pc}
7451 sub fp, ip, #4
7452
7453 When performing a stack backtrace, code can inspect the value
7454 of 'pc' stored at 'fp' + 0. If the trace function then looks
7455 at location pc - 12 and the top 8 bits are set, then we know
7456 that there is a function name embedded immediately preceding this
7457 location and has length ((pc[-3]) & 0xff000000).
7458
7459 We assume that pc is declared as a pointer to an unsigned long.
7460
7461 It is of no benefit to output the function name if we are assembling
7462 a leaf function. These function types will not contain a stack
7463 backtrace structure, therefore it is not possible to determine the
7464 function name. */
7465
7466void
7467arm_poke_function_name (stream, name)
7468 FILE * stream;
5f37d07c 7469 const char * name;
ef179a26
NC
7470{
7471 unsigned long alignlength;
7472 unsigned long length;
7473 rtx x;
7474
d5b7b3ae
RE
7475 length = strlen (name) + 1;
7476 alignlength = ROUND_UP (length);
ef179a26 7477
949d79eb 7478 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 7479 ASM_OUTPUT_ALIGN (stream, 2);
30cf4896 7480 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
301d03af 7481 assemble_aligned_integer (UNITS_PER_WORD, x);
ef179a26
NC
7482}
7483
6d3d9133
NC
7484/* Place some comments into the assembler stream
7485 describing the current function. */
7486
08c148a8
NB
7487static void
7488arm_output_function_prologue (f, frame_size)
6cfc7210 7489 FILE * f;
08c148a8 7490 HOST_WIDE_INT frame_size;
cce8749e 7491{
6d3d9133 7492 unsigned long func_type;
08c148a8
NB
7493
7494 if (!TARGET_ARM)
7495 {
7496 thumb_output_function_prologue (f, frame_size);
7497 return;
7498 }
6d3d9133
NC
7499
7500 /* Sanity check. */
abaa26e5 7501 if (arm_ccfsm_state || arm_target_insn)
6d3d9133 7502 abort ();
31fdb4d5 7503
6d3d9133
NC
7504 func_type = arm_current_func_type ();
7505
7506 switch ((int) ARM_FUNC_TYPE (func_type))
7507 {
7508 default:
7509 case ARM_FT_NORMAL:
7510 break;
7511 case ARM_FT_INTERWORKED:
7512 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7513 break;
7514 case ARM_FT_EXCEPTION_HANDLER:
7515 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7516 break;
7517 case ARM_FT_ISR:
7518 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7519 break;
7520 case ARM_FT_FIQ:
7521 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7522 break;
7523 case ARM_FT_EXCEPTION:
7524 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7525 break;
7526 }
ff9940b0 7527
6d3d9133
NC
7528 if (IS_NAKED (func_type))
7529 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7530
7531 if (IS_VOLATILE (func_type))
7532 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7533
7534 if (IS_NESTED (func_type))
7535 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7536
dd18ae56
NC
7537 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7538 current_function_args_size,
7539 current_function_pretend_args_size, frame_size);
6d3d9133 7540
3cb66fd7 7541 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
dd18ae56 7542 frame_pointer_needed,
3cb66fd7 7543 cfun->machine->uses_anonymous_args);
cce8749e 7544
6f7ebcbb
NC
7545 if (cfun->machine->lr_save_eliminated)
7546 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7547
32de079a
RE
7548#ifdef AOF_ASSEMBLER
7549 if (flag_pic)
dd18ae56 7550 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 7551#endif
6d3d9133
NC
7552
7553 return_used_this_function = 0;
f3bb6135 7554}
cce8749e 7555
cd2b33d0 7556const char *
0616531f
RE
7557arm_output_epilogue (really_return)
7558 int really_return;
cce8749e 7559{
949d79eb 7560 int reg;
6f7ebcbb 7561 unsigned long saved_regs_mask;
6d3d9133 7562 unsigned long func_type;
c882c7ac
RE
7563 /* Floats_offset is the offset from the "virtual" frame. In an APCS
7564 frame that is $fp + 4 for a non-variadic function. */
7565 int floats_offset = 0;
cce8749e 7566 rtx operands[3];
949d79eb 7567 int frame_size = get_frame_size ();
d5b7b3ae 7568 FILE * f = asm_out_file;
6d3d9133 7569 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
cce8749e 7570
6d3d9133
NC
7571 /* If we have already generated the return instruction
7572 then it is futile to generate anything else. */
b36ba79f 7573 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 7574 return "";
cce8749e 7575
6d3d9133 7576 func_type = arm_current_func_type ();
d5b7b3ae 7577
6d3d9133
NC
7578 if (IS_NAKED (func_type))
7579 /* Naked functions don't have epilogues. */
7580 return "";
0616531f 7581
6d3d9133 7582 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 7583 {
86efdc8e 7584 rtx op;
6d3d9133
NC
7585
7586 /* A volatile function should never return. Call abort. */
ed0e6530 7587 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 7588 assemble_external_libcall (op);
e2c671ba 7589 output_asm_insn ("bl\t%a0", &op);
6d3d9133 7590
949d79eb 7591 return "";
e2c671ba
RE
7592 }
7593
6d3d9133
NC
7594 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7595 && ! really_return)
7596 /* If we are throwing an exception, then we really must
7597 be doing a return, so we can't tail-call. */
7598 abort ();
7599
6f7ebcbb 7600 saved_regs_mask = arm_compute_save_reg_mask ();
6d3d9133 7601
c882c7ac
RE
7602 /* XXX We should adjust floats_offset for any anonymous args, and then
7603 re-adjust vfp_offset below to compensate. */
7604
6d3d9133
NC
7605 /* Compute how far away the floats will be. */
7606 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
6f7ebcbb 7607 if (saved_regs_mask & (1 << reg))
6ed30148 7608 floats_offset += 4;
6d3d9133 7609
ff9940b0 7610 if (frame_pointer_needed)
cce8749e 7611 {
c882c7ac
RE
7612 int vfp_offset = 4;
7613
b111229a
RE
7614 if (arm_fpu_arch == FP_SOFT2)
7615 {
d5b7b3ae 7616 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 7617 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7618 {
7619 floats_offset += 12;
dd18ae56 7620 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
c882c7ac 7621 reg, FP_REGNUM, floats_offset - vfp_offset);
b111229a
RE
7622 }
7623 }
7624 else
7625 {
d5b7b3ae 7626 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7627
d5b7b3ae 7628 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 7629 {
5895f793 7630 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7631 {
7632 floats_offset += 12;
6cfc7210 7633
6354dc9b 7634 /* We can't unstack more than four registers at once. */
b111229a
RE
7635 if (start_reg - reg == 3)
7636 {
dd18ae56 7637 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
c882c7ac 7638 reg, FP_REGNUM, floats_offset - vfp_offset);
b111229a
RE
7639 start_reg = reg - 1;
7640 }
7641 }
7642 else
7643 {
7644 if (reg != start_reg)
dd18ae56
NC
7645 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7646 reg + 1, start_reg - reg,
c882c7ac 7647 FP_REGNUM, floats_offset - vfp_offset);
b111229a
RE
7648 start_reg = reg - 1;
7649 }
7650 }
7651
7652 /* Just in case the last register checked also needs unstacking. */
7653 if (reg != start_reg)
dd18ae56
NC
7654 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7655 reg + 1, start_reg - reg,
c882c7ac 7656 FP_REGNUM, floats_offset - vfp_offset);
b111229a 7657 }
6d3d9133 7658
6f7ebcbb 7659 /* saved_regs_mask should contain the IP, which at the time of stack
6d3d9133
NC
7660 frame generation actually contains the old stack pointer. So a
7661 quick way to unwind the stack is just pop the IP register directly
7662 into the stack pointer. */
6f7ebcbb 7663 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
6d3d9133 7664 abort ();
6f7ebcbb
NC
7665 saved_regs_mask &= ~ (1 << IP_REGNUM);
7666 saved_regs_mask |= (1 << SP_REGNUM);
6d3d9133 7667
6f7ebcbb 7668 /* There are two registers left in saved_regs_mask - LR and PC. We
6d3d9133
NC
7669 only need to restore the LR register (the return address), but to
7670 save time we can load it directly into the PC, unless we need a
7671 special function exit sequence, or we are not really returning. */
7672 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7673 /* Delete the LR from the register mask, so that the LR on
7674 the stack is loaded into the PC in the register mask. */
6f7ebcbb 7675 saved_regs_mask &= ~ (1 << LR_REGNUM);
b111229a 7676 else
6f7ebcbb 7677 saved_regs_mask &= ~ (1 << PC_REGNUM);
6d3d9133 7678
6f7ebcbb 7679 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
7b8b8ade
NC
7680
7681 if (IS_INTERRUPT (func_type))
7682 /* Interrupt handlers will have pushed the
7683 IP onto the stack, so restore it now. */
121308d4 7684 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
cce8749e
CH
7685 }
7686 else
7687 {
d2288d8d 7688 /* Restore stack pointer if necessary. */
56636818 7689 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
7690 {
7691 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
7692 operands[2] = GEN_INT (frame_size
7693 + current_function_outgoing_args_size);
d2288d8d
TG
7694 output_add_immediate (operands);
7695 }
7696
b111229a
RE
7697 if (arm_fpu_arch == FP_SOFT2)
7698 {
d5b7b3ae 7699 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 7700 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
7701 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7702 reg, SP_REGNUM);
b111229a
RE
7703 }
7704 else
7705 {
d5b7b3ae 7706 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 7707
d5b7b3ae 7708 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 7709 {
5895f793 7710 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7711 {
7712 if (reg - start_reg == 3)
7713 {
dd18ae56
NC
7714 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7715 start_reg, SP_REGNUM);
b111229a
RE
7716 start_reg = reg + 1;
7717 }
7718 }
7719 else
7720 {
7721 if (reg != start_reg)
dd18ae56
NC
7722 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7723 start_reg, reg - start_reg,
7724 SP_REGNUM);
6cfc7210 7725
b111229a
RE
7726 start_reg = reg + 1;
7727 }
7728 }
7729
7730 /* Just in case the last register checked also needs unstacking. */
7731 if (reg != start_reg)
dd18ae56
NC
7732 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7733 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
7734 }
7735
6d3d9133
NC
7736 /* If we can, restore the LR into the PC. */
7737 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7738 && really_return
7739 && current_function_pretend_args_size == 0
6f7ebcbb 7740 && saved_regs_mask & (1 << LR_REGNUM))
cce8749e 7741 {
6f7ebcbb
NC
7742 saved_regs_mask &= ~ (1 << LR_REGNUM);
7743 saved_regs_mask |= (1 << PC_REGNUM);
6d3d9133 7744 }
d5b7b3ae 7745
6d3d9133
NC
7746 /* Load the registers off the stack. If we only have one register
7747 to load use the LDR instruction - it is faster. */
6f7ebcbb 7748 if (saved_regs_mask == (1 << LR_REGNUM))
6d3d9133 7749 {
f4864588 7750 /* The exception handler ignores the LR, so we do
6d3d9133
NC
7751 not really need to load it off the stack. */
7752 if (eh_ofs)
7753 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
32de079a 7754 else
6d3d9133 7755 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
cce8749e 7756 }
6f7ebcbb 7757 else if (saved_regs_mask)
f1acdf8b
NC
7758 {
7759 if (saved_regs_mask & (1 << SP_REGNUM))
7760 /* Note - write back to the stack register is not enabled
7761 (ie "ldmfd sp!..."). We know that the stack pointer is
7762 in the list of registers and if we add writeback the
7763 instruction becomes UNPREDICTABLE. */
7764 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
7765 else
7766 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
7767 }
6d3d9133
NC
7768
7769 if (current_function_pretend_args_size)
cce8749e 7770 {
6d3d9133
NC
7771 /* Unwind the pre-pushed regs. */
7772 operands[0] = operands[1] = stack_pointer_rtx;
7773 operands[2] = GEN_INT (current_function_pretend_args_size);
7774 output_add_immediate (operands);
7775 }
7776 }
32de079a 7777
9b598fa0 7778#if 0
6d3d9133
NC
7779 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7780 /* Adjust the stack to remove the exception handler stuff. */
7781 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7782 REGNO (eh_ofs));
9b598fa0 7783#endif
b111229a 7784
f4864588
PB
7785 if (! really_return
7786 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7787 && current_function_pretend_args_size == 0
7788 && saved_regs_mask & (1 << PC_REGNUM)))
6d3d9133 7789 return "";
d5b7b3ae 7790
6d3d9133
NC
7791 /* Generate the return instruction. */
7792 switch ((int) ARM_FUNC_TYPE (func_type))
7793 {
7794 case ARM_FT_EXCEPTION_HANDLER:
7795 /* Even in 26-bit mode we do a mov (rather than a movs)
7796 because we don't have the PSR bits set in the address. */
7797 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7798 break;
0616531f 7799
6d3d9133
NC
7800 case ARM_FT_ISR:
7801 case ARM_FT_FIQ:
7802 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7803 break;
7804
7805 case ARM_FT_EXCEPTION:
7806 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7807 break;
7808
7809 case ARM_FT_INTERWORKED:
7810 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7811 break;
7812
7813 default:
7814 if (frame_pointer_needed)
7815 /* If we used the frame pointer then the return adddress
7816 will have been loaded off the stack directly into the
7817 PC, so there is no need to issue a MOV instruction
7818 here. */
7819 ;
7820 else if (current_function_pretend_args_size == 0
6f7ebcbb 7821 && (saved_regs_mask & (1 << LR_REGNUM)))
6d3d9133
NC
7822 /* Similarly we may have been able to load LR into the PC
7823 even if we did not create a stack frame. */
7824 ;
7825 else if (TARGET_APCS_32)
7826 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7827 else
7828 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7829 break;
cce8749e 7830 }
f3bb6135 7831
949d79eb
RE
7832 return "";
7833}
7834
08c148a8
NB
7835static void
7836arm_output_function_epilogue (file, frame_size)
7837 FILE *file ATTRIBUTE_UNUSED;
7838 HOST_WIDE_INT frame_size;
949d79eb 7839{
d5b7b3ae
RE
7840 if (TARGET_THUMB)
7841 {
7842 /* ??? Probably not safe to set this here, since it assumes that a
7843 function will be emitted as assembly immediately after we generate
7844 RTL for it. This does not happen for inline functions. */
7845 return_used_this_function = 0;
7846 }
7847 else
7848 {
7849 if (use_return_insn (FALSE)
7850 && return_used_this_function
7851 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 7852 && !frame_pointer_needed)
d5b7b3ae 7853 abort ();
f3bb6135 7854
d5b7b3ae 7855 /* Reset the ARM-specific per-function variables. */
d5b7b3ae
RE
7856 after_arm_reorg = 0;
7857 }
f3bb6135 7858}
e2c671ba 7859
2c849145
JM
7860/* Generate and emit an insn that we will recognize as a push_multi.
7861 Unfortunately, since this insn does not reflect very well the actual
7862 semantics of the operation, we need to annotate the insn for the benefit
7863 of DWARF2 frame unwind information. */
6d3d9133 7864
2c849145 7865static rtx
e2c671ba
RE
7866emit_multi_reg_push (mask)
7867 int mask;
7868{
7869 int num_regs = 0;
9b598fa0 7870 int num_dwarf_regs;
e2c671ba
RE
7871 int i, j;
7872 rtx par;
2c849145 7873 rtx dwarf;
87e27392 7874 int dwarf_par_index;
2c849145 7875 rtx tmp, reg;
e2c671ba 7876
d5b7b3ae 7877 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 7878 if (mask & (1 << i))
5895f793 7879 num_regs++;
e2c671ba
RE
7880
7881 if (num_regs == 0 || num_regs > 16)
7882 abort ();
7883
9b598fa0
RE
7884 /* We don't record the PC in the dwarf frame information. */
7885 num_dwarf_regs = num_regs;
7886 if (mask & (1 << PC_REGNUM))
7887 num_dwarf_regs--;
7888
87e27392 7889 /* For the body of the insn we are going to generate an UNSPEC in
05713b80 7890 parallel with several USEs. This allows the insn to be recognized
87e27392
NC
7891 by the push_multi pattern in the arm.md file. The insn looks
7892 something like this:
7893
7894 (parallel [
b15bca31
RE
7895 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7896 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
87e27392
NC
7897 (use (reg:SI 11 fp))
7898 (use (reg:SI 12 ip))
7899 (use (reg:SI 14 lr))
7900 (use (reg:SI 15 pc))
7901 ])
7902
7903 For the frame note however, we try to be more explicit and actually
7904 show each register being stored into the stack frame, plus a (single)
7905 decrement of the stack pointer. We do it this way in order to be
7906 friendly to the stack unwinding code, which only wants to see a single
7907 stack decrement per instruction. The RTL we generate for the note looks
7908 something like this:
7909
7910 (sequence [
7911 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7912 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7913 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7914 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7915 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
87e27392
NC
7916 ])
7917
7918 This sequence is used both by the code to support stack unwinding for
7919 exceptions handlers and the code to generate dwarf2 frame debugging. */
7920
43cffd11 7921 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
9b598fa0 7922 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
87e27392 7923 dwarf_par_index = 1;
e2c671ba 7924
d5b7b3ae 7925 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba
RE
7926 {
7927 if (mask & (1 << i))
7928 {
2c849145
JM
7929 reg = gen_rtx_REG (SImode, i);
7930
e2c671ba 7931 XVECEXP (par, 0, 0)
43cffd11
RE
7932 = gen_rtx_SET (VOIDmode,
7933 gen_rtx_MEM (BLKmode,
7934 gen_rtx_PRE_DEC (BLKmode,
7935 stack_pointer_rtx)),
7936 gen_rtx_UNSPEC (BLKmode,
2c849145 7937 gen_rtvec (1, reg),
9b598fa0 7938 UNSPEC_PUSH_MULT));
2c849145 7939
9b598fa0
RE
7940 if (i != PC_REGNUM)
7941 {
7942 tmp = gen_rtx_SET (VOIDmode,
7943 gen_rtx_MEM (SImode, stack_pointer_rtx),
7944 reg);
7945 RTX_FRAME_RELATED_P (tmp) = 1;
7946 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7947 dwarf_par_index++;
7948 }
2c849145 7949
e2c671ba
RE
7950 break;
7951 }
7952 }
7953
7954 for (j = 1, i++; j < num_regs; i++)
7955 {
7956 if (mask & (1 << i))
7957 {
2c849145
JM
7958 reg = gen_rtx_REG (SImode, i);
7959
7960 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7961
9b598fa0
RE
7962 if (i != PC_REGNUM)
7963 {
7964 tmp = gen_rtx_SET (VOIDmode,
7965 gen_rtx_MEM (SImode,
7966 plus_constant (stack_pointer_rtx,
7967 4 * j)),
7968 reg);
7969 RTX_FRAME_RELATED_P (tmp) = 1;
7970 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
7971 }
7972
e2c671ba
RE
7973 j++;
7974 }
7975 }
b111229a 7976
2c849145 7977 par = emit_insn (par);
87e27392
NC
7978
7979 tmp = gen_rtx_SET (SImode,
7980 stack_pointer_rtx,
7981 gen_rtx_PLUS (SImode,
7982 stack_pointer_rtx,
7983 GEN_INT (-4 * num_regs)));
7984 RTX_FRAME_RELATED_P (tmp) = 1;
7985 XVECEXP (dwarf, 0, 0) = tmp;
7986
2c849145
JM
7987 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7988 REG_NOTES (par));
7989 return par;
b111229a
RE
7990}
7991
2c849145 7992static rtx
b111229a
RE
7993emit_sfm (base_reg, count)
7994 int base_reg;
7995 int count;
7996{
7997 rtx par;
2c849145
JM
7998 rtx dwarf;
7999 rtx tmp, reg;
b111229a
RE
8000 int i;
8001
43cffd11 8002 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145 8003 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
8004
8005 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
8006
8007 XVECEXP (par, 0, 0)
8008 = gen_rtx_SET (VOIDmode,
8009 gen_rtx_MEM (BLKmode,
8010 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8011 gen_rtx_UNSPEC (BLKmode,
2c849145 8012 gen_rtvec (1, reg),
b15bca31 8013 UNSPEC_PUSH_MULT));
2c849145
JM
8014 tmp
8015 = gen_rtx_SET (VOIDmode,
8016 gen_rtx_MEM (XFmode,
8017 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8018 reg);
8019 RTX_FRAME_RELATED_P (tmp) = 1;
8020 XVECEXP (dwarf, 0, count - 1) = tmp;
8021
b111229a 8022 for (i = 1; i < count; i++)
2c849145
JM
8023 {
8024 reg = gen_rtx_REG (XFmode, base_reg++);
8025 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8026
8027 tmp = gen_rtx_SET (VOIDmode,
8028 gen_rtx_MEM (XFmode,
8029 gen_rtx_PRE_DEC (BLKmode,
8030 stack_pointer_rtx)),
8031 reg);
8032 RTX_FRAME_RELATED_P (tmp) = 1;
8033 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8034 }
b111229a 8035
2c849145
JM
8036 par = emit_insn (par);
8037 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8038 REG_NOTES (par));
8039 return par;
e2c671ba
RE
8040}
8041
095bb276
NC
8042/* Compute the distance from register FROM to register TO.
8043 These can be the arg pointer (26), the soft frame pointer (25),
8044 the stack pointer (13) or the hard frame pointer (11).
8045 Typical stack layout looks like this:
8046
8047 old stack pointer -> | |
8048 ----
8049 | | \
8050 | | saved arguments for
8051 | | vararg functions
8052 | | /
8053 --
8054 hard FP & arg pointer -> | | \
8055 | | stack
8056 | | frame
8057 | | /
8058 --
8059 | | \
8060 | | call saved
8061 | | registers
8062 soft frame pointer -> | | /
8063 --
8064 | | \
8065 | | local
8066 | | variables
8067 | | /
8068 --
8069 | | \
8070 | | outgoing
8071 | | arguments
8072 current stack pointer -> | | /
8073 --
8074
43aa4e05 8075 For a given function some or all of these stack components
095bb276
NC
8076 may not be needed, giving rise to the possibility of
8077 eliminating some of the registers.
8078
825dda42 8079 The values returned by this function must reflect the behavior
095bb276
NC
8080 of arm_expand_prologue() and arm_compute_save_reg_mask().
8081
8082 The sign of the number returned reflects the direction of stack
8083 growth, so the values are positive for all eliminations except
8084 from the soft frame pointer to the hard frame pointer. */
8085
8086unsigned int
8087arm_compute_initial_elimination_offset (from, to)
8088 unsigned int from;
8089 unsigned int to;
8090{
8091 unsigned int local_vars = (get_frame_size () + 3) & ~3;
8092 unsigned int outgoing_args = current_function_outgoing_args_size;
8093 unsigned int stack_frame;
8094 unsigned int call_saved_registers;
8095 unsigned long func_type;
8096
8097 func_type = arm_current_func_type ();
8098
8099 /* Volatile functions never return, so there is
8100 no need to save call saved registers. */
8101 call_saved_registers = 0;
8102 if (! IS_VOLATILE (func_type))
8103 {
121308d4 8104 unsigned int reg_mask;
095bb276
NC
8105 unsigned int reg;
8106
1d6e90ac 8107 /* Make sure that we compute which registers will be saved
121308d4
NC
8108 on the stack using the same algorithm that is used by
8109 arm_compute_save_reg_mask(). */
8110 reg_mask = arm_compute_save_reg0_reg12_mask ();
095bb276 8111
121308d4
NC
8112 /* Now count the number of bits set in save_reg_mask.
8113 For each set bit we need 4 bytes of stack space. */
121308d4
NC
8114 while (reg_mask)
8115 {
8116 call_saved_registers += 4;
8117 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8118 }
095bb276
NC
8119
8120 if (regs_ever_live[LR_REGNUM]
8121 /* If a stack frame is going to be created, the LR will
8122 be saved as part of that, so we do not need to allow
8123 for it here. */
8124 && ! frame_pointer_needed)
8125 call_saved_registers += 4;
ef7112de
NC
8126
8127 /* If the hard floating point registers are going to be
8128 used then they must be saved on the stack as well.
8129 Each register occupies 12 bytes of stack space. */
8130 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8131 if (regs_ever_live[reg] && ! call_used_regs[reg])
8132 call_saved_registers += 12;
095bb276
NC
8133 }
8134
8135 /* The stack frame contains 4 registers - the old frame pointer,
8136 the old stack pointer, the return address and PC of the start
8137 of the function. */
8138 stack_frame = frame_pointer_needed ? 16 : 0;
8139
095bb276
NC
8140 /* OK, now we have enough information to compute the distances.
8141 There must be an entry in these switch tables for each pair
8142 of registers in ELIMINABLE_REGS, even if some of the entries
8143 seem to be redundant or useless. */
8144 switch (from)
8145 {
8146 case ARG_POINTER_REGNUM:
8147 switch (to)
8148 {
8149 case THUMB_HARD_FRAME_POINTER_REGNUM:
8150 return 0;
8151
8152 case FRAME_POINTER_REGNUM:
8153 /* This is the reverse of the soft frame pointer
8154 to hard frame pointer elimination below. */
8155 if (call_saved_registers == 0 && stack_frame == 0)
8156 return 0;
8157 return (call_saved_registers + stack_frame - 4);
8158
8159 case ARM_HARD_FRAME_POINTER_REGNUM:
8160 /* If there is no stack frame then the hard
8161 frame pointer and the arg pointer coincide. */
8162 if (stack_frame == 0 && call_saved_registers != 0)
8163 return 0;
8164 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8165 return (frame_pointer_needed
8166 && current_function_needs_context
3cb66fd7 8167 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
095bb276
NC
8168
8169 case STACK_POINTER_REGNUM:
8170 /* If nothing has been pushed on the stack at all
8171 then this will return -4. This *is* correct! */
8172 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8173
8174 default:
8175 abort ();
8176 }
8177 break;
8178
8179 case FRAME_POINTER_REGNUM:
8180 switch (to)
8181 {
8182 case THUMB_HARD_FRAME_POINTER_REGNUM:
8183 return 0;
8184
8185 case ARM_HARD_FRAME_POINTER_REGNUM:
8186 /* The hard frame pointer points to the top entry in the
8187 stack frame. The soft frame pointer to the bottom entry
8188 in the stack frame. If there is no stack frame at all,
8189 then they are identical. */
8190 if (call_saved_registers == 0 && stack_frame == 0)
8191 return 0;
8192 return - (call_saved_registers + stack_frame - 4);
8193
8194 case STACK_POINTER_REGNUM:
8195 return local_vars + outgoing_args;
8196
8197 default:
8198 abort ();
8199 }
8200 break;
8201
8202 default:
8203 /* You cannot eliminate from the stack pointer.
8204 In theory you could eliminate from the hard frame
8205 pointer to the stack pointer, but this will never
8206 happen, since if a stack frame is not needed the
8207 hard frame pointer will never be used. */
8208 abort ();
8209 }
8210}
8211
6d3d9133
NC
8212/* Generate the prologue instructions for entry into an ARM function. */
8213
e2c671ba
RE
8214void
8215arm_expand_prologue ()
8216{
8217 int reg;
6d3d9133 8218 rtx amount;
2c849145 8219 rtx insn;
68dfd979 8220 rtx ip_rtx;
6d3d9133
NC
8221 unsigned long live_regs_mask;
8222 unsigned long func_type;
68dfd979 8223 int fp_offset = 0;
095bb276
NC
8224 int saved_pretend_args = 0;
8225 unsigned int args_to_push;
d3236b4d 8226
6d3d9133 8227 func_type = arm_current_func_type ();
e2c671ba 8228
31fdb4d5 8229 /* Naked functions don't have prologues. */
6d3d9133 8230 if (IS_NAKED (func_type))
31fdb4d5
DE
8231 return;
8232
095bb276
NC
8233 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8234 args_to_push = current_function_pretend_args_size;
8235
6d3d9133
NC
8236 /* Compute which register we will have to save onto the stack. */
8237 live_regs_mask = arm_compute_save_reg_mask ();
e2c671ba 8238
68dfd979 8239 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
d3236b4d 8240
e2c671ba
RE
8241 if (frame_pointer_needed)
8242 {
7b8b8ade
NC
8243 if (IS_INTERRUPT (func_type))
8244 {
8245 /* Interrupt functions must not corrupt any registers.
8246 Creating a frame pointer however, corrupts the IP
8247 register, so we must push it first. */
8248 insn = emit_multi_reg_push (1 << IP_REGNUM);
121308d4
NC
8249
8250 /* Do not set RTX_FRAME_RELATED_P on this insn.
8251 The dwarf stack unwinding code only wants to see one
8252 stack decrement per function, and this is not it. If
8253 this instruction is labeled as being part of the frame
8254 creation sequence then dwarf2out_frame_debug_expr will
8255 abort when it encounters the assignment of IP to FP
8256 later on, since the use of SP here establishes SP as
8257 the CFA register and not IP.
8258
8259 Anyway this instruction is not really part of the stack
8260 frame creation although it is part of the prologue. */
7b8b8ade
NC
8261 }
8262 else if (IS_NESTED (func_type))
68dfd979
NC
8263 {
8264 /* The Static chain register is the same as the IP register
8265 used as a scratch register during stack frame creation.
8266 To get around this need to find somewhere to store IP
8267 whilst the frame is being created. We try the following
8268 places in order:
8269
6d3d9133 8270 1. The last argument register.
68dfd979
NC
8271 2. A slot on the stack above the frame. (This only
8272 works if the function is not a varargs function).
095bb276
NC
8273 3. Register r3, after pushing the argument registers
8274 onto the stack.
6d3d9133 8275
34ce3d7b
JM
8276 Note - we only need to tell the dwarf2 backend about the SP
8277 adjustment in the second variant; the static chain register
8278 doesn't need to be unwound, as it doesn't contain a value
8279 inherited from the caller. */
d3236b4d 8280
68dfd979
NC
8281 if (regs_ever_live[3] == 0)
8282 {
8283 insn = gen_rtx_REG (SImode, 3);
8284 insn = gen_rtx_SET (SImode, insn, ip_rtx);
d3236b4d 8285 insn = emit_insn (insn);
68dfd979 8286 }
095bb276 8287 else if (args_to_push == 0)
68dfd979 8288 {
34ce3d7b 8289 rtx dwarf;
68dfd979
NC
8290 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8291 insn = gen_rtx_MEM (SImode, insn);
8292 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8293 insn = emit_insn (insn);
34ce3d7b 8294
68dfd979 8295 fp_offset = 4;
34ce3d7b
JM
8296
8297 /* Just tell the dwarf backend that we adjusted SP. */
8298 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8299 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8300 GEN_INT (-fp_offset)));
8301 RTX_FRAME_RELATED_P (insn) = 1;
8302 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8303 dwarf, REG_NOTES (insn));
68dfd979
NC
8304 }
8305 else
095bb276
NC
8306 {
8307 /* Store the args on the stack. */
3cb66fd7 8308 if (cfun->machine->uses_anonymous_args)
095bb276
NC
8309 insn = emit_multi_reg_push
8310 ((0xf0 >> (args_to_push / 4)) & 0xf);
8311 else
8312 insn = emit_insn
8313 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8314 GEN_INT (- args_to_push)));
8315
8316 RTX_FRAME_RELATED_P (insn) = 1;
8317
8318 saved_pretend_args = 1;
8319 fp_offset = args_to_push;
8320 args_to_push = 0;
8321
8322 /* Now reuse r3 to preserve IP. */
8323 insn = gen_rtx_REG (SImode, 3);
8324 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8325 (void) emit_insn (insn);
8326 }
68dfd979
NC
8327 }
8328
68dfd979
NC
8329 if (fp_offset)
8330 {
8331 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8332 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8333 }
8334 else
8335 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8336
6d3d9133 8337 insn = emit_insn (insn);
8e56560e 8338 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
8339 }
8340
095bb276 8341 if (args_to_push)
e2c671ba 8342 {
6d3d9133 8343 /* Push the argument registers, or reserve space for them. */
3cb66fd7 8344 if (cfun->machine->uses_anonymous_args)
2c849145 8345 insn = emit_multi_reg_push
095bb276 8346 ((0xf0 >> (args_to_push / 4)) & 0xf);
e2c671ba 8347 else
2c849145
JM
8348 insn = emit_insn
8349 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
095bb276 8350 GEN_INT (- args_to_push)));
2c849145 8351 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
8352 }
8353
3a7731fd
PB
8354 /* If this is an interrupt service routine, and the link register is
8355 going to be pushed, subtracting four now will mean that the
8356 function return can be done with a single instruction. */
8357 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8358 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8359 {
8360 emit_insn (gen_rtx_SET (SImode,
8361 gen_rtx_REG (SImode, LR_REGNUM),
8362 gen_rtx_PLUS (SImode,
8363 gen_rtx_REG (SImode, LR_REGNUM),
8364 GEN_INT (-4))));
8365 }
8366
e2c671ba
RE
8367 if (live_regs_mask)
8368 {
2c849145
JM
8369 insn = emit_multi_reg_push (live_regs_mask);
8370 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba 8371 }
d5b7b3ae 8372
6d3d9133 8373 if (! IS_VOLATILE (func_type))
b111229a 8374 {
6d3d9133 8375 /* Save any floating point call-saved registers used by this function. */
b111229a
RE
8376 if (arm_fpu_arch == FP_SOFT2)
8377 {
d5b7b3ae 8378 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
5895f793 8379 if (regs_ever_live[reg] && !call_used_regs[reg])
2c849145
JM
8380 {
8381 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8382 insn = gen_rtx_MEM (XFmode, insn);
8383 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8384 gen_rtx_REG (XFmode, reg)));
8385 RTX_FRAME_RELATED_P (insn) = 1;
8386 }
b111229a
RE
8387 }
8388 else
8389 {
d5b7b3ae 8390 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 8391
d5b7b3ae 8392 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a 8393 {
5895f793 8394 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
8395 {
8396 if (start_reg - reg == 3)
8397 {
2c849145
JM
8398 insn = emit_sfm (reg, 4);
8399 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
8400 start_reg = reg - 1;
8401 }
8402 }
8403 else
8404 {
8405 if (start_reg != reg)
2c849145
JM
8406 {
8407 insn = emit_sfm (reg + 1, start_reg - reg);
8408 RTX_FRAME_RELATED_P (insn) = 1;
8409 }
b111229a
RE
8410 start_reg = reg - 1;
8411 }
8412 }
8413
8414 if (start_reg != reg)
2c849145
JM
8415 {
8416 insn = emit_sfm (reg + 1, start_reg - reg);
8417 RTX_FRAME_RELATED_P (insn) = 1;
8418 }
b111229a
RE
8419 }
8420 }
e2c671ba
RE
8421
8422 if (frame_pointer_needed)
2c849145 8423 {
6d3d9133 8424 /* Create the new frame pointer. */
095bb276 8425 insn = GEN_INT (-(4 + args_to_push + fp_offset));
68dfd979 8426 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
2c849145 8427 RTX_FRAME_RELATED_P (insn) = 1;
68dfd979 8428
6d3d9133 8429 if (IS_NESTED (func_type))
68dfd979
NC
8430 {
8431 /* Recover the static chain register. */
095bb276
NC
8432 if (regs_ever_live [3] == 0
8433 || saved_pretend_args)
1d6e90ac 8434 insn = gen_rtx_REG (SImode, 3);
68dfd979
NC
8435 else /* if (current_function_pretend_args_size == 0) */
8436 {
8437 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8438 insn = gen_rtx_MEM (SImode, insn);
68dfd979 8439 }
1d6e90ac 8440
c14a3a45
NC
8441 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8442 /* Add a USE to stop propagate_one_insn() from barfing. */
6bacc7b0 8443 emit_insn (gen_prologue_use (ip_rtx));
68dfd979 8444 }
2c849145 8445 }
e2c671ba 8446
6d3d9133
NC
8447 amount = GEN_INT (-(get_frame_size ()
8448 + current_function_outgoing_args_size));
8449
e2c671ba
RE
8450 if (amount != const0_rtx)
8451 {
745b9093
JM
8452 /* This add can produce multiple insns for a large constant, so we
8453 need to get tricky. */
8454 rtx last = get_last_insn ();
2c849145
JM
8455 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8456 amount));
745b9093
JM
8457 do
8458 {
8459 last = last ? NEXT_INSN (last) : get_insns ();
8460 RTX_FRAME_RELATED_P (last) = 1;
8461 }
8462 while (last != insn);
e04c2d6c
RE
8463
8464 /* If the frame pointer is needed, emit a special barrier that
8465 will prevent the scheduler from moving stores to the frame
8466 before the stack adjustment. */
8467 if (frame_pointer_needed)
3894f59e
RE
8468 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
8469 hard_frame_pointer_rtx));
e2c671ba
RE
8470 }
8471
8472 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
8473 the call to mcount. Similarly if the user has requested no
8474 scheduling in the prolog. */
70f4f91c 8475 if (current_function_profile || TARGET_NO_SCHED_PRO)
e2c671ba 8476 emit_insn (gen_blockage ());
6f7ebcbb
NC
8477
8478 /* If the link register is being kept alive, with the return address in it,
8479 then make sure that it does not get reused by the ce2 pass. */
8480 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8481 {
6bacc7b0 8482 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
6f7ebcbb
NC
8483 cfun->machine->lr_save_eliminated = 1;
8484 }
e2c671ba 8485}
cce8749e 8486\f
9997d19d
RE
8487/* If CODE is 'd', then the X is a condition operand and the instruction
8488 should only be executed if the condition is true.
ddd5a7c1 8489 if CODE is 'D', then the X is a condition operand and the instruction
9997d19d
RE
8490 should only be executed if the condition is false: however, if the mode
8491 of the comparison is CCFPEmode, then always execute the instruction -- we
8492 do this because in these circumstances !GE does not necessarily imply LT;
8493 in these cases the instruction pattern will take care to make sure that
8494 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 8495 doing this instruction unconditionally.
9997d19d
RE
8496 If CODE is 'N' then X is a floating point operand that must be negated
8497 before output.
8498 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8499 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
8500
8501void
8502arm_print_operand (stream, x, code)
62b10bbc 8503 FILE * stream;
9997d19d
RE
8504 rtx x;
8505 int code;
8506{
8507 switch (code)
8508 {
8509 case '@':
f3139301 8510 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
8511 return;
8512
d5b7b3ae
RE
8513 case '_':
8514 fputs (user_label_prefix, stream);
8515 return;
8516
9997d19d 8517 case '|':
f3139301 8518 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
8519 return;
8520
8521 case '?':
8522 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
8523 {
8524 if (TARGET_THUMB || current_insn_predicate != NULL)
8525 abort ();
8526
8527 fputs (arm_condition_codes[arm_current_cc], stream);
8528 }
8529 else if (current_insn_predicate)
8530 {
8531 enum arm_cond_code code;
8532
8533 if (TARGET_THUMB)
8534 abort ();
8535
8536 code = get_arm_condition_code (current_insn_predicate);
8537 fputs (arm_condition_codes[code], stream);
8538 }
9997d19d
RE
8539 return;
8540
8541 case 'N':
8542 {
8543 REAL_VALUE_TYPE r;
8544 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8545 r = REAL_VALUE_NEGATE (r);
8546 fprintf (stream, "%s", fp_const_from_val (&r));
8547 }
8548 return;
8549
8550 case 'B':
8551 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
8552 {
8553 HOST_WIDE_INT val;
5895f793 8554 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 8555 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8556 }
9997d19d
RE
8557 else
8558 {
8559 putc ('~', stream);
8560 output_addr_const (stream, x);
8561 }
8562 return;
8563
8564 case 'i':
8565 fprintf (stream, "%s", arithmetic_instr (x, 1));
8566 return;
8567
8568 case 'I':
8569 fprintf (stream, "%s", arithmetic_instr (x, 0));
8570 return;
8571
8572 case 'S':
8573 {
8574 HOST_WIDE_INT val;
5895f793 8575 const char * shift = shift_op (x, &val);
9997d19d 8576
e2c671ba
RE
8577 if (shift)
8578 {
5895f793 8579 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
8580 if (val == -1)
8581 arm_print_operand (stream, XEXP (x, 1), 0);
8582 else
4bc74ece
NC
8583 {
8584 fputc ('#', stream);
36ba9cb8 8585 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8586 }
e2c671ba 8587 }
9997d19d
RE
8588 }
8589 return;
8590
d5b7b3ae
RE
8591 /* An explanation of the 'Q', 'R' and 'H' register operands:
8592
8593 In a pair of registers containing a DI or DF value the 'Q'
8594 operand returns the register number of the register containing
8595 the least signficant part of the value. The 'R' operand returns
8596 the register number of the register containing the most
8597 significant part of the value.
8598
8599 The 'H' operand returns the higher of the two register numbers.
8600 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8601 same as the 'Q' operand, since the most signficant part of the
8602 value is held in the lower number register. The reverse is true
8603 on systems where WORDS_BIG_ENDIAN is false.
8604
8605 The purpose of these operands is to distinguish between cases
8606 where the endian-ness of the values is important (for example
8607 when they are added together), and cases where the endian-ness
8608 is irrelevant, but the order of register operations is important.
8609 For example when loading a value from memory into a register
8610 pair, the endian-ness does not matter. Provided that the value
8611 from the lower memory address is put into the lower numbered
8612 register, and the value from the higher address is put into the
8613 higher numbered register, the load will work regardless of whether
8614 the value being loaded is big-wordian or little-wordian. The
8615 order of the two register loads can matter however, if the address
8616 of the memory location is actually held in one of the registers
8617 being overwritten by the load. */
c1c2bc04 8618 case 'Q':
d5b7b3ae 8619 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 8620 abort ();
d5b7b3ae 8621 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
8622 return;
8623
9997d19d 8624 case 'R':
d5b7b3ae 8625 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 8626 abort ();
d5b7b3ae
RE
8627 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8628 return;
8629
8630 case 'H':
8631 if (REGNO (x) > LAST_ARM_REGNUM)
8632 abort ();
8633 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
8634 return;
8635
8636 case 'm':
d5b7b3ae
RE
8637 asm_fprintf (stream, "%r",
8638 GET_CODE (XEXP (x, 0)) == REG
8639 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
8640 return;
8641
8642 case 'M':
dd18ae56 8643 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae 8644 REGNO (x),
e9d7b180 8645 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
8646 return;
8647
8648 case 'd':
64e92a26
RE
8649 /* CONST_TRUE_RTX means always -- that's the default. */
8650 if (x == const_true_rtx)
d5b7b3ae
RE
8651 return;
8652
8653 if (TARGET_ARM)
9997d19d
RE
8654 fputs (arm_condition_codes[get_arm_condition_code (x)],
8655 stream);
d5b7b3ae
RE
8656 else
8657 fputs (thumb_condition_code (x, 0), stream);
9997d19d
RE
8658 return;
8659
8660 case 'D':
64e92a26
RE
8661 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
8662 want to do that. */
8663 if (x == const_true_rtx)
8664 abort ();
d5b7b3ae
RE
8665
8666 if (TARGET_ARM)
8667 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8668 (get_arm_condition_code (x))],
9997d19d 8669 stream);
d5b7b3ae
RE
8670 else
8671 fputs (thumb_condition_code (x, 1), stream);
9997d19d
RE
8672 return;
8673
8674 default:
8675 if (x == 0)
8676 abort ();
8677
8678 if (GET_CODE (x) == REG)
d5b7b3ae 8679 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
8680 else if (GET_CODE (x) == MEM)
8681 {
8682 output_memory_reference_mode = GET_MODE (x);
8683 output_address (XEXP (x, 0));
8684 }
8685 else if (GET_CODE (x) == CONST_DOUBLE)
8686 fprintf (stream, "#%s", fp_immediate_constant (x));
8687 else if (GET_CODE (x) == NEG)
6354dc9b 8688 abort (); /* This should never happen now. */
9997d19d
RE
8689 else
8690 {
8691 fputc ('#', stream);
8692 output_addr_const (stream, x);
8693 }
8694 }
8695}
cce8749e 8696\f
301d03af
RS
8697#ifndef AOF_ASSEMBLER
8698/* Target hook for assembling integer objects. The ARM version needs to
8699 handle word-sized values specially. */
8700
8701static bool
8702arm_assemble_integer (x, size, aligned_p)
8703 rtx x;
8704 unsigned int size;
8705 int aligned_p;
8706{
8707 if (size == UNITS_PER_WORD && aligned_p)
8708 {
8709 fputs ("\t.word\t", asm_out_file);
8710 output_addr_const (asm_out_file, x);
8711
8712 /* Mark symbols as position independent. We only do this in the
8713 .text segment, not in the .data segment. */
8714 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
8715 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
8716 {
e26053d1 8717 if (GET_CODE (x) == SYMBOL_REF
14f583b8
PB
8718 && (CONSTANT_POOL_ADDRESS_P (x)
8719 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
301d03af
RS
8720 fputs ("(GOTOFF)", asm_out_file);
8721 else if (GET_CODE (x) == LABEL_REF)
8722 fputs ("(GOTOFF)", asm_out_file);
8723 else
8724 fputs ("(GOT)", asm_out_file);
8725 }
8726 fputc ('\n', asm_out_file);
8727 return true;
8728 }
1d6e90ac 8729
301d03af
RS
8730 return default_assemble_integer (x, size, aligned_p);
8731}
8732#endif
8733\f
cce8749e
CH
8734/* A finite state machine takes care of noticing whether or not instructions
8735 can be conditionally executed, and thus decrease execution time and code
8736 size by deleting branch instructions. The fsm is controlled by
8737 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8738
8739/* The state of the fsm controlling condition codes are:
8740 0: normal, do nothing special
8741 1: make ASM_OUTPUT_OPCODE not output this instruction
8742 2: make ASM_OUTPUT_OPCODE not output this instruction
8743 3: make instructions conditional
8744 4: make instructions conditional
8745
8746 State transitions (state->state by whom under condition):
8747 0 -> 1 final_prescan_insn if the `target' is a label
8748 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8749 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8750 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8751 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8752 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8753 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8754 (the target insn is arm_target_insn).
8755
ff9940b0
RE
8756 If the jump clobbers the conditions then we use states 2 and 4.
8757
8758 A similar thing can be done with conditional return insns.
8759
cce8749e
CH
8760 XXX In case the `target' is an unconditional branch, this conditionalising
8761 of the instructions always reduces code size, but not always execution
8762 time. But then, I want to reduce the code size to somewhere near what
8763 /bin/cc produces. */
8764
cce8749e
CH
8765/* Returns the index of the ARM condition code string in
8766 `arm_condition_codes'. COMPARISON should be an rtx like
8767 `(eq (...) (...))'. */
8768
84ed5e79 8769static enum arm_cond_code
cce8749e
CH
8770get_arm_condition_code (comparison)
8771 rtx comparison;
8772{
5165176d 8773 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
1d6e90ac
NC
8774 int code;
8775 enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
8776
8777 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 8778 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
8779 XEXP (comparison, 1));
8780
8781 switch (mode)
cce8749e 8782 {
84ed5e79
RE
8783 case CC_DNEmode: code = ARM_NE; goto dominance;
8784 case CC_DEQmode: code = ARM_EQ; goto dominance;
8785 case CC_DGEmode: code = ARM_GE; goto dominance;
8786 case CC_DGTmode: code = ARM_GT; goto dominance;
8787 case CC_DLEmode: code = ARM_LE; goto dominance;
8788 case CC_DLTmode: code = ARM_LT; goto dominance;
8789 case CC_DGEUmode: code = ARM_CS; goto dominance;
8790 case CC_DGTUmode: code = ARM_HI; goto dominance;
8791 case CC_DLEUmode: code = ARM_LS; goto dominance;
8792 case CC_DLTUmode: code = ARM_CC;
8793
8794 dominance:
8795 if (comp_code != EQ && comp_code != NE)
8796 abort ();
8797
8798 if (comp_code == EQ)
8799 return ARM_INVERSE_CONDITION_CODE (code);
8800 return code;
8801
5165176d 8802 case CC_NOOVmode:
84ed5e79 8803 switch (comp_code)
5165176d 8804 {
84ed5e79
RE
8805 case NE: return ARM_NE;
8806 case EQ: return ARM_EQ;
8807 case GE: return ARM_PL;
8808 case LT: return ARM_MI;
5165176d
RE
8809 default: abort ();
8810 }
8811
8812 case CC_Zmode:
84ed5e79 8813 switch (comp_code)
5165176d 8814 {
84ed5e79
RE
8815 case NE: return ARM_NE;
8816 case EQ: return ARM_EQ;
5165176d
RE
8817 default: abort ();
8818 }
8819
8820 case CCFPEmode:
e45b72c4
RE
8821 case CCFPmode:
8822 /* These encodings assume that AC=1 in the FPA system control
8823 byte. This allows us to handle all cases except UNEQ and
8824 LTGT. */
84ed5e79
RE
8825 switch (comp_code)
8826 {
8827 case GE: return ARM_GE;
8828 case GT: return ARM_GT;
8829 case LE: return ARM_LS;
8830 case LT: return ARM_MI;
e45b72c4
RE
8831 case NE: return ARM_NE;
8832 case EQ: return ARM_EQ;
8833 case ORDERED: return ARM_VC;
8834 case UNORDERED: return ARM_VS;
8835 case UNLT: return ARM_LT;
8836 case UNLE: return ARM_LE;
8837 case UNGT: return ARM_HI;
8838 case UNGE: return ARM_PL;
8839 /* UNEQ and LTGT do not have a representation. */
8840 case UNEQ: /* Fall through. */
8841 case LTGT: /* Fall through. */
84ed5e79
RE
8842 default: abort ();
8843 }
8844
8845 case CC_SWPmode:
8846 switch (comp_code)
8847 {
8848 case NE: return ARM_NE;
8849 case EQ: return ARM_EQ;
8850 case GE: return ARM_LE;
8851 case GT: return ARM_LT;
8852 case LE: return ARM_GE;
8853 case LT: return ARM_GT;
8854 case GEU: return ARM_LS;
8855 case GTU: return ARM_CC;
8856 case LEU: return ARM_CS;
8857 case LTU: return ARM_HI;
8858 default: abort ();
8859 }
8860
bd9c7e23
RE
8861 case CC_Cmode:
8862 switch (comp_code)
8863 {
8864 case LTU: return ARM_CS;
8865 case GEU: return ARM_CC;
8866 default: abort ();
8867 }
8868
5165176d 8869 case CCmode:
84ed5e79 8870 switch (comp_code)
5165176d 8871 {
84ed5e79
RE
8872 case NE: return ARM_NE;
8873 case EQ: return ARM_EQ;
8874 case GE: return ARM_GE;
8875 case GT: return ARM_GT;
8876 case LE: return ARM_LE;
8877 case LT: return ARM_LT;
8878 case GEU: return ARM_CS;
8879 case GTU: return ARM_HI;
8880 case LEU: return ARM_LS;
8881 case LTU: return ARM_CC;
5165176d
RE
8882 default: abort ();
8883 }
8884
cce8749e
CH
8885 default: abort ();
8886 }
84ed5e79
RE
8887
8888 abort ();
f3bb6135 8889}
cce8749e
CH
8890
8891
/* Implement the FINAL_PRESCAN_INSN hook: called from final.c for each
   INSN just before it is output.  This drives the conditional-execution
   state machine held in ARM_CCFSM_STATE, which spots a short forward
   (conditional) branch over a small run of instructions and arranges
   for that run to be output as conditionally executed instructions
   instead, communicating with the output patterns through
   ARM_CURRENT_CC, ARM_TARGET_LABEL and ARM_TARGET_INSN.
   NOTE(review): state 0 is "no sequence in progress" and state 4 is
   "inside a conditionalized run, waiting to reach ARM_TARGET_INSN";
   the exact meanings of states 1-3 are set by the arm.md output
   patterns — confirm there before relying on them.  */

void
arm_final_prescan_insn (insn)
     rtx insn;
{
  /* BODY will hold the body of INSN.  */
  rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick, and things need to be
     reversed if it appears to fail.  */
  int reverse = 0;

  /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
     taken are clobbered, even if the rtl suggests otherwise.  It also
     means that we have to grub around within the jump expression to find
     out what the conditions are when the jump isn't taken.  */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
        {
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
        }
      else
	return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes
     the jump should always come first */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
          || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      /* If the jump cannot be done with one instruction, we cannot
	 conditionally execute the instruction in the inverse case.  */
      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to.  */
      if (reverse)
        {
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
        }
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
        {
	  seeking_return = 1;
	  then_not_else = FALSE;
        }
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped++ < max_insns_skipped;)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* If using 32-bit addresses the cc is not preserved over
		 calls.  */
	      if (TARGET_APCS_32)
		{
		  /* Succeed if the following insn is the target label,
		     or if the following two insns are a barrier and
		     the target label.  */
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && GET_CODE (this_insn) == BARRIER)
		    this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && this_insn == label
		      && insns_skipped < max_insns_skipped)
		    {
		      if (jump_clobbers)
			{
			  arm_ccfsm_state = 2;
			  this_insn = next_nonnote_insn (this_insn);
			}
		      else
			arm_ccfsm_state = 1;
		      succeed = TRUE;
		    }
		  else
		    fail = TRUE;
		}
	      break;

	    case JUMP_INSN:
      	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* XXX Probably, the tests for SET and the PC are unnecessary.  */

	      scanbody = PATTERN (this_insn);
	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      /* Fail if a conditional return is undesirable (eg on a
		 StrongARM), but still allow this if optimizing for size.  */
	      else if (GET_CODE (scanbody) == RETURN
		       && !use_return_insn (TRUE)
		       && !optimize_size)
		fail = TRUE;
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      else
		fail = TRUE;	/* Unrecognized jump (eg epilogue).  */

	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail.  */
	      scanbody = PATTERN (this_insn);
	      if (!(GET_CODE (scanbody) == SET
		    || GET_CODE (scanbody) == PARALLEL)
		  || get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}
      if (succeed)
	{
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      /* Skip over any USEs that may precede the return insn or
		 barrier; running into a barrier or label here means the
		 rtl is malformed for this transformation.  */
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh, dear! we ran off the end.. give up */
		  recog (PATTERN (insn), insn, NULL);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
		}
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();
	  if (jump_clobbers)
	    {
	      if (reverse)
		abort ();
	      arm_current_cc =
		get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							  0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was.  */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));
	    }

	  if (reverse || then_not_else)
	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	}

      /* Restore recog_data (getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call; since the insn has been recognized already we
	 call recog direct).  */
      recog (PATTERN (insn), insn, NULL);
    }
}
cce8749e 9228
4b02997f
NC
9229/* Returns true if REGNO is a valid register
9230 for holding a quantity of tyoe MODE. */
9231
9232int
9233arm_hard_regno_mode_ok (regno, mode)
9234 unsigned int regno;
9235 enum machine_mode mode;
9236{
9237 if (GET_MODE_CLASS (mode) == MODE_CC)
9238 return regno == CC_REGNUM;
9239
9240 if (TARGET_THUMB)
9241 /* For the Thumb we only allow values bigger than SImode in
9242 registers 0 - 6, so that there is always a second low
9243 register available to hold the upper part of the value.
9244 We probably we ought to ensure that the register is the
9245 start of an even numbered register pair. */
e9d7b180 9246 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
4b02997f
NC
9247
9248 if (regno <= LAST_ARM_REGNUM)
3cb66fd7
NC
9249 /* We allow any value to be stored in the general regisetrs. */
9250 return 1;
4b02997f
NC
9251
9252 if ( regno == FRAME_POINTER_REGNUM
9253 || regno == ARG_POINTER_REGNUM)
9254 /* We only allow integers in the fake hard registers. */
9255 return GET_MODE_CLASS (mode) == MODE_INT;
9256
9257 /* The only registers left are the FPU registers
9258 which we only allow to hold FP values. */
9259 return GET_MODE_CLASS (mode) == MODE_FLOAT
9260 && regno >= FIRST_ARM_FP_REGNUM
9261 && regno <= LAST_ARM_FP_REGNUM;
9262}
9263
d5b7b3ae
RE
9264int
9265arm_regno_class (regno)
9266 int regno;
9267{
9268 if (TARGET_THUMB)
9269 {
9270 if (regno == STACK_POINTER_REGNUM)
9271 return STACK_REG;
9272 if (regno == CC_REGNUM)
9273 return CC_REG;
9274 if (regno < 8)
9275 return LO_REGS;
9276 return HI_REGS;
9277 }
9278
9279 if ( regno <= LAST_ARM_REGNUM
9280 || regno == FRAME_POINTER_REGNUM
9281 || regno == ARG_POINTER_REGNUM)
9282 return GENERAL_REGS;
9283
9284 if (regno == CC_REGNUM)
9285 return NO_REGS;
9286
9287 return FPU_REGS;
9288}
9289
9290/* Handle a special case when computing the offset
9291 of an argument from the frame pointer. */
1d6e90ac 9292
d5b7b3ae
RE
9293int
9294arm_debugger_arg_offset (value, addr)
9295 int value;
9296 rtx addr;
9297{
9298 rtx insn;
9299
9300 /* We are only interested if dbxout_parms() failed to compute the offset. */
9301 if (value != 0)
9302 return 0;
9303
9304 /* We can only cope with the case where the address is held in a register. */
9305 if (GET_CODE (addr) != REG)
9306 return 0;
9307
9308 /* If we are using the frame pointer to point at the argument, then
9309 an offset of 0 is correct. */
cd2b33d0 9310 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
9311 return 0;
9312
9313 /* If we are using the stack pointer to point at the
9314 argument, then an offset of 0 is correct. */
5895f793 9315 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
9316 && REGNO (addr) == SP_REGNUM)
9317 return 0;
9318
9319 /* Oh dear. The argument is pointed to by a register rather
9320 than being held in a register, or being stored at a known
9321 offset from the frame pointer. Since GDB only understands
9322 those two kinds of argument we must translate the address
9323 held in the register into an offset from the frame pointer.
9324 We do this by searching through the insns for the function
9325 looking to see where this register gets its value. If the
4912a07c 9326 register is initialized from the frame pointer plus an offset
d5b7b3ae
RE
9327 then we are in luck and we can continue, otherwise we give up.
9328
9329 This code is exercised by producing debugging information
9330 for a function with arguments like this:
9331
9332 double func (double a, double b, int c, double d) {return d;}
9333
9334 Without this code the stab for parameter 'd' will be set to
9335 an offset of 0 from the frame pointer, rather than 8. */
9336
9337 /* The if() statement says:
9338
9339 If the insn is a normal instruction
9340 and if the insn is setting the value in a register
9341 and if the register being set is the register holding the address of the argument
9342 and if the address is computing by an addition
9343 that involves adding to a register
9344 which is the frame pointer
9345 a constant integer
9346
9347 then... */
9348
9349 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9350 {
9351 if ( GET_CODE (insn) == INSN
9352 && GET_CODE (PATTERN (insn)) == SET
9353 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9354 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9355 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 9356 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
9357 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9358 )
9359 {
9360 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9361
9362 break;
9363 }
9364 }
9365
9366 if (value == 0)
9367 {
9368 debug_rtx (addr);
c725bd79 9369 warning ("unable to compute real location of stacked parameter");
d5b7b3ae
RE
9370 value = 8; /* XXX magic hack */
9371 }
9372
9373 return value;
9374}
9375
d19fb8e3 9376#define def_builtin(NAME, TYPE, CODE) \
6a2dd09a 9377 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE)
d19fb8e3
NC
9378
9379void
9380arm_init_builtins ()
9381{
cbd5937a 9382 tree endlink = void_list_node;
d19fb8e3
NC
9383 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9384 tree pchar_type_node = build_pointer_type (char_type_node);
9385
9386 tree int_ftype_int, void_ftype_pchar;
9387
b4de2f7d 9388 /* void func (char *) */
d19fb8e3 9389 void_ftype_pchar
b4de2f7d 9390 = build_function_type_list (void_type_node, pchar_type_node, NULL_TREE);
d19fb8e3
NC
9391
9392 /* int func (int) */
9393 int_ftype_int
9394 = build_function_type (integer_type_node, int_endlink);
9395
9396 /* Initialize arm V5 builtins. */
9397 if (arm_arch5)
eab4abeb 9398 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
d19fb8e3
NC
9399}
9400
9401/* Expand an expression EXP that calls a built-in function,
9402 with result going to TARGET if that's convenient
9403 (and in mode MODE if that's convenient).
9404 SUBTARGET may be used as the target for computing one of EXP's operands.
9405 IGNORE is nonzero if the value is to be ignored. */
9406
9407rtx
9408arm_expand_builtin (exp, target, subtarget, mode, ignore)
9409 tree exp;
9410 rtx target;
9411 rtx subtarget ATTRIBUTE_UNUSED;
9412 enum machine_mode mode ATTRIBUTE_UNUSED;
9413 int ignore ATTRIBUTE_UNUSED;
9414{
9415 enum insn_code icode;
9416 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9417 tree arglist = TREE_OPERAND (exp, 1);
9418 tree arg0;
9419 rtx op0, pat;
9420 enum machine_mode tmode, mode0;
9421 int fcode = DECL_FUNCTION_CODE (fndecl);
9422
9423 switch (fcode)
9424 {
9425 default:
9426 break;
9427
9428 case ARM_BUILTIN_CLZ:
9429 icode = CODE_FOR_clz;
9430 arg0 = TREE_VALUE (arglist);
9431 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9432 tmode = insn_data[icode].operand[0].mode;
9433 mode0 = insn_data[icode].operand[1].mode;
9434
9435 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9436 op0 = copy_to_mode_reg (mode0, op0);
9437 if (target == 0
9438 || GET_MODE (target) != tmode
9439 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9440 target = gen_reg_rtx (tmode);
9441 pat = GEN_FCN (icode) (target, op0);
9442 if (! pat)
9443 return 0;
9444 emit_insn (pat);
9445 return target;
d19fb8e3 9446 }
e26053d1 9447
d19fb8e3
NC
9448 /* @@@ Should really do something sensible here. */
9449 return NULL_RTX;
9450}
d5b7b3ae
RE
9451\f
9452/* Recursively search through all of the blocks in a function
9453 checking to see if any of the variables created in that
9454 function match the RTX called 'orig'. If they do then
9455 replace them with the RTX called 'new'. */
9456
9457static void
9458replace_symbols_in_block (block, orig, new)
9459 tree block;
9460 rtx orig;
9461 rtx new;
9462{
9463 for (; block; block = BLOCK_CHAIN (block))
9464 {
9465 tree sym;
9466
5895f793 9467 if (!TREE_USED (block))
d5b7b3ae
RE
9468 continue;
9469
9470 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9471 {
9472 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9473 || DECL_IGNORED_P (sym)
9474 || TREE_CODE (sym) != VAR_DECL
9475 || DECL_EXTERNAL (sym)
5895f793 9476 || !rtx_equal_p (DECL_RTL (sym), orig)
d5b7b3ae
RE
9477 )
9478 continue;
9479
7b8b8ade 9480 SET_DECL_RTL (sym, new);
d5b7b3ae
RE
9481 }
9482
9483 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9484 }
9485}
9486
1d6e90ac
NC
/* Return the number (counting from 0) of
   the least significant set bit in MASK.
   MASK must have at least one bit set.  */

#ifdef __GNUC__
inline
#endif
static int
number_of_first_bit_set (mask)
     int mask;
{
  int position = 0;

  /* Scan upwards from bit 0 until the set bit is found.  */
  while ((mask & (1 << position)) == 0)
    position++;

  return position;
}
9506
/* Generate code to return from a thumb function.
   F is the assembler output stream.
   If 'reg_containing_return_addr' is -1, then the return address is
   actually on the stack, at the stack pointer.
   EH_OFS, when non-NULL, is a register rtx whose value is added to the
   stack pointer before returning (used for __builtin_eh_return).  */
static void
thumb_exit (f, reg_containing_return_addr, eh_ofs)
     FILE * f;
     int reg_containing_return_addr;
     rtx eh_ofs;
{
  unsigned regs_available_for_popping;
  unsigned regs_to_pop;
  int pops_needed;
  unsigned available;
  unsigned required;
  int mode;
  int size;
  int restore_a4 = FALSE;

  /* Compute the registers we need to pop.  */
  regs_to_pop = 0;
  pops_needed = 0;

  /* There is an assumption here, that if eh_ofs is not NULL, the
     normal return address will have been pushed.  */
  if (reg_containing_return_addr == -1 || eh_ofs)
    {
      /* When we are generating a return for __builtin_eh_return,
	 reg_containing_return_addr must specify the return regno.  */
      if (eh_ofs && reg_containing_return_addr == -1)
	abort ();

      regs_to_pop |= 1 << LR_REGNUM;
      ++pops_needed;
    }

  if (TARGET_BACKTRACE)
    {
      /* Restore the (ARM) frame pointer and stack pointer.  */
      regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
      pops_needed += 2;
    }

  /* If there is nothing to pop then just emit the BX instruction and
     return.  */
  if (pops_needed == 0)
    {
      if (eh_ofs)
	asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

      asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
      return;
    }
  /* Otherwise if we are not supporting interworking and we have not created
     a backtrace structure and the function was not entered in ARM mode then
     just pop the return address straight into the PC.  */
  else if (!TARGET_INTERWORK
	   && !TARGET_BACKTRACE
	   && !is_called_in_ARM_mode (current_function_decl))
    {
      if (eh_ofs)
	{
	  asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
	  asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
	  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
	}
      else
	asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);

      return;
    }

  /* Find out how many of the (return) argument registers we can corrupt.  */
  regs_available_for_popping = 0;

  /* If returning via __builtin_eh_return, the bottom three registers
     all contain information needed for the return.  */
  if (eh_ofs)
    size = 12;
  else
    {
#ifdef RTX_CODE
      /* If we can deduce the registers used from the function's
	 return value.  This is more reliable than examining
	 regs_ever_live[] because that will be set if the register is
	 ever used in the function, not just if the register is used
	 to hold a return value.  */

      if (current_function_return_rtx != 0)
	mode = GET_MODE (current_function_return_rtx);
      else
#endif
	mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      if (size == 0)
	{
	  /* In a void function we can use any argument register.
	     In a function that returns a structure on the stack
	     we can use the second and third argument registers.  */
	  if (mode == VOIDmode)
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (1))
	      | (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	  else
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	}
      else if (size <= 4)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (2))
	  | (1 << ARG_REGISTER (3));
      else if (size <= 8)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (3));
    }

  /* Match registers to be popped with registers into which we pop them.
     Each iteration clears the lowest set bit of each mask.  */
  for (available = regs_available_for_popping,
       required  = regs_to_pop;
       required != 0 && available != 0;
       available &= ~(available & - available),
       required  &= ~(required  & - required))
    -- pops_needed;

  /* If we have any popping registers left over, remove them.  */
  if (available > 0)
    regs_available_for_popping &= ~available;

  /* Otherwise if we need another popping register we can use
     the fourth argument register.  */
  else if (pops_needed)
    {
      /* If we have not found any free argument registers and
	 reg a4 contains the return address, we must move it.  */
      if (regs_available_for_popping == 0
	  && reg_containing_return_addr == LAST_ARG_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}
      else if (size > 12)
	{
	  /* Register a4 is being used to hold part of the return value,
	     but we have dire need of a free, low register.  */
	  restore_a4 = TRUE;

	  asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
	}

      if (reg_containing_return_addr != LAST_ARG_REGNUM)
	{
	  /* The fourth argument register is available.  */
	  regs_available_for_popping |= 1 << LAST_ARG_REGNUM;

	  --pops_needed;
	}
    }

  /* Pop as many registers as we can.  */
  thumb_pushpop (f, regs_available_for_popping, FALSE);

  /* Process the registers we popped.  */
  if (reg_containing_return_addr == -1)
    {
      /* The return address was popped into the lowest numbered register.  */
      regs_to_pop &= ~(1 << LR_REGNUM);

      reg_containing_return_addr =
	number_of_first_bit_set (regs_available_for_popping);

      /* Remove this register for the mask of available registers, so that
         the return address will not be corrupted by further pops.  */
      regs_available_for_popping &= ~(1 << reg_containing_return_addr);
    }

  /* If we popped other registers then handle them here.  */
  if (regs_available_for_popping)
    {
      int frame_pointer;

      /* Work out which register currently contains the frame pointer.  */
      frame_pointer = number_of_first_bit_set (regs_available_for_popping);

      /* Move it into the correct place.  */
      asm_fprintf (f, "\tmov\t%r, %r\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);

      /* (Temporarily) remove it from the mask of popped registers.  */
      regs_available_for_popping &= ~(1 << frame_pointer);
      regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);

      if (regs_available_for_popping)
	{
	  int stack_pointer;

	  /* We popped the stack pointer as well,
	     find the register that contains it.  */
	  stack_pointer = number_of_first_bit_set (regs_available_for_popping);

	  /* Move it into the stack register.  */
	  asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);

	  /* At this point we have popped all necessary registers, so
	     do not worry about restoring regs_available_for_popping
	     to its correct value:

	     assert (pops_needed == 0)
	     assert (regs_available_for_popping == (1 << frame_pointer))
	     assert (regs_to_pop == (1 << STACK_POINTER))  */
	}
      else
	{
	  /* Since we have just moved the popped value into the frame
	     pointer, the popping register is available for reuse, and
	     we know that we still have the stack pointer left to pop.  */
	  regs_available_for_popping |= (1 << frame_pointer);
	}
    }

  /* If we still have registers left on the stack, but we no longer have
     any registers into which we can pop them, then we must move the return
     address into the link register and make available the register that
     contained it.  */
  if (regs_available_for_popping == 0 && pops_needed > 0)
    {
      regs_available_for_popping |= 1 << reg_containing_return_addr;

      asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
		   reg_containing_return_addr);

      reg_containing_return_addr = LR_REGNUM;
    }

  /* If we have registers left on the stack then pop some more.
     We know that at most we will want to pop FP and SP.  */
  if (pops_needed > 0)
    {
      int popped_into;
      int move_to;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      /* We have popped either FP or SP.
	 Move whichever one it is into the correct register.  */
      popped_into = number_of_first_bit_set (regs_available_for_popping);
      move_to     = number_of_first_bit_set (regs_to_pop);

      asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);

      regs_to_pop &= ~(1 << move_to);

      --pops_needed;
    }

  /* If we still have not popped everything then we must have only
     had one register available to us and we are now popping the SP.  */
  if (pops_needed > 0)
    {
      int popped_into;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      popped_into = number_of_first_bit_set (regs_available_for_popping);

      asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
      /*
	assert (regs_to_pop == (1 << STACK_POINTER))
	assert (pops_needed == 1)
      */
    }

  /* If necessary restore the a4 register.  */
  if (restore_a4)
    {
      if (reg_containing_return_addr != LR_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }

  if (eh_ofs)
    asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

  /* Return to caller.  */
  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
}
9799
/* Emit code to push or pop registers to or from the stack.
   F is the assembler output stream.
   MASK is the set of registers to transfer: bit N set means
   register N.  Only the low registers (bits 0-7) plus LR (on a
   push) or PC (on a pop) are examined; other bits are ignored.
   PUSH is nonzero to emit a push, zero to emit a pop.  */

static void
thumb_pushpop (f, mask, push)
     FILE * f;
     int mask;
     int push;
{
  int regno;
  int lo_mask = mask & 0xFF;

  if (lo_mask == 0 && !push && (mask & (1 << 15)))
    {
      /* Special case.  Do not generate a POP PC statement here, do it in
	 thumb_exit() */
      thumb_exit (f, -1, NULL_RTX);
      return;
    }

  fprintf (f, "\t%s\t{", push ? "push" : "pop");

  /* Look at the low registers first.  */
  for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
    {
      if (lo_mask & 1)
	{
	  asm_fprintf (f, "%r", regno);

	  /* More low registers still to print, so emit a separator.  */
	  if ((lo_mask & ~1) != 0)
	    fprintf (f, ", ");
	}
    }

  if (push && (mask & (1 << LR_REGNUM)))
    {
      /* Catch pushing the LR.  */
      if (mask & 0xFF)
	fprintf (f, ", ");

      asm_fprintf (f, "%r", LR_REGNUM);
    }
  else if (!push && (mask & (1 << PC_REGNUM)))
    {
      /* Catch popping the PC.  */
      if (TARGET_INTERWORK || TARGET_BACKTRACE)
	{
	  /* The PC is never popped directly, instead
	     it is popped into r3 and then BX is used.  */
	  fprintf (f, "}\n");

	  thumb_exit (f, -1, NULL_RTX);

	  return;
	}
      else
	{
	  if (mask & 0xFF)
	    fprintf (f, ", ");

	  asm_fprintf (f, "%r", PC_REGNUM);
	}
    }

  fprintf (f, "}\n");
}
9865\f
9866void
9867thumb_final_prescan_insn (insn)
9868 rtx insn;
9869{
d5b7b3ae 9870 if (flag_print_asm_name)
9d98a694
AO
9871 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9872 INSN_ADDRESSES (INSN_UID (insn)));
d5b7b3ae
RE
9873}
9874
9875int
9876thumb_shiftable_const (val)
9877 unsigned HOST_WIDE_INT val;
9878{
9879 unsigned HOST_WIDE_INT mask = 0xff;
9880 int i;
9881
9882 if (val == 0) /* XXX */
9883 return 0;
9884
9885 for (i = 0; i < 25; i++)
9886 if ((val & (mask << i)) == val)
9887 return 1;
9888
9889 return 0;
9890}
9891
/* Returns nonzero if the current function contains, or might contain,
   a far jump.  IN_PROLOGUE is nonzero when called from the prologue or
   epilogue generation code; otherwise we are being called on behalf of
   INITIAL_ELIMINATION_OFFSET.  The result is sticky: once we decide
   far jumps are used, we keep saying so for this function.  */

int
thumb_far_jump_used_p (in_prologue)
     int in_prologue;
{
  rtx insn;

  /* This test is only important for leaf functions.  */
  /* assert (!leaf_function_p ()); */

  /* If we have already decided that far jumps may be used,
     do not bother checking again, and always return true even if
     it turns out that they are not being used.  Once we have made
     the decision that far jumps are present (and that hence the link
     register will be pushed onto the stack) we cannot go back on it.  */
  if (cfun->machine->far_jump_used)
    return 1;

  /* If this function is not being called from the prologue/epilogue
     generation code then it must be being called from the
     INITIAL_ELIMINATION_OFFSET macro.  */
  if (!in_prologue)
    {
      /* In this case we know that we are being asked about the elimination
	 of the arg pointer register.  If that register is not being used,
	 then there are no arguments on the stack, and we do not have to
	 worry that a far jump might force the prologue to push the link
	 register, changing the stack offsets.  In this case we can just
	 return false, since the presence of far jumps in the function will
	 not affect stack offsets.

	 If the arg pointer is live (or if it was live, but has now been
	 eliminated and so set to dead) then we do have to test to see if
	 the function might contain a far jump.  This test can lead to some
	 false negatives, since before reload is completed, the length of
	 branch instructions is not known, so gcc defaults to returning their
	 longest length, which in turn sets the far jump attribute to true.

	 A false negative will not result in bad code being generated, but it
	 will result in a needless push and pop of the link register.  We
	 hope that this does not occur too often.  */
      if (regs_ever_live [ARG_POINTER_REGNUM])
	cfun->machine->arg_pointer_live = 1;
      else if (!cfun->machine->arg_pointer_live)
	return 0;
    }

  /* Check to see if the function contains a branch
     insn with the far jump attribute set.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN
	  /* Ignore tablejump patterns.  */
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
	  && get_attr_far_jump (insn) == FAR_JUMP_YES)
	{
	  /* Record the fact that we have decided that
	     the function does use far jumps.  */
	  cfun->machine->far_jump_used = 1;
	  return 1;
	}
    }

  return 0;
}
9961
825dda42 9962/* Return nonzero if FUNC must be entered in ARM mode. */
1d6e90ac 9963
d5b7b3ae
RE
9964int
9965is_called_in_ARM_mode (func)
9966 tree func;
9967{
9968 if (TREE_CODE (func) != FUNCTION_DECL)
9969 abort ();
9970
9971 /* Ignore the problem about functions whoes address is taken. */
9972 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9973 return TRUE;
9974
9975#ifdef ARM_PE
91d231cb 9976 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
d5b7b3ae
RE
9977#else
9978 return FALSE;
9979#endif
9980}
9981
/* The bits of the epilogue which aren't usefully expanded as rtl:
   emit the text of the Thumb epilogue directly.  Restores any high
   registers saved by the prologue, pops the low registers and returns,
   either via pop {pc}, via thumb_exit, or by letting the caller-side
   veneer handle the ARM/Thumb return.  Returns an (empty) asm string.  */

const char *
thumb_unexpanded_epilogue ()
{
  int regno;
  int live_regs_mask = 0;	/* Low registers saved by the prologue.  */
  int high_regs_pushed = 0;	/* Count of saved high registers (r8-r12).  */
  int leaf_function = leaf_function_p ();
  int had_to_push_lr;
  rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;

  /* A return insn has already been emitted for this function.  */
  if (return_used_this_function)
    return "";

  /* Naked functions supply their own epilogue.  */
  if (IS_NAKED (arm_current_func_type ()))
    return "";

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (THUMB_REG_PUSHED_P (regno))
      live_regs_mask |= 1 << regno;

  for (regno = 8; regno < 13; regno++)
    if (THUMB_REG_PUSHED_P (regno))
      high_regs_pushed++;

  /* The prolog may have pushed some high registers to use as
     work registers.  eg the testsuite file:
     gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
     compiles to produce:
	push	{r4, r5, r6, r7, lr}
	mov	r7, r9
	mov	r6, r8
	push	{r6, r7}
     as part of the prolog.  We have to undo that pushing here.  */

  if (high_regs_pushed)
    {
      int mask = live_regs_mask;	/* Low regs usable as pop targets.  */
      int next_hi_reg;
      int size;
      int mode;

#ifdef RTX_CODE
      /* If we can deduce the registers used from the function's return value.
	 This is more reliable than examining regs_ever_live[] because that
	 will be set if the register is ever used in the function, not just if
	 the register is used to hold a return value.  */

      if (current_function_return_rtx != 0)
	mode = GET_MODE (current_function_return_rtx);
      else
#endif
	mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      /* Unless we are returning a type of size > 12 register r3 is
	 available.  */
      if (size < 13)
	mask |= 1 << 3;

      if (mask == 0)
	/* Oh dear!  We have no low registers into which we can pop
	   high registers!  */
	internal_error
	  ("no low registers available for popping high registers");

      /* Find the first saved high register.  */
      for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
	if (THUMB_REG_PUSHED_P (next_hi_reg))
	  break;

      while (high_regs_pushed)
	{
	  /* Find lo register(s) into which the high register(s) can
	     be popped.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		high_regs_pushed--;
	      if (high_regs_pushed == 0)
		break;
	    }

	  mask &= (2 << regno) - 1;	/* A noop if regno == 8 */

	  /* Pop the values into the low register(s).  */
	  thumb_pushpop (asm_out_file, mask, 0);

	  /* Move the value(s) into the high registers.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
			       regno);

		  /* Advance to the next saved high register.  */
		  for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
		    if (THUMB_REG_PUSHED_P (next_hi_reg))
		      break;
		}
	    }
	}
    }

  had_to_push_lr = (live_regs_mask || !leaf_function
		    || thumb_far_jump_used_p (1));

  if (TARGET_BACKTRACE
      && ((live_regs_mask & 0xFF) == 0)
      && regs_ever_live [LAST_ARG_REGNUM] != 0)
    {
      /* The stack backtrace structure creation code had to
	 push R7 in order to get a work register, so we pop
	 it now.  */
      live_regs_mask |= (1 << LAST_LO_REGNUM);
    }

  if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
    {
      /* Pop the return address directly into the PC when we do not
	 need an interworking return and there is no EH adjustment.  */
      if (had_to_push_lr
	  && !is_called_in_ARM_mode (current_function_decl)
	  && !eh_ofs)
	live_regs_mask |= 1 << PC_REGNUM;

      /* Either no argument registers were pushed or a backtrace
	 structure was created which includes an adjusted stack
	 pointer, so just pop everything.  */
      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (eh_ofs)
	thumb_exit (asm_out_file, 2, eh_ofs);
      /* We have either just popped the return address into the
	 PC or it was kept in LR for the entire function or
	 it is still on the stack because we do not want to
	 return by doing a pop {pc}.  */
      else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
	thumb_exit (asm_out_file,
		    (had_to_push_lr
		     && is_called_in_ARM_mode (current_function_decl)) ?
		    -1 : LR_REGNUM, NULL_RTX);
    }
  else
    {
      /* Pop everything but the return address.  */
      live_regs_mask &= ~(1 << PC_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (had_to_push_lr)
	/* Get the return address into a temporary register.  */
	thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);

      /* Remove the argument registers that were pushed onto the stack.  */
      asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
		   SP_REGNUM, SP_REGNUM,
		   current_function_pretend_args_size);

      if (eh_ofs)
	thumb_exit (asm_out_file, 2, eh_ofs);
      else
	thumb_exit (asm_out_file,
		    had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
    }

  return "";
}
10151
10152/* Functions to save and restore machine-specific function data. */
10153
e2500fed
GK
10154static struct machine_function *
10155arm_init_machine_status ()
d5b7b3ae 10156{
e2500fed
GK
10157 struct machine_function *machine;
10158 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
6d3d9133 10159
e2500fed
GK
10160#if ARM_FT_UNKNOWN != 0
10161 machine->func_type = ARM_FT_UNKNOWN;
6d3d9133 10162#endif
e2500fed 10163 return machine;
f7a80099
NC
10164}
10165
d5b7b3ae
RE
10166/* Return an RTX indicating where the return address to the
10167 calling function can be found. */
1d6e90ac 10168
d5b7b3ae
RE
10169rtx
10170arm_return_addr (count, frame)
10171 int count;
10172 rtx frame ATTRIBUTE_UNUSED;
10173{
d5b7b3ae
RE
10174 if (count != 0)
10175 return NULL_RTX;
10176
9e2f7ec7
DD
10177 if (TARGET_APCS_32)
10178 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10179 else
d5b7b3ae 10180 {
9e2f7ec7 10181 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
d5b7b3ae 10182 GEN_INT (RETURN_ADDR_MASK26));
9e2f7ec7 10183 return get_func_hard_reg_initial_val (cfun, lr);
d5b7b3ae 10184 }
d5b7b3ae
RE
10185}
10186
10187/* Do anything needed before RTL is emitted for each function. */
1d6e90ac 10188
d5b7b3ae
RE
10189void
10190arm_init_expanders ()
10191{
10192 /* Arrange to initialize and mark the machine per-function status. */
10193 init_machine_status = arm_init_machine_status;
d5b7b3ae
RE
10194}
10195
/* Generate the rest of a function's prologue (as RTL): set up the
   frame pointer and perform the stack decrement for the local frame
   plus outgoing argument area.  Register saves are emitted as text by
   thumb_output_function_prologue.  */

void
thumb_expand_prologue ()
{
  /* Total bytes to reserve: local frame plus outgoing args.  */
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);
  unsigned long func_type;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  if (IS_INTERRUPT (func_type))
    {
      error ("interrupt Service Routines cannot be coded in Thumb mode");
      return;
    }

  if (frame_pointer_needed)
    emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));

  if (amount)
    {
      amount = ROUND_UP (amount);

      /* Small decrements fit the immediate field of a single sub.  */
      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (- amount)));
      else
	{
	  int regno;
	  rtx reg;

	  /* The stack decrement is too big for an immediate value in a single
	     insn.  In theory we could issue multiple subtracts, but after
	     three of them it becomes more space efficient to place the full
	     value in the constant pool and load into a register.  (Also the
	     ARM debugger really likes to see only one stack decrement per
	     function).  So instead we look for a scratch register into which
	     we can load the decrement, and then we subtract this from the
	     stack pointer.  Unfortunately on the thumb the only available
	     scratch registers are the argument registers, and we cannot use
	     these as they may hold arguments to the function.  Instead we
	     attempt to locate a call preserved register which is used by this
	     function.  If we can find one, then we know that it will have
	     been pushed at the start of the prologue and so we can corrupt
	     it now.  */
	  for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
	    if (THUMB_REG_PUSHED_P (regno)
		&& !(frame_pointer_needed
		     && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
	      break;

	  if (regno > LAST_LO_REGNUM) /* Very unlikely.  */
	    {
	      /* No pushed call-saved low register is available: borrow a
		 low register and preserve its value in IP.  */
	      rtx spare = gen_rtx (REG, SImode, IP_REGNUM);

	      /* Choose an arbitary, non-argument low register.  */
	      reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);

	      /* Save it by copying it into a high, scratch register.  */
	      emit_insn (gen_movsi (spare, reg));
	      /* Add a USE to stop propagate_one_insn() from barfing.  */
	      emit_insn (gen_prologue_use (spare));

	      /* Decrement the stack.  */
	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));

	      /* Restore the low register's original value.  */
	      emit_insn (gen_movsi (reg, spare));

	      /* Emit a USE of the restored scratch register, so that flow
		 analysis will not consider the restore redundant.  The
		 register won't be used again in this function and isn't
		 restored by the epilogue.  */
	      emit_insn (gen_prologue_use (reg));
	    }
	  else
	    {
	      /* Corrupt the pushed call-saved register directly.  */
	      reg = gen_rtx (REG, SImode, regno);

	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));
	    }
	}
    }

  /* Keep the scheduler from moving insns across the prologue when
     profiling or when explicitly requested.  */
  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
10292
10293void
10294thumb_expand_epilogue ()
10295{
10296 HOST_WIDE_INT amount = (get_frame_size ()
10297 + current_function_outgoing_args_size);
6d3d9133
NC
10298
10299 /* Naked functions don't have prologues. */
10300 if (IS_NAKED (arm_current_func_type ()))
d5b7b3ae
RE
10301 return;
10302
10303 if (frame_pointer_needed)
10304 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10305 else if (amount)
10306 {
10307 amount = ROUND_UP (amount);
10308
10309 if (amount < 512)
10310 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10311 GEN_INT (amount)));
10312 else
10313 {
10314 /* r3 is always free in the epilogue. */
10315 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10316
10317 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10318 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10319 }
10320 }
10321
10322 /* Emit a USE (stack_pointer_rtx), so that
10323 the stack adjustment will not be deleted. */
6bacc7b0 10324 emit_insn (gen_prologue_use (stack_pointer_rtx));
d5b7b3ae 10325
70f4f91c 10326 if (current_function_profile || TARGET_NO_SCHED_PRO)
d5b7b3ae
RE
10327 emit_insn (gen_blockage ());
10328}
10329
08c148a8
NB
/* Emit the textual part of a Thumb function prologue to F: the
   ARM->Thumb entry veneer (when needed), pushes of anonymous argument
   registers, the low-register/LR push, the optional backtrace
   structure, and saves of any high registers via low scratch
   registers.  SIZE is unused here.  */

static void
thumb_output_function_prologue (f, size)
     FILE * f;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  int live_regs_mask = 0;	/* Low registers that must be pushed.  */
  int high_regs_pushed = 0;	/* Count of high regs (r8-r12) to save.  */
  int regno;

  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
	abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
	abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate code sequence to switch us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
	 ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
	 change in instruction sets.  This label is also used by
	 the assembler to bypass the ARM code when this function
	 is called from a Thumb encoded function elsewhere in the
	 same file.  Hence the definition of STUB_NAME here must
	 agree with the definition in gas/config/tc-arm.c  */

#define STUB_NAME ".real_start_of"

      fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
	name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }

  if (current_function_pretend_args_size)
    {
      /* Push the incoming argument registers that back up the
	 anonymous (stdarg) arguments, or just drop the stack.  */
      if (cfun->machine->uses_anonymous_args)
	{
	  int num_pushes;

	  fprintf (f, "\tpush\t{");

	  num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);

	  for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
	       regno <= LAST_ARG_REGNUM;
	       regno++)
	    asm_fprintf (f, "%r%s", regno,
			 regno == LAST_ARG_REGNUM ? "" : ", ");

	  fprintf (f, "}\n");
	}
      else
	asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
		     SP_REGNUM, SP_REGNUM,
		     current_function_pretend_args_size);
    }

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (THUMB_REG_PUSHED_P (regno))
      live_regs_mask |= 1 << regno;

  /* LR must be saved if we push anything, call anything, or might
     need it as scratch for a far jump.  */
  if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
    live_regs_mask |= 1 << LR_REGNUM;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register = 0;
      int wr;

      /* We have been asked to create a stack backtrace structure.
         The code looks like this:

	 0   .align 2
	 0   func:
         0     sub   SP, #16         Reserve space for 4 registers.
	 2     push  {R7}            Get a work register.
         4     add   R7, SP, #20     Get the stack pointer before the push.
         6     str   R7, [SP, #8]    Store the stack pointer (before reserving the space).
         8     mov   R7, PC          Get hold of the start of this code plus 12.
        10     str   R7, [SP, #16]   Store it.
        12     mov   R7, FP          Get hold of the current frame pointer.
        14     str   R7, [SP, #4]    Store it.
        16     mov   R7, LR          Get hold of the current return address.
        18     str   R7, [SP, #12]   Store it.
        20     add   R7, SP, #16     Point at the start of the backtrace structure.
        22     mov   FP, R7          Put this value into the frame pointer.  */

      if ((live_regs_mask & 0xFF) == 0)
	{
	  /* See if the a4 register is free.  */

	  if (regs_ever_live [LAST_ARG_REGNUM] == 0)
	    work_register = LAST_ARG_REGNUM;
	  else	  /* We must push a register of our own */
	    live_regs_mask |= (1 << LAST_LO_REGNUM);
	}

      if (work_register == 0)
	{
	  /* Select a register from the list that will be pushed to
	     use as our work register.  */
	  for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
	    if ((1 << work_register) & live_regs_mask)
	      break;
	}

      asm_fprintf
	(f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
	 SP_REGNUM, SP_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (f, live_regs_mask, 1);

      /* OFFSET is the byte count of registers pushed above the
	 backtrace structure.  */
      for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
	if (wr & live_regs_mask)
	  offset += 4;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
	 to calculate "start of backtrace creation code + 12".  */
      if (live_regs_mask)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	}
      else
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (live_regs_mask)
    thumb_pushpop (f, live_regs_mask, 1);

  for (regno = 8; regno < 13; regno++)
    if (THUMB_REG_PUSHED_P (regno))
      high_regs_pushed++;

  if (high_regs_pushed)
    {
      /* High registers cannot be pushed directly; copy each into a
	 pushed low register and push those instead, possibly over
	 several push instructions.  */
      int pushable_regs = 0;
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      /* Find the highest-numbered high register to save.  */
      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
	if (THUMB_REG_PUSHED_P (next_hi_reg))
	  break;

      pushable_regs = mask;

      if (pushable_regs == 0)
	{
	  /* Desperation time -- this probably will never happen.  */
	  if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
	    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	  mask = 1 << LAST_ARG_REGNUM;
	}

      while (high_regs_pushed > 0)
	{
	  /* Fill the available low registers from the highest high
	     register downwards, then push that batch.  */
	  for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

		  high_regs_pushed--;

		  if (high_regs_pushed)
		    {
		      for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
			   next_hi_reg--)
			if (THUMB_REG_PUSHED_P (next_hi_reg))
			  break;
		    }
		  else
		    {
		      /* Last batch: trim the mask so we only push the
			 low registers actually holding high values.  */
		      mask &= ~((1 << regno) - 1);
		      break;
		    }
		}
	    }

	  thumb_pushpop (f, mask, 1);
	}

      if (pushable_regs == 0
	  && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
	asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }
}
10559
/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  Emits the two ldr
   instructions (ordering them so the address register is read before
   it is clobbered) and returns an empty asm template string.
   operands[0] is the destination register pair (low register),
   operands[1] is the source memory reference.  */

const char *
thumb_load_double_from_address (operands)
     rtx *operands;
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    abort ();

  if (GET_CODE (operands[1]) != MEM)
    abort ();

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      /* operands[2] is the MEM for the high word, at <address> + 4.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
	{
	  /* The low destination is the address register: load the high
	     word first so the address survives the first load.  */
	  output_asm_insn ("ldr\t%H0, %2", operands);
	  output_asm_insn ("ldr\t%0, %1", operands);
	}
      else
	{
	  output_asm_insn ("ldr\t%0, %1", operands);
	  output_asm_insn ("ldr\t%H0, %2", operands);
	}
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
	base = arg2, offset = arg1;
      else
	base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
	abort ();

      /* Catch the case of <address> = <reg> + <reg> */
      if (GET_CODE (offset) == REG)
	{
	  int reg_offset = REGNO (offset);
	  int reg_base = REGNO (base);
	  int reg_dest = REGNO (operands[0]);

	  /* Add the base and offset registers together into the
             higher destination register.  */
	  asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
		       reg_dest + 1, reg_base, reg_offset);

	  /* Load the lower destination register from the address in
             the higher destination register.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
		       reg_dest, reg_dest + 1);

	  /* Load the higher destination register from its own address
             plus 4.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
		       reg_dest + 1, reg_dest + 1);
	}
      else
	{
	  /* Compute <address> + 4 for the high order load.  */
	  operands[2] = gen_rtx (MEM, SImode,
				 plus_constant (XEXP (operands[1], 0), 4));

	  /* If the computed address is held in the low order register
	     then load the high order register first, otherwise always
	     load the low order register first.  */
	  if (REGNO (operands[0]) == REGNO (base))
	    {
	      output_asm_insn ("ldr\t%H0, %2", operands);
	      output_asm_insn ("ldr\t%0, %1", operands);
	    }
	  else
	    {
	      output_asm_insn ("ldr\t%0, %1", operands);
	      output_asm_insn ("ldr\t%H0, %2", operands);
	    }
	}
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
         directly.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      abort ();
      break;
    }

  return "";
}
10684
10685
cd2b33d0 10686const char *
d5b7b3ae
RE
10687thumb_output_move_mem_multiple (n, operands)
10688 int n;
10689 rtx * operands;
10690{
10691 rtx tmp;
10692
10693 switch (n)
10694 {
10695 case 2:
ca356f3a 10696 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10697 {
ca356f3a
RE
10698 tmp = operands[4];
10699 operands[4] = operands[5];
10700 operands[5] = tmp;
d5b7b3ae 10701 }
ca356f3a
RE
10702 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10703 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
d5b7b3ae
RE
10704 break;
10705
10706 case 3:
ca356f3a 10707 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10708 {
ca356f3a
RE
10709 tmp = operands[4];
10710 operands[4] = operands[5];
10711 operands[5] = tmp;
d5b7b3ae 10712 }
ca356f3a 10713 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 10714 {
ca356f3a
RE
10715 tmp = operands[5];
10716 operands[5] = operands[6];
10717 operands[6] = tmp;
d5b7b3ae 10718 }
ca356f3a 10719 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10720 {
ca356f3a
RE
10721 tmp = operands[4];
10722 operands[4] = operands[5];
10723 operands[5] = tmp;
d5b7b3ae
RE
10724 }
10725
ca356f3a
RE
10726 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10727 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
d5b7b3ae
RE
10728 break;
10729
10730 default:
10731 abort ();
10732 }
10733
10734 return "";
10735}
10736
1d6e90ac 10737/* Routines for generating rtl. */
d5b7b3ae
RE
10738
10739void
10740thumb_expand_movstrqi (operands)
10741 rtx * operands;
10742{
10743 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10744 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10745 HOST_WIDE_INT len = INTVAL (operands[2]);
10746 HOST_WIDE_INT offset = 0;
10747
10748 while (len >= 12)
10749 {
ca356f3a 10750 emit_insn (gen_movmem12b (out, in, out, in));
d5b7b3ae
RE
10751 len -= 12;
10752 }
10753
10754 if (len >= 8)
10755 {
ca356f3a 10756 emit_insn (gen_movmem8b (out, in, out, in));
d5b7b3ae
RE
10757 len -= 8;
10758 }
10759
10760 if (len >= 4)
10761 {
10762 rtx reg = gen_reg_rtx (SImode);
10763 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10764 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10765 len -= 4;
10766 offset += 4;
10767 }
10768
10769 if (len >= 2)
10770 {
10771 rtx reg = gen_reg_rtx (HImode);
10772 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10773 plus_constant (in, offset))));
10774 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10775 reg));
10776 len -= 2;
10777 offset += 2;
10778 }
10779
10780 if (len)
10781 {
10782 rtx reg = gen_reg_rtx (QImode);
10783 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10784 plus_constant (in, offset))));
10785 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10786 reg));
10787 }
10788}
10789
10790int
10791thumb_cmp_operand (op, mode)
10792 rtx op;
10793 enum machine_mode mode;
10794{
10795 return ((GET_CODE (op) == CONST_INT
10796 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10797 || register_operand (op, mode));
10798}
10799
cd2b33d0 10800static const char *
d5b7b3ae
RE
10801thumb_condition_code (x, invert)
10802 rtx x;
10803 int invert;
10804{
1d6e90ac 10805 static const char * const conds[] =
d5b7b3ae
RE
10806 {
10807 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10808 "hi", "ls", "ge", "lt", "gt", "le"
10809 };
10810 int val;
10811
10812 switch (GET_CODE (x))
10813 {
10814 case EQ: val = 0; break;
10815 case NE: val = 1; break;
10816 case GEU: val = 2; break;
10817 case LTU: val = 3; break;
10818 case GTU: val = 8; break;
10819 case LEU: val = 9; break;
10820 case GE: val = 10; break;
10821 case LT: val = 11; break;
10822 case GT: val = 12; break;
10823 case LE: val = 13; break;
10824 default:
10825 abort ();
10826 }
10827
10828 return conds[val ^ invert];
10829}
10830
10831/* Handle storing a half-word to memory during reload. */
1d6e90ac 10832
d5b7b3ae
RE
10833void
10834thumb_reload_out_hi (operands)
10835 rtx * operands;
10836{
10837 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10838}
10839
10840/* Handle storing a half-word to memory during reload. */
1d6e90ac 10841
d5b7b3ae
RE
10842void
10843thumb_reload_in_hi (operands)
10844 rtx * operands ATTRIBUTE_UNUSED;
10845{
10846 abort ();
10847}
10848
c27ba912
DM
10849/* Return the length of a function name prefix
10850 that starts with the character 'c'. */
1d6e90ac 10851
c27ba912 10852static int
ab2877a3
KG
10853arm_get_strip_length (c)
10854 int c;
c27ba912
DM
10855{
10856 switch (c)
10857 {
10858 ARM_NAME_ENCODING_LENGTHS
10859 default: return 0;
10860 }
10861}
10862
/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */

const char *
arm_strip_name_encoding (name)
     const char * name;
{
  for (;;)
    {
      int skip = arm_get_strip_length (*name);

      if (skip == 0)
	break;

      name += skip;
    }

  return name;
}
10877
e1944073
KW
10878/* If there is a '*' anywhere in the name's prefix, then
10879 emit the stripped name verbatim, otherwise prepend an
10880 underscore if leading underscores are being used. */
10881
10882void
10883arm_asm_output_labelref (stream, name)
10884 FILE * stream;
10885 const char * name;
10886{
10887 int skip;
10888 int verbatim = 0;
10889
10890 while ((skip = arm_get_strip_length (* name)))
10891 {
10892 verbatim |= (*name == '*');
10893 name += skip;
10894 }
10895
10896 if (verbatim)
10897 fputs (name, stream);
10898 else
10899 asm_fprintf (stream, "%U%s", name);
10900}
10901
/* Label through which PIC constant-pool entries are addressed when
   producing AOF assembler output.  Created lazily by aof_pic_entry.  */
rtx aof_pic_label;
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

/* One node per distinct symbol referenced via the PIC constant pool.
   A node's position in the chain determines the symbol's offset from
   aof_pic_label (four bytes per node).  */
struct pic_chain
{
  struct pic_chain * next;	/* Next entry in the chain.  */
  const char * symname;		/* The symbol's name.  */
};

/* Head of the chain of pending PIC constant-pool entries.  */
static struct pic_chain * aof_pic_chain = NULL;
10914
10915rtx
10916aof_pic_entry (x)
10917 rtx x;
10918{
62b10bbc 10919 struct pic_chain ** chainp;
32de079a
RE
10920 int offset;
10921
10922 if (aof_pic_label == NULL_RTX)
10923 {
43cffd11 10924 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
10925 }
10926
10927 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10928 offset += 4, chainp = &(*chainp)->next)
10929 if ((*chainp)->symname == XSTR (x, 0))
10930 return plus_constant (aof_pic_label, offset);
10931
10932 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10933 (*chainp)->next = NULL;
10934 (*chainp)->symname = XSTR (x, 0);
10935 return plus_constant (aof_pic_label, offset);
10936}
10937
10938void
10939aof_dump_pic_table (f)
62b10bbc 10940 FILE * f;
32de079a 10941{
62b10bbc 10942 struct pic_chain * chain;
32de079a
RE
10943
10944 if (aof_pic_chain == NULL)
10945 return;
10946
dd18ae56
NC
10947 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10948 PIC_OFFSET_TABLE_REGNUM,
10949 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
10950 fputs ("|x$adcons|\n", f);
10951
10952 for (chain = aof_pic_chain; chain; chain = chain->next)
10953 {
10954 fputs ("\tDCD\t", f);
10955 assemble_name (f, chain->symname);
10956 fputs ("\n", f);
10957 }
10958}
10959
2b835d68
RE
10960int arm_text_section_count = 1;
10961
10962char *
84ed5e79 10963aof_text_section ()
2b835d68
RE
10964{
10965 static char buf[100];
2b835d68
RE
10966 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10967 arm_text_section_count++);
10968 if (flag_pic)
10969 strcat (buf, ", PIC, REENTRANT");
10970 return buf;
10971}
10972
/* Sequence number used to give each data AREA a unique name.  */
static int arm_data_section_count = 1;

/* Return the directive that opens a fresh, uniquely numbered data
   section.  The returned string lives in a static buffer.  */

char *
aof_data_section ()
{
  static char buf[100];

  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);

  return buf;
}
10982
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

/* One node in the list of pending imports.  */
struct import
{
  struct import * next;		/* Next pending import.  */
  const char * name;		/* Name of the imported symbol.  */
};

/* Head of the list of symbols still awaiting an IMPORT directive.  */
static struct import * imports_list = NULL;
11000
11001void
11002aof_add_import (name)
5f37d07c 11003 const char * name;
2b835d68 11004{
62b10bbc 11005 struct import * new;
2b835d68
RE
11006
11007 for (new = imports_list; new; new = new->next)
11008 if (new->name == name)
11009 return;
11010
11011 new = (struct import *) xmalloc (sizeof (struct import));
11012 new->next = imports_list;
11013 imports_list = new;
11014 new->name = name;
11015}
11016
11017void
11018aof_delete_import (name)
5f37d07c 11019 const char * name;
2b835d68 11020{
62b10bbc 11021 struct import ** old;
2b835d68
RE
11022
11023 for (old = &imports_list; *old; old = & (*old)->next)
11024 {
11025 if ((*old)->name == name)
11026 {
11027 *old = (*old)->next;
11028 return;
11029 }
11030 }
11031}
11032
11033int arm_main_function = 0;
11034
11035void
11036aof_dump_imports (f)
62b10bbc 11037 FILE * f;
2b835d68
RE
11038{
11039 /* The AOF assembler needs this to cause the startup code to be extracted
11040 from the library. Brining in __main causes the whole thing to work
11041 automagically. */
11042 if (arm_main_function)
11043 {
11044 text_section ();
11045 fputs ("\tIMPORT __main\n", f);
11046 fputs ("\tDCD __main\n", f);
11047 }
11048
11049 /* Now dump the remaining imports. */
11050 while (imports_list)
11051 {
11052 fprintf (f, "\tIMPORT\t");
11053 assemble_name (f, imports_list->name);
11054 fputc ('\n', f);
11055 imports_list = imports_list->next;
11056 }
11057}
5eb99654
KG
11058
11059static void
11060aof_globalize_label (stream, name)
11061 FILE *stream;
11062 const char *name;
11063{
11064 default_globalize_label (stream, name);
11065 if (! strcmp (name, "main"))
11066 arm_main_function = 1;
11067}
2b835d68 11068#endif /* AOF_ASSEMBLER */
7c262518 11069
#ifdef OBJECT_FORMAT_ELF
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the default elf version only in the prefix character
   used before the section type.  */

static void
arm_elf_asm_named_section (name, flags)
     const char *name;
     unsigned int flags;
{
  char flagchars[10], *f = flagchars;

  /* A section that has already been declared only needs its name on
     subsequent mentions.  */
  if (! named_section_first_declaration (name))
    {
      fprintf (asm_out_file, "\t.section\t%s\n", name);
      return;
    }

  /* Build the flag string; the character order matches the generic
     ELF implementation.  */
  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MERGE)
    *f++ = 'M';
  if (flags & SECTION_STRINGS)
    *f++ = 'S';
  if (flags & SECTION_TLS)
    *f++ = 'T';
  *f = '\0';

  fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);

  if (!(flags & SECTION_NOTYPE))
    {
      const char *type;

      if (flags & SECTION_BSS)
	type = "nobits";
      else
	type = "progbits";

      /* ARM EABI assemblers use '%' rather than '@' to introduce
	 the section type.  */
      fprintf (asm_out_file, ",%%%s", type);

      /* FLAGS is unsigned, so cast the entity-size bits to match
	 the %d conversion (passing an unsigned int for %d is a
	 format/argument mismatch).  */
      if (flags & SECTION_ENTSIZE)
	fprintf (asm_out_file, ",%d", (int) (flags & SECTION_ENTSIZE));
    }

  putc ('\n', asm_out_file);
}
#endif
fb49053f
RH
11127
#ifndef ARM_PE
/* Symbols in the text segment can be accessed without indirecting via the
   constant pool; it may take an extra binary operation, but this is still
   faster than indirecting via memory.  Don't do this when not optimizing,
   since we won't be calculating all of the offsets necessary to do this
   simplification.  */

static void
arm_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* This doesn't work with AOF syntax, since the string table may be in
     a different AREA.  */
#ifndef AOF_ASSEMBLER
  /* Mark constant objects (other than writable strings) so they can
     be addressed relative to the text segment.  */
  if (optimize > 0 && TREE_CONSTANT (decl)
      && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
    {
      rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
                 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
      SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
    }
#endif

  /* If we are referencing a function that is weak then encode a long call
     flag in the function name, otherwise if the function is static or
     known to be defined in this file then encode a short call flag.
     Only done the first time the declaration is seen.  */
  if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
    {
      if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
        arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
      else if (! TREE_PUBLIC (decl))
        arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
    }
}
#endif /* !ARM_PE */
483ab821 11164
c590b625
RH
11165/* Output code to add DELTA to the first argument, and then jump
11166 to FUNCTION. Used for C++ multiple inheritance. */
11167
11168static void
3961e8fe 11169arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
483ab821
MM
11170 FILE *file;
11171 tree thunk ATTRIBUTE_UNUSED;
eb0424da 11172 HOST_WIDE_INT delta;
3961e8fe 11173 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
483ab821
MM
11174 tree function;
11175{
11176 int mi_delta = delta;
11177 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
11178 int shift = 0;
11179 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
11180 ? 1 : 0);
11181 if (mi_delta < 0)
11182 mi_delta = - mi_delta;
11183 while (mi_delta != 0)
11184 {
11185 if ((mi_delta & (3 << shift)) == 0)
11186 shift += 2;
11187 else
11188 {
11189 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
11190 mi_op, this_regno, this_regno,
11191 mi_delta & (0xff << shift));
11192 mi_delta &= ~(0xff << shift);
11193 shift += 8;
11194 }
11195 }
11196 fputs ("\tb\t", file);
11197 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
11198 if (NEED_PLT_RELOC)
11199 fputs ("(PLT)", file);
11200 fputc ('\n', file);
11201}
11202
This page took 2.721412 seconds and 5 git commands to generate.