/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "tm_p.h"

/* Forward definitions of types.  */
typedef struct minipool_node  Mnode;
typedef struct minipool_fixup Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long
#define Ccstar const char *

/* Forward function declarations.  */
static void   arm_add_gc_roots PARAMS ((void));
static int    arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static Ulong  bit_count PARAMS ((signed int));
static int    const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int    eliminate_lr2ip PARAMS ((rtx *));
static rtx    emit_multi_reg_push PARAMS ((int));
static rtx    emit_sfm PARAMS ((int, int));
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void   init_fpa_table PARAMS ((void));
static Hint   int_log2 PARAMS ((Hint));
static rtx    is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void   print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode  select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static void   arm_init_machine_status PARAMS ((struct function *));
static void   arm_mark_machine_status PARAMS ((struct function *));
static void   arm_free_machine_status PARAMS ((struct function *));
static int    number_of_first_bit_set PARAMS ((int));
static void   replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void   thumb_exit PARAMS ((FILE *, int, rtx));
static void   thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx    is_jump_table PARAMS ((rtx));
static Hint   get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void   assign_minipool_offsets PARAMS ((Mfix *));
static void   arm_print_value PARAMS ((FILE *, rtx));
static void   dump_minipool PARAMS ((rtx));
static int    arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void   push_minipool_barrier PARAMS ((rtx, Hint));
static void   push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void   note_invalid_constants PARAMS ((rtx, Hint));
static int    current_file_function_operand PARAMS ((rtx));
static Ulong  arm_compute_save_reg_mask PARAMS ((void));
static Ulong  arm_isr_value PARAMS ((tree));
static Ulong  arm_compute_func_type PARAMS ((void));
\f
#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar

/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free  free

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)   /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)   /* Fast multiply */
#define FL_MODE26     (1 << 2)   /* 26-bit mode support */
#define FL_MODE32     (1 << 3)   /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)   /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)   /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)   /* Thumb aware */
#define FL_LDSCHED    (1 << 7)   /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)   /* StrongARM */
#define FL_ARCH5E     (1 << 9)   /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)  /* XScale */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to 0 at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",      FL_CO_PROC | FL_MODE26 },
  {"arm250",    FL_CO_PROC | FL_MODE26 },
  {"arm3",      FL_CO_PROC | FL_MODE26 },
  {"arm6",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",    FL_MODE26 | FL_MODE32 },
  {"arm620",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D (and I); those
     don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",    FL_MODE26 | FL_MODE32 },
  {"arm710t",   FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",    FL_MODE26 | FL_MODE32 },
  {"arm720t",   FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",   FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",   FL_MODE26 | FL_MODE32 },
  {"arm7100",   FL_MODE26 | FL_MODE32 },
  {"arm7500",   FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",      FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",    FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",     FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name         processors  */
  { NULL,     "-mcpu=",    all_cores  },
  { NULL,     "-march=",   all_architectures },
  { NULL,     "-mtune=",   all_cores }
};

/* Return the number of bits set in `value'.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
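
/* A worked example of the loop above (an illustrative sketch): for
   value = 0x2C (binary 101100), value & -value isolates the lowest set
   bit (000100) and the AND with its complement clears it, leaving
   101000.  Two more iterations clear bits 3 and 5, so bit_count
   returns 3.  */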

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int          cpu;
        const char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions.");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && !TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
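
/* For illustration: a command line such as "-mcpu=arm710 -march=armv4t"
   triggers the conflict warning above (the arm710's flags lack FL_ARCH4
   and FL_THUMB), and the CPU's flags win, since -march is processed
   before -mcpu.  With neither switch given, insn_flags is derived from
   TARGET_CPU_DEFAULT via the cpu_defaults table.  */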

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
\f
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *  arg;
  unsigned long return_value;
}
isr_attribute_arg;

static isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};

/* Returns the (interrupt) function type corresponding to the given
   attribute argument, or ARM_FT_UNKNOWN if the type cannot be
   determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  isr_attribute_arg * ptr;
  const char *        arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (strcmp (arg, ptr->arg) == 0)
      return ptr->return_value;

  /* An unrecognised interrupt type.  */
  return ARM_FT_UNKNOWN;
}
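
/* For illustration, a sketch of how these attributes are spelled in
   user code (the handler names here are hypothetical):

     void my_irq_handler (void) __attribute__ ((interrupt ("IRQ")));
     void my_swi_handler (void) __attribute__ ((isr ("SWI")));

   An attribute given with no argument defaults to ARM_FT_ISR, i.e. the
   function is treated as an IRQ handler.  */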

/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_MACHINE_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
\f
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions, volatile functions and interrupt
     functions all need special consideration.  */
  if (func_type & (ARM_FT_INTERRUPT | ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && !call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
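
/* For illustration: a leaf function that stacks no registers and makes
   no stack adjustment can return with a single instruction, typically
   "mov pc, lr" (or "bx lr" when interworking); any of the conditions
   tested above forces a full epilogue instead.  */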

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~HOST_UINT (0xffffffff)) != 0
      && ((i & ~HOST_UINT (0xffffffff))
          != ((~HOST_UINT (0))
              & ~HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                       >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    } while (mask != ~HOST_UINT (0xFF));

  return FALSE;
}
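
/* For illustration: an ARM data-processing immediate is an 8-bit value
   rotated right by an even amount, so const_ok_for_arm accepts
   0x000000FF (no rotation), 0x00000FF0 (0xFF rotated right by 28) and
   0xF000000F (0xFF rotated right by 4), but rejects values such as
   0x00000101 or 0x0000FFFF, whose significant bits cannot fit in eight
   bits under any even rotation.  */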

/* Return true if I is a valid constant for the operation CODE.  */

static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:  /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
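
/* For illustration: const_ok_for_op (-1, PLUS) holds even though -1 is
   not itself encodable, because an ADD of #-1 can be emitted as a SUB
   of #1.  Similarly, an AND with an inverted constant can become a BIC,
   which is why the AND case tests the complement.  */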

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE; all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
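
/* For illustration: 0xFFF needs twelve consecutive set bits, so it is
   not a valid immediate, and a SET of it is synthesised in two
   instructions, roughly:

       mov     rD, #0xF00
       orr     rD, rD, #0xFF

   both of whose constants are valid rotated 8-bit immediates.  */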

static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
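
/* For illustration: with remainder = 0x00FF00FF and i = 0, the scan
   wraps to bit 31, peels off the 8-bit chunk 0x00FF0000 first and the
   chunk 0x000000FF next, so count_insns_for_constant returns 2; each
   chunk corresponds to roughly one data-processing instruction.  */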

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
                                       << (32 - clear_sign_bit_copies))
                                      & HOST_UINT (0xffffffff));

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

           *((volatile int *)0xe0000100) = 1;
           *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

            mov rA, #0xe0000000
            mov rB, #1
            str rB, [rA, #0x100]
            mov rB, #2
            str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }
  return insns;
}

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~(HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
bd9c7e23 1668
f5a1b0d2
NC
1669/* Decide whether a type should be returned in memory (true)
1670 or in a register (false). This is called by the macro
1671 RETURN_IN_MEMORY. */
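/* Some consequences of the rules below (illustrative examples, not
 from the original source):

 struct a { int i; }; -- fits in a word: returned in a register
 struct b { int i; int j; }; -- wider than a word: memory
 struct c { char x; char y; }; -- two addressable fields: memory */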
2b835d68
RE
1672int
1673arm_return_in_memory (type)
1674 tree type;
1675{
5895f793 1676 if (!AGGREGATE_TYPE_P (type))
9e291dbe 1677 /* All simple types are returned in registers. */
d7d01975 1678 return 0;
d5b7b3ae
RE
1679
 1680 /* For the arm-wince targets we choose to be compatible with Microsoft's
1681 ARM and Thumb compilers, which always return aggregates in memory. */
1682#ifndef ARM_WINCE
1683
d7d01975 1684 if (int_size_in_bytes (type) > 4)
9e291dbe 1685 /* All structures/unions bigger than one word are returned in memory. */
d7d01975 1686 return 1;
d5b7b3ae 1687
d7d01975 1688 if (TREE_CODE (type) == RECORD_TYPE)
2b835d68
RE
1689 {
1690 tree field;
1691
3a2ea258
RE
1692 /* For a struct the APCS says that we only return in a register
1693 if the type is 'integer like' and every addressable element
1694 has an offset of zero. For practical purposes this means
1695 that the structure can have at most one non bit-field element
1696 and that this element must be the first one in the structure. */
1697
f5a1b0d2
NC
1698 /* Find the first field, ignoring non FIELD_DECL things which will
1699 have been created by C++. */
1700 for (field = TYPE_FIELDS (type);
1701 field && TREE_CODE (field) != FIELD_DECL;
1702 field = TREE_CHAIN (field))
1703 continue;
1704
1705 if (field == NULL)
9e291dbe 1706 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
f5a1b0d2 1707
d5b7b3ae
RE
1708 /* Check that the first field is valid for returning in a register. */
1709
1710 /* ... Floats are not allowed */
9e291dbe 1711 if (FLOAT_TYPE_P (TREE_TYPE (field)))
3a2ea258
RE
1712 return 1;
1713
d5b7b3ae
RE
1714 /* ... Aggregates that are not themselves valid for returning in
1715 a register are not allowed. */
9e291dbe 1716 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
3a2ea258 1717 return 1;
6f7ebcbb 1718
3a2ea258
RE
1719 /* Now check the remaining fields, if any. Only bitfields are allowed,
1720 since they are not addressable. */
f5a1b0d2
NC
1721 for (field = TREE_CHAIN (field);
1722 field;
1723 field = TREE_CHAIN (field))
1724 {
1725 if (TREE_CODE (field) != FIELD_DECL)
1726 continue;
1727
5895f793 1728 if (!DECL_BIT_FIELD_TYPE (field))
f5a1b0d2
NC
1729 return 1;
1730 }
2b835d68
RE
1731
1732 return 0;
1733 }
d7d01975
NC
1734
1735 if (TREE_CODE (type) == UNION_TYPE)
2b835d68
RE
1736 {
1737 tree field;
1738
1739 /* Unions can be returned in registers if every element is
1740 integral, or can be returned in an integer register. */
f5a1b0d2
NC
1741 for (field = TYPE_FIELDS (type);
1742 field;
1743 field = TREE_CHAIN (field))
2b835d68 1744 {
f5a1b0d2
NC
1745 if (TREE_CODE (field) != FIELD_DECL)
1746 continue;
1747
6cc8c0b3
NC
1748 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1749 return 1;
1750
f5a1b0d2 1751 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
2b835d68
RE
1752 return 1;
1753 }
f5a1b0d2 1754
2b835d68
RE
1755 return 0;
1756 }
d5b7b3ae 1757#endif /* not ARM_WINCE */
f5a1b0d2 1758
d5b7b3ae 1759 /* Return all other types in memory. */
2b835d68
RE
1760 return 1;
1761}
1762
82e9d970
PB
1763/* Initialize a variable CUM of type CUMULATIVE_ARGS
1764 for a call to a function whose data type is FNTYPE.
1765 For a library call, FNTYPE is NULL. */
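/* For example (illustrative): if FNTYPE returns an aggregate in memory,
 r0 is reserved for the hidden return-value pointer and the first
 visible argument is allocated from r1 onwards (nregs starts at 1). */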
1766void
1767arm_init_cumulative_args (pcum, fntype, libname, indirect)
1768 CUMULATIVE_ARGS * pcum;
1769 tree fntype;
1770 rtx libname ATTRIBUTE_UNUSED;
1771 int indirect ATTRIBUTE_UNUSED;
1772{
1773 /* On the ARM, the offset starts at 0. */
c27ba912
DM
1774 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1775
82e9d970
PB
1776 pcum->call_cookie = CALL_NORMAL;
1777
1778 if (TARGET_LONG_CALLS)
1779 pcum->call_cookie = CALL_LONG;
1780
1781 /* Check for long call/short call attributes. The attributes
1782 override any command line option. */
1783 if (fntype)
1784 {
1785 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1786 pcum->call_cookie = CALL_SHORT;
1787 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1788 pcum->call_cookie = CALL_LONG;
1789 }
1790}
1791
1792/* Determine where to put an argument to a function.
1793 Value is zero to push the argument on the stack,
1794 or a hard register in which to store the argument.
1795
1796 MODE is the argument's machine mode.
1797 TYPE is the data type of the argument (as a tree).
1798 This is null for libcalls where that information may
1799 not be available.
1800 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1801 the preceding args and about the function being called.
1802 NAMED is nonzero if this argument is a named parameter
1803 (otherwise it is an extra parameter matching an ellipsis). */
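/* For example (illustrative, assuming NUM_ARG_REGS is 4 as for the
 APCS): the first four words of arguments travel in r0-r3 and later
 words go on the stack, so for f (a, b, c, d, e) the argument E is
 the first to be pushed. */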
1804rtx
1805arm_function_arg (pcum, mode, type, named)
1806 CUMULATIVE_ARGS * pcum;
1807 enum machine_mode mode;
1808 tree type ATTRIBUTE_UNUSED;
1809 int named;
1810{
1811 if (mode == VOIDmode)
1812 /* Compute operand 2 of the call insn. */
1813 return GEN_INT (pcum->call_cookie);
1814
5895f793 1815 if (!named || pcum->nregs >= NUM_ARG_REGS)
82e9d970
PB
1816 return NULL_RTX;
1817
1818 return gen_rtx_REG (mode, pcum->nregs);
1819}
82e9d970 1820\f
c27ba912
DM
1821/* Encode the current state of the #pragma [no_]long_calls. */
1822typedef enum
82e9d970 1823{
c27ba912
DM
 1824 OFF, /* No #pragma [no_]long_calls is in effect. */
1825 LONG, /* #pragma long_calls is in effect. */
1826 SHORT /* #pragma no_long_calls is in effect. */
1827} arm_pragma_enum;
82e9d970 1828
c27ba912 1829static arm_pragma_enum arm_pragma_long_calls = OFF;
82e9d970 1830
8b97c5f8
ZW
1831void
1832arm_pr_long_calls (pfile)
1833 cpp_reader *pfile ATTRIBUTE_UNUSED;
82e9d970 1834{
8b97c5f8
ZW
1835 arm_pragma_long_calls = LONG;
1836}
1837
1838void
1839arm_pr_no_long_calls (pfile)
1840 cpp_reader *pfile ATTRIBUTE_UNUSED;
1841{
1842 arm_pragma_long_calls = SHORT;
1843}
1844
1845void
1846arm_pr_long_calls_off (pfile)
1847 cpp_reader *pfile ATTRIBUTE_UNUSED;
1848{
1849 arm_pragma_long_calls = OFF;
82e9d970 1850}
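/* Example usage of the three handlers above (illustrative):

 #pragma long_calls
 void far_away (void); -- given the long_call attribute
 #pragma no_long_calls
 void near_by (void); -- given the short_call attribute
 #pragma long_calls_off
 void ordinary (void); -- back to the command-line default

 The attributes themselves are attached by
 arm_set_default_type_attributes, below. */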
8b97c5f8 1851
82e9d970
PB
1852\f
1853/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
1854 attribute for TYPE. The attributes in ATTRIBUTES have previously been
1855 assigned to TYPE. */
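/* For example (illustrative), declarations this function accepts:

 void far_func (void) __attribute__ ((long_call));
 void handler (void) __attribute__ ((interrupt ("IRQ")));

 long_call and short_call take no arguments, while isr/interrupt may
 name the kind of exception the routine services. */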
1856int
1857arm_valid_type_attribute_p (type, attributes, identifier, args)
1858 tree type;
1859 tree attributes ATTRIBUTE_UNUSED;
1860 tree identifier;
1861 tree args;
1862{
1863 if ( TREE_CODE (type) != FUNCTION_TYPE
1864 && TREE_CODE (type) != METHOD_TYPE
1865 && TREE_CODE (type) != FIELD_DECL
1866 && TREE_CODE (type) != TYPE_DECL)
1867 return 0;
1868
1869 /* Function calls made to this symbol must be done indirectly, because
1870 it may lie outside of the 26 bit addressing range of a normal function
1871 call. */
1872 if (is_attribute_p ("long_call", identifier))
1873 return (args == NULL_TREE);
c27ba912 1874
82e9d970
PB
1875 /* Whereas these functions are always known to reside within the 26 bit
1876 addressing range. */
1877 if (is_attribute_p ("short_call", identifier))
1878 return (args == NULL_TREE);
1879
6d3d9133
NC
1880 /* Interrupt Service Routines have special prologue and epilogue requirements. */
1881 if (is_attribute_p ("isr", identifier)
1882 || is_attribute_p ("interrupt", identifier))
1883 return arm_isr_value (args);
1884
82e9d970
PB
1885 return 0;
1886}
1887
1888/* Return 0 if the attributes for two types are incompatible, 1 if they
1889 are compatible, and 2 if they are nearly compatible (which causes a
1890 warning to be generated). */
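/* For example (illustrative): a function type carrying long_call is not
 compatible with one carrying short_call, so assignments between
 pointers to the two are diagnosed; types with no call attributes at
 all always compare compatible here. */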
1891int
1892arm_comp_type_attributes (type1, type2)
1893 tree type1;
1894 tree type2;
1895{
1cb8d58a 1896 int l1, l2, s1, s2;
bd7fc26f 1897
82e9d970
PB
1898 /* Check for mismatch of non-default calling convention. */
1899 if (TREE_CODE (type1) != FUNCTION_TYPE)
1900 return 1;
1901
1902 /* Check for mismatched call attributes. */
1cb8d58a
NC
1903 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
1904 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
1905 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
1906 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
1907
1908 /* Only bother to check if an attribute is defined. */
1909 if (l1 | l2 | s1 | s2)
1910 {
1911 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 1912 if ((l1 != l2) || (s1 != s2))
bd7fc26f 1913 return 0;
82e9d970 1914
bd7fc26f
NC
1915 /* Disallow mixed attributes. */
1916 if ((l1 & s2) || (l2 & s1))
1917 return 0;
1918 }
1919
6d3d9133
NC
1920 /* Check for mismatched ISR attribute. */
1921 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
1922 if (! l1)
1923 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
1924 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
1925 if (! l2)
 1926 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
1927 if (l1 != l2)
1928 return 0;
1929
bd7fc26f 1930 return 1;
82e9d970
PB
1931}
1932
c27ba912
DM
1933/* Encode long_call or short_call attribute by prefixing
1934 symbol name in DECL with a special character FLAG. */
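/* For example (illustrative): a function marked short_call has its
 assembler name prefixed with SHORT_CALL_FLAG_CHAR, so that later
 tests such as ENCODED_SHORT_CALL_ATTR_P can be made on the name
 alone, without the decl to hand. */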
1935void
1936arm_encode_call_attribute (decl, flag)
1937 tree decl;
cd2b33d0 1938 int flag;
c27ba912 1939{
3cce094d 1940 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b 1941 int len = strlen (str);
d19fb8e3 1942 char * newstr;
c27ba912
DM
1943
1944 if (TREE_CODE (decl) != FUNCTION_DECL)
1945 return;
1946
1947 /* Do not allow weak functions to be treated as short call. */
1948 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
1949 return;
c27ba912 1950
520a57c8
ZW
1951 newstr = alloca (len + 2);
1952 newstr[0] = flag;
1953 strcpy (newstr + 1, str);
c27ba912 1954
6d3d9133 1955 newstr = (char *) ggc_alloc_string (newstr, len + 1);
c27ba912
DM
1956 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
1957}
1958
1959/* Assigns default attributes to newly defined type. This is used to
1960 set short_call/long_call attributes for function types of
1961 functions defined inside corresponding #pragma scopes. */
1962void
1963arm_set_default_type_attributes (type)
1964 tree type;
1965{
1966 /* Add __attribute__ ((long_call)) to all functions, when
1967 inside #pragma long_calls or __attribute__ ((short_call)),
1968 when inside #pragma no_long_calls. */
1969 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1970 {
1971 tree type_attr_list, attr_name;
1972 type_attr_list = TYPE_ATTRIBUTES (type);
1973
1974 if (arm_pragma_long_calls == LONG)
1975 attr_name = get_identifier ("long_call");
1976 else if (arm_pragma_long_calls == SHORT)
1977 attr_name = get_identifier ("short_call");
1978 else
1979 return;
1980
1981 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
1982 TYPE_ATTRIBUTES (type) = type_attr_list;
1983 }
1984}
1985\f
1986/* Return 1 if the operand is a SYMBOL_REF for a function known to be
 1987 defined within the current compilation unit. If this cannot be
1988 determined, then 0 is returned. */
1989static int
1990current_file_function_operand (sym_ref)
1991 rtx sym_ref;
1992{
1993 /* This is a bit of a fib. A function will have a short call flag
1994 applied to its name if it has the short call attribute, or it has
1995 already been defined within the current compilation unit. */
1996 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
1997 return 1;
1998
6d77b53e 1999 /* The current function is always defined within the current compilation
c27ba912
DM
 2000 unit. If it is a weak definition, however, then this may not be the real
 2001 definition of the function, and so we have to say no. */
2002 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
5895f793 2003 && !DECL_WEAK (current_function_decl))
c27ba912
DM
2004 return 1;
2005
2006 /* We cannot make the determination - default to returning 0. */
2007 return 0;
2008}
2009
2010/* Return non-zero if a 32 bit "long_call" should be generated for
2011 this call. We generate a long_call if the function:
2012
 2013 a. has an __attribute__ ((long_call))
2014 or b. is within the scope of a #pragma long_calls
2015 or c. the -mlong-calls command line switch has been specified
2016
2017 However we do not generate a long call if the function:
2018
2019 d. has an __attribute__ ((short_call))
2020 or e. is inside the scope of a #pragma no_long_calls
2021 or f. has an __attribute__ ((section))
2022 or g. is defined within the current compilation unit.
2023
2024 This function will be called by C fragments contained in the machine
2025 description file. CALL_REF and CALL_COOKIE correspond to the matched
2026 rtl operands. CALL_SYMBOL is used to distinguish between
2027 two different callers of the function. It is set to 1 in the
2028 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2029 and "call_value" patterns. This is because of the difference in the
2030 SYM_REFs passed by these patterns. */
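/* For example (illustrative): under -mlong-calls a call to an undefined
 extern function is emitted as a load of the function's address into a
 register followed by an indirect branch, since a plain BL has only a
 26 bit range; a call to a function already output in this file
 (rule g above) remains an ordinary BL. */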
2031int
2032arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2033 rtx sym_ref;
2034 int call_cookie;
2035 int call_symbol;
2036{
5895f793 2037 if (!call_symbol)
c27ba912
DM
2038 {
2039 if (GET_CODE (sym_ref) != MEM)
2040 return 0;
2041
2042 sym_ref = XEXP (sym_ref, 0);
2043 }
2044
2045 if (GET_CODE (sym_ref) != SYMBOL_REF)
2046 return 0;
2047
2048 if (call_cookie & CALL_SHORT)
2049 return 0;
2050
2051 if (TARGET_LONG_CALLS && flag_function_sections)
2052 return 1;
2053
87e27392 2054 if (current_file_function_operand (sym_ref))
c27ba912
DM
2055 return 0;
2056
2057 return (call_cookie & CALL_LONG)
2058 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2059 || TARGET_LONG_CALLS;
2060}
f99fce0c
RE
2061
2062/* Return non-zero if it is ok to make a tail-call to DECL. */
2063int
2064arm_function_ok_for_sibcall (decl)
2065 tree decl;
2066{
2067 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2068
2069 /* Never tailcall something for which we have no decl, or if we
2070 are in Thumb mode. */
2071 if (decl == NULL || TARGET_THUMB)
2072 return 0;
2073
2074 /* Get the calling method. */
2075 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2076 call_type = CALL_SHORT;
2077 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2078 call_type = CALL_LONG;
2079
2080 /* Cannot tail-call to long calls, since these are out of range of
2081 a branch instruction. However, if not compiling PIC, we know
2082 we can reach the symbol if it is in this compilation unit. */
5895f793 2083 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
f99fce0c
RE
2084 return 0;
2085
2086 /* If we are interworking and the function is not declared static
2087 then we can't tail-call it unless we know that it exists in this
2088 compilation unit (since it might be a Thumb routine). */
5895f793 2089 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
f99fce0c
RE
2090 return 0;
2091
6d3d9133
NC
2092 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2093 if (IS_INTERRUPT (arm_current_func_type ()))
2094 return 0;
2095
f99fce0c
RE
2096 /* Everything else is ok. */
2097 return 1;
2098}
2099
82e9d970 2100\f
32de079a
RE
2101int
2102legitimate_pic_operand_p (x)
2103 rtx x;
2104{
d5b7b3ae
RE
2105 if (CONSTANT_P (x)
2106 && flag_pic
32de079a
RE
2107 && (GET_CODE (x) == SYMBOL_REF
2108 || (GET_CODE (x) == CONST
2109 && GET_CODE (XEXP (x, 0)) == PLUS
2110 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2111 return 0;
2112
2113 return 1;
2114}
2115
2116rtx
2117legitimize_pic_address (orig, mode, reg)
2118 rtx orig;
2119 enum machine_mode mode;
2120 rtx reg;
2121{
2122 if (GET_CODE (orig) == SYMBOL_REF)
2123 {
2124 rtx pic_ref, address;
2125 rtx insn;
2126 int subregs = 0;
2127
2128 if (reg == 0)
2129 {
893f3d5b 2130 if (no_new_pseudos)
32de079a
RE
2131 abort ();
2132 else
2133 reg = gen_reg_rtx (Pmode);
2134
2135 subregs = 1;
2136 }
2137
2138#ifdef AOF_ASSEMBLER
2139 /* The AOF assembler can generate relocations for these directly, and
6354dc9b 2140 understands that the PIC register has to be added into the offset. */
32de079a
RE
2141 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2142#else
2143 if (subregs)
2144 address = gen_reg_rtx (Pmode);
2145 else
2146 address = reg;
2147
4bec9f7d
NC
2148 if (TARGET_ARM)
2149 emit_insn (gen_pic_load_addr_arm (address, orig));
2150 else
2151 emit_insn (gen_pic_load_addr_thumb (address, orig));
32de079a 2152
43cffd11
RE
2153 pic_ref = gen_rtx_MEM (Pmode,
2154 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2155 address));
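 /* The sequence built here amounts to (illustrative sketch):

 ldr ADDRESS, <literal pool slot for ORIG>
 ldr REG, [PIC_REG, ADDRESS]

 i.e. the symbol's address is fetched from its GOT slot via the
 PIC offset table register. */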
32de079a
RE
2156 RTX_UNCHANGING_P (pic_ref) = 1;
2157 insn = emit_move_insn (reg, pic_ref);
2158#endif
2159 current_function_uses_pic_offset_table = 1;
2160 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2161 by loop. */
43cffd11
RE
2162 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2163 REG_NOTES (insn));
32de079a
RE
2164 return reg;
2165 }
2166 else if (GET_CODE (orig) == CONST)
2167 {
2168 rtx base, offset;
2169
2170 if (GET_CODE (XEXP (orig, 0)) == PLUS
2171 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2172 return orig;
2173
2174 if (reg == 0)
2175 {
893f3d5b 2176 if (no_new_pseudos)
32de079a
RE
2177 abort ();
2178 else
2179 reg = gen_reg_rtx (Pmode);
2180 }
2181
2182 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2183 {
2184 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2185 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2186 base == reg ? 0 : reg);
2187 }
2188 else
2189 abort ();
2190
2191 if (GET_CODE (offset) == CONST_INT)
2192 {
2193 /* The base register doesn't really matter, we only want to
2194 test the index for the appropriate mode. */
f1008e52 2195 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
32de079a 2196
5895f793 2197 if (!no_new_pseudos)
32de079a
RE
2198 offset = force_reg (Pmode, offset);
2199 else
2200 abort ();
2201
2202 win:
2203 if (GET_CODE (offset) == CONST_INT)
2204 return plus_constant_for_output (base, INTVAL (offset));
2205 }
2206
2207 if (GET_MODE_SIZE (mode) > 4
2208 && (GET_MODE_CLASS (mode) == MODE_INT
2209 || TARGET_SOFT_FLOAT))
2210 {
2211 emit_insn (gen_addsi3 (reg, base, offset));
2212 return reg;
2213 }
2214
43cffd11 2215 return gen_rtx_PLUS (Pmode, base, offset);
32de079a
RE
2216 }
2217 else if (GET_CODE (orig) == LABEL_REF)
82e9d970
PB
2218 {
2219 current_function_uses_pic_offset_table = 1;
2220
2221 if (NEED_GOT_RELOC)
d5b7b3ae
RE
2222 {
2223 rtx pic_ref, address = gen_reg_rtx (Pmode);
4bec9f7d
NC
2224
2225 if (TARGET_ARM)
2226 emit_insn (gen_pic_load_addr_arm (address, orig));
2227 else
2228 emit_insn (gen_pic_load_addr_thumb (address, orig));
d19fb8e3 2229
d5b7b3ae
RE
2230 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2231
2232 emit_move_insn (address, pic_ref);
2233 return address;
2234 }
82e9d970 2235 }
32de079a
RE
2236
2237 return orig;
2238}
2239
c1163e75
PB
2240/* Generate code to load the PIC register. PROLOGUE is true if
2241 called from arm_expand_prologue (in which case we want the
2242 generated insns at the start of the function); false if called
2243 by an exception receiver that needs the PIC register reloaded
2244 (in which case the insns are just dumped at the current location). */
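/* The code emitted for ARM amounts to (illustrative sketch):

 ldr sl, .Lgot
 .Lpic: add sl, pc, sl

 where .Lgot holds _GLOBAL_OFFSET_TABLE_ - (.Lpic + 8); the 8 (4 for
 Thumb) matches the pipeline offset described below. */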
eab4abeb 2245
32de079a 2246void
eab4abeb
NC
2247arm_finalize_pic (prologue)
2248 int prologue;
32de079a
RE
2249{
2250#ifndef AOF_ASSEMBLER
c1163e75 2251 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
32de079a
RE
2252 rtx global_offset_table;
2253
ed0e6530 2254 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
2255 return;
2256
5895f793 2257 if (!flag_pic)
32de079a
RE
2258 abort ();
2259
2260 start_sequence ();
2261 l1 = gen_label_rtx ();
2262
43cffd11 2263 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768 2264 /* On the ARM the PC register contains 'dot + 8' at the time of the
d5b7b3ae
RE
2265 addition, on the Thumb it is 'dot + 4'. */
2266 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
84306176
PB
2267 if (GOT_PCREL)
2268 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 2269 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
2270 else
2271 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
2272
2273 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 2274
d5b7b3ae 2275 if (TARGET_ARM)
4bec9f7d
NC
2276 {
2277 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2278 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2279 }
d5b7b3ae 2280 else
4bec9f7d
NC
2281 {
2282 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2283 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2284 }
32de079a
RE
2285
2286 seq = gen_sequence ();
2287 end_sequence ();
c1163e75
PB
2288 if (prologue)
2289 emit_insn_after (seq, get_insns ());
2290 else
2291 emit_insn (seq);
32de079a
RE
2292
2293 /* Need to emit this whether or not we obey regdecls,
2294 since setjmp/longjmp can cause life info to screw up. */
43cffd11 2295 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
2296#endif /* AOF_ASSEMBLER */
2297}
2298
e2c671ba
RE
2299#define REG_OR_SUBREG_REG(X) \
2300 (GET_CODE (X) == REG \
2301 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2302
2303#define REG_OR_SUBREG_RTX(X) \
2304 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2305
d5b7b3ae
RE
2306#ifndef COSTS_N_INSNS
2307#define COSTS_N_INSNS(N) ((N) * 4 - 2)
2308#endif
e2c671ba
RE
2309
2310int
d5b7b3ae 2311arm_rtx_costs (x, code, outer)
e2c671ba 2312 rtx x;
74bbc178 2313 enum rtx_code code;
d5b7b3ae 2314 enum rtx_code outer;
e2c671ba
RE
2315{
2316 enum machine_mode mode = GET_MODE (x);
2317 enum rtx_code subcode;
2318 int extra_cost;
2319
d5b7b3ae
RE
2320 if (TARGET_THUMB)
2321 {
2322 switch (code)
2323 {
2324 case ASHIFT:
2325 case ASHIFTRT:
2326 case LSHIFTRT:
2327 case ROTATERT:
2328 case PLUS:
2329 case MINUS:
2330 case COMPARE:
2331 case NEG:
2332 case NOT:
2333 return COSTS_N_INSNS (1);
2334
2335 case MULT:
2336 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2337 {
2338 int cycles = 0;
2339 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2340
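 /* Descriptive note (not in the original source): the loop
 below models an early-terminating multiplier that retires
 roughly two bits of the constant per cycle. */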
2341 while (i)
2342 {
2343 i >>= 2;
5895f793 2344 cycles++;
d5b7b3ae
RE
2345 }
2346 return COSTS_N_INSNS (2) + cycles;
2347 }
2348 return COSTS_N_INSNS (1) + 16;
2349
2350 case SET:
2351 return (COSTS_N_INSNS (1)
2352 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
 2353 + (GET_CODE (SET_DEST (x)) == MEM)));
2354
2355 case CONST_INT:
2356 if (outer == SET)
2357 {
2358 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2359 return 0;
2360 if (thumb_shiftable_const (INTVAL (x)))
2361 return COSTS_N_INSNS (2);
2362 return COSTS_N_INSNS (3);
2363 }
2364 else if (outer == PLUS
2365 && INTVAL (x) < 256 && INTVAL (x) > -256)
2366 return 0;
2367 else if (outer == COMPARE
2368 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2369 return 0;
2370 else if (outer == ASHIFT || outer == ASHIFTRT
2371 || outer == LSHIFTRT)
2372 return 0;
2373 return COSTS_N_INSNS (2);
2374
2375 case CONST:
2376 case CONST_DOUBLE:
2377 case LABEL_REF:
2378 case SYMBOL_REF:
2379 return COSTS_N_INSNS (3);
2380
2381 case UDIV:
2382 case UMOD:
2383 case DIV:
2384 case MOD:
2385 return 100;
2386
2387 case TRUNCATE:
2388 return 99;
2389
2390 case AND:
2391 case XOR:
2392 case IOR:
2393 /* XXX guess. */
2394 return 8;
2395
2396 case ADDRESSOF:
2397 case MEM:
2398 /* XXX another guess. */
2399 /* Memory costs quite a lot for the first word, but subsequent words
2400 load at the equivalent of a single insn each. */
2401 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2402 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2403
2404 case IF_THEN_ELSE:
2405 /* XXX a guess. */
2406 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2407 return 14;
2408 return 2;
2409
2410 case ZERO_EXTEND:
2411 /* XXX still guessing. */
2412 switch (GET_MODE (XEXP (x, 0)))
2413 {
2414 case QImode:
2415 return (1 + (mode == DImode ? 4 : 0)
2416 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2417
2418 case HImode:
2419 return (4 + (mode == DImode ? 4 : 0)
2420 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2421
2422 case SImode:
2423 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2424
2425 default:
2426 return 99;
2427 }
2428
2429 default:
2430 return 99;
2431#if 0
2432 case FFS:
2433 case FLOAT:
2434 case FIX:
2435 case UNSIGNED_FIX:
2436 /* XXX guess */
2437 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2438 rtx_name[code]);
2439 abort ();
2440#endif
2441 }
2442 }
2443
e2c671ba
RE
2444 switch (code)
2445 {
2446 case MEM:
2447 /* Memory costs quite a lot for the first word, but subsequent words
2448 load at the equivalent of a single insn each. */
2449 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2450 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2451
2452 case DIV:
2453 case MOD:
2454 return 100;
2455
2456 case ROTATE:
2457 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2458 return 4;
2459 /* Fall through */
2460 case ROTATERT:
2461 if (mode != SImode)
2462 return 8;
2463 /* Fall through */
2464 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2465 if (mode == DImode)
2466 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2467 + ((GET_CODE (XEXP (x, 0)) == REG
2468 || (GET_CODE (XEXP (x, 0)) == SUBREG
2469 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2470 ? 0 : 8));
2471 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2472 || (GET_CODE (XEXP (x, 0)) == SUBREG
2473 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2474 ? 0 : 4)
2475 + ((GET_CODE (XEXP (x, 1)) == REG
2476 || (GET_CODE (XEXP (x, 1)) == SUBREG
2477 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2478 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2479 ? 0 : 4));
2480
2481 case MINUS:
2482 if (mode == DImode)
2483 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2484 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2485 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2486 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2487 ? 0 : 8));
2488
2489 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2490 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2491 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2492 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2493 ? 0 : 8)
2494 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2495 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2496 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2497 ? 0 : 8));
2498
2499 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2500 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2501 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2502 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2503 || subcode == ASHIFTRT || subcode == LSHIFTRT
2504 || subcode == ROTATE || subcode == ROTATERT
2505 || (subcode == MULT
2506 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2507 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2508 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2509 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2510 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2511 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2512 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2513 return 1;
2514 /* Fall through */
2515
2516 case PLUS:
2517 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2518 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2519 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2520 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2521 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2522 ? 0 : 8));
2523
2524 /* Fall through */
2525 case AND: case XOR: case IOR:
2526 extra_cost = 0;
2527
 2528 /* Normally the frame registers will be split into reg+const during
2529 reload, so it is a bad idea to combine them with other instructions,
2530 since then they might not be moved outside of loops. As a compromise
2531 we allow integration with ops that have a constant as their second
2532 operand. */
2533 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2534 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2535 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2536 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2537 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2538 extra_cost = 4;
2539
2540 if (mode == DImode)
2541 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2542 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2543 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2544 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2545 ? 0 : 8));
2546
2547 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2548 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2549 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2550 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2551 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2552 ? 0 : 4));
2553
2554 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2555 return (1 + extra_cost
2556 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2557 || subcode == LSHIFTRT || subcode == ASHIFTRT
2558 || subcode == ROTATE || subcode == ROTATERT
2559 || (subcode == MULT
2560 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2561 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 2562 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
2563 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2564 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 2565 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
2566 ? 0 : 4));
2567
2568 return 8;
2569
2570 case MULT:
b111229a 2571 /* There is no point basing this on the tuning, since it is always the
6354dc9b 2572 fast variant if it exists at all. */
2b835d68
RE
2573 if (arm_fast_multiply && mode == DImode
2574 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2575 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2576 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2577 return 8;
2578
e2c671ba
RE
2579 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2580 || mode == DImode)
2581 return 30;
2582
2583 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2584 {
2b835d68 2585 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
e5951263 2586 & HOST_UINT (0xffffffff));
e2c671ba
RE
2587 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2588 int j;
6354dc9b
NC
2589
2590 /* Tune as appropriate. */
aec3cfba 2591 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 2592
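 /* Illustrative note: each loop iteration below retires one
 Booth unit (8 bits with FL_FAST_MULT, else 2 bits), so a
 16-bit constant costs two steps on a fast multiplier but
 eight on the iterative one. */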
2b835d68 2593 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 2594 {
2b835d68 2595 i >>= booth_unit_size;
e2c671ba
RE
2596 add_cost += 2;
2597 }
2598
2599 return add_cost;
2600 }
2601
aec3cfba 2602 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 2603 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
2604 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2605
56636818
JL
2606 case TRUNCATE:
2607 if (arm_fast_multiply && mode == SImode
2608 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2609 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2610 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2611 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2612 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2613 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2614 return 8;
2615 return 99;
2616
e2c671ba
RE
2617 case NEG:
2618 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2619 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2620 /* Fall through */
2621 case NOT:
2622 if (mode == DImode)
2623 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2624
2625 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2626
2627 case IF_THEN_ELSE:
2628 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2629 return 14;
2630 return 2;
2631
2632 case COMPARE:
2633 return 1;
2634
2635 case ABS:
2636 return 4 + (mode == DImode ? 4 : 0);
2637
2638 case SIGN_EXTEND:
2639 if (GET_MODE (XEXP (x, 0)) == QImode)
2640 return (4 + (mode == DImode ? 4 : 0)
2641 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2642 /* Fall through */
2643 case ZERO_EXTEND:
2644 switch (GET_MODE (XEXP (x, 0)))
2645 {
2646 case QImode:
2647 return (1 + (mode == DImode ? 4 : 0)
2648 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2649
2650 case HImode:
2651 return (4 + (mode == DImode ? 4 : 0)
2652 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2653
2654 case SImode:
2655 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
2656
2657 default:
2658 break;
e2c671ba
RE
2659 }
2660 abort ();
2661
d5b7b3ae
RE
2662 case CONST_INT:
2663 if (const_ok_for_arm (INTVAL (x)))
2664 return outer == SET ? 2 : -1;
2665 else if (outer == AND
5895f793 2666 && const_ok_for_arm (~INTVAL (x)))
d5b7b3ae
RE
2667 return -1;
2668 else if ((outer == COMPARE
2669 || outer == PLUS || outer == MINUS)
5895f793 2670 && const_ok_for_arm (-INTVAL (x)))
d5b7b3ae
RE
2671 return -1;
2672 else
2673 return 5;
2674
2675 case CONST:
2676 case LABEL_REF:
2677 case SYMBOL_REF:
2678 return 6;
2679
2680 case CONST_DOUBLE:
2681 if (const_double_rtx_ok_for_fpu (x))
2682 return outer == SET ? 2 : -1;
2683 else if ((outer == COMPARE || outer == PLUS)
2684 && neg_const_double_rtx_ok_for_fpu (x))
2685 return -1;
2686 return 7;
2687
e2c671ba
RE
2688 default:
2689 return 99;
2690 }
2691}
32de079a
RE
2692
2693int
2694arm_adjust_cost (insn, link, dep, cost)
2695 rtx insn;
2696 rtx link;
2697 rtx dep;
2698 int cost;
2699{
2700 rtx i_pat, d_pat;
2701
d19fb8e3
NC
2702 /* Some true dependencies can have a higher cost depending
2703 on precisely how certain input operands are used. */
2704 if (arm_is_xscale
2705 && REG_NOTE_KIND (link) == 0
2706 && recog_memoized (insn) < 0
2707 && recog_memoized (dep) < 0)
2708 {
2709 int shift_opnum = get_attr_shift (insn);
2710 enum attr_type attr_type = get_attr_type (dep);
2711
2712 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2713 operand for INSN. If we have a shifted input operand and the
2714 instruction we depend on is another ALU instruction, then we may
2715 have to account for an additional stall. */
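 /* For example (illustrative): in the pair

 add r2, r3, r4
 add r0, r1, r2, lsl #2

 the second ADD reads R2 through the shifter, so its dependency
 on the first costs an extra cycle on the XScale. */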
2716 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2717 {
2718 rtx shifted_operand;
2719 int opno;
2720
2721 /* Get the shifted operand. */
2722 extract_insn (insn);
2723 shifted_operand = recog_data.operand[shift_opnum];
2724
2725 /* Iterate over all the operands in DEP. If we write an operand
 2726 that overlaps with SHIFTED_OPERAND, then we have to increase the
2727 cost of this dependency. */
2728 extract_insn (dep);
2729 preprocess_constraints ();
2730 for (opno = 0; opno < recog_data.n_operands; opno++)
2731 {
2732 /* We can ignore strict inputs. */
2733 if (recog_data.operand_type[opno] == OP_IN)
2734 continue;
2735
2736 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2737 shifted_operand))
2738 return 2;
2739 }
2740 }
2741 }
2742
6354dc9b 2743 /* XXX This is not strictly true for the FPA. */
d5b7b3ae
RE
2744 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2745 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
b36ba79f
RE
2746 return 0;
2747
d5b7b3ae
RE
2748 /* Call insns don't incur a stall, even if they follow a load. */
2749 if (REG_NOTE_KIND (link) == 0
2750 && GET_CODE (insn) == CALL_INSN)
2751 return 1;
2752
32de079a
RE
2753 if ((i_pat = single_set (insn)) != NULL
2754 && GET_CODE (SET_SRC (i_pat)) == MEM
2755 && (d_pat = single_set (dep)) != NULL
2756 && GET_CODE (SET_DEST (d_pat)) == MEM)
2757 {
2758 /* This is a load after a store, there is no conflict if the load reads
2759 from a cached area. Assume that loads from the stack, and from the
2760 constant pool are cached, and that others will miss. This is a
6354dc9b 2761 hack. */
32de079a 2762
32de079a
RE
2763 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2764 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2765 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2766 || reg_mentioned_p (hard_frame_pointer_rtx,
2767 XEXP (SET_SRC (i_pat), 0)))
949d79eb 2768 return 1;
32de079a
RE
2769 }
2770
2771 return cost;
2772}
2773
6354dc9b 2774/* This code has been fixed for cross compilation. */
ff9940b0
RE
2775
2776static int fpa_consts_inited = 0;
2777
cd2b33d0 2778static const char * strings_fpa[8] =
62b10bbc 2779{
2b835d68
RE
2780 "0", "1", "2", "3",
2781 "4", "5", "0.5", "10"
2782};
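/* These eight values are the only ones an FPA instruction can encode as
 an immediate; e.g. (illustrative) MVFD F0, #10 assembles directly,
 while a value such as 7.0 has to be loaded from memory. */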
ff9940b0
RE
2783
2784static REAL_VALUE_TYPE values_fpa[8];
2785
2786static void
2787init_fpa_table ()
2788{
2789 int i;
2790 REAL_VALUE_TYPE r;
2791
2792 for (i = 0; i < 8; i++)
2793 {
2794 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2795 values_fpa[i] = r;
2796 }
f3bb6135 2797
ff9940b0
RE
2798 fpa_consts_inited = 1;
2799}
2800
6354dc9b 2801/* Return TRUE if rtx X is a valid immediate FPU constant. */
cce8749e
CH
2802
2803int
2804const_double_rtx_ok_for_fpu (x)
2805 rtx x;
2806{
ff9940b0
RE
2807 REAL_VALUE_TYPE r;
2808 int i;
2809
2810 if (!fpa_consts_inited)
2811 init_fpa_table ();
2812
2813 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2814 if (REAL_VALUE_MINUS_ZERO (r))
2815 return 0;
f3bb6135 2816
ff9940b0
RE
2817 for (i = 0; i < 8; i++)
2818 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2819 return 1;
f3bb6135 2820
ff9940b0 2821 return 0;
f3bb6135 2822}
ff9940b0 2823
6354dc9b 2824/* Return TRUE if the negation of rtx X is a valid immediate FPU constant. */
ff9940b0
RE
2825
2826int
2827neg_const_double_rtx_ok_for_fpu (x)
2828 rtx x;
2829{
2830 REAL_VALUE_TYPE r;
2831 int i;
2832
2833 if (!fpa_consts_inited)
2834 init_fpa_table ();
2835
2836 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2837 r = REAL_VALUE_NEGATE (r);
2838 if (REAL_VALUE_MINUS_ZERO (r))
2839 return 0;
f3bb6135 2840
ff9940b0
RE
2841 for (i = 0; i < 8; i++)
2842 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2843 return 1;
f3bb6135 2844
ff9940b0 2845 return 0;
f3bb6135 2846}
cce8749e
CH
2847\f
2848/* Predicates for `match_operand' and `match_operator'. */
2849
ff9940b0 2850/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
2851 (SUBREG (MEM)...).
2852
2853 This function exists because at the time it was put in it led to better
2854 code. SUBREG(MEM) always needs a reload in the places where
2855 s_register_operand is used, and this seemed to lead to excessive
2856 reloading. */
ff9940b0
RE
2857
2858int
2859s_register_operand (op, mode)
2860 register rtx op;
2861 enum machine_mode mode;
2862{
2863 if (GET_MODE (op) != mode && mode != VOIDmode)
2864 return 0;
2865
2866 if (GET_CODE (op) == SUBREG)
f3bb6135 2867 op = SUBREG_REG (op);
ff9940b0
RE
2868
2869 /* We don't consider registers whose class is NO_REGS
2870 to be a register operand. */
d5b7b3ae 2871 /* XXX might have to check for lo regs only for thumb ??? */
ff9940b0
RE
2872 return (GET_CODE (op) == REG
2873 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2874 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2875}
2876
b0888988
RE
2877/* A hard register operand (even before reload). */
2878int
2879arm_hard_register_operand (op, mode)
2880 register rtx op;
2881 enum machine_mode mode;
2882{
2883 if (GET_MODE (op) != mode && mode != VOIDmode)
2884 return 0;
2885
2886 return (GET_CODE (op) == REG
2887 && REGNO (op) < FIRST_PSEUDO_REGISTER);
2888}
2889
e2c671ba
RE
2890/* Only accept reg, subreg(reg), const_int. */
2891
2892int
2893reg_or_int_operand (op, mode)
2894 register rtx op;
2895 enum machine_mode mode;
2896{
2897 if (GET_CODE (op) == CONST_INT)
2898 return 1;
2899
2900 if (GET_MODE (op) != mode && mode != VOIDmode)
2901 return 0;
2902
2903 if (GET_CODE (op) == SUBREG)
2904 op = SUBREG_REG (op);
2905
2906 /* We don't consider registers whose class is NO_REGS
2907 to be a register operand. */
2908 return (GET_CODE (op) == REG
2909 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2910 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2911}
2912
ff9940b0
RE
2913/* Return 1 if OP is an item in memory, given that we are in reload. */
2914
2915int
d5b7b3ae 2916arm_reload_memory_operand (op, mode)
ff9940b0 2917 rtx op;
74bbc178 2918 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2919{
2920 int regno = true_regnum (op);
2921
5895f793 2922 return (!CONSTANT_P (op)
ff9940b0
RE
2923 && (regno == -1
2924 || (GET_CODE (op) == REG
2925 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2926}
2927
4d818c85 2928/* Return 1 if OP is a valid memory address, but not valid for a signed byte
d5b7b3ae
RE
2929 memory access (architecture V4).
 2930 MODE is QImode if called when computing constraints, or VOIDmode when
 2931 emitting patterns. In this latter case we cannot use memory_operand()
 2932 because it will fail on badly formed MEMs, which is precisely what we are
2933 trying to catch. */
4d818c85
RE
2934int
2935bad_signed_byte_operand (op, mode)
2936 rtx op;
d5b7b3ae 2937 enum machine_mode mode ATTRIBUTE_UNUSED;
4d818c85 2938{
d5b7b3ae 2939#if 0
5895f793 2940 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
d5b7b3ae
RE
2941 return 0;
2942#endif
2943 if (GET_CODE (op) != MEM)
4d818c85
RE
2944 return 0;
2945
2946 op = XEXP (op, 0);
2947
6354dc9b 2948 /* A sum of anything more complex than reg + reg or reg + const is bad. */
4d818c85 2949 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
5895f793
RE
2950 && (!s_register_operand (XEXP (op, 0), VOIDmode)
2951 || (!s_register_operand (XEXP (op, 1), VOIDmode)
9c8cc54f 2952 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
2953 return 1;
2954
6354dc9b 2955 /* Big constants are also bad. */
4d818c85
RE
2956 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2957 && (INTVAL (XEXP (op, 1)) > 0xff
2958 || -INTVAL (XEXP (op, 1)) > 0xff))
2959 return 1;
2960
6354dc9b 2961 /* Everything else is good, or will automatically be made so. */
4d818c85
RE
2962 return 0;
2963}
2964
cce8749e
CH
2965/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2966
2967int
2968arm_rhs_operand (op, mode)
2969 rtx op;
2970 enum machine_mode mode;
2971{
ff9940b0 2972 return (s_register_operand (op, mode)
cce8749e 2973 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2974}
cce8749e 2975
ff9940b0
RE
2976/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
2977 */
2978
2979int
2980arm_rhsm_operand (op, mode)
2981 rtx op;
2982 enum machine_mode mode;
2983{
2984 return (s_register_operand (op, mode)
2985 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2986 || memory_operand (op, mode));
f3bb6135 2987}
ff9940b0
RE
2988
2989/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
2990 constant that is valid when negated. */
2991
2992int
2993arm_add_operand (op, mode)
2994 rtx op;
2995 enum machine_mode mode;
2996{
d5b7b3ae
RE
2997 if (TARGET_THUMB)
2998 return thumb_cmp_operand (op, mode);
2999
ff9940b0
RE
3000 return (s_register_operand (op, mode)
3001 || (GET_CODE (op) == CONST_INT
3002 && (const_ok_for_arm (INTVAL (op))
3003 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 3004}
ff9940b0
RE
3005
3006int
3007arm_not_operand (op, mode)
3008 rtx op;
3009 enum machine_mode mode;
3010{
3011 return (s_register_operand (op, mode)
3012 || (GET_CODE (op) == CONST_INT
3013 && (const_ok_for_arm (INTVAL (op))
3014 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 3015}
ff9940b0 3016
5165176d
RE
3017/* Return TRUE if the operand is a memory reference which contains an
3018 offsettable address. */
3019int
3020offsettable_memory_operand (op, mode)
3021 register rtx op;
3022 enum machine_mode mode;
3023{
3024 if (mode == VOIDmode)
3025 mode = GET_MODE (op);
3026
3027 return (mode == GET_MODE (op)
3028 && GET_CODE (op) == MEM
3029 && offsettable_address_p (reload_completed | reload_in_progress,
3030 mode, XEXP (op, 0)));
3031}
3032
3033/* Return TRUE if the operand is a memory reference which is, or can be
3034 made word aligned by adjusting the offset. */
3035int
3036alignable_memory_operand (op, mode)
3037 register rtx op;
3038 enum machine_mode mode;
3039{
3040 rtx reg;
3041
3042 if (mode == VOIDmode)
3043 mode = GET_MODE (op);
3044
3045 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3046 return 0;
3047
3048 op = XEXP (op, 0);
3049
3050 return ((GET_CODE (reg = op) == REG
3051 || (GET_CODE (op) == SUBREG
3052 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3053 || (GET_CODE (op) == PLUS
3054 && GET_CODE (XEXP (op, 1)) == CONST_INT
3055 && (GET_CODE (reg = XEXP (op, 0)) == REG
3056 || (GET_CODE (XEXP (op, 0)) == SUBREG
3057 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
bdb429a5 3058 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
5165176d
RE
3059}
3060
b111229a
RE
3061/* Similar to s_register_operand, but does not allow hard integer
3062 registers. */
3063int
3064f_register_operand (op, mode)
3065 register rtx op;
3066 enum machine_mode mode;
3067{
3068 if (GET_MODE (op) != mode && mode != VOIDmode)
3069 return 0;
3070
3071 if (GET_CODE (op) == SUBREG)
3072 op = SUBREG_REG (op);
3073
3074 /* We don't consider registers whose class is NO_REGS
3075 to be a register operand. */
3076 return (GET_CODE (op) == REG
3077 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3078 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3079}
3080
cce8749e
CH
3081/* Return TRUE for valid operands for the rhs of an FPU instruction. */
3082
3083int
3084fpu_rhs_operand (op, mode)
3085 rtx op;
3086 enum machine_mode mode;
3087{
ff9940b0 3088 if (s_register_operand (op, mode))
f3bb6135 3089 return TRUE;
9ce71c6f
BS
3090
3091 if (GET_MODE (op) != mode && mode != VOIDmode)
3092 return FALSE;
3093
3094 if (GET_CODE (op) == CONST_DOUBLE)
3095 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
3096
3097 return FALSE;
3098}
cce8749e 3099
ff9940b0
RE
3100int
3101fpu_add_operand (op, mode)
3102 rtx op;
3103 enum machine_mode mode;
3104{
3105 if (s_register_operand (op, mode))
f3bb6135 3106 return TRUE;
9ce71c6f
BS
3107
3108 if (GET_MODE (op) != mode && mode != VOIDmode)
3109 return FALSE;
3110
3111 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
3112 return (const_double_rtx_ok_for_fpu (op)
3113 || neg_const_double_rtx_ok_for_fpu (op));
3114
3115 return FALSE;
ff9940b0
RE
3116}
3117
cce8749e
CH
3118/* Return nonzero if OP is a constant power of two. */
3119
3120int
3121power_of_two_operand (op, mode)
3122 rtx op;
74bbc178 3123 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
3124{
3125 if (GET_CODE (op) == CONST_INT)
3126 {
d5b7b3ae 3127 HOST_WIDE_INT value = INTVAL (op);
f3bb6135 3128 return value != 0 && (value & (value - 1)) == 0;
cce8749e 3129 }
f3bb6135
RE
3130 return FALSE;
3131}
cce8749e
CH
3132
3133/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 3134 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
3135 Note that this disallows MEM(REG+REG), but allows
3136 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
3137
3138int
3139di_operand (op, mode)
3140 rtx op;
3141 enum machine_mode mode;
3142{
ff9940b0 3143 if (s_register_operand (op, mode))
f3bb6135 3144 return TRUE;
cce8749e 3145
9ce71c6f
BS
3146 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3147 return FALSE;
3148
e9c6b69b
NC
3149 if (GET_CODE (op) == SUBREG)
3150 op = SUBREG_REG (op);
3151
cce8749e
CH
3152 switch (GET_CODE (op))
3153 {
3154 case CONST_DOUBLE:
3155 case CONST_INT:
f3bb6135
RE
3156 return TRUE;
3157
cce8749e 3158 case MEM:
f3bb6135
RE
3159 return memory_address_p (DImode, XEXP (op, 0));
3160
cce8749e 3161 default:
f3bb6135 3162 return FALSE;
cce8749e 3163 }
f3bb6135 3164}
cce8749e 3165
d5b7b3ae
RE
3166/* Like di_operand, but don't accept constants. */
3167int
3168nonimmediate_di_operand (op, mode)
3169 rtx op;
3170 enum machine_mode mode;
3171{
3172 if (s_register_operand (op, mode))
3173 return TRUE;
3174
3175 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3176 return FALSE;
3177
3178 if (GET_CODE (op) == SUBREG)
3179 op = SUBREG_REG (op);
3180
3181 if (GET_CODE (op) == MEM)
3182 return memory_address_p (DImode, XEXP (op, 0));
3183
3184 return FALSE;
3185}
3186
f3139301 3187/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 3188 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
3189 Note that this disallows MEM(REG+REG), but allows
3190 MEM(PRE/POST_INC/DEC(REG)). */
3191
3192int
3193soft_df_operand (op, mode)
3194 rtx op;
3195 enum machine_mode mode;
3196{
3197 if (s_register_operand (op, mode))
3198 return TRUE;
3199
9ce71c6f
BS
3200 if (mode != VOIDmode && GET_MODE (op) != mode)
3201 return FALSE;
3202
37b80d2e
BS
3203 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3204 return FALSE;
3205
e9c6b69b
NC
3206 if (GET_CODE (op) == SUBREG)
3207 op = SUBREG_REG (op);
9ce71c6f 3208
f3139301
DE
3209 switch (GET_CODE (op))
3210 {
3211 case CONST_DOUBLE:
3212 return TRUE;
3213
3214 case MEM:
3215 return memory_address_p (DFmode, XEXP (op, 0));
3216
3217 default:
3218 return FALSE;
3219 }
3220}
3221
d5b7b3ae
RE
3222/* Like soft_df_operand, but don't accept constants. */
3223int
3224nonimmediate_soft_df_operand (op, mode)
3225 rtx op;
3226 enum machine_mode mode;
3227{
3228 if (s_register_operand (op, mode))
3229 return TRUE;
3230
3231 if (mode != VOIDmode && GET_MODE (op) != mode)
3232 return FALSE;
3233
3234 if (GET_CODE (op) == SUBREG)
3235 op = SUBREG_REG (op);
3236
3237 if (GET_CODE (op) == MEM)
3238 return memory_address_p (DFmode, XEXP (op, 0));
3239 return FALSE;
3240}
cce8749e 3241
d5b7b3ae 3242/* Return TRUE for valid index operands. */
cce8749e
CH
3243int
3244index_operand (op, mode)
3245 rtx op;
3246 enum machine_mode mode;
3247{
d5b7b3ae 3248 return (s_register_operand (op, mode)
ff9940b0 3249 || (immediate_operand (op, mode)
d5b7b3ae
RE
3250 && (GET_CODE (op) != CONST_INT
3251 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 3252}
cce8749e 3253
ff9940b0
RE
3254/* Return TRUE for valid shifts by a constant. This also accepts any
3255 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 3256 shift operator in this case was a mult. */
ff9940b0
RE
3257
3258int
3259const_shift_operand (op, mode)
3260 rtx op;
3261 enum machine_mode mode;
3262{
3263 return (power_of_two_operand (op, mode)
3264 || (immediate_operand (op, mode)
d5b7b3ae
RE
3265 && (GET_CODE (op) != CONST_INT
3266 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 3267}
ff9940b0 3268
cce8749e
CH
3269/* Return TRUE for arithmetic operators which can be combined with a multiply
3270 (shift). */
3271
3272int
3273shiftable_operator (x, mode)
3274 rtx x;
3275 enum machine_mode mode;
3276{
3277 if (GET_MODE (x) != mode)
3278 return FALSE;
3279 else
3280 {
3281 enum rtx_code code = GET_CODE (x);
3282
3283 return (code == PLUS || code == MINUS
3284 || code == IOR || code == XOR || code == AND);
3285 }
f3bb6135 3286}
cce8749e 3287
6ab589e0
JL
3288/* Return TRUE for binary logical operators. */
3289
3290int
3291logical_binary_operator (x, mode)
3292 rtx x;
3293 enum machine_mode mode;
3294{
3295 if (GET_MODE (x) != mode)
3296 return FALSE;
3297 else
3298 {
3299 enum rtx_code code = GET_CODE (x);
3300
3301 return (code == IOR || code == XOR || code == AND);
3302 }
3303}
3304
6354dc9b 3305/* Return TRUE for shift operators. */
cce8749e
CH
3306
3307int
3308shift_operator (x, mode)
3309 rtx x;
3310 enum machine_mode mode;
3311{
3312 if (GET_MODE (x) != mode)
3313 return FALSE;
3314 else
3315 {
3316 enum rtx_code code = GET_CODE (x);
3317
ff9940b0 3318 if (code == MULT)
aec3cfba 3319 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 3320
e2c671ba
RE
3321 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3322 || code == ROTATERT);
cce8749e 3323 }
f3bb6135 3324}
ff9940b0 3325
6354dc9b
NC
3326/* Return TRUE if x is EQ or NE. */
3327int
3328equality_operator (x, mode)
f3bb6135 3329 rtx x;
74bbc178 3330 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3331{
f3bb6135 3332 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
3333}
3334
e45b72c4
RE
3335/* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3336int
3337arm_comparison_operator (x, mode)
3338 rtx x;
3339 enum machine_mode mode;
3340{
3341 return (comparison_operator (x, mode)
3342 && GET_CODE (x) != LTGT
3343 && GET_CODE (x) != UNEQ);
3344}
3345
6354dc9b 3346/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
ff9940b0
RE
3347int
3348minmax_operator (x, mode)
3349 rtx x;
3350 enum machine_mode mode;
3351{
3352 enum rtx_code code = GET_CODE (x);
3353
3354 if (GET_MODE (x) != mode)
3355 return FALSE;
f3bb6135 3356
ff9940b0 3357 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 3358}
ff9940b0 3359
ff9940b0 3360/* Return TRUE if this is the condition code register, if we aren't given
6354dc9b 3361 a mode, accept any class CCmode register. */
ff9940b0
RE
3362int
3363cc_register (x, mode)
f3bb6135
RE
3364 rtx x;
3365 enum machine_mode mode;
ff9940b0
RE
3366{
3367 if (mode == VOIDmode)
3368 {
3369 mode = GET_MODE (x);
d5b7b3ae 3370
ff9940b0
RE
3371 if (GET_MODE_CLASS (mode) != MODE_CC)
3372 return FALSE;
3373 }
f3bb6135 3374
d5b7b3ae
RE
3375 if ( GET_MODE (x) == mode
3376 && GET_CODE (x) == REG
3377 && REGNO (x) == CC_REGNUM)
ff9940b0 3378 return TRUE;
f3bb6135 3379
ff9940b0
RE
3380 return FALSE;
3381}
5bbe2d40
RE
3382
3383/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
3384 a mode, accept any class CCmode register which indicates a dominance
3385 expression. */
5bbe2d40 3386int
84ed5e79 3387dominant_cc_register (x, mode)
5bbe2d40
RE
3388 rtx x;
3389 enum machine_mode mode;
3390{
3391 if (mode == VOIDmode)
3392 {
3393 mode = GET_MODE (x);
d5b7b3ae 3394
84ed5e79 3395 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
3396 return FALSE;
3397 }
3398
d5b7b3ae 3399 if ( mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
3400 && mode != CC_DLEmode && mode != CC_DLTmode
3401 && mode != CC_DGEmode && mode != CC_DGTmode
3402 && mode != CC_DLEUmode && mode != CC_DLTUmode
3403 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3404 return FALSE;
3405
d5b7b3ae 3406 return cc_register (x, mode);
5bbe2d40
RE
3407}
3408
2b835d68
RE
3409/* Return TRUE if X references a SYMBOL_REF. */
3410int
3411symbol_mentioned_p (x)
3412 rtx x;
3413{
6f7d635c 3414 register const char * fmt;
2b835d68
RE
3415 register int i;
3416
3417 if (GET_CODE (x) == SYMBOL_REF)
3418 return 1;
3419
3420 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 3421
2b835d68
RE
3422 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3423 {
3424 if (fmt[i] == 'E')
3425 {
3426 register int j;
3427
3428 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3429 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3430 return 1;
3431 }
3432 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3433 return 1;
3434 }
3435
3436 return 0;
3437}
3438
3439/* Return TRUE if X references a LABEL_REF. */
3440int
3441label_mentioned_p (x)
3442 rtx x;
3443{
6f7d635c 3444 register const char * fmt;
2b835d68
RE
3445 register int i;
3446
3447 if (GET_CODE (x) == LABEL_REF)
3448 return 1;
3449
3450 fmt = GET_RTX_FORMAT (GET_CODE (x));
3451 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3452 {
3453 if (fmt[i] == 'E')
3454 {
3455 register int j;
3456
3457 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3458 if (label_mentioned_p (XVECEXP (x, i, j)))
3459 return 1;
3460 }
3461 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3462 return 1;
3463 }
3464
3465 return 0;
3466}
3467
ff9940b0
RE
3468enum rtx_code
3469minmax_code (x)
f3bb6135 3470 rtx x;
ff9940b0
RE
3471{
3472 enum rtx_code code = GET_CODE (x);
3473
3474 if (code == SMAX)
3475 return GE;
f3bb6135 3476 else if (code == SMIN)
ff9940b0 3477 return LE;
f3bb6135 3478 else if (code == UMIN)
ff9940b0 3479 return LEU;
f3bb6135 3480 else if (code == UMAX)
ff9940b0 3481 return GEU;
f3bb6135 3482
ff9940b0
RE
3483 abort ();
3484}
3485
6354dc9b 3486/* Return 1 if memory locations are adjacent. */
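/* For example (illustrative): [r4] and [r4, #4] are adjacent in either
 order, while [r4] and [r5, #4] are not, since the base registers
 differ. */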
f3bb6135 3487int
ff9940b0
RE
3488adjacent_mem_locations (a, b)
3489 rtx a, b;
3490{
3491 int val0 = 0, val1 = 0;
3492 int reg0, reg1;
3493
3494 if ((GET_CODE (XEXP (a, 0)) == REG
3495 || (GET_CODE (XEXP (a, 0)) == PLUS
3496 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3497 && (GET_CODE (XEXP (b, 0)) == REG
3498 || (GET_CODE (XEXP (b, 0)) == PLUS
3499 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3500 {
3501 if (GET_CODE (XEXP (a, 0)) == PLUS)
3502 {
3503 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3504 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3505 }
3506 else
3507 reg0 = REGNO (XEXP (a, 0));
3508 if (GET_CODE (XEXP (b, 0)) == PLUS)
3509 {
3510 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3511 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3512 }
3513 else
3514 reg1 = REGNO (XEXP (b, 0));
3515 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3516 }
3517 return 0;
3518}
3519
3520/* Return 1 if OP is a load multiple operation. It is known to be
6354dc9b 3521 parallel and the first section will be tested. */
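/* Such a PARALLEL typically comes from an LDMIA; e.g. (illustrative)
 ldmia r4!, {r0, r1, r2} appears as an optional write-back SET of r4
 followed by SETs loading r0-r2 from consecutive word offsets. */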
f3bb6135 3522int
ff9940b0
RE
3523load_multiple_operation (op, mode)
3524 rtx op;
74bbc178 3525 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3526{
f3bb6135 3527 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3528 int dest_regno;
3529 rtx src_addr;
f3bb6135 3530 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3531 rtx elt;
3532
3533 if (count <= 1
3534 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3535 return 0;
3536
6354dc9b 3537 /* Check to see if this might be a write-back. */
ff9940b0
RE
3538 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3539 {
3540 i++;
3541 base = 1;
3542
6354dc9b 3543 /* Now check it more carefully. */
ff9940b0
RE
3544 if (GET_CODE (SET_DEST (elt)) != REG
3545 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3546 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3547 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3548 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3549 return 0;
ff9940b0
RE
3550 }
3551
3552 /* Perform a quick check so we don't blow up below. */
3553 if (count <= i
3554 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3555 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3556 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3557 return 0;
3558
3559 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3560 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3561
3562 for (; i < count; i++)
3563 {
ed4c4348 3564 elt = XVECEXP (op, 0, i);
ff9940b0
RE
3565
3566 if (GET_CODE (elt) != SET
3567 || GET_CODE (SET_DEST (elt)) != REG
3568 || GET_MODE (SET_DEST (elt)) != SImode
6354dc9b 3569 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
ff9940b0
RE
3570 || GET_CODE (SET_SRC (elt)) != MEM
3571 || GET_MODE (SET_SRC (elt)) != SImode
3572 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5895f793 3573 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
ff9940b0
RE
3574 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3575 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3576 return 0;
3577 }
3578
3579 return 1;
3580}
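/* An illustrative example (not from the original source): the PARALLEL
       [(set (reg:SI 4) (mem:SI (reg:SI 0)))
        (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0) (const_int 4))))]
   has no write-back set, ascending destination registers and offsets
   0 and 4, so load_multiple_operation returns 1 for it.  */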
3581
3582/* Return 1 if OP is a store multiple operation. It is known to be
6354dc9b 3583 parallel and the first section will be tested. */
f3bb6135 3584int
ff9940b0
RE
3585store_multiple_operation (op, mode)
3586 rtx op;
74bbc178 3587 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3588{
f3bb6135 3589 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3590 int src_regno;
3591 rtx dest_addr;
f3bb6135 3592 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3593 rtx elt;
3594
3595 if (count <= 1
3596 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3597 return 0;
3598
6354dc9b 3599 /* Check to see if this might be a write-back. */
ff9940b0
RE
3600 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3601 {
3602 i++;
3603 base = 1;
3604
6354dc9b 3605 /* Now check it more carefully. */
ff9940b0
RE
3606 if (GET_CODE (SET_DEST (elt)) != REG
3607 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3608 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3609 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3610 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3611 return 0;
ff9940b0
RE
3612 }
3613
3614 /* Perform a quick check so we don't blow up below. */
3615 if (count <= i
3616 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3617 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3618 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3619 return 0;
3620
3621 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3622 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3623
3624 for (; i < count; i++)
3625 {
3626 elt = XVECEXP (op, 0, i);
3627
3628 if (GET_CODE (elt) != SET
3629 || GET_CODE (SET_SRC (elt)) != REG
3630 || GET_MODE (SET_SRC (elt)) != SImode
6354dc9b 3631 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
ff9940b0
RE
3632 || GET_CODE (SET_DEST (elt)) != MEM
3633 || GET_MODE (SET_DEST (elt)) != SImode
3634 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5895f793 3635 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
ff9940b0
RE
3636 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3637 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3638 return 0;
3639 }
3640
3641 return 1;
3642}
e2c671ba 3643
84ed5e79
RE
3644int
3645load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3646 rtx * operands;
84ed5e79 3647 int nops;
62b10bbc
NC
3648 int * regs;
3649 int * base;
3650 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3651{
3652 int unsorted_regs[4];
3653 HOST_WIDE_INT unsorted_offsets[4];
3654 int order[4];
ad076f4e 3655 int base_reg = -1;
84ed5e79
RE
3656 int i;
3657
3658 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3659 extended if required. */
3660 if (nops < 2 || nops > 4)
3661 abort ();
3662
3663 /* Loop over the operands and check that the memory references are
 3664 suitable (i.e. immediate offsets from the same base register). At
3665 the same time, extract the target register, and the memory
3666 offsets. */
3667 for (i = 0; i < nops; i++)
3668 {
3669 rtx reg;
3670 rtx offset;
3671
56636818
JL
3672 /* Convert a subreg of a mem into the mem itself. */
3673 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3674 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3675
84ed5e79
RE
3676 if (GET_CODE (operands[nops + i]) != MEM)
3677 abort ();
3678
3679 /* Don't reorder volatile memory references; it doesn't seem worth
3680 looking for the case where the order is ok anyway. */
3681 if (MEM_VOLATILE_P (operands[nops + i]))
3682 return 0;
3683
3684 offset = const0_rtx;
3685
3686 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3687 || (GET_CODE (reg) == SUBREG
3688 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3689 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3690 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3691 == REG)
3692 || (GET_CODE (reg) == SUBREG
3693 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3694 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3695 == CONST_INT)))
3696 {
3697 if (i == 0)
3698 {
d5b7b3ae 3699 base_reg = REGNO (reg);
84ed5e79
RE
3700 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3701 ? REGNO (operands[i])
3702 : REGNO (SUBREG_REG (operands[i])));
3703 order[0] = 0;
3704 }
3705 else
3706 {
6354dc9b 3707 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3708 /* Not addressed from the same base register. */
3709 return 0;
3710
3711 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3712 ? REGNO (operands[i])
3713 : REGNO (SUBREG_REG (operands[i])));
3714 if (unsorted_regs[i] < unsorted_regs[order[0]])
3715 order[0] = i;
3716 }
3717
3718 /* If it isn't an integer register, or if it overwrites the
3719 base register but isn't the last insn in the list, then
3720 we can't do this. */
3721 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3722 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3723 return 0;
3724
3725 unsorted_offsets[i] = INTVAL (offset);
3726 }
3727 else
3728 /* Not a suitable memory address. */
3729 return 0;
3730 }
3731
3732 /* All the useful information has now been extracted from the
3733 operands into unsorted_regs and unsorted_offsets; additionally,
3734 order[0] has been set to the lowest numbered register in the
3735 list. Sort the registers into order, and check that the memory
3736 offsets are ascending and adjacent. */
3737
3738 for (i = 1; i < nops; i++)
3739 {
3740 int j;
3741
3742 order[i] = order[i - 1];
3743 for (j = 0; j < nops; j++)
3744 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3745 && (order[i] == order[i - 1]
3746 || unsorted_regs[j] < unsorted_regs[order[i]]))
3747 order[i] = j;
3748
 3749 /* Have we found a suitable register? If not, one must be used more
3750 than once. */
3751 if (order[i] == order[i - 1])
3752 return 0;
3753
3754 /* Is the memory address adjacent and ascending? */
3755 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3756 return 0;
3757 }
3758
3759 if (base)
3760 {
3761 *base = base_reg;
3762
3763 for (i = 0; i < nops; i++)
3764 regs[i] = unsorted_regs[order[i]];
3765
3766 *load_offset = unsorted_offsets[order[0]];
3767 }
3768
3769 if (unsorted_offsets[order[0]] == 0)
3770 return 1; /* ldmia */
3771
3772 if (unsorted_offsets[order[0]] == 4)
3773 return 2; /* ldmib */
3774
3775 if (unsorted_offsets[order[nops - 1]] == 0)
3776 return 3; /* ldmda */
3777
3778 if (unsorted_offsets[order[nops - 1]] == -4)
3779 return 4; /* ldmdb */
3780
949d79eb
RE
3781 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3782 if the offset isn't small enough. The reason 2 ldrs are faster
3783 is because these ARMs are able to do more than one cache access
3784 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3785 whilst the ARM8 has a double bandwidth cache. This means that
3786 these cores can do both an instruction fetch and a data fetch in
3787 a single cycle, so the trick of calculating the address into a
3788 scratch register (one of the result regs) and then doing a load
3789 multiple actually becomes slower (and no smaller in code size).
3790 That is the transformation
6cc8c0b3
NC
3791
3792 ldr rd1, [rbase + offset]
3793 ldr rd2, [rbase + offset + 4]
3794
3795 to
3796
3797 add rd1, rbase, offset
3798 ldmia rd1, {rd1, rd2}
3799
949d79eb
RE
3800 produces worse code -- '3 cycles + any stalls on rd2' instead of
3801 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3802 access per cycle, the first sequence could never complete in less
3803 than 6 cycles, whereas the ldm sequence would only take 5 and
3804 would make better use of sequential accesses if not hitting the
3805 cache.
3806
3807 We cheat here and test 'arm_ld_sched' which we currently know to
3808 only be true for the ARM8, ARM9 and StrongARM. If this ever
3809 changes, then the test below needs to be reworked. */
f5a1b0d2 3810 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
3811 return 0;
3812
84ed5e79
RE
3813 /* Can't do it without setting up the offset, only do this if it takes
3814 no more than one insn. */
3815 return (const_ok_for_arm (unsorted_offsets[order[0]])
3816 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3817}
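/* Illustrative examples of the return codes above (not from the
   original source): for operands {r4, r5, [r0], [r0, #4]} the first
   offset is 0 and the result is 1 (ldmia); for
   {r4, r5, [r0, #4], [r0, #8]} the first offset is 4 and the result
   is 2 (ldmib), with REGS = {4, 5}, *BASE = 0 and *LOAD_OFFSET = 4.  */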
3818
cd2b33d0 3819const char *
84ed5e79 3820emit_ldm_seq (operands, nops)
62b10bbc 3821 rtx * operands;
84ed5e79
RE
3822 int nops;
3823{
3824 int regs[4];
3825 int base_reg;
3826 HOST_WIDE_INT offset;
3827 char buf[100];
3828 int i;
3829
3830 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3831 {
3832 case 1:
3833 strcpy (buf, "ldm%?ia\t");
3834 break;
3835
3836 case 2:
3837 strcpy (buf, "ldm%?ib\t");
3838 break;
3839
3840 case 3:
3841 strcpy (buf, "ldm%?da\t");
3842 break;
3843
3844 case 4:
3845 strcpy (buf, "ldm%?db\t");
3846 break;
3847
3848 case 5:
3849 if (offset >= 0)
3850 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3851 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3852 (long) offset);
3853 else
3854 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3855 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3856 (long) -offset);
3857 output_asm_insn (buf, operands);
3858 base_reg = regs[0];
3859 strcpy (buf, "ldm%?ia\t");
3860 break;
3861
3862 default:
3863 abort ();
3864 }
3865
3866 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3867 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3868
3869 for (i = 1; i < nops; i++)
3870 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3871 reg_names[regs[i]]);
3872
3873 strcat (buf, "}\t%@ phole ldm");
3874
3875 output_asm_insn (buf, operands);
3876 return "";
3877}
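/* Example of the generated template (illustrative only): for two
   registers loaded from [r0] and [r0, #4], and an empty
   REGISTER_PREFIX, the buffer holds
       "ldm%?ia\tr0, {r4, r5}\t%@ phole ldm"
   and output_asm_insn then expands the %? condition.  */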
3878
3879int
3880store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3881 rtx * operands;
84ed5e79 3882 int nops;
62b10bbc
NC
3883 int * regs;
3884 int * base;
3885 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3886{
3887 int unsorted_regs[4];
3888 HOST_WIDE_INT unsorted_offsets[4];
3889 int order[4];
ad076f4e 3890 int base_reg = -1;
84ed5e79
RE
3891 int i;
3892
3893 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3894 extended if required. */
3895 if (nops < 2 || nops > 4)
3896 abort ();
3897
3898 /* Loop over the operands and check that the memory references are
 3899 suitable (i.e. immediate offsets from the same base register). At
3900 the same time, extract the target register, and the memory
3901 offsets. */
3902 for (i = 0; i < nops; i++)
3903 {
3904 rtx reg;
3905 rtx offset;
3906
56636818
JL
3907 /* Convert a subreg of a mem into the mem itself. */
3908 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3909 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3910
84ed5e79
RE
3911 if (GET_CODE (operands[nops + i]) != MEM)
3912 abort ();
3913
3914 /* Don't reorder volatile memory references; it doesn't seem worth
3915 looking for the case where the order is ok anyway. */
3916 if (MEM_VOLATILE_P (operands[nops + i]))
3917 return 0;
3918
3919 offset = const0_rtx;
3920
3921 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3922 || (GET_CODE (reg) == SUBREG
3923 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3924 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3925 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3926 == REG)
3927 || (GET_CODE (reg) == SUBREG
3928 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3929 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3930 == CONST_INT)))
3931 {
3932 if (i == 0)
3933 {
62b10bbc 3934 base_reg = REGNO (reg);
84ed5e79
RE
3935 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3936 ? REGNO (operands[i])
3937 : REGNO (SUBREG_REG (operands[i])));
3938 order[0] = 0;
3939 }
3940 else
3941 {
6354dc9b 3942 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3943 /* Not addressed from the same base register. */
3944 return 0;
3945
3946 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3947 ? REGNO (operands[i])
3948 : REGNO (SUBREG_REG (operands[i])));
3949 if (unsorted_regs[i] < unsorted_regs[order[0]])
3950 order[0] = i;
3951 }
3952
3953 /* If it isn't an integer register, then we can't do this. */
3954 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3955 return 0;
3956
3957 unsorted_offsets[i] = INTVAL (offset);
3958 }
3959 else
3960 /* Not a suitable memory address. */
3961 return 0;
3962 }
3963
3964 /* All the useful information has now been extracted from the
3965 operands into unsorted_regs and unsorted_offsets; additionally,
3966 order[0] has been set to the lowest numbered register in the
3967 list. Sort the registers into order, and check that the memory
3968 offsets are ascending and adjacent. */
3969
3970 for (i = 1; i < nops; i++)
3971 {
3972 int j;
3973
3974 order[i] = order[i - 1];
3975 for (j = 0; j < nops; j++)
3976 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3977 && (order[i] == order[i - 1]
3978 || unsorted_regs[j] < unsorted_regs[order[i]]))
3979 order[i] = j;
3980
 3981 /* Have we found a suitable register? If not, one must be used more
3982 than once. */
3983 if (order[i] == order[i - 1])
3984 return 0;
3985
3986 /* Is the memory address adjacent and ascending? */
3987 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3988 return 0;
3989 }
3990
3991 if (base)
3992 {
3993 *base = base_reg;
3994
3995 for (i = 0; i < nops; i++)
3996 regs[i] = unsorted_regs[order[i]];
3997
3998 *load_offset = unsorted_offsets[order[0]];
3999 }
4000
4001 if (unsorted_offsets[order[0]] == 0)
4002 return 1; /* stmia */
4003
4004 if (unsorted_offsets[order[0]] == 4)
4005 return 2; /* stmib */
4006
4007 if (unsorted_offsets[order[nops - 1]] == 0)
4008 return 3; /* stmda */
4009
4010 if (unsorted_offsets[order[nops - 1]] == -4)
4011 return 4; /* stmdb */
4012
4013 return 0;
4014}
4015
cd2b33d0 4016const char *
84ed5e79 4017emit_stm_seq (operands, nops)
62b10bbc 4018 rtx * operands;
84ed5e79
RE
4019 int nops;
4020{
4021 int regs[4];
4022 int base_reg;
4023 HOST_WIDE_INT offset;
4024 char buf[100];
4025 int i;
4026
4027 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4028 {
4029 case 1:
4030 strcpy (buf, "stm%?ia\t");
4031 break;
4032
4033 case 2:
4034 strcpy (buf, "stm%?ib\t");
4035 break;
4036
4037 case 3:
4038 strcpy (buf, "stm%?da\t");
4039 break;
4040
4041 case 4:
4042 strcpy (buf, "stm%?db\t");
4043 break;
4044
4045 default:
4046 abort ();
4047 }
4048
4049 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4050 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4051
4052 for (i = 1; i < nops; i++)
4053 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4054 reg_names[regs[i]]);
4055
4056 strcat (buf, "}\t%@ phole stm");
4057
4058 output_asm_insn (buf, operands);
4059 return "";
4060}
4061
e2c671ba
RE
4062int
4063multi_register_push (op, mode)
0a81f500 4064 rtx op;
74bbc178 4065 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
4066{
4067 if (GET_CODE (op) != PARALLEL
4068 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4069 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
b15bca31 4070 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
e2c671ba
RE
4071 return 0;
4072
4073 return 1;
4074}
ff9940b0 4075\f
d7d01975 4076/* Routines for use with attributes. */
f3bb6135 4077
31fdb4d5 4078/* Return nonzero if ATTR is a valid attribute for DECL.
d7d01975
NC
4079 ATTRIBUTES are any existing attributes and ARGS are
4080 the arguments supplied with ATTR.
31fdb4d5
DE
4081
4082 Supported attributes:
4083
d5b7b3ae
RE
4084 naked:
 4085 don't output any prologue or epilogue code; the user is assumed
4086 to do the right thing.
4087
6d3d9133
NC
4088 isr or interrupt:
4089 Interrupt Service Routine.
4090
d5b7b3ae
RE
4091 interfacearm:
4092 Always assume that this function will be entered in ARM mode,
4093 not Thumb mode, and that the caller wishes to be returned to in
4094 ARM mode. */
31fdb4d5 4095int
74bbc178 4096arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 4097 tree decl;
31fdb4d5
DE
4098 tree attr;
4099 tree args;
4100{
6d3d9133
NC
4101 /* The interrupt attribute can take args, so check for it before
4102 rejecting other attributes on the grounds that they did have args. */
4103 if (is_attribute_p ("isr", attr)
4104 || is_attribute_p ("interrupt", attr))
4105 return TREE_CODE (decl) == FUNCTION_DECL;
4106
31fdb4d5
DE
4107 if (args != NULL_TREE)
4108 return 0;
4109
4110 if (is_attribute_p ("naked", attr))
4111 return TREE_CODE (decl) == FUNCTION_DECL;
d19fb8e3 4112
d5b7b3ae
RE
4113#ifdef ARM_PE
4114 if (is_attribute_p ("interfacearm", attr))
4115 return TREE_CODE (decl) == FUNCTION_DECL;
4116#endif /* ARM_PE */
4117
31fdb4d5
DE
4118 return 0;
4119}
f3bb6135 4120\f
6354dc9b 4121/* Routines for use in generating RTL. */
f3bb6135 4122rtx
56636818 4123arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 4124 in_struct_p, scalar_p)
ff9940b0
RE
4125 int base_regno;
4126 int count;
4127 rtx from;
4128 int up;
4129 int write_back;
56636818
JL
4130 int unchanging_p;
4131 int in_struct_p;
c6df88cb 4132 int scalar_p;
ff9940b0
RE
4133{
4134 int i = 0, j;
4135 rtx result;
4136 int sign = up ? 1 : -1;
56636818 4137 rtx mem;
ff9940b0 4138
d19fb8e3
NC
4139 /* XScale has load-store double instructions, but they have stricter
 4140 alignment requirements than load-store multiple, so we cannot
4141 use them.
4142
4143 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4144 the pipeline until completion.
4145
4146 NREGS CYCLES
4147 1 3
4148 2 4
4149 3 5
4150 4 6
4151
4152 An ldr instruction takes 1-3 cycles, but does not block the
4153 pipeline.
4154
4155 NREGS CYCLES
4156 1 1-3
4157 2 2-6
4158 3 3-9
4159 4 4-12
4160
4161 Best case ldr will always win. However, the more ldr instructions
4162 we issue, the less likely we are to be able to schedule them well.
4163 Using ldr instructions also increases code size.
4164
4165 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4166 for counts of 3 or 4 regs. */
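 /* A worked instance of the tables above (illustrative): for
    NREGS == 2 an ldm costs 4 cycles and stalls the pipeline until it
    completes, while two independent ldrs cost as little as 2 cycles
    in total and can be scheduled around, hence the ldr sequence below
    for counts of 1 or 2 unless we are optimizing for size.  */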
4167 if (arm_is_xscale && count <= 2 && ! optimize_size)
4168 {
4169 rtx seq;
4170
4171 start_sequence ();
4172
4173 for (i = 0; i < count; i++)
4174 {
4175 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4176 RTX_UNCHANGING_P (mem) = unchanging_p;
4177 MEM_IN_STRUCT_P (mem) = in_struct_p;
4178 MEM_SCALAR_P (mem) = scalar_p;
4179 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4180 }
4181
4182 if (write_back)
4183 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4184
4185 seq = gen_sequence ();
4186 end_sequence ();
4187
4188 return seq;
4189 }
4190
43cffd11 4191 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 4192 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 4193 if (write_back)
f3bb6135 4194 {
ff9940b0 4195 XVECEXP (result, 0, 0)
43cffd11
RE
4196 = gen_rtx_SET (GET_MODE (from), from,
4197 plus_constant (from, count * 4 * sign));
ff9940b0
RE
4198 i = 1;
4199 count++;
f3bb6135
RE
4200 }
4201
ff9940b0 4202 for (j = 0; i < count; i++, j++)
f3bb6135 4203 {
43cffd11 4204 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
4205 RTX_UNCHANGING_P (mem) = unchanging_p;
4206 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 4207 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
4208 XVECEXP (result, 0, i)
4209 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
4210 }
4211
ff9940b0
RE
4212 return result;
4213}
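/* For example (illustrative only), arm_gen_load_multiple (4, 2, from,
   TRUE, FALSE, ...) returns the PARALLEL
       [(set (reg:SI 4) (mem:SI from))
        (set (reg:SI 5) (mem:SI (plus:SI from (const_int 4))))]
   which matches load_multiple_operation above.  */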
4214
f3bb6135 4215rtx
56636818 4216arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 4217 in_struct_p, scalar_p)
ff9940b0
RE
4218 int base_regno;
4219 int count;
4220 rtx to;
4221 int up;
4222 int write_back;
56636818
JL
4223 int unchanging_p;
4224 int in_struct_p;
c6df88cb 4225 int scalar_p;
ff9940b0
RE
4226{
4227 int i = 0, j;
4228 rtx result;
4229 int sign = up ? 1 : -1;
56636818 4230 rtx mem;
ff9940b0 4231
d19fb8e3
NC
4232 /* See arm_gen_load_multiple for discussion of
4233 the pros/cons of ldm/stm usage for XScale. */
4234 if (arm_is_xscale && count <= 2 && ! optimize_size)
4235 {
4236 rtx seq;
4237
4238 start_sequence ();
4239
4240 for (i = 0; i < count; i++)
4241 {
4242 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4243 RTX_UNCHANGING_P (mem) = unchanging_p;
4244 MEM_IN_STRUCT_P (mem) = in_struct_p;
4245 MEM_SCALAR_P (mem) = scalar_p;
4246 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4247 }
4248
4249 if (write_back)
4250 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4251
4252 seq = gen_sequence ();
4253 end_sequence ();
4254
4255 return seq;
4256 }
4257
43cffd11 4258 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 4259 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 4260 if (write_back)
f3bb6135 4261 {
ff9940b0 4262 XVECEXP (result, 0, 0)
43cffd11
RE
4263 = gen_rtx_SET (GET_MODE (to), to,
4264 plus_constant (to, count * 4 * sign));
ff9940b0
RE
4265 i = 1;
4266 count++;
f3bb6135
RE
4267 }
4268
ff9940b0 4269 for (j = 0; i < count; i++, j++)
f3bb6135 4270 {
43cffd11 4271 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
4272 RTX_UNCHANGING_P (mem) = unchanging_p;
4273 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 4274 MEM_SCALAR_P (mem) = scalar_p;
56636818 4275
43cffd11
RE
4276 XVECEXP (result, 0, i)
4277 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
4278 }
4279
ff9940b0
RE
4280 return result;
4281}
4282
880e2516
RE
4283int
4284arm_gen_movstrqi (operands)
62b10bbc 4285 rtx * operands;
880e2516
RE
4286{
4287 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 4288 int i;
880e2516 4289 rtx src, dst;
ad076f4e 4290 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 4291 rtx part_bytes_reg = NULL;
56636818
JL
4292 rtx mem;
4293 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 4294 int dst_scalar_p, src_scalar_p;
880e2516
RE
4295
4296 if (GET_CODE (operands[2]) != CONST_INT
4297 || GET_CODE (operands[3]) != CONST_INT
4298 || INTVAL (operands[2]) > 64
4299 || INTVAL (operands[3]) & 3)
4300 return 0;
4301
4302 st_dst = XEXP (operands[0], 0);
4303 st_src = XEXP (operands[1], 0);
56636818
JL
4304
4305 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4306 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 4307 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
4308 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4309 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 4310 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 4311
880e2516
RE
4312 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4313 fin_src = src = copy_to_mode_reg (SImode, st_src);
4314
d5b7b3ae 4315 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
880e2516
RE
4316 out_words_to_go = INTVAL (operands[2]) / 4;
4317 last_bytes = INTVAL (operands[2]) & 3;
4318
4319 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 4320 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
4321
 4322 for (i = 0; in_words_to_go >= 2; i += 4)
4323 {
bd9c7e23 4324 if (in_words_to_go > 4)
56636818 4325 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
4326 src_unchanging_p,
4327 src_in_struct_p,
4328 src_scalar_p));
bd9c7e23
RE
4329 else
4330 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 4331 FALSE, src_unchanging_p,
c6df88cb 4332 src_in_struct_p, src_scalar_p));
bd9c7e23 4333
880e2516
RE
4334 if (out_words_to_go)
4335 {
bd9c7e23 4336 if (out_words_to_go > 4)
56636818
JL
4337 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4338 dst_unchanging_p,
c6df88cb
MM
4339 dst_in_struct_p,
4340 dst_scalar_p));
bd9c7e23
RE
4341 else if (out_words_to_go != 1)
4342 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4343 dst, TRUE,
4344 (last_bytes == 0
56636818
JL
4345 ? FALSE : TRUE),
4346 dst_unchanging_p,
c6df88cb
MM
4347 dst_in_struct_p,
4348 dst_scalar_p));
880e2516
RE
4349 else
4350 {
43cffd11 4351 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
4352 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4353 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4354 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4355 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
4356 if (last_bytes != 0)
4357 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
4358 }
4359 }
4360
4361 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4362 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4363 }
4364
4365 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4366 if (out_words_to_go)
62b10bbc
NC
4367 {
4368 rtx sreg;
4369
4370 mem = gen_rtx_MEM (SImode, src);
4371 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4372 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4373 MEM_SCALAR_P (mem) = src_scalar_p;
4374 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4375 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4376
4377 mem = gen_rtx_MEM (SImode, dst);
4378 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4379 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4380 MEM_SCALAR_P (mem) = dst_scalar_p;
4381 emit_move_insn (mem, sreg);
4382 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4383 in_words_to_go--;
4384
4385 if (in_words_to_go) /* Sanity check */
4386 abort ();
4387 }
880e2516
RE
4388
4389 if (in_words_to_go)
4390 {
4391 if (in_words_to_go < 0)
4392 abort ();
4393
43cffd11 4394 mem = gen_rtx_MEM (SImode, src);
56636818
JL
4395 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4396 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 4397 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 4398 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
4399 }
4400
d5b7b3ae
RE
4401 if (last_bytes && part_bytes_reg == NULL)
4402 abort ();
4403
880e2516
RE
4404 if (BYTES_BIG_ENDIAN && last_bytes)
4405 {
4406 rtx tmp = gen_reg_rtx (SImode);
4407
6354dc9b 4408 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
4409 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4410 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
4411 part_bytes_reg = tmp;
4412
4413 while (last_bytes)
4414 {
43cffd11 4415 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
4416 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4417 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4418 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4419 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 4420
880e2516
RE
4421 if (--last_bytes)
4422 {
4423 tmp = gen_reg_rtx (SImode);
4424 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4425 part_bytes_reg = tmp;
4426 }
4427 }
4428
4429 }
4430 else
4431 {
d5b7b3ae 4432 if (last_bytes > 1)
880e2516 4433 {
d5b7b3ae 4434 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
4435 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4436 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4437 MEM_SCALAR_P (mem) = dst_scalar_p;
d5b7b3ae
RE
4438 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4439 last_bytes -= 2;
4440 if (last_bytes)
880e2516
RE
4441 {
4442 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 4443
d5b7b3ae
RE
4444 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4445 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
4446 part_bytes_reg = tmp;
4447 }
4448 }
d5b7b3ae
RE
4449
4450 if (last_bytes)
4451 {
4452 mem = gen_rtx_MEM (QImode, dst);
4453 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4454 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4455 MEM_SCALAR_P (mem) = dst_scalar_p;
4456 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4457 }
880e2516
RE
4458 }
4459
4460 return 1;
4461}
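/* A worked example of the byte accounting above (illustrative,
   assuming NUM_INTS rounds a byte count up to whole words): for a
   10-byte copy, in_words_to_go == 3, out_words_to_go == 2 and
   last_bytes == 2, so the loop emits a 3-word load multiple and a
   2-word store multiple, and the 2-byte tail is written with a single
   HImode store (in the little-endian case).  */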
4462
5165176d
RE
4463/* Generate a memory reference for a half word, such that it will be loaded
4464 into the top 16 bits of the word. We can assume that the address is
4465 known to be alignable and of the form reg, or plus (reg, const). */
4466rtx
d5b7b3ae 4467arm_gen_rotated_half_load (memref)
5165176d
RE
4468 rtx memref;
4469{
4470 HOST_WIDE_INT offset = 0;
4471 rtx base = XEXP (memref, 0);
4472
4473 if (GET_CODE (base) == PLUS)
4474 {
4475 offset = INTVAL (XEXP (base, 1));
4476 base = XEXP (base, 0);
4477 }
4478
956d6950 4479 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 4480 if (TARGET_MMU_TRAPS
5165176d
RE
4481 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4482 return NULL;
4483
43cffd11 4484 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
4485
4486 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4487 return base;
4488
43cffd11 4489 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
4490}
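/* An illustrative example (not from the original source): on a
   little-endian target with MEMREF == (mem:HI (plus (reg) (const_int 2)))
   the halfword already occupies the top 16 bits of the aligned word,
   so the plain SImode reference is returned; with offset 0 the ROTATE
   by 16 is generated instead.  */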
4491
1646cf41
RE
4492/* Select a dominance comparison mode if possible. We support three forms.
4493 COND_OR == 0 => (X && Y)
 4494 COND_OR == 1 => ((!X) || Y)
 4495 COND_OR == 2 => (X || Y)
 4496 If we are unable to support a dominance comparison we return CC mode.
4497 This will then fail to match for the RTL expressions that generate this
4498 call. */
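/* An illustrative example (not from the original source): for
   X == (eq r0 r1), Y == (ge r0 r1) and COND_OR == 2, EQ dominates GE
   (EQ being true implies GE is true), so the function below returns
   CC_DGEmode.  */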
d19fb8e3 4499
84ed5e79 4500static enum machine_mode
74bbc178 4501select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
4502 rtx x;
4503 rtx y;
4504 HOST_WIDE_INT cond_or;
4505{
4506 enum rtx_code cond1, cond2;
4507 int swapped = 0;
4508
4509 /* Currently we will probably get the wrong result if the individual
4510 comparisons are not simple. This also ensures that it is safe to
956d6950 4511 reverse a comparison if necessary. */
84ed5e79
RE
4512 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4513 != CCmode)
4514 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4515 != CCmode))
4516 return CCmode;
4517
1646cf41
RE
4518 /* The if_then_else variant of this tests the second condition if the
4519 first passes, but is true if the first fails. Reverse the first
4520 condition to get a true "inclusive-or" expression. */
4521 if (cond_or == 1)
84ed5e79
RE
4522 cond1 = reverse_condition (cond1);
4523
4524 /* If the comparisons are not equal, and one doesn't dominate the other,
4525 then we can't do this. */
4526 if (cond1 != cond2
5895f793
RE
4527 && !comparison_dominates_p (cond1, cond2)
4528 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
4529 return CCmode;
4530
4531 if (swapped)
4532 {
4533 enum rtx_code temp = cond1;
4534 cond1 = cond2;
4535 cond2 = temp;
4536 }
4537
4538 switch (cond1)
4539 {
4540 case EQ:
5895f793 4541 if (cond2 == EQ || !cond_or)
84ed5e79
RE
4542 return CC_DEQmode;
4543
4544 switch (cond2)
4545 {
4546 case LE: return CC_DLEmode;
4547 case LEU: return CC_DLEUmode;
4548 case GE: return CC_DGEmode;
4549 case GEU: return CC_DGEUmode;
ad076f4e 4550 default: break;
84ed5e79
RE
4551 }
4552
4553 break;
4554
4555 case LT:
5895f793 4556 if (cond2 == LT || !cond_or)
84ed5e79
RE
4557 return CC_DLTmode;
4558 if (cond2 == LE)
4559 return CC_DLEmode;
4560 if (cond2 == NE)
4561 return CC_DNEmode;
4562 break;
4563
4564 case GT:
5895f793 4565 if (cond2 == GT || !cond_or)
84ed5e79
RE
4566 return CC_DGTmode;
4567 if (cond2 == GE)
4568 return CC_DGEmode;
4569 if (cond2 == NE)
4570 return CC_DNEmode;
4571 break;
4572
4573 case LTU:
5895f793 4574 if (cond2 == LTU || !cond_or)
84ed5e79
RE
4575 return CC_DLTUmode;
4576 if (cond2 == LEU)
4577 return CC_DLEUmode;
4578 if (cond2 == NE)
4579 return CC_DNEmode;
4580 break;
4581
4582 case GTU:
5895f793 4583 if (cond2 == GTU || !cond_or)
84ed5e79
RE
4584 return CC_DGTUmode;
4585 if (cond2 == GEU)
4586 return CC_DGEUmode;
4587 if (cond2 == NE)
4588 return CC_DNEmode;
4589 break;
4590
4591 /* The remaining cases only occur when both comparisons are the
4592 same. */
4593 case NE:
4594 return CC_DNEmode;
4595
4596 case LE:
4597 return CC_DLEmode;
4598
4599 case GE:
4600 return CC_DGEmode;
4601
4602 case LEU:
4603 return CC_DLEUmode;
4604
4605 case GEU:
4606 return CC_DGEUmode;
ad076f4e
RE
4607
4608 default:
4609 break;
84ed5e79
RE
4610 }
4611
4612 abort ();
4613}
4614
4615enum machine_mode
4616arm_select_cc_mode (op, x, y)
4617 enum rtx_code op;
4618 rtx x;
4619 rtx y;
4620{
4621 /* All floating point compares return CCFP if it is an equality
4622 comparison, and CCFPE otherwise. */
4623 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
4624 {
4625 switch (op)
4626 {
4627 case EQ:
4628 case NE:
4629 case UNORDERED:
4630 case ORDERED:
4631 case UNLT:
4632 case UNLE:
4633 case UNGT:
4634 case UNGE:
4635 case UNEQ:
4636 case LTGT:
4637 return CCFPmode;
4638
4639 case LT:
4640 case LE:
4641 case GT:
4642 case GE:
4643 return CCFPEmode;
4644
4645 default:
4646 abort ();
4647 }
4648 }
84ed5e79
RE
4649
4650 /* A compare with a shifted operand. Because of canonicalization, the
4651 comparison will have to be swapped when we emit the assembler. */
4652 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4653 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4654 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4655 || GET_CODE (x) == ROTATERT))
4656 return CC_SWPmode;
4657
956d6950
JL
4658 /* This is a special case that is used by combine to allow a
4659 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 4660 followed by a comparison of the shifted integer (only valid for
956d6950 4661 equalities and unsigned inequalities). */
84ed5e79
RE
4662 if (GET_MODE (x) == SImode
4663 && GET_CODE (x) == ASHIFT
4664 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4665 && GET_CODE (XEXP (x, 0)) == SUBREG
4666 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4667 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4668 && (op == EQ || op == NE
4669 || op == GEU || op == GTU || op == LTU || op == LEU)
4670 && GET_CODE (y) == CONST_INT)
4671 return CC_Zmode;
4672
1646cf41
RE
4673 /* A construct for a conditional compare, if the false arm contains
4674 0, then both conditions must be true, otherwise either condition
4675 must be true. Not all conditions are possible, so CCmode is
4676 returned if it can't be done. */
4677 if (GET_CODE (x) == IF_THEN_ELSE
4678 && (XEXP (x, 2) == const0_rtx
4679 || XEXP (x, 2) == const1_rtx)
4680 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4681 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4682 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4683 INTVAL (XEXP (x, 2)));
4684
4685 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4686 if (GET_CODE (x) == AND
4687 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4688 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4689 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4690
4691 if (GET_CODE (x) == IOR
4692 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4693 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4694 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4695
84ed5e79
RE
 4696 /* For an operation that sets the condition codes as a side-effect,
 4697 the V flag is not set correctly, so we can only use comparisons where
 4698 this doesn't matter. (For LT and GE we can use "mi" and "pl"
 4699 instead.) */
4700 if (GET_MODE (x) == SImode
4701 && y == const0_rtx
4702 && (op == EQ || op == NE || op == LT || op == GE)
4703 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4704 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4705 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4706 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4707 || GET_CODE (x) == LSHIFTRT
4708 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4709 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4710 return CC_NOOVmode;
4711
84ed5e79
RE
4712 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4713 return CC_Zmode;
4714
bd9c7e23
RE
4715 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4716 && GET_CODE (x) == PLUS
4717 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4718 return CC_Cmode;
4719
84ed5e79
RE
4720 return CCmode;
4721}
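/* Illustrative examples (not from the original source): comparing
   (plus:SI r0 r1) against const0_rtx with GE yields CC_NOOVmode,
   a QImode equality test yields CC_Zmode, and an LTU comparison of
   (plus:SI r0 r1) against r1 yields CC_Cmode.  */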
4722
ff9940b0
RE
4723/* X and Y are two things to compare using CODE. Emit the compare insn and
4724 return the rtx for register 0 in the proper mode. FP means this is a
 4725 floating point compare: I don't think that it is needed on the ARM. */
4726
4727rtx
d5b7b3ae 4728arm_gen_compare_reg (code, x, y)
ff9940b0
RE
4729 enum rtx_code code;
4730 rtx x, y;
4731{
4732 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 4733 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 4734
43cffd11
RE
4735 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4736 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4737
4738 return cc_reg;
4739}
4740
0a81f500
RE
4741void
4742arm_reload_in_hi (operands)
62b10bbc 4743 rtx * operands;
0a81f500 4744{
f9cc092a
RE
4745 rtx ref = operands[1];
4746 rtx base, scratch;
4747 HOST_WIDE_INT offset = 0;
4748
4749 if (GET_CODE (ref) == SUBREG)
4750 {
ddef6bc7 4751 offset = SUBREG_BYTE (ref);
f9cc092a
RE
4752 ref = SUBREG_REG (ref);
4753 }
4754
4755 if (GET_CODE (ref) == REG)
4756 {
4757 /* We have a pseudo which has been spilt onto the stack; there
4758 are two cases here: the first where there is a simple
4759 stack-slot replacement and a second where the stack-slot is
4760 out of range, or is used as a subreg. */
4761 if (reg_equiv_mem[REGNO (ref)])
4762 {
4763 ref = reg_equiv_mem[REGNO (ref)];
4764 base = find_replacement (&XEXP (ref, 0));
4765 }
4766 else
6354dc9b 4767 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4768 base = reg_equiv_address[REGNO (ref)];
4769 }
4770 else
4771 base = find_replacement (&XEXP (ref, 0));
0a81f500 4772
e5e809f4
JL
4773 /* Handle the case where the address is too complex to be offset by 1. */
4774 if (GET_CODE (base) == MINUS
4775 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4776 {
f9cc092a 4777 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 4778
43cffd11 4779 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
4780 base = base_plus;
4781 }
f9cc092a
RE
4782 else if (GET_CODE (base) == PLUS)
4783 {
6354dc9b 4784 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4785 HOST_WIDE_INT hi, lo;
4786
4787 offset += INTVAL (XEXP (base, 1));
4788 base = XEXP (base, 0);
4789
6354dc9b 4790 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4791 /* Valid range for lo is -4095 -> 4095 */
4792 lo = (offset >= 0
4793 ? (offset & 0xfff)
4794 : -((-offset) & 0xfff));
4795
4796 /* Corner case, if lo is the max offset then we would be out of range
4797 once we have added the additional 1 below, so bump the msb into the
4798 pre-loading insn(s). */
4799 if (lo == 4095)
4800 lo &= 0x7ff;
4801
e5951263
NC
4802 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4803 ^ HOST_INT (0x80000000))
4804 - HOST_INT (0x80000000));
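      /* A worked example of the split above (illustrative): for
	 offset == 0x1234 we get lo == 0x234 and hi == 0x1000, so a
	 single addsi3 forms base + 0x1000 and the two byte loads below
	 use offsets 0x234 and 0x235.  */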
f9cc092a
RE
4805
4806 if (hi + lo != offset)
4807 abort ();
4808
4809 if (hi != 0)
4810 {
4811 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4812
4813 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4814 that require more than one insn. */
f9cc092a
RE
4815 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4816 base = base_plus;
4817 offset = lo;
4818 }
4819 }
e5e809f4 4820
f9cc092a
RE
4821 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4822 emit_insn (gen_zero_extendqisi2 (scratch,
4823 gen_rtx_MEM (QImode,
4824 plus_constant (base,
4825 offset))));
43cffd11
RE
4826 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4827 gen_rtx_MEM (QImode,
f9cc092a
RE
4828 plus_constant (base,
4829 offset + 1))));
5895f793 4830 if (!BYTES_BIG_ENDIAN)
43cffd11
RE
4831 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4832 gen_rtx_IOR (SImode,
4833 gen_rtx_ASHIFT
4834 (SImode,
4835 gen_rtx_SUBREG (SImode, operands[0], 0),
4836 GEN_INT (8)),
f9cc092a 4837 scratch)));
0a81f500 4838 else
43cffd11
RE
4839 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4840 gen_rtx_IOR (SImode,
f9cc092a 4841 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
4842 GEN_INT (8)),
4843 gen_rtx_SUBREG (SImode, operands[0],
4844 0))));
0a81f500
RE
4845}
4846
f9cc092a
RE
4847/* Handle storing a half-word to memory during reload by synthesising as two
4848 byte stores. Take care not to clobber the input values until after we
4849 have moved them somewhere safe. This code assumes that if the DImode
4850 scratch in operands[2] overlaps either the input value or output address
4851 in some way, then that value must die in this insn (we absolutely need
4852 two scratch registers for some corner cases). */
f3bb6135 4853void
af48348a 4854arm_reload_out_hi (operands)
62b10bbc 4855 rtx * operands;
af48348a 4856{
f9cc092a
RE
4857 rtx ref = operands[0];
4858 rtx outval = operands[1];
4859 rtx base, scratch;
4860 HOST_WIDE_INT offset = 0;
4861
4862 if (GET_CODE (ref) == SUBREG)
4863 {
ddef6bc7 4864 offset = SUBREG_BYTE (ref);
f9cc092a
RE
4865 ref = SUBREG_REG (ref);
4866 }
4867
4868
4869 if (GET_CODE (ref) == REG)
4870 {
4871 /* We have a pseudo which has been spilt onto the stack; there
4872 are two cases here: the first where there is a simple
4873 stack-slot replacement and a second where the stack-slot is
4874 out of range, or is used as a subreg. */
4875 if (reg_equiv_mem[REGNO (ref)])
4876 {
4877 ref = reg_equiv_mem[REGNO (ref)];
4878 base = find_replacement (&XEXP (ref, 0));
4879 }
4880 else
6354dc9b 4881 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4882 base = reg_equiv_address[REGNO (ref)];
4883 }
4884 else
4885 base = find_replacement (&XEXP (ref, 0));
4886
4887 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4888
4889 /* Handle the case where the address is too complex to be offset by 1. */
4890 if (GET_CODE (base) == MINUS
4891 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4892 {
4893 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4894
4895 /* Be careful not to destroy OUTVAL. */
4896 if (reg_overlap_mentioned_p (base_plus, outval))
4897 {
4898 /* Updating base_plus might destroy outval, see if we can
4899 swap the scratch and base_plus. */
5895f793 4900 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4901 {
4902 rtx tmp = scratch;
4903 scratch = base_plus;
4904 base_plus = tmp;
4905 }
4906 else
4907 {
4908 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4909
4910 /* Be conservative and copy OUTVAL into the scratch now,
4911 this should only be necessary if outval is a subreg
4912 of something larger than a word. */
4913 /* XXX Might this clobber base? I can't see how it can,
4914 since scratch is known to overlap with OUTVAL, and
4915 must be wider than a word. */
4916 emit_insn (gen_movhi (scratch_hi, outval));
4917 outval = scratch_hi;
4918 }
4919 }
4920
4921 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4922 base = base_plus;
4923 }
4924 else if (GET_CODE (base) == PLUS)
4925 {
6354dc9b 4926 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4927 HOST_WIDE_INT hi, lo;
4928
4929 offset += INTVAL (XEXP (base, 1));
4930 base = XEXP (base, 0);
4931
6354dc9b 4932 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4933 /* Valid range for lo is -4095 -> 4095 */
4934 lo = (offset >= 0
4935 ? (offset & 0xfff)
4936 : -((-offset) & 0xfff));
4937
4938 /* Corner case, if lo is the max offset then we would be out of range
4939 once we have added the additional 1 below, so bump the msb into the
4940 pre-loading insn(s). */
4941 if (lo == 4095)
4942 lo &= 0x7ff;
4943
e5951263
NC
4944 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4945 ^ HOST_INT (0x80000000))
5895f793 4946 - HOST_INT (0x80000000));
f9cc092a
RE
4947
4948 if (hi + lo != offset)
4949 abort ();
4950
4951 if (hi != 0)
4952 {
4953 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4954
4955 /* Be careful not to destroy OUTVAL. */
4956 if (reg_overlap_mentioned_p (base_plus, outval))
4957 {
4958 /* Updating base_plus might destroy outval, see if we
4959 can swap the scratch and base_plus. */
5895f793 4960 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4961 {
4962 rtx tmp = scratch;
4963 scratch = base_plus;
4964 base_plus = tmp;
4965 }
4966 else
4967 {
4968 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4969
4970 /* Be conservative and copy outval into scratch now,
4971 this should only be necessary if outval is a
4972 subreg of something larger than a word. */
4973 /* XXX Might this clobber base? I can't see how it
4974 can, since scratch is known to overlap with
4975 outval. */
4976 emit_insn (gen_movhi (scratch_hi, outval));
4977 outval = scratch_hi;
4978 }
4979 }
4980
4981 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4982 that require more than one insn. */
f9cc092a
RE
4983 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4984 base = base_plus;
4985 offset = lo;
4986 }
4987 }
af48348a 4988
b5cc037f
RE
4989 if (BYTES_BIG_ENDIAN)
4990 {
f9cc092a
RE
4991 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4992 plus_constant (base, offset + 1)),
4993 gen_rtx_SUBREG (QImode, outval, 0)));
4994 emit_insn (gen_lshrsi3 (scratch,
4995 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4996 GEN_INT (8)));
f9cc092a
RE
4997 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4998 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f
RE
4999 }
5000 else
5001 {
f9cc092a
RE
5002 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5003 gen_rtx_SUBREG (QImode, outval, 0)));
5004 emit_insn (gen_lshrsi3 (scratch,
5005 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 5006 GEN_INT (8)));
f9cc092a
RE
5007 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5008 plus_constant (base, offset + 1)),
5009 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f 5010 }
af48348a 5011}
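/* Illustrative expansion of the little-endian case above (assuming the
   offset is directly addressable):
       strb outval, [base, #offset]
       mov scratch, outval, lsr #8
       strb scratch, [base, #offset + 1]  */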
2b835d68 5012\f
d5b7b3ae
RE
5013/* Print a symbolic form of X to the debug file, F. */
5014static void
5015arm_print_value (f, x)
5016 FILE * f;
5017 rtx x;
5018{
5019 switch (GET_CODE (x))
5020 {
5021 case CONST_INT:
5022 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5023 return;
5024
5025 case CONST_DOUBLE:
5026 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5027 return;
5028
5029 case CONST_STRING:
5030 fprintf (f, "\"%s\"", XSTR (x, 0));
5031 return;
5032
5033 case SYMBOL_REF:
5034 fprintf (f, "`%s'", XSTR (x, 0));
5035 return;
5036
5037 case LABEL_REF:
5038 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5039 return;
5040
5041 case CONST:
5042 arm_print_value (f, XEXP (x, 0));
5043 return;
5044
5045 case PLUS:
5046 arm_print_value (f, XEXP (x, 0));
5047 fprintf (f, "+");
5048 arm_print_value (f, XEXP (x, 1));
5049 return;
5050
5051 case PC:
5052 fprintf (f, "pc");
5053 return;
5054
5055 default:
5056 fprintf (f, "????");
5057 return;
5058 }
5059}
5060\f
2b835d68 5061/* Routines for manipulation of the constant pool. */
2b835d68 5062
949d79eb
RE
5063/* Arm instructions cannot load a large constant directly into a
5064 register; they have to come from a pc relative load. The constant
5065 must therefore be placed in the addressable range of the pc
5066 relative load. Depending on the precise pc relative load
5067 instruction the range is somewhere between 256 bytes and 4k. This
5068 means that we often have to dump a constant inside a function, and
2b835d68
RE
5069 generate code to branch around it.
5070
949d79eb
RE
5071 It is important to minimize this, since the branches will slow
5072 things down and make the code larger.
2b835d68 5073
949d79eb
RE
5074 Normally we can hide the table after an existing unconditional
5075 branch so that there is no interruption of the flow, but in the
5076 worst case the code looks like this:
2b835d68
RE
5077
5078 ldr rn, L1
949d79eb 5079 ...
2b835d68
RE
5080 b L2
5081 align
5082 L1: .long value
5083 L2:
949d79eb 5084 ...
2b835d68 5085
2b835d68 5086 ldr rn, L3
949d79eb 5087 ...
2b835d68
RE
5088 b L4
5089 align
2b835d68
RE
5090 L3: .long value
5091 L4:
949d79eb
RE
5092 ...
5093
5094 We fix this by performing a scan after scheduling, which notices
5095 which instructions need to have their operands fetched from the
5096 constant table and builds the table.
5097
5098 The algorithm starts by building a table of all the constants that
5099 need fixing up and all the natural barriers in the function (places
5100 where a constant table can be dropped without breaking the flow).
5101 For each fixup we note how far the pc-relative replacement will be
5102 able to reach and the offset of the instruction into the function.
5103
5104 Having built the table we then group the fixes together to form
5105 tables that are as large as possible (subject to addressing
5106 constraints) and emit each table of constants after the last
5107 barrier that is within range of all the instructions in the group.
5108 If a group does not contain a barrier, then we forcibly create one
5109 by inserting a jump instruction into the flow. Once the table has
5110 been inserted, the insns are then modified to reference the
5111 relevant entry in the pool.
5112
6354dc9b 5113 Possible enhancements to the algorithm (not implemented) are:
949d79eb 5114
d5b7b3ae 5115 1) For some processors and object formats, there may be benefit in
949d79eb
RE
5116 aligning the pools to the start of cache lines; this alignment
5117 would need to be taken into account when calculating addressability
6354dc9b 5118 of a pool. */
2b835d68 5119
d5b7b3ae
RE
5120/* These typedefs are located at the start of this file, so that
5121 they can be used in the prototypes there. This comment is to
5122 remind readers of that fact so that the following structures
5123 can be understood more easily.
5124
5125 typedef struct minipool_node Mnode;
5126 typedef struct minipool_fixup Mfix; */
5127
5128struct minipool_node
5129{
5130 /* Doubly linked chain of entries. */
5131 Mnode * next;
5132 Mnode * prev;
 5133 /* The maximum offset into the code at which this entry can be placed. While
5134 pushing fixes for forward references, all entries are sorted in order
5135 of increasing max_address. */
5136 HOST_WIDE_INT max_address;
 5137 /* Similarly for an entry inserted for a backwards ref. */
5138 HOST_WIDE_INT min_address;
5139 /* The number of fixes referencing this entry. This can become zero
5140 if we "unpush" an entry. In this case we ignore the entry when we
5141 come to emit the code. */
5142 int refcount;
5143 /* The offset from the start of the minipool. */
5144 HOST_WIDE_INT offset;
 5145 /* The value in the table. */
5146 rtx value;
5147 /* The mode of value. */
5148 enum machine_mode mode;
5149 int fix_size;
5150};
5151
5152struct minipool_fixup
2b835d68 5153{
d5b7b3ae
RE
5154 Mfix * next;
5155 rtx insn;
5156 HOST_WIDE_INT address;
5157 rtx * loc;
5158 enum machine_mode mode;
5159 int fix_size;
5160 rtx value;
5161 Mnode * minipool;
5162 HOST_WIDE_INT forwards;
5163 HOST_WIDE_INT backwards;
5164};
2b835d68 5165
d5b7b3ae
RE
5166/* Fixes less than a word need padding out to a word boundary. */
5167#define MINIPOOL_FIX_SIZE(mode) \
5168 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
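/* E.g. (illustrative): QImode and HImode fixes are padded to 4 bytes,
   while SImode stays at 4 and DImode or DFmode at 8.  */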
2b835d68 5169
d5b7b3ae
RE
5170static Mnode * minipool_vector_head;
5171static Mnode * minipool_vector_tail;
5172static rtx minipool_vector_label;
332072db 5173
d5b7b3ae
RE
5174/* The linked list of all minipool fixes required for this function. */
5175Mfix * minipool_fix_head;
5176Mfix * minipool_fix_tail;
5177/* The fix entry for the current minipool, once it has been placed. */
5178Mfix * minipool_barrier;
5179
5180/* Determines if INSN is the start of a jump table. Returns the end
5181 of the TABLE or NULL_RTX. */
5182static rtx
5183is_jump_table (insn)
5184 rtx insn;
2b835d68 5185{
d5b7b3ae 5186 rtx table;
da6558fd 5187
d5b7b3ae
RE
5188 if (GET_CODE (insn) == JUMP_INSN
5189 && JUMP_LABEL (insn) != NULL
5190 && ((table = next_real_insn (JUMP_LABEL (insn)))
5191 == next_real_insn (insn))
5192 && table != NULL
5193 && GET_CODE (table) == JUMP_INSN
5194 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5195 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5196 return table;
5197
5198 return NULL_RTX;
2b835d68
RE
5199}
5200
d5b7b3ae
RE
5201static HOST_WIDE_INT
5202get_jump_table_size (insn)
5203 rtx insn;
2b835d68 5204{
d5b7b3ae
RE
5205 rtx body = PATTERN (insn);
5206 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 5207
d5b7b3ae
RE
5208 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5209}
2b835d68 5210
d5b7b3ae
RE
5211/* Move a minipool fix MP from its current location to before MAX_MP.
5212 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
 5213 constraints may need updating. */
5214static Mnode *
5215move_minipool_fix_forward_ref (mp, max_mp, max_address)
5216 Mnode * mp;
5217 Mnode * max_mp;
5218 HOST_WIDE_INT max_address;
5219{
5220 /* This should never be true and the code below assumes these are
5221 different. */
5222 if (mp == max_mp)
5223 abort ();
5224
5225 if (max_mp == NULL)
5226 {
5227 if (max_address < mp->max_address)
5228 mp->max_address = max_address;
5229 }
5230 else
2b835d68 5231 {
d5b7b3ae
RE
5232 if (max_address > max_mp->max_address - mp->fix_size)
5233 mp->max_address = max_mp->max_address - mp->fix_size;
5234 else
5235 mp->max_address = max_address;
2b835d68 5236
d5b7b3ae
RE
5237 /* Unlink MP from its current position. Since max_mp is non-null,
5238 mp->prev must be non-null. */
5239 mp->prev->next = mp->next;
5240 if (mp->next != NULL)
5241 mp->next->prev = mp->prev;
5242 else
5243 minipool_vector_tail = mp->prev;
2b835d68 5244
d5b7b3ae
RE
5245 /* Re-insert it before MAX_MP. */
5246 mp->next = max_mp;
5247 mp->prev = max_mp->prev;
5248 max_mp->prev = mp;
5249
5250 if (mp->prev != NULL)
5251 mp->prev->next = mp;
5252 else
5253 minipool_vector_head = mp;
5254 }
2b835d68 5255
d5b7b3ae
RE
5256 /* Save the new entry. */
5257 max_mp = mp;
5258
 5259 /* Scan over the preceding entries and adjust their addresses as
5260 required. */
5261 while (mp->prev != NULL
5262 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5263 {
5264 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5265 mp = mp->prev;
2b835d68
RE
5266 }
5267
d5b7b3ae 5268 return max_mp;
2b835d68
RE
5269}
5270
d5b7b3ae
RE
5271/* Add a constant to the minipool for a forward reference. Returns the
5272 node added or NULL if the constant will not fit in this pool. */
5273static Mnode *
5274add_minipool_forward_ref (fix)
5275 Mfix * fix;
5276{
5277 /* If set, max_mp is the first pool_entry that has a lower
5278 constraint than the one we are trying to add. */
5279 Mnode * max_mp = NULL;
5280 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5281 Mnode * mp;
5282
5283 /* If this fix's address is greater than the address of the first
5284 entry, then we can't put the fix in this pool. We subtract the
5285 size of the current fix to ensure that if the table is fully
 5286 packed we still have enough room to insert this value by shuffling
5287 the other fixes forwards. */
 5288 if (minipool_vector_head
 5289 && fix->address >= minipool_vector_head->max_address - fix->fix_size)
5290 return NULL;
2b835d68 5291
d5b7b3ae
RE
5292 /* Scan the pool to see if a constant with the same value has
5293 already been added. While we are doing this, also note the
5294 location where we must insert the constant if it doesn't already
5295 exist. */
5296 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5297 {
5298 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5299 && fix->mode == mp->mode
5300 && (GET_CODE (fix->value) != CODE_LABEL
5301 || (CODE_LABEL_NUMBER (fix->value)
5302 == CODE_LABEL_NUMBER (mp->value)))
5303 && rtx_equal_p (fix->value, mp->value))
5304 {
5305 /* More than one fix references this entry. */
5306 mp->refcount++;
5307 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5308 }
5309
5310 /* Note the insertion point if necessary. */
5311 if (max_mp == NULL
5312 && mp->max_address > max_address)
5313 max_mp = mp;
5314 }
5315
5316 /* The value is not currently in the minipool, so we need to create
5317 a new entry for it. If MAX_MP is NULL, the entry will be put on
5318 the end of the list since the placement is less constrained than
5319 any existing entry. Otherwise, we insert the new fix before
5320      MAX_MP and, if necessary, adjust the constraints on the other
5321 entries. */
5322 mp = xmalloc (sizeof (* mp));
5323 mp->fix_size = fix->fix_size;
5324 mp->mode = fix->mode;
5325 mp->value = fix->value;
5326 mp->refcount = 1;
5327 /* Not yet required for a backwards ref. */
5328 mp->min_address = -65536;
5329
5330 if (max_mp == NULL)
5331 {
5332 mp->max_address = max_address;
5333 mp->next = NULL;
5334 mp->prev = minipool_vector_tail;
5335
5336 if (mp->prev == NULL)
5337 {
5338 minipool_vector_head = mp;
5339 minipool_vector_label = gen_label_rtx ();
7551cbc7 5340 }
2b835d68 5341 else
d5b7b3ae 5342 mp->prev->next = mp;
2b835d68 5343
d5b7b3ae
RE
5344 minipool_vector_tail = mp;
5345 }
5346 else
5347 {
5348 if (max_address > max_mp->max_address - mp->fix_size)
5349 mp->max_address = max_mp->max_address - mp->fix_size;
5350 else
5351 mp->max_address = max_address;
5352
5353 mp->next = max_mp;
5354 mp->prev = max_mp->prev;
5355 max_mp->prev = mp;
5356 if (mp->prev != NULL)
5357 mp->prev->next = mp;
5358 else
5359 minipool_vector_head = mp;
5360 }
5361
5362 /* Save the new entry. */
5363 max_mp = mp;
5364
5365   /* Scan over the preceding entries and adjust their addresses as
5366 required. */
5367 while (mp->prev != NULL
5368 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5369 {
5370 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5371 mp = mp->prev;
2b835d68
RE
5372 }
5373
d5b7b3ae
RE
5374 return max_mp;
5375}
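/* Added note (hypothetical figures): a fix at address 0x100 on an
   insn with a 4K forward pool range gives max_address = 0x1100.  The
   new entry goes on the tail unless some existing entry has a tighter
   max_address, in which case it is inserted before that entry and the
   constraints on earlier entries are re-tightened as above.  */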
5376
5377static Mnode *
5378move_minipool_fix_backward_ref (mp, min_mp, min_address)
5379 Mnode * mp;
5380 Mnode * min_mp;
5381 HOST_WIDE_INT min_address;
5382{
5383 HOST_WIDE_INT offset;
5384
5385 /* This should never be true, and the code below assumes these are
5386 different. */
5387 if (mp == min_mp)
5388 abort ();
5389
5390 if (min_mp == NULL)
2b835d68 5391 {
d5b7b3ae
RE
5392 if (min_address > mp->min_address)
5393 mp->min_address = min_address;
5394 }
5395 else
5396 {
5397 /* We will adjust this below if it is too loose. */
5398 mp->min_address = min_address;
5399
5400 /* Unlink MP from its current position. Since min_mp is non-null,
5401 mp->next must be non-null. */
5402 mp->next->prev = mp->prev;
5403 if (mp->prev != NULL)
5404 mp->prev->next = mp->next;
5405 else
5406 minipool_vector_head = mp->next;
5407
5408 /* Reinsert it after MIN_MP. */
5409 mp->prev = min_mp;
5410 mp->next = min_mp->next;
5411 min_mp->next = mp;
5412 if (mp->next != NULL)
5413 mp->next->prev = mp;
2b835d68 5414 else
d5b7b3ae
RE
5415 minipool_vector_tail = mp;
5416 }
5417
5418 min_mp = mp;
5419
5420 offset = 0;
5421 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5422 {
5423 mp->offset = offset;
5424 if (mp->refcount > 0)
5425 offset += mp->fix_size;
5426
5427 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5428 mp->next->min_address = mp->min_address + mp->fix_size;
5429 }
5430
5431 return min_mp;
5432}
5433
5434/* Add a constant to the minipool for a backward reference. Returns the
5435 node added or NULL if the constant will not fit in this pool.
5436
5437 Note that the code for insertion for a backwards reference can be
5438 somewhat confusing because the calculated offsets for each fix do
5439 not take into account the size of the pool (which is still under
5440   construction). */
5441static Mnode *
5442add_minipool_backward_ref (fix)
5443 Mfix * fix;
5444{
5445 /* If set, min_mp is the last pool_entry that has a lower constraint
5446 than the one we are trying to add. */
5447 Mnode * min_mp = NULL;
5448 /* This can be negative, since it is only a constraint. */
5449 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5450 Mnode * mp;
5451
5452 /* If we can't reach the current pool from this insn, or if we can't
5453 insert this entry at the end of the pool without pushing other
5454 fixes out of range, then we don't try. This ensures that we
5455 can't fail later on. */
5456 if (min_address >= minipool_barrier->address
5457 || (minipool_vector_tail->min_address + fix->fix_size
5458 >= minipool_barrier->address))
5459 return NULL;
5460
5461 /* Scan the pool to see if a constant with the same value has
5462 already been added. While we are doing this, also note the
5463 location where we must insert the constant if it doesn't already
5464 exist. */
5465 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5466 {
5467 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5468 && fix->mode == mp->mode
5469 && (GET_CODE (fix->value) != CODE_LABEL
5470 || (CODE_LABEL_NUMBER (fix->value)
5471 == CODE_LABEL_NUMBER (mp->value)))
5472 && rtx_equal_p (fix->value, mp->value)
5473 /* Check that there is enough slack to move this entry to the
5474 end of the table (this is conservative). */
5475 && (mp->max_address
5476 > (minipool_barrier->address
5477 + minipool_vector_tail->offset
5478 + minipool_vector_tail->fix_size)))
5479 {
5480 mp->refcount++;
5481 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5482 }
5483
5484 if (min_mp != NULL)
5485 mp->min_address += fix->fix_size;
5486 else
5487 {
5488 /* Note the insertion point if necessary. */
5489 if (mp->min_address < min_address)
5490 min_mp = mp;
5491 else if (mp->max_address
5492 < minipool_barrier->address + mp->offset + fix->fix_size)
5493 {
5494 /* Inserting before this entry would push the fix beyond
5495 its maximum address (which can happen if we have
5496 re-located a forwards fix); force the new fix to come
5497 after it. */
5498 min_mp = mp;
5499 min_address = mp->min_address + fix->fix_size;
5500 }
5501 }
5502 }
5503
5504 /* We need to create a new entry. */
5505 mp = xmalloc (sizeof (* mp));
5506 mp->fix_size = fix->fix_size;
5507 mp->mode = fix->mode;
5508 mp->value = fix->value;
5509 mp->refcount = 1;
5510 mp->max_address = minipool_barrier->address + 65536;
5511
5512 mp->min_address = min_address;
5513
5514 if (min_mp == NULL)
5515 {
5516 mp->prev = NULL;
5517 mp->next = minipool_vector_head;
5518
5519 if (mp->next == NULL)
5520 {
5521 minipool_vector_tail = mp;
5522 minipool_vector_label = gen_label_rtx ();
5523 }
5524 else
5525 mp->next->prev = mp;
5526
5527 minipool_vector_head = mp;
5528 }
5529 else
5530 {
5531 mp->next = min_mp->next;
5532 mp->prev = min_mp;
5533 min_mp->next = mp;
da6558fd 5534
d5b7b3ae
RE
5535 if (mp->next != NULL)
5536 mp->next->prev = mp;
5537 else
5538 minipool_vector_tail = mp;
5539 }
5540
5541 /* Save the new entry. */
5542 min_mp = mp;
5543
5544 if (mp->prev)
5545 mp = mp->prev;
5546 else
5547 mp->offset = 0;
5548
5549 /* Scan over the following entries and adjust their offsets. */
5550 while (mp->next != NULL)
5551 {
5552 if (mp->next->min_address < mp->min_address + mp->fix_size)
5553 mp->next->min_address = mp->min_address + mp->fix_size;
5554
5555 if (mp->refcount)
5556 mp->next->offset = mp->offset + mp->fix_size;
5557 else
5558 mp->next->offset = mp->offset;
5559
5560 mp = mp->next;
5561 }
5562
5563 return min_mp;
5564}
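/* Added note (illustrative figures): a backward fix at address 0x1200
   on an insn with a 4084-byte negative range gives
   min_address = 0x1200 - 4084, a lower bound only; the entry can join
   the pool under construction only if that bound, plus room for the
   entry itself, still falls before the pool's barrier.  */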
5565
5566static void
5567assign_minipool_offsets (barrier)
5568 Mfix * barrier;
5569{
5570 HOST_WIDE_INT offset = 0;
5571 Mnode * mp;
5572
5573 minipool_barrier = barrier;
5574
5575 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5576 {
5577 mp->offset = offset;
da6558fd 5578
d5b7b3ae
RE
5579 if (mp->refcount > 0)
5580 offset += mp->fix_size;
5581 }
5582}
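/* Added note (illustrative): for three entries with fix_size 4, 8 and
   4 where the middle entry's refcount has dropped to zero, the
   assigned offsets are 0, 4 and 4 -- dead entries receive an offset
   but do not advance it, since dump_minipool will not emit them.  */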
5583
5584/* Output the literal table. */
5585static void
5586dump_minipool (scan)
5587 rtx scan;
5588{
5589 Mnode * mp;
5590 Mnode * nmp;
5591
5592 if (rtl_dump_file)
5593 fprintf (rtl_dump_file,
5594 ";; Emitting minipool after insn %u; address %ld\n",
5595 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5596
5597 scan = emit_label_after (gen_label_rtx (), scan);
5598 scan = emit_insn_after (gen_align_4 (), scan);
5599 scan = emit_label_after (minipool_vector_label, scan);
5600
5601 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5602 {
5603 if (mp->refcount > 0)
5604 {
5605 if (rtl_dump_file)
5606 {
5607 fprintf (rtl_dump_file,
5608 ";; Offset %u, min %ld, max %ld ",
5609 (unsigned) mp->offset, (unsigned long) mp->min_address,
5610 (unsigned long) mp->max_address);
5611 arm_print_value (rtl_dump_file, mp->value);
5612 fputc ('\n', rtl_dump_file);
5613 }
5614
5615 switch (mp->fix_size)
5616 {
5617#ifdef HAVE_consttable_1
5618 case 1:
5619 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5620 break;
5621
5622#endif
5623#ifdef HAVE_consttable_2
5624 case 2:
5625 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5626 break;
5627
5628#endif
5629#ifdef HAVE_consttable_4
5630 case 4:
5631 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5632 break;
5633
5634#endif
5635#ifdef HAVE_consttable_8
5636 case 8:
5637 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5638 break;
5639
5640#endif
5641 default:
5642 abort ();
5643 break;
5644 }
5645 }
5646
5647 nmp = mp->next;
5648 free (mp);
2b835d68
RE
5649 }
5650
d5b7b3ae
RE
5651 minipool_vector_head = minipool_vector_tail = NULL;
5652 scan = emit_insn_after (gen_consttable_end (), scan);
5653 scan = emit_barrier_after (scan);
2b835d68
RE
5654}
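/* Added sketch (invented labels and values) of the code this emits,
   together with the branch inserted by create_fix_barrier:

	b	.Lskip			@ jump around the pool
	.align	2
   .Lpool:				@ minipool_vector_label
	.word	0x12345678		@ 4-byte entry at offset 0
	.word	0xdeadbeef		@ 4-byte entry at offset 4
   .Lskip:							*/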
5655
d5b7b3ae
RE
5656/* Return the cost of forcibly inserting a barrier after INSN. */
5657static int
5658arm_barrier_cost (insn)
5659 rtx insn;
949d79eb 5660{
d5b7b3ae
RE
5661 /* Basing the location of the pool on the loop depth is preferable,
5662 but at the moment, the basic block information seems to be
5663 corrupt by this stage of the compilation. */
5664 int base_cost = 50;
5665 rtx next = next_nonnote_insn (insn);
5666
5667 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5668 base_cost -= 20;
5669
5670 switch (GET_CODE (insn))
5671 {
5672 case CODE_LABEL:
5673 /* It will always be better to place the table before the label, rather
5674 than after it. */
5675 return 50;
949d79eb 5676
d5b7b3ae
RE
5677 case INSN:
5678 case CALL_INSN:
5679 return base_cost;
5680
5681 case JUMP_INSN:
5682 return base_cost - 10;
5683
5684 default:
5685 return base_cost + 10;
5686 }
5687}
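/* Added note: with the weights above, a JUMP_INSN immediately
   followed by a CODE_LABEL costs 50 - 20 - 10 = 20, so the gap after
   an unconditional branch is the cheapest place to dump the pool,
   while splitting at a CODE_LABEL itself always costs 50.  */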
5688
5689/* Find the best place in the insn stream in the range
5690 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5691 Create the barrier by inserting a jump and add a new fix entry for
5692 it. */
5693static Mfix *
5694create_fix_barrier (fix, max_address)
5695 Mfix * fix;
5696 HOST_WIDE_INT max_address;
5697{
5698 HOST_WIDE_INT count = 0;
5699 rtx barrier;
5700 rtx from = fix->insn;
5701 rtx selected = from;
5702 int selected_cost;
5703 HOST_WIDE_INT selected_address;
5704 Mfix * new_fix;
5705 HOST_WIDE_INT max_count = max_address - fix->address;
5706 rtx label = gen_label_rtx ();
5707
5708 selected_cost = arm_barrier_cost (from);
5709 selected_address = fix->address;
5710
5711 while (from && count < max_count)
5712 {
5713 rtx tmp;
5714 int new_cost;
5715
5716 /* This code shouldn't have been called if there was a natural barrier
5717 within range. */
5718 if (GET_CODE (from) == BARRIER)
5719 abort ();
5720
5721 /* Count the length of this insn. */
5722 count += get_attr_length (from);
5723
5724 /* If there is a jump table, add its length. */
5725 tmp = is_jump_table (from);
5726 if (tmp != NULL)
5727 {
5728 count += get_jump_table_size (tmp);
5729
5730 /* Jump tables aren't in a basic block, so base the cost on
5731 the dispatch insn. If we select this location, we will
5732 still put the pool after the table. */
5733 new_cost = arm_barrier_cost (from);
5734
5735 if (count < max_count && new_cost <= selected_cost)
5736 {
5737 selected = tmp;
5738 selected_cost = new_cost;
5739 selected_address = fix->address + count;
5740 }
5741
5742 /* Continue after the dispatch table. */
5743 from = NEXT_INSN (tmp);
5744 continue;
5745 }
5746
5747 new_cost = arm_barrier_cost (from);
5748
5749 if (count < max_count && new_cost <= selected_cost)
5750 {
5751 selected = from;
5752 selected_cost = new_cost;
5753 selected_address = fix->address + count;
5754 }
5755
5756 from = NEXT_INSN (from);
5757 }
5758
5759 /* Create a new JUMP_INSN that branches around a barrier. */
5760 from = emit_jump_insn_after (gen_jump (label), selected);
5761 JUMP_LABEL (from) = label;
5762 barrier = emit_barrier_after (from);
5763 emit_label_after (label, barrier);
5764
5765 /* Create a minipool barrier entry for the new barrier. */
c7319d87 5766 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
d5b7b3ae
RE
5767 new_fix->insn = barrier;
5768 new_fix->address = selected_address;
5769 new_fix->next = fix->next;
5770 fix->next = new_fix;
5771
5772 return new_fix;
5773}
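/* Added sketch (invented label) of the instructions emitted above:

	b	.Lover		@ new jump around the barrier
	@ <barrier>		@ the minipool will be dumped here
   .Lover:

   The new Mfix is linked in after FIX so that the main loop in
   arm_reorg sees it as a natural barrier.  */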
5774
5775/* Record that there is a natural barrier in the insn stream at
5776 ADDRESS. */
949d79eb
RE
5777static void
5778push_minipool_barrier (insn, address)
2b835d68 5779 rtx insn;
d5b7b3ae 5780 HOST_WIDE_INT address;
2b835d68 5781{
c7319d87 5782 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 5783
949d79eb
RE
5784 fix->insn = insn;
5785 fix->address = address;
2b835d68 5786
949d79eb
RE
5787 fix->next = NULL;
5788 if (minipool_fix_head != NULL)
5789 minipool_fix_tail->next = fix;
5790 else
5791 minipool_fix_head = fix;
5792
5793 minipool_fix_tail = fix;
5794}
2b835d68 5795
d5b7b3ae
RE
5796/* Record INSN, which will need fixing up to load a value from the
5797 minipool. ADDRESS is the offset of the insn since the start of the
5798 function; LOC is a pointer to the part of the insn which requires
5799 fixing; VALUE is the constant that must be loaded, which is of type
5800 MODE. */
949d79eb
RE
5801static void
5802push_minipool_fix (insn, address, loc, mode, value)
5803 rtx insn;
d5b7b3ae
RE
5804 HOST_WIDE_INT address;
5805 rtx * loc;
949d79eb
RE
5806 enum machine_mode mode;
5807 rtx value;
5808{
c7319d87 5809 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
949d79eb
RE
5810
5811#ifdef AOF_ASSEMBLER
5812 /* PIC symbol references need to be converted into offsets into the
5813 based area. */
d5b7b3ae
RE
5814 /* XXX This shouldn't be done here. */
5815 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
949d79eb
RE
5816 value = aof_pic_entry (value);
5817#endif /* AOF_ASSEMBLER */
5818
5819 fix->insn = insn;
5820 fix->address = address;
5821 fix->loc = loc;
5822 fix->mode = mode;
d5b7b3ae 5823 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 5824 fix->value = value;
d5b7b3ae
RE
5825 fix->forwards = get_attr_pool_range (insn);
5826 fix->backwards = get_attr_neg_pool_range (insn);
5827 fix->minipool = NULL;
949d79eb
RE
5828
5829 /* If an insn doesn't have a range defined for it, then it isn't
5830 expecting to be reworked by this code. Better to abort now than
5831 to generate duff assembly code. */
d5b7b3ae 5832 if (fix->forwards == 0 && fix->backwards == 0)
949d79eb
RE
5833 abort ();
5834
d5b7b3ae
RE
5835 if (rtl_dump_file)
5836 {
5837 fprintf (rtl_dump_file,
5838 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5839 GET_MODE_NAME (mode),
5840 INSN_UID (insn), (unsigned long) address,
5841 -1 * (long)fix->backwards, (long)fix->forwards);
5842 arm_print_value (rtl_dump_file, fix->value);
5843 fprintf (rtl_dump_file, "\n");
5844 }
5845
6354dc9b 5846 /* Add it to the chain of fixes. */
949d79eb 5847 fix->next = NULL;
d5b7b3ae 5848
949d79eb
RE
5849 if (minipool_fix_head != NULL)
5850 minipool_fix_tail->next = fix;
5851 else
5852 minipool_fix_head = fix;
5853
5854 minipool_fix_tail = fix;
5855}
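/* Added note (illustrative figures): for a load at address 0x200
   whose pool_range and neg_pool_range attributes are 4096 and 4084,
   the referenced pool entry must land in the open interval
   (0x200 - 4084, 0x200 + 4096); insns with both attributes zero are
   rejected by the abort above.  */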
5856
d5b7b3ae 5857/* Scan INSN and note any of its operands that need fixing. */
949d79eb
RE
5858static void
5859note_invalid_constants (insn, address)
5860 rtx insn;
d5b7b3ae 5861 HOST_WIDE_INT address;
949d79eb
RE
5862{
5863 int opno;
5864
d5b7b3ae 5865 extract_insn (insn);
949d79eb 5866
5895f793 5867 if (!constrain_operands (1))
949d79eb
RE
5868 fatal_insn_not_found (insn);
5869
d5b7b3ae
RE
5870 /* Fill in recog_op_alt with information about the constraints of this
5871 insn. */
949d79eb
RE
5872 preprocess_constraints ();
5873
1ccbefce 5874 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 5875 {
6354dc9b 5876 /* Things we need to fix can only occur in inputs. */
36ab44c7 5877 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
5878 continue;
5879
5880 /* If this alternative is a memory reference, then any mention
5881 of constants in this alternative is really to fool reload
5882 into allowing us to accept one there. We need to fix them up
5883 now so that we output the right code. */
5884 if (recog_op_alt[opno][which_alternative].memory_ok)
5885 {
1ccbefce 5886 rtx op = recog_data.operand[opno];
949d79eb
RE
5887
5888 if (CONSTANT_P (op))
1ccbefce
RH
5889 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5890 recog_data.operand_mode[opno], op);
d5b7b3ae
RE
5891#if 0
5892 /* RWE: Now we look correctly at the operands for the insn,
5893 this shouldn't be needed any more. */
949d79eb 5894#ifndef AOF_ASSEMBLER
d5b7b3ae 5895 /* XXX Is this still needed? */
b15bca31 5896 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
1ccbefce
RH
5897 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5898 recog_data.operand_mode[opno],
5899 XVECEXP (op, 0, 0));
949d79eb 5900#endif
d5b7b3ae
RE
5901#endif
5902 else if (GET_CODE (op) == MEM
949d79eb
RE
5903 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5904 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
1ccbefce
RH
5905 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5906 recog_data.operand_mode[opno],
949d79eb
RE
5907 get_pool_constant (XEXP (op, 0)));
5908 }
2b835d68 5909 }
2b835d68
RE
5910}
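/* Added example (schematic rtl, invented register number): an insn
   such as
	(set (reg:SI 0) (const_int 0x12345678))
   whose matched alternative allows a memory operand has its constant
   recorded here via push_minipool_fix, so that arm_reorg can later
   rewrite it as a pc-relative load from the minipool.  */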
5911
5912void
5913arm_reorg (first)
5914 rtx first;
5915{
5916 rtx insn;
d5b7b3ae
RE
5917 HOST_WIDE_INT address = 0;
5918 Mfix * fix;
ad076f4e 5919
949d79eb 5920 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 5921
949d79eb
RE
5922 /* The first insn must always be a note, or the code below won't
5923 scan it properly. */
5924 if (GET_CODE (first) != NOTE)
5925 abort ();
5926
5927 /* Scan all the insns and record the operands that will need fixing. */
5928 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 5929 {
949d79eb 5930 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 5931 push_minipool_barrier (insn, address);
949d79eb
RE
5932 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5933 || GET_CODE (insn) == JUMP_INSN)
5934 {
5935 rtx table;
5936
5937 note_invalid_constants (insn, address);
5938 address += get_attr_length (insn);
d5b7b3ae 5939
949d79eb
RE
5940 /* If the insn is a vector jump, add the size of the table
5941 and skip the table. */
d5b7b3ae 5942 if ((table = is_jump_table (insn)) != NULL)
2b835d68 5943 {
d5b7b3ae 5944 address += get_jump_table_size (table);
949d79eb
RE
5945 insn = table;
5946 }
5947 }
5948 }
332072db 5949
d5b7b3ae
RE
5950 fix = minipool_fix_head;
5951
949d79eb 5952 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 5953 while (fix)
949d79eb 5954 {
d5b7b3ae
RE
5955 Mfix * ftmp;
5956 Mfix * fdel;
5957 Mfix * last_added_fix;
5958 Mfix * last_barrier = NULL;
5959 Mfix * this_fix;
949d79eb
RE
5960
5961 /* Skip any further barriers before the next fix. */
5962 while (fix && GET_CODE (fix->insn) == BARRIER)
5963 fix = fix->next;
5964
d5b7b3ae 5965 /* No more fixes. */
949d79eb
RE
5966 if (fix == NULL)
5967 break;
332072db 5968
d5b7b3ae 5969 last_added_fix = NULL;
2b835d68 5970
d5b7b3ae 5971 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 5972 {
949d79eb 5973 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 5974 {
d5b7b3ae
RE
5975 if (ftmp->address >= minipool_vector_head->max_address)
5976 break;
2b835d68 5977
d5b7b3ae 5978 last_barrier = ftmp;
2b835d68 5979 }
d5b7b3ae
RE
5980 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5981 break;
5982
5983 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 5984 }
949d79eb 5985
d5b7b3ae
RE
5986 /* If we found a barrier, drop back to that; any fixes that we
5987 could have reached but come after the barrier will now go in
5988 the next mini-pool. */
949d79eb
RE
5989 if (last_barrier != NULL)
5990 {
d5b7b3ae
RE
5991 /* Reduce the refcount for those fixes that won't go into this
5992 pool after all. */
5993 for (fdel = last_barrier->next;
5994 fdel && fdel != ftmp;
5995 fdel = fdel->next)
5996 {
5997 fdel->minipool->refcount--;
5998 fdel->minipool = NULL;
5999 }
6000
949d79eb
RE
6001 ftmp = last_barrier;
6002 }
6003 else
2bfa88dc 6004 {
d5b7b3ae
RE
6005	  /* FTMP is the first fix that we can't fit into this pool and
6006	     there are no natural barriers that we could use.  Insert a
6007 new barrier in the code somewhere between the previous
6008 fix and this one, and arrange to jump around it. */
6009 HOST_WIDE_INT max_address;
6010
6011 /* The last item on the list of fixes must be a barrier, so
6012 we can never run off the end of the list of fixes without
6013 last_barrier being set. */
6014 if (ftmp == NULL)
6015 abort ();
6016
6017 max_address = minipool_vector_head->max_address;
2bfa88dc
RE
6018 /* Check that there isn't another fix that is in range that
6019 we couldn't fit into this pool because the pool was
6020 already too large: we need to put the pool before such an
6021 instruction. */
d5b7b3ae
RE
6022 if (ftmp->address < max_address)
6023 max_address = ftmp->address;
6024
6025 last_barrier = create_fix_barrier (last_added_fix, max_address);
6026 }
6027
6028 assign_minipool_offsets (last_barrier);
6029
6030 while (ftmp)
6031 {
6032 if (GET_CODE (ftmp->insn) != BARRIER
6033 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6034 == NULL))
6035 break;
2bfa88dc 6036
d5b7b3ae 6037 ftmp = ftmp->next;
2bfa88dc 6038 }
949d79eb
RE
6039
6040 /* Scan over the fixes we have identified for this pool, fixing them
6041 up and adding the constants to the pool itself. */
d5b7b3ae 6042 for (this_fix = fix; this_fix && ftmp != this_fix;
949d79eb
RE
6043 this_fix = this_fix->next)
6044 if (GET_CODE (this_fix->insn) != BARRIER)
6045 {
949d79eb
RE
6046 rtx addr
6047 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6048 minipool_vector_label),
d5b7b3ae 6049 this_fix->minipool->offset);
949d79eb
RE
6050 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6051 }
6052
d5b7b3ae 6053 dump_minipool (last_barrier->insn);
949d79eb 6054 fix = ftmp;
2b835d68 6055 }
4b632bf1 6056
949d79eb
RE
6057 /* From now on we must synthesize any constants that we can't handle
6058 directly. This can happen if the RTL gets split during final
6059 instruction generation. */
4b632bf1 6060 after_arm_reorg = 1;
c7319d87
RE
6061
6062 /* Free the minipool memory. */
6063 obstack_free (&minipool_obstack, minipool_startobj);
2b835d68 6064}
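/* Added summary (schematic, not from the original source): after this
   pass a fixed-up constant load ends up as a pc-relative reference,
   e.g.
	ldr	r0, .Lpool+4
   where .Lpool stands for minipool_vector_label and 4 is the entry's
   assigned offset.  */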
cce8749e
CH
6065\f
6066/* Routines to output assembly language. */
6067
f3bb6135 6068/* If the rtx is the correct value then return the string of the number.
ff9940b0 6069 In this way we can ensure that valid double constants are generated even
6354dc9b 6070 when cross compiling. */
cd2b33d0 6071const char *
ff9940b0 6072fp_immediate_constant (x)
b5cc037f 6073 rtx x;
ff9940b0
RE
6074{
6075 REAL_VALUE_TYPE r;
6076 int i;
6077
6078 if (!fpa_consts_inited)
6079 init_fpa_table ();
6080
6081 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6082 for (i = 0; i < 8; i++)
6083 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6084 return strings_fpa[i];
f3bb6135 6085
ff9940b0
RE
6086 abort ();
6087}
6088
9997d19d 6089/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 6090static const char *
9997d19d 6091fp_const_from_val (r)
62b10bbc 6092 REAL_VALUE_TYPE * r;
9997d19d
RE
6093{
6094 int i;
6095
5895f793 6096 if (!fpa_consts_inited)
9997d19d
RE
6097 init_fpa_table ();
6098
6099 for (i = 0; i < 8; i++)
6100 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6101 return strings_fpa[i];
6102
6103 abort ();
6104}
ff9940b0 6105
cce8749e
CH
6106/* Output the operands of a LDM/STM instruction to STREAM.
6107 MASK is the ARM register set mask of which only bits 0-15 are important.
6d3d9133
NC
6108 REG is the base register, either the frame pointer or the stack pointer,
6109 INSTR is the possibly suffixed load or store instruction. */
cce8749e 6110
d5b7b3ae 6111static void
6d3d9133 6112print_multi_reg (stream, instr, reg, mask)
62b10bbc 6113 FILE * stream;
cd2b33d0 6114 const char * instr;
dd18ae56
NC
6115 int reg;
6116 int mask;
cce8749e
CH
6117{
6118 int i;
6119 int not_first = FALSE;
6120
1d5473cb 6121 fputc ('\t', stream);
dd18ae56 6122 asm_fprintf (stream, instr, reg);
1d5473cb 6123 fputs (", {", stream);
62b10bbc 6124
d5b7b3ae 6125 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
6126 if (mask & (1 << i))
6127 {
6128 if (not_first)
6129 fprintf (stream, ", ");
62b10bbc 6130
dd18ae56 6131 asm_fprintf (stream, "%r", i);
cce8749e
CH
6132 not_first = TRUE;
6133 }
f3bb6135 6134
6d3d9133 6135 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
f3bb6135 6136}
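/* Added example (hypothetical arguments): a call such as
	print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 0x4030);
   prints "ldmfd sp!, {r4, r5, lr}" -- mask bits 4, 5 and 14 -- with a
   trailing "^" appended in 26-bit (non-APCS-32) mode.  */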
cce8749e 6137
6354dc9b 6138/* Output a 'call' insn. */
cce8749e 6139
cd2b33d0 6140const char *
cce8749e 6141output_call (operands)
62b10bbc 6142 rtx * operands;
cce8749e 6143{
6354dc9b 6144 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 6145
62b10bbc 6146 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 6147 {
62b10bbc 6148 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 6149 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 6150 }
62b10bbc 6151
1d5473cb 6152 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 6153
6cfc7210 6154 if (TARGET_INTERWORK)
da6558fd
NC
6155 output_asm_insn ("bx%?\t%0", operands);
6156 else
6157 output_asm_insn ("mov%?\t%|pc, %0", operands);
6158
f3bb6135
RE
6159 return "";
6160}
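/* Added note: the sequence emitted above for a call through r0 is
	mov	lr, pc
	mov	pc, r0		@ "bx r0" when interworking
   the first mov capturing the return address before the branch.  */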
cce8749e 6161
ff9940b0
RE
6162static int
6163eliminate_lr2ip (x)
62b10bbc 6164 rtx * x;
ff9940b0
RE
6165{
6166 int something_changed = 0;
62b10bbc 6167 rtx x0 = * x;
ff9940b0
RE
6168 int code = GET_CODE (x0);
6169 register int i, j;
6f7d635c 6170 register const char * fmt;
ff9940b0
RE
6171
6172 switch (code)
6173 {
6174 case REG:
62b10bbc 6175 if (REGNO (x0) == LR_REGNUM)
ff9940b0 6176 {
62b10bbc 6177 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
6178 return 1;
6179 }
6180 return 0;
6181 default:
6354dc9b 6182 /* Scan through the sub-elements and change any references there. */
ff9940b0 6183 fmt = GET_RTX_FORMAT (code);
62b10bbc 6184
ff9940b0
RE
6185 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6186 if (fmt[i] == 'e')
6187 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6188 else if (fmt[i] == 'E')
6189 for (j = 0; j < XVECLEN (x0, i); j++)
6190 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 6191
ff9940b0
RE
6192 return something_changed;
6193 }
6194}
6195
6354dc9b 6196/* Output a 'call' insn that is a reference in memory. */
ff9940b0 6197
cd2b33d0 6198const char *
ff9940b0 6199output_call_mem (operands)
62b10bbc 6200 rtx * operands;
ff9940b0 6201{
6354dc9b
NC
6202 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6203 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
ff9940b0 6204 if (eliminate_lr2ip (&operands[0]))
1d5473cb 6205 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 6206
6cfc7210 6207 if (TARGET_INTERWORK)
da6558fd
NC
6208 {
6209 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6210 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6211 output_asm_insn ("bx%?\t%|ip", operands);
6212 }
6213 else
6214 {
6215 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6216 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6217 }
6218
f3bb6135
RE
6219 return "";
6220}
ff9940b0
RE
6221
6222
6223/* Output a move from arm registers to an fpu register.
6224   OPERANDS[0] is an fpu register.
6225   OPERANDS[1] is the first register of an arm register pair.  */
6226
cd2b33d0 6227const char *
ff9940b0 6228output_mov_long_double_fpu_from_arm (operands)
62b10bbc 6229 rtx * operands;
ff9940b0
RE
6230{
6231 int arm_reg0 = REGNO (operands[1]);
6232 rtx ops[3];
6233
62b10bbc
NC
6234 if (arm_reg0 == IP_REGNUM)
6235 abort ();
f3bb6135 6236
43cffd11
RE
6237 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6238 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6239 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6240
1d5473cb
RE
6241 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6242 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 6243
f3bb6135
RE
6244 return "";
6245}
ff9940b0
RE
6246
6247/* Output a move from an fpu register to arm registers.
6248   OPERANDS[0] is the first register of an arm register pair.
6249 OPERANDS[1] is an fpu register. */
6250
cd2b33d0 6251const char *
ff9940b0 6252output_mov_long_double_arm_from_fpu (operands)
62b10bbc 6253 rtx * operands;
ff9940b0
RE
6254{
6255 int arm_reg0 = REGNO (operands[0]);
6256 rtx ops[3];
6257
62b10bbc
NC
6258 if (arm_reg0 == IP_REGNUM)
6259 abort ();
f3bb6135 6260
43cffd11
RE
6261 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6262 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6263 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6264
1d5473cb
RE
6265 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6266 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
6267 return "";
6268}
ff9940b0
RE
6269
6270/* Output a move from arm registers to arm registers of a long double.
6271 OPERANDS[0] is the destination.
6272 OPERANDS[1] is the source. */
cd2b33d0 6273const char *
ff9940b0 6274output_mov_long_double_arm_from_arm (operands)
62b10bbc 6275 rtx * operands;
ff9940b0 6276{
6354dc9b 6277 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
6278 int dest_start = REGNO (operands[0]);
6279 int src_start = REGNO (operands[1]);
6280 rtx ops[2];
6281 int i;
6282
6283 if (dest_start < src_start)
6284 {
6285 for (i = 0; i < 3; i++)
6286 {
43cffd11
RE
6287 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6288 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6289 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6290 }
6291 }
6292 else
6293 {
6294 for (i = 2; i >= 0; i--)
6295 {
43cffd11
RE
6296 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6297 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6298 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6299 }
6300 }
f3bb6135 6301
ff9940b0
RE
6302 return "";
6303}
6304
6305
cce8749e
CH
6306/* Output a move from arm registers to an fpu register.
6307   OPERANDS[0] is an fpu register.
6308   OPERANDS[1] is the first register of an arm register pair.  */
6309
cd2b33d0 6310const char *
cce8749e 6311output_mov_double_fpu_from_arm (operands)
62b10bbc 6312 rtx * operands;
cce8749e
CH
6313{
6314 int arm_reg0 = REGNO (operands[1]);
6315 rtx ops[2];
6316
62b10bbc
NC
6317 if (arm_reg0 == IP_REGNUM)
6318 abort ();
6319
43cffd11
RE
6320 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6321 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6322 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6323 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
6324 return "";
6325}
cce8749e
CH
6326
6327/* Output a move from an fpu register to arm registers.
6328 OPERANDS[0] is the first registers of an arm register pair.
6329 OPERANDS[1] is an fpu register. */
6330
cd2b33d0 6331const char *
cce8749e 6332output_mov_double_arm_from_fpu (operands)
62b10bbc 6333 rtx * operands;
cce8749e
CH
6334{
6335 int arm_reg0 = REGNO (operands[0]);
6336 rtx ops[2];
6337
62b10bbc
NC
6338 if (arm_reg0 == IP_REGNUM)
6339 abort ();
f3bb6135 6340
43cffd11
RE
6341 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6342 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6343 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6344 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
6345 return "";
6346}
cce8749e
CH
6347
6348/* Output a move between double words.
6349 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6350 or MEM<-REG and all MEMs must be offsettable addresses. */
6351
cd2b33d0 6352const char *
cce8749e 6353output_move_double (operands)
aec3cfba 6354 rtx * operands;
cce8749e
CH
6355{
6356 enum rtx_code code0 = GET_CODE (operands[0]);
6357 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 6358 rtx otherops[3];
cce8749e
CH
6359
6360 if (code0 == REG)
6361 {
6362 int reg0 = REGNO (operands[0]);
6363
43cffd11 6364 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 6365
cce8749e
CH
6366 if (code1 == REG)
6367 {
6368 int reg1 = REGNO (operands[1]);
62b10bbc
NC
6369 if (reg1 == IP_REGNUM)
6370 abort ();
f3bb6135 6371
6354dc9b 6372 /* Ensure the second source is not overwritten. */
c1c2bc04 6373 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 6374 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 6375 else
6cfc7210 6376 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
6377 }
6378 else if (code1 == CONST_DOUBLE)
6379 {
226a5051
RE
6380 if (GET_MODE (operands[1]) == DFmode)
6381 {
6382 long l[2];
6383 union real_extract u;
6384
4e135bdd 6385 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
226a5051 6386 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
d5b7b3ae
RE
6387 otherops[1] = GEN_INT (l[1]);
6388 operands[1] = GEN_INT (l[0]);
226a5051 6389 }
c1c2bc04
RE
6390 else if (GET_MODE (operands[1]) != VOIDmode)
6391 abort ();
6392 else if (WORDS_BIG_ENDIAN)
6393 {
6394
6395 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6396 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6397 }
226a5051
RE
6398 else
6399 {
c1c2bc04 6400
226a5051
RE
6401 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6402 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6403 }
6cfc7210 6404
c1c2bc04
RE
6405 output_mov_immediate (operands);
6406 output_mov_immediate (otherops);
cce8749e
CH
6407 }
6408 else if (code1 == CONST_INT)
6409 {
56636818
JL
6410#if HOST_BITS_PER_WIDE_INT > 32
6411 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6412 what the upper word is. */
6413 if (WORDS_BIG_ENDIAN)
6414 {
6415 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6416 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6417 }
6418 else
6419 {
6420 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6421 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6422 }
6423#else
6354dc9b 6424 /* Sign extend the intval into the high-order word. */
c1c2bc04
RE
6425 if (WORDS_BIG_ENDIAN)
6426 {
6427 otherops[1] = operands[1];
6428 operands[1] = (INTVAL (operands[1]) < 0
6429 ? constm1_rtx : const0_rtx);
6430 }
ff9940b0 6431 else
c1c2bc04 6432 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 6433#endif
c1c2bc04
RE
6434 output_mov_immediate (otherops);
6435 output_mov_immediate (operands);
cce8749e
CH
6436 }
6437 else if (code1 == MEM)
6438 {
ff9940b0 6439 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 6440 {
ff9940b0 6441 case REG:
9997d19d 6442 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 6443 break;
2b835d68 6444
ff9940b0 6445 case PRE_INC:
6354dc9b 6446 abort (); /* Should never happen now. */
ff9940b0 6447 break;
2b835d68 6448
ff9940b0 6449 case PRE_DEC:
2b835d68 6450 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 6451 break;
2b835d68 6452
ff9940b0 6453 case POST_INC:
9997d19d 6454 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 6455 break;
2b835d68 6456
ff9940b0 6457 case POST_DEC:
6354dc9b 6458 abort (); /* Should never happen now. */
ff9940b0 6459 break;
2b835d68
RE
6460
6461 case LABEL_REF:
6462 case CONST:
6463 output_asm_insn ("adr%?\t%0, %1", operands);
6464 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6465 break;
6466
ff9940b0 6467 default:
aec3cfba
NC
6468 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6469 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 6470 {
2b835d68
RE
6471 otherops[0] = operands[0];
6472 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6473 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6474 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6475 {
6476 if (GET_CODE (otherops[2]) == CONST_INT)
6477 {
6478 switch (INTVAL (otherops[2]))
6479 {
6480 case -8:
6481 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6482 return "";
6483 case -4:
6484 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6485 return "";
6486 case 4:
6487 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6488 return "";
6489 }
6490 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6491 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6492 else
6493 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6494 }
6495 else
6496 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6497 }
6498 else
6499 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 6500
2b835d68
RE
6501 return "ldm%?ia\t%0, %M0";
6502 }
6503 else
6504 {
6505 otherops[1] = adj_offsettable_operand (operands[1], 4);
6506 /* Take care of overlapping base/data reg. */
6507 if (reg_mentioned_p (operands[0], operands[1]))
6508 {
6509 output_asm_insn ("ldr%?\t%0, %1", otherops);
6510 output_asm_insn ("ldr%?\t%0, %1", operands);
6511 }
6512 else
6513 {
6514 output_asm_insn ("ldr%?\t%0, %1", operands);
6515 output_asm_insn ("ldr%?\t%0, %1", otherops);
6516 }
cce8749e
CH
6517 }
6518 }
6519 }
2b835d68 6520 else
6354dc9b 6521 abort (); /* Constraints should prevent this. */
cce8749e
CH
6522 }
6523 else if (code0 == MEM && code1 == REG)
6524 {
62b10bbc
NC
6525 if (REGNO (operands[1]) == IP_REGNUM)
6526 abort ();
2b835d68 6527
ff9940b0
RE
6528 switch (GET_CODE (XEXP (operands[0], 0)))
6529 {
6530 case REG:
9997d19d 6531 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 6532 break;
2b835d68 6533
ff9940b0 6534 case PRE_INC:
6354dc9b 6535 abort (); /* Should never happen now. */
ff9940b0 6536 break;
2b835d68 6537
ff9940b0 6538 case PRE_DEC:
2b835d68 6539 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 6540 break;
2b835d68 6541
ff9940b0 6542 case POST_INC:
9997d19d 6543 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 6544 break;
2b835d68 6545
ff9940b0 6546 case POST_DEC:
6354dc9b 6547 abort (); /* Should never happen now. */
ff9940b0 6548 break;
2b835d68
RE
6549
6550 case PLUS:
6551 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6552 {
6553 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6554 {
6555 case -8:
6556 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6557 return "";
6558
6559 case -4:
6560 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6561 return "";
6562
6563 case 4:
6564 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6565 return "";
6566 }
6567 }
6568 /* Fall through */
6569
ff9940b0 6570 default:
cce8749e 6571 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 6572 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
6573 output_asm_insn ("str%?\t%1, %0", operands);
6574 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
6575 }
6576 }
2b835d68 6577 else
62b10bbc 6578    abort (); /* Constraints should prevent this. */
cce8749e 6579
9997d19d
RE
6580 return "";
6581}
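/* Added example (invented operands): a DImode load r0:r1 <- [r2, #4]
   takes the PLUS case above and emits the single instruction
	ldmib	r2, {r0, r1}
   offsets of -8, -4 and 4 map onto ldmdb/ldmda/ldmib; other offsets
   are handled by forming the address first or by two ldr insns
   ordered so an overlapping base register is read before it is
   overwritten.  */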
cce8749e
CH
6582
6583
6584/* Output an arbitrary MOV reg, #n.
6585 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6586
cd2b33d0 6587const char *
cce8749e 6588output_mov_immediate (operands)
62b10bbc 6589 rtx * operands;
cce8749e 6590{
f3bb6135 6591 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
6592 int n_ones = 0;
6593 int i;
6594
6595 /* Try to use one MOV */
cce8749e 6596 if (const_ok_for_arm (n))
f3bb6135 6597 {
9997d19d 6598 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
6599 return "";
6600 }
cce8749e
CH
6601
6602 /* Try to use one MVN */
f3bb6135 6603 if (const_ok_for_arm (~n))
cce8749e 6604 {
f3bb6135 6605 operands[1] = GEN_INT (~n);
9997d19d 6606 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 6607 return "";
cce8749e
CH
6608 }
6609
6354dc9b 6610 /* If all else fails, make it out of ORRs or BICs as appropriate. */
cce8749e
CH
6611
6612  for (i = 0; i < 32; i++)
6613 if (n & 1 << i)
6614 n_ones++;
6615
6354dc9b 6616 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
e5951263 6617 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
cce8749e 6618 else
d5b7b3ae 6619 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
f3bb6135
RE
6620
6621 return "";
6622}
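/* Added worked example: n = 0xffffff00 is not a valid ARM immediate,
   but ~n = 0xff is, so a single "mvn r0, #255" suffices; a value such
   as 0x00ff00ff (16 set bits) takes the MOV/ORR path and is built up
   by output_multi_immediate below.  */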
cce8749e
CH
6623
6624
6625/* Output an ADD r, s, #n where n may be too big for one instruction. If
6626 adding zero to one register, output nothing. */
6627
cd2b33d0 6628const char *
cce8749e 6629output_add_immediate (operands)
62b10bbc 6630 rtx * operands;
cce8749e 6631{
f3bb6135 6632 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
6633
6634 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6635 {
6636 if (n < 0)
6637 output_multi_immediate (operands,
9997d19d
RE
6638 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6639 -n);
cce8749e
CH
6640 else
6641 output_multi_immediate (operands,
9997d19d
RE
6642 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6643 n);
cce8749e 6644 }
f3bb6135
RE
6645
6646 return "";
6647}
cce8749e 6648
cce8749e
CH
6649/* Output a multiple immediate operation.
6650 OPERANDS is the vector of operands referred to in the output patterns.
6651 INSTR1 is the output pattern to use for the first constant.
6652 INSTR2 is the output pattern to use for subsequent constants.
6653 IMMED_OP is the index of the constant slot in OPERANDS.
6654 N is the constant value. */
6655
cd2b33d0 6656static const char *
cce8749e 6657output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc 6658 rtx * operands;
cd2b33d0
NC
6659 const char * instr1;
6660 const char * instr2;
f3bb6135
RE
6661 int immed_op;
6662 HOST_WIDE_INT n;
cce8749e 6663{
f3bb6135 6664#if HOST_BITS_PER_WIDE_INT > 32
e5951263 6665 n &= HOST_UINT (0xffffffff);
f3bb6135
RE
6666#endif
6667
cce8749e
CH
6668 if (n == 0)
6669 {
6670 operands[immed_op] = const0_rtx;
6354dc9b 6671 output_asm_insn (instr1, operands); /* Quick and easy output. */
cce8749e
CH
6672 }
6673 else
6674 {
6675 int i;
cd2b33d0 6676 const char * instr = instr1;
cce8749e 6677
6354dc9b 6678 /* Note that n is never zero here (which would give no output). */
cce8749e
CH
6679 for (i = 0; i < 32; i += 2)
6680 {
6681 if (n & (3 << i))
6682 {
f3bb6135
RE
6683 operands[immed_op] = GEN_INT (n & (255 << i));
6684 output_asm_insn (instr, operands);
cce8749e
CH
6685 instr = instr2;
6686 i += 6;
6687 }
6688 }
6689 }
cd2b33d0 6690
f3bb6135 6691 return "";
9997d19d 6692}
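/* Added worked example (hypothetical operand): for n = 0x00ff00ff the
   loop above finds the byte at bit 0 and the byte at bit 16 and emits
	mov	r0, #255
	orr	r0, r0, #16711680	@ 0x00ff0000
   one instruction per 8-bit chunk, each a valid ARM immediate.  */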
cce8749e
CH
6693
6694
6695/* Return the appropriate ARM instruction for the operation code.
6696 The returned result should not be overwritten. OP is the rtx of the
6697 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6698 was shifted. */
6699
cd2b33d0 6700const char *
cce8749e
CH
6701arithmetic_instr (op, shift_first_arg)
6702 rtx op;
f3bb6135 6703 int shift_first_arg;
cce8749e 6704{
9997d19d 6705 switch (GET_CODE (op))
cce8749e
CH
6706 {
6707 case PLUS:
f3bb6135
RE
6708 return "add";
6709
cce8749e 6710 case MINUS:
f3bb6135
RE
6711 return shift_first_arg ? "rsb" : "sub";
6712
cce8749e 6713 case IOR:
f3bb6135
RE
6714 return "orr";
6715
cce8749e 6716 case XOR:
f3bb6135
RE
6717 return "eor";
6718
cce8749e 6719 case AND:
f3bb6135
RE
6720 return "and";
6721
cce8749e 6722 default:
f3bb6135 6723 abort ();
cce8749e 6724 }
f3bb6135 6725}
cce8749e
CH
6726
6727
6728/* Ensure valid constant shifts and return the appropriate shift mnemonic
6729 for the operation code. The returned result should not be overwritten.
6730 OP is the rtx code of the shift.
9997d19d 6731   On exit, *AMOUNTP will be -1 if the shift is by a register, otherwise it
6354dc9b 6732   will hold the constant shift amount.  */
cce8749e 6733
cd2b33d0 6734static const char *
9997d19d
RE
6735shift_op (op, amountp)
6736 rtx op;
6737 HOST_WIDE_INT *amountp;
cce8749e 6738{
cd2b33d0 6739 const char * mnem;
e2c671ba 6740 enum rtx_code code = GET_CODE (op);
cce8749e 6741
9997d19d
RE
6742 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6743 *amountp = -1;
6744 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6745 *amountp = INTVAL (XEXP (op, 1));
6746 else
6747 abort ();
6748
e2c671ba 6749 switch (code)
cce8749e
CH
6750 {
6751 case ASHIFT:
6752 mnem = "asl";
6753 break;
f3bb6135 6754
cce8749e
CH
6755 case ASHIFTRT:
6756 mnem = "asr";
cce8749e 6757 break;
f3bb6135 6758
cce8749e
CH
6759 case LSHIFTRT:
6760 mnem = "lsr";
cce8749e 6761 break;
f3bb6135 6762
9997d19d
RE
6763 case ROTATERT:
6764 mnem = "ror";
9997d19d
RE
6765 break;
6766
ff9940b0 6767 case MULT:
e2c671ba
RE
6768 /* We never have to worry about the amount being other than a
6769 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
6770 if (*amountp != -1)
6771 *amountp = int_log2 (*amountp);
6772 else
6773 abort ();
f3bb6135
RE
6774 return "asl";
6775
cce8749e 6776 default:
f3bb6135 6777 abort ();
cce8749e
CH
6778 }
6779
e2c671ba
RE
6780 if (*amountp != -1)
6781 {
6782 /* This is not 100% correct, but follows from the desire to merge
6783 multiplication by a power of 2 with the recognizer for a
6784 shift. >=32 is not a valid shift for "asl", so we must try and
6785 output a shift that produces the correct arithmetical result.
ddd5a7c1 6786 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
6787 is not set correctly if we set the flags; but we never use the
6788 carry bit from such an operation, so we can ignore that. */
6789 if (code == ROTATERT)
6790	*amountp &= 31;		/* Rotate is just modulo 32.  */
6791 else if (*amountp != (*amountp & 31))
6792 {
6793 if (code == ASHIFT)
6794 mnem = "lsr";
6795 *amountp = 32;
6796 }
6797
6798 /* Shifts of 0 are no-ops. */
6799 if (*amountp == 0)
6800 return NULL;
6801 }
6802
9997d19d
RE
6803 return mnem;
6804}
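/* Added note: a (mult x 8) reaches here with *AMOUNTP == 8 and is
   rewritten as "asl" with *AMOUNTP = int_log2 (8) = 3; an ASHIFT by
   33 becomes "lsr" with *AMOUNTP forced to 32, which yields the same
   (zero) result since "asl #33" is not encodable.  */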
cce8749e
CH
6805
6806
6354dc9b 6807/* Obtain the shift from the POWER of two. */
18af7313 6808static HOST_WIDE_INT
cce8749e 6809int_log2 (power)
f3bb6135 6810 HOST_WIDE_INT power;
cce8749e 6811{
f3bb6135 6812 HOST_WIDE_INT shift = 0;
cce8749e 6813
e5951263 6814 while ((((HOST_INT (1)) << shift) & power) == 0)
cce8749e
CH
6815 {
6816 if (shift > 31)
f3bb6135 6817 abort ();
cce8749e
CH
6818 shift++;
6819 }
f3bb6135
RE
6820
6821 return shift;
6822}
cce8749e 6823
cce8749e
CH
6824/* Output a .ascii pseudo-op, keeping track of lengths. This is because
6825 /bin/as is horribly restrictive. */
6cfc7210 6826#define MAX_ASCII_LEN 51
cce8749e
CH
6827
6828void
6829output_ascii_pseudo_op (stream, p, len)
62b10bbc 6830 FILE * stream;
3cce094d 6831 const unsigned char * p;
cce8749e
CH
6832 int len;
6833{
6834 int i;
6cfc7210 6835 int len_so_far = 0;
cce8749e 6836
6cfc7210
NC
6837 fputs ("\t.ascii\t\"", stream);
6838
cce8749e
CH
6839 for (i = 0; i < len; i++)
6840 {
6841 register int c = p[i];
6842
6cfc7210 6843 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 6844 {
6cfc7210 6845 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 6846 len_so_far = 0;
cce8749e
CH
6847 }
6848
6cfc7210 6849 switch (c)
cce8749e 6850 {
6cfc7210
NC
6851 case TARGET_TAB:
6852 fputs ("\\t", stream);
6853 len_so_far += 2;
6854 break;
6855
6856 case TARGET_FF:
6857 fputs ("\\f", stream);
6858 len_so_far += 2;
6859 break;
6860
6861 case TARGET_BS:
6862 fputs ("\\b", stream);
6863 len_so_far += 2;
6864 break;
6865
6866 case TARGET_CR:
6867 fputs ("\\r", stream);
6868 len_so_far += 2;
6869 break;
6870
6871 case TARGET_NEWLINE:
6872 fputs ("\\n", stream);
6873 c = p [i + 1];
6874 if ((c >= ' ' && c <= '~')
6875 || c == TARGET_TAB)
6876 /* This is a good place for a line break. */
6877 len_so_far = MAX_ASCII_LEN;
6878 else
6879 len_so_far += 2;
6880 break;
6881
6882 case '\"':
6883 case '\\':
6884 putc ('\\', stream);
5895f793 6885 len_so_far++;
6cfc7210 6886 /* drop through. */
f3bb6135 6887
6cfc7210
NC
6888 default:
6889 if (c >= ' ' && c <= '~')
6890 {
6891 putc (c, stream);
5895f793 6892 len_so_far++;
6cfc7210
NC
6893 }
6894 else
6895 {
6896 fprintf (stream, "\\%03o", c);
6897 len_so_far += 4;
6898 }
6899 break;
cce8749e 6900 }
cce8749e 6901 }
f3bb6135 6902
cce8749e 6903 fputs ("\"\n", stream);
f3bb6135 6904}
cce8749e 6905\f
6d3d9133
NC
6906/* Compute a bit mask of which registers need to be
6907 saved on the stack for the current function. */
6908
6909static unsigned long
6910arm_compute_save_reg_mask ()
6911{
6912 unsigned int save_reg_mask = 0;
6913 unsigned int reg;
6914 unsigned long func_type = arm_current_func_type ();
6915
6916 if (IS_NAKED (func_type))
6917 /* This should never really happen. */
6918 return 0;
6919
6920 /* If we are creating a stack frame, then we must save the frame pointer,
6921 IP (which will hold the old stack pointer), LR and the PC. */
6922 if (frame_pointer_needed)
6923 save_reg_mask |=
6924 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
6925 | (1 << IP_REGNUM)
6926 | (1 << LR_REGNUM)
6927 | (1 << PC_REGNUM);
6928
6929 /* Volatile functions do not return, so there
6930 is no need to save any other registers. */
6931 if (IS_VOLATILE (func_type))
6932 return save_reg_mask;
6933
6934 if (ARM_FUNC_TYPE (func_type) == ARM_FT_ISR)
6935 {
6936 /* FIQ handlers have registers r8 - r12 banked, so
6937 we only need to check r0 - r7, they must save them. */
6938 for (reg = 0; reg < 8; reg++)
6939 if (regs_ever_live[reg])
6940 save_reg_mask |= (1 << reg);
6941 }
6942 else
6943 {
6944 /* In the normal case we only need to save those registers
6945 which are call saved and which are used by this function. */
6946 for (reg = 0; reg <= 10; reg++)
6947 if (regs_ever_live[reg] && ! call_used_regs [reg])
6948 save_reg_mask |= (1 << reg);
6949
6950 /* Handle the frame pointer as a special case. */
6951 if (! TARGET_APCS_FRAME
6952 && ! frame_pointer_needed
6953 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6954 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6955 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
6956
6957 /* If we aren't loading the PIC register,
6958 don't stack it even though it may be live. */
6959 if (flag_pic
6960 && ! TARGET_SINGLE_PIC_BASE
6961 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6962 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
6963 }
6964
6965 /* Decide if we need to save the link register.
6966 Interrupt routines have their own banked link register,
6967 so they never need to save it.
6968   Otherwise if we do not use the link register we do not need to save
6969 it. If we are pushing other registers onto the stack however, we
6970 can save an instruction in the epilogue by pushing the link register
6971 now and then popping it back into the PC. This incurs extra memory
6972 accesses though, so we only do it when optimising for size, and only
6973 if we know that we will not need a fancy return sequence. */
6974 if (! IS_INTERRUPT (func_type)
6975 && (regs_ever_live [LR_REGNUM]
6976 || (save_reg_mask
6977 && optimize_size
6978 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)))
6979 save_reg_mask |= 1 << LR_REGNUM;
6980
6f7ebcbb
NC
6981 if (cfun->machine->lr_save_eliminated)
6982 save_reg_mask &= ~ (1 << LR_REGNUM);
6983
6d3d9133
NC
6984 return save_reg_mask;
6985}
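/* Added worked example (hypothetical function): a normal APCS-32
   function that needs a stack frame and uses r4 and r5 gets the mask
     (1 << 4) | (1 << 5) | (1 << 11) | (1 << 12) | (1 << 14) | (1 << 15)
   == 0xd830, i.e. {r4, r5, fp, ip, lr, pc}, assuming the hard frame
   pointer is r11 and IP is r12.  */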
6986
6987/* Generate a function exit sequence. If REALLY_RETURN is true, then do
6988 everything bar the final return instruction. */
ff9940b0 6989
cd2b33d0 6990const char *
84ed5e79 6991output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
6992 rtx operand;
6993 int really_return;
84ed5e79 6994 int reverse;
ff9940b0 6995{
6d3d9133 6996 char conditional[10];
ff9940b0 6997 char instr[100];
6d3d9133
NC
6998 int reg;
6999 unsigned long live_regs_mask;
7000 unsigned long func_type;
7001
7002 func_type = arm_current_func_type ();
e2c671ba 7003
6d3d9133 7004 if (IS_NAKED (func_type))
d5b7b3ae 7005 return "";
6d3d9133
NC
7006
7007 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 7008 {
e2c671ba 7009 /* If this function was declared non-returning, and we have found a tail
3a5a4282
PB
7010 call, then we have to trust that the called function won't return. */
7011 if (really_return)
7012 {
7013 rtx ops[2];
7014
7015 /* Otherwise, trap an attempted return by aborting. */
7016 ops[0] = operand;
7017 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7018 : "abort");
7019 assemble_external_libcall (ops[1]);
7020 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7021 }
7022
e2c671ba
RE
7023 return "";
7024 }
6d3d9133 7025
5895f793 7026 if (current_function_calls_alloca && !really_return)
62b10bbc 7027 abort ();
ff9940b0 7028
6d3d9133
NC
7029 /* Construct the conditional part of the instruction(s) to be emitted. */
7030 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
d5b7b3ae 7031
6d3d9133 7032 return_used_this_function = 1;
ff9940b0 7033
6d3d9133 7034 live_regs_mask = arm_compute_save_reg_mask ();
ff9940b0 7035
3a5a4282 7036 /* On some ARM architectures it is faster to use LDR rather than LDM to
6d3d9133
NC
7037 load a single register. On other architectures, the cost is the same.
7038 In 26 bit mode we have to use LDM in order to be able to restore the CPSR. */
7039 if ((live_regs_mask == (1 << LR_REGNUM))
e46ccf7c
PB
7040 && ! TARGET_INTERWORK
7041 && ! IS_INTERRUPT (func_type)
6d3d9133
NC
7042 && (! really_return || TARGET_APCS_32))
7043 {
7044 if (! really_return)
7045 sprintf (instr, "ldr%s\t%%|lr, [%%|sp], #4", conditional);
7046 else
7047 sprintf (instr, "ldr%s\t%%|pc, [%%|sp], #4", conditional);
7048 }
7049 else if (live_regs_mask)
7050 {
7051 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7052 /* There are two possible reasons for the IP register being saved.
7053 Either a stack frame was created, in which case IP contains the
7054	 old stack pointer, or an ISR routine corrupted it.  If this is an
7055 ISR routine then just restore IP, otherwise restore IP into SP. */
7056 if (! IS_INTERRUPT (func_type))
7057 {
7058 live_regs_mask &= ~ (1 << IP_REGNUM);
7059 live_regs_mask |= (1 << SP_REGNUM);
7060 }
f3bb6135 7061
6d3d9133 7062 /* Generate the load multiple instruction to restore the registers. */
ff9940b0 7063 if (frame_pointer_needed)
6d3d9133 7064 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
ff9940b0 7065 else
6d3d9133 7066 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
f3bb6135 7067
6d3d9133
NC
7068 for (reg = 0; reg <= SP_REGNUM; reg++)
7069 if (live_regs_mask & (1 << reg))
7070 {
1d5473cb 7071 strcat (instr, "%|");
6d3d9133
NC
7072 strcat (instr, reg_names[reg]);
7073 strcat (instr, ", ");
7074 }
f3bb6135 7075
6d3d9133
NC
7076 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7077 {
7078 /* If we are not restoring the LR register then we will
7079 have added one too many commas to the list above.
7080 Replace it with a closing brace. */
7081 instr [strlen (instr) - 2] = '}';
7082 }
ff9940b0 7083 else
1d5473cb
RE
7084 {
7085 strcat (instr, "%|");
6d3d9133
NC
7086
7087 /* At this point there should only be one or two registers left in
7088 live_regs_mask: always LR, and possibly PC if we created a stack
7089 frame. LR contains the return address. If we do not have any
7090 special requirements for function exit (eg interworking, or ISR)
7091 then we can load this value directly into the PC and save an
7092 instruction. */
7093 if (! TARGET_INTERWORK
7094 && ! IS_INTERRUPT (func_type)
7095 && really_return)
7096 strcat (instr, reg_names [PC_REGNUM]);
da6558fd 7097 else
6d3d9133
NC
7098 strcat (instr, reg_names [LR_REGNUM]);
7099
7100 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
1d5473cb 7101 }
da6558fd 7102
6d3d9133 7103 if (really_return)
da6558fd 7104 {
6d3d9133
NC
7105 /* See if we need to generate an extra instruction to
7106 perform the actual function return. */
7107 switch ((int) ARM_FUNC_TYPE (func_type))
7108 {
7109 case ARM_FT_ISR:
7110 case ARM_FT_FIQ:
7111 output_asm_insn (instr, & operand);
7112
7113 strcpy (instr, "sub");
7114 strcat (instr, conditional);
7115 strcat (instr, "s\t%|pc, %|lr, #4");
7116 break;
7117
7118 case ARM_FT_EXCEPTION:
7119 output_asm_insn (instr, & operand);
7120
7121 strcpy (instr, "mov");
7122 strcat (instr, conditional);
7123 strcat (instr, "s\t%|pc, %|lr");
7124 break;
7125
7126 case ARM_FT_INTERWORKED:
7127 output_asm_insn (instr, & operand);
da6558fd 7128
6d3d9133
NC
7129 strcpy (instr, "bx");
7130 strcat (instr, conditional);
7131 strcat (instr, "\t%|lr");
7132 break;
7133
7134 default:
7135 /* The return has already been handled
7136 by loading the LR into the PC. */
7137 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7138 {
7139 output_asm_insn (instr, & operand);
7140
7141 strcpy (instr, "mov");
7142 strcat (instr, conditional);
7143 if (! TARGET_APCS_32)
7144 strcat (instr, "s");
7145 strcat (instr, "\t%|pc, %|lr");
7146 }
7147 break;
7148 }
da6558fd 7149 }
ff9940b0
RE
7150 }
7151 else if (really_return)
7152 {
6d3d9133
NC
7153 switch ((int) ARM_FUNC_TYPE (func_type))
7154 {
7155 case ARM_FT_ISR:
7156 case ARM_FT_FIQ:
7157 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7158 break;
7159
7160 case ARM_FT_INTERWORKED:
7161 sprintf (instr, "bx%s\t%%|lr", conditional);
7162 break;
7163
7164 case ARM_FT_EXCEPTION:
7165 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7166 break;
7167
7168 default:
7169 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7170 conditional, TARGET_APCS_32 ? "" : "s");
7171 break;
7172 }
ff9940b0 7173 }
6d3d9133
NC
7174 else
7175 /* Nothing to load off the stack, and
7176 no return instruction to generate. */
7177 return "";
f3bb6135 7178
6d3d9133
NC
7179 output_asm_insn (instr, & operand);
7180
ff9940b0
RE
7181 return "";
7182}
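
#if 0
/* Illustrative sketch only, not part of the compiler proper: the
   essence of the return-sequence selection above, collapsed into a
   single lookup.  The helper name is hypothetical; "%|" is the
   register-prefix escape used throughout this file.  */
static const char *
return_insn_template (func_type, apcs_32)
     unsigned long func_type;
     int apcs_32;
{
  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    case ARM_FT_ISR:
    case ARM_FT_FIQ:         return "subs\t%|pc, %|lr, #4";
    case ARM_FT_EXCEPTION:   return "movs\t%|pc, %|lr";
    case ARM_FT_INTERWORKED: return "bx\t%|lr";
    default:                 return apcs_32 ? "mov\t%|pc, %|lr"
                                            : "movs\t%|pc, %|lr";
    }
}
#endif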
7183
ef179a26
NC
7184/* Write the function name into the code section, directly preceding
7185 the function prologue.
7186
7187 Code will be output similar to this:
7188 t0
7189 .ascii "arm_poke_function_name", 0
7190 .align
7191 t1
7192 .word 0xff000000 + (t1 - t0)
7193 arm_poke_function_name
7194 mov ip, sp
7195 stmfd sp!, {fp, ip, lr, pc}
7196 sub fp, ip, #4
7197
7198 When performing a stack backtrace, code can inspect the value
7199 of 'pc' stored at 'fp' + 0. If the trace function then looks
7200 at location pc - 12 and the top 8 bits are set, then we know
7201 that there is a function name embedded immediately preceding this
7202 location, whose length is ((pc[-3]) & 0x00ffffff).
7203
7204 We assume that pc is declared as a pointer to an unsigned long.
7205
7206 It is of no benefit to output the function name if we are assembling
7207 a leaf function. These function types will not contain a stack
7208 backtrace structure, therefore it is not possible to determine the
7209 function name. */
7210
7211void
7212arm_poke_function_name (stream, name)
7213 FILE * stream;
7214 char * name;
7215{
7216 unsigned long alignlength;
7217 unsigned long length;
7218 rtx x;
7219
d5b7b3ae
RE
7220 length = strlen (name) + 1;
7221 alignlength = ROUND_UP (length);
ef179a26 7222
949d79eb 7223 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 7224 ASM_OUTPUT_ALIGN (stream, 2);
e5951263 7225 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
ef179a26
NC
7226 ASM_OUTPUT_INT (stream, x);
7227}
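
#if 0
/* Illustrative sketch only: how a backtrace routine might recover a
   name poked by the function above, given the layout it documents.
   PC is assumed to be a pointer to unsigned long and the helper name
   is hypothetical.  */
static const char *
recover_poked_function_name (pc)
     unsigned long * pc;
{
  unsigned long marker = pc[-3];	/* The marker word at pc - 12.  */

  if ((marker & 0xff000000) != 0xff000000)
    return 0;				/* No name was poked here.  */

  /* The low 24 bits hold the padded length of the name, which ends
     immediately before the marker word.  */
  return (const char *) pc - 12 - (marker & 0x00ffffff);
}
#endif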
7228
6d3d9133
NC
7229/* Place some comments into the assembler stream
7230 describing the current function. */
7231
cce8749e 7232void
d5b7b3ae 7233output_arm_prologue (f, frame_size)
6cfc7210 7234 FILE * f;
cce8749e
CH
7235 int frame_size;
7236{
6d3d9133
NC
7237 unsigned long func_type;
7238
7239 /* Sanity check. */
abaa26e5 7240 if (arm_ccfsm_state || arm_target_insn)
6d3d9133 7241 abort ();
31fdb4d5 7242
6d3d9133
NC
7243 func_type = arm_current_func_type ();
7244
7245 switch ((int) ARM_FUNC_TYPE (func_type))
7246 {
7247 default:
7248 case ARM_FT_NORMAL:
7249 break;
7250 case ARM_FT_INTERWORKED:
7251 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7252 break;
7253 case ARM_FT_EXCEPTION_HANDLER:
7254 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7255 break;
7256 case ARM_FT_ISR:
7257 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7258 break;
7259 case ARM_FT_FIQ:
7260 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7261 break;
7262 case ARM_FT_EXCEPTION:
7263 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7264 break;
7265 }
ff9940b0 7266
6d3d9133
NC
7267 if (IS_NAKED (func_type))
7268 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7269
7270 if (IS_VOLATILE (func_type))
7271 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7272
7273 if (IS_NESTED (func_type))
7274 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7275
dd18ae56
NC
7276 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7277 current_function_args_size,
7278 current_function_pretend_args_size, frame_size);
6d3d9133 7279
dd18ae56
NC
7280 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
7281 frame_pointer_needed,
7282 current_function_anonymous_args);
cce8749e 7283
6f7ebcbb
NC
7284 if (cfun->machine->lr_save_eliminated)
7285 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7286
32de079a
RE
7287#ifdef AOF_ASSEMBLER
7288 if (flag_pic)
dd18ae56 7289 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 7290#endif
6d3d9133
NC
7291
7292 return_used_this_function = 0;
f3bb6135 7293}
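
/* For illustration (hypothetical output): for a nested function with
   a frame, the annotations emitted above might read:

	@ Nested: function declared inside another function.
	@ args = 0, pretend = 0, frame = 8
	@ frame_needed = 1, current_function_anonymous_args = 0
*/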
cce8749e 7294
cd2b33d0 7295const char *
0616531f
RE
7296arm_output_epilogue (really_return)
7297 int really_return;
cce8749e 7298{
949d79eb 7299 int reg;
6f7ebcbb 7300 unsigned long saved_regs_mask;
6d3d9133 7301 unsigned long func_type;
6354dc9b 7302 /* If we need this, then it will always be at least this much. */
b111229a 7303 int floats_offset = 12;
cce8749e 7304 rtx operands[3];
949d79eb 7305 int frame_size = get_frame_size ();
d5b7b3ae 7306 FILE * f = asm_out_file;
6d3d9133 7307 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
cce8749e 7308
6d3d9133
NC
7309 /* If we have already generated the return instruction
7310 then it is futile to generate anything else. */
b36ba79f 7311 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 7312 return "";
cce8749e 7313
6d3d9133 7314 func_type = arm_current_func_type ();
d5b7b3ae 7315
6d3d9133
NC
7316 if (IS_NAKED (func_type))
7317 /* Naked functions don't have epilogues. */
7318 return "";
0616531f 7319
6d3d9133 7320 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 7321 {
86efdc8e 7322 rtx op;
6d3d9133
NC
7323
7324 /* A volatile function should never return. Call abort. */
ed0e6530 7325 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 7326 assemble_external_libcall (op);
e2c671ba 7327 output_asm_insn ("bl\t%a0", &op);
6d3d9133 7328
949d79eb 7329 return "";
e2c671ba
RE
7330 }
7331
6d3d9133
NC
7332 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7333 && ! really_return)
7334 /* If we are throwing an exception, then we really must
7335 be doing a return, so we can't tail-call. */
7336 abort ();
7337
6f7ebcbb 7338 saved_regs_mask = arm_compute_save_reg_mask ();
6d3d9133
NC
7339
7340 /* Compute how far away the floats will be. */
7341 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
6f7ebcbb 7342 if (saved_regs_mask & (1 << reg))
6ed30148 7343 floats_offset += 4;
6d3d9133 7344
ff9940b0 7345 if (frame_pointer_needed)
cce8749e 7346 {
b111229a
RE
7347 if (arm_fpu_arch == FP_SOFT2)
7348 {
d5b7b3ae 7349 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 7350 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7351 {
7352 floats_offset += 12;
dd18ae56
NC
7353 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7354 reg, FP_REGNUM, floats_offset);
b111229a
RE
7355 }
7356 }
7357 else
7358 {
d5b7b3ae 7359 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7360
d5b7b3ae 7361 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 7362 {
5895f793 7363 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7364 {
7365 floats_offset += 12;
6cfc7210 7366
6354dc9b 7367 /* We can't unstack more than four registers at once. */
b111229a
RE
7368 if (start_reg - reg == 3)
7369 {
dd18ae56
NC
7370 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7371 reg, FP_REGNUM, floats_offset);
b111229a
RE
7372 start_reg = reg - 1;
7373 }
7374 }
7375 else
7376 {
7377 if (reg != start_reg)
dd18ae56
NC
7378 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7379 reg + 1, start_reg - reg,
7380 FP_REGNUM, floats_offset);
b111229a
RE
7381 start_reg = reg - 1;
7382 }
7383 }
7384
7385 /* Just in case the last register checked also needs unstacking. */
7386 if (reg != start_reg)
dd18ae56
NC
7387 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7388 reg + 1, start_reg - reg,
7389 FP_REGNUM, floats_offset);
b111229a 7390 }
6d3d9133 7391
6f7ebcbb 7392 /* saved_regs_mask should contain the IP, which at the time of stack
6d3d9133
NC
7393 frame generation actually contains the old stack pointer. So a
7394 quick way to unwind the stack is just to pop the IP register directly
7395 into the stack pointer. */
6f7ebcbb 7396 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
6d3d9133 7397 abort ();
6f7ebcbb
NC
7398 saved_regs_mask &= ~ (1 << IP_REGNUM);
7399 saved_regs_mask |= (1 << SP_REGNUM);
6d3d9133 7400
6f7ebcbb 7401 /* There are two registers left in saved_regs_mask - LR and PC. We
6d3d9133
NC
7402 only need to restore the LR register (the return address), but to
7403 save time we can load it directly into the PC, unless we need a
7404 special function exit sequence, or we are not really returning. */
7405 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7406 /* Delete the LR from the register mask, so that the LR on
7407 the stack is loaded into the PC in the register mask. */
6f7ebcbb 7408 saved_regs_mask &= ~ (1 << LR_REGNUM);
b111229a 7409 else
6f7ebcbb 7410 saved_regs_mask &= ~ (1 << PC_REGNUM);
6d3d9133 7411
6f7ebcbb 7412 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
cce8749e
CH
7413 }
7414 else
7415 {
d2288d8d 7416 /* Restore stack pointer if necessary. */
56636818 7417 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
7418 {
7419 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
7420 operands[2] = GEN_INT (frame_size
7421 + current_function_outgoing_args_size);
d2288d8d
TG
7422 output_add_immediate (operands);
7423 }
7424
b111229a
RE
7425 if (arm_fpu_arch == FP_SOFT2)
7426 {
d5b7b3ae 7427 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 7428 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
7429 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7430 reg, SP_REGNUM);
b111229a
RE
7431 }
7432 else
7433 {
d5b7b3ae 7434 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 7435
d5b7b3ae 7436 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 7437 {
5895f793 7438 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7439 {
7440 if (reg - start_reg == 3)
7441 {
dd18ae56
NC
7442 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7443 start_reg, SP_REGNUM);
b111229a
RE
7444 start_reg = reg + 1;
7445 }
7446 }
7447 else
7448 {
7449 if (reg != start_reg)
dd18ae56
NC
7450 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7451 start_reg, reg - start_reg,
7452 SP_REGNUM);
6cfc7210 7453
b111229a
RE
7454 start_reg = reg + 1;
7455 }
7456 }
7457
7458 /* Just in case the last register checked also needs unstacking. */
7459 if (reg != start_reg)
dd18ae56
NC
7460 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7461 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
7462 }
7463
6d3d9133
NC
7464 /* If we can, restore the LR into the PC. */
7465 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7466 && really_return
7467 && current_function_pretend_args_size == 0
6f7ebcbb 7468 && saved_regs_mask & (1 << LR_REGNUM))
cce8749e 7469 {
6f7ebcbb
NC
7470 saved_regs_mask &= ~ (1 << LR_REGNUM);
7471 saved_regs_mask |= (1 << PC_REGNUM);
6d3d9133 7472 }
d5b7b3ae 7473
6d3d9133
NC
7474 /* Load the registers off the stack. If we only have one register
7475 to load use the LDR instruction - it is faster. */
6f7ebcbb 7476 if (saved_regs_mask == (1 << LR_REGNUM))
6d3d9133
NC
7477 {
7478 /* The exception handler ignores the LR, so we do
7479 not really need to load it off the stack. */
7480 if (eh_ofs)
7481 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
32de079a 7482 else
6d3d9133 7483 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
cce8749e 7484 }
6f7ebcbb
NC
7485 else if (saved_regs_mask)
7486 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
6d3d9133
NC
7487
7488 if (current_function_pretend_args_size)
cce8749e 7489 {
6d3d9133
NC
7490 /* Unwind the pre-pushed regs. */
7491 operands[0] = operands[1] = stack_pointer_rtx;
7492 operands[2] = GEN_INT (current_function_pretend_args_size);
7493 output_add_immediate (operands);
7494 }
7495 }
32de079a 7496
9b598fa0 7497#if 0
6d3d9133
NC
7498 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7499 /* Adjust the stack to remove the exception handler stuff. */
7500 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7501 REGNO (eh_ofs));
9b598fa0 7502#endif
b111229a 7503
6d3d9133
NC
7504 if (! really_return)
7505 return "";
d5b7b3ae 7506
6d3d9133
NC
7507 /* Generate the return instruction. */
7508 switch ((int) ARM_FUNC_TYPE (func_type))
7509 {
7510 case ARM_FT_EXCEPTION_HANDLER:
7511 /* Even in 26-bit mode we do a mov (rather than a movs)
7512 because we don't have the PSR bits set in the address. */
7513 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7514 break;
0616531f 7515
6d3d9133
NC
7516 case ARM_FT_ISR:
7517 case ARM_FT_FIQ:
7518 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7519 break;
7520
7521 case ARM_FT_EXCEPTION:
7522 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7523 break;
7524
7525 case ARM_FT_INTERWORKED:
7526 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7527 break;
7528
7529 default:
7530 if (frame_pointer_needed)
7531 /* If we used the frame pointer then the return address
7532 will have been loaded off the stack directly into the
7533 PC, so there is no need to issue a MOV instruction
7534 here. */
7535 ;
7536 else if (current_function_pretend_args_size == 0
6f7ebcbb 7537 && (saved_regs_mask & (1 << LR_REGNUM)))
6d3d9133
NC
7538 /* Similarly we may have been able to load LR into the PC
7539 even if we did not create a stack frame. */
7540 ;
7541 else if (TARGET_APCS_32)
7542 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7543 else
7544 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7545 break;
cce8749e 7546 }
f3bb6135 7547
949d79eb
RE
7548 return "";
7549}
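
/* For illustration (hypothetical output): in the frame-pointer case
   the epilogue above typically collapses into a single instruction,

	ldmea	fp, {fp, sp, pc}

   which restores the saved frame pointer, cuts the stack back through
   the saved IP value, and returns by loading the saved LR straight
   into the PC.  */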
7550
7551void
eb3921e8 7552output_func_epilogue (frame_size)
949d79eb
RE
7553 int frame_size;
7554{
d5b7b3ae
RE
7555 if (TARGET_THUMB)
7556 {
7557 /* ??? Probably not safe to set this here, since it assumes that a
7558 function will be emitted as assembly immediately after we generate
7559 RTL for it. This does not happen for inline functions. */
7560 return_used_this_function = 0;
7561 }
7562 else
7563 {
7564 if (use_return_insn (FALSE)
7565 && return_used_this_function
7566 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 7567 && !frame_pointer_needed)
d5b7b3ae 7568 abort ();
f3bb6135 7569
d5b7b3ae
RE
7570 /* Reset the ARM-specific per-function variables. */
7571 current_function_anonymous_args = 0;
7572 after_arm_reorg = 0;
7573 }
f3bb6135 7574}
e2c671ba 7575
2c849145
JM
7576/* Generate and emit an insn that we will recognize as a push_multi.
7577 Unfortunately, since this insn does not reflect very well the actual
7578 semantics of the operation, we need to annotate the insn for the benefit
7579 of DWARF2 frame unwind information. */
6d3d9133 7580
2c849145 7581static rtx
e2c671ba
RE
7582emit_multi_reg_push (mask)
7583 int mask;
7584{
7585 int num_regs = 0;
9b598fa0 7586 int num_dwarf_regs;
e2c671ba
RE
7587 int i, j;
7588 rtx par;
2c849145 7589 rtx dwarf;
87e27392 7590 int dwarf_par_index;
2c849145 7591 rtx tmp, reg;
e2c671ba 7592
d5b7b3ae 7593 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 7594 if (mask & (1 << i))
5895f793 7595 num_regs++;
e2c671ba
RE
7596
7597 if (num_regs == 0 || num_regs > 16)
7598 abort ();
7599
9b598fa0
RE
7600 /* We don't record the PC in the dwarf frame information. */
7601 num_dwarf_regs = num_regs;
7602 if (mask & (1 << PC_REGNUM))
7603 num_dwarf_regs--;
7604
87e27392
NC
7605 /* For the body of the insn we are going to generate an UNSPEC in
7606 parallel with several USEs. This allows the insn to be recognised
7607 by the push_multi pattern in the arm.md file. The insn looks
7608 something like this:
7609
7610 (parallel [
b15bca31
RE
7611 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7612 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
87e27392
NC
7613 (use (reg:SI 11 fp))
7614 (use (reg:SI 12 ip))
7615 (use (reg:SI 14 lr))
7616 (use (reg:SI 15 pc))
7617 ])
7618
7619 For the frame note however, we try to be more explicit and actually
7620 show each register being stored into the stack frame, plus a (single)
7621 decrement of the stack pointer. We do it this way in order to be
7622 friendly to the stack unwinding code, which only wants to see a single
7623 stack decrement per instruction. The RTL we generate for the note looks
7624 something like this:
7625
7626 (sequence [
7627 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7628 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7629 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7630 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7631 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
87e27392
NC
7632 ])
7633
7634 This sequence is used both by the code to support stack unwinding for
7635 exceptions handlers and the code to generate dwarf2 frame debugging. */
7636
43cffd11 7637 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
9b598fa0 7638 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
2c849145 7639 RTX_FRAME_RELATED_P (dwarf) = 1;
87e27392 7640 dwarf_par_index = 1;
e2c671ba 7641
d5b7b3ae 7642 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba
RE
7643 {
7644 if (mask & (1 << i))
7645 {
2c849145
JM
7646 reg = gen_rtx_REG (SImode, i);
7647
e2c671ba 7648 XVECEXP (par, 0, 0)
43cffd11
RE
7649 = gen_rtx_SET (VOIDmode,
7650 gen_rtx_MEM (BLKmode,
7651 gen_rtx_PRE_DEC (BLKmode,
7652 stack_pointer_rtx)),
7653 gen_rtx_UNSPEC (BLKmode,
2c849145 7654 gen_rtvec (1, reg),
9b598fa0 7655 UNSPEC_PUSH_MULT));
2c849145 7656
9b598fa0
RE
7657 if (i != PC_REGNUM)
7658 {
7659 tmp = gen_rtx_SET (VOIDmode,
7660 gen_rtx_MEM (SImode, stack_pointer_rtx),
7661 reg);
7662 RTX_FRAME_RELATED_P (tmp) = 1;
7663 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7664 dwarf_par_index++;
7665 }
2c849145 7666
e2c671ba
RE
7667 break;
7668 }
7669 }
7670
7671 for (j = 1, i++; j < num_regs; i++)
7672 {
7673 if (mask & (1 << i))
7674 {
2c849145
JM
7675 reg = gen_rtx_REG (SImode, i);
7676
7677 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7678
9b598fa0
RE
7679 if (i != PC_REGNUM)
7680 {
7681 tmp = gen_rtx_SET (VOIDmode,
7682 gen_rtx_MEM (SImode,
7683 plus_constant (stack_pointer_rtx,
7684 4 * j)),
7685 reg);
7686 RTX_FRAME_RELATED_P (tmp) = 1;
7687 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
7688 }
7689
e2c671ba
RE
7690 j++;
7691 }
7692 }
b111229a 7693
2c849145 7694 par = emit_insn (par);
87e27392
NC
7695
7696 tmp = gen_rtx_SET (SImode,
7697 stack_pointer_rtx,
7698 gen_rtx_PLUS (SImode,
7699 stack_pointer_rtx,
7700 GEN_INT (-4 * num_regs)));
7701 RTX_FRAME_RELATED_P (tmp) = 1;
7702 XVECEXP (dwarf, 0, 0) = tmp;
7703
2c849145
JM
7704 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7705 REG_NOTES (par));
7706 return par;
b111229a
RE
7707}
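
#if 0
/* Usage sketch only: building the MASK argument for
   emit_multi_reg_push.  Pushing {r4, fp, ip, lr, pc} corresponds to
   the example RTL shown in the comment above.  */
static int
example_push_mask ()
{
  return (1 << 4)		/* r4 */
    | (1 << 11)			/* fp */
    | (1 << IP_REGNUM)		/* ip */
    | (1 << LR_REGNUM)		/* lr */
    | (1 << PC_REGNUM);		/* pc */
}
#endif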
7708
2c849145 7709static rtx
b111229a
RE
7710emit_sfm (base_reg, count)
7711 int base_reg;
7712 int count;
7713{
7714 rtx par;
2c849145
JM
7715 rtx dwarf;
7716 rtx tmp, reg;
b111229a
RE
7717 int i;
7718
43cffd11 7719 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
7720 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7721 RTX_FRAME_RELATED_P (dwarf) = 1;
7722
7723 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
7724
7725 XVECEXP (par, 0, 0)
7726 = gen_rtx_SET (VOIDmode,
7727 gen_rtx_MEM (BLKmode,
7728 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7729 gen_rtx_UNSPEC (BLKmode,
2c849145 7730 gen_rtvec (1, reg),
b15bca31 7731 UNSPEC_PUSH_MULT));
2c849145
JM
7732 tmp
7733 = gen_rtx_SET (VOIDmode,
7734 gen_rtx_MEM (XFmode,
7735 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7736 reg);
7737 RTX_FRAME_RELATED_P (tmp) = 1;
7738 XVECEXP (dwarf, 0, count - 1) = tmp;
7739
b111229a 7740 for (i = 1; i < count; i++)
2c849145
JM
7741 {
7742 reg = gen_rtx_REG (XFmode, base_reg++);
7743 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7744
7745 tmp = gen_rtx_SET (VOIDmode,
7746 gen_rtx_MEM (XFmode,
7747 gen_rtx_PRE_DEC (BLKmode,
7748 stack_pointer_rtx)),
7749 reg);
7750 RTX_FRAME_RELATED_P (tmp) = 1;
7751 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7752 }
b111229a 7753
2c849145
JM
7754 par = emit_insn (par);
7755 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7756 REG_NOTES (par));
7757 return par;
e2c671ba
RE
7758}
7759
6d3d9133
NC
7760/* Generate the prologue instructions for entry into an ARM function. */
7761
e2c671ba
RE
7762void
7763arm_expand_prologue ()
7764{
7765 int reg;
6d3d9133 7766 rtx amount;
2c849145 7767 rtx insn;
68dfd979 7768 rtx ip_rtx;
6d3d9133
NC
7769 unsigned long live_regs_mask;
7770 unsigned long func_type;
68dfd979 7771 int fp_offset = 0;
d3236b4d 7772
6d3d9133 7773 func_type = arm_current_func_type ();
e2c671ba 7774
31fdb4d5 7775 /* Naked functions don't have prologues. */
6d3d9133 7776 if (IS_NAKED (func_type))
31fdb4d5
DE
7777 return;
7778
6d3d9133
NC
7779 /* Compute which registers we will have to save onto the stack. */
7780 live_regs_mask = arm_compute_save_reg_mask ();
e2c671ba 7781
68dfd979 7782 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
d3236b4d 7783
e2c671ba
RE
7784 if (frame_pointer_needed)
7785 {
6d3d9133 7786 if (IS_NESTED (func_type))
68dfd979
NC
7787 {
7788 /* The static chain register is the same as the IP register
7789 used as a scratch register during stack frame creation.
7790 To get around this we need to find somewhere to store IP
7791 whilst the frame is being created. We try the following
7792 places in order:
7793
6d3d9133 7794 1. The last argument register.
68dfd979
NC
7795 2. A slot on the stack above the frame. (This only
7796 works if the function is not a varargs function).
7797
6d3d9133
NC
7798 If neither of these places is available, we abort (for now).
7799
34ce3d7b
JM
7800 Note - we only need to tell the dwarf2 backend about the SP
7801 adjustment in the second variant; the static chain register
7802 doesn't need to be unwound, as it doesn't contain a value
7803 inherited from the caller. */
d3236b4d 7804
68dfd979
NC
7805 if (regs_ever_live[3] == 0)
7806 {
7807 insn = gen_rtx_REG (SImode, 3);
7808 insn = gen_rtx_SET (SImode, insn, ip_rtx);
d3236b4d 7809 insn = emit_insn (insn);
68dfd979
NC
7810 }
7811 else if (current_function_pretend_args_size == 0)
7812 {
34ce3d7b 7813 rtx dwarf;
68dfd979
NC
7814 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
7815 insn = gen_rtx_MEM (SImode, insn);
7816 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
7817 insn = emit_insn (insn);
34ce3d7b 7818
68dfd979 7819 fp_offset = 4;
34ce3d7b
JM
7820
7821 /* Just tell the dwarf backend that we adjusted SP. */
7822 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7823 gen_rtx_PLUS (SImode, stack_pointer_rtx,
7824 GEN_INT (-fp_offset)));
7825 RTX_FRAME_RELATED_P (insn) = 1;
7826 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7827 dwarf, REG_NOTES (insn));
68dfd979
NC
7828 }
7829 else
7830 /* FIXME - the way to handle this situation is to allow
7831 the pretend args to be dumped onto the stack, then
7832 reuse r3 to save IP. This would involve moving the
f18969c0 7833 copying of SP into IP until after the pretend args
68dfd979 7834 have been dumped, but this is not too hard. */
f18969c0
PB
7835 /* [See e.g. gcc.c-torture/execute/nest-stdar-1.c.] */
7836 error ("Unable to find a temporary location for static chain register");
68dfd979
NC
7837 }
7838
68dfd979
NC
7839 if (fp_offset)
7840 {
7841 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
7842 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7843 }
7844 else
7845 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
7846
6d3d9133 7847 insn = emit_insn (insn);
8e56560e 7848 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7849 }
7850
7851 if (current_function_pretend_args_size)
7852 {
6d3d9133
NC
7853 /* Push the argument registers, or reserve space for them. */
7854 if (current_function_anonymous_args)
2c849145
JM
7855 insn = emit_multi_reg_push
7856 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
e2c671ba 7857 else
2c849145
JM
7858 insn = emit_insn
7859 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7860 GEN_INT (-current_function_pretend_args_size)));
7861 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7862 }
7863
7864 if (live_regs_mask)
7865 {
2c849145
JM
7866 insn = emit_multi_reg_push (live_regs_mask);
7867 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba 7868 }
d5b7b3ae 7869
6d3d9133 7870 if (! IS_VOLATILE (func_type))
b111229a 7871 {
6d3d9133 7872 /* Save any floating point call-saved registers used by this function. */
b111229a
RE
7873 if (arm_fpu_arch == FP_SOFT2)
7874 {
d5b7b3ae 7875 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
5895f793 7876 if (regs_ever_live[reg] && !call_used_regs[reg])
2c849145
JM
7877 {
7878 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7879 insn = gen_rtx_MEM (XFmode, insn);
7880 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7881 gen_rtx_REG (XFmode, reg)));
7882 RTX_FRAME_RELATED_P (insn) = 1;
7883 }
b111229a
RE
7884 }
7885 else
7886 {
d5b7b3ae 7887 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7888
d5b7b3ae 7889 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a 7890 {
5895f793 7891 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7892 {
7893 if (start_reg - reg == 3)
7894 {
2c849145
JM
7895 insn = emit_sfm (reg, 4);
7896 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
7897 start_reg = reg - 1;
7898 }
7899 }
7900 else
7901 {
7902 if (start_reg != reg)
2c849145
JM
7903 {
7904 insn = emit_sfm (reg + 1, start_reg - reg);
7905 RTX_FRAME_RELATED_P (insn) = 1;
7906 }
b111229a
RE
7907 start_reg = reg - 1;
7908 }
7909 }
7910
7911 if (start_reg != reg)
2c849145
JM
7912 {
7913 insn = emit_sfm (reg + 1, start_reg - reg);
7914 RTX_FRAME_RELATED_P (insn) = 1;
7915 }
b111229a
RE
7916 }
7917 }
e2c671ba
RE
7918
7919 if (frame_pointer_needed)
2c849145 7920 {
6d3d9133 7921 /* Create the new frame pointer. */
68dfd979
NC
7922 insn = GEN_INT (-(4 + current_function_pretend_args_size + fp_offset));
7923 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
2c849145 7924 RTX_FRAME_RELATED_P (insn) = 1;
68dfd979 7925
6d3d9133 7926 if (IS_NESTED (func_type))
68dfd979
NC
7927 {
7928 /* Recover the static chain register. */
7929 if (regs_ever_live [3] == 0)
7930 {
7931 insn = gen_rtx_REG (SImode, 3);
7932 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7933 insn = emit_insn (insn);
68dfd979
NC
7934 }
7935 else /* if (current_function_pretend_args_size == 0) */
7936 {
7937 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
7938 insn = gen_rtx_MEM (SImode, insn);
7939 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7940 insn = emit_insn (insn);
68dfd979
NC
7941 }
7942 }
2c849145 7943 }
e2c671ba 7944
6d3d9133
NC
7945 amount = GEN_INT (-(get_frame_size ()
7946 + current_function_outgoing_args_size));
7947
e2c671ba
RE
7948 if (amount != const0_rtx)
7949 {
745b9093
JM
7950 /* This add can produce multiple insns for a large constant, so we
7951 need to get tricky. */
7952 rtx last = get_last_insn ();
2c849145
JM
7953 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7954 amount));
745b9093
JM
7955 do
7956 {
7957 last = last ? NEXT_INSN (last) : get_insns ();
7958 RTX_FRAME_RELATED_P (last) = 1;
7959 }
7960 while (last != insn);
e04c2d6c
RE
7961
7962 /* If the frame pointer is needed, emit a special barrier that
7963 will prevent the scheduler from moving stores to the frame
7964 before the stack adjustment. */
7965 if (frame_pointer_needed)
7966 {
7967 rtx unspec = gen_rtx_UNSPEC (SImode,
7968 gen_rtvec (2, stack_pointer_rtx,
b15bca31
RE
7969 hard_frame_pointer_rtx),
7970 UNSPEC_PRLG_STK);
e04c2d6c 7971
6d3d9133 7972 insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
e04c2d6c
RE
7973 gen_rtx_MEM (BLKmode, unspec)));
7974 }
e2c671ba
RE
7975 }
7976
7977 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
7978 the call to mcount. Similarly if the user has requested no
7979 scheduling in the prolog. */
7980 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba 7981 emit_insn (gen_blockage ());
6f7ebcbb
NC
7982
7983 /* If the link register is being kept alive, with the return address in it,
7984 then make sure that it does not get reused by the ce2 pass. */
7985 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7986 {
7987 emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
7988 cfun->machine->lr_save_eliminated = 1;
7989 }
e2c671ba 7990}
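
/* For illustration (hypothetical output): for a simple function that
   needs a frame pointer, the expansion above corresponds to the
   classic APCS entry sequence shown earlier in this file:

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #<frame size>
*/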
cce8749e 7991\f
9997d19d
RE
7992/* If CODE is 'd', then X is a condition operand and the instruction
7993 should only be executed if the condition is true.
ddd5a7c1 7994 If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
7995 should only be executed if the condition is false: however, if the mode
7996 of the comparison is CCFPEmode, then always execute the instruction -- we
7997 do this because in these circumstances !GE does not necessarily imply LT;
7998 in these cases the instruction pattern will take care to make sure that
7999 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 8000 doing this instruction unconditionally.
9997d19d
RE
8001 If CODE is 'N' then X is a floating point operand that must be negated
8002 before output.
8003 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8004 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
8005
8006void
8007arm_print_operand (stream, x, code)
62b10bbc 8008 FILE * stream;
9997d19d
RE
8009 rtx x;
8010 int code;
8011{
8012 switch (code)
8013 {
8014 case '@':
f3139301 8015 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
8016 return;
8017
d5b7b3ae
RE
8018 case '_':
8019 fputs (user_label_prefix, stream);
8020 return;
8021
9997d19d 8022 case '|':
f3139301 8023 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
8024 return;
8025
8026 case '?':
8027 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
8028 {
8029 if (TARGET_THUMB || current_insn_predicate != NULL)
8030 abort ();
8031
8032 fputs (arm_condition_codes[arm_current_cc], stream);
8033 }
8034 else if (current_insn_predicate)
8035 {
8036 enum arm_cond_code code;
8037
8038 if (TARGET_THUMB)
8039 abort ();
8040
8041 code = get_arm_condition_code (current_insn_predicate);
8042 fputs (arm_condition_codes[code], stream);
8043 }
9997d19d
RE
8044 return;
8045
8046 case 'N':
8047 {
8048 REAL_VALUE_TYPE r;
8049 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8050 r = REAL_VALUE_NEGATE (r);
8051 fprintf (stream, "%s", fp_const_from_val (&r));
8052 }
8053 return;
8054
8055 case 'B':
8056 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
8057 {
8058 HOST_WIDE_INT val;
5895f793 8059 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 8060 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8061 }
9997d19d
RE
8062 else
8063 {
8064 putc ('~', stream);
8065 output_addr_const (stream, x);
8066 }
8067 return;
8068
8069 case 'i':
8070 fprintf (stream, "%s", arithmetic_instr (x, 1));
8071 return;
8072
8073 case 'I':
8074 fprintf (stream, "%s", arithmetic_instr (x, 0));
8075 return;
8076
8077 case 'S':
8078 {
8079 HOST_WIDE_INT val;
5895f793 8080 const char * shift = shift_op (x, &val);
9997d19d 8081
e2c671ba
RE
8082 if (shift)
8083 {
5895f793 8084 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
8085 if (val == -1)
8086 arm_print_operand (stream, XEXP (x, 1), 0);
8087 else
4bc74ece
NC
8088 {
8089 fputc ('#', stream);
36ba9cb8 8090 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8091 }
e2c671ba 8092 }
9997d19d
RE
8093 }
8094 return;
8095
d5b7b3ae
RE
8096 /* An explanation of the 'Q', 'R' and 'H' register operands:
8097
8098 In a pair of registers containing a DI or DF value the 'Q'
8099 operand returns the register number of the register containing
8100 the least significant part of the value. The 'R' operand returns
8101 the register number of the register containing the most
8102 significant part of the value.
8103
8104 The 'H' operand returns the higher of the two register numbers.
8105 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8106 same as the 'Q' operand, since the most significant part of the
8107 value is held in the lower number register. The reverse is true
8108 on systems where WORDS_BIG_ENDIAN is false.
8109
8110 The purpose of these operands is to distinguish between cases
8111 where the endian-ness of the values is important (for example
8112 when they are added together), and cases where the endian-ness
8113 is irrelevant, but the order of register operations is important.
8114 For example when loading a value from memory into a register
8115 pair, the endian-ness does not matter. Provided that the value
8116 from the lower memory address is put into the lower numbered
8117 register, and the value from the higher address is put into the
8118 higher numbered register, the load will work regardless of whether
8119 the value being loaded is big-wordian or little-wordian. The
8120 order of the two register loads can matter however, if the address
8121 of the memory location is actually held in one of the registers
8122 being overwritten by the load. */
c1c2bc04 8123 case 'Q':
d5b7b3ae 8124 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 8125 abort ();
d5b7b3ae 8126 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
8127 return;
8128
9997d19d 8129 case 'R':
d5b7b3ae 8130 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 8131 abort ();
d5b7b3ae
RE
8132 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8133 return;
8134
8135 case 'H':
8136 if (REGNO (x) > LAST_ARM_REGNUM)
8137 abort ();
8138 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
8139 return;
8140
8141 case 'm':
d5b7b3ae
RE
8142 asm_fprintf (stream, "%r",
8143 GET_CODE (XEXP (x, 0)) == REG
8144 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
8145 return;
8146
8147 case 'M':
dd18ae56 8148 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae
RE
8149 REGNO (x),
8150 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
8151 return;
8152
8153 case 'd':
5895f793 8154 if (!x)
d5b7b3ae
RE
8155 return;
8156
8157 if (TARGET_ARM)
9997d19d
RE
8158 fputs (arm_condition_codes[get_arm_condition_code (x)],
8159 stream);
d5b7b3ae
RE
8160 else
8161 fputs (thumb_condition_code (x, 0), stream);
9997d19d
RE
8162 return;
8163
8164 case 'D':
5895f793 8165 if (!x)
d5b7b3ae
RE
8166 return;
8167
8168 if (TARGET_ARM)
8169 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8170 (get_arm_condition_code (x))],
9997d19d 8171 stream);
d5b7b3ae
RE
8172 else
8173 fputs (thumb_condition_code (x, 1), stream);
9997d19d
RE
8174 return;
8175
8176 default:
8177 if (x == 0)
8178 abort ();
8179
8180 if (GET_CODE (x) == REG)
d5b7b3ae 8181 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
8182 else if (GET_CODE (x) == MEM)
8183 {
8184 output_memory_reference_mode = GET_MODE (x);
8185 output_address (XEXP (x, 0));
8186 }
8187 else if (GET_CODE (x) == CONST_DOUBLE)
8188 fprintf (stream, "#%s", fp_immediate_constant (x));
8189 else if (GET_CODE (x) == NEG)
6354dc9b 8190 abort (); /* This should never happen now. */
9997d19d
RE
8191 else
8192 {
8193 fputc ('#', stream);
8194 output_addr_const (stream, x);
8195 }
8196 }
8197}
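
#if 0
/* Illustrative sketch only: the register-number arithmetic behind the
   'Q', 'R' and 'H' operand codes handled above.  The helper name is
   hypothetical.  */
static int
pair_operand_regno (regno, code)
     int regno;
     int code;
{
  switch (code)
    {
    case 'Q': return regno + (WORDS_BIG_ENDIAN ? 1 : 0);
    case 'R': return regno + (WORDS_BIG_ENDIAN ? 0 : 1);
    case 'H': return regno + 1;
    default:  abort ();
    }
}
#endif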
cce8749e
CH
8198\f
8199/* A finite state machine takes care of noticing whether or not instructions
8200 can be conditionally executed, and thus decrease execution time and code
8201 size by deleting branch instructions. The fsm is controlled by
8202 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8203
8204/* The state of the fsm controlling condition codes are:
8205 0: normal, do nothing special
8206 1: make ASM_OUTPUT_OPCODE not output this instruction
8207 2: make ASM_OUTPUT_OPCODE not output this instruction
8208 3: make instructions conditional
8209 4: make instructions conditional
8210
8211 State transitions (state->state by whom under condition):
8212 0 -> 1 final_prescan_insn if the `target' is a label
8213 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8214 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8215 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8216 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8217 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8218 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8219 (the target insn is arm_target_insn).
8220
ff9940b0
RE
8221 If the jump clobbers the conditions then we use states 2 and 4.
8222
8223 A similar thing can be done with conditional return insns.
8224
cce8749e
CH
8225 XXX In case the `target' is an unconditional branch, this conditionalising
8226 of the instructions always reduces code size, but not always execution
8227 time. But then, I want to reduce the code size to somewhere near what
8228 /bin/cc produces. */
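
#if 0
/* Illustrative sketch only: the transitions performed on behalf of
   ASM_OUTPUT_OPCODE in the table above.  States 1 and 2 advance to 3
   and 4 respectively once the suppressed branch has been skipped; the
   helper is hypothetical.  */
static int
ccfsm_state_after_opcode (state)
     int state;
{
  return (state == 1 || state == 2) ? state + 2 : state;
}
#endif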
8229
cce8749e
CH
8230/* Returns the index of the ARM condition code string in
8231 `arm_condition_codes'. COMPARISON should be an rtx like
8232 `(eq (...) (...))'. */
8233
84ed5e79 8234static enum arm_cond_code
cce8749e
CH
8235get_arm_condition_code (comparison)
8236 rtx comparison;
8237{
5165176d 8238 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
8239 register int code;
8240 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
8241
8242 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 8243 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
8244 XEXP (comparison, 1));
8245
8246 switch (mode)
cce8749e 8247 {
84ed5e79
RE
8248 case CC_DNEmode: code = ARM_NE; goto dominance;
8249 case CC_DEQmode: code = ARM_EQ; goto dominance;
8250 case CC_DGEmode: code = ARM_GE; goto dominance;
8251 case CC_DGTmode: code = ARM_GT; goto dominance;
8252 case CC_DLEmode: code = ARM_LE; goto dominance;
8253 case CC_DLTmode: code = ARM_LT; goto dominance;
8254 case CC_DGEUmode: code = ARM_CS; goto dominance;
8255 case CC_DGTUmode: code = ARM_HI; goto dominance;
8256 case CC_DLEUmode: code = ARM_LS; goto dominance;
8257 case CC_DLTUmode: code = ARM_CC;
8258
8259 dominance:
8260 if (comp_code != EQ && comp_code != NE)
8261 abort ();
8262
8263 if (comp_code == EQ)
8264 return ARM_INVERSE_CONDITION_CODE (code);
8265 return code;
8266
5165176d 8267 case CC_NOOVmode:
84ed5e79 8268 switch (comp_code)
5165176d 8269 {
84ed5e79
RE
8270 case NE: return ARM_NE;
8271 case EQ: return ARM_EQ;
8272 case GE: return ARM_PL;
8273 case LT: return ARM_MI;
5165176d
RE
8274 default: abort ();
8275 }
8276
8277 case CC_Zmode:
84ed5e79 8278 switch (comp_code)
5165176d 8279 {
84ed5e79
RE
8280 case NE: return ARM_NE;
8281 case EQ: return ARM_EQ;
5165176d
RE
8282 default: abort ();
8283 }
8284
8285 case CCFPEmode:
e45b72c4
RE
8286 case CCFPmode:
8287 /* These encodings assume that AC=1 in the FPA system control
8288 byte. This allows us to handle all cases except UNEQ and
8289 LTGT. */
84ed5e79
RE
8290 switch (comp_code)
8291 {
8292 case GE: return ARM_GE;
8293 case GT: return ARM_GT;
8294 case LE: return ARM_LS;
8295 case LT: return ARM_MI;
e45b72c4
RE
8296 case NE: return ARM_NE;
8297 case EQ: return ARM_EQ;
8298 case ORDERED: return ARM_VC;
8299 case UNORDERED: return ARM_VS;
8300 case UNLT: return ARM_LT;
8301 case UNLE: return ARM_LE;
8302 case UNGT: return ARM_HI;
8303 case UNGE: return ARM_PL;
8304 /* UNEQ and LTGT do not have a representation. */
8305 case UNEQ: /* Fall through. */
8306 case LTGT: /* Fall through. */
84ed5e79
RE
8307 default: abort ();
8308 }
8309
8310 case CC_SWPmode:
8311 switch (comp_code)
8312 {
8313 case NE: return ARM_NE;
8314 case EQ: return ARM_EQ;
8315 case GE: return ARM_LE;
8316 case GT: return ARM_LT;
8317 case LE: return ARM_GE;
8318 case LT: return ARM_GT;
8319 case GEU: return ARM_LS;
8320 case GTU: return ARM_CC;
8321 case LEU: return ARM_CS;
8322 case LTU: return ARM_HI;
8323 default: abort ();
8324 }
8325
bd9c7e23
RE
8326 case CC_Cmode:
8327 switch (comp_code)
8328 {
8329 case LTU: return ARM_CS;
8330 case GEU: return ARM_CC;
8331 default: abort ();
8332 }
8333
5165176d 8334 case CCmode:
84ed5e79 8335 switch (comp_code)
5165176d 8336 {
84ed5e79
RE
8337 case NE: return ARM_NE;
8338 case EQ: return ARM_EQ;
8339 case GE: return ARM_GE;
8340 case GT: return ARM_GT;
8341 case LE: return ARM_LE;
8342 case LT: return ARM_LT;
8343 case GEU: return ARM_CS;
8344 case GTU: return ARM_HI;
8345 case LEU: return ARM_LS;
8346 case LTU: return ARM_CC;
5165176d
RE
8347 default: abort ();
8348 }
8349
cce8749e
CH
8350 default: abort ();
8351 }
84ed5e79
RE
8352
8353 abort ();
f3bb6135 8354}
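
/* Worked example: a GT comparison in plain CCmode maps to ARM_GT
   above, whereas in CC_SWPmode, where the operands were swapped when
   the comparison was generated, the same GT code maps to ARM_LT.  */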
cce8749e
CH
8355
8356
8357void
74bbc178 8358arm_final_prescan_insn (insn)
cce8749e 8359 rtx insn;
cce8749e
CH
8360{
8361 /* BODY will hold the body of INSN. */
8362 register rtx body = PATTERN (insn);
8363
8364 /* This will be 1 if trying to repeat the trick, and things need to be
8365 reversed if it appears to fail. */
8366 int reverse = 0;
8367
ff9940b0
RE
8368 /* JUMP_CLOBBERS being one implies that the conditions are clobbered
8369 if a branch is taken, even if the rtl suggests otherwise. It also
8370 means that we have to grub around within the jump expression to find
8371 out what the conditions are when the jump isn't taken. */
8372 int jump_clobbers = 0;
8373
6354dc9b 8374 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
8375 int seeking_return = 0;
8376
cce8749e
CH
8377 /* START_INSN will hold the insn from where we start looking. This is the
8378 first insn after the following code_label if REVERSE is true. */
8379 rtx start_insn = insn;
8380
8381 /* If in state 4, check if the target branch is reached, in order to
8382 change back to state 0. */
8383 if (arm_ccfsm_state == 4)
8384 {
8385 if (insn == arm_target_insn)
f5a1b0d2
NC
8386 {
8387 arm_target_insn = NULL;
8388 arm_ccfsm_state = 0;
8389 }
cce8749e
CH
8390 return;
8391 }
8392
8393 /* If in state 3, it is possible to repeat the trick, if this insn is an
8394 unconditional branch to a label, and immediately following this branch
8395 is the previous target label which is only used once, and the label this
8396 branch jumps to is not too far off. */
8397 if (arm_ccfsm_state == 3)
8398 {
8399 if (simplejump_p (insn))
8400 {
8401 start_insn = next_nonnote_insn (start_insn);
8402 if (GET_CODE (start_insn) == BARRIER)
8403 {
8404 /* XXX Isn't this always a barrier? */
8405 start_insn = next_nonnote_insn (start_insn);
8406 }
8407 if (GET_CODE (start_insn) == CODE_LABEL
8408 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8409 && LABEL_NUSES (start_insn) == 1)
8410 reverse = TRUE;
8411 else
8412 return;
8413 }
ff9940b0
RE
8414 else if (GET_CODE (body) == RETURN)
8415 {
8416 start_insn = next_nonnote_insn (start_insn);
8417 if (GET_CODE (start_insn) == BARRIER)
8418 start_insn = next_nonnote_insn (start_insn);
8419 if (GET_CODE (start_insn) == CODE_LABEL
8420 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8421 && LABEL_NUSES (start_insn) == 1)
8422 {
8423 reverse = TRUE;
8424 seeking_return = 1;
8425 }
8426 else
8427 return;
8428 }
cce8749e
CH
8429 else
8430 return;
8431 }
8432
8433 if (arm_ccfsm_state != 0 && !reverse)
8434 abort ();
8435 if (GET_CODE (insn) != JUMP_INSN)
8436 return;
8437
ddd5a7c1 8438 /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
8439 the jump should always come first. */
8440 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8441 body = XVECEXP (body, 0, 0);
8442
8443#if 0
8444 /* If this is a conditional return then we don't want to know */
8445 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8446 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8447 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8448 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8449 return;
8450#endif
8451
cce8749e
CH
8452 if (reverse
8453 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8454 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8455 {
bd9c7e23
RE
8456 int insns_skipped;
8457 int fail = FALSE, succeed = FALSE;
cce8749e
CH
8458 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8459 int then_not_else = TRUE;
ff9940b0 8460 rtx this_insn = start_insn, label = 0;
cce8749e 8461
e45b72c4
RE
8462 /* If the jump cannot be done with one instruction, we cannot
8463 conditionally execute the instruction in the inverse case. */
ff9940b0 8464 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 8465 {
5bbe2d40
RE
8466 jump_clobbers = 1;
8467 return;
8468 }
ff9940b0 8469
cce8749e
CH
8470 /* Register the insn jumped to. */
8471 if (reverse)
ff9940b0
RE
8472 {
8473 if (!seeking_return)
8474 label = XEXP (SET_SRC (body), 0);
8475 }
cce8749e
CH
8476 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8477 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8478 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8479 {
8480 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8481 then_not_else = FALSE;
8482 }
ff9940b0
RE
8483 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8484 seeking_return = 1;
8485 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8486 {
8487 seeking_return = 1;
8488 then_not_else = FALSE;
8489 }
cce8749e
CH
8490 else
8491 abort ();
8492
8493 /* See how many insns this branch skips, and what kind of insns. If all
8494 insns are okay, and the label or unconditional branch to the same
8495 label is not too far away, succeed. */
8496 for (insns_skipped = 0;
b36ba79f 8497 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
8498 {
8499 rtx scanbody;
8500
8501 this_insn = next_nonnote_insn (this_insn);
8502 if (!this_insn)
8503 break;
8504
cce8749e
CH
8505 switch (GET_CODE (this_insn))
8506 {
8507 case CODE_LABEL:
8508 /* Succeed if it is the target label, otherwise fail since
8509 control falls in from somewhere else. */
8510 if (this_insn == label)
8511 {
ff9940b0
RE
8512 if (jump_clobbers)
8513 {
8514 arm_ccfsm_state = 2;
8515 this_insn = next_nonnote_insn (this_insn);
8516 }
8517 else
8518 arm_ccfsm_state = 1;
cce8749e
CH
8519 succeed = TRUE;
8520 }
8521 else
8522 fail = TRUE;
8523 break;
8524
ff9940b0 8525 case BARRIER:
cce8749e 8526 /* Succeed if the following insn is the target label.
ff9940b0
RE
8527 Otherwise fail.
8528 If return insns are used then the last insn in a function
6354dc9b 8529 will be a barrier. */
cce8749e 8530 this_insn = next_nonnote_insn (this_insn);
ff9940b0 8531 if (this_insn && this_insn == label)
cce8749e 8532 {
ff9940b0
RE
8533 if (jump_clobbers)
8534 {
8535 arm_ccfsm_state = 2;
8536 this_insn = next_nonnote_insn (this_insn);
8537 }
8538 else
8539 arm_ccfsm_state = 1;
cce8749e
CH
8540 succeed = TRUE;
8541 }
8542 else
8543 fail = TRUE;
8544 break;
8545
ff9940b0 8546 case CALL_INSN:
2b835d68 8547 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 8548 calls. */
2b835d68 8549 if (TARGET_APCS_32)
bd9c7e23
RE
8550 {
8551 /* Succeed if the following insn is the target label,
8552 or if the following two insns are a barrier and
8553 the target label. */
8554 this_insn = next_nonnote_insn (this_insn);
8555 if (this_insn && GET_CODE (this_insn) == BARRIER)
8556 this_insn = next_nonnote_insn (this_insn);
8557
8558 if (this_insn && this_insn == label
b36ba79f 8559 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
8560 {
8561 if (jump_clobbers)
8562 {
8563 arm_ccfsm_state = 2;
8564 this_insn = next_nonnote_insn (this_insn);
8565 }
8566 else
8567 arm_ccfsm_state = 1;
8568 succeed = TRUE;
8569 }
8570 else
8571 fail = TRUE;
8572 }
ff9940b0 8573 break;
2b835d68 8574
cce8749e
CH
8575 case JUMP_INSN:
8576 /* If this is an unconditional branch to the same label, succeed.
8577 If it is to another label, do nothing. If it is conditional,
8578 fail. */
914a3b8c 8579 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 8580
ed4c4348 8581 scanbody = PATTERN (this_insn);
ff9940b0
RE
8582 if (GET_CODE (scanbody) == SET
8583 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
8584 {
8585 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
8586 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
8587 {
8588 arm_ccfsm_state = 2;
8589 succeed = TRUE;
8590 }
8591 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
8592 fail = TRUE;
8593 }
b36ba79f
RE
8594 /* Fail if a conditional return is undesirable (eg on a
8595 StrongARM), but still allow this if optimizing for size. */
8596 else if (GET_CODE (scanbody) == RETURN
5895f793
RE
8597 && !use_return_insn (TRUE)
8598 && !optimize_size)
b36ba79f 8599 fail = TRUE;
ff9940b0
RE
8600 else if (GET_CODE (scanbody) == RETURN
8601 && seeking_return)
8602 {
8603 arm_ccfsm_state = 2;
8604 succeed = TRUE;
8605 }
8606 else if (GET_CODE (scanbody) == PARALLEL)
8607 {
8608 switch (get_attr_conds (this_insn))
8609 {
8610 case CONDS_NOCOND:
8611 break;
8612 default:
8613 fail = TRUE;
8614 break;
8615 }
8616 }
4e67550b
RE
8617 else
8618 fail = TRUE; /* Unrecognized jump (eg epilogue). */
8619
cce8749e
CH
8620 break;
8621
8622 case INSN:
ff9940b0
RE
8623 /* Instructions using or affecting the condition codes make it
8624 fail. */
ed4c4348 8625 scanbody = PATTERN (this_insn);
5895f793
RE
8626 if (!(GET_CODE (scanbody) == SET
8627 || GET_CODE (scanbody) == PARALLEL)
74641843 8628 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
8629 fail = TRUE;
8630 break;
8631
8632 default:
8633 break;
8634 }
8635 }
8636 if (succeed)
8637 {
ff9940b0 8638 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 8639 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
8640 else if (seeking_return || arm_ccfsm_state == 2)
8641 {
8642 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
8643 {
8644 this_insn = next_nonnote_insn (this_insn);
8645 if (this_insn && (GET_CODE (this_insn) == BARRIER
8646 || GET_CODE (this_insn) == CODE_LABEL))
8647 abort ();
8648 }
8649 if (!this_insn)
8650 {
8651 /* Oh dear! We ran off the end... give up. */
df4ae160 8652 recog (PATTERN (insn), insn, NULL);
ff9940b0 8653 arm_ccfsm_state = 0;
abaa26e5 8654 arm_target_insn = NULL;
ff9940b0
RE
8655 return;
8656 }
8657 arm_target_insn = this_insn;
8658 }
cce8749e
CH
8659 else
8660 abort ();
ff9940b0
RE
8661 if (jump_clobbers)
8662 {
8663 if (reverse)
8664 abort ();
8665 arm_current_cc =
8666 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
8667 0), 0), 1));
8668 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
8669 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8670 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
8671 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8672 }
8673 else
8674 {
8675 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
8676 what it was. */
8677 if (!reverse)
8678 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
8679 0));
8680 }
cce8749e 8681
cce8749e
CH
8682 if (reverse || then_not_else)
8683 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8684 }
d5b7b3ae 8685
1ccbefce 8686 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 8687 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 8688 across this call; since the insn has been recognized already we
b020fd92 8689 call recog directly). */
df4ae160 8690 recog (PATTERN (insn), insn, NULL);
cce8749e 8691 }
f3bb6135 8692}
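
/* For illustration (hypothetical input): the state machine above
   turns a branch over a single instruction,

	cmp	r0, #0
	beq	.L1
	mov	r1, #1
   .L1:

   into its conditionally executed equivalent,

	cmp	r0, #0
	movne	r1, #1

   deleting the branch (and, here, the label) entirely.  */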
cce8749e 8693
d5b7b3ae
RE
8694int
8695arm_regno_class (regno)
8696 int regno;
8697{
8698 if (TARGET_THUMB)
8699 {
8700 if (regno == STACK_POINTER_REGNUM)
8701 return STACK_REG;
8702 if (regno == CC_REGNUM)
8703 return CC_REG;
8704 if (regno < 8)
8705 return LO_REGS;
8706 return HI_REGS;
8707 }
8708
8709 if ( regno <= LAST_ARM_REGNUM
8710 || regno == FRAME_POINTER_REGNUM
8711 || regno == ARG_POINTER_REGNUM)
8712 return GENERAL_REGS;
8713
8714 if (regno == CC_REGNUM)
8715 return NO_REGS;
8716
8717 return FPU_REGS;
8718}
8719
8720/* Handle a special case when computing the offset
8721 of an argument from the frame pointer. */
8722int
8723arm_debugger_arg_offset (value, addr)
8724 int value;
8725 rtx addr;
8726{
8727 rtx insn;
8728
8729 /* We are only interested if dbxout_parms() failed to compute the offset. */
8730 if (value != 0)
8731 return 0;
8732
8733 /* We can only cope with the case where the address is held in a register. */
8734 if (GET_CODE (addr) != REG)
8735 return 0;
8736
8737 /* If we are using the frame pointer to point at the argument, then
8738 an offset of 0 is correct. */
cd2b33d0 8739 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
8740 return 0;
8741
8742 /* If we are using the stack pointer to point at the
8743 argument, then an offset of 0 is correct. */
5895f793 8744 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
8745 && REGNO (addr) == SP_REGNUM)
8746 return 0;
8747
8748 /* Oh dear. The argument is pointed to by a register rather
8749 than being held in a register, or being stored at a known
8750 offset from the frame pointer. Since GDB only understands
8751 those two kinds of argument we must translate the address
8752 held in the register into an offset from the frame pointer.
8753 We do this by searching through the insns for the function
8754 looking to see where this register gets its value. If the
8755 register is initialised from the frame pointer plus an offset
8756 then we are in luck and we can continue, otherwise we give up.
8757
8758 This code is exercised by producing debugging information
8759 for a function with arguments like this:
8760
8761 double func (double a, double b, int c, double d) {return d;}
8762
8763 Without this code the stab for parameter 'd' will be set to
8764 an offset of 0 from the frame pointer, rather than 8. */
8765
8766 /* The if() statement says:
8767
8768 If the insn is a normal instruction
8769 and if the insn is setting the value in a register
8770 and if the register being set is the register holding the address of the argument
8771 and if the address is computed by an addition
8772 that involves adding to a register
8773 which is the frame pointer
8774 a constant integer
8775
8776 then... */
8777
8778 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8779 {
8780 if ( GET_CODE (insn) == INSN
8781 && GET_CODE (PATTERN (insn)) == SET
8782 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8783 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8784 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 8785 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
8786 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8787 )
8788 {
8789 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8790
8791 break;
8792 }
8793 }
8794
8795 if (value == 0)
8796 {
8797 debug_rtx (addr);
8798 warning ("Unable to compute real location of stacked parameter");
8799 value = 8; /* XXX magic hack */
8800 }
8801
8802 return value;
8803}
8804
d19fb8e3 8805#define def_builtin(NAME, TYPE, CODE) \
df4ae160 8806 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL)
d19fb8e3
NC
8807
8808void
8809arm_init_builtins ()
8810{
cbd5937a 8811 tree endlink = void_list_node;
d19fb8e3
NC
8812 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
8813 tree pchar_type_node = build_pointer_type (char_type_node);
8814
8815 tree int_ftype_int, void_ftype_pchar;
8816
8817 /* void func (void *) */
8818 void_ftype_pchar
8819 = build_function_type (void_type_node,
8820 tree_cons (NULL_TREE, pchar_type_node, endlink));
8821
8822 /* int func (int) */
8823 int_ftype_int
8824 = build_function_type (integer_type_node, int_endlink);
8825
8826 /* Initialize arm V5 builtins. */
8827 if (arm_arch5)
eab4abeb 8828 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
b15bca31
RE
8829
8830 /* Initialize arm V5E builtins. */
8831 if (arm_arch5e)
eab4abeb
NC
8832 def_builtin ("__builtin_prefetch", void_ftype_pchar,
8833 ARM_BUILTIN_PREFETCH);
d19fb8e3
NC
8834}
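
#if 0
/* Usage sketch only (hypothetical user code, not part of the
   compiler): on an ARM v5 target the builtin registered above can be
   called directly from C, and on v5E so can the prefetch.  */
int count_leading_zeros (int x) { return __builtin_clz (x); }
void warm_cache (char * p) { __builtin_prefetch (p); }
#endif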
8835
8836/* Expand an expression EXP that calls a built-in function,
8837 with result going to TARGET if that's convenient
8838 (and in mode MODE if that's convenient).
8839 SUBTARGET may be used as the target for computing one of EXP's operands.
8840 IGNORE is nonzero if the value is to be ignored. */
8841
8842rtx
8843arm_expand_builtin (exp, target, subtarget, mode, ignore)
8844 tree exp;
8845 rtx target;
8846 rtx subtarget ATTRIBUTE_UNUSED;
8847 enum machine_mode mode ATTRIBUTE_UNUSED;
8848 int ignore ATTRIBUTE_UNUSED;
8849{
8850 enum insn_code icode;
8851 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8852 tree arglist = TREE_OPERAND (exp, 1);
8853 tree arg0;
8854 rtx op0, pat;
8855 enum machine_mode tmode, mode0;
8856 int fcode = DECL_FUNCTION_CODE (fndecl);
8857
8858 switch (fcode)
8859 {
8860 default:
8861 break;
8862
8863 case ARM_BUILTIN_CLZ:
8864 icode = CODE_FOR_clz;
8865 arg0 = TREE_VALUE (arglist);
8866 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8867 tmode = insn_data[icode].operand[0].mode;
8868 mode0 = insn_data[icode].operand[1].mode;
8869
8870 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8871 op0 = copy_to_mode_reg (mode0, op0);
8872 if (target == 0
8873 || GET_MODE (target) != tmode
8874 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8875 target = gen_reg_rtx (tmode);
8876 pat = GEN_FCN (icode) (target, op0);
8877 if (! pat)
8878 return 0;
8879 emit_insn (pat);
8880 return target;
8881
8882 case ARM_BUILTIN_PREFETCH:
8883 icode = CODE_FOR_prefetch;
8884 arg0 = TREE_VALUE (arglist);
8885 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8886
8887 op0 = gen_rtx_MEM (SImode, copy_to_mode_reg (Pmode, op0));
8888
8889 pat = GEN_FCN (icode) (op0);
8890 if (! pat)
8891 return 0;
8892 emit_insn (pat);
8893 return target;
8894 }
8895
8896 /* @@@ Should really do something sensible here. */
8897 return NULL_RTX;
8898}
8899\f
8900/* Recursively search through all of the blocks in a function
8901 checking to see if any of the variables created in that
8902 function match the RTX called 'orig'. If they do then
8903 replace them with the RTX called 'new'. */
8904
8905static void
8906replace_symbols_in_block (block, orig, new)
8907 tree block;
8908 rtx orig;
8909 rtx new;
8910{
8911 for (; block; block = BLOCK_CHAIN (block))
8912 {
8913 tree sym;
8914
5895f793 8915 if (!TREE_USED (block))
8916 continue;
8917
8918 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8919 {
8920 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8921 || DECL_IGNORED_P (sym)
8922 || TREE_CODE (sym) != VAR_DECL
8923 || DECL_EXTERNAL (sym)
5895f793 8924 || !rtx_equal_p (DECL_RTL (sym), orig)
8925 )
8926 continue;
8927
8928 DECL_RTL (sym) = new;
8929 }
8930
8931 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8932 }
8933}
8934
8935/* Return the number (counting from 0) of the least significant set
8936 bit in MASK. */
8937#ifdef __GNUC__
8938inline
8939#endif
8940static int
8941number_of_first_bit_set (mask)
8942 int mask;
8943{
8944 int bit;
8945
8946 for (bit = 0;
8947 (mask & (1 << bit)) == 0;
5895f793 8948 ++bit)
8949 continue;
8950
8951 return bit;
8952}
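
/* For example, number_of_first_bit_set (0x18) returns 3, since bit 3
   is the lowest bit set in binary 11000.  Note that the loop above
   never terminates for a zero MASK; callers must pass a non-zero
   value.  */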
8953
8954/* Generate code to return from a thumb function.
8955 If 'reg_containing_return_addr' is -1, then the return address is
8956 actually on the stack, at the stack pointer. */
8957static void
8958thumb_exit (f, reg_containing_return_addr, eh_ofs)
8959 FILE * f;
8960 int reg_containing_return_addr;
8961 rtx eh_ofs;
8962{
8963 unsigned regs_available_for_popping;
8964 unsigned regs_to_pop;
8965 int pops_needed;
8966 unsigned available;
8967 unsigned required;
8968 int mode;
8969 int size;
8970 int restore_a4 = FALSE;
8971
8972 /* Compute the registers we need to pop. */
8973 regs_to_pop = 0;
8974 pops_needed = 0;
8975
8976 /* There is an assumption here, that if eh_ofs is not NULL, the
8977 normal return address will have been pushed. */
8978 if (reg_containing_return_addr == -1 || eh_ofs)
8979 {
8980 /* When we are generating a return for __builtin_eh_return,
8981 reg_containing_return_addr must specify the return regno. */
8982 if (eh_ofs && reg_containing_return_addr == -1)
8983 abort ();
8984
8985 regs_to_pop |= 1 << LR_REGNUM;
5895f793 8986 ++pops_needed;
8987 }
8988
8989 if (TARGET_BACKTRACE)
8990 {
8991 /* Restore the (ARM) frame pointer and stack pointer. */
8992 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8993 pops_needed += 2;
8994 }
8995
8996 /* If there is nothing to pop then just emit the BX instruction and
8997 return. */
8998 if (pops_needed == 0)
8999 {
9000 if (eh_ofs)
9001 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9002
9003 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9004 return;
9005 }
9006 /* Otherwise if we are not supporting interworking and we have not created
9007 a backtrace structure and the function was not entered in ARM mode then
9008 just pop the return address straight into the PC. */
9009 else if (!TARGET_INTERWORK
9010 && !TARGET_BACKTRACE
9011 && !is_called_in_ARM_mode (current_function_decl))
9012 {
9013 if (eh_ofs)
9014 {
9015 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9016 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9017 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9018 }
9019 else
9020 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9021
9022 return;
9023 }
9024
9025 /* Find out how many of the (return) argument registers we can corrupt. */
9026 regs_available_for_popping = 0;
9027
9028 /* If returning via __builtin_eh_return, the bottom three registers
9029 all contain information needed for the return. */
9030 if (eh_ofs)
9031 size = 12;
9032 else
9033 {
9034#ifdef RTX_CODE
 9035	      /* We may be able to deduce the registers used from the function's
 9036		 return value.  This is more reliable than examining
9037 regs_ever_live[] because that will be set if the register is
9038 ever used in the function, not just if the register is used
9039 to hold a return value. */
9040
9041 if (current_function_return_rtx != 0)
9042 mode = GET_MODE (current_function_return_rtx);
9043 else
9044#endif
9045 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9046
9047 size = GET_MODE_SIZE (mode);
9048
9049 if (size == 0)
9050 {
9051 /* In a void function we can use any argument register.
9052 In a function that returns a structure on the stack
9053 we can use the second and third argument registers. */
9054 if (mode == VOIDmode)
9055 regs_available_for_popping =
9056 (1 << ARG_REGISTER (1))
9057 | (1 << ARG_REGISTER (2))
9058 | (1 << ARG_REGISTER (3));
9059 else
9060 regs_available_for_popping =
9061 (1 << ARG_REGISTER (2))
9062 | (1 << ARG_REGISTER (3));
9063 }
9064 else if (size <= 4)
9065 regs_available_for_popping =
9066 (1 << ARG_REGISTER (2))
9067 | (1 << ARG_REGISTER (3));
9068 else if (size <= 8)
9069 regs_available_for_popping =
9070 (1 << ARG_REGISTER (3));
9071 }
9072
9073 /* Match registers to be popped with registers into which we pop them. */
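  /* Note that (X & - X) isolates the lowest set bit of X, so each
     iteration of the loop below pairs off one register still to be
     popped with one register available to receive it; POPS_NEEDED is
     left holding the number of registers that could not be paired.  */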
9074 for (available = regs_available_for_popping,
9075 required = regs_to_pop;
9076 required != 0 && available != 0;
9077 available &= ~(available & - available),
9078 required &= ~(required & - required))
9079 -- pops_needed;
9080
9081 /* If we have any popping registers left over, remove them. */
9082 if (available > 0)
5895f793 9083 regs_available_for_popping &= ~available;
9084
9085 /* Otherwise if we need another popping register we can use
9086 the fourth argument register. */
9087 else if (pops_needed)
9088 {
9089 /* If we have not found any free argument registers and
9090 reg a4 contains the return address, we must move it. */
9091 if (regs_available_for_popping == 0
9092 && reg_containing_return_addr == LAST_ARG_REGNUM)
9093 {
9094 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9095 reg_containing_return_addr = LR_REGNUM;
9096 }
9097 else if (size > 12)
9098 {
9099 /* Register a4 is being used to hold part of the return value,
9100 but we have dire need of a free, low register. */
9101 restore_a4 = TRUE;
9102
 9103	  asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9104 }
9105
9106 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9107 {
9108 /* The fourth argument register is available. */
9109 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9110
5895f793 9111 --pops_needed;
9112 }
9113 }
9114
9115 /* Pop as many registers as we can. */
9116 thumb_pushpop (f, regs_available_for_popping, FALSE);
9117
9118 /* Process the registers we popped. */
9119 if (reg_containing_return_addr == -1)
9120 {
9121 /* The return address was popped into the lowest numbered register. */
5895f793 9122 regs_to_pop &= ~(1 << LR_REGNUM);
9123
9124 reg_containing_return_addr =
9125 number_of_first_bit_set (regs_available_for_popping);
9126
 9127	      /* Remove this register from the mask of available registers, so that
 9128		 the return address will not be corrupted by further pops.  */
5895f793 9129 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
9130 }
9131
9132 /* If we popped other registers then handle them here. */
9133 if (regs_available_for_popping)
9134 {
9135 int frame_pointer;
9136
9137 /* Work out which register currently contains the frame pointer. */
9138 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9139
9140 /* Move it into the correct place. */
9141 asm_fprintf (f, "\tmov\t%r, %r\n",
9142 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9143
9144 /* (Temporarily) remove it from the mask of popped registers. */
9145 regs_available_for_popping &= ~(1 << frame_pointer);
9146 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
9147
9148 if (regs_available_for_popping)
9149 {
9150 int stack_pointer;
9151
9152 /* We popped the stack pointer as well,
9153 find the register that contains it. */
9154 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9155
9156 /* Move it into the stack register. */
9157 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
9158
9159 /* At this point we have popped all necessary registers, so
9160 do not worry about restoring regs_available_for_popping
9161 to its correct value:
9162
9163 assert (pops_needed == 0)
9164 assert (regs_available_for_popping == (1 << frame_pointer))
9165 assert (regs_to_pop == (1 << STACK_POINTER)) */
9166 }
9167 else
9168 {
 9169	  /* Since we have just moved the popped value into the frame
9170 pointer, the popping register is available for reuse, and
9171 we know that we still have the stack pointer left to pop. */
9172 regs_available_for_popping |= (1 << frame_pointer);
9173 }
9174 }
9175
9176 /* If we still have registers left on the stack, but we no longer have
9177 any registers into which we can pop them, then we must move the return
9178 address into the link register and make available the register that
9179 contained it. */
9180 if (regs_available_for_popping == 0 && pops_needed > 0)
9181 {
9182 regs_available_for_popping |= 1 << reg_containing_return_addr;
9183
9184 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
9185 reg_containing_return_addr);
9186
9187 reg_containing_return_addr = LR_REGNUM;
9188 }
9189
9190 /* If we have registers left on the stack then pop some more.
9191 We know that at most we will want to pop FP and SP. */
9192 if (pops_needed > 0)
9193 {
9194 int popped_into;
9195 int move_to;
9196
9197 thumb_pushpop (f, regs_available_for_popping, FALSE);
9198
9199 /* We have popped either FP or SP.
9200 Move whichever one it is into the correct register. */
9201 popped_into = number_of_first_bit_set (regs_available_for_popping);
9202 move_to = number_of_first_bit_set (regs_to_pop);
9203
9204 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
9205
5895f793 9206 regs_to_pop &= ~(1 << move_to);
d5b7b3ae 9207
5895f793 9208 --pops_needed;
9209 }
9210
9211 /* If we still have not popped everything then we must have only
9212 had one register available to us and we are now popping the SP. */
9213 if (pops_needed > 0)
9214 {
9215 int popped_into;
9216
9217 thumb_pushpop (f, regs_available_for_popping, FALSE);
9218
9219 popped_into = number_of_first_bit_set (regs_available_for_popping);
9220
9221 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
9222 /*
9223 assert (regs_to_pop == (1 << STACK_POINTER))
9224 assert (pops_needed == 1)
9225 */
9226 }
9227
9228 /* If necessary restore the a4 register. */
9229 if (restore_a4)
9230 {
9231 if (reg_containing_return_addr != LR_REGNUM)
9232 {
9233 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9234 reg_containing_return_addr = LR_REGNUM;
9235 }
9236
9237 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9238 }
9239
9240 if (eh_ofs)
9241 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9242
9243 /* Return to caller. */
9244 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9245}
9246
9247/* Emit code to push or pop registers to or from the stack. */
9248static void
9249thumb_pushpop (f, mask, push)
9250 FILE * f;
9251 int mask;
9252 int push;
9253{
9254 int regno;
9255 int lo_mask = mask & 0xFF;
9256
5895f793 9257 if (lo_mask == 0 && !push && (mask & (1 << 15)))
9258 {
9259 /* Special case. Do not generate a POP PC statement here, do it in
 9260	 thumb_exit ().  */
9261 thumb_exit (f, -1, NULL_RTX);
9262 return;
9263 }
9264
9265 fprintf (f, "\t%s\t{", push ? "push" : "pop");
9266
9267 /* Look at the low registers first. */
5895f793 9268 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
9269 {
9270 if (lo_mask & 1)
9271 {
9272 asm_fprintf (f, "%r", regno);
9273
9274 if ((lo_mask & ~1) != 0)
9275 fprintf (f, ", ");
9276 }
9277 }
9278
9279 if (push && (mask & (1 << LR_REGNUM)))
9280 {
9281 /* Catch pushing the LR. */
9282 if (mask & 0xFF)
9283 fprintf (f, ", ");
9284
9285 asm_fprintf (f, "%r", LR_REGNUM);
9286 }
9287 else if (!push && (mask & (1 << PC_REGNUM)))
9288 {
9289 /* Catch popping the PC. */
9290 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9291 {
 9292	 /* The PC is never popped directly; instead
9293 it is popped into r3 and then BX is used. */
9294 fprintf (f, "}\n");
9295
9296 thumb_exit (f, -1, NULL_RTX);
9297
9298 return;
9299 }
9300 else
9301 {
9302 if (mask & 0xFF)
9303 fprintf (f, ", ");
9304
9305 asm_fprintf (f, "%r", PC_REGNUM);
9306 }
9307 }
9308
9309 fprintf (f, "}\n");
9310}
9311\f
9312void
9313thumb_final_prescan_insn (insn)
9314 rtx insn;
9315{
d5b7b3ae 9316 if (flag_print_asm_name)
9317 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9318 INSN_ADDRESSES (INSN_UID (insn)));
9319}
9320
9321int
9322thumb_shiftable_const (val)
9323 unsigned HOST_WIDE_INT val;
9324{
9325 unsigned HOST_WIDE_INT mask = 0xff;
9326 int i;
9327
9328 if (val == 0) /* XXX */
9329 return 0;
9330
9331 for (i = 0; i < 25; i++)
9332 if ((val & (mask << i)) == val)
9333 return 1;
9334
9335 return 0;
9336}
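
/* For example, 0x1FE00 (0xFF shifted left by 9) is accepted above,
   whereas 0x101 is rejected: its set bits span nine positions and so
   cannot fit inside any single shifted 8-bit window.  */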
9337
9338/* Returns non-zero if the current function contains,
 9339	   or might contain, a far jump.  */
9340int
9341thumb_far_jump_used_p (int in_prologue)
9342{
9343 rtx insn;
9344
9345 /* This test is only important for leaf functions. */
5895f793 9346 /* assert (!leaf_function_p ()); */
9347
9348 /* If we have already decided that far jumps may be used,
9349 do not bother checking again, and always return true even if
9350 it turns out that they are not being used. Once we have made
9351 the decision that far jumps are present (and that hence the link
9352 register will be pushed onto the stack) we cannot go back on it. */
9353 if (cfun->machine->far_jump_used)
9354 return 1;
9355
9356 /* If this function is not being called from the prologue/epilogue
9357 generation code then it must be being called from the
9358 INITIAL_ELIMINATION_OFFSET macro. */
5895f793 9359 if (!in_prologue)
9360 {
9361 /* In this case we know that we are being asked about the elimination
9362 of the arg pointer register. If that register is not being used,
9363 then there are no arguments on the stack, and we do not have to
9364 worry that a far jump might force the prologue to push the link
9365 register, changing the stack offsets. In this case we can just
9366 return false, since the presence of far jumps in the function will
9367 not affect stack offsets.
9368
9369 If the arg pointer is live (or if it was live, but has now been
9370 eliminated and so set to dead) then we do have to test to see if
9371 the function might contain a far jump. This test can lead to some
 9372	 false negatives, since before reload is completed the length of
9373 branch instructions is not known, so gcc defaults to returning their
9374 longest length, which in turn sets the far jump attribute to true.
9375
9376 A false negative will not result in bad code being generated, but it
9377 will result in a needless push and pop of the link register. We
9378 hope that this does not occur too often. */
9379 if (regs_ever_live [ARG_POINTER_REGNUM])
9380 cfun->machine->arg_pointer_live = 1;
5895f793 9381 else if (!cfun->machine->arg_pointer_live)
9382 return 0;
9383 }
9384
9385 /* Check to see if the function contains a branch
9386 insn with the far jump attribute set. */
9387 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9388 {
9389 if (GET_CODE (insn) == JUMP_INSN
9390 /* Ignore tablejump patterns. */
9391 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9392 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9393 && get_attr_far_jump (insn) == FAR_JUMP_YES
9394 )
9395 {
 9396	 /* Record the fact that we have decided that
9397 the function does use far jumps. */
9398 cfun->machine->far_jump_used = 1;
9399 return 1;
9400 }
9401 }
9402
9403 return 0;
9404}
9405
9406/* Return non-zero if FUNC must be entered in ARM mode. */
9407int
9408is_called_in_ARM_mode (func)
9409 tree func;
9410{
9411 if (TREE_CODE (func) != FUNCTION_DECL)
9412 abort ();
9413
 9414	  /* Ignore the problem of functions whose address is taken.  */
9415 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9416 return TRUE;
9417
9418#ifdef ARM_PE
9419 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
9420#else
9421 return FALSE;
9422#endif
9423}
9424
9425/* The bits which aren't usefully expanded as rtl. */
400500c4 9426
cd2b33d0 9427const char *
9428thumb_unexpanded_epilogue ()
9429{
9430 int regno;
9431 int live_regs_mask = 0;
9432 int high_regs_pushed = 0;
9433 int leaf_function = leaf_function_p ();
9434 int had_to_push_lr;
9435 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9436
9437 if (return_used_this_function)
9438 return "";
9439
9440 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9441 if (regs_ever_live[regno] && !call_used_regs[regno]
9442 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9443 live_regs_mask |= 1 << regno;
9444
9445 for (regno = 8; regno < 13; regno++)
9446 {
9447 if (regs_ever_live[regno] && !call_used_regs[regno]
9448 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9449 high_regs_pushed++;
9450 }
9451
 9452	  /* The prologue may have pushed some high registers to use as
 9453	     work registers, e.g. the testsuite file:
9454 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9455 compiles to produce:
9456 push {r4, r5, r6, r7, lr}
9457 mov r7, r9
9458 mov r6, r8
9459 push {r6, r7}
9460 as part of the prolog. We have to undo that pushing here. */
9461
9462 if (high_regs_pushed)
9463 {
9464 int mask = live_regs_mask;
9465 int next_hi_reg;
9466 int size;
9467 int mode;
9468
9469#ifdef RTX_CODE
 9470	      /* We may be able to deduce the registers used from the function's return value.
 9471		 This is more reliable than examining regs_ever_live[] because that
9472 will be set if the register is ever used in the function, not just if
9473 the register is used to hold a return value. */
9474
9475 if (current_function_return_rtx != 0)
9476 mode = GET_MODE (current_function_return_rtx);
9477 else
9478#endif
9479 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9480
9481 size = GET_MODE_SIZE (mode);
9482
9483 /* Unless we are returning a type of size > 12 register r3 is
9484 available. */
9485 if (size < 13)
9486 mask |= 1 << 3;
9487
9488 if (mask == 0)
9489 /* Oh dear! We have no low registers into which we can pop
9490 high registers! */
9491 internal_error
9492 ("no low registers available for popping high registers");
9493
9494 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
9495 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9496 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
9497 break;
9498
9499 while (high_regs_pushed)
9500 {
9501 /* Find lo register(s) into which the high register(s) can
9502 be popped. */
9503 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9504 {
9505 if (mask & (1 << regno))
9506 high_regs_pushed--;
9507 if (high_regs_pushed == 0)
9508 break;
9509 }
9510
9511 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
9512
9513 /* Pop the values into the low register(s). */
9514 thumb_pushpop (asm_out_file, mask, 0);
9515
9516 /* Move the value(s) into the high registers. */
9517 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9518 {
9519 if (mask & (1 << regno))
9520 {
9521 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9522 regno);
9523
9524 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
9525 if (regs_ever_live[next_hi_reg]
9526 && !call_used_regs[next_hi_reg]
9527 && !(TARGET_SINGLE_PIC_BASE
9528 && (next_hi_reg == arm_pic_register)))
9529 break;
9530 }
9531 }
9532 }
9533 }
9534
5895f793 9535 had_to_push_lr = (live_regs_mask || !leaf_function
9536 || thumb_far_jump_used_p (1));
9537
9538 if (TARGET_BACKTRACE
9539 && ((live_regs_mask & 0xFF) == 0)
9540 && regs_ever_live [LAST_ARG_REGNUM] != 0)
9541 {
9542 /* The stack backtrace structure creation code had to
9543 push R7 in order to get a work register, so we pop
9544 it now. */
9545 live_regs_mask |= (1 << LAST_LO_REGNUM);
9546 }
9547
9548 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
9549 {
9550 if (had_to_push_lr
9551 && !is_called_in_ARM_mode (current_function_decl)
9552 && !eh_ofs)
9553 live_regs_mask |= 1 << PC_REGNUM;
9554
9555 /* Either no argument registers were pushed or a backtrace
9556 structure was created which includes an adjusted stack
9557 pointer, so just pop everything. */
9558 if (live_regs_mask)
9559 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9560
9561 if (eh_ofs)
9562 thumb_exit (asm_out_file, 2, eh_ofs);
9563 /* We have either just popped the return address into the
 9564	 PC, or it was kept in LR for the entire function, or
9565 it is still on the stack because we do not want to
9566 return by doing a pop {pc}. */
9567 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
9568 thumb_exit (asm_out_file,
9569 (had_to_push_lr
9570 && is_called_in_ARM_mode (current_function_decl)) ?
9571 -1 : LR_REGNUM, NULL_RTX);
9572 }
9573 else
9574 {
9575 /* Pop everything but the return address. */
5895f793 9576 live_regs_mask &= ~(1 << PC_REGNUM);
9577
9578 if (live_regs_mask)
9579 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9580
9581 if (had_to_push_lr)
9582 /* Get the return address into a temporary register. */
9583 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
9584
9585 /* Remove the argument registers that were pushed onto the stack. */
9586 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
9587 SP_REGNUM, SP_REGNUM,
9588 current_function_pretend_args_size);
9589
9590 if (eh_ofs)
9591 thumb_exit (asm_out_file, 2, eh_ofs);
9592 else
9593 thumb_exit (asm_out_file,
9594 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
9595 }
9596
9597 return "";
9598}
9599
9600/* Functions to save and restore machine-specific function data. */
9601
9602static void
9603arm_mark_machine_status (p)
9604 struct function * p;
9605{
6d3d9133 9606 machine_function *machine = p->machine;
d5b7b3ae 9607
f7a80099 9608 if (machine)
9e2f7ec7 9609 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
9610}
9611
9612static void
9613arm_init_machine_status (p)
9614 struct function * p;
9615{
9616 p->machine =
9617 (machine_function *) xcalloc (1, sizeof (machine_function));
9618
 9619#if ARM_FT_UNKNOWN != 0
9620 ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
9621#endif
9622}
9623
9624static void
9625arm_free_machine_status (p)
9626 struct function * p;
9627{
9628 if (p->machine)
9629 {
9630 free (p->machine);
9631 p->machine = NULL;
9632 }
9633}
9634
9635/* Return an RTX indicating where the return address to the
9636 calling function can be found. */
9637rtx
9638arm_return_addr (count, frame)
9639 int count;
9640 rtx frame ATTRIBUTE_UNUSED;
9641{
9642 if (count != 0)
9643 return NULL_RTX;
9644
9645 if (TARGET_APCS_32)
9646 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
9647 else
d5b7b3ae 9648 {
9e2f7ec7 9649 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
d5b7b3ae 9650 GEN_INT (RETURN_ADDR_MASK26));
9e2f7ec7 9651 return get_func_hard_reg_initial_val (cfun, lr);
d5b7b3ae 9652 }
9653}
9654
9655/* Do anything needed before RTL is emitted for each function. */
9656void
9657arm_init_expanders ()
9658{
9659 /* Arrange to initialize and mark the machine per-function status. */
9660 init_machine_status = arm_init_machine_status;
9661 mark_machine_status = arm_mark_machine_status;
f7a80099 9662 free_machine_status = arm_free_machine_status;
9663}
9664
9665/* Generate the rest of a function's prologue. */
9666void
9667thumb_expand_prologue ()
9668{
9669 HOST_WIDE_INT amount = (get_frame_size ()
9670 + current_function_outgoing_args_size);
9671 unsigned long func_type;
9672
9673 func_type = arm_current_func_type ();
9674
9675 /* Naked functions don't have prologues. */
6d3d9133 9676 if (IS_NAKED (func_type))
9677 return;
9678
9679 if (IS_INTERRUPT (func_type))
9680 {
9681 error ("Interrupt Service Routines cannot be coded in Thumb mode.");
9682 return;
9683 }
9684
9685 if (frame_pointer_needed)
9686 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
9687
9688 if (amount)
9689 {
9690 amount = ROUND_UP (amount);
9691
9692 if (amount < 512)
9693 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5895f793 9694 GEN_INT (-amount)));
9695 else
9696 {
9697 int regno;
9698 rtx reg;
9699
9700 /* The stack decrement is too big for an immediate value in a single
9701 insn. In theory we could issue multiple subtracts, but after
9702 three of them it becomes more space efficient to place the full
9703 value in the constant pool and load into a register. (Also the
9704 ARM debugger really likes to see only one stack decrement per
9705 function). So instead we look for a scratch register into which
9706 we can load the decrement, and then we subtract this from the
9707 stack pointer. Unfortunately on the thumb the only available
9708 scratch registers are the argument registers, and we cannot use
9709 these as they may hold arguments to the function. Instead we
9710 attempt to locate a call preserved register which is used by this
9711 function. If we can find one, then we know that it will have
9712 been pushed at the start of the prologue and so we can corrupt
9713 it now. */
9714 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
9715 if (regs_ever_live[regno]
9716 && !call_used_regs[regno] /* Paranoia */
9717 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
9718 && !(frame_pointer_needed
9719 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
9720 break;
9721
9722 if (regno > LAST_LO_REGNUM) /* Very unlikely */
9723 {
9724 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
9725
 9726	      /* Choose an arbitrary, non-argument low register.  */
9727 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
9728
9729 /* Save it by copying it into a high, scratch register. */
9730 emit_insn (gen_movsi (spare, reg));
9731
9732 /* Decrement the stack. */
5895f793 9733 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9734 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9735 reg));
9736
9737 /* Restore the low register's original value. */
9738 emit_insn (gen_movsi (reg, spare));
9739
9740 /* Emit a USE of the restored scratch register, so that flow
9741 analysis will not consider the restore redundant. The
9742 register won't be used again in this function and isn't
9743 restored by the epilogue. */
9744 emit_insn (gen_rtx_USE (VOIDmode, reg));
9745 }
9746 else
9747 {
9748 reg = gen_rtx (REG, SImode, regno);
9749
5895f793 9750 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9751 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9752 reg));
9753 }
9754 }
9755 }
9756
9757 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9758 emit_insn (gen_blockage ());
9759}
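
/* As an illustrative sketch (the register choice and literal-pool
   label below are hypothetical), a 1024-byte frame handled by the
   large-decrement path above, with r4 pushed by the prologue, emits
   the equivalent of:

	ldr	r4, .Lpool	@ .Lpool holds -1024
	add	sp, sp, r4
*/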
9760
9761void
9762thumb_expand_epilogue ()
9763{
9764 HOST_WIDE_INT amount = (get_frame_size ()
9765 + current_function_outgoing_args_size);
9766
 9767	  /* Naked functions don't have epilogues.  */
9768 if (IS_NAKED (arm_current_func_type ()))
9769 return;
9770
9771 if (frame_pointer_needed)
9772 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
9773 else if (amount)
9774 {
9775 amount = ROUND_UP (amount);
9776
9777 if (amount < 512)
9778 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9779 GEN_INT (amount)));
9780 else
9781 {
9782 /* r3 is always free in the epilogue. */
9783 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
9784
9785 emit_insn (gen_movsi (reg, GEN_INT (amount)));
9786 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
9787 }
9788 }
9789
9790 /* Emit a USE (stack_pointer_rtx), so that
9791 the stack adjustment will not be deleted. */
9792 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9793
9794 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9795 emit_insn (gen_blockage ());
9796}
9797
9798void
9799output_thumb_prologue (f)
9800 FILE * f;
9801{
9802 int live_regs_mask = 0;
9803 int high_regs_pushed = 0;
9804 int regno;
9805
6d3d9133 9806 if (IS_NAKED (arm_current_func_type ()))
9807 return;
9808
9809 if (is_called_in_ARM_mode (current_function_decl))
9810 {
9811 const char * name;
9812
9813 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9814 abort ();
9815 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9816 abort ();
9817 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9818
9819 /* Generate code sequence to switch us into Thumb mode. */
9820 /* The .code 32 directive has already been emitted by
6d77b53e 9821 ASM_DECLARE_FUNCTION_NAME. */
9822 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9823 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9824
9825 /* Generate a label, so that the debugger will notice the
9826 change in instruction sets. This label is also used by
9827 the assembler to bypass the ARM code when this function
9828 is called from a Thumb encoded function elsewhere in the
9829 same file. Hence the definition of STUB_NAME here must
9830 agree with the definition in gas/config/tc-arm.c */
9831
9832#define STUB_NAME ".real_start_of"
9833
9834 asm_fprintf (f, "\t.code\t16\n");
9835#ifdef ARM_PE
9836 if (arm_dllexport_name_p (name))
e5951263 9837 name = arm_strip_name_encoding (name);
9838#endif
9839 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9840 asm_fprintf (f, "\t.thumb_func\n");
9841 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9842 }
9843
9844 if (current_function_pretend_args_size)
9845 {
6d3d9133 9846 if (current_function_anonymous_args)
9847 {
9848 int num_pushes;
9849
9850 asm_fprintf (f, "\tpush\t{");
9851
9852 num_pushes = NUM_INTS (current_function_pretend_args_size);
9853
9854 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9855 regno <= LAST_ARG_REGNUM;
5895f793 9856 regno++)
9857 asm_fprintf (f, "%r%s", regno,
9858 regno == LAST_ARG_REGNUM ? "" : ", ");
9859
9860 asm_fprintf (f, "}\n");
9861 }
9862 else
9863 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9864 SP_REGNUM, SP_REGNUM,
9865 current_function_pretend_args_size);
9866 }
9867
9868 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9869 if (regs_ever_live[regno] && !call_used_regs[regno]
9870 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9871 live_regs_mask |= 1 << regno;
9872
5895f793 9873 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
9874 live_regs_mask |= 1 << LR_REGNUM;
9875
9876 if (TARGET_BACKTRACE)
9877 {
9878 int offset;
9879 int work_register = 0;
9880 int wr;
9881
9882 /* We have been asked to create a stack backtrace structure.
9883 The code looks like this:
9884
9885 0 .align 2
9886 0 func:
9887 0 sub SP, #16 Reserve space for 4 registers.
9888 2 push {R7} Get a work register.
9889 4 add R7, SP, #20 Get the stack pointer before the push.
9890 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9891 8 mov R7, PC Get hold of the start of this code plus 12.
9892 10 str R7, [SP, #16] Store it.
9893 12 mov R7, FP Get hold of the current frame pointer.
9894 14 str R7, [SP, #4] Store it.
9895 16 mov R7, LR Get hold of the current return address.
9896 18 str R7, [SP, #12] Store it.
9897 20 add R7, SP, #16 Point at the start of the backtrace structure.
9898 22 mov FP, R7 Put this value into the frame pointer. */
9899
9900 if ((live_regs_mask & 0xFF) == 0)
9901 {
9902 /* See if the a4 register is free. */
9903
9904 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9905 work_register = LAST_ARG_REGNUM;
9906 else /* We must push a register of our own */
9907 live_regs_mask |= (1 << LAST_LO_REGNUM);
9908 }
9909
9910 if (work_register == 0)
9911 {
9912 /* Select a register from the list that will be pushed to
9913 use as our work register. */
9914 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9915 if ((1 << work_register) & live_regs_mask)
9916 break;
9917 }
9918
9919 asm_fprintf
9920 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9921 SP_REGNUM, SP_REGNUM);
9922
9923 if (live_regs_mask)
9924 thumb_pushpop (f, live_regs_mask, 1);
9925
9926 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9927 if (wr & live_regs_mask)
9928 offset += 4;
9929
9930 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9931 offset + 16 + current_function_pretend_args_size);
9932
9933 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9934 offset + 4);
9935
9936 /* Make sure that the instruction fetching the PC is in the right place
9937 to calculate "start of backtrace creation code + 12". */
9938 if (live_regs_mask)
9939 {
9940 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9941 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9942 offset + 12);
9943 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9944 ARM_HARD_FRAME_POINTER_REGNUM);
9945 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9946 offset);
9947 }
9948 else
9949 {
9950 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9951 ARM_HARD_FRAME_POINTER_REGNUM);
9952 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9953 offset);
9954 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9955 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9956 offset + 12);
9957 }
9958
9959 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9960 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9961 offset + 8);
9962 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9963 offset + 12);
9964 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9965 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9966 }
9967 else if (live_regs_mask)
9968 thumb_pushpop (f, live_regs_mask, 1);
9969
9970 for (regno = 8; regno < 13; regno++)
9971 {
9972 if (regs_ever_live[regno] && !call_used_regs[regno]
9973 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9974 high_regs_pushed++;
9975 }
9976
9977 if (high_regs_pushed)
9978 {
9979 int pushable_regs = 0;
9980 int mask = live_regs_mask & 0xff;
9981 int next_hi_reg;
9982
9983 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9984 {
9985 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9986 && !(TARGET_SINGLE_PIC_BASE
9987 && (next_hi_reg == arm_pic_register)))
9988 break;
9989 }
9990
9991 pushable_regs = mask;
9992
9993 if (pushable_regs == 0)
9994 {
9995 /* Desperation time -- this probably will never happen. */
9996 if (regs_ever_live[LAST_ARG_REGNUM]
5895f793 9997 || !call_used_regs[LAST_ARG_REGNUM])
9998 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9999 mask = 1 << LAST_ARG_REGNUM;
10000 }
10001
10002 while (high_regs_pushed > 0)
10003 {
10004 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
10005 {
10006 if (mask & (1 << regno))
10007 {
10008 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
10009
5895f793 10010 high_regs_pushed--;
10011
10012 if (high_regs_pushed)
10013 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
10014 next_hi_reg--)
10015 {
10016 if (regs_ever_live[next_hi_reg]
10017 && !call_used_regs[next_hi_reg]
10018 && !(TARGET_SINGLE_PIC_BASE
10019 && (next_hi_reg == arm_pic_register)))
10020 break;
10021 }
10022 else
10023 {
5895f793 10024 mask &= ~((1 << regno) - 1);
10025 break;
10026 }
10027 }
10028 }
10029
10030 thumb_pushpop (f, mask, 1);
10031 }
10032
10033 if (pushable_regs == 0
10034 && (regs_ever_live[LAST_ARG_REGNUM]
5895f793 10035 || !call_used_regs[LAST_ARG_REGNUM]))
10036 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10037 }
10038}
10039
10040/* Handle the case of a double word load into a low register from
10041 a computed memory address. The computed address may involve a
10042 register which is overwritten by the load. */
10043
cd2b33d0 10044const char *
d5b7b3ae 10045thumb_load_double_from_address (operands)
400500c4 10046 rtx *operands;
10047{
10048 rtx addr;
10049 rtx base;
10050 rtx offset;
10051 rtx arg1;
10052 rtx arg2;
10053
10054 if (GET_CODE (operands[0]) != REG)
400500c4 10055 abort ();
10056
10057 if (GET_CODE (operands[1]) != MEM)
400500c4 10058 abort ();
10059
10060 /* Get the memory address. */
10061 addr = XEXP (operands[1], 0);
10062
10063 /* Work out how the memory address is computed. */
10064 switch (GET_CODE (addr))
10065 {
10066 case REG:
10067 operands[2] = gen_rtx (MEM, SImode,
10068 plus_constant (XEXP (operands[1], 0), 4));
10069
10070 if (REGNO (operands[0]) == REGNO (addr))
10071 {
10072 output_asm_insn ("ldr\t%H0, %2", operands);
10073 output_asm_insn ("ldr\t%0, %1", operands);
10074 }
10075 else
10076 {
10077 output_asm_insn ("ldr\t%0, %1", operands);
10078 output_asm_insn ("ldr\t%H0, %2", operands);
10079 }
10080 break;
10081
10082 case CONST:
10083 /* Compute <address> + 4 for the high order load. */
10084 operands[2] = gen_rtx (MEM, SImode,
10085 plus_constant (XEXP (operands[1], 0), 4));
10086
10087 output_asm_insn ("ldr\t%0, %1", operands);
10088 output_asm_insn ("ldr\t%H0, %2", operands);
10089 break;
10090
10091 case PLUS:
10092 arg1 = XEXP (addr, 0);
10093 arg2 = XEXP (addr, 1);
10094
10095 if (CONSTANT_P (arg1))
10096 base = arg2, offset = arg1;
10097 else
10098 base = arg1, offset = arg2;
10099
10100 if (GET_CODE (base) != REG)
400500c4 10101 abort ();
10102
10103 /* Catch the case of <address> = <reg> + <reg> */
10104 if (GET_CODE (offset) == REG)
10105 {
10106 int reg_offset = REGNO (offset);
10107 int reg_base = REGNO (base);
10108 int reg_dest = REGNO (operands[0]);
10109
10110 /* Add the base and offset registers together into the
10111 higher destination register. */
10112 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
10113 reg_dest + 1, reg_base, reg_offset);
10114
10115 /* Load the lower destination register from the address in
10116 the higher destination register. */
10117 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
10118 reg_dest, reg_dest + 1);
10119
10120 /* Load the higher destination register from its own address
10121 plus 4. */
10122 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
10123 reg_dest + 1, reg_dest + 1);
10124 }
10125 else
10126 {
10127 /* Compute <address> + 4 for the high order load. */
10128 operands[2] = gen_rtx (MEM, SImode,
10129 plus_constant (XEXP (operands[1], 0), 4));
10130
10131 /* If the computed address is held in the low order register
10132 then load the high order register first, otherwise always
10133 load the low order register first. */
10134 if (REGNO (operands[0]) == REGNO (base))
10135 {
10136 output_asm_insn ("ldr\t%H0, %2", operands);
10137 output_asm_insn ("ldr\t%0, %1", operands);
10138 }
10139 else
10140 {
10141 output_asm_insn ("ldr\t%0, %1", operands);
10142 output_asm_insn ("ldr\t%H0, %2", operands);
10143 }
10144 }
10145 break;
10146
10147 case LABEL_REF:
10148 /* With no registers to worry about we can just load the value
10149 directly. */
10150 operands[2] = gen_rtx (MEM, SImode,
10151 plus_constant (XEXP (operands[1], 0), 4));
10152
10153 output_asm_insn ("ldr\t%H0, %2", operands);
10154 output_asm_insn ("ldr\t%0, %1", operands);
10155 break;
10156
10157 default:
400500c4 10158 abort ();
10159 break;
10160 }
10161
10162 return "";
10163}
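
/* For example (a sketch of the REG case above), a doubleword load
   from [r0] into the pair r0/r1 emits:

	ldr	r1, [r0, #4]
	ldr	r0, [r0]

   fetching the high word first so that the base register is not
   clobbered before the second load.  */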
10164
10165
cd2b33d0 10166const char *
10167thumb_output_move_mem_multiple (n, operands)
10168 int n;
10169 rtx * operands;
10170{
10171 rtx tmp;
10172
10173 switch (n)
10174 {
10175 case 2:
ca356f3a 10176 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10177 {
10178 tmp = operands[4];
10179 operands[4] = operands[5];
10180 operands[5] = tmp;
d5b7b3ae 10181 }
10182 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10183 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
10184 break;
10185
10186 case 3:
ca356f3a 10187 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10188 {
10189 tmp = operands[4];
10190 operands[4] = operands[5];
10191 operands[5] = tmp;
d5b7b3ae 10192 }
ca356f3a 10193 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 10194 {
10195 tmp = operands[5];
10196 operands[5] = operands[6];
10197 operands[6] = tmp;
d5b7b3ae 10198 }
ca356f3a 10199 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10200 {
10201 tmp = operands[4];
10202 operands[4] = operands[5];
10203 operands[5] = tmp;
10204 }
10205
10206 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10207 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
10208 break;
10209
10210 default:
10211 abort ();
10212 }
10213
10214 return "";
10215}
10216
 10217	/* Routines for generating RTL.  */
10218
10219void
10220thumb_expand_movstrqi (operands)
10221 rtx * operands;
10222{
10223 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10224 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10225 HOST_WIDE_INT len = INTVAL (operands[2]);
10226 HOST_WIDE_INT offset = 0;
10227
10228 while (len >= 12)
10229 {
ca356f3a 10230 emit_insn (gen_movmem12b (out, in, out, in));
10231 len -= 12;
10232 }
10233
10234 if (len >= 8)
10235 {
ca356f3a 10236 emit_insn (gen_movmem8b (out, in, out, in));
10237 len -= 8;
10238 }
10239
10240 if (len >= 4)
10241 {
10242 rtx reg = gen_reg_rtx (SImode);
10243 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10244 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10245 len -= 4;
10246 offset += 4;
10247 }
10248
10249 if (len >= 2)
10250 {
10251 rtx reg = gen_reg_rtx (HImode);
10252 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10253 plus_constant (in, offset))));
10254 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10255 reg));
10256 len -= 2;
10257 offset += 2;
10258 }
10259
10260 if (len)
10261 {
10262 rtx reg = gen_reg_rtx (QImode);
10263 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10264 plus_constant (in, offset))));
10265 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10266 reg));
10267 }
10268}
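
/* For example, a 23-byte copy is decomposed above into a 12-byte and
   an 8-byte LDMIA/STMIA block move (each advancing the pointers),
   followed by a halfword transfer at offset 0 and a byte transfer at
   offset 2 of the remainder.  */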
10269
10270int
10271thumb_cmp_operand (op, mode)
10272 rtx op;
10273 enum machine_mode mode;
10274{
10275 return ((GET_CODE (op) == CONST_INT
10276 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10277 || register_operand (op, mode));
10278}
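
/* The bound of 256 reflects the Thumb CMP instruction's 8-bit
   immediate field: (const_int 255) is a valid comparison operand,
   while (const_int 256) must first be loaded into a register.  */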
10279
cd2b33d0 10280static const char *
10281thumb_condition_code (x, invert)
10282 rtx x;
10283 int invert;
10284{
cd2b33d0 10285 static const char * conds[] =
10286 {
10287 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10288 "hi", "ls", "ge", "lt", "gt", "le"
10289 };
10290 int val;
10291
10292 switch (GET_CODE (x))
10293 {
10294 case EQ: val = 0; break;
10295 case NE: val = 1; break;
10296 case GEU: val = 2; break;
10297 case LTU: val = 3; break;
10298 case GTU: val = 8; break;
10299 case LEU: val = 9; break;
10300 case GE: val = 10; break;
10301 case LT: val = 11; break;
10302 case GT: val = 12; break;
10303 case LE: val = 13; break;
10304 default:
10305 abort ();
10306 }
10307
10308 return conds[val ^ invert];
10309}
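
/* The table above is laid out so that XORing an index with 1 yields
   the logically inverted condition: for a GE comparison the function
   returns "ge" (index 10), or "lt" (index 11) when INVERT is set.  */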
10310
10311/* Handle storing a half-word to memory during reload. */
10312void
10313thumb_reload_out_hi (operands)
10314 rtx * operands;
10315{
10316 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10317}
10318
 10319	/* Handle loading a half-word from memory during reload.  */
10320void
10321thumb_reload_in_hi (operands)
10322 rtx * operands ATTRIBUTE_UNUSED;
10323{
10324 abort ();
10325}
10326
10327/* Return the length of a function name prefix
10328 that starts with the character 'c'. */
10329static int
10330arm_get_strip_length (char c)
10331{
10332 switch (c)
10333 {
10334 ARM_NAME_ENCODING_LENGTHS
10335 default: return 0;
10336 }
10337}
10338
10339/* Return a pointer to a function's name with any
10340 and all prefix encodings stripped from it. */
10341const char *
10342arm_strip_name_encoding (const char * name)
10343{
10344 int skip;
10345
10346 while ((skip = arm_get_strip_length (* name)))
10347 name += skip;
10348
10349 return name;
10350}
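
/* For instance, on a target whose ARM_NAME_ENCODING_LENGTHS entry
   strips a one-character '*' prefix (a hypothetical encoding),
   arm_strip_name_encoding ("**foo") returns "foo", consuming one
   prefix per iteration of the loop above.  */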
10351
2b835d68 10352#ifdef AOF_ASSEMBLER
6354dc9b 10353/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 10354
10355rtx aof_pic_label = NULL_RTX;
10356struct pic_chain
10357{
10358 struct pic_chain * next;
10359 char * symname;
10360};
10361
62b10bbc 10362static struct pic_chain * aof_pic_chain = NULL;
10363
10364rtx
10365aof_pic_entry (x)
10366 rtx x;
10367{
62b10bbc 10368 struct pic_chain ** chainp;
10369 int offset;
10370
10371 if (aof_pic_label == NULL_RTX)
10372 {
10373 /* We mark this here and not in arm_add_gc_roots() to avoid
10374 polluting even more code with ifdefs, and because it never
10375 contains anything useful until we assign to it here. */
5895f793 10376 ggc_add_rtx_root (&aof_pic_label, 1);
43cffd11 10377 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
10378 }
10379
10380 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10381 offset += 4, chainp = &(*chainp)->next)
10382 if ((*chainp)->symname == XSTR (x, 0))
10383 return plus_constant (aof_pic_label, offset);
10384
10385 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10386 (*chainp)->next = NULL;
10387 (*chainp)->symname = XSTR (x, 0);
10388 return plus_constant (aof_pic_label, offset);
10389}
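
/* For example, the first three distinct symbols entered here are
   assigned offsets 0, 4 and 8 from aof_pic_label; entering a symbol
   a second time returns its original offset instead of growing the
   chain.  */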
10390
10391void
10392aof_dump_pic_table (f)
62b10bbc 10393 FILE * f;
32de079a 10394{
62b10bbc 10395 struct pic_chain * chain;
10396
10397 if (aof_pic_chain == NULL)
10398 return;
10399
10400 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10401 PIC_OFFSET_TABLE_REGNUM,
10402 PIC_OFFSET_TABLE_REGNUM);
10403 fputs ("|x$adcons|\n", f);
10404
10405 for (chain = aof_pic_chain; chain; chain = chain->next)
10406 {
10407 fputs ("\tDCD\t", f);
10408 assemble_name (f, chain->symname);
10409 fputs ("\n", f);
10410 }
10411}
10412
10413int arm_text_section_count = 1;
10414
10415char *
84ed5e79 10416aof_text_section ()
10417{
10418 static char buf[100];
10419 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10420 arm_text_section_count++);
10421 if (flag_pic)
10422 strcat (buf, ", PIC, REENTRANT");
10423 return buf;
10424}
10425
10426static int arm_data_section_count = 1;
10427
10428char *
10429aof_data_section ()
10430{
10431 static char buf[100];
10432 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10433 return buf;
10434}
10435
10436/* The AOF assembler is religiously strict about declarations of
10437 imported and exported symbols, so that it is impossible to declare
956d6950 10438 a function as imported near the beginning of the file, and then to
10439 export it later on. It is, however, possible to delay the decision
10440 until all the functions in the file have been compiled. To get
10441 around this, we maintain a list of the imports and exports, and
10442 delete from it any that are subsequently defined. At the end of
10443 compilation we spit the remainder of the list out before the END
10444 directive. */
10445
10446struct import
10447{
10448 struct import * next;
10449 char * name;
10450};
10451
62b10bbc 10452static struct import * imports_list = NULL;
10453
10454void
10455aof_add_import (name)
62b10bbc 10456 char * name;
2b835d68 10457{
62b10bbc 10458 struct import * new;
10459
10460 for (new = imports_list; new; new = new->next)
10461 if (new->name == name)
10462 return;
10463
10464 new = (struct import *) xmalloc (sizeof (struct import));
10465 new->next = imports_list;
10466 imports_list = new;
10467 new->name = name;
10468}
10469
10470void
10471aof_delete_import (name)
62b10bbc 10472 char * name;
2b835d68 10473{
62b10bbc 10474 struct import ** old;
10475
10476 for (old = &imports_list; *old; old = & (*old)->next)
10477 {
10478 if ((*old)->name == name)
10479 {
10480 *old = (*old)->next;
10481 return;
10482 }
10483 }
10484}
10485
10486int arm_main_function = 0;
10487
10488void
10489aof_dump_imports (f)
62b10bbc 10490 FILE * f;
10491{
10492 /* The AOF assembler needs this to cause the startup code to be extracted
 10493	     from the library.  Bringing in __main causes the whole thing to work
10494 automagically. */
10495 if (arm_main_function)
10496 {
10497 text_section ();
10498 fputs ("\tIMPORT __main\n", f);
10499 fputs ("\tDCD __main\n", f);
10500 }
10501
10502 /* Now dump the remaining imports. */
10503 while (imports_list)
10504 {
10505 fprintf (f, "\tIMPORT\t");
10506 assemble_name (f, imports_list->name);
10507 fputc ('\n', f);
10508 imports_list = imports_list->next;
10509 }
10510}
10511#endif /* AOF_ASSEMBLER */