/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "tm_p.h"

/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long
#define Ccstar const char *

/* Forward function declarations.  */
static void   arm_add_gc_roots PARAMS ((void));
static int    arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static Ulong  bit_count PARAMS ((signed int));
static int    const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int    eliminate_lr2ip PARAMS ((rtx *));
static rtx    emit_multi_reg_push PARAMS ((int));
static rtx    emit_sfm PARAMS ((int, int));
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void   init_fpa_table PARAMS ((void));
static Hint   int_log2 PARAMS ((Hint));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void   print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode  select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static void   arm_init_machine_status PARAMS ((struct function *));
static void   arm_mark_machine_status PARAMS ((struct function *));
static void   arm_free_machine_status PARAMS ((struct function *));
static int    number_of_first_bit_set PARAMS ((int));
static void   replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void   thumb_exit PARAMS ((FILE *, int, rtx));
static void   thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx    is_jump_table PARAMS ((rtx));
static Hint   get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void   assign_minipool_offsets PARAMS ((Mfix *));
static void   arm_print_value PARAMS ((FILE *, rtx));
static void   dump_minipool PARAMS ((rtx));
static int    arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void   push_minipool_barrier PARAMS ((rtx, Hint));
static void   push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void   note_invalid_constants PARAMS ((rtx, Hint));
static int    current_file_function_operand PARAMS ((rtx));
static Ulong  arm_compute_save_reg_mask PARAMS ((void));
static Ulong  arm_isr_value PARAMS ((tree));
static Ulong  arm_compute_func_type PARAMS ((void));
\f
#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar

/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free  free

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in? 26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)   /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)   /* Fast multiply.  */
#define FL_MODE26     (1 << 2)   /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)   /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)   /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)   /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)   /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)   /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)   /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)   /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)  /* XScale.  */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must setup `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",        FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",        FL_MODE26 | FL_MODE32 },
  {"arm720",        FL_MODE26 | FL_MODE32 },
  {"arm710c",       FL_MODE26 | FL_MODE32 },
  {"arm7100",       FL_MODE26 | FL_MODE32 },
  {"arm7500",       FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"xscale",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_XSCALE | FL_ARCH5 | FL_ARCH5E },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name         processors  */
  { NULL,       "-mcpu=",    all_cores },
  { NULL,       "-march=",   all_architectures },
  { NULL,       "-mtune=",   all_cores }
};
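
/* For example (illustrative): given "-mcpu=strongarm -mtune=arm9" on the
   command line, the option machinery leaves arm_select[0].string pointing
   at "strongarm" and arm_select[2].string at "arm9"; arm_override_options
   below then matches those names against the all_cores table to obtain
   insn_flags and tune_flags.  */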

/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      /* Clear the least significant bit that is set.  */
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
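
/* For example, bit_count (0x50001) loops three times, clearing bit 0,
   then bit 16, then bit 18, and returns 3; the loop runs once per set
   bit rather than once per bit position.  */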

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int        sought;
      static struct cpu_default
      {
        int          cpu;
        const char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switch that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int        current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32" );
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions.");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && !TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                       && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
\f
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *  arg;
  unsigned long return_value;
}
isr_attribute_arg;

static isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};

/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  isr_attribute_arg * ptr;
  const char *        arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
    if (strcmp (arg, ptr->arg) == 0)
      return ptr->return_value;

  /* An unrecognised interrupt type.  */
  return ARM_FT_UNKNOWN;
}
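
/* For example, a handler declared in user code as

     void timer_handler (void) __attribute__ ((interrupt ("IRQ")));

   reaches this function with ARGUMENT being the STRING_CST "IRQ", so
   arm_isr_value returns ARM_FT_ISR.  The handler name is illustrative;
   any of the strings in isr_attribute_args above may be used.  */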

/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_MACHINE_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
\f
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type = arm_current_func_type ();

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  /* Naked functions, volatile functions and interrupt
     functions all need special consideration.  */
  if (func_type & (ARM_FT_INTERRUPT | ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && !call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
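
/* When this predicate holds, the epilogue can collapse to a single
   instruction.  For example, a frameless leaf function that saves no
   registers can (in 32-bit mode) return with just

        mov     pc, lr

   whereas a function that pushed registers needs a multi-register pop
   first, so use_return_insn returns 0 for it.  */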

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~HOST_UINT (0xffffffff)) != 0
      && ((i & ~HOST_UINT (0xffffffff))
          != ((~HOST_UINT (0))
              & ~HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask = (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                            >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    } while (mask != ~HOST_UINT (0xFF));

  return FALSE;
}
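
/* An ARM data-processing immediate is an 8-bit value rotated right by an
   even amount, which is what the loop above tests by sliding the 8-bit
   window two bits at a time.  For example, 0xFF, 0x3FC (0xFF << 2) and
   0xFF000000 are all valid immediates, while 0x101 and 0x102 are not and
   must be synthesized from several instructions.  */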

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          /* Extract an 8-bit chunk (aligned on a 2-bit boundary), clear
             it from the remainder and count one insn for it.  */
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
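
/* For example, count_insns_for_constant (0x00FF00FF, 0) finds two 8-bit
   chunks (0x00FF0000 and 0x000000FF) and returns 2, which is the number
   of instructions arm_gen_constant below would need for that value.  */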

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0 */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
                                       << (32 - clear_sign_bit_copies))
                                      & HOST_UINT (0xffffffff));

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }
  return insns;
}
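
/* As an example of the chunked emission above: synthesizing 0x00FF00FF
   with code == SET comes out as two instructions, a MOV of one 8-bit
   chunk followed by an ADD of the other, since each chunk is a valid
   rotated immediate.  A value such as 0xFFFFFFFE instead takes the
   inversion path and is handled by the single-insn early exit
   (at assembly level, mvn rd, #1).  */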

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~(HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
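
/* For example, (x > 0xFFFF) cannot use 0xFFFF directly, since it is not
   a valid rotated immediate; this routine rewrites it as (x >= 0x10000),
   and 0x10000 loads in a single instruction.  */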
bd9c7e23 1659
f5a1b0d2
NC
1660/* Decide whether a type should be returned in memory (true)
1661 or in a register (false). This is called by the macro
1662 RETURN_IN_MEMORY. */
2b835d68
RE
1663int
1664arm_return_in_memory (type)
1665 tree type;
1666{
5895f793 1667 if (!AGGREGATE_TYPE_P (type))
9e291dbe 1668 /* All simple types are returned in registers. */
d7d01975 1669 return 0;
d5b7b3ae
RE
1670
1671 /* For the arm-wince targets we choose to be compitable with Microsoft's
1672 ARM and Thumb compilers, which always return aggregates in memory. */
1673#ifndef ARM_WINCE
1674
d7d01975 1675 if (int_size_in_bytes (type) > 4)
9e291dbe 1676 /* All structures/unions bigger than one word are returned in memory. */
d7d01975 1677 return 1;
d5b7b3ae 1678
d7d01975 1679 if (TREE_CODE (type) == RECORD_TYPE)
2b835d68
RE
1680 {
1681 tree field;
1682
3a2ea258
RE
1683 /* For a struct the APCS says that we only return in a register
1684 if the type is 'integer like' and every addressable element
1685 has an offset of zero. For practical purposes this means
1686 that the structure can have at most one non bit-field element
1687 and that this element must be the first one in the structure. */
1688
f5a1b0d2
NC
1689 /* Find the first field, ignoring non FIELD_DECL things which will
1690 have been created by C++. */
1691 for (field = TYPE_FIELDS (type);
1692 field && TREE_CODE (field) != FIELD_DECL;
1693 field = TREE_CHAIN (field))
1694 continue;
1695
1696 if (field == NULL)
9e291dbe 1697 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
f5a1b0d2 1698
d5b7b3ae
RE
1699 /* Check that the first field is valid for returning in a register. */
1700
1701 /* ... Floats are not allowed. */
9e291dbe 1702 if (FLOAT_TYPE_P (TREE_TYPE (field)))
3a2ea258
RE
1703 return 1;
1704
d5b7b3ae
RE
1705 /* ... Aggregates that are not themselves valid for returning in
1706 a register are not allowed. */
9e291dbe 1707 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
3a2ea258 1708 return 1;
6f7ebcbb 1709
3a2ea258
RE
1710 /* Now check the remaining fields, if any. Only bitfields are allowed,
1711 since they are not addressable. */
f5a1b0d2
NC
1712 for (field = TREE_CHAIN (field);
1713 field;
1714 field = TREE_CHAIN (field))
1715 {
1716 if (TREE_CODE (field) != FIELD_DECL)
1717 continue;
1718
5895f793 1719 if (!DECL_BIT_FIELD_TYPE (field))
f5a1b0d2
NC
1720 return 1;
1721 }
2b835d68
RE
1722
1723 return 0;
1724 }
d7d01975
NC
1725
1726 if (TREE_CODE (type) == UNION_TYPE)
2b835d68
RE
1727 {
1728 tree field;
1729
1730 /* Unions can be returned in registers if every element is
1731 integral, or can be returned in an integer register. */
f5a1b0d2
NC
1732 for (field = TYPE_FIELDS (type);
1733 field;
1734 field = TREE_CHAIN (field))
2b835d68 1735 {
f5a1b0d2
NC
1736 if (TREE_CODE (field) != FIELD_DECL)
1737 continue;
1738
6cc8c0b3
NC
1739 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1740 return 1;
1741
f5a1b0d2 1742 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
2b835d68
RE
1743 return 1;
1744 }
f5a1b0d2 1745
2b835d68
RE
1746 return 0;
1747 }
d5b7b3ae 1748#endif /* not ARM_WINCE */
f5a1b0d2 1749
d5b7b3ae 1750 /* Return all other types in memory. */
2b835d68
RE
1751 return 1;
1752}
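/* Two illustrative cases for the non-WinCE rules above (the type names
   are hypothetical; only the shapes matter):

     struct s1 { int i; };    -- one word and 'integer like':
                                 returned in a register
     struct s2 { float f; };  -- also one word, but its first field
                                 is a float: returned in memory  */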
1753
82e9d970
PB
1754/* Initialize a variable CUM of type CUMULATIVE_ARGS
1755 for a call to a function whose data type is FNTYPE.
1756 For a library call, FNTYPE is NULL. */
1757void
1758arm_init_cumulative_args (pcum, fntype, libname, indirect)
1759 CUMULATIVE_ARGS * pcum;
1760 tree fntype;
1761 rtx libname ATTRIBUTE_UNUSED;
1762 int indirect ATTRIBUTE_UNUSED;
1763{
1764 /* On the ARM, the offset starts at 0. */
c27ba912
DM
1765 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1766
82e9d970
PB
1767 pcum->call_cookie = CALL_NORMAL;
1768
1769 if (TARGET_LONG_CALLS)
1770 pcum->call_cookie = CALL_LONG;
1771
1772 /* Check for long call/short call attributes. The attributes
1773 override any command line option. */
1774 if (fntype)
1775 {
1776 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1777 pcum->call_cookie = CALL_SHORT;
1778 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1779 pcum->call_cookie = CALL_LONG;
1780 }
1781}
1782
1783/* Determine where to put an argument to a function.
1784 Value is zero to push the argument on the stack,
1785 or a hard register in which to store the argument.
1786
1787 MODE is the argument's machine mode.
1788 TYPE is the data type of the argument (as a tree).
1789 This is null for libcalls where that information may
1790 not be available.
1791 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1792 the preceding args and about the function being called.
1793 NAMED is nonzero if this argument is a named parameter
1794 (otherwise it is an extra parameter matching an ellipsis). */
1795rtx
1796arm_function_arg (pcum, mode, type, named)
1797 CUMULATIVE_ARGS * pcum;
1798 enum machine_mode mode;
1799 tree type ATTRIBUTE_UNUSED;
1800 int named;
1801{
1802 if (mode == VOIDmode)
1803 /* Compute operand 2 of the call insn. */
1804 return GEN_INT (pcum->call_cookie);
1805
5895f793 1806 if (!named || pcum->nregs >= NUM_ARG_REGS)
82e9d970
PB
1807 return NULL_RTX;
1808
1809 return gen_rtx_REG (mode, pcum->nregs);
1810}
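/* An illustrative sketch: under the APCS the first four argument words
   go in r0-r3 and the rest on the stack, so for a hypothetical

     int f (int a, int b, int c, int d, int e);

   a..d occupy r0..r3, while for e the test above sees
   pcum->nregs >= NUM_ARG_REGS and returns NULL_RTX, pushing it.  */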
82e9d970 1811\f
c27ba912
DM
1812/* Encode the current state of the #pragma [no_]long_calls. */
1813typedef enum
82e9d970 1814{
c27ba912
DM
1815 OFF, /* No #pragma [no_]long_calls is in effect. */
1816 LONG, /* #pragma long_calls is in effect. */
1817 SHORT /* #pragma no_long_calls is in effect. */
1818} arm_pragma_enum;
82e9d970 1819
c27ba912 1820static arm_pragma_enum arm_pragma_long_calls = OFF;
82e9d970 1821
8b97c5f8
ZW
1822void
1823arm_pr_long_calls (pfile)
1824 cpp_reader *pfile ATTRIBUTE_UNUSED;
82e9d970 1825{
8b97c5f8
ZW
1826 arm_pragma_long_calls = LONG;
1827}
1828
1829void
1830arm_pr_no_long_calls (pfile)
1831 cpp_reader *pfile ATTRIBUTE_UNUSED;
1832{
1833 arm_pragma_long_calls = SHORT;
1834}
1835
1836void
1837arm_pr_long_calls_off (pfile)
1838 cpp_reader *pfile ATTRIBUTE_UNUSED;
1839{
1840 arm_pragma_long_calls = OFF;
82e9d970 1841}
8b97c5f8 1842
82e9d970
PB
1843\f
1844/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
1845 attribute for TYPE. The attributes in ATTRIBUTES have previously been
1846 assigned to TYPE. */
1847int
1848arm_valid_type_attribute_p (type, attributes, identifier, args)
1849 tree type;
1850 tree attributes ATTRIBUTE_UNUSED;
1851 tree identifier;
1852 tree args;
1853{
1854 if ( TREE_CODE (type) != FUNCTION_TYPE
1855 && TREE_CODE (type) != METHOD_TYPE
1856 && TREE_CODE (type) != FIELD_DECL
1857 && TREE_CODE (type) != TYPE_DECL)
1858 return 0;
1859
1860 /* Function calls made to this symbol must be done indirectly, because
1861 it may lie outside of the 26 bit addressing range of a normal function
1862 call. */
1863 if (is_attribute_p ("long_call", identifier))
1864 return (args == NULL_TREE);
c27ba912 1865
82e9d970
PB
1866 /* Whereas these functions are always known to reside within the 26 bit
1867 addressing range. */
1868 if (is_attribute_p ("short_call", identifier))
1869 return (args == NULL_TREE);
1870
6d3d9133
NC
1871 /* Interrupt Service Routines have special prologue and epilogue requirements. */
1872 if (is_attribute_p ("isr", identifier)
1873 || is_attribute_p ("interrupt", identifier))
1874 return arm_isr_value (args);
1875
82e9d970
PB
1876 return 0;
1877}
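/* Illustrative declarations accepted by the checks above (the names
   are hypothetical):

     void f (void) __attribute__ ((long_call));
     void g (void) __attribute__ ((short_call));
     void h (void) __attribute__ ((interrupt ("IRQ")));

   long_call and short_call must not be given arguments; the argument
   of isr/interrupt is validated by arm_isr_value.  */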
1878
1879/* Return 0 if the attributes for two types are incompatible, 1 if they
1880 are compatible, and 2 if they are nearly compatible (which causes a
1881 warning to be generated). */
1882int
1883arm_comp_type_attributes (type1, type2)
1884 tree type1;
1885 tree type2;
1886{
1cb8d58a 1887 int l1, l2, s1, s2;
bd7fc26f 1888
82e9d970
PB
1889 /* Check for mismatch of non-default calling convention. */
1890 if (TREE_CODE (type1) != FUNCTION_TYPE)
1891 return 1;
1892
1893 /* Check for mismatched call attributes. */
1cb8d58a
NC
1894 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
1895 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
1896 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
1897 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
1898
1899 /* Only bother to check if an attribute is defined. */
1900 if (l1 | l2 | s1 | s2)
1901 {
1902 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 1903 if ((l1 != l2) || (s1 != s2))
bd7fc26f 1904 return 0;
82e9d970 1905
bd7fc26f
NC
1906 /* Disallow mixed attributes. */
1907 if ((l1 & s2) || (l2 & s1))
1908 return 0;
1909 }
1910
6d3d9133
NC
1911 /* Check for mismatched ISR attribute. */
1912 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
1913 if (! l1)
1914 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
1915 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
1916 if (! l2)
 1917 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
1918 if (l1 != l2)
1919 return 0;
1920
bd7fc26f 1921 return 1;
82e9d970
PB
1922}
1923
c27ba912
DM
1924/* Encode long_call or short_call attribute by prefixing
1925 symbol name in DECL with a special character FLAG. */
1926void
1927arm_encode_call_attribute (decl, flag)
1928 tree decl;
cd2b33d0 1929 int flag;
c27ba912 1930{
3cce094d 1931 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b 1932 int len = strlen (str);
d19fb8e3 1933 char * newstr;
c27ba912
DM
1934
1935 if (TREE_CODE (decl) != FUNCTION_DECL)
1936 return;
1937
1938 /* Do not allow weak functions to be treated as short call. */
1939 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
1940 return;
c27ba912 1941
520a57c8
ZW
1942 newstr = alloca (len + 2);
1943 newstr[0] = flag;
1944 strcpy (newstr + 1, str);
c27ba912 1945
6d3d9133 1946 newstr = (char *) ggc_alloc_string (newstr, len + 1);
c27ba912
DM
1947 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
1948}
1949
1950/* Assigns default attributes to newly defined type. This is used to
1951 set short_call/long_call attributes for function types of
1952 functions defined inside corresponding #pragma scopes. */
1953void
1954arm_set_default_type_attributes (type)
1955 tree type;
1956{
1957 /* Add __attribute__ ((long_call)) to all functions, when
1958 inside #pragma long_calls or __attribute__ ((short_call)),
1959 when inside #pragma no_long_calls. */
1960 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1961 {
1962 tree type_attr_list, attr_name;
1963 type_attr_list = TYPE_ATTRIBUTES (type);
1964
1965 if (arm_pragma_long_calls == LONG)
1966 attr_name = get_identifier ("long_call");
1967 else if (arm_pragma_long_calls == SHORT)
1968 attr_name = get_identifier ("short_call");
1969 else
1970 return;
1971
1972 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
1973 TYPE_ATTRIBUTES (type) = type_attr_list;
1974 }
1975}
1976\f
1977/* Return 1 if the operand is a SYMBOL_REF for a function known to be
 1978 defined within the current compilation unit. If this cannot be
1979 determined, then 0 is returned. */
1980static int
1981current_file_function_operand (sym_ref)
1982 rtx sym_ref;
1983{
1984 /* This is a bit of a fib. A function will have a short call flag
1985 applied to its name if it has the short call attribute, or it has
1986 already been defined within the current compilation unit. */
1987 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
1988 return 1;
1989
6d77b53e 1990 /* The current function is always defined within the current compilation
c27ba912
DM
 1991 unit.  If it is a weak definition, however, then this may not be the real
 1992 definition of the function, and so we have to say no. */
1993 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
5895f793 1994 && !DECL_WEAK (current_function_decl))
c27ba912
DM
1995 return 1;
1996
1997 /* We cannot make the determination - default to returning 0. */
1998 return 0;
1999}
2000
2001/* Return non-zero if a 32 bit "long_call" should be generated for
2002 this call. We generate a long_call if the function:
2003
 2004 a. has an __attribute__ ((long_call))
2005 or b. is within the scope of a #pragma long_calls
2006 or c. the -mlong-calls command line switch has been specified
2007
2008 However we do not generate a long call if the function:
2009
2010 d. has an __attribute__ ((short_call))
2011 or e. is inside the scope of a #pragma no_long_calls
2012 or f. has an __attribute__ ((section))
2013 or g. is defined within the current compilation unit.
2014
2015 This function will be called by C fragments contained in the machine
 2016 description file.  SYM_REF and CALL_COOKIE correspond to the matched
2017 rtl operands. CALL_SYMBOL is used to distinguish between
2018 two different callers of the function. It is set to 1 in the
2019 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2020 and "call_value" patterns. This is because of the difference in the
2021 SYM_REFs passed by these patterns. */
2022int
2023arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2024 rtx sym_ref;
2025 int call_cookie;
2026 int call_symbol;
2027{
5895f793 2028 if (!call_symbol)
c27ba912
DM
2029 {
2030 if (GET_CODE (sym_ref) != MEM)
2031 return 0;
2032
2033 sym_ref = XEXP (sym_ref, 0);
2034 }
2035
2036 if (GET_CODE (sym_ref) != SYMBOL_REF)
2037 return 0;
2038
2039 if (call_cookie & CALL_SHORT)
2040 return 0;
2041
2042 if (TARGET_LONG_CALLS && flag_function_sections)
2043 return 1;
2044
87e27392 2045 if (current_file_function_operand (sym_ref))
c27ba912
DM
2046 return 0;
2047
2048 return (call_cookie & CALL_LONG)
2049 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2050 || TARGET_LONG_CALLS;
2051}
f99fce0c
RE
2052
2053/* Return non-zero if it is ok to make a tail-call to DECL. */
2054int
2055arm_function_ok_for_sibcall (decl)
2056 tree decl;
2057{
2058 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2059
2060 /* Never tailcall something for which we have no decl, or if we
2061 are in Thumb mode. */
2062 if (decl == NULL || TARGET_THUMB)
2063 return 0;
2064
2065 /* Get the calling method. */
2066 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2067 call_type = CALL_SHORT;
2068 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2069 call_type = CALL_LONG;
2070
2071 /* Cannot tail-call to long calls, since these are out of range of
2072 a branch instruction. However, if not compiling PIC, we know
2073 we can reach the symbol if it is in this compilation unit. */
5895f793 2074 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
f99fce0c
RE
2075 return 0;
2076
2077 /* If we are interworking and the function is not declared static
2078 then we can't tail-call it unless we know that it exists in this
2079 compilation unit (since it might be a Thumb routine). */
5895f793 2080 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
f99fce0c
RE
2081 return 0;
2082
6d3d9133
NC
2083 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2084 if (IS_INTERRUPT (arm_current_func_type ()))
2085 return 0;
2086
f99fce0c
RE
2087 /* Everything else is ok. */
2088 return 1;
2089}
2090
82e9d970 2091\f
32de079a
RE
2092int
2093legitimate_pic_operand_p (x)
2094 rtx x;
2095{
d5b7b3ae
RE
2096 if (CONSTANT_P (x)
2097 && flag_pic
32de079a
RE
2098 && (GET_CODE (x) == SYMBOL_REF
2099 || (GET_CODE (x) == CONST
2100 && GET_CODE (XEXP (x, 0)) == PLUS
2101 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2102 return 0;
2103
2104 return 1;
2105}
2106
2107rtx
2108legitimize_pic_address (orig, mode, reg)
2109 rtx orig;
2110 enum machine_mode mode;
2111 rtx reg;
2112{
2113 if (GET_CODE (orig) == SYMBOL_REF)
2114 {
2115 rtx pic_ref, address;
2116 rtx insn;
2117 int subregs = 0;
2118
2119 if (reg == 0)
2120 {
893f3d5b 2121 if (no_new_pseudos)
32de079a
RE
2122 abort ();
2123 else
2124 reg = gen_reg_rtx (Pmode);
2125
2126 subregs = 1;
2127 }
2128
2129#ifdef AOF_ASSEMBLER
2130 /* The AOF assembler can generate relocations for these directly, and
6354dc9b 2131 understands that the PIC register has to be added into the offset. */
32de079a
RE
2132 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2133#else
2134 if (subregs)
2135 address = gen_reg_rtx (Pmode);
2136 else
2137 address = reg;
2138
4bec9f7d
NC
2139 if (TARGET_ARM)
2140 emit_insn (gen_pic_load_addr_arm (address, orig));
2141 else
2142 emit_insn (gen_pic_load_addr_thumb (address, orig));
32de079a 2143
43cffd11
RE
2144 pic_ref = gen_rtx_MEM (Pmode,
2145 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2146 address));
32de079a
RE
2147 RTX_UNCHANGING_P (pic_ref) = 1;
2148 insn = emit_move_insn (reg, pic_ref);
2149#endif
2150 current_function_uses_pic_offset_table = 1;
2151 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2152 by loop. */
43cffd11
RE
2153 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2154 REG_NOTES (insn));
32de079a
RE
2155 return reg;
2156 }
2157 else if (GET_CODE (orig) == CONST)
2158 {
2159 rtx base, offset;
2160
2161 if (GET_CODE (XEXP (orig, 0)) == PLUS
2162 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2163 return orig;
2164
2165 if (reg == 0)
2166 {
893f3d5b 2167 if (no_new_pseudos)
32de079a
RE
2168 abort ();
2169 else
2170 reg = gen_reg_rtx (Pmode);
2171 }
2172
2173 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2174 {
2175 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2176 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2177 base == reg ? 0 : reg);
2178 }
2179 else
2180 abort ();
2181
2182 if (GET_CODE (offset) == CONST_INT)
2183 {
2184 /* The base register doesn't really matter, we only want to
2185 test the index for the appropriate mode. */
f1008e52 2186 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
32de079a 2187
5895f793 2188 if (!no_new_pseudos)
32de079a
RE
2189 offset = force_reg (Pmode, offset);
2190 else
2191 abort ();
2192
2193 win:
2194 if (GET_CODE (offset) == CONST_INT)
2195 return plus_constant_for_output (base, INTVAL (offset));
2196 }
2197
2198 if (GET_MODE_SIZE (mode) > 4
2199 && (GET_MODE_CLASS (mode) == MODE_INT
2200 || TARGET_SOFT_FLOAT))
2201 {
2202 emit_insn (gen_addsi3 (reg, base, offset));
2203 return reg;
2204 }
2205
43cffd11 2206 return gen_rtx_PLUS (Pmode, base, offset);
32de079a
RE
2207 }
2208 else if (GET_CODE (orig) == LABEL_REF)
82e9d970
PB
2209 {
2210 current_function_uses_pic_offset_table = 1;
2211
2212 if (NEED_GOT_RELOC)
d5b7b3ae
RE
2213 {
2214 rtx pic_ref, address = gen_reg_rtx (Pmode);
4bec9f7d
NC
2215
2216 if (TARGET_ARM)
2217 emit_insn (gen_pic_load_addr_arm (address, orig));
2218 else
2219 emit_insn (gen_pic_load_addr_thumb (address, orig));
d19fb8e3 2220
d5b7b3ae
RE
2221 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2222
2223 emit_move_insn (address, pic_ref);
2224 return address;
2225 }
82e9d970 2226 }
32de079a
RE
2227
2228 return orig;
2229}
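/* For reference, the code generated above for a SYMBOL_REF corresponds
   roughly to the following ARM assembly (a sketch; register and label
   names are arbitrary):

     ldr r3, .Lpool        @ .Lpool: .word sym(GOT)
     ldr r3, [sl, r3]      @ sl is the PIC register; r3 = &sym

   i.e. a literal-pool load followed by a load relative to the PIC
   base, matching the MEM (PLUS (pic_offset_table, address)) above.  */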
2230
2231static rtx pic_rtx;
2232
2233int
62b10bbc 2234is_pic (x)
32de079a
RE
2235 rtx x;
2236{
2237 if (x == pic_rtx)
2238 return 1;
2239 return 0;
2240}
2241
2242void
2243arm_finalize_pic ()
2244{
2245#ifndef AOF_ASSEMBLER
2246 rtx l1, pic_tmp, pic_tmp2, seq;
2247 rtx global_offset_table;
2248
ed0e6530 2249 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
2250 return;
2251
5895f793 2252 if (!flag_pic)
32de079a
RE
2253 abort ();
2254
2255 start_sequence ();
2256 l1 = gen_label_rtx ();
2257
43cffd11 2258 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768 2259 /* On the ARM the PC register contains 'dot + 8' at the time of the
d5b7b3ae
RE
 2260 addition; on the Thumb it is 'dot + 4'. */
2261 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
84306176
PB
2262 if (GOT_PCREL)
2263 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 2264 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
2265 else
2266 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
2267
2268 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 2269
d5b7b3ae 2270 if (TARGET_ARM)
4bec9f7d
NC
2271 {
2272 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2273 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2274 }
d5b7b3ae 2275 else
4bec9f7d
NC
2276 {
2277 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2278 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2279 }
32de079a
RE
2280
2281 seq = gen_sequence ();
2282 end_sequence ();
2283 emit_insn_after (seq, get_insns ());
2284
2285 /* Need to emit this whether or not we obey regdecls,
2286 since setjmp/longjmp can cause life info to screw up. */
43cffd11 2287 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
2288#endif /* AOF_ASSEMBLER */
2289}
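/* In the simple (non pc-relative GOT) case the sequence emitted above
   corresponds roughly to this ARM assembly (label names arbitrary):

     ldr sl, .Lofs
   .L1:
     add sl, pc, sl        @ sl = &_GLOBAL_OFFSET_TABLE_
     ...
   .Lofs:
     .word _GLOBAL_OFFSET_TABLE_ - (.L1 + 8)

   The +8 is the 'dot + 8' PC offset noted above; Thumb uses +4.  */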
2290
e2c671ba
RE
2291#define REG_OR_SUBREG_REG(X) \
2292 (GET_CODE (X) == REG \
2293 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2294
2295#define REG_OR_SUBREG_RTX(X) \
2296 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2297
d5b7b3ae
RE
2298#ifndef COSTS_N_INSNS
2299#define COSTS_N_INSNS(N) ((N) * 4 - 2)
2300#endif
e2c671ba
RE
2301
2302int
d5b7b3ae 2303arm_rtx_costs (x, code, outer)
e2c671ba 2304 rtx x;
74bbc178 2305 enum rtx_code code;
d5b7b3ae 2306 enum rtx_code outer;
e2c671ba
RE
2307{
2308 enum machine_mode mode = GET_MODE (x);
2309 enum rtx_code subcode;
2310 int extra_cost;
2311
d5b7b3ae
RE
2312 if (TARGET_THUMB)
2313 {
2314 switch (code)
2315 {
2316 case ASHIFT:
2317 case ASHIFTRT:
2318 case LSHIFTRT:
2319 case ROTATERT:
2320 case PLUS:
2321 case MINUS:
2322 case COMPARE:
2323 case NEG:
2324 case NOT:
2325 return COSTS_N_INSNS (1);
2326
2327 case MULT:
2328 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2329 {
2330 int cycles = 0;
2331 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2332
2333 while (i)
2334 {
2335 i >>= 2;
5895f793 2336 cycles++;
d5b7b3ae
RE
2337 }
2338 return COSTS_N_INSNS (2) + cycles;
2339 }
2340 return COSTS_N_INSNS (1) + 16;
2341
2342 case SET:
2343 return (COSTS_N_INSNS (1)
2344 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
 2345 + (GET_CODE (SET_DEST (x)) == MEM)));
2346
2347 case CONST_INT:
2348 if (outer == SET)
2349 {
2350 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2351 return 0;
2352 if (thumb_shiftable_const (INTVAL (x)))
2353 return COSTS_N_INSNS (2);
2354 return COSTS_N_INSNS (3);
2355 }
2356 else if (outer == PLUS
2357 && INTVAL (x) < 256 && INTVAL (x) > -256)
2358 return 0;
2359 else if (outer == COMPARE
2360 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2361 return 0;
2362 else if (outer == ASHIFT || outer == ASHIFTRT
2363 || outer == LSHIFTRT)
2364 return 0;
2365 return COSTS_N_INSNS (2);
2366
2367 case CONST:
2368 case CONST_DOUBLE:
2369 case LABEL_REF:
2370 case SYMBOL_REF:
2371 return COSTS_N_INSNS (3);
2372
2373 case UDIV:
2374 case UMOD:
2375 case DIV:
2376 case MOD:
2377 return 100;
2378
2379 case TRUNCATE:
2380 return 99;
2381
2382 case AND:
2383 case XOR:
2384 case IOR:
2385 /* XXX guess. */
2386 return 8;
2387
2388 case ADDRESSOF:
2389 case MEM:
2390 /* XXX another guess. */
2391 /* Memory costs quite a lot for the first word, but subsequent words
2392 load at the equivalent of a single insn each. */
2393 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2394 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2395
2396 case IF_THEN_ELSE:
2397 /* XXX a guess. */
2398 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2399 return 14;
2400 return 2;
2401
2402 case ZERO_EXTEND:
2403 /* XXX still guessing. */
2404 switch (GET_MODE (XEXP (x, 0)))
2405 {
2406 case QImode:
2407 return (1 + (mode == DImode ? 4 : 0)
2408 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2409
2410 case HImode:
2411 return (4 + (mode == DImode ? 4 : 0)
2412 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2413
2414 case SImode:
2415 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2416
2417 default:
2418 return 99;
2419 }
2420
2421 default:
2422 return 99;
2423#if 0
2424 case FFS:
2425 case FLOAT:
2426 case FIX:
2427 case UNSIGNED_FIX:
2428 /* XXX guess */
2429 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2430 rtx_name[code]);
2431 abort ();
2432#endif
2433 }
2434 }
2435
e2c671ba
RE
2436 switch (code)
2437 {
2438 case MEM:
2439 /* Memory costs quite a lot for the first word, but subsequent words
2440 load at the equivalent of a single insn each. */
2441 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2442 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2443
2444 case DIV:
2445 case MOD:
2446 return 100;
2447
2448 case ROTATE:
2449 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2450 return 4;
2451 /* Fall through */
2452 case ROTATERT:
2453 if (mode != SImode)
2454 return 8;
2455 /* Fall through */
2456 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2457 if (mode == DImode)
2458 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2459 + ((GET_CODE (XEXP (x, 0)) == REG
2460 || (GET_CODE (XEXP (x, 0)) == SUBREG
2461 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2462 ? 0 : 8));
2463 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2464 || (GET_CODE (XEXP (x, 0)) == SUBREG
2465 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2466 ? 0 : 4)
2467 + ((GET_CODE (XEXP (x, 1)) == REG
2468 || (GET_CODE (XEXP (x, 1)) == SUBREG
2469 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2470 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2471 ? 0 : 4));
2472
2473 case MINUS:
2474 if (mode == DImode)
2475 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2476 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2477 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2478 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2479 ? 0 : 8));
2480
2481 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2482 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2483 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2484 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2485 ? 0 : 8)
2486 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2487 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2488 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2489 ? 0 : 8));
2490
2491 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2492 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2493 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2494 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2495 || subcode == ASHIFTRT || subcode == LSHIFTRT
2496 || subcode == ROTATE || subcode == ROTATERT
2497 || (subcode == MULT
2498 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2499 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2500 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2501 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2502 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2503 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2504 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2505 return 1;
2506 /* Fall through */
2507
2508 case PLUS:
2509 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2510 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2511 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2512 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2513 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2514 ? 0 : 8));
2515
2516 /* Fall through */
2517 case AND: case XOR: case IOR:
2518 extra_cost = 0;
2519
2520 /* Normally the frame registers will be spilt into reg+const during
2521 reload, so it is a bad idea to combine them with other instructions,
2522 since then they might not be moved outside of loops. As a compromise
2523 we allow integration with ops that have a constant as their second
2524 operand. */
2525 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2526 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2527 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2528 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2529 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2530 extra_cost = 4;
2531
2532 if (mode == DImode)
2533 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2534 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2535 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2536 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2537 ? 0 : 8));
2538
2539 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2540 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2541 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2542 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2543 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2544 ? 0 : 4));
2545
2546 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2547 return (1 + extra_cost
2548 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2549 || subcode == LSHIFTRT || subcode == ASHIFTRT
2550 || subcode == ROTATE || subcode == ROTATERT
2551 || (subcode == MULT
2552 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2553 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 2554 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
2555 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2556 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 2557 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
2558 ? 0 : 4));
2559
2560 return 8;
2561
2562 case MULT:
b111229a 2563 /* There is no point basing this on the tuning, since it is always the
6354dc9b 2564 fast variant if it exists at all. */
2b835d68
RE
2565 if (arm_fast_multiply && mode == DImode
2566 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2567 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2568 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2569 return 8;
2570
e2c671ba
RE
2571 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2572 || mode == DImode)
2573 return 30;
2574
2575 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2576 {
2b835d68 2577 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
e5951263 2578 & HOST_UINT (0xffffffff));
e2c671ba
RE
2579 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2580 int j;
6354dc9b
NC
2581
2582 /* Tune as appropriate. */
aec3cfba 2583 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 2584
2b835d68 2585 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 2586 {
2b835d68 2587 i >>= booth_unit_size;
e2c671ba
RE
2588 add_cost += 2;
2589 }
2590
2591 return add_cost;
2592 }
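 /* Worked example (illustrative): for 0x12345678 on a core with
 FL_FAST_MULT, booth_unit_size is 8, so the loop above iterates
 four times; the constant is not const_ok_for_arm, so the total
 is 8 + 4 * 2 = 16.  */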
2593
aec3cfba 2594 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 2595 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
2596 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2597
56636818
JL
2598 case TRUNCATE:
2599 if (arm_fast_multiply && mode == SImode
2600 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2601 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2602 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2603 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2604 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2605 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2606 return 8;
2607 return 99;
2608
e2c671ba
RE
2609 case NEG:
2610 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2611 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2612 /* Fall through */
2613 case NOT:
2614 if (mode == DImode)
2615 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2616
2617 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2618
2619 case IF_THEN_ELSE:
2620 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2621 return 14;
2622 return 2;
2623
2624 case COMPARE:
2625 return 1;
2626
2627 case ABS:
2628 return 4 + (mode == DImode ? 4 : 0);
2629
2630 case SIGN_EXTEND:
2631 if (GET_MODE (XEXP (x, 0)) == QImode)
2632 return (4 + (mode == DImode ? 4 : 0)
2633 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2634 /* Fall through */
2635 case ZERO_EXTEND:
2636 switch (GET_MODE (XEXP (x, 0)))
2637 {
2638 case QImode:
2639 return (1 + (mode == DImode ? 4 : 0)
2640 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2641
2642 case HImode:
2643 return (4 + (mode == DImode ? 4 : 0)
2644 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2645
2646 case SImode:
2647 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
2648
2649 default:
2650 break;
e2c671ba
RE
2651 }
2652 abort ();
2653
d5b7b3ae
RE
2654 case CONST_INT:
2655 if (const_ok_for_arm (INTVAL (x)))
2656 return outer == SET ? 2 : -1;
2657 else if (outer == AND
5895f793 2658 && const_ok_for_arm (~INTVAL (x)))
d5b7b3ae
RE
2659 return -1;
2660 else if ((outer == COMPARE
2661 || outer == PLUS || outer == MINUS)
5895f793 2662 && const_ok_for_arm (-INTVAL (x)))
d5b7b3ae
RE
2663 return -1;
2664 else
2665 return 5;
2666
2667 case CONST:
2668 case LABEL_REF:
2669 case SYMBOL_REF:
2670 return 6;
2671
2672 case CONST_DOUBLE:
2673 if (const_double_rtx_ok_for_fpu (x))
2674 return outer == SET ? 2 : -1;
2675 else if ((outer == COMPARE || outer == PLUS)
2676 && neg_const_double_rtx_ok_for_fpu (x))
2677 return -1;
2678 return 7;
2679
e2c671ba
RE
2680 default:
2681 return 99;
2682 }
2683}
32de079a
RE
2684
2685int
2686arm_adjust_cost (insn, link, dep, cost)
2687 rtx insn;
2688 rtx link;
2689 rtx dep;
2690 int cost;
2691{
2692 rtx i_pat, d_pat;
2693
d19fb8e3
NC
2694 /* Some true dependencies can have a higher cost depending
2695 on precisely how certain input operands are used. */
2696 if (arm_is_xscale
2697 && REG_NOTE_KIND (link) == 0
 2698 && recog_memoized (insn) >= 0
 2699 && recog_memoized (dep) >= 0)
2700 {
2701 int shift_opnum = get_attr_shift (insn);
2702 enum attr_type attr_type = get_attr_type (dep);
2703
2704 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2705 operand for INSN. If we have a shifted input operand and the
2706 instruction we depend on is another ALU instruction, then we may
2707 have to account for an additional stall. */
2708 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2709 {
2710 rtx shifted_operand;
2711 int opno;
2712
2713 /* Get the shifted operand. */
2714 extract_insn (insn);
2715 shifted_operand = recog_data.operand[shift_opnum];
2716
2717 /* Iterate over all the operands in DEP. If we write an operand
 2718 that overlaps with SHIFTED_OPERAND, then we have to increase the
2719 cost of this dependency. */
2720 extract_insn (dep);
2721 preprocess_constraints ();
2722 for (opno = 0; opno < recog_data.n_operands; opno++)
2723 {
2724 /* We can ignore strict inputs. */
2725 if (recog_data.operand_type[opno] == OP_IN)
2726 continue;
2727
2728 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2729 shifted_operand))
2730 return 2;
2731 }
2732 }
2733 }
2734
6354dc9b 2735 /* XXX This is not strictly true for the FPA. */
d5b7b3ae
RE
2736 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2737 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
b36ba79f
RE
2738 return 0;
2739
d5b7b3ae
RE
2740 /* Call insns don't incur a stall, even if they follow a load. */
2741 if (REG_NOTE_KIND (link) == 0
2742 && GET_CODE (insn) == CALL_INSN)
2743 return 1;
2744
32de079a
RE
2745 if ((i_pat = single_set (insn)) != NULL
2746 && GET_CODE (SET_SRC (i_pat)) == MEM
2747 && (d_pat = single_set (dep)) != NULL
2748 && GET_CODE (SET_DEST (d_pat)) == MEM)
2749 {
 2750 /* This is a load after a store; there is no conflict if the load reads
2751 from a cached area. Assume that loads from the stack, and from the
2752 constant pool are cached, and that others will miss. This is a
6354dc9b 2753 hack. */
32de079a 2754
32de079a
RE
2755 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2756 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2757 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2758 || reg_mentioned_p (hard_frame_pointer_rtx,
2759 XEXP (SET_SRC (i_pat), 0)))
949d79eb 2760 return 1;
32de079a
RE
2761 }
2762
2763 return cost;
2764}
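/* An illustrative consequence of the heuristic above: a load from the
   stack (an address involving sp or fp) or from the constant pool that
   immediately follows a store is costed as a cache hit (cost 1), while
   a load through an arbitrary pointer keeps the full cost passed in.  */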
2765
6354dc9b 2766/* This code has been fixed for cross compilation. */
ff9940b0
RE
2767
2768static int fpa_consts_inited = 0;
2769
cd2b33d0 2770static const char * strings_fpa[8] =
62b10bbc 2771{
2b835d68
RE
2772 "0", "1", "2", "3",
2773 "4", "5", "0.5", "10"
2774};
ff9940b0
RE
2775
2776static REAL_VALUE_TYPE values_fpa[8];
2777
2778static void
2779init_fpa_table ()
2780{
2781 int i;
2782 REAL_VALUE_TYPE r;
2783
2784 for (i = 0; i < 8; i++)
2785 {
2786 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2787 values_fpa[i] = r;
2788 }
f3bb6135 2789
ff9940b0
RE
2790 fpa_consts_inited = 1;
2791}
2792
6354dc9b 2793/* Return TRUE if rtx X is a valid immediate FPU constant. */
cce8749e
CH
2794
2795int
2796const_double_rtx_ok_for_fpu (x)
2797 rtx x;
2798{
ff9940b0
RE
2799 REAL_VALUE_TYPE r;
2800 int i;
2801
2802 if (!fpa_consts_inited)
2803 init_fpa_table ();
2804
2805 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2806 if (REAL_VALUE_MINUS_ZERO (r))
2807 return 0;
f3bb6135 2808
ff9940b0
RE
2809 for (i = 0; i < 8; i++)
2810 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2811 return 1;
f3bb6135 2812
ff9940b0 2813 return 0;
f3bb6135 2814}
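/* So, illustratively, 10.0 and 0.5 can be used as immediates in FPA
   instructions (e.g. as the operand of an MVF), while a value such as
   7.0 is rejected and must be loaded from memory: only the eight
   constants in strings_fpa above are wired into the FPA.  */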
ff9940b0 2815
6354dc9b 2816/* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
ff9940b0
RE
2817
2818int
2819neg_const_double_rtx_ok_for_fpu (x)
2820 rtx x;
2821{
2822 REAL_VALUE_TYPE r;
2823 int i;
2824
2825 if (!fpa_consts_inited)
2826 init_fpa_table ();
2827
2828 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2829 r = REAL_VALUE_NEGATE (r);
2830 if (REAL_VALUE_MINUS_ZERO (r))
2831 return 0;
f3bb6135 2832
ff9940b0
RE
2833 for (i = 0; i < 8; i++)
2834 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2835 return 1;
f3bb6135 2836
ff9940b0 2837 return 0;
f3bb6135 2838}
cce8749e
CH
2839\f
2840/* Predicates for `match_operand' and `match_operator'. */
2841
ff9940b0 2842/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
2843 (SUBREG (MEM)...).
2844
2845 This function exists because at the time it was put in it led to better
2846 code. SUBREG(MEM) always needs a reload in the places where
2847 s_register_operand is used, and this seemed to lead to excessive
2848 reloading. */
ff9940b0
RE
2849
2850int
2851s_register_operand (op, mode)
2852 register rtx op;
2853 enum machine_mode mode;
2854{
2855 if (GET_MODE (op) != mode && mode != VOIDmode)
2856 return 0;
2857
2858 if (GET_CODE (op) == SUBREG)
f3bb6135 2859 op = SUBREG_REG (op);
ff9940b0
RE
2860
2861 /* We don't consider registers whose class is NO_REGS
2862 to be a register operand. */
d5b7b3ae 2863 /* XXX might have to check for lo regs only for thumb ??? */
ff9940b0
RE
2864 return (GET_CODE (op) == REG
2865 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2866 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2867}
2868
b0888988
RE
 2869/* A hard register operand (even before reload). */
2870int
2871arm_hard_register_operand (op, mode)
2872 register rtx op;
2873 enum machine_mode mode;
2874{
2875 if (GET_MODE (op) != mode && mode != VOIDmode)
2876 return 0;
2877
2878 return (GET_CODE (op) == REG
2879 && REGNO (op) < FIRST_PSEUDO_REGISTER);
2880}
2881
e2c671ba
RE
2882/* Only accept reg, subreg(reg), const_int. */
2883
2884int
2885reg_or_int_operand (op, mode)
2886 register rtx op;
2887 enum machine_mode mode;
2888{
2889 if (GET_CODE (op) == CONST_INT)
2890 return 1;
2891
2892 if (GET_MODE (op) != mode && mode != VOIDmode)
2893 return 0;
2894
2895 if (GET_CODE (op) == SUBREG)
2896 op = SUBREG_REG (op);
2897
2898 /* We don't consider registers whose class is NO_REGS
2899 to be a register operand. */
2900 return (GET_CODE (op) == REG
2901 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2902 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2903}
2904
ff9940b0
RE
2905/* Return 1 if OP is an item in memory, given that we are in reload. */
2906
2907int
d5b7b3ae 2908arm_reload_memory_operand (op, mode)
ff9940b0 2909 rtx op;
74bbc178 2910 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2911{
2912 int regno = true_regnum (op);
2913
5895f793 2914 return (!CONSTANT_P (op)
ff9940b0
RE
2915 && (regno == -1
2916 || (GET_CODE (op) == REG
2917 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2918}
2919
4d818c85 2920/* Return 1 if OP is a valid memory address, but not valid for a signed byte
d5b7b3ae
RE
2921 memory access (architecture V4).
 2922 MODE is QImode if called when computing constraints, or VOIDmode when
2923 emitting patterns. In this latter case we cannot use memory_operand()
 2924 because it will fail on badly formed MEMs, which is precisely what we are
2925 trying to catch. */
4d818c85
RE
2926int
2927bad_signed_byte_operand (op, mode)
2928 rtx op;
d5b7b3ae 2929 enum machine_mode mode ATTRIBUTE_UNUSED;
4d818c85 2930{
d5b7b3ae 2931#if 0
5895f793 2932 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
d5b7b3ae
RE
2933 return 0;
2934#endif
2935 if (GET_CODE (op) != MEM)
4d818c85
RE
2936 return 0;
2937
2938 op = XEXP (op, 0);
2939
6354dc9b 2940 /* A sum of anything more complex than reg + reg or reg + const is bad. */
4d818c85 2941 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
5895f793
RE
2942 && (!s_register_operand (XEXP (op, 0), VOIDmode)
2943 || (!s_register_operand (XEXP (op, 1), VOIDmode)
9c8cc54f 2944 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
2945 return 1;
2946
6354dc9b 2947 /* Big constants are also bad. */
4d818c85
RE
2948 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2949 && (INTVAL (XEXP (op, 1)) > 0xff
2950 || -INTVAL (XEXP (op, 1)) > 0xff))
2951 return 1;
2952
6354dc9b 2953 /* Everything else is good, or will automatically be made so. */
4d818c85
RE
2954 return 0;
2955}
2956
cce8749e
CH
2957/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2958
2959int
2960arm_rhs_operand (op, mode)
2961 rtx op;
2962 enum machine_mode mode;
2963{
ff9940b0 2964 return (s_register_operand (op, mode)
cce8749e 2965 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2966}
cce8749e 2967
ff9940b0
RE
2968/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
2969 */
2970
2971int
2972arm_rhsm_operand (op, mode)
2973 rtx op;
2974 enum machine_mode mode;
2975{
2976 return (s_register_operand (op, mode)
2977 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2978 || memory_operand (op, mode));
f3bb6135 2979}
ff9940b0
RE
2980
 2981/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
2982 constant that is valid when negated. */
2983
2984int
2985arm_add_operand (op, mode)
2986 rtx op;
2987 enum machine_mode mode;
2988{
d5b7b3ae
RE
2989 if (TARGET_THUMB)
2990 return thumb_cmp_operand (op, mode);
2991
ff9940b0
RE
2992 return (s_register_operand (op, mode)
2993 || (GET_CODE (op) == CONST_INT
2994 && (const_ok_for_arm (INTVAL (op))
2995 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2996}
ff9940b0
RE
2997
2998int
2999arm_not_operand (op, mode)
3000 rtx op;
3001 enum machine_mode mode;
3002{
3003 return (s_register_operand (op, mode)
3004 || (GET_CODE (op) == CONST_INT
3005 && (const_ok_for_arm (INTVAL (op))
3006 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 3007}
ff9940b0 3008
5165176d
RE
3009/* Return TRUE if the operand is a memory reference which contains an
3010 offsettable address. */
3011int
3012offsettable_memory_operand (op, mode)
3013 register rtx op;
3014 enum machine_mode mode;
3015{
3016 if (mode == VOIDmode)
3017 mode = GET_MODE (op);
3018
3019 return (mode == GET_MODE (op)
3020 && GET_CODE (op) == MEM
3021 && offsettable_address_p (reload_completed | reload_in_progress,
3022 mode, XEXP (op, 0)));
3023}
3024
3025/* Return TRUE if the operand is a memory reference which is, or can be
3026 made word aligned by adjusting the offset. */
3027int
3028alignable_memory_operand (op, mode)
3029 register rtx op;
3030 enum machine_mode mode;
3031{
3032 rtx reg;
3033
3034 if (mode == VOIDmode)
3035 mode = GET_MODE (op);
3036
3037 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3038 return 0;
3039
3040 op = XEXP (op, 0);
3041
3042 return ((GET_CODE (reg = op) == REG
3043 || (GET_CODE (op) == SUBREG
3044 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3045 || (GET_CODE (op) == PLUS
3046 && GET_CODE (XEXP (op, 1)) == CONST_INT
3047 && (GET_CODE (reg = XEXP (op, 0)) == REG
3048 || (GET_CODE (XEXP (op, 0)) == SUBREG
3049 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
bdb429a5 3050 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
5165176d
RE
3051}
3052
b111229a
RE
3053/* Similar to s_register_operand, but does not allow hard integer
3054 registers. */
3055int
3056f_register_operand (op, mode)
3057 register rtx op;
3058 enum machine_mode mode;
3059{
3060 if (GET_MODE (op) != mode && mode != VOIDmode)
3061 return 0;
3062
3063 if (GET_CODE (op) == SUBREG)
3064 op = SUBREG_REG (op);
3065
3066 /* We don't consider registers whose class is NO_REGS
3067 to be a register operand. */
3068 return (GET_CODE (op) == REG
3069 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3070 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3071}
3072
cce8749e
CH
3073/* Return TRUE for valid operands for the rhs of an FPU instruction. */
3074
3075int
3076fpu_rhs_operand (op, mode)
3077 rtx op;
3078 enum machine_mode mode;
3079{
ff9940b0 3080 if (s_register_operand (op, mode))
f3bb6135 3081 return TRUE;
9ce71c6f
BS
3082
3083 if (GET_MODE (op) != mode && mode != VOIDmode)
3084 return FALSE;
3085
3086 if (GET_CODE (op) == CONST_DOUBLE)
3087 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
3088
3089 return FALSE;
3090}
cce8749e 3091
ff9940b0
RE
3092int
3093fpu_add_operand (op, mode)
3094 rtx op;
3095 enum machine_mode mode;
3096{
3097 if (s_register_operand (op, mode))
f3bb6135 3098 return TRUE;
9ce71c6f
BS
3099
3100 if (GET_MODE (op) != mode && mode != VOIDmode)
3101 return FALSE;
3102
3103 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
3104 return (const_double_rtx_ok_for_fpu (op)
3105 || neg_const_double_rtx_ok_for_fpu (op));
3106
3107 return FALSE;
ff9940b0
RE
3108}
3109
cce8749e
CH
3110/* Return nonzero if OP is a constant power of two. */
3111
3112int
3113power_of_two_operand (op, mode)
3114 rtx op;
74bbc178 3115 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
3116{
3117 if (GET_CODE (op) == CONST_INT)
3118 {
d5b7b3ae 3119 HOST_WIDE_INT value = INTVAL (op);
f3bb6135 3120 return value != 0 && (value & (value - 1)) == 0;
cce8749e 3121 }
f3bb6135
RE
3122 return FALSE;
3123}
cce8749e
CH
3124
3125/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 3126 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
3127 Note that this disallows MEM(REG+REG), but allows
3128 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
3129
3130int
3131di_operand (op, mode)
3132 rtx op;
3133 enum machine_mode mode;
3134{
ff9940b0 3135 if (s_register_operand (op, mode))
f3bb6135 3136 return TRUE;
cce8749e 3137
9ce71c6f
BS
3138 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3139 return FALSE;
3140
e9c6b69b
NC
3141 if (GET_CODE (op) == SUBREG)
3142 op = SUBREG_REG (op);
3143
cce8749e
CH
3144 switch (GET_CODE (op))
3145 {
3146 case CONST_DOUBLE:
3147 case CONST_INT:
f3bb6135
RE
3148 return TRUE;
3149
cce8749e 3150 case MEM:
f3bb6135
RE
3151 return memory_address_p (DImode, XEXP (op, 0));
3152
cce8749e 3153 default:
f3bb6135 3154 return FALSE;
cce8749e 3155 }
f3bb6135 3156}
cce8749e 3157
d5b7b3ae
RE
3158/* Like di_operand, but don't accept constants. */
3159int
3160nonimmediate_di_operand (op, mode)
3161 rtx op;
3162 enum machine_mode mode;
3163{
3164 if (s_register_operand (op, mode))
3165 return TRUE;
3166
3167 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3168 return FALSE;
3169
3170 if (GET_CODE (op) == SUBREG)
3171 op = SUBREG_REG (op);
3172
3173 if (GET_CODE (op) == MEM)
3174 return memory_address_p (DImode, XEXP (op, 0));
3175
3176 return FALSE;
3177}
3178
f3139301 3179/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 3180 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
3181 Note that this disallows MEM(REG+REG), but allows
3182 MEM(PRE/POST_INC/DEC(REG)). */
3183
3184int
3185soft_df_operand (op, mode)
3186 rtx op;
3187 enum machine_mode mode;
3188{
3189 if (s_register_operand (op, mode))
3190 return TRUE;
3191
9ce71c6f
BS
3192 if (mode != VOIDmode && GET_MODE (op) != mode)
3193 return FALSE;
3194
37b80d2e
BS
3195 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3196 return FALSE;
3197
e9c6b69b
NC
3198 if (GET_CODE (op) == SUBREG)
3199 op = SUBREG_REG (op);
9ce71c6f 3200
f3139301
DE
3201 switch (GET_CODE (op))
3202 {
3203 case CONST_DOUBLE:
3204 return TRUE;
3205
3206 case MEM:
3207 return memory_address_p (DFmode, XEXP (op, 0));
3208
3209 default:
3210 return FALSE;
3211 }
3212}
3213
d5b7b3ae
RE
3214/* Like soft_df_operand, but don't accept constants. */
3215int
3216nonimmediate_soft_df_operand (op, mode)
3217 rtx op;
3218 enum machine_mode mode;
3219{
3220 if (s_register_operand (op, mode))
3221 return TRUE;
3222
3223 if (mode != VOIDmode && GET_MODE (op) != mode)
3224 return FALSE;
3225
3226 if (GET_CODE (op) == SUBREG)
3227 op = SUBREG_REG (op);
3228
3229 if (GET_CODE (op) == MEM)
3230 return memory_address_p (DFmode, XEXP (op, 0));
3231 return FALSE;
3232}
cce8749e 3233
d5b7b3ae 3234/* Return TRUE for valid index operands. */
cce8749e
CH
3235int
3236index_operand (op, mode)
3237 rtx op;
3238 enum machine_mode mode;
3239{
d5b7b3ae 3240 return (s_register_operand (op, mode)
ff9940b0 3241 || (immediate_operand (op, mode)
d5b7b3ae
RE
3242 && (GET_CODE (op) != CONST_INT
3243 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 3244}
cce8749e 3245
ff9940b0
RE
3246/* Return TRUE for valid shifts by a constant. This also accepts any
3247 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 3248 shift operator in this case was a mult. */
ff9940b0
RE
3249
3250int
3251const_shift_operand (op, mode)
3252 rtx op;
3253 enum machine_mode mode;
3254{
3255 return (power_of_two_operand (op, mode)
3256 || (immediate_operand (op, mode)
d5b7b3ae
RE
3257 && (GET_CODE (op) != CONST_INT
3258 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 3259}
ff9940b0 3260
cce8749e
CH
3261/* Return TRUE for arithmetic operators which can be combined with a multiply
3262 (shift). */
3263
3264int
3265shiftable_operator (x, mode)
3266 rtx x;
3267 enum machine_mode mode;
3268{
3269 if (GET_MODE (x) != mode)
3270 return FALSE;
3271 else
3272 {
3273 enum rtx_code code = GET_CODE (x);
3274
3275 return (code == PLUS || code == MINUS
3276 || code == IOR || code == XOR || code == AND);
3277 }
f3bb6135 3278}
cce8749e 3279
6ab589e0
JL
3280/* Return TRUE for binary logical operators. */
3281
3282int
3283logical_binary_operator (x, mode)
3284 rtx x;
3285 enum machine_mode mode;
3286{
3287 if (GET_MODE (x) != mode)
3288 return FALSE;
3289 else
3290 {
3291 enum rtx_code code = GET_CODE (x);
3292
3293 return (code == IOR || code == XOR || code == AND);
3294 }
3295}
3296
6354dc9b 3297/* Return TRUE for shift operators. */
cce8749e
CH
3298
3299int
3300shift_operator (x, mode)
3301 rtx x;
3302 enum machine_mode mode;
3303{
3304 if (GET_MODE (x) != mode)
3305 return FALSE;
3306 else
3307 {
3308 enum rtx_code code = GET_CODE (x);
3309
ff9940b0 3310 if (code == MULT)
aec3cfba 3311 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 3312
e2c671ba
RE
3313 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3314 || code == ROTATERT);
cce8749e 3315 }
f3bb6135 3316}
ff9940b0 3317
6354dc9b
NC
3318/* Return TRUE if x is EQ or NE. */
3319int
3320equality_operator (x, mode)
f3bb6135 3321 rtx x;
74bbc178 3322 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3323{
f3bb6135 3324 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
3325}
3326
e45b72c4
RE
3327/* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3328int
3329arm_comparison_operator (x, mode)
3330 rtx x;
3331 enum machine_mode mode;
3332{
3333 return (comparison_operator (x, mode)
3334 && GET_CODE (x) != LTGT
3335 && GET_CODE (x) != UNEQ);
3336}
3337
6354dc9b 3338/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
ff9940b0
RE
3339int
3340minmax_operator (x, mode)
3341 rtx x;
3342 enum machine_mode mode;
3343{
3344 enum rtx_code code = GET_CODE (x);
3345
3346 if (GET_MODE (x) != mode)
3347 return FALSE;
f3bb6135 3348
ff9940b0 3349 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 3350}
ff9940b0 3351
ff9940b0 3352/* Return TRUE if this is the condition code register.  If we aren't given
6354dc9b 3353 a mode, accept any class CCmode register. */
ff9940b0
RE
3354int
3355cc_register (x, mode)
f3bb6135
RE
3356 rtx x;
3357 enum machine_mode mode;
ff9940b0
RE
3358{
3359 if (mode == VOIDmode)
3360 {
3361 mode = GET_MODE (x);
d5b7b3ae 3362
ff9940b0
RE
3363 if (GET_MODE_CLASS (mode) != MODE_CC)
3364 return FALSE;
3365 }
f3bb6135 3366
d5b7b3ae
RE
3367 if ( GET_MODE (x) == mode
3368 && GET_CODE (x) == REG
3369 && REGNO (x) == CC_REGNUM)
ff9940b0 3370 return TRUE;
f3bb6135 3371
ff9940b0
RE
3372 return FALSE;
3373}
5bbe2d40
RE
3374
 3375/* Return TRUE if this is the condition code register.  If we aren't given
84ed5e79
RE
3376 a mode, accept any class CCmode register which indicates a dominance
3377 expression. */
5bbe2d40 3378int
84ed5e79 3379dominant_cc_register (x, mode)
5bbe2d40
RE
3380 rtx x;
3381 enum machine_mode mode;
3382{
3383 if (mode == VOIDmode)
3384 {
3385 mode = GET_MODE (x);
d5b7b3ae 3386
84ed5e79 3387 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
3388 return FALSE;
3389 }
3390
d5b7b3ae 3391 if ( mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
3392 && mode != CC_DLEmode && mode != CC_DLTmode
3393 && mode != CC_DGEmode && mode != CC_DGTmode
3394 && mode != CC_DLEUmode && mode != CC_DLTUmode
3395 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3396 return FALSE;
3397
d5b7b3ae 3398 return cc_register (x, mode);
5bbe2d40
RE
3399}
3400
2b835d68
RE
3401/* Return TRUE if X references a SYMBOL_REF. */
3402int
3403symbol_mentioned_p (x)
3404 rtx x;
3405{
6f7d635c 3406 register const char * fmt;
2b835d68
RE
3407 register int i;
3408
3409 if (GET_CODE (x) == SYMBOL_REF)
3410 return 1;
3411
3412 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 3413
2b835d68
RE
3414 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3415 {
3416 if (fmt[i] == 'E')
3417 {
3418 register int j;
3419
3420 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3421 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3422 return 1;
3423 }
3424 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3425 return 1;
3426 }
3427
3428 return 0;
3429}
3430
3431/* Return TRUE if X references a LABEL_REF. */
3432int
3433label_mentioned_p (x)
3434 rtx x;
3435{
6f7d635c 3436 register const char * fmt;
2b835d68
RE
3437 register int i;
3438
3439 if (GET_CODE (x) == LABEL_REF)
3440 return 1;
3441
3442 fmt = GET_RTX_FORMAT (GET_CODE (x));
3443 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3444 {
3445 if (fmt[i] == 'E')
3446 {
3447 register int j;
3448
3449 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3450 if (label_mentioned_p (XVECEXP (x, i, j)))
3451 return 1;
3452 }
3453 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3454 return 1;
3455 }
3456
3457 return 0;
3458}
3459
ff9940b0
RE
3460enum rtx_code
3461minmax_code (x)
f3bb6135 3462 rtx x;
ff9940b0
RE
3463{
3464 enum rtx_code code = GET_CODE (x);
3465
3466 if (code == SMAX)
3467 return GE;
f3bb6135 3468 else if (code == SMIN)
ff9940b0 3469 return LE;
f3bb6135 3470 else if (code == UMIN)
ff9940b0 3471 return LEU;
f3bb6135 3472 else if (code == UMAX)
ff9940b0 3473 return GEU;
f3bb6135 3474
ff9940b0
RE
3475 abort ();
3476}
3477
6354dc9b 3478/* Return 1 if memory locations are adjacent. */
f3bb6135 3479int
ff9940b0
RE
3480adjacent_mem_locations (a, b)
3481 rtx a, b;
3482{
3483 int val0 = 0, val1 = 0;
3484 int reg0, reg1;
3485
3486 if ((GET_CODE (XEXP (a, 0)) == REG
3487 || (GET_CODE (XEXP (a, 0)) == PLUS
3488 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3489 && (GET_CODE (XEXP (b, 0)) == REG
3490 || (GET_CODE (XEXP (b, 0)) == PLUS
3491 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3492 {
3493 if (GET_CODE (XEXP (a, 0)) == PLUS)
3494 {
3495 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3496 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3497 }
3498 else
3499 reg0 = REGNO (XEXP (a, 0));
3500 if (GET_CODE (XEXP (b, 0)) == PLUS)
3501 {
3502 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3503 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3504 }
3505 else
3506 reg1 = REGNO (XEXP (b, 0));
3507 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3508 }
3509 return 0;
3510}
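/* For example (illustrative): MEMs addressed by (reg r3) and
   (plus (reg r3) (const_int 4)) are adjacent; (reg r3) and
   (plus (reg r3) (const_int 8)) are not, since the offsets must
   differ by exactly one word in either direction.  */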

/* Return 1 if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  */
int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
        return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != SImode
          || REGNO (SET_DEST (elt)) != (unsigned int) (dest_regno + i - base)
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != SImode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}
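
/* For illustration only (an assumed example, not from the original
   sources): a write-back load multiple such as "ldmia r0!, {r4, r5}"
   is represented by a PARALLEL of the shape accepted above:

     (parallel [(set (reg r0) (plus (reg r0) (const_int 8)))
                (set (reg r4) (mem (reg r0)))
                (set (reg r5) (mem (plus (reg r0) (const_int 4))))])

   The write-back SET adds (count - 1) * 4, i.e. four bytes for each
   register actually loaded.  */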

/* Return 1 if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  */
int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
        return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != SImode
          || REGNO (SET_SRC (elt)) != (unsigned int) (src_regno + i - base)
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != SImode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}

/* Return the kind of load-multiple instruction that can implement the
   NOPS loads described by OPERANDS: 1 => ldmia, 2 => ldmib,
   3 => ldmda, 4 => ldmdb, 5 => ldmia after first adjusting the base,
   or 0 if no load-multiple is suitable.  On success the sorted
   destination registers are stored in REGS, the base register number
   in BASE and the lowest memory offset in LOAD_OFFSET.  */
int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (i.e. immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
        operands[nops + i] = alter_subreg (operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
        abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
         looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
        return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
           || (GET_CODE (reg) == SUBREG
               && GET_CODE (reg = SUBREG_REG (reg)) == REG))
          || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
              && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
                   == REG)
                  || (GET_CODE (reg) == SUBREG
                      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
              && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
                  == CONST_INT)))
        {
          if (i == 0)
            {
              base_reg = REGNO (reg);
              unsorted_regs[0] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              order[0] = 0;
            }
          else
            {
              if (base_reg != (int) REGNO (reg))
                /* Not addressed from the same base register.  */
                return 0;

              unsorted_regs[i] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              if (unsorted_regs[i] < unsorted_regs[order[0]])
                order[0] = i;
            }

          /* If it isn't an integer register, or if it overwrites the
             base register but isn't the last insn in the list, then
             we can't do this.  */
          if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
              || (i != nops - 1 && unsorted_regs[i] == base_reg))
            return 0;

          unsorted_offsets[i] = INTVAL (offset);
        }
      else
        /* Not a suitable memory address.  */
        return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
        if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
            && (order[i] == order[i - 1]
                || unsorted_regs[j] < unsorted_regs[order[i]]))
          order[i] = j;

      /* Have we found a suitable register?  If not, one must be used more
         than once.  */
      if (order[i] == order[i - 1])
        return 0;

      /* Is the memory address adjacent and ascending?  */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
        return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
        regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
     if the offset isn't small enough.  The reason 2 ldrs are faster
     is because these ARMs are able to do more than one cache access
     in a single cycle.  The ARM9 and StrongARM have Harvard caches,
     whilst the ARM8 has a double bandwidth cache.  This means that
     these cores can do both an instruction fetch and a data fetch in
     a single cycle, so the trick of calculating the address into a
     scratch register (one of the result regs) and then doing a load
     multiple actually becomes slower (and no smaller in code size).
     That is the transformation

         ldr rd1, [rbase + offset]
         ldr rd2, [rbase + offset + 4]

     to

         add rd1, rbase, offset
         ldmia rd1, {rd1, rd2}

     produces worse code -- '3 cycles + any stalls on rd2' instead of
     '2 cycles + any stalls on rd2'.  On ARMs with only one cache
     access per cycle, the first sequence could never complete in less
     than 6 cycles, whereas the ldm sequence would only take 5 and
     would make better use of sequential accesses if not hitting the
     cache.

     We cheat here and test 'arm_ld_sched' which we currently know to
     only be true for the ARM8, ARM9 and StrongARM.  If this ever
     changes, then the test below needs to be reworked.  */
  if (nops == 2 && arm_ld_sched)
    return 0;

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
          || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}

/* Output the assembler for the load-multiple peephole described by
   OPERANDS, as matched by load_multiple_sequence.  */
const char *
emit_ldm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      if (offset >= 0)
        sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
                 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
                 (long) offset);
      else
        sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
                 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
                 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
           reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
             reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
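
/* A sketch of typical output (illustrative; the exact spelling depends
   on REGISTER_PREFIX, the %? conditional expansion and the comment
   character substituted for %@):

     ldmia   r0, {r1, r2, r3}   @ phole ldm

   For return code 5 from load_multiple_sequence an add or sub of the
   offset into the first destination register is emitted first, and
   the ldmia is then based on that register.  */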

/* As load_multiple_sequence, but for store-multiples: returns
   1 => stmia, 2 => stmib, 3 => stmda, 4 => stmdb, or 0 if no
   store-multiple instruction is suitable.  */
int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (i.e. immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
        operands[nops + i] = alter_subreg (operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
        abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
         looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
        return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
           || (GET_CODE (reg) == SUBREG
               && GET_CODE (reg = SUBREG_REG (reg)) == REG))
          || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
              && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
                   == REG)
                  || (GET_CODE (reg) == SUBREG
                      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
              && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
                  == CONST_INT)))
        {
          if (i == 0)
            {
              base_reg = REGNO (reg);
              unsorted_regs[0] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              order[0] = 0;
            }
          else
            {
              if (base_reg != (int) REGNO (reg))
                /* Not addressed from the same base register.  */
                return 0;

              unsorted_regs[i] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              if (unsorted_regs[i] < unsorted_regs[order[0]])
                order[0] = i;
            }

          /* If it isn't an integer register, then we can't do this.  */
          if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
            return 0;

          unsorted_offsets[i] = INTVAL (offset);
        }
      else
        /* Not a suitable memory address.  */
        return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
        if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
            && (order[i] == order[i - 1]
                || unsorted_regs[j] < unsorted_regs[order[i]]))
          order[i] = j;

      /* Have we found a suitable register?  If not, one must be used more
         than once.  */
      if (order[i] == order[i - 1])
        return 0;

      /* Is the memory address adjacent and ascending?  */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
        return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
        regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}

/* Output the assembler for the store-multiple peephole described by
   OPERANDS, as matched by store_multiple_sequence.  */
const char *
emit_stm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
           reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
             reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}

/* Return 1 if OP is the PARALLEL pattern generated when pushing
   multiple registers (its first element is an UNSPEC_PUSH_MULT set).  */
int
multi_register_push (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != PARALLEL
      || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
      || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
      || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
    return 0;

  return 1;
}
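
/* Illustration (an assumed sketch of the shape, not taken from the
   sources): the PARALLEL tested above begins with something like

     (parallel [(set (mem ...) (unspec [(reg r4)] UNSPEC_PUSH_MULT))
                ...])

   and only this first element is inspected here; the remaining
   elements describe the other pushed registers.  */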
\f
/* Routines for use with attributes.  */

/* Return nonzero if ATTR is a valid attribute for DECL.
   ARGS are the arguments supplied with ATTR.

   Supported attributes:

   naked:
     don't output any prologue or epilogue code; the user is assumed
     to do the right thing.

   isr or interrupt:
     Interrupt Service Routine.

   interfacearm:
     Always assume that this function will be entered in ARM mode,
     not Thumb mode, and that the caller wishes to be returned to in
     ARM mode.  */
int
arm_valid_machine_decl_attribute (decl, attr, args)
     tree decl;
     tree attr;
     tree args;
{
  /* The interrupt attribute can take args, so check for it before
     rejecting other attributes on the grounds that they did have args.  */
  if (is_attribute_p ("isr", attr)
      || is_attribute_p ("interrupt", attr))
    return TREE_CODE (decl) == FUNCTION_DECL;

  if (args != NULL_TREE)
    return 0;

  if (is_attribute_p ("naked", attr))
    return TREE_CODE (decl) == FUNCTION_DECL;

#ifdef ARM_PE
  if (is_attribute_p ("interfacearm", attr))
    return TREE_CODE (decl) == FUNCTION_DECL;
#endif /* ARM_PE */

  return 0;
}
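
/* Usage sketch (illustrative source fragments with hypothetical
   function names):

     void handler (void) __attribute__ ((interrupt));
     void stub (void) __attribute__ ((naked));

   Both are accepted only on function declarations; "isr"/"interrupt"
   may carry arguments, while "naked" and "interfacearm" may not.  */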
\f
/* Routines for use in generating RTL.  */

rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
                       in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* XScale has load-store double instructions, but they have stricter
     alignment requirements than load-store multiple, so we cannot
     use them.

     For XScale ldm requires 2 + NREGS cycles to complete and blocks
     the pipeline until completion.

        NREGS           CYCLES
          1               3
          2               4
          3               5
          4               6

     An ldr instruction takes 1-3 cycles, but does not block the
     pipeline.

        NREGS           CYCLES
          1              1-3
          2              2-6
          3              3-9
          4              4-12

     Best case ldr will always win.  However, the more ldr instructions
     we issue, the less likely we are to be able to schedule them well.
     Using ldr instructions also increases code size.

     As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
     for counts of 3 or 4 regs.  */
  if (arm_is_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
        {
          mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
          RTX_UNCHANGING_P (mem) = unchanging_p;
          MEM_IN_STRUCT_P (mem) = in_struct_p;
          MEM_SCALAR_P (mem) = scalar_p;
          emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
        }

      if (write_back)
        emit_move_insn (from, plus_constant (from, count * 4 * sign));

      seq = gen_sequence ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
                             rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx_SET (GET_MODE (from), from,
                       plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;
      XVECEXP (result, 0, i)
        = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
    }

  return result;
}
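
/* Illustrative comparison (assumed register numbering, ascending
   addresses): a two-word load with write-back expands on XScale to
   the non-blocking sequence

     ldr  r0, [rb]
     ldr  r1, [rb, #4]
     add  rb, rb, #8

   whereas other cores get the single PARALLEL that matches
   load_multiple_operation and is emitted as "ldmia rb!, {r0, r1}".  */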

rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
                        in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* See arm_gen_load_multiple for discussion of
     the pros/cons of ldm/stm usage for XScale.  */
  if (arm_is_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
        {
          mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
          RTX_UNCHANGING_P (mem) = unchanging_p;
          MEM_IN_STRUCT_P (mem) = in_struct_p;
          MEM_SCALAR_P (mem) = scalar_p;
          emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
        }

      if (write_back)
        emit_move_insn (to, plus_constant (to, count * 4 * sign));

      seq = gen_sequence ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
                             rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx_SET (GET_MODE (to), to,
                       plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;

      XVECEXP (result, 0, i)
        = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
    }

  return result;
}

/* Generate code to copy a block of INTVAL (operands[2]) bytes (with
   alignment operands[3]) from operands[1] to operands[0]; return 1 if
   the copy was expanded, 0 if the arguments are unsuitable.  */
int
arm_gen_movstrqi (operands)
     rtx * operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
  int dst_scalar_p, src_scalar_p;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  dst_scalar_p = MEM_SCALAR_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
  src_scalar_p = MEM_SCALAR_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = NUM_INTS (INTVAL (operands[2]));
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i += 4)
    {
      if (in_words_to_go > 4)
        emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
                                          src_unchanging_p,
                                          src_in_struct_p,
                                          src_scalar_p));
      else
        emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
                                          FALSE, src_unchanging_p,
                                          src_in_struct_p, src_scalar_p));

      if (out_words_to_go)
        {
          if (out_words_to_go > 4)
            emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
                                               dst_unchanging_p,
                                               dst_in_struct_p,
                                               dst_scalar_p));
          else if (out_words_to_go != 1)
            emit_insn (arm_gen_store_multiple (0, out_words_to_go,
                                               dst, TRUE,
                                               (last_bytes == 0
                                                ? FALSE : TRUE),
                                               dst_unchanging_p,
                                               dst_in_struct_p,
                                               dst_scalar_p));
          else
            {
              mem = gen_rtx_MEM (SImode, dst);
              RTX_UNCHANGING_P (mem) = dst_unchanging_p;
              MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
              MEM_SCALAR_P (mem) = dst_scalar_p;
              emit_move_insn (mem, gen_rtx_REG (SImode, 0));
              if (last_bytes != 0)
                emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
            }
        }

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx_MEM (SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      MEM_SCALAR_P (mem) = dst_scalar_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check.  */
        abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
        abort ();

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (last_bytes && part_bytes_reg == NULL)
    abort ();

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      /* The bytes we want are in the top end of the word.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
                              GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
        {
          mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
          RTX_UNCHANGING_P (mem) = dst_unchanging_p;
          MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
          MEM_SCALAR_P (mem) = dst_scalar_p;
          emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));

          if (--last_bytes)
            {
              tmp = gen_reg_rtx (SImode);
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
              part_bytes_reg = tmp;
            }
        }
    }
  else
    {
      if (last_bytes > 1)
        {
          mem = gen_rtx_MEM (HImode, dst);
          RTX_UNCHANGING_P (mem) = dst_unchanging_p;
          MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
          MEM_SCALAR_P (mem) = dst_scalar_p;
          emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
          last_bytes -= 2;
          if (last_bytes)
            {
              rtx tmp = gen_reg_rtx (SImode);

              emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
              part_bytes_reg = tmp;
            }
        }

      if (last_bytes)
        {
          mem = gen_rtx_MEM (QImode, dst);
          RTX_UNCHANGING_P (mem) = dst_unchanging_p;
          MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
          MEM_SCALAR_P (mem) = dst_scalar_p;
          emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
        }
    }

  return 1;
}
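
/* Worked example (illustrative): for a 10-byte copy the loop above
   moves two full words with a load/store-multiple pair, the third
   loaded word (holding the trailing bytes) is left in part_bytes_reg,
   and the final 2 bytes are stored with a single halfword move on
   little-endian targets, or byte by byte from the top of the word on
   big-endian ones.  */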

/* Generate a memory reference for a half word, such that it will be
   loaded into the top 16 bits of the word.  We can assume that the
   address is known to be alignable and of the form reg, or
   plus (reg, const).  */
rtx
arm_gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail.  */
  if (TARGET_MMU_TRAPS
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));

  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
}
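
/* Illustration (assumed examples): when the halfword already lies in
   the top half of the aligned word -- big-endian at offset 0, or
   little-endian at offset 2 -- the bare (mem:SI ...) is returned;
   otherwise the load is wrapped as

     (rotate:SI (mem:SI <aligned address>) (const_int 16))

   so that the wanted halfword still ends up in the top 16 bits.  */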

/* Select a dominance comparison mode if possible.  We support three
   forms.
   COND_OR == 0 => (X && Y)
   COND_OR == 1 => ((!X) || Y)
   COND_OR == 2 => (X || Y)
   If we are unable to support a dominance comparison we return CC
   mode.  This will then fail to match for the RTL expressions that
   generate this call.  */

static enum machine_mode
select_dominance_cc_mode (x, y, cond_or)
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
          != CCmode))
    return CCmode;

  /* The if_then_else variant of this tests the second condition if the
     first passes, but is true if the first fails.  Reverse the first
     condition to get a true "inclusive-or" expression.  */
  if (cond_or == 1)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this.  */
  if (cond1 != cond2
      && !comparison_dominates_p (cond1, cond2)
      && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || !cond_or)
        return CC_DEQmode;

      switch (cond2)
        {
        case LE: return CC_DLEmode;
        case LEU: return CC_DLEUmode;
        case GE: return CC_DGEmode;
        case GEU: return CC_DGEUmode;
        default: break;
        }

      break;

    case LT:
      if (cond2 == LT || !cond_or)
        return CC_DLTmode;
      if (cond2 == LE)
        return CC_DLEmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || !cond_or)
        return CC_DGTmode;
      if (cond2 == GE)
        return CC_DGEmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || !cond_or)
        return CC_DLTUmode;
      if (cond2 == LEU)
        return CC_DLEUmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || !cond_or)
        return CC_DGTUmode;
      if (cond2 == GEU)
        return CC_DGEUmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same.  */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}
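
/* Source-level sketch (illustrative): for

     if (x == y && x == z)
       ...

   both sub-comparisons are EQ, so CC_DEQmode is selected and the pair
   can be emitted as "cmp x, y" followed by the conditionalised
   "cmpeq x, z", leaving a single combined condition to branch on.  */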

enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      switch (op)
        {
        case EQ:
        case NE:
        case UNORDERED:
        case ORDERED:
        case UNLT:
        case UNLE:
        case UNGT:
        case UNGE:
        case UNEQ:
        case LTGT:
          return CCFPmode;

        case LT:
        case LE:
        case GT:
        case GE:
          return CCFPEmode;

        default:
          abort ();
        }
    }

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
          || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
          || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
          || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
          || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
                                     INTVAL (XEXP (x, 2)));

  /* Alternate canonicalizations of the above.  These are somewhat cleaner.  */
  if (GET_CODE (x) == AND
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);

  if (GET_CODE (x) == IOR
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);

  /* For an operation that sets the condition codes as a side-effect,
     the V flag is not set correctly, so we can only use comparisons
     where this doesn't matter.  (For LT and GE we can use "mi" and
     "pl" instead.)  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
          || GET_CODE (x) == AND || GET_CODE (x) == IOR
          || GET_CODE (x) == XOR || GET_CODE (x) == MULT
          || GET_CODE (x) == NOT || GET_CODE (x) == NEG
          || GET_CODE (x) == LSHIFTRT
          || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
          || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
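
/* Illustration of the CC_Cmode case (an assumed source fragment): an
   unsigned overflow test such as

     if (a + b < a)	   (with a and b unsigned)

   compares a PLUS against one of its own operands with LTU, so only
   the carry flag is needed and CC_Cmode is chosen for the compare.  */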

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for register 0 in the proper mode.  FP means this is a
   floating point compare: I don't think that it is needed on the arm.  */

rtx
arm_gen_compare_reg (code, x, y)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);

  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
                          gen_rtx_COMPARE (mode, x, y)));

  return cc_reg;
}

/* Handle loading a half-word into a register during reload by
   synthesising it from two byte loads; operands[2] supplies the
   scratch registers needed for the more complex addresses.  */
void
arm_reload_in_hi (operands)
     rtx * operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
                   - MIN (UNITS_PER_WORD,
                          GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
         are two cases here: the first where there is a simple
         stack-slot replacement and a second where the stack-slot is
         out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
        {
          ref = reg_equiv_mem[REGNO (ref)];
          base = find_replacement (&XEXP (ref, 0));
        }
      else
        /* The slot is out of range, or was dressed up in a SUBREG.  */
        base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
            ? (offset & 0xfff)
            : -((-offset) & 0xfff));

      /* Corner case: if lo is the max offset then we would be out of range
         once we have added the additional 1 below, so bump the msb into the
         pre-loading insn(s).  */
      if (lo == 4095)
        lo &= 0x7ff;

      hi = ((((offset - lo) & HOST_INT (0xffffffff))
             ^ HOST_INT (0x80000000))
            - HOST_INT (0x80000000));

      if (hi + lo != offset)
        abort ();

      if (hi != 0)
        {
          rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

          /* Get the base address; addsi3 knows how to handle constants
             that require more than one insn.  */
          emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
          base = base_plus;
          offset = lo;
        }
    }

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
                                   gen_rtx_MEM (QImode,
                                                plus_constant (base,
                                                               offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
                                   gen_rtx_MEM (QImode,
                                                plus_constant (base,
                                                               offset + 1))));
  if (!BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
                            gen_rtx_IOR (SImode,
                                         gen_rtx_ASHIFT
                                         (SImode,
                                          gen_rtx_SUBREG (SImode, operands[0], 0),
                                          GEN_INT (8)),
                                         scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
                            gen_rtx_IOR (SImode,
                                         gen_rtx_ASHIFT (SImode, scratch,
                                                         GEN_INT (8)),
                                         gen_rtx_SUBREG (SImode, operands[0],
                                                         0))));
}
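
/* Sketch of the synthesised sequence (illustrative; little-endian,
   simple base register):

     ldrb  scratch, [base, #offset]
     ldrb  out, [base, #offset + 1]
     orr   out, scratch, out, lsl #8

   i.e. the high byte is shifted into place and combined with the low
   byte; big-endian swaps the roles of the two loaded bytes.  */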

/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */
void
arm_reload_out_hi (operands)
     rtx * operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
                   - MIN (UNITS_PER_WORD,
                          GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
         are two cases here: the first where there is a simple
         stack-slot replacement and a second where the stack-slot is
         out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
        {
          ref = reg_equiv_mem[REGNO (ref)];
          base = find_replacement (&XEXP (ref, 0));
        }
      else
        /* The slot is out of range, or was dressed up in a SUBREG.  */
        base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
        {
          /* Updating base_plus might destroy outval; see if we can
             swap the scratch and base_plus.  */
          if (!reg_overlap_mentioned_p (scratch, outval))
            {
              rtx tmp = scratch;
              scratch = base_plus;
              base_plus = tmp;
            }
          else
            {
              rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

              /* Be conservative and copy OUTVAL into the scratch now;
                 this should only be necessary if outval is a subreg
                 of something larger than a word.  */
              /* XXX Might this clobber base?  I can't see how it can,
                 since scratch is known to overlap with OUTVAL, and
                 must be wider than a word.  */
              emit_insn (gen_movhi (scratch_hi, outval));
              outval = scratch_hi;
            }
        }

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
            ? (offset & 0xfff)
            : -((-offset) & 0xfff));

      /* Corner case: if lo is the max offset then we would be out of range
         once we have added the additional 1 below, so bump the msb into the
         pre-loading insn(s).  */
      if (lo == 4095)
        lo &= 0x7ff;

      hi = ((((offset - lo) & HOST_INT (0xffffffff))
             ^ HOST_INT (0x80000000))
            - HOST_INT (0x80000000));

      if (hi + lo != offset)
        abort ();

      if (hi != 0)
        {
          rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

          /* Be careful not to destroy OUTVAL.  */
          if (reg_overlap_mentioned_p (base_plus, outval))
            {
              /* Updating base_plus might destroy outval; see if we
                 can swap the scratch and base_plus.  */
              if (!reg_overlap_mentioned_p (scratch, outval))
                {
                  rtx tmp = scratch;
                  scratch = base_plus;
                  base_plus = tmp;
                }
              else
                {
                  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

                  /* Be conservative and copy outval into scratch now;
                     this should only be necessary if outval is a
                     subreg of something larger than a word.  */
                  /* XXX Might this clobber base?  I can't see how it
                     can, since scratch is known to overlap with
                     outval.  */
                  emit_insn (gen_movhi (scratch_hi, outval));
                  outval = scratch_hi;
                }
            }

          /* Get the base address; addsi3 knows how to handle constants
             that require more than one insn.  */
          emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
          base = base_plus;
          offset = lo;
        }
    }

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
                                         plus_constant (base, offset + 1)),
                            gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
                              gen_rtx_SUBREG (SImode, outval, 0),
                              GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
                            gen_rtx_SUBREG (QImode, scratch, 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
                            gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
                              gen_rtx_SUBREG (SImode, outval, 0),
                              GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
                                         plus_constant (base, offset + 1)),
                            gen_rtx_SUBREG (QImode, scratch, 0)));
    }
}
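
/* Sketch of the stores emitted (illustrative; little-endian, simple
   base register):

     strb  outval, [base, #offset]
     mov   scratch, outval, lsr #8
     strb  scratch, [base, #offset + 1]

   i.e. the low byte first, then the high byte via the scratch
   register; big-endian emits the same pair with the byte addresses
   swapped.  */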
\f
/* Print a symbolic form of X to the debug file, F.  */
static void
arm_print_value (f, x)
     FILE * f;
     rtx x;
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case CONST_DOUBLE:
      fprintf (f, "<0x%lx,0x%lx>", (long) XWINT (x, 2), (long) XWINT (x, 3));
      return;

    case CONST_STRING:
      fprintf (f, "\"%s\"", XSTR (x, 0));
      return;

    case SYMBOL_REF:
      fprintf (f, "`%s'", XSTR (x, 0));
      return;

    case LABEL_REF:
      fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
      return;

    case CONST:
      arm_print_value (f, XEXP (x, 0));
      return;

    case PLUS:
      arm_print_value (f, XEXP (x, 0));
      fprintf (f, "+");
      arm_print_value (f, XEXP (x, 1));
      return;

    case PC:
      fprintf (f, "pc");
      return;

    default:
      fprintf (f, "????");
      return;
    }
}
\f
/* Routines for manipulation of the constant pool.  */

/* Arm instructions cannot load a large constant directly into a
   register; they have to come from a pc relative load.  The constant
   must therefore be placed in the addressable range of the pc
   relative load.  Depending on the precise pc relative load
   instruction the range is somewhere between 256 bytes and 4k.  This
   means that we often have to dump a constant inside a function, and
   generate code to branch around it.

   It is important to minimize this, since the branches will slow
   things down and make the code larger.

   Normally we can hide the table after an existing unconditional
   branch so that there is no interruption of the flow, but in the
   worst case the code looks like this:

        ldr     rn, L1
        ...
        b       L2
        align
        L1:     .long value
        L2:
        ...

        ldr     rn, L3
        ...
        b       L4
        align
        L3:     .long value
        L4:
        ...

   We fix this by performing a scan after scheduling, which notices
   which instructions need to have their operands fetched from the
   constant table and builds the table.

   The algorithm starts by building a table of all the constants that
   need fixing up and all the natural barriers in the function (places
   where a constant table can be dropped without breaking the flow).
   For each fixup we note how far the pc-relative replacement will be
   able to reach and the offset of the instruction into the function.

   Having built the table we then group the fixes together to form
   tables that are as large as possible (subject to addressing
   constraints) and emit each table of constants after the last
   barrier that is within range of all the instructions in the group.
   If a group does not contain a barrier, then we forcibly create one
   by inserting a jump instruction into the flow.  Once the table has
   been inserted, the insns are then modified to reference the
   relevant entry in the pool.

   Possible enhancements to the algorithm (not implemented) are:

   1) For some processors and object formats, there may be benefit in
   aligning the pools to the start of cache lines; this alignment
   would need to be taken into account when calculating addressability
   of a pool.  */

/* These typedefs are located at the start of this file, so that
   they can be used in the prototypes there.  This comment is to
   remind readers of that fact so that the following structures
   can be understood more easily.

     typedef struct minipool_node    Mnode;
     typedef struct minipool_fixup   Mfix;  */

struct minipool_node
{
  /* Doubly linked chain of entries.  */
  Mnode * next;
  Mnode * prev;
  /* The maximum offset into the code that this entry can be placed.  While
     pushing fixes for forward references, all entries are sorted in order
     of increasing max_address.  */
  HOST_WIDE_INT max_address;
  /* Similarly for an entry inserted for a backwards ref.  */
  HOST_WIDE_INT min_address;
  /* The number of fixes referencing this entry.  This can become zero
     if we "unpush" an entry.  In this case we ignore the entry when we
     come to emit the code.  */
  int refcount;
  /* The offset from the start of the minipool.  */
  HOST_WIDE_INT offset;
  /* The value in the table.  */
  rtx value;
  /* The mode of value.  */
  enum machine_mode mode;
  int fix_size;
};

struct minipool_fixup
{
  Mfix * next;
  rtx insn;
  HOST_WIDE_INT address;
  rtx * loc;
  enum machine_mode mode;
  int fix_size;
  rtx value;
  Mnode * minipool;
  HOST_WIDE_INT forwards;
  HOST_WIDE_INT backwards;
};

/* Fixes less than a word need padding out to a word boundary.  */
#define MINIPOOL_FIX_SIZE(mode) \
  (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
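
/* For example (illustrative), a QImode or HImode fix is padded out to
   4 bytes, while DImode and DFmode fixes occupy their full 8 bytes.  */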

static Mnode * minipool_vector_head;
static Mnode * minipool_vector_tail;
static rtx minipool_vector_label;

/* The linked list of all minipool fixes required for this function.  */
Mfix * minipool_fix_head;
Mfix * minipool_fix_tail;
/* The fix entry for the current minipool, once it has been placed.  */
Mfix * minipool_barrier;

/* Determines if INSN is the start of a jump table.  Returns the end
   of the TABLE or NULL_RTX.  */
static rtx
is_jump_table (insn)
     rtx insn;
{
  rtx table;

  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn) != NULL
      && ((table = next_real_insn (JUMP_LABEL (insn)))
          == next_real_insn (insn))
      && table != NULL
      && GET_CODE (table) == JUMP_INSN
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    return table;

  return NULL_RTX;
}

/* Return the size in bytes of the jump table INSN.  */
static HOST_WIDE_INT
get_jump_table_size (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;

  return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
}
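
/* Worked example (illustrative): an ADDR_DIFF_VEC in HImode with 10
   entries occupies GET_MODE_SIZE (HImode) * 10 = 20 bytes; for a
   plain ADDR_VEC the element vector is operand 0 rather than
   operand 1, hence the test above.  */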
2b835d68 5210
d5b7b3ae
RE
5211/* Move a minipool fix MP from its current location to before MAX_MP.
5212 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5213 contrains may need updating. */
5214static Mnode *
5215move_minipool_fix_forward_ref (mp, max_mp, max_address)
5216 Mnode * mp;
5217 Mnode * max_mp;
5218 HOST_WIDE_INT max_address;
5219{
5220 /* This should never be true and the code below assumes these are
5221 different. */
5222 if (mp == max_mp)
5223 abort ();
5224
5225 if (max_mp == NULL)
5226 {
5227 if (max_address < mp->max_address)
5228 mp->max_address = max_address;
5229 }
5230 else
2b835d68 5231 {
d5b7b3ae
RE
5232 if (max_address > max_mp->max_address - mp->fix_size)
5233 mp->max_address = max_mp->max_address - mp->fix_size;
5234 else
5235 mp->max_address = max_address;
2b835d68 5236
d5b7b3ae
RE
5237 /* Unlink MP from its current position. Since max_mp is non-null,
5238 mp->prev must be non-null. */
5239 mp->prev->next = mp->next;
5240 if (mp->next != NULL)
5241 mp->next->prev = mp->prev;
5242 else
5243 minipool_vector_tail = mp->prev;
2b835d68 5244
d5b7b3ae
RE
5245 /* Re-insert it before MAX_MP. */
5246 mp->next = max_mp;
5247 mp->prev = max_mp->prev;
5248 max_mp->prev = mp;
5249
5250 if (mp->prev != NULL)
5251 mp->prev->next = mp;
5252 else
5253 minipool_vector_head = mp;
5254 }
2b835d68 5255
d5b7b3ae
RE
5256 /* Save the new entry. */
5257 max_mp = mp;
5258
5259 /* Scan over the preceeding entries and adjust their addresses as
5260 required. */
5261 while (mp->prev != NULL
5262 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5263 {
5264 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5265 mp = mp->prev;
2b835d68
RE
5266 }
5267
d5b7b3ae 5268 return max_mp;
2b835d68
RE
5269}
5270
d5b7b3ae
RE
5271/* Add a constant to the minipool for a forward reference. Returns the
5272 node added or NULL if the constant will not fit in this pool. */
5273static Mnode *
5274add_minipool_forward_ref (fix)
5275 Mfix * fix;
5276{
5277 /* If set, max_mp is the first pool_entry that has a lower
5278 constraint than the one we are trying to add. */
5279 Mnode * max_mp = NULL;
5280 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5281 Mnode * mp;
5282
5283 /* If this fix's address is greater than the address of the first
5284 entry, then we can't put the fix in this pool. We subtract the
5285 size of the current fix to ensure that if the table is fully
5286 packed we still have enough room to insert this value by suffling
5287 the other fixes forwards. */
5288 if (minipool_vector_head &&
5289 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5290 return NULL;
2b835d68 5291
d5b7b3ae
RE
5292 /* Scan the pool to see if a constant with the same value has
5293 already been added. While we are doing this, also note the
5294 location where we must insert the constant if it doesn't already
5295 exist. */
5296 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5297 {
5298 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5299 && fix->mode == mp->mode
5300 && (GET_CODE (fix->value) != CODE_LABEL
5301 || (CODE_LABEL_NUMBER (fix->value)
5302 == CODE_LABEL_NUMBER (mp->value)))
5303 && rtx_equal_p (fix->value, mp->value))
5304 {
5305 /* More than one fix references this entry. */
5306 mp->refcount++;
5307 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5308 }
5309
5310 /* Note the insertion point if necessary. */
5311 if (max_mp == NULL
5312 && mp->max_address > max_address)
5313 max_mp = mp;
5314 }
5315
5316 /* The value is not currently in the minipool, so we need to create
5317 a new entry for it. If MAX_MP is NULL, the entry will be put on
5318 the end of the list since the placement is less constrained than
5319 any existing entry. Otherwise, we insert the new fix before
 5320 MAX_MP and, if necessary, adjust the constraints on the other
5321 entries. */
5322 mp = xmalloc (sizeof (* mp));
5323 mp->fix_size = fix->fix_size;
5324 mp->mode = fix->mode;
5325 mp->value = fix->value;
5326 mp->refcount = 1;
5327 /* Not yet required for a backwards ref. */
5328 mp->min_address = -65536;
5329
5330 if (max_mp == NULL)
5331 {
5332 mp->max_address = max_address;
5333 mp->next = NULL;
5334 mp->prev = minipool_vector_tail;
5335
5336 if (mp->prev == NULL)
5337 {
5338 minipool_vector_head = mp;
5339 minipool_vector_label = gen_label_rtx ();
7551cbc7 5340 }
2b835d68 5341 else
d5b7b3ae 5342 mp->prev->next = mp;
2b835d68 5343
5344 minipool_vector_tail = mp;
5345 }
5346 else
5347 {
5348 if (max_address > max_mp->max_address - mp->fix_size)
5349 mp->max_address = max_mp->max_address - mp->fix_size;
5350 else
5351 mp->max_address = max_address;
5352
5353 mp->next = max_mp;
5354 mp->prev = max_mp->prev;
5355 max_mp->prev = mp;
5356 if (mp->prev != NULL)
5357 mp->prev->next = mp;
5358 else
5359 minipool_vector_head = mp;
5360 }
5361
5362 /* Save the new entry. */
5363 max_mp = mp;
5364
 5365 /* Scan over the preceding entries and adjust their addresses as
5366 required. */
5367 while (mp->prev != NULL
5368 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5369 {
5370 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5371 mp = mp->prev;
5372 }
5373
5374 return max_mp;
5375}
5376
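/* Like move_minipool_fix_forward_ref, but for a backwards reference:
   reposition MP after MIN_MP (when MIN_MP is non-null), tighten its
   minimum address constraint to MIN_ADDRESS, then rescan the pool to
   update the offsets and constraints of the other entries.  Returns
   the new MIN_MP.  */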
5377static Mnode *
5378move_minipool_fix_backward_ref (mp, min_mp, min_address)
5379 Mnode * mp;
5380 Mnode * min_mp;
5381 HOST_WIDE_INT min_address;
5382{
5383 HOST_WIDE_INT offset;
5384
5385 /* This should never be true, and the code below assumes these are
5386 different. */
5387 if (mp == min_mp)
5388 abort ();
5389
5390 if (min_mp == NULL)
2b835d68 5391 {
5392 if (min_address > mp->min_address)
5393 mp->min_address = min_address;
5394 }
5395 else
5396 {
5397 /* We will adjust this below if it is too loose. */
5398 mp->min_address = min_address;
5399
5400 /* Unlink MP from its current position. Since min_mp is non-null,
5401 mp->next must be non-null. */
5402 mp->next->prev = mp->prev;
5403 if (mp->prev != NULL)
5404 mp->prev->next = mp->next;
5405 else
5406 minipool_vector_head = mp->next;
5407
5408 /* Reinsert it after MIN_MP. */
5409 mp->prev = min_mp;
5410 mp->next = min_mp->next;
5411 min_mp->next = mp;
5412 if (mp->next != NULL)
5413 mp->next->prev = mp;
2b835d68 5414 else
5415 minipool_vector_tail = mp;
5416 }
5417
5418 min_mp = mp;
5419
5420 offset = 0;
5421 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5422 {
5423 mp->offset = offset;
5424 if (mp->refcount > 0)
5425 offset += mp->fix_size;
5426
5427 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5428 mp->next->min_address = mp->min_address + mp->fix_size;
5429 }
5430
5431 return min_mp;
5432}
5433
5434/* Add a constant to the minipool for a backward reference. Returns the
5435 node added or NULL if the constant will not fit in this pool.
5436
5437 Note that the code for insertion for a backwards reference can be
5438 somewhat confusing because the calculated offsets for each fix do
5439 not take into account the size of the pool (which is still under
 5440 construction). */
5441static Mnode *
5442add_minipool_backward_ref (fix)
5443 Mfix * fix;
5444{
5445 /* If set, min_mp is the last pool_entry that has a lower constraint
5446 than the one we are trying to add. */
5447 Mnode * min_mp = NULL;
5448 /* This can be negative, since it is only a constraint. */
5449 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5450 Mnode * mp;
5451
5452 /* If we can't reach the current pool from this insn, or if we can't
5453 insert this entry at the end of the pool without pushing other
5454 fixes out of range, then we don't try. This ensures that we
5455 can't fail later on. */
5456 if (min_address >= minipool_barrier->address
5457 || (minipool_vector_tail->min_address + fix->fix_size
5458 >= minipool_barrier->address))
5459 return NULL;
5460
5461 /* Scan the pool to see if a constant with the same value has
5462 already been added. While we are doing this, also note the
5463 location where we must insert the constant if it doesn't already
5464 exist. */
5465 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5466 {
5467 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5468 && fix->mode == mp->mode
5469 && (GET_CODE (fix->value) != CODE_LABEL
5470 || (CODE_LABEL_NUMBER (fix->value)
5471 == CODE_LABEL_NUMBER (mp->value)))
5472 && rtx_equal_p (fix->value, mp->value)
5473 /* Check that there is enough slack to move this entry to the
5474 end of the table (this is conservative). */
5475 && (mp->max_address
5476 > (minipool_barrier->address
5477 + minipool_vector_tail->offset
5478 + minipool_vector_tail->fix_size)))
5479 {
5480 mp->refcount++;
5481 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5482 }
5483
5484 if (min_mp != NULL)
5485 mp->min_address += fix->fix_size;
5486 else
5487 {
5488 /* Note the insertion point if necessary. */
5489 if (mp->min_address < min_address)
5490 min_mp = mp;
5491 else if (mp->max_address
5492 < minipool_barrier->address + mp->offset + fix->fix_size)
5493 {
5494 /* Inserting before this entry would push the fix beyond
5495 its maximum address (which can happen if we have
5496 re-located a forwards fix); force the new fix to come
5497 after it. */
5498 min_mp = mp;
5499 min_address = mp->min_address + fix->fix_size;
5500 }
5501 }
5502 }
5503
5504 /* We need to create a new entry. */
5505 mp = xmalloc (sizeof (* mp));
5506 mp->fix_size = fix->fix_size;
5507 mp->mode = fix->mode;
5508 mp->value = fix->value;
5509 mp->refcount = 1;
5510 mp->max_address = minipool_barrier->address + 65536;
5511
5512 mp->min_address = min_address;
5513
5514 if (min_mp == NULL)
5515 {
5516 mp->prev = NULL;
5517 mp->next = minipool_vector_head;
5518
5519 if (mp->next == NULL)
5520 {
5521 minipool_vector_tail = mp;
5522 minipool_vector_label = gen_label_rtx ();
5523 }
5524 else
5525 mp->next->prev = mp;
5526
5527 minipool_vector_head = mp;
5528 }
5529 else
5530 {
5531 mp->next = min_mp->next;
5532 mp->prev = min_mp;
5533 min_mp->next = mp;
da6558fd 5534
5535 if (mp->next != NULL)
5536 mp->next->prev = mp;
5537 else
5538 minipool_vector_tail = mp;
5539 }
5540
5541 /* Save the new entry. */
5542 min_mp = mp;
5543
5544 if (mp->prev)
5545 mp = mp->prev;
5546 else
5547 mp->offset = 0;
5548
5549 /* Scan over the following entries and adjust their offsets. */
5550 while (mp->next != NULL)
5551 {
5552 if (mp->next->min_address < mp->min_address + mp->fix_size)
5553 mp->next->min_address = mp->min_address + mp->fix_size;
5554
5555 if (mp->refcount)
5556 mp->next->offset = mp->offset + mp->fix_size;
5557 else
5558 mp->next->offset = mp->offset;
5559
5560 mp = mp->next;
5561 }
5562
5563 return min_mp;
5564}
5565
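/* Assign an offset from the start of the pool to every entry that is
   still referenced, in pool order.  BARRIER is the fix recording
   where the pool will be dumped.  */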
5566static void
5567assign_minipool_offsets (barrier)
5568 Mfix * barrier;
5569{
5570 HOST_WIDE_INT offset = 0;
5571 Mnode * mp;
5572
5573 minipool_barrier = barrier;
5574
5575 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5576 {
5577 mp->offset = offset;
da6558fd 5578
5579 if (mp->refcount > 0)
5580 offset += mp->fix_size;
5581 }
5582}
5583
 5584/* Output the literal table. */
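/* Roughly, the pool is emitted as (any branch around it is created
   separately, by create_fix_barrier):

	.align	2
   Lpool:			@ minipool_vector_label (illustrative name)
	.word	<constant>	@ one consttable entry per live node
	...

   followed by a consttable_end marker and a barrier.  */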
5585static void
5586dump_minipool (scan)
5587 rtx scan;
5588{
5589 Mnode * mp;
5590 Mnode * nmp;
5591
5592 if (rtl_dump_file)
5593 fprintf (rtl_dump_file,
5594 ";; Emitting minipool after insn %u; address %ld\n",
5595 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5596
5597 scan = emit_label_after (gen_label_rtx (), scan);
5598 scan = emit_insn_after (gen_align_4 (), scan);
5599 scan = emit_label_after (minipool_vector_label, scan);
5600
5601 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5602 {
5603 if (mp->refcount > 0)
5604 {
5605 if (rtl_dump_file)
5606 {
5607 fprintf (rtl_dump_file,
5608 ";; Offset %u, min %ld, max %ld ",
5609 (unsigned) mp->offset, (unsigned long) mp->min_address,
5610 (unsigned long) mp->max_address);
5611 arm_print_value (rtl_dump_file, mp->value);
5612 fputc ('\n', rtl_dump_file);
5613 }
5614
5615 switch (mp->fix_size)
5616 {
5617#ifdef HAVE_consttable_1
5618 case 1:
5619 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5620 break;
5621
5622#endif
5623#ifdef HAVE_consttable_2
5624 case 2:
5625 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5626 break;
5627
5628#endif
5629#ifdef HAVE_consttable_4
5630 case 4:
5631 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5632 break;
5633
5634#endif
5635#ifdef HAVE_consttable_8
5636 case 8:
5637 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5638 break;
5639
5640#endif
5641 default:
5642 abort ();
5643 break;
5644 }
5645 }
5646
5647 nmp = mp->next;
5648 free (mp);
5649 }
5650
5651 minipool_vector_head = minipool_vector_tail = NULL;
5652 scan = emit_insn_after (gen_consttable_end (), scan);
5653 scan = emit_barrier_after (scan);
5654}
5655
5656/* Return the cost of forcibly inserting a barrier after INSN. */
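/* (Lower values indicate better places to break the insn stream;
   create_fix_barrier scans forward and keeps the last position whose
   cost is no worse than the best seen so far.)  */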
5657static int
5658arm_barrier_cost (insn)
5659 rtx insn;
949d79eb 5660{
5661 /* Basing the location of the pool on the loop depth is preferable,
5662 but at the moment, the basic block information seems to be
 5663 corrupted by this stage of the compilation. */
5664 int base_cost = 50;
5665 rtx next = next_nonnote_insn (insn);
5666
5667 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5668 base_cost -= 20;
5669
5670 switch (GET_CODE (insn))
5671 {
5672 case CODE_LABEL:
5673 /* It will always be better to place the table before the label, rather
5674 than after it. */
5675 return 50;
949d79eb 5676
5677 case INSN:
5678 case CALL_INSN:
5679 return base_cost;
5680
5681 case JUMP_INSN:
5682 return base_cost - 10;
5683
5684 default:
5685 return base_cost + 10;
5686 }
5687}
5688
5689/* Find the best place in the insn stream in the range
5690 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5691 Create the barrier by inserting a jump and add a new fix entry for
5692 it. */
5693static Mfix *
5694create_fix_barrier (fix, max_address)
5695 Mfix * fix;
5696 HOST_WIDE_INT max_address;
5697{
5698 HOST_WIDE_INT count = 0;
5699 rtx barrier;
5700 rtx from = fix->insn;
5701 rtx selected = from;
5702 int selected_cost;
5703 HOST_WIDE_INT selected_address;
5704 Mfix * new_fix;
5705 HOST_WIDE_INT max_count = max_address - fix->address;
5706 rtx label = gen_label_rtx ();
5707
5708 selected_cost = arm_barrier_cost (from);
5709 selected_address = fix->address;
5710
5711 while (from && count < max_count)
5712 {
5713 rtx tmp;
5714 int new_cost;
5715
5716 /* This code shouldn't have been called if there was a natural barrier
5717 within range. */
5718 if (GET_CODE (from) == BARRIER)
5719 abort ();
5720
5721 /* Count the length of this insn. */
5722 count += get_attr_length (from);
5723
5724 /* If there is a jump table, add its length. */
5725 tmp = is_jump_table (from);
5726 if (tmp != NULL)
5727 {
5728 count += get_jump_table_size (tmp);
5729
5730 /* Jump tables aren't in a basic block, so base the cost on
5731 the dispatch insn. If we select this location, we will
5732 still put the pool after the table. */
5733 new_cost = arm_barrier_cost (from);
5734
5735 if (count < max_count && new_cost <= selected_cost)
5736 {
5737 selected = tmp;
5738 selected_cost = new_cost;
5739 selected_address = fix->address + count;
5740 }
5741
5742 /* Continue after the dispatch table. */
5743 from = NEXT_INSN (tmp);
5744 continue;
5745 }
5746
5747 new_cost = arm_barrier_cost (from);
5748
5749 if (count < max_count && new_cost <= selected_cost)
5750 {
5751 selected = from;
5752 selected_cost = new_cost;
5753 selected_address = fix->address + count;
5754 }
5755
5756 from = NEXT_INSN (from);
5757 }
5758
5759 /* Create a new JUMP_INSN that branches around a barrier. */
5760 from = emit_jump_insn_after (gen_jump (label), selected);
5761 JUMP_LABEL (from) = label;
5762 barrier = emit_barrier_after (from);
5763 emit_label_after (label, barrier);
5764
5765 /* Create a minipool barrier entry for the new barrier. */
c7319d87 5766 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
5767 new_fix->insn = barrier;
5768 new_fix->address = selected_address;
5769 new_fix->next = fix->next;
5770 fix->next = new_fix;
5771
5772 return new_fix;
5773}
5774
5775/* Record that there is a natural barrier in the insn stream at
5776 ADDRESS. */
5777static void
5778push_minipool_barrier (insn, address)
2b835d68 5779 rtx insn;
d5b7b3ae 5780 HOST_WIDE_INT address;
2b835d68 5781{
c7319d87 5782 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 5783
5784 fix->insn = insn;
5785 fix->address = address;
2b835d68 5786
5787 fix->next = NULL;
5788 if (minipool_fix_head != NULL)
5789 minipool_fix_tail->next = fix;
5790 else
5791 minipool_fix_head = fix;
5792
5793 minipool_fix_tail = fix;
5794}
2b835d68 5795
5796/* Record INSN, which will need fixing up to load a value from the
5797 minipool. ADDRESS is the offset of the insn since the start of the
5798 function; LOC is a pointer to the part of the insn which requires
5799 fixing; VALUE is the constant that must be loaded, which is of type
5800 MODE. */
5801static void
5802push_minipool_fix (insn, address, loc, mode, value)
5803 rtx insn;
5804 HOST_WIDE_INT address;
5805 rtx * loc;
5806 enum machine_mode mode;
5807 rtx value;
5808{
c7319d87 5809 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5810
5811#ifdef AOF_ASSEMBLER
 5812 /* PIC symbol references need to be converted into offsets into the
5813 based area. */
5814 /* XXX This shouldn't be done here. */
5815 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5816 value = aof_pic_entry (value);
5817#endif /* AOF_ASSEMBLER */
5818
5819 fix->insn = insn;
5820 fix->address = address;
5821 fix->loc = loc;
5822 fix->mode = mode;
d5b7b3ae 5823 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 5824 fix->value = value;
5825 fix->forwards = get_attr_pool_range (insn);
5826 fix->backwards = get_attr_neg_pool_range (insn);
5827 fix->minipool = NULL;
5828
5829 /* If an insn doesn't have a range defined for it, then it isn't
5830 expecting to be reworked by this code. Better to abort now than
5831 to generate duff assembly code. */
d5b7b3ae 5832 if (fix->forwards == 0 && fix->backwards == 0)
5833 abort ();
5834
5835 if (rtl_dump_file)
5836 {
5837 fprintf (rtl_dump_file,
5838 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5839 GET_MODE_NAME (mode),
5840 INSN_UID (insn), (unsigned long) address,
5841 -1 * (long)fix->backwards, (long)fix->forwards);
5842 arm_print_value (rtl_dump_file, fix->value);
5843 fprintf (rtl_dump_file, "\n");
5844 }
5845
6354dc9b 5846 /* Add it to the chain of fixes. */
949d79eb 5847 fix->next = NULL;
d5b7b3ae 5848
5849 if (minipool_fix_head != NULL)
5850 minipool_fix_tail->next = fix;
5851 else
5852 minipool_fix_head = fix;
5853
5854 minipool_fix_tail = fix;
5855}
5856
d5b7b3ae 5857/* Scan INSN and note any of its operands that need fixing. */
5858static void
5859note_invalid_constants (insn, address)
5860 rtx insn;
d5b7b3ae 5861 HOST_WIDE_INT address;
5862{
5863 int opno;
5864
d5b7b3ae 5865 extract_insn (insn);
949d79eb 5866
5895f793 5867 if (!constrain_operands (1))
5868 fatal_insn_not_found (insn);
5869
5870 /* Fill in recog_op_alt with information about the constraints of this
5871 insn. */
5872 preprocess_constraints ();
5873
1ccbefce 5874 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 5875 {
6354dc9b 5876 /* Things we need to fix can only occur in inputs. */
36ab44c7 5877 if (recog_data.operand_type[opno] != OP_IN)
5878 continue;
5879
5880 /* If this alternative is a memory reference, then any mention
5881 of constants in this alternative is really to fool reload
5882 into allowing us to accept one there. We need to fix them up
5883 now so that we output the right code. */
5884 if (recog_op_alt[opno][which_alternative].memory_ok)
5885 {
1ccbefce 5886 rtx op = recog_data.operand[opno];
5887
5888 if (CONSTANT_P (op))
5889 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5890 recog_data.operand_mode[opno], op);
5891#if 0
5892 /* RWE: Now we look correctly at the operands for the insn,
5893 this shouldn't be needed any more. */
949d79eb 5894#ifndef AOF_ASSEMBLER
d5b7b3ae 5895 /* XXX Is this still needed? */
b15bca31 5896 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
5897 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5898 recog_data.operand_mode[opno],
5899 XVECEXP (op, 0, 0));
949d79eb 5900#endif
5901#endif
5902 else if (GET_CODE (op) == MEM
5903 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5904 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
5905 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5906 recog_data.operand_mode[opno],
5907 get_pool_constant (XEXP (op, 0)));
5908 }
2b835d68 5909 }
5910}
5911
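/* Machine dependent reorganization: scan the insn stream starting at
   FIRST, record the constants that will need fixing up into minipool
   loads and the barriers already present, then place and emit the
   minipools.  */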
5912void
5913arm_reorg (first)
5914 rtx first;
5915{
5916 rtx insn;
5917 HOST_WIDE_INT address = 0;
5918 Mfix * fix;
ad076f4e 5919
949d79eb 5920 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 5921
5922 /* The first insn must always be a note, or the code below won't
5923 scan it properly. */
5924 if (GET_CODE (first) != NOTE)
5925 abort ();
5926
5927 /* Scan all the insns and record the operands that will need fixing. */
5928 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 5929 {
949d79eb 5930 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 5931 push_minipool_barrier (insn, address);
5932 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5933 || GET_CODE (insn) == JUMP_INSN)
5934 {
5935 rtx table;
5936
5937 note_invalid_constants (insn, address);
5938 address += get_attr_length (insn);
d5b7b3ae 5939
5940 /* If the insn is a vector jump, add the size of the table
5941 and skip the table. */
d5b7b3ae 5942 if ((table = is_jump_table (insn)) != NULL)
2b835d68 5943 {
d5b7b3ae 5944 address += get_jump_table_size (table);
5945 insn = table;
5946 }
5947 }
5948 }
332072db 5949
5950 fix = minipool_fix_head;
5951
949d79eb 5952 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 5953 while (fix)
949d79eb 5954 {
5955 Mfix * ftmp;
5956 Mfix * fdel;
5957 Mfix * last_added_fix;
5958 Mfix * last_barrier = NULL;
5959 Mfix * this_fix;
5960
5961 /* Skip any further barriers before the next fix. */
5962 while (fix && GET_CODE (fix->insn) == BARRIER)
5963 fix = fix->next;
5964
d5b7b3ae 5965 /* No more fixes. */
5966 if (fix == NULL)
5967 break;
332072db 5968
d5b7b3ae 5969 last_added_fix = NULL;
2b835d68 5970
d5b7b3ae 5971 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 5972 {
949d79eb 5973 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 5974 {
5975 if (ftmp->address >= minipool_vector_head->max_address)
5976 break;
2b835d68 5977
d5b7b3ae 5978 last_barrier = ftmp;
2b835d68 5979 }
5980 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5981 break;
5982
5983 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 5984 }
949d79eb 5985
5986 /* If we found a barrier, drop back to that; any fixes that we
5987 could have reached but come after the barrier will now go in
5988 the next mini-pool. */
5989 if (last_barrier != NULL)
5990 {
5991 /* Reduce the refcount for those fixes that won't go into this
5992 pool after all. */
5993 for (fdel = last_barrier->next;
5994 fdel && fdel != ftmp;
5995 fdel = fdel->next)
5996 {
5997 fdel->minipool->refcount--;
5998 fdel->minipool = NULL;
5999 }
6000
6001 ftmp = last_barrier;
6002 }
6003 else
2bfa88dc 6004 {
 6005 /* ftmp is the first fix that we can't fit into this pool and
 6006 there are no natural barriers that we could use. Insert a
6007 new barrier in the code somewhere between the previous
6008 fix and this one, and arrange to jump around it. */
6009 HOST_WIDE_INT max_address;
6010
6011 /* The last item on the list of fixes must be a barrier, so
6012 we can never run off the end of the list of fixes without
6013 last_barrier being set. */
6014 if (ftmp == NULL)
6015 abort ();
6016
6017 max_address = minipool_vector_head->max_address;
6018 /* Check that there isn't another fix that is in range that
6019 we couldn't fit into this pool because the pool was
6020 already too large: we need to put the pool before such an
6021 instruction. */
6022 if (ftmp->address < max_address)
6023 max_address = ftmp->address;
6024
6025 last_barrier = create_fix_barrier (last_added_fix, max_address);
6026 }
6027
6028 assign_minipool_offsets (last_barrier);
6029
6030 while (ftmp)
6031 {
6032 if (GET_CODE (ftmp->insn) != BARRIER
6033 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6034 == NULL))
6035 break;
2bfa88dc 6036
d5b7b3ae 6037 ftmp = ftmp->next;
2bfa88dc 6038 }
6039
6040 /* Scan over the fixes we have identified for this pool, fixing them
6041 up and adding the constants to the pool itself. */
d5b7b3ae 6042 for (this_fix = fix; this_fix && ftmp != this_fix;
6043 this_fix = this_fix->next)
6044 if (GET_CODE (this_fix->insn) != BARRIER)
6045 {
6046 rtx addr
6047 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6048 minipool_vector_label),
d5b7b3ae 6049 this_fix->minipool->offset);
6050 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6051 }
6052
d5b7b3ae 6053 dump_minipool (last_barrier->insn);
949d79eb 6054 fix = ftmp;
2b835d68 6055 }
4b632bf1 6056
6057 /* From now on we must synthesize any constants that we can't handle
6058 directly. This can happen if the RTL gets split during final
6059 instruction generation. */
4b632bf1 6060 after_arm_reorg = 1;
6061
6062 /* Free the minipool memory. */
6063 obstack_free (&minipool_obstack, minipool_startobj);
2b835d68 6064}
6065\f
6066/* Routines to output assembly language. */
6067
f3bb6135 6068/* If the rtx is the correct value then return the string of the number.
ff9940b0 6069 In this way we can ensure that valid double constants are generated even
6354dc9b 6070 when cross compiling. */
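/* (The FPA instruction set can encode just eight floating point
   immediates: 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and 10.0, as set up
   by init_fpa_table.)  */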
cd2b33d0 6071const char *
ff9940b0 6072fp_immediate_constant (x)
b5cc037f 6073 rtx x;
6074{
6075 REAL_VALUE_TYPE r;
6076 int i;
6077
6078 if (!fpa_consts_inited)
6079 init_fpa_table ();
6080
6081 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6082 for (i = 0; i < 8; i++)
6083 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6084 return strings_fpa[i];
f3bb6135 6085
6086 abort ();
6087}
6088
9997d19d 6089/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 6090static const char *
9997d19d 6091fp_const_from_val (r)
62b10bbc 6092 REAL_VALUE_TYPE * r;
6093{
6094 int i;
6095
5895f793 6096 if (!fpa_consts_inited)
6097 init_fpa_table ();
6098
6099 for (i = 0; i < 8; i++)
6100 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6101 return strings_fpa[i];
6102
6103 abort ();
6104}
ff9940b0 6105
6106/* Output the operands of a LDM/STM instruction to STREAM.
6107 MASK is the ARM register set mask of which only bits 0-15 are important.
6108 REG is the base register, either the frame pointer or the stack pointer,
6109 INSTR is the possibly suffixed load or store instruction. */
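/* For instance, an illustrative call with INSTR "ldmfd\t%r!", REG
   SP_REGNUM and MASK 0x4070 (bit N of MASK selects register N) would
   print:

	ldmfd	sp!, {r4, r5, r6, lr}  */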
cce8749e 6110
d5b7b3ae 6111static void
6d3d9133 6112print_multi_reg (stream, instr, reg, mask)
62b10bbc 6113 FILE * stream;
cd2b33d0 6114 const char * instr;
6115 int reg;
6116 int mask;
6117{
6118 int i;
6119 int not_first = FALSE;
6120
1d5473cb 6121 fputc ('\t', stream);
dd18ae56 6122 asm_fprintf (stream, instr, reg);
1d5473cb 6123 fputs (", {", stream);
62b10bbc 6124
d5b7b3ae 6125 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6126 if (mask & (1 << i))
6127 {
6128 if (not_first)
6129 fprintf (stream, ", ");
62b10bbc 6130
dd18ae56 6131 asm_fprintf (stream, "%r", i);
6132 not_first = TRUE;
6133 }
f3bb6135 6134
6d3d9133 6135 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
f3bb6135 6136}
cce8749e 6137
6354dc9b 6138/* Output a 'call' insn. */
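/* The call is synthesized as "mov lr, pc" followed by a jump to the
   target: reading the pc yields the address two instructions ahead,
   so lr ends up holding the address of the instruction after the
   jump, which is the desired return address.  */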
cce8749e 6139
cd2b33d0 6140const char *
cce8749e 6141output_call (operands)
62b10bbc 6142 rtx * operands;
cce8749e 6143{
6354dc9b 6144 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 6145
62b10bbc 6146 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 6147 {
62b10bbc 6148 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 6149 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 6150 }
62b10bbc 6151
1d5473cb 6152 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 6153
6cfc7210 6154 if (TARGET_INTERWORK)
6155 output_asm_insn ("bx%?\t%0", operands);
6156 else
6157 output_asm_insn ("mov%?\t%|pc, %0", operands);
6158
6159 return "";
6160}
cce8749e 6161
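/* Replace each reference to the link register in *X with a reference
   to the scratch register ip, returning non-zero if any replacement
   was made.  */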
6162static int
6163eliminate_lr2ip (x)
62b10bbc 6164 rtx * x;
6165{
6166 int something_changed = 0;
62b10bbc 6167 rtx x0 = * x;
6168 int code = GET_CODE (x0);
6169 register int i, j;
6f7d635c 6170 register const char * fmt;
6171
6172 switch (code)
6173 {
6174 case REG:
62b10bbc 6175 if (REGNO (x0) == LR_REGNUM)
ff9940b0
RE
6178 return 1;
6179 }
6180 return 0;
6181 default:
6354dc9b 6182 /* Scan through the sub-elements and change any references there. */
ff9940b0 6183 fmt = GET_RTX_FORMAT (code);
62b10bbc 6184
6185 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6186 if (fmt[i] == 'e')
6187 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6188 else if (fmt[i] == 'E')
6189 for (j = 0; j < XVECLEN (x0, i); j++)
6190 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 6191
6192 return something_changed;
6193 }
6194}
6195
6354dc9b 6196/* Output a 'call' insn that is a reference in memory. */
ff9940b0 6197
cd2b33d0 6198const char *
ff9940b0 6199output_call_mem (operands)
62b10bbc 6200 rtx * operands;
ff9940b0 6201{
6202 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6203 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
ff9940b0 6204 if (eliminate_lr2ip (&operands[0]))
1d5473cb 6205 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 6206
6cfc7210 6207 if (TARGET_INTERWORK)
6208 {
6209 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6210 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6211 output_asm_insn ("bx%?\t%|ip", operands);
6212 }
6213 else
6214 {
6215 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6216 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6217 }
6218
6219 return "";
6220}
6221
6222
 6223/* Output a move from arm registers to an fpu register.
 6224 OPERANDS[0] is an fpu register.
 6225 OPERANDS[1] is the first register of an arm register pair. */
6226
cd2b33d0 6227const char *
ff9940b0 6228output_mov_long_double_fpu_from_arm (operands)
62b10bbc 6229 rtx * operands;
6230{
6231 int arm_reg0 = REGNO (operands[1]);
6232 rtx ops[3];
6233
6234 if (arm_reg0 == IP_REGNUM)
6235 abort ();
f3bb6135 6236
6237 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6238 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6239 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6240
6241 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6242 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 6243
6244 return "";
6245}
6246
6247/* Output a move from an fpu register to arm registers.
 6248 OPERANDS[0] is the first register of an arm register pair.
6249 OPERANDS[1] is an fpu register. */
6250
cd2b33d0 6251const char *
ff9940b0 6252output_mov_long_double_arm_from_fpu (operands)
62b10bbc 6253 rtx * operands;
6254{
6255 int arm_reg0 = REGNO (operands[0]);
6256 rtx ops[3];
6257
6258 if (arm_reg0 == IP_REGNUM)
6259 abort ();
f3bb6135 6260
6261 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6262 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6263 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6264
6265 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6266 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6267 return "";
6268}
6269
 6270/* Output a move from arm registers to arm registers of a long double.
6271 OPERANDS[0] is the destination.
6272 OPERANDS[1] is the source. */
cd2b33d0 6273const char *
ff9940b0 6274output_mov_long_double_arm_from_arm (operands)
62b10bbc 6275 rtx * operands;
ff9940b0 6276{
6354dc9b 6277 /* We have to be careful here because the two might overlap. */
6278 int dest_start = REGNO (operands[0]);
6279 int src_start = REGNO (operands[1]);
6280 rtx ops[2];
6281 int i;
6282
6283 if (dest_start < src_start)
6284 {
6285 for (i = 0; i < 3; i++)
6286 {
6287 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6288 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6289 output_asm_insn ("mov%?\t%0, %1", ops);
6290 }
6291 }
6292 else
6293 {
6294 for (i = 2; i >= 0; i--)
6295 {
6296 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6297 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6298 output_asm_insn ("mov%?\t%0, %1", ops);
6299 }
6300 }
f3bb6135 6301
6302 return "";
6303}
6304
6305
 6306/* Output a move from arm registers to an fpu register.
 6307 OPERANDS[0] is an fpu register.
 6308 OPERANDS[1] is the first register of an arm register pair. */
6309
cd2b33d0 6310const char *
cce8749e 6311output_mov_double_fpu_from_arm (operands)
62b10bbc 6312 rtx * operands;
6313{
6314 int arm_reg0 = REGNO (operands[1]);
6315 rtx ops[2];
6316
6317 if (arm_reg0 == IP_REGNUM)
6318 abort ();
6319
6320 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6321 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6322 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6323 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6324 return "";
6325}
6326
6327/* Output a move from an fpu register to arm registers.
 6328 OPERANDS[0] is the first register of an arm register pair.
6329 OPERANDS[1] is an fpu register. */
6330
cd2b33d0 6331const char *
cce8749e 6332output_mov_double_arm_from_fpu (operands)
62b10bbc 6333 rtx * operands;
6334{
6335 int arm_reg0 = REGNO (operands[0]);
6336 rtx ops[2];
6337
6338 if (arm_reg0 == IP_REGNUM)
6339 abort ();
f3bb6135 6340
6341 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6342 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6343 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6344 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6345 return "";
6346}
6347
6348/* Output a move between double words.
6349 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6350 or MEM<-REG and all MEMs must be offsettable addresses. */
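/* (In the templates below, %Q and %R select the registers holding the
   least and most significant words of a 64-bit operand, %m prints the
   base register of a memory operand, and %M prints an ldm/stm
   register list for the operand; these modifiers are handled by
   arm_print_operand.)  */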
6351
cd2b33d0 6352const char *
cce8749e 6353output_move_double (operands)
aec3cfba 6354 rtx * operands;
6355{
6356 enum rtx_code code0 = GET_CODE (operands[0]);
6357 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 6358 rtx otherops[3];
6359
6360 if (code0 == REG)
6361 {
6362 int reg0 = REGNO (operands[0]);
6363
43cffd11 6364 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 6365
6366 if (code1 == REG)
6367 {
6368 int reg1 = REGNO (operands[1]);
6369 if (reg1 == IP_REGNUM)
6370 abort ();
f3bb6135 6371
6354dc9b 6372 /* Ensure the second source is not overwritten. */
c1c2bc04 6373 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 6374 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 6375 else
6cfc7210 6376 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6377 }
6378 else if (code1 == CONST_DOUBLE)
6379 {
6380 if (GET_MODE (operands[1]) == DFmode)
6381 {
6382 long l[2];
6383 union real_extract u;
6384
4e135bdd 6385 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
226a5051 6386 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
6387 otherops[1] = GEN_INT (l[1]);
6388 operands[1] = GEN_INT (l[0]);
226a5051 6389 }
6390 else if (GET_MODE (operands[1]) != VOIDmode)
6391 abort ();
6392 else if (WORDS_BIG_ENDIAN)
6393 {
6394
6395 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6396 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6397 }
6398 else
6399 {
c1c2bc04 6400
6401 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6402 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6403 }
6cfc7210 6404
6405 output_mov_immediate (operands);
6406 output_mov_immediate (otherops);
6407 }
6408 else if (code1 == CONST_INT)
6409 {
6410#if HOST_BITS_PER_WIDE_INT > 32
6411 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6412 what the upper word is. */
6413 if (WORDS_BIG_ENDIAN)
6414 {
6415 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6416 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6417 }
6418 else
6419 {
6420 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6421 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6422 }
6423#else
6354dc9b 6424 /* Sign extend the intval into the high-order word. */
6425 if (WORDS_BIG_ENDIAN)
6426 {
6427 otherops[1] = operands[1];
6428 operands[1] = (INTVAL (operands[1]) < 0
6429 ? constm1_rtx : const0_rtx);
6430 }
ff9940b0 6431 else
c1c2bc04 6432 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 6433#endif
6434 output_mov_immediate (otherops);
6435 output_mov_immediate (operands);
6436 }
6437 else if (code1 == MEM)
6438 {
ff9940b0 6439 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 6440 {
ff9940b0 6441 case REG:
9997d19d 6442 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 6443 break;
2b835d68 6444
ff9940b0 6445 case PRE_INC:
6354dc9b 6446 abort (); /* Should never happen now. */
ff9940b0 6447 break;
2b835d68 6448
ff9940b0 6449 case PRE_DEC:
2b835d68 6450 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 6451 break;
2b835d68 6452
ff9940b0 6453 case POST_INC:
9997d19d 6454 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 6455 break;
2b835d68 6456
ff9940b0 6457 case POST_DEC:
6354dc9b 6458 abort (); /* Should never happen now. */
ff9940b0 6459 break;
6460
6461 case LABEL_REF:
6462 case CONST:
6463 output_asm_insn ("adr%?\t%0, %1", operands);
6464 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6465 break;
6466
ff9940b0 6467 default:
6468 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6469 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 6470 {
6471 otherops[0] = operands[0];
6472 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6473 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6474 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6475 {
6476 if (GET_CODE (otherops[2]) == CONST_INT)
6477 {
6478 switch (INTVAL (otherops[2]))
6479 {
6480 case -8:
6481 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6482 return "";
6483 case -4:
6484 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6485 return "";
6486 case 4:
6487 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6488 return "";
6489 }
6490 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6491 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6492 else
6493 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6494 }
6495 else
6496 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6497 }
6498 else
6499 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 6500
6501 return "ldm%?ia\t%0, %M0";
6502 }
6503 else
6504 {
6505 otherops[1] = adj_offsettable_operand (operands[1], 4);
6506 /* Take care of overlapping base/data reg. */
6507 if (reg_mentioned_p (operands[0], operands[1]))
6508 {
6509 output_asm_insn ("ldr%?\t%0, %1", otherops);
6510 output_asm_insn ("ldr%?\t%0, %1", operands);
6511 }
6512 else
6513 {
6514 output_asm_insn ("ldr%?\t%0, %1", operands);
6515 output_asm_insn ("ldr%?\t%0, %1", otherops);
6516 }
6517 }
6518 }
6519 }
2b835d68 6520 else
6354dc9b 6521 abort (); /* Constraints should prevent this. */
6522 }
6523 else if (code0 == MEM && code1 == REG)
6524 {
6525 if (REGNO (operands[1]) == IP_REGNUM)
6526 abort ();
2b835d68 6527
6528 switch (GET_CODE (XEXP (operands[0], 0)))
6529 {
6530 case REG:
9997d19d 6531 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 6532 break;
2b835d68 6533
ff9940b0 6534 case PRE_INC:
6354dc9b 6535 abort (); /* Should never happen now. */
ff9940b0 6536 break;
2b835d68 6537
ff9940b0 6538 case PRE_DEC:
2b835d68 6539 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 6540 break;
2b835d68 6541
ff9940b0 6542 case POST_INC:
9997d19d 6543 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 6544 break;
2b835d68 6545
ff9940b0 6546 case POST_DEC:
6354dc9b 6547 abort (); /* Should never happen now. */
ff9940b0 6548 break;
6549
6550 case PLUS:
6551 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6552 {
6553 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6554 {
6555 case -8:
6556 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6557 return "";
6558
6559 case -4:
6560 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6561 return "";
6562
6563 case 4:
6564 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6565 return "";
6566 }
6567 }
6568 /* Fall through */
6569
ff9940b0 6570 default:
cce8749e 6571 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 6572 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6573 output_asm_insn ("str%?\t%1, %0", operands);
6574 output_asm_insn ("str%?\t%1, %0", otherops);
6575 }
6576 }
2b835d68 6577 else
62b10bbc 6578 abort (); /* Constraints should prevent this. */
cce8749e 6579
6580 return "";
6581}
6582
6583
6584/* Output an arbitrary MOV reg, #n.
6585 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6586
cd2b33d0 6587const char *
cce8749e 6588output_mov_immediate (operands)
62b10bbc 6589 rtx * operands;
cce8749e 6590{
f3bb6135 6591 HOST_WIDE_INT n = INTVAL (operands[1]);
6592 int n_ones = 0;
6593 int i;
6594
 6595 /* Try to use one MOV. */
cce8749e 6596 if (const_ok_for_arm (n))
f3bb6135 6597 {
9997d19d 6598 output_asm_insn ("mov%?\t%0, %1", operands);
6599 return "";
6600 }
6601
 6602 /* Try to use one MVN. */
f3bb6135 6603 if (const_ok_for_arm (~n))
cce8749e 6604 {
f3bb6135 6605 operands[1] = GEN_INT (~n);
9997d19d 6606 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 6607 return "";
6608 }
6609
6354dc9b 6610 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6611
 6612 for (i = 0; i < 32; i++)
6613 if (n & 1 << i)
6614 n_ones++;
6615
6354dc9b 6616 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
e5951263 6617 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
cce8749e 6618 else
d5b7b3ae 6619 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6620
6621 return "";
6622}
6623
6624
6625/* Output an ADD r, s, #n where n may be too big for one instruction. If
6626 adding zero to one register, output nothing. */
6627
cd2b33d0 6628const char *
cce8749e 6629output_add_immediate (operands)
62b10bbc 6630 rtx * operands;
cce8749e 6631{
f3bb6135 6632 HOST_WIDE_INT n = INTVAL (operands[2]);
6633
6634 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6635 {
6636 if (n < 0)
6637 output_multi_immediate (operands,
6638 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6639 -n);
6640 else
6641 output_multi_immediate (operands,
6642 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6643 n);
cce8749e 6644 }
6645
6646 return "";
6647}
cce8749e 6648
6649/* Output a multiple immediate operation.
6650 OPERANDS is the vector of operands referred to in the output patterns.
6651 INSTR1 is the output pattern to use for the first constant.
6652 INSTR2 is the output pattern to use for subsequent constants.
6653 IMMED_OP is the index of the constant slot in OPERANDS.
6654 N is the constant value. */
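/* For example, N = 0x00ff00ff is not a valid single ARM immediate, so
   it is emitted as INSTR1 with #0xff followed by INSTR2 with
   #0xff0000, one eight-bit chunk at a time.  */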
6655
cd2b33d0 6656static const char *
cce8749e 6657output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc 6658 rtx * operands;
6659 const char * instr1;
6660 const char * instr2;
6661 int immed_op;
6662 HOST_WIDE_INT n;
cce8749e 6663{
f3bb6135 6664#if HOST_BITS_PER_WIDE_INT > 32
e5951263 6665 n &= HOST_UINT (0xffffffff);
6666#endif
6667
6668 if (n == 0)
6669 {
6670 operands[immed_op] = const0_rtx;
6354dc9b 6671 output_asm_insn (instr1, operands); /* Quick and easy output. */
6672 }
6673 else
6674 {
6675 int i;
cd2b33d0 6676 const char * instr = instr1;
cce8749e 6677
6354dc9b 6678 /* Note that n is never zero here (which would give no output). */
6679 for (i = 0; i < 32; i += 2)
6680 {
6681 if (n & (3 << i))
6682 {
6683 operands[immed_op] = GEN_INT (n & (255 << i));
6684 output_asm_insn (instr, operands);
6685 instr = instr2;
6686 i += 6;
6687 }
6688 }
6689 }
cd2b33d0 6690
f3bb6135 6691 return "";
9997d19d 6692}
6693
6694
6695/* Return the appropriate ARM instruction for the operation code.
6696 The returned result should not be overwritten. OP is the rtx of the
6697 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6698 was shifted. */
6699
cd2b33d0 6700const char *
6701arithmetic_instr (op, shift_first_arg)
6702 rtx op;
f3bb6135 6703 int shift_first_arg;
cce8749e 6704{
9997d19d 6705 switch (GET_CODE (op))
6706 {
6707 case PLUS:
6708 return "add";
6709
cce8749e 6710 case MINUS:
6711 return shift_first_arg ? "rsb" : "sub";
6712
cce8749e 6713 case IOR:
6714 return "orr";
6715
cce8749e 6716 case XOR:
6717 return "eor";
6718
cce8749e 6719 case AND:
6720 return "and";
6721
cce8749e 6722 default:
f3bb6135 6723 abort ();
cce8749e 6724 }
f3bb6135 6725}
6726
6727
6728/* Ensure valid constant shifts and return the appropriate shift mnemonic
6729 for the operation code. The returned result should not be overwritten.
6730 OP is the rtx code of the shift.
9997d19d 6731 On exit, *AMOUNTP will be -1 if the shift is by a register; otherwise
6354dc9b 6732 it will be the constant shift amount. */
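/* Thus (mult x 8) yields "asl" with *AMOUNTP set to 3, since a
   multiplication by a power of two is output as a left shift.  */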
cce8749e 6733
cd2b33d0 6734static const char *
6735shift_op (op, amountp)
6736 rtx op;
6737 HOST_WIDE_INT *amountp;
cce8749e 6738{
cd2b33d0 6739 const char * mnem;
e2c671ba 6740 enum rtx_code code = GET_CODE (op);
cce8749e 6741
6742 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6743 *amountp = -1;
6744 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6745 *amountp = INTVAL (XEXP (op, 1));
6746 else
6747 abort ();
6748
e2c671ba 6749 switch (code)
6750 {
6751 case ASHIFT:
6752 mnem = "asl";
6753 break;
f3bb6135 6754
6755 case ASHIFTRT:
6756 mnem = "asr";
cce8749e 6757 break;
f3bb6135 6758
6759 case LSHIFTRT:
6760 mnem = "lsr";
cce8749e 6761 break;
f3bb6135 6762
6763 case ROTATERT:
6764 mnem = "ror";
6765 break;
6766
ff9940b0 6767 case MULT:
6768 /* We never have to worry about the amount being other than a
6769 power of 2, since this case can never be reloaded from a reg. */
6770 if (*amountp != -1)
6771 *amountp = int_log2 (*amountp);
6772 else
6773 abort ();
6774 return "asl";
6775
cce8749e 6776 default:
f3bb6135 6777 abort ();
6778 }
6779
6780 if (*amountp != -1)
6781 {
6782 /* This is not 100% correct, but follows from the desire to merge
6783 multiplication by a power of 2 with the recognizer for a
6784 shift. >=32 is not a valid shift for "asl", so we must try and
6785 output a shift that produces the correct arithmetical result.
ddd5a7c1 6786 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
6787 is not set correctly if we set the flags; but we never use the
6788 carry bit from such an operation, so we can ignore that. */
6789 if (code == ROTATERT)
 6790 *amountp &= 31; /* Rotate is just modulo 32. */
6791 else if (*amountp != (*amountp & 31))
6792 {
6793 if (code == ASHIFT)
6794 mnem = "lsr";
6795 *amountp = 32;
6796 }
6797
6798 /* Shifts of 0 are no-ops. */
6799 if (*amountp == 0)
6800 return NULL;
6801 }
6802
6803 return mnem;
6804}
6805
6806
6354dc9b 6807/* Obtain the shift from the POWER of two. */
18af7313 6808static HOST_WIDE_INT
cce8749e 6809int_log2 (power)
f3bb6135 6810 HOST_WIDE_INT power;
cce8749e 6811{
f3bb6135 6812 HOST_WIDE_INT shift = 0;
cce8749e 6813
e5951263 6814 while ((((HOST_INT (1)) << shift) & power) == 0)
6815 {
6816 if (shift > 31)
f3bb6135 6817 abort ();
6818 shift++;
6819 }
6820
6821 return shift;
6822}
cce8749e 6823
6824/* Output a .ascii pseudo-op, keeping track of lengths. This is because
6825 /bin/as is horribly restrictive. */
6cfc7210 6826#define MAX_ASCII_LEN 51
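/* (A long string is therefore split across several .ascii directives:
   a new directive is started whenever MAX_ASCII_LEN characters of
   output have been emitted on the current line.)  */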
6827
6828void
6829output_ascii_pseudo_op (stream, p, len)
62b10bbc 6830 FILE * stream;
3cce094d 6831 const unsigned char * p;
6832 int len;
6833{
6834 int i;
6cfc7210 6835 int len_so_far = 0;
cce8749e 6836
6837 fputs ("\t.ascii\t\"", stream);
6838
6839 for (i = 0; i < len; i++)
6840 {
6841 register int c = p[i];
6842
6cfc7210 6843 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 6844 {
6cfc7210 6845 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 6846 len_so_far = 0;
6847 }
6848
6cfc7210 6849 switch (c)
cce8749e 6850 {
6851 case TARGET_TAB:
6852 fputs ("\\t", stream);
6853 len_so_far += 2;
6854 break;
6855
6856 case TARGET_FF:
6857 fputs ("\\f", stream);
6858 len_so_far += 2;
6859 break;
6860
6861 case TARGET_BS:
6862 fputs ("\\b", stream);
6863 len_so_far += 2;
6864 break;
6865
6866 case TARGET_CR:
6867 fputs ("\\r", stream);
6868 len_so_far += 2;
6869 break;
6870
6871 case TARGET_NEWLINE:
6872 fputs ("\\n", stream);
6873 c = p [i + 1];
6874 if ((c >= ' ' && c <= '~')
6875 || c == TARGET_TAB)
6876 /* This is a good place for a line break. */
6877 len_so_far = MAX_ASCII_LEN;
6878 else
6879 len_so_far += 2;
6880 break;
6881
6882 case '\"':
6883 case '\\':
6884 putc ('\\', stream);
5895f793 6885 len_so_far++;
6cfc7210 6886 /* drop through. */
f3bb6135 6887
6888 default:
6889 if (c >= ' ' && c <= '~')
6890 {
6891 putc (c, stream);
5895f793 6892 len_so_far++;
6893 }
6894 else
6895 {
6896 fprintf (stream, "\\%03o", c);
6897 len_so_far += 4;
6898 }
6899 break;
cce8749e 6900 }
cce8749e 6901 }
f3bb6135 6902
cce8749e 6903 fputs ("\"\n", stream);
f3bb6135 6904}
cce8749e 6905\f
6906/* Compute a bit mask of which registers need to be
6907 saved on the stack for the current function. */
6908
6909static unsigned long
6910arm_compute_save_reg_mask ()
6911{
6912 unsigned int save_reg_mask = 0;
6913 unsigned int reg;
6914 unsigned long func_type = arm_current_func_type ();
6915
6916 if (IS_NAKED (func_type))
6917 /* This should never really happen. */
6918 return 0;
6919
6920 /* If we are creating a stack frame, then we must save the frame pointer,
6921 IP (which will hold the old stack pointer), LR and the PC. */
6922 if (frame_pointer_needed)
6923 save_reg_mask |=
6924 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
6925 | (1 << IP_REGNUM)
6926 | (1 << LR_REGNUM)
6927 | (1 << PC_REGNUM);
6928
6929 /* Volatile functions do not return, so there
6930 is no need to save any other registers. */
6931 if (IS_VOLATILE (func_type))
6932 return save_reg_mask;
6933
6934 if (ARM_FUNC_TYPE (func_type) == ARM_FT_ISR)
6935 {
6936 /* FIQ handlers have registers r8 - r12 banked, so
6937 we only need to check r0 - r7, they must save them. */
6938 for (reg = 0; reg < 8; reg++)
6939 if (regs_ever_live[reg])
6940 save_reg_mask |= (1 << reg);
6941 }
6942 else
6943 {
6944 /* In the normal case we only need to save those registers
6945 which are call saved and which are used by this function. */
6946 for (reg = 0; reg <= 10; reg++)
6947 if (regs_ever_live[reg] && ! call_used_regs [reg])
6948 save_reg_mask |= (1 << reg);
6949
6950 /* Handle the frame pointer as a special case. */
6951 if (! TARGET_APCS_FRAME
6952 && ! frame_pointer_needed
6953 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6954 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6955 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
6956
6957 /* If we aren't loading the PIC register,
6958 don't stack it even though it may be live. */
6959 if (flag_pic
6960 && ! TARGET_SINGLE_PIC_BASE
6961 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6962 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
6963 }
6964
6965 /* Decide if we need to save the link register.
6966 Interrupt routines have their own banked link register,
6967 so they never need to save it.
 6968 Otherwise, if we do not use the link register, we do not need to save
6969 it. If we are pushing other registers onto the stack however, we
6970 can save an instruction in the epilogue by pushing the link register
6971 now and then popping it back into the PC. This incurs extra memory
6972 accesses though, so we only do it when optimising for size, and only
6973 if we know that we will not need a fancy return sequence. */
6974 if (! IS_INTERRUPT (func_type)
6975 && (regs_ever_live [LR_REGNUM]
6976 || (save_reg_mask
6977 && optimize_size
6978 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)))
6979 save_reg_mask |= 1 << LR_REGNUM;
6980
6981 if (cfun->machine->lr_save_eliminated)
6982 save_reg_mask &= ~ (1 << LR_REGNUM);
6983
6984 return save_reg_mask;
6985}
6986
 6987/* Generate a function exit sequence. If REALLY_RETURN is false, then do
 6988 everything but the final return instruction. */
ff9940b0 6989
cd2b33d0 6990const char *
84ed5e79 6991output_return_instruction (operand, really_return, reverse)
6992 rtx operand;
6993 int really_return;
84ed5e79 6994 int reverse;
ff9940b0 6995{
6d3d9133 6996 char conditional[10];
ff9940b0 6997 char instr[100];
6998 int reg;
6999 unsigned long live_regs_mask;
7000 unsigned long func_type;
7001
7002 func_type = arm_current_func_type ();
e2c671ba 7003
6d3d9133 7004 if (IS_NAKED (func_type))
d5b7b3ae 7005 return "";
7006
7007 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 7008 {
e2c671ba 7009 /* If this function was declared non-returning, and we have found a tail
7010 call, then we have to trust that the called function won't return. */
7011 if (really_return)
7012 {
7013 rtx ops[2];
7014
7015 /* Otherwise, trap an attempted return by aborting. */
7016 ops[0] = operand;
7017 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7018 : "abort");
7019 assemble_external_libcall (ops[1]);
7020 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7021 }
7022
7023 return "";
7024 }
6d3d9133 7025
5895f793 7026 if (current_function_calls_alloca && !really_return)
62b10bbc 7027 abort ();
ff9940b0 7028
7029 /* Construct the conditional part of the instruction(s) to be emitted. */
7030 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
d5b7b3ae 7031
6d3d9133 7032 return_used_this_function = 1;
ff9940b0 7033
6d3d9133 7034 live_regs_mask = arm_compute_save_reg_mask ();
ff9940b0 7035
3a5a4282 7036 /* On some ARM architectures it is faster to use LDR rather than LDM to
7037 load a single register. On other architectures, the cost is the same.
7038 In 26 bit mode we have to use LDM in order to be able to restore the CPSR. */
7039 if ((live_regs_mask == (1 << LR_REGNUM))
7040 && (! really_return || TARGET_APCS_32))
7041 {
7042 if (! really_return)
7043 sprintf (instr, "ldr%s\t%%|lr, [%%|sp], #4", conditional);
7044 else
7045 sprintf (instr, "ldr%s\t%%|pc, [%%|sp], #4", conditional);
7046 }
7047 else if (live_regs_mask)
7048 {
7049 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7050 /* There are two possible reasons for the IP register being saved.
7051 Either a stack frame was created, in which case IP contains the
 7052 old stack pointer, or an ISR routine corrupted it. If this is an
7053 ISR routine then just restore IP, otherwise restore IP into SP. */
7054 if (! IS_INTERRUPT (func_type))
7055 {
7056 live_regs_mask &= ~ (1 << IP_REGNUM);
7057 live_regs_mask |= (1 << SP_REGNUM);
7058 }
f3bb6135 7059
6d3d9133 7060 /* Generate the load multiple instruction to restore the registers. */
ff9940b0 7061 if (frame_pointer_needed)
6d3d9133 7062 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
ff9940b0 7063 else
6d3d9133 7064 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
f3bb6135 7065
7066 for (reg = 0; reg <= SP_REGNUM; reg++)
7067 if (live_regs_mask & (1 << reg))
7068 {
1d5473cb 7069 strcat (instr, "%|");
7070 strcat (instr, reg_names[reg]);
7071 strcat (instr, ", ");
7072 }
f3bb6135 7073
7074 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7075 {
7076 /* If we are not restoring the LR register then we will
7077 have added one too many commas to the list above.
7078 Replace it with a closing brace. */
7079 instr [strlen (instr) - 2] = '}';
7080 }
ff9940b0 7081 else
7082 {
7083 strcat (instr, "%|");
7084
7085 /* At this point there should only be one or two registers left in
7086 live_regs_mask: always LR, and possibly PC if we created a stack
7087 frame. LR contains the return address. If we do not have any
 7088 special requirements for function exit (e.g. interworking, or an ISR)
7089 then we can load this value directly into the PC and save an
7090 instruction. */
7091 if (! TARGET_INTERWORK
7092 && ! IS_INTERRUPT (func_type)
7093 && really_return)
7094 strcat (instr, reg_names [PC_REGNUM]);
da6558fd 7095 else
7096 strcat (instr, reg_names [LR_REGNUM]);
7097
7098 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
1d5473cb 7099 }
da6558fd 7100
6d3d9133 7101 if (really_return)
da6558fd 7102 {
7103 /* See if we need to generate an extra instruction to
7104 perform the actual function return. */
7105 switch ((int) ARM_FUNC_TYPE (func_type))
7106 {
7107 case ARM_FT_ISR:
7108 case ARM_FT_FIQ:
7109 output_asm_insn (instr, & operand);
7110
7111 strcpy (instr, "sub");
7112 strcat (instr, conditional);
7113 strcat (instr, "s\t%|pc, %|lr, #4");
7114 break;
7115
7116 case ARM_FT_EXCEPTION:
7117 output_asm_insn (instr, & operand);
7118
7119 strcpy (instr, "mov");
7120 strcat (instr, conditional);
7121 strcat (instr, "s\t%|pc, %|lr");
7122 break;
7123
7124 case ARM_FT_INTERWORKED:
7125 output_asm_insn (instr, & operand);
da6558fd 7126
7127 strcpy (instr, "bx");
7128 strcat (instr, conditional);
7129 strcat (instr, "\t%|lr");
7130 break;
7131
7132 default:
7133 /* The return has already been handled
7134 by loading the LR into the PC. */
7135 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7136 {
7137 output_asm_insn (instr, & operand);
7138
7139 strcpy (instr, "mov");
7140 strcat (instr, conditional);
7141 if (! TARGET_APCS_32)
7142 strcat (instr, "s");
7143 strcat (instr, "\t%|pc, %|lr");
7144 }
7145 break;
7146 }
da6558fd 7147 }
7148 }
7149 else if (really_return)
7150 {
7151 switch ((int) ARM_FUNC_TYPE (func_type))
7152 {
7153 case ARM_FT_ISR:
7154 case ARM_FT_FIQ:
7155 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7156 break;
7157
7158 case ARM_FT_INTERWORKED:
7159 sprintf (instr, "bx%s\t%%|lr", conditional);
7160 break;
7161
7162 case ARM_FT_EXCEPTION:
7163 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7164 break;
7165
7166 default:
7167 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7168 conditional, TARGET_APCS_32 ? "" : "s");
7169 break;
7170 }
ff9940b0 7171 }
7172 else
7173 /* Nothing to load off the stack, and
7174 no return instruction to generate. */
7175 return "";
f3bb6135 7176
6d3d9133
NC
7177 output_asm_insn (instr, & operand);
7178
ff9940b0
RE
7179 return "";
7180}
7181
ef179a26
NC
7182/* Write the function name into the code section, directly preceding
7183 the function prologue.
7184
7185 Code will be output similar to this:
7186 t0
7187 .ascii "arm_poke_function_name", 0
7188 .align
7189 t1
7190 .word 0xff000000 + (t1 - t0)
7191 arm_poke_function_name
7192 mov ip, sp
7193 stmfd sp!, {fp, ip, lr, pc}
7194 sub fp, ip, #4
7195
7196 When performing a stack backtrace, code can inspect the value
7197 of 'pc' stored at 'fp' + 0. If the trace function then looks
7198 at location pc - 12 and the top 8 bits are set, then we know
7199 that there is a function name embedded immediately preceding this
 7200 	 location, whose length is ((pc[-3]) & ~0xff000000).
7201
7202 We assume that pc is declared as a pointer to an unsigned long.
7203
7204 It is of no benefit to output the function name if we are assembling
7205 a leaf function. These function types will not contain a stack
7206 backtrace structure, therefore it is not possible to determine the
7207 function name. */
7208
7209void
7210arm_poke_function_name (stream, name)
7211 FILE * stream;
7212 char * name;
7213{
7214 unsigned long alignlength;
7215 unsigned long length;
7216 rtx x;
7217
d5b7b3ae
RE
7218 length = strlen (name) + 1;
7219 alignlength = ROUND_UP (length);
ef179a26 7220
949d79eb 7221 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 7222 ASM_OUTPUT_ALIGN (stream, 2);
e5951263 7223 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
ef179a26
NC
7224 ASM_OUTPUT_INT (stream, x);
7225}
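
/* For illustration only - a backtrace walker might recover a poked
   name as sketched below.  This hypothetical helper is not part of
   GCC; it simply follows the layout described above.  */
#if 0
static const char *
recover_poked_function_name (pc)
     const unsigned long * pc;
{
  unsigned long marker = pc[-3];

  /* The top 8 bits must all be set, otherwise no name was poked.  */
  if ((marker & 0xff000000) != 0xff000000)
    return 0;

  /* The marker word holds 0xff000000 plus the padded length of the
     name, which ends immediately before the marker word itself.  */
  return (const char *) (pc - 3) - (marker & ~0xff000000);
}
#endif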
7226
6d3d9133
NC
7227/* Place some comments into the assembler stream
7228 describing the current function. */
7229
cce8749e 7230void
d5b7b3ae 7231output_arm_prologue (f, frame_size)
6cfc7210 7232 FILE * f;
cce8749e
CH
7233 int frame_size;
7234{
6d3d9133
NC
7235 unsigned long func_type;
7236
7237 /* Sanity check. */
abaa26e5 7238 if (arm_ccfsm_state || arm_target_insn)
6d3d9133 7239 abort ();
31fdb4d5 7240
6d3d9133
NC
7241 func_type = arm_current_func_type ();
7242
7243 switch ((int) ARM_FUNC_TYPE (func_type))
7244 {
7245 default:
7246 case ARM_FT_NORMAL:
7247 break;
7248 case ARM_FT_INTERWORKED:
7249 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7250 break;
7251 case ARM_FT_EXCEPTION_HANDLER:
7252 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7253 break;
7254 case ARM_FT_ISR:
7255 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7256 break;
7257 case ARM_FT_FIQ:
7258 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7259 break;
7260 case ARM_FT_EXCEPTION:
7261 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7262 break;
7263 }
ff9940b0 7264
6d3d9133
NC
7265 if (IS_NAKED (func_type))
7266 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7267
7268 if (IS_VOLATILE (func_type))
7269 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7270
7271 if (IS_NESTED (func_type))
7272 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7273
dd18ae56
NC
7274 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7275 current_function_args_size,
7276 current_function_pretend_args_size, frame_size);
6d3d9133 7277
dd18ae56
NC
7278 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
7279 frame_pointer_needed,
7280 current_function_anonymous_args);
cce8749e 7281
6f7ebcbb
NC
7282 if (cfun->machine->lr_save_eliminated)
7283 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7284
32de079a
RE
7285#ifdef AOF_ASSEMBLER
7286 if (flag_pic)
dd18ae56 7287 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 7288#endif
6d3d9133
NC
7289
7290 return_used_this_function = 0;
f3bb6135 7291}
cce8749e 7292
cd2b33d0 7293const char *
0616531f
RE
7294arm_output_epilogue (really_return)
7295 int really_return;
cce8749e 7296{
949d79eb 7297 int reg;
6f7ebcbb 7298 unsigned long saved_regs_mask;
6d3d9133 7299 unsigned long func_type;
6354dc9b 7300 /* If we need this, then it will always be at least this much. */
b111229a 7301 int floats_offset = 12;
cce8749e 7302 rtx operands[3];
949d79eb 7303 int frame_size = get_frame_size ();
d5b7b3ae 7304 FILE * f = asm_out_file;
6d3d9133 7305 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
cce8749e 7306
6d3d9133
NC
7307 /* If we have already generated the return instruction
7308 then it is futile to generate anything else. */
b36ba79f 7309 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 7310 return "";
cce8749e 7311
6d3d9133 7312 func_type = arm_current_func_type ();
d5b7b3ae 7313
6d3d9133
NC
7314 if (IS_NAKED (func_type))
7315 /* Naked functions don't have epilogues. */
7316 return "";
0616531f 7317
6d3d9133 7318 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
e2c671ba 7319 {
86efdc8e 7320 rtx op;
6d3d9133
NC
7321
7322 /* A volatile function should never return. Call abort. */
ed0e6530 7323 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 7324 assemble_external_libcall (op);
e2c671ba 7325 output_asm_insn ("bl\t%a0", &op);
6d3d9133 7326
949d79eb 7327 return "";
e2c671ba
RE
7328 }
7329
6d3d9133
NC
7330 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7331 && ! really_return)
7332 /* If we are throwing an exception, then we really must
7333 be doing a return, so we can't tail-call. */
7334 abort ();
7335
6f7ebcbb 7336 saved_regs_mask = arm_compute_save_reg_mask ();
6d3d9133
NC
7337
7338 /* Compute how far away the floats will be. */
7339 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
6f7ebcbb 7340 if (saved_regs_mask & (1 << reg))
6ed30148 7341 floats_offset += 4;
6d3d9133 7342
ff9940b0 7343 if (frame_pointer_needed)
cce8749e 7344 {
b111229a
RE
7345 if (arm_fpu_arch == FP_SOFT2)
7346 {
d5b7b3ae 7347 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 7348 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7349 {
7350 floats_offset += 12;
dd18ae56
NC
7351 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7352 reg, FP_REGNUM, floats_offset);
b111229a
RE
7353 }
7354 }
7355 else
7356 {
d5b7b3ae 7357 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7358
d5b7b3ae 7359 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 7360 {
5895f793 7361 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7362 {
7363 floats_offset += 12;
6cfc7210 7364
6354dc9b 7365 /* We can't unstack more than four registers at once. */
b111229a
RE
7366 if (start_reg - reg == 3)
7367 {
dd18ae56
NC
7368 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7369 reg, FP_REGNUM, floats_offset);
b111229a
RE
7370 start_reg = reg - 1;
7371 }
7372 }
7373 else
7374 {
7375 if (reg != start_reg)
dd18ae56
NC
7376 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7377 reg + 1, start_reg - reg,
7378 FP_REGNUM, floats_offset);
b111229a
RE
7379 start_reg = reg - 1;
7380 }
7381 }
7382
7383 /* Just in case the last register checked also needs unstacking. */
7384 if (reg != start_reg)
dd18ae56
NC
7385 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7386 reg + 1, start_reg - reg,
7387 FP_REGNUM, floats_offset);
b111229a 7388 }
6d3d9133 7389
6f7ebcbb 7390 /* saved_regs_mask should contain the IP, which at the time of stack
6d3d9133
NC
7391 frame generation actually contains the old stack pointer. So a
7392 quick way to unwind the stack is just pop the IP register directly
7393 into the stack pointer. */
6f7ebcbb 7394 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
6d3d9133 7395 abort ();
6f7ebcbb
NC
7396 saved_regs_mask &= ~ (1 << IP_REGNUM);
7397 saved_regs_mask |= (1 << SP_REGNUM);
6d3d9133 7398
6f7ebcbb 7399 /* There are two registers left in saved_regs_mask - LR and PC. We
6d3d9133
NC
7400 only need to restore the LR register (the return address), but to
7401 save time we can load it directly into the PC, unless we need a
7402 special function exit sequence, or we are not really returning. */
7403 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7404 /* Delete the LR from the register mask, so that the LR on
7405 the stack is loaded into the PC in the register mask. */
6f7ebcbb 7406 saved_regs_mask &= ~ (1 << LR_REGNUM);
b111229a 7407 else
6f7ebcbb 7408 saved_regs_mask &= ~ (1 << PC_REGNUM);
6d3d9133 7409
6f7ebcbb 7410 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
cce8749e
CH
7411 }
7412 else
7413 {
d2288d8d 7414 /* Restore stack pointer if necessary. */
56636818 7415 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
7416 {
7417 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
7418 operands[2] = GEN_INT (frame_size
7419 + current_function_outgoing_args_size);
d2288d8d
TG
7420 output_add_immediate (operands);
7421 }
7422
b111229a
RE
7423 if (arm_fpu_arch == FP_SOFT2)
7424 {
d5b7b3ae 7425 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 7426 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
7427 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7428 reg, SP_REGNUM);
b111229a
RE
7429 }
7430 else
7431 {
d5b7b3ae 7432 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 7433
d5b7b3ae 7434 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 7435 {
5895f793 7436 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7437 {
7438 if (reg - start_reg == 3)
7439 {
dd18ae56
NC
7440 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7441 start_reg, SP_REGNUM);
b111229a
RE
7442 start_reg = reg + 1;
7443 }
7444 }
7445 else
7446 {
7447 if (reg != start_reg)
dd18ae56
NC
7448 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7449 start_reg, reg - start_reg,
7450 SP_REGNUM);
6cfc7210 7451
b111229a
RE
7452 start_reg = reg + 1;
7453 }
7454 }
7455
7456 /* Just in case the last register checked also needs unstacking. */
7457 if (reg != start_reg)
dd18ae56
NC
7458 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7459 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
7460 }
7461
6d3d9133
NC
7462 /* If we can, restore the LR into the PC. */
7463 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7464 && really_return
7465 && current_function_pretend_args_size == 0
6f7ebcbb 7466 && saved_regs_mask & (1 << LR_REGNUM))
cce8749e 7467 {
6f7ebcbb
NC
7468 saved_regs_mask &= ~ (1 << LR_REGNUM);
7469 saved_regs_mask |= (1 << PC_REGNUM);
6d3d9133 7470 }
d5b7b3ae 7471
6d3d9133
NC
7472 /* Load the registers off the stack. If we only have one register
 7473 	 to load, use the LDR instruction - it is faster. */
6f7ebcbb 7474 if (saved_regs_mask == (1 << LR_REGNUM))
6d3d9133
NC
7475 {
 7476 	  /* The exception handler ignores the LR, so we do
7477 not really need to load it off the stack. */
7478 if (eh_ofs)
7479 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
32de079a 7480 else
6d3d9133 7481 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
cce8749e 7482 }
6f7ebcbb
NC
7483 else if (saved_regs_mask)
7484 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
6d3d9133
NC
7485
7486 if (current_function_pretend_args_size)
cce8749e 7487 {
6d3d9133
NC
7488 /* Unwind the pre-pushed regs. */
7489 operands[0] = operands[1] = stack_pointer_rtx;
7490 operands[2] = GEN_INT (current_function_pretend_args_size);
7491 output_add_immediate (operands);
7492 }
7493 }
32de079a 7494
6d3d9133
NC
7495 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7496 /* Adjust the stack to remove the exception handler stuff. */
7497 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7498 REGNO (eh_ofs));
b111229a 7499
6d3d9133
NC
7500 if (! really_return)
7501 return "";
d5b7b3ae 7502
6d3d9133
NC
7503 /* Generate the return instruction. */
7504 switch ((int) ARM_FUNC_TYPE (func_type))
7505 {
7506 case ARM_FT_EXCEPTION_HANDLER:
7507 /* Even in 26-bit mode we do a mov (rather than a movs)
7508 because we don't have the PSR bits set in the address. */
7509 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7510 break;
0616531f 7511
6d3d9133
NC
7512 case ARM_FT_ISR:
7513 case ARM_FT_FIQ:
7514 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7515 break;
7516
7517 case ARM_FT_EXCEPTION:
7518 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7519 break;
7520
7521 case ARM_FT_INTERWORKED:
7522 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7523 break;
7524
7525 default:
7526 if (frame_pointer_needed)
 7527       /* If we used the frame pointer then the return address
7528 will have been loaded off the stack directly into the
7529 PC, so there is no need to issue a MOV instruction
7530 here. */
7531 ;
7532 else if (current_function_pretend_args_size == 0
6f7ebcbb 7533 && (saved_regs_mask & (1 << LR_REGNUM)))
6d3d9133
NC
7534 /* Similarly we may have been able to load LR into the PC
7535 even if we did not create a stack frame. */
7536 ;
7537 else if (TARGET_APCS_32)
7538 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7539 else
7540 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7541 break;
cce8749e 7542 }
f3bb6135 7543
949d79eb
RE
7544 return "";
7545}
7546
7547void
eb3921e8 7548output_func_epilogue (frame_size)
949d79eb
RE
7549 int frame_size;
7550{
d5b7b3ae
RE
7551 if (TARGET_THUMB)
7552 {
7553 /* ??? Probably not safe to set this here, since it assumes that a
7554 function will be emitted as assembly immediately after we generate
7555 RTL for it. This does not happen for inline functions. */
7556 return_used_this_function = 0;
7557 }
7558 else
7559 {
7560 if (use_return_insn (FALSE)
7561 && return_used_this_function
7562 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 7563 && !frame_pointer_needed)
d5b7b3ae 7564 abort ();
f3bb6135 7565
d5b7b3ae
RE
7566 /* Reset the ARM-specific per-function variables. */
7567 current_function_anonymous_args = 0;
7568 after_arm_reorg = 0;
7569 }
f3bb6135 7570}
e2c671ba 7571
2c849145
JM
7572/* Generate and emit an insn that we will recognize as a push_multi.
7573 Unfortunately, since this insn does not reflect very well the actual
7574 semantics of the operation, we need to annotate the insn for the benefit
7575 of DWARF2 frame unwind information. */
6d3d9133 7576
2c849145 7577static rtx
e2c671ba
RE
7578emit_multi_reg_push (mask)
7579 int mask;
7580{
7581 int num_regs = 0;
7582 int i, j;
7583 rtx par;
2c849145 7584 rtx dwarf;
87e27392 7585 int dwarf_par_index;
2c849145 7586 rtx tmp, reg;
e2c671ba 7587
d5b7b3ae 7588 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 7589 if (mask & (1 << i))
5895f793 7590 num_regs++;
e2c671ba
RE
7591
7592 if (num_regs == 0 || num_regs > 16)
7593 abort ();
7594
87e27392
NC
7595 /* For the body of the insn we are going to generate an UNSPEC in
7596 parallel with several USEs. This allows the insn to be recognised
7597 by the push_multi pattern in the arm.md file. The insn looks
7598 something like this:
7599
7600 (parallel [
b15bca31
RE
7601 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7602 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
87e27392
NC
7603 (use (reg:SI 11 fp))
7604 (use (reg:SI 12 ip))
7605 (use (reg:SI 14 lr))
7606 (use (reg:SI 15 pc))
7607 ])
7608
7609 For the frame note however, we try to be more explicit and actually
7610 show each register being stored into the stack frame, plus a (single)
7611 decrement of the stack pointer. We do it this way in order to be
7612 friendly to the stack unwinding code, which only wants to see a single
7613 stack decrement per instruction. The RTL we generate for the note looks
7614 something like this:
7615
7616 (sequence [
7617 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7618 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7619 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7620 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7621 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7622 (set (mem:SI (plus:SI (reg:SI sp) (const_int 16))) (reg:SI pc))
7623 ])
7624
7625 This sequence is used both by the code to support stack unwinding for
 7626      exception handlers and the code to generate dwarf2 frame debugging. */
7627
43cffd11 7628 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
87e27392 7629 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_regs + 1));
2c849145 7630 RTX_FRAME_RELATED_P (dwarf) = 1;
87e27392 7631 dwarf_par_index = 1;
e2c671ba 7632
d5b7b3ae 7633 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba
RE
7634 {
7635 if (mask & (1 << i))
7636 {
2c849145
JM
7637 reg = gen_rtx_REG (SImode, i);
7638
e2c671ba 7639 XVECEXP (par, 0, 0)
43cffd11
RE
7640 = gen_rtx_SET (VOIDmode,
7641 gen_rtx_MEM (BLKmode,
7642 gen_rtx_PRE_DEC (BLKmode,
7643 stack_pointer_rtx)),
7644 gen_rtx_UNSPEC (BLKmode,
2c849145 7645 gen_rtvec (1, reg),
43cffd11 7646 2));
2c849145
JM
7647
7648 tmp = gen_rtx_SET (VOIDmode,
87e27392 7649 gen_rtx_MEM (SImode, stack_pointer_rtx),
2c849145
JM
7650 reg);
7651 RTX_FRAME_RELATED_P (tmp) = 1;
87e27392
NC
7652 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7653 dwarf_par_index ++;
2c849145 7654
e2c671ba
RE
7655 break;
7656 }
7657 }
7658
7659 for (j = 1, i++; j < num_regs; i++)
7660 {
7661 if (mask & (1 << i))
7662 {
2c849145
JM
7663 reg = gen_rtx_REG (SImode, i);
7664
7665 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7666
7667 tmp = gen_rtx_SET (VOIDmode,
7668 gen_rtx_MEM (SImode,
87e27392
NC
7669 gen_rtx_PLUS (SImode,
7670 stack_pointer_rtx,
7671 GEN_INT (4 * j))),
2c849145
JM
7672 reg);
7673 RTX_FRAME_RELATED_P (tmp) = 1;
87e27392 7674 XVECEXP (dwarf, 0, dwarf_par_index ++) = tmp;
2c849145 7675
e2c671ba
RE
7676 j++;
7677 }
7678 }
b111229a 7679
2c849145 7680 par = emit_insn (par);
87e27392
NC
7681
7682 tmp = gen_rtx_SET (SImode,
7683 stack_pointer_rtx,
7684 gen_rtx_PLUS (SImode,
7685 stack_pointer_rtx,
7686 GEN_INT (-4 * num_regs)));
7687 RTX_FRAME_RELATED_P (tmp) = 1;
7688 XVECEXP (dwarf, 0, 0) = tmp;
7689
2c849145
JM
7690 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7691 REG_NOTES (par));
7692 return par;
b111229a
RE
7693}
7694
2c849145 7695static rtx
b111229a
RE
7696emit_sfm (base_reg, count)
7697 int base_reg;
7698 int count;
7699{
7700 rtx par;
2c849145
JM
7701 rtx dwarf;
7702 rtx tmp, reg;
b111229a
RE
7703 int i;
7704
43cffd11 7705 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
7706 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7707 RTX_FRAME_RELATED_P (dwarf) = 1;
7708
7709 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
7710
7711 XVECEXP (par, 0, 0)
7712 = gen_rtx_SET (VOIDmode,
7713 gen_rtx_MEM (BLKmode,
7714 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7715 gen_rtx_UNSPEC (BLKmode,
2c849145 7716 gen_rtvec (1, reg),
b15bca31 7717 UNSPEC_PUSH_MULT));
2c849145
JM
7718 tmp
7719 = gen_rtx_SET (VOIDmode,
7720 gen_rtx_MEM (XFmode,
7721 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7722 reg);
7723 RTX_FRAME_RELATED_P (tmp) = 1;
7724 XVECEXP (dwarf, 0, count - 1) = tmp;
7725
b111229a 7726 for (i = 1; i < count; i++)
2c849145
JM
7727 {
7728 reg = gen_rtx_REG (XFmode, base_reg++);
7729 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7730
7731 tmp = gen_rtx_SET (VOIDmode,
7732 gen_rtx_MEM (XFmode,
7733 gen_rtx_PRE_DEC (BLKmode,
7734 stack_pointer_rtx)),
7735 reg);
7736 RTX_FRAME_RELATED_P (tmp) = 1;
7737 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7738 }
b111229a 7739
2c849145
JM
7740 par = emit_insn (par);
7741 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7742 REG_NOTES (par));
7743 return par;
e2c671ba
RE
7744}
7745
6d3d9133
NC
7746/* Generate the prologue instructions for entry into an ARM function. */
7747
e2c671ba
RE
7748void
7749arm_expand_prologue ()
7750{
7751 int reg;
6d3d9133 7752 rtx amount;
2c849145 7753 rtx insn;
68dfd979 7754 rtx ip_rtx;
6d3d9133
NC
7755 unsigned long live_regs_mask;
7756 unsigned long func_type;
68dfd979 7757 int fp_offset = 0;
d3236b4d 7758
6d3d9133 7759 func_type = arm_current_func_type ();
e2c671ba 7760
31fdb4d5 7761 /* Naked functions don't have prologues. */
6d3d9133 7762 if (IS_NAKED (func_type))
31fdb4d5
DE
7763 return;
7764
6d3d9133
NC
 7765   /* Compute which registers we will have to save onto the stack. */
7766 live_regs_mask = arm_compute_save_reg_mask ();
e2c671ba 7767
68dfd979 7768 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
d3236b4d 7769
e2c671ba
RE
7770 if (frame_pointer_needed)
7771 {
6d3d9133 7772 if (IS_NESTED (func_type))
68dfd979
NC
7773 {
 7774 	  /* The static chain register is the same as the IP register
7775 used as a scratch register during stack frame creation.
 7776 	     To get around this we need to find somewhere to store IP
7777 whilst the frame is being created. We try the following
7778 places in order:
7779
6d3d9133 7780 1. The last argument register.
68dfd979
NC
7781 2. A slot on the stack above the frame. (This only
7782 works if the function is not a varargs function).
7783
6d3d9133
NC
7784 If neither of these places is available, we abort (for now).
7785
7786 Note - setting RTX_FRAME_RELATED_P on these insns breaks
7787 the dwarf2 parsing code in various bits of gcc. This ought
7788 to be fixed sometime, but until then the flag is suppressed.
7789 [Use gcc/testsuite/gcc.c-torture/execute/921215-1.c with
7790 "-O3 -g" to test this]. */
d3236b4d 7791
68dfd979
NC
7792 if (regs_ever_live[3] == 0)
7793 {
7794 insn = gen_rtx_REG (SImode, 3);
7795 insn = gen_rtx_SET (SImode, insn, ip_rtx);
d3236b4d 7796 insn = emit_insn (insn);
6d3d9133 7797 /* RTX_FRAME_RELATED_P (insn) = 1; */
68dfd979
NC
7798 }
7799 else if (current_function_pretend_args_size == 0)
7800 {
7801 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
7802 insn = gen_rtx_MEM (SImode, insn);
7803 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
7804 insn = emit_insn (insn);
6d3d9133 7805 /* RTX_FRAME_RELATED_P (insn) = 1; */
68dfd979
NC
7806 fp_offset = 4;
7807 }
7808 else
7809 /* FIXME - the way to handle this situation is to allow
7810 the pretend args to be dumped onto the stack, then
 7811 	       reuse r3 to save IP.  This would involve delaying the
f18969c0 7812 copying of SP into IP until after the pretend args
68dfd979 7813 have been dumped, but this is not too hard. */
f18969c0
PB
7814 /* [See e.g. gcc.c-torture/execute/nest-stdar-1.c.] */
7815 error ("Unable to find a temporary location for static chain register");
68dfd979
NC
7816 }
7817
68dfd979
NC
7818 if (fp_offset)
7819 {
7820 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
7821 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7822 }
7823 else
7824 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
7825
6d3d9133 7826 insn = emit_insn (insn);
8e56560e 7827 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7828 }
7829
7830 if (current_function_pretend_args_size)
7831 {
6d3d9133
NC
7832 /* Push the argument registers, or reserve space for them. */
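      /* Worked example (illustrative): with 8 bytes of pretend args,
	 (0xf0 >> (8 / 4)) & 0xf == 0xc, i.e. push {r2, r3}.  */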
7833 if (current_function_anonymous_args)
2c849145
JM
7834 insn = emit_multi_reg_push
7835 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
e2c671ba 7836 else
2c849145
JM
7837 insn = emit_insn
7838 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7839 GEN_INT (-current_function_pretend_args_size)));
7840 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7841 }
7842
7843 if (live_regs_mask)
7844 {
2c849145
JM
7845 insn = emit_multi_reg_push (live_regs_mask);
7846 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba 7847 }
d5b7b3ae 7848
6d3d9133 7849 if (! IS_VOLATILE (func_type))
b111229a 7850 {
6d3d9133 7851 /* Save any floating point call-saved registers used by this function. */
b111229a
RE
7852 if (arm_fpu_arch == FP_SOFT2)
7853 {
d5b7b3ae 7854 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
5895f793 7855 if (regs_ever_live[reg] && !call_used_regs[reg])
2c849145
JM
7856 {
7857 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7858 insn = gen_rtx_MEM (XFmode, insn);
7859 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7860 gen_rtx_REG (XFmode, reg)));
7861 RTX_FRAME_RELATED_P (insn) = 1;
7862 }
b111229a
RE
7863 }
7864 else
7865 {
d5b7b3ae 7866 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7867
d5b7b3ae 7868 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a 7869 {
5895f793 7870 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7871 {
7872 if (start_reg - reg == 3)
7873 {
2c849145
JM
7874 insn = emit_sfm (reg, 4);
7875 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
7876 start_reg = reg - 1;
7877 }
7878 }
7879 else
7880 {
7881 if (start_reg != reg)
2c849145
JM
7882 {
7883 insn = emit_sfm (reg + 1, start_reg - reg);
7884 RTX_FRAME_RELATED_P (insn) = 1;
7885 }
b111229a
RE
7886 start_reg = reg - 1;
7887 }
7888 }
7889
7890 if (start_reg != reg)
2c849145
JM
7891 {
7892 insn = emit_sfm (reg + 1, start_reg - reg);
7893 RTX_FRAME_RELATED_P (insn) = 1;
7894 }
b111229a
RE
7895 }
7896 }
e2c671ba
RE
7897
7898 if (frame_pointer_needed)
2c849145 7899 {
6d3d9133 7900 /* Create the new frame pointer. */
68dfd979
NC
7901 insn = GEN_INT (-(4 + current_function_pretend_args_size + fp_offset));
7902 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
2c849145 7903 RTX_FRAME_RELATED_P (insn) = 1;
68dfd979 7904
6d3d9133 7905 if (IS_NESTED (func_type))
68dfd979
NC
7906 {
7907 /* Recover the static chain register. */
7908 if (regs_ever_live [3] == 0)
7909 {
7910 insn = gen_rtx_REG (SImode, 3);
7911 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7912 insn = emit_insn (insn);
6d3d9133 7913 /* RTX_FRAME_RELATED_P (insn) = 1; */
68dfd979
NC
7914 }
7915 else /* if (current_function_pretend_args_size == 0) */
7916 {
7917 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
7918 insn = gen_rtx_MEM (SImode, insn);
7919 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7920 insn = emit_insn (insn);
6d3d9133 7921 /* RTX_FRAME_RELATED_P (insn) = 1; */
68dfd979
NC
7922 }
7923 }
2c849145 7924 }
e2c671ba 7925
6d3d9133
NC
7926 amount = GEN_INT (-(get_frame_size ()
7927 + current_function_outgoing_args_size));
7928
e2c671ba
RE
7929 if (amount != const0_rtx)
7930 {
745b9093
JM
7931 /* This add can produce multiple insns for a large constant, so we
7932 need to get tricky. */
7933 rtx last = get_last_insn ();
2c849145
JM
7934 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7935 amount));
745b9093
JM
7936 do
7937 {
7938 last = last ? NEXT_INSN (last) : get_insns ();
7939 RTX_FRAME_RELATED_P (last) = 1;
7940 }
7941 while (last != insn);
e04c2d6c
RE
7942
7943 /* If the frame pointer is needed, emit a special barrier that
7944 will prevent the scheduler from moving stores to the frame
7945 before the stack adjustment. */
7946 if (frame_pointer_needed)
7947 {
7948 rtx unspec = gen_rtx_UNSPEC (SImode,
7949 gen_rtvec (2, stack_pointer_rtx,
b15bca31
RE
7950 hard_frame_pointer_rtx),
7951 UNSPEC_PRLG_STK);
e04c2d6c 7952
6d3d9133 7953 insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
e04c2d6c
RE
7954 gen_rtx_MEM (BLKmode, unspec)));
7955 }
e2c671ba
RE
7956 }
7957
7958 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
7959 the call to mcount. Similarly if the user has requested no
7960 scheduling in the prolog. */
7961 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba 7962 emit_insn (gen_blockage ());
6f7ebcbb
NC
7963
7964 /* If the link register is being kept alive, with the return address in it,
7965 then make sure that it does not get reused by the ce2 pass. */
7966 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7967 {
7968 emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
7969 cfun->machine->lr_save_eliminated = 1;
7970 }
e2c671ba 7971}
cce8749e 7972\f
9997d19d
RE
 7973/* If CODE is 'd', then X is a condition operand and the instruction
7974 should only be executed if the condition is true.
ddd5a7c1 7975   If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
7976 should only be executed if the condition is false: however, if the mode
7977 of the comparison is CCFPEmode, then always execute the instruction -- we
7978 do this because in these circumstances !GE does not necessarily imply LT;
7979 in these cases the instruction pattern will take care to make sure that
7980 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 7981 doing this instruction unconditionally.
9997d19d
RE
7982 If CODE is 'N' then X is a floating point operand that must be negated
7983 before output.
7984 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7985 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
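
/* A hedged usage sketch: a machine-description template along the lines
   of "mov%d3\t%0, %1\;mov%D3\t%0, %2" executes the first move when
   condition operand 3 holds and the second when it does not
   (illustrative only - see arm.md for the real patterns).  */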
7986
7987void
7988arm_print_operand (stream, x, code)
62b10bbc 7989 FILE * stream;
9997d19d
RE
7990 rtx x;
7991 int code;
7992{
7993 switch (code)
7994 {
7995 case '@':
f3139301 7996 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
7997 return;
7998
d5b7b3ae
RE
7999 case '_':
8000 fputs (user_label_prefix, stream);
8001 return;
8002
9997d19d 8003 case '|':
f3139301 8004 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
8005 return;
8006
8007 case '?':
8008 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
8009 {
8010 if (TARGET_THUMB || current_insn_predicate != NULL)
8011 abort ();
8012
8013 fputs (arm_condition_codes[arm_current_cc], stream);
8014 }
8015 else if (current_insn_predicate)
8016 {
8017 enum arm_cond_code code;
8018
8019 if (TARGET_THUMB)
8020 abort ();
8021
8022 code = get_arm_condition_code (current_insn_predicate);
8023 fputs (arm_condition_codes[code], stream);
8024 }
9997d19d
RE
8025 return;
8026
8027 case 'N':
8028 {
8029 REAL_VALUE_TYPE r;
8030 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8031 r = REAL_VALUE_NEGATE (r);
8032 fprintf (stream, "%s", fp_const_from_val (&r));
8033 }
8034 return;
8035
8036 case 'B':
8037 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
8038 {
8039 HOST_WIDE_INT val;
5895f793 8040 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 8041 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8042 }
9997d19d
RE
8043 else
8044 {
8045 putc ('~', stream);
8046 output_addr_const (stream, x);
8047 }
8048 return;
8049
8050 case 'i':
8051 fprintf (stream, "%s", arithmetic_instr (x, 1));
8052 return;
8053
8054 case 'I':
8055 fprintf (stream, "%s", arithmetic_instr (x, 0));
8056 return;
8057
8058 case 'S':
8059 {
8060 HOST_WIDE_INT val;
5895f793 8061 const char * shift = shift_op (x, &val);
9997d19d 8062
e2c671ba
RE
8063 if (shift)
8064 {
5895f793 8065 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
8066 if (val == -1)
8067 arm_print_operand (stream, XEXP (x, 1), 0);
8068 else
4bc74ece
NC
8069 {
8070 fputc ('#', stream);
36ba9cb8 8071 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 8072 }
e2c671ba 8073 }
9997d19d
RE
8074 }
8075 return;
8076
d5b7b3ae
RE
8077 /* An explanation of the 'Q', 'R' and 'H' register operands:
8078
8079 In a pair of registers containing a DI or DF value the 'Q'
8080 operand returns the register number of the register containing
 8081      the least significant part of the value.  The 'R' operand returns
8082 the register number of the register containing the most
8083 significant part of the value.
8084
8085 The 'H' operand returns the higher of the two register numbers.
8086 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
 8087      same as the 'Q' operand, since the most significant part of the
8088 value is held in the lower number register. The reverse is true
8089 on systems where WORDS_BIG_ENDIAN is false.
8090
8091 The purpose of these operands is to distinguish between cases
8092 where the endian-ness of the values is important (for example
8093 when they are added together), and cases where the endian-ness
8094 is irrelevant, but the order of register operations is important.
8095 For example when loading a value from memory into a register
8096 pair, the endian-ness does not matter. Provided that the value
8097 from the lower memory address is put into the lower numbered
8098 register, and the value from the higher address is put into the
8099 higher numbered register, the load will work regardless of whether
8100 the value being loaded is big-wordian or little-wordian. The
8101 order of the two register loads can matter however, if the address
8102 of the memory location is actually held in one of the registers
8103 being overwritten by the load. */
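      /* Illustrative example: for a DImode value held in {r0, r1} on a
	 little-endian target, %Q prints r0 (least significant word),
	 %R prints r1 (most significant word) and %H prints r1.  A 64-bit
	 add can then be written as
	 "adds\t%Q0, %Q1, %Q2\;adc\t%R0, %R1, %R2" (sketch only).  */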
c1c2bc04 8104 case 'Q':
d5b7b3ae 8105 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 8106 abort ();
d5b7b3ae 8107 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
8108 return;
8109
9997d19d 8110 case 'R':
d5b7b3ae 8111 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 8112 abort ();
d5b7b3ae
RE
8113 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8114 return;
8115
8116 case 'H':
8117 if (REGNO (x) > LAST_ARM_REGNUM)
8118 abort ();
8119 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
8120 return;
8121
8122 case 'm':
d5b7b3ae
RE
8123 asm_fprintf (stream, "%r",
8124 GET_CODE (XEXP (x, 0)) == REG
8125 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
8126 return;
8127
8128 case 'M':
dd18ae56 8129 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae
RE
8130 REGNO (x),
8131 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
8132 return;
8133
8134 case 'd':
5895f793 8135 if (!x)
d5b7b3ae
RE
8136 return;
8137
8138 if (TARGET_ARM)
9997d19d
RE
8139 fputs (arm_condition_codes[get_arm_condition_code (x)],
8140 stream);
d5b7b3ae
RE
8141 else
8142 fputs (thumb_condition_code (x, 0), stream);
9997d19d
RE
8143 return;
8144
8145 case 'D':
5895f793 8146 if (!x)
d5b7b3ae
RE
8147 return;
8148
8149 if (TARGET_ARM)
8150 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8151 (get_arm_condition_code (x))],
9997d19d 8152 stream);
d5b7b3ae
RE
8153 else
8154 fputs (thumb_condition_code (x, 1), stream);
9997d19d
RE
8155 return;
8156
8157 default:
8158 if (x == 0)
8159 abort ();
8160
8161 if (GET_CODE (x) == REG)
d5b7b3ae 8162 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
8163 else if (GET_CODE (x) == MEM)
8164 {
8165 output_memory_reference_mode = GET_MODE (x);
8166 output_address (XEXP (x, 0));
8167 }
8168 else if (GET_CODE (x) == CONST_DOUBLE)
8169 fprintf (stream, "#%s", fp_immediate_constant (x));
8170 else if (GET_CODE (x) == NEG)
6354dc9b 8171 abort (); /* This should never happen now. */
9997d19d
RE
8172 else
8173 {
8174 fputc ('#', stream);
8175 output_addr_const (stream, x);
8176 }
8177 }
8178}
cce8749e
CH
8179\f
8180/* A finite state machine takes care of noticing whether or not instructions
8181 can be conditionally executed, and thus decrease execution time and code
8182 size by deleting branch instructions. The fsm is controlled by
8183 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8184
8185/* The state of the fsm controlling condition codes are:
8186 0: normal, do nothing special
8187 1: make ASM_OUTPUT_OPCODE not output this instruction
8188 2: make ASM_OUTPUT_OPCODE not output this instruction
8189 3: make instructions conditional
8190 4: make instructions conditional
8191
8192 State transitions (state->state by whom under condition):
8193 0 -> 1 final_prescan_insn if the `target' is a label
8194 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8195 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8196 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8197 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8198 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8199 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8200 (the target insn is arm_target_insn).
8201
ff9940b0
RE
8202 If the jump clobbers the conditions then we use states 2 and 4.
8203
8204 A similar thing can be done with conditional return insns.
8205
cce8749e
CH
8206 XXX In case the `target' is an unconditional branch, this conditionalising
8207 of the instructions always reduces code size, but not always execution
8208 time. But then, I want to reduce the code size to somewhere near what
8209 /bin/cc produces. */
8210
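/* For illustration, the effect of this FSM is to turn, say:

	cmp	r0, #0
	beq	.L1
	mov	r1, #1
   .L1:

   into:

	cmp	r0, #0
	movne	r1, #1

   deleting the branch and executing the skipped instruction under the
   inverse condition (example only - registers and labels are arbitrary).  */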
cce8749e
CH
8211/* Returns the index of the ARM condition code string in
8212 `arm_condition_codes'. COMPARISON should be an rtx like
8213 `(eq (...) (...))'. */
8214
84ed5e79 8215static enum arm_cond_code
cce8749e
CH
8216get_arm_condition_code (comparison)
8217 rtx comparison;
8218{
5165176d 8219 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
8220 register int code;
8221 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
8222
8223 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 8224 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
8225 XEXP (comparison, 1));
8226
8227 switch (mode)
cce8749e 8228 {
84ed5e79
RE
8229 case CC_DNEmode: code = ARM_NE; goto dominance;
8230 case CC_DEQmode: code = ARM_EQ; goto dominance;
8231 case CC_DGEmode: code = ARM_GE; goto dominance;
8232 case CC_DGTmode: code = ARM_GT; goto dominance;
8233 case CC_DLEmode: code = ARM_LE; goto dominance;
8234 case CC_DLTmode: code = ARM_LT; goto dominance;
8235 case CC_DGEUmode: code = ARM_CS; goto dominance;
8236 case CC_DGTUmode: code = ARM_HI; goto dominance;
8237 case CC_DLEUmode: code = ARM_LS; goto dominance;
8238 case CC_DLTUmode: code = ARM_CC;
8239
8240 dominance:
8241 if (comp_code != EQ && comp_code != NE)
8242 abort ();
8243
8244 if (comp_code == EQ)
8245 return ARM_INVERSE_CONDITION_CODE (code);
8246 return code;
8247
5165176d 8248 case CC_NOOVmode:
84ed5e79 8249 switch (comp_code)
5165176d 8250 {
84ed5e79
RE
8251 case NE: return ARM_NE;
8252 case EQ: return ARM_EQ;
8253 case GE: return ARM_PL;
8254 case LT: return ARM_MI;
5165176d
RE
8255 default: abort ();
8256 }
8257
8258 case CC_Zmode:
84ed5e79 8259 switch (comp_code)
5165176d 8260 {
84ed5e79
RE
8261 case NE: return ARM_NE;
8262 case EQ: return ARM_EQ;
5165176d
RE
8263 default: abort ();
8264 }
8265
8266 case CCFPEmode:
e45b72c4
RE
8267 case CCFPmode:
8268 /* These encodings assume that AC=1 in the FPA system control
8269 byte. This allows us to handle all cases except UNEQ and
8270 LTGT. */
84ed5e79
RE
8271 switch (comp_code)
8272 {
8273 case GE: return ARM_GE;
8274 case GT: return ARM_GT;
8275 case LE: return ARM_LS;
8276 case LT: return ARM_MI;
e45b72c4
RE
8277 case NE: return ARM_NE;
8278 case EQ: return ARM_EQ;
8279 case ORDERED: return ARM_VC;
8280 case UNORDERED: return ARM_VS;
8281 case UNLT: return ARM_LT;
8282 case UNLE: return ARM_LE;
8283 case UNGT: return ARM_HI;
8284 case UNGE: return ARM_PL;
8285 /* UNEQ and LTGT do not have a representation. */
8286 case UNEQ: /* Fall through. */
8287 case LTGT: /* Fall through. */
84ed5e79
RE
8288 default: abort ();
8289 }
8290
8291 case CC_SWPmode:
8292 switch (comp_code)
8293 {
8294 case NE: return ARM_NE;
8295 case EQ: return ARM_EQ;
8296 case GE: return ARM_LE;
8297 case GT: return ARM_LT;
8298 case LE: return ARM_GE;
8299 case LT: return ARM_GT;
8300 case GEU: return ARM_LS;
8301 case GTU: return ARM_CC;
8302 case LEU: return ARM_CS;
8303 case LTU: return ARM_HI;
8304 default: abort ();
8305 }
8306
bd9c7e23
RE
8307 case CC_Cmode:
8308 switch (comp_code)
8309 {
8310 case LTU: return ARM_CS;
8311 case GEU: return ARM_CC;
8312 default: abort ();
8313 }
8314
5165176d 8315 case CCmode:
84ed5e79 8316 switch (comp_code)
5165176d 8317 {
84ed5e79
RE
8318 case NE: return ARM_NE;
8319 case EQ: return ARM_EQ;
8320 case GE: return ARM_GE;
8321 case GT: return ARM_GT;
8322 case LE: return ARM_LE;
8323 case LT: return ARM_LT;
8324 case GEU: return ARM_CS;
8325 case GTU: return ARM_HI;
8326 case LEU: return ARM_LS;
8327 case LTU: return ARM_CC;
5165176d
RE
8328 default: abort ();
8329 }
8330
cce8749e
CH
8331 default: abort ();
8332 }
84ed5e79
RE
8333
8334 abort ();
f3bb6135 8335}
cce8749e
CH
8336
8337
8338void
74bbc178 8339arm_final_prescan_insn (insn)
cce8749e 8340 rtx insn;
cce8749e
CH
8341{
8342 /* BODY will hold the body of INSN. */
8343 register rtx body = PATTERN (insn);
8344
8345 /* This will be 1 if trying to repeat the trick, and things need to be
8346 reversed if it appears to fail. */
8347 int reverse = 0;
8348
ff9940b0
RE
 8349   /* If JUMP_CLOBBERS is one, then the condition codes are clobbered if the
 8350      branch is taken, even if the rtl suggests otherwise.  It also
8351 means that we have to grub around within the jump expression to find
8352 out what the conditions are when the jump isn't taken. */
8353 int jump_clobbers = 0;
8354
6354dc9b 8355 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
8356 int seeking_return = 0;
8357
cce8749e
CH
8358 /* START_INSN will hold the insn from where we start looking. This is the
8359 first insn after the following code_label if REVERSE is true. */
8360 rtx start_insn = insn;
8361
8362 /* If in state 4, check if the target branch is reached, in order to
8363 change back to state 0. */
8364 if (arm_ccfsm_state == 4)
8365 {
8366 if (insn == arm_target_insn)
f5a1b0d2
NC
8367 {
8368 arm_target_insn = NULL;
8369 arm_ccfsm_state = 0;
8370 }
cce8749e
CH
8371 return;
8372 }
8373
8374 /* If in state 3, it is possible to repeat the trick, if this insn is an
8375 unconditional branch to a label, and immediately following this branch
8376 is the previous target label which is only used once, and the label this
8377 branch jumps to is not too far off. */
8378 if (arm_ccfsm_state == 3)
8379 {
8380 if (simplejump_p (insn))
8381 {
8382 start_insn = next_nonnote_insn (start_insn);
8383 if (GET_CODE (start_insn) == BARRIER)
8384 {
8385 /* XXX Isn't this always a barrier? */
8386 start_insn = next_nonnote_insn (start_insn);
8387 }
8388 if (GET_CODE (start_insn) == CODE_LABEL
8389 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8390 && LABEL_NUSES (start_insn) == 1)
8391 reverse = TRUE;
8392 else
8393 return;
8394 }
ff9940b0
RE
8395 else if (GET_CODE (body) == RETURN)
8396 {
8397 start_insn = next_nonnote_insn (start_insn);
8398 if (GET_CODE (start_insn) == BARRIER)
8399 start_insn = next_nonnote_insn (start_insn);
8400 if (GET_CODE (start_insn) == CODE_LABEL
8401 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8402 && LABEL_NUSES (start_insn) == 1)
8403 {
8404 reverse = TRUE;
8405 seeking_return = 1;
8406 }
8407 else
8408 return;
8409 }
cce8749e
CH
8410 else
8411 return;
8412 }
8413
8414 if (arm_ccfsm_state != 0 && !reverse)
8415 abort ();
8416 if (GET_CODE (insn) != JUMP_INSN)
8417 return;
8418
ddd5a7c1 8419   /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
 8420      the jump should always come first. */
8421 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8422 body = XVECEXP (body, 0, 0);
8423
8424#if 0
 8425   /* If this is a conditional return then we don't want to know. */
8426 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8427 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8428 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8429 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8430 return;
8431#endif
8432
cce8749e
CH
8433 if (reverse
8434 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8435 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8436 {
bd9c7e23
RE
8437 int insns_skipped;
8438 int fail = FALSE, succeed = FALSE;
cce8749e
CH
8439 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8440 int then_not_else = TRUE;
ff9940b0 8441 rtx this_insn = start_insn, label = 0;
cce8749e 8442
e45b72c4
RE
8443 /* If the jump cannot be done with one instruction, we cannot
8444 conditionally execute the instruction in the inverse case. */
ff9940b0 8445 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 8446 {
5bbe2d40
RE
8447 jump_clobbers = 1;
8448 return;
8449 }
ff9940b0 8450
cce8749e
CH
8451 /* Register the insn jumped to. */
8452 if (reverse)
ff9940b0
RE
8453 {
8454 if (!seeking_return)
8455 label = XEXP (SET_SRC (body), 0);
8456 }
cce8749e
CH
8457 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8458 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8459 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8460 {
8461 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8462 then_not_else = FALSE;
8463 }
ff9940b0
RE
8464 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8465 seeking_return = 1;
8466 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8467 {
8468 seeking_return = 1;
8469 then_not_else = FALSE;
8470 }
cce8749e
CH
8471 else
8472 abort ();
8473
8474 /* See how many insns this branch skips, and what kind of insns. If all
8475 insns are okay, and the label or unconditional branch to the same
8476 label is not too far away, succeed. */
8477 for (insns_skipped = 0;
b36ba79f 8478 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
8479 {
8480 rtx scanbody;
8481
8482 this_insn = next_nonnote_insn (this_insn);
8483 if (!this_insn)
8484 break;
8485
cce8749e
CH
8486 switch (GET_CODE (this_insn))
8487 {
8488 case CODE_LABEL:
8489 /* Succeed if it is the target label, otherwise fail since
8490 control falls in from somewhere else. */
8491 if (this_insn == label)
8492 {
ff9940b0
RE
8493 if (jump_clobbers)
8494 {
8495 arm_ccfsm_state = 2;
8496 this_insn = next_nonnote_insn (this_insn);
8497 }
8498 else
8499 arm_ccfsm_state = 1;
cce8749e
CH
8500 succeed = TRUE;
8501 }
8502 else
8503 fail = TRUE;
8504 break;
8505
ff9940b0 8506 case BARRIER:
cce8749e 8507 /* Succeed if the following insn is the target label.
ff9940b0
RE
8508 Otherwise fail.
8509 If return insns are used then the last insn in a function
6354dc9b 8510 will be a barrier. */
cce8749e 8511 this_insn = next_nonnote_insn (this_insn);
ff9940b0 8512 if (this_insn && this_insn == label)
cce8749e 8513 {
ff9940b0
RE
8514 if (jump_clobbers)
8515 {
8516 arm_ccfsm_state = 2;
8517 this_insn = next_nonnote_insn (this_insn);
8518 }
8519 else
8520 arm_ccfsm_state = 1;
cce8749e
CH
8521 succeed = TRUE;
8522 }
8523 else
8524 fail = TRUE;
8525 break;
8526
ff9940b0 8527 case CALL_INSN:
2b835d68 8528 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 8529 calls. */
2b835d68 8530 if (TARGET_APCS_32)
bd9c7e23
RE
8531 {
8532 /* Succeed if the following insn is the target label,
8533 or if the following two insns are a barrier and
8534 the target label. */
8535 this_insn = next_nonnote_insn (this_insn);
8536 if (this_insn && GET_CODE (this_insn) == BARRIER)
8537 this_insn = next_nonnote_insn (this_insn);
8538
8539 if (this_insn && this_insn == label
b36ba79f 8540 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
8541 {
8542 if (jump_clobbers)
8543 {
8544 arm_ccfsm_state = 2;
8545 this_insn = next_nonnote_insn (this_insn);
8546 }
8547 else
8548 arm_ccfsm_state = 1;
8549 succeed = TRUE;
8550 }
8551 else
8552 fail = TRUE;
8553 }
ff9940b0 8554 break;
2b835d68 8555
cce8749e
CH
8556 case JUMP_INSN:
8557 /* If this is an unconditional branch to the same label, succeed.
8558 If it is to another label, do nothing. If it is conditional,
8559 fail. */
914a3b8c 8560 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 8561
ed4c4348 8562 scanbody = PATTERN (this_insn);
ff9940b0
RE
8563 if (GET_CODE (scanbody) == SET
8564 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
8565 {
8566 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
8567 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
8568 {
8569 arm_ccfsm_state = 2;
8570 succeed = TRUE;
8571 }
8572 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
8573 fail = TRUE;
8574 }
b36ba79f
RE
 8575 	  /* Fail if a conditional return is undesirable (e.g. on a
8576 StrongARM), but still allow this if optimizing for size. */
8577 else if (GET_CODE (scanbody) == RETURN
5895f793
RE
8578 && !use_return_insn (TRUE)
8579 && !optimize_size)
b36ba79f 8580 fail = TRUE;
ff9940b0
RE
8581 else if (GET_CODE (scanbody) == RETURN
8582 && seeking_return)
8583 {
8584 arm_ccfsm_state = 2;
8585 succeed = TRUE;
8586 }
8587 else if (GET_CODE (scanbody) == PARALLEL)
8588 {
8589 switch (get_attr_conds (this_insn))
8590 {
8591 case CONDS_NOCOND:
8592 break;
8593 default:
8594 fail = TRUE;
8595 break;
8596 }
8597 }
4e67550b
RE
8598 else
 8599 	    fail = TRUE;	/* Unrecognized jump (e.g. epilogue). */
8600
cce8749e
CH
8601 break;
8602
8603 case INSN:
ff9940b0
RE
8604 /* Instructions using or affecting the condition codes make it
8605 fail. */
ed4c4348 8606 scanbody = PATTERN (this_insn);
5895f793
RE
8607 if (!(GET_CODE (scanbody) == SET
8608 || GET_CODE (scanbody) == PARALLEL)
74641843 8609 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
8610 fail = TRUE;
8611 break;
8612
8613 default:
8614 break;
8615 }
8616 }
8617 if (succeed)
8618 {
ff9940b0 8619 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 8620 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
8621 else if (seeking_return || arm_ccfsm_state == 2)
8622 {
8623 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
8624 {
8625 this_insn = next_nonnote_insn (this_insn);
8626 if (this_insn && (GET_CODE (this_insn) == BARRIER
8627 || GET_CODE (this_insn) == CODE_LABEL))
8628 abort ();
8629 }
8630 if (!this_insn)
8631 {
 8632 	      /* Oh dear!  We ran off the end - give up. */
8633 recog (PATTERN (insn), insn, NULL_PTR);
8634 arm_ccfsm_state = 0;
abaa26e5 8635 arm_target_insn = NULL;
ff9940b0
RE
8636 return;
8637 }
8638 arm_target_insn = this_insn;
8639 }
cce8749e
CH
8640 else
8641 abort ();
ff9940b0
RE
8642 if (jump_clobbers)
8643 {
8644 if (reverse)
8645 abort ();
8646 arm_current_cc =
8647 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
8648 0), 0), 1));
8649 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
8650 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8651 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
8652 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8653 }
8654 else
8655 {
8656 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
8657 what it was. */
8658 if (!reverse)
8659 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
8660 0));
8661 }
cce8749e 8662
cce8749e
CH
8663 if (reverse || then_not_else)
8664 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8665 }
d5b7b3ae 8666
1ccbefce 8667 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 8668 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 8669 across this call; since the insn has been recognized already we
b020fd92 8670      call recog directly). */
ff9940b0 8671 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 8672 }
f3bb6135 8673}
cce8749e 8674
d5b7b3ae
RE
8675int
8676arm_regno_class (regno)
8677 int regno;
8678{
8679 if (TARGET_THUMB)
8680 {
8681 if (regno == STACK_POINTER_REGNUM)
8682 return STACK_REG;
8683 if (regno == CC_REGNUM)
8684 return CC_REG;
8685 if (regno < 8)
8686 return LO_REGS;
8687 return HI_REGS;
8688 }
8689
8690 if ( regno <= LAST_ARM_REGNUM
8691 || regno == FRAME_POINTER_REGNUM
8692 || regno == ARG_POINTER_REGNUM)
8693 return GENERAL_REGS;
8694
8695 if (regno == CC_REGNUM)
8696 return NO_REGS;
8697
8698 return FPU_REGS;
8699}
8700
8701/* Handle a special case when computing the offset
8702 of an argument from the frame pointer. */
8703int
8704arm_debugger_arg_offset (value, addr)
8705 int value;
8706 rtx addr;
8707{
8708 rtx insn;
8709
8710 /* We are only interested if dbxout_parms() failed to compute the offset. */
8711 if (value != 0)
8712 return 0;
8713
8714 /* We can only cope with the case where the address is held in a register. */
8715 if (GET_CODE (addr) != REG)
8716 return 0;
8717
8718 /* If we are using the frame pointer to point at the argument, then
8719 an offset of 0 is correct. */
cd2b33d0 8720 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
8721 return 0;
8722
8723 /* If we are using the stack pointer to point at the
8724 argument, then an offset of 0 is correct. */
5895f793 8725 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
8726 && REGNO (addr) == SP_REGNUM)
8727 return 0;
8728
8729 /* Oh dear. The argument is pointed to by a register rather
8730 than being held in a register, or being stored at a known
8731 offset from the frame pointer. Since GDB only understands
8732 those two kinds of argument we must translate the address
8733 held in the register into an offset from the frame pointer.
8734 We do this by searching through the insns for the function
8735 looking to see where this register gets its value. If the
8736 register is initialised from the frame pointer plus an offset
8737 then we are in luck and we can continue, otherwise we give up.
8738
8739 This code is exercised by producing debugging information
8740 for a function with arguments like this:
8741
8742 double func (double a, double b, int c, double d) {return d;}
8743
8744 Without this code the stab for parameter 'd' will be set to
8745 an offset of 0 from the frame pointer, rather than 8. */
8746
8747 /* The if() statement says:
8748
8749 If the insn is a normal instruction
8750 and if the insn is setting the value in a register
8751 and if the register being set is the register holding the address of the argument
 8752         and if the address is computed by an addition
8753 that involves adding to a register
8754 which is the frame pointer
8755 a constant integer
8756
8757 then... */
8758
8759 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8760 {
8761 if ( GET_CODE (insn) == INSN
8762 && GET_CODE (PATTERN (insn)) == SET
8763 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8764 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8765 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 8766 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
8767 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8768 )
8769 {
8770 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8771
8772 break;
8773 }
8774 }
8775
8776 if (value == 0)
8777 {
8778 debug_rtx (addr);
8779 warning ("Unable to compute real location of stacked parameter");
8780 value = 8; /* XXX magic hack */
8781 }
8782
8783 return value;
8784}
8785
d19fb8e3
NC
8786#define def_builtin(NAME, TYPE, CODE) \
8787 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL_PTR)
8788
8789void
8790arm_init_builtins ()
8791{
78053907
RE
8792#if 0
8793 /* The following code is NOT incorrect, but it trips a bug in the C++
8794 front-end that causes a failure while building libstdc++-v3. When
8795 that bug is fixed, this code can be re-enabled. */
8796
cbd5937a 8797 tree endlink = void_list_node;
d19fb8e3
NC
8798 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
8799 tree pchar_type_node = build_pointer_type (char_type_node);
8800
8801 tree int_ftype_int, void_ftype_pchar;
8802
8803 /* void func (void *) */
8804 void_ftype_pchar
8805 = build_function_type (void_type_node,
8806 tree_cons (NULL_TREE, pchar_type_node, endlink));
8807
8808 /* int func (int) */
8809 int_ftype_int
8810 = build_function_type (integer_type_node, int_endlink);
8811
8812 /* Initialize arm V5 builtins. */
8813 if (arm_arch5)
8814 {
8815 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
b15bca31
RE
8816 }
8817
8818 /* Initialize arm V5E builtins. */
8819 if (arm_arch5e)
8820 {
d19fb8e3
NC
8821 def_builtin ("__builtin_prefetch", void_ftype_pchar,
8822 ARM_BUILTIN_PREFETCH);
8823 }
78053907 8824#endif
d19fb8e3
NC
8825}
8826
8827/* Expand an expression EXP that calls a built-in function,
8828 with result going to TARGET if that's convenient
8829 (and in mode MODE if that's convenient).
8830 SUBTARGET may be used as the target for computing one of EXP's operands.
8831 IGNORE is nonzero if the value is to be ignored. */
8832
8833rtx
8834arm_expand_builtin (exp, target, subtarget, mode, ignore)
8835 tree exp;
8836 rtx target;
8837 rtx subtarget ATTRIBUTE_UNUSED;
8838 enum machine_mode mode ATTRIBUTE_UNUSED;
8839 int ignore ATTRIBUTE_UNUSED;
8840{
8841 enum insn_code icode;
8842 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8843 tree arglist = TREE_OPERAND (exp, 1);
8844 tree arg0;
8845 rtx op0, pat;
8846 enum machine_mode tmode, mode0;
8847 int fcode = DECL_FUNCTION_CODE (fndecl);
8848
8849 switch (fcode)
8850 {
8851 default:
8852 break;
8853
8854 case ARM_BUILTIN_CLZ:
8855 icode = CODE_FOR_clz;
8856 arg0 = TREE_VALUE (arglist);
8857 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8858 tmode = insn_data[icode].operand[0].mode;
8859 mode0 = insn_data[icode].operand[1].mode;
8860
8861 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8862 op0 = copy_to_mode_reg (mode0, op0);
8863 if (target == 0
8864 || GET_MODE (target) != tmode
8865 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8866 target = gen_reg_rtx (tmode);
8867 pat = GEN_FCN (icode) (target, op0);
8868 if (! pat)
8869 return 0;
8870 emit_insn (pat);
8871 return target;
8872
8873 case ARM_BUILTIN_PREFETCH:
8874 icode = CODE_FOR_prefetch;
8875 arg0 = TREE_VALUE (arglist);
8876 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8877
8878 op0 = gen_rtx_MEM (SImode, copy_to_mode_reg (Pmode, op0));
8879
8880 pat = GEN_FCN (icode) (op0);
8881 if (! pat)
8882 return 0;
8883 emit_insn (pat);
8884 return target;
8885 }
8886
8887 /* @@@ Should really do something sensible here. */
8888 return NULL_RTX;
8889}
d5b7b3ae
RE
8890\f
8891/* Recursively search through all of the blocks in a function
8892 checking to see if any of the variables created in that
8893 function match the RTX called 'orig'. If they do then
8894 replace them with the RTX called 'new'. */
8895
8896static void
8897replace_symbols_in_block (block, orig, new)
8898 tree block;
8899 rtx orig;
8900 rtx new;
8901{
8902 for (; block; block = BLOCK_CHAIN (block))
8903 {
8904 tree sym;
8905
5895f793 8906 if (!TREE_USED (block))
d5b7b3ae
RE
8907 continue;
8908
8909 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8910 {
8911 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8912 || DECL_IGNORED_P (sym)
8913 || TREE_CODE (sym) != VAR_DECL
8914 || DECL_EXTERNAL (sym)
5895f793 8915 || !rtx_equal_p (DECL_RTL (sym), orig)
d5b7b3ae
RE
8916 )
8917 continue;
8918
8919 DECL_RTL (sym) = new;
8920 }
8921
8922 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8923 }
8924}
8925
8926/* Return the number (counting from 0) of the least significant set
8927 bit in MASK. */
8928#ifdef __GNUC__
8929inline
8930#endif
8931static int
8932number_of_first_bit_set (mask)
8933 int mask;
8934{
8935 int bit;
8936
8937 for (bit = 0;
8938 (mask & (1 << bit)) == 0;
5895f793 8939 ++bit)
d5b7b3ae
RE
8940 continue;
8941
8942 return bit;
8943}
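/* Worked example (editorial note): for MASK == 0x18 the loop above
   stops at bit 3, so number_of_first_bit_set (0x18) == 3.  Callers
   must never pass zero, or the loop would walk off the top of the
   mask.  */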
8944
8945/* Generate code to return from a thumb function.
8946 If 'reg_containing_return_addr' is -1, then the return address is
8947 actually on the stack, at the stack pointer. */
8948static void
8949thumb_exit (f, reg_containing_return_addr, eh_ofs)
8950 FILE * f;
8951 int reg_containing_return_addr;
8952 rtx eh_ofs;
8953{
8954 unsigned regs_available_for_popping;
8955 unsigned regs_to_pop;
8956 int pops_needed;
8957 unsigned available;
8958 unsigned required;
8959 int mode;
8960 int size;
8961 int restore_a4 = FALSE;
8962
8963 /* Compute the registers we need to pop. */
8964 regs_to_pop = 0;
8965 pops_needed = 0;
8966
8967 /* There is an assumption here that if eh_ofs is not NULL, the
8968 normal return address will have been pushed. */
8969 if (reg_containing_return_addr == -1 || eh_ofs)
8970 {
8971 /* When we are generating a return for __builtin_eh_return,
8972 reg_containing_return_addr must specify the return regno. */
8973 if (eh_ofs && reg_containing_return_addr == -1)
8974 abort ();
8975
8976 regs_to_pop |= 1 << LR_REGNUM;
5895f793 8977 ++pops_needed;
d5b7b3ae
RE
8978 }
8979
8980 if (TARGET_BACKTRACE)
8981 {
8982 /* Restore the (ARM) frame pointer and stack pointer. */
8983 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8984 pops_needed += 2;
8985 }
8986
8987 /* If there is nothing to pop then just emit the BX instruction and
8988 return. */
8989 if (pops_needed == 0)
8990 {
8991 if (eh_ofs)
8992 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8993
8994 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8995 return;
8996 }
8997 /* Otherwise, if we are not supporting interworking, have not created
8998 a backtrace structure, and the function was not entered in ARM mode,
8999 then just pop the return address straight into the PC. */
5895f793
RE
9000 else if (!TARGET_INTERWORK
9001 && !TARGET_BACKTRACE
9002 && !is_called_in_ARM_mode (current_function_decl))
d5b7b3ae
RE
9003 {
9004 if (eh_ofs)
9005 {
9006 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9007 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9008 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9009 }
9010 else
9011 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9012
9013 return;
9014 }
9015
9016 /* Find out how many of the (return) argument registers we can corrupt. */
9017 regs_available_for_popping = 0;
9018
9019 /* If returning via __builtin_eh_return, the bottom three registers
9020 all contain information needed for the return. */
9021 if (eh_ofs)
9022 size = 12;
9023 else
9024 {
9025#ifdef RTX_CODE
9026 /* Deduce the registers used from the function's return value
9027 where possible. This is more reliable than examining
9028 regs_ever_live[] because that will be set if the register is
9029 ever used in the function, not just if the register is used
9030 to hold a return value. */
9031
9032 if (current_function_return_rtx != 0)
9033 mode = GET_MODE (current_function_return_rtx);
9034 else
9035#endif
9036 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9037
9038 size = GET_MODE_SIZE (mode);
9039
9040 if (size == 0)
9041 {
9042 /* In a void function we can use any argument register.
9043 In a function that returns a structure on the stack
9044 we can use the second and third argument registers. */
9045 if (mode == VOIDmode)
9046 regs_available_for_popping =
9047 (1 << ARG_REGISTER (1))
9048 | (1 << ARG_REGISTER (2))
9049 | (1 << ARG_REGISTER (3));
9050 else
9051 regs_available_for_popping =
9052 (1 << ARG_REGISTER (2))
9053 | (1 << ARG_REGISTER (3));
9054 }
9055 else if (size <= 4)
9056 regs_available_for_popping =
9057 (1 << ARG_REGISTER (2))
9058 | (1 << ARG_REGISTER (3));
9059 else if (size <= 8)
9060 regs_available_for_popping =
9061 (1 << ARG_REGISTER (3));
9062 }
9063
9064 /* Match registers to be popped with registers into which we pop them. */
9065 for (available = regs_available_for_popping,
9066 required = regs_to_pop;
9067 required != 0 && available != 0;
9068 available &= ~(available & - available),
9069 required &= ~(required & - required))
9070 -- pops_needed;
9071
9072 /* If we have any popping registers left over, remove them. */
9073 if (available > 0)
5895f793 9074 regs_available_for_popping &= ~available;
d5b7b3ae
RE
9075
9076 /* Otherwise if we need another popping register we can use
9077 the fourth argument register. */
9078 else if (pops_needed)
9079 {
9080 /* If we have not found any free argument registers and
9081 reg a4 contains the return address, we must move it. */
9082 if (regs_available_for_popping == 0
9083 && reg_containing_return_addr == LAST_ARG_REGNUM)
9084 {
9085 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9086 reg_containing_return_addr = LR_REGNUM;
9087 }
9088 else if (size > 12)
9089 {
9090 /* Register a4 is being used to hold part of the return value,
9091 but we have dire need of a free, low register. */
9092 restore_a4 = TRUE;
9093
9094 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9095 }
9096
9097 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9098 {
9099 /* The fourth argument register is available. */
9100 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9101
5895f793 9102 --pops_needed;
d5b7b3ae
RE
9103 }
9104 }
9105
9106 /* Pop as many registers as we can. */
9107 thumb_pushpop (f, regs_available_for_popping, FALSE);
9108
9109 /* Process the registers we popped. */
9110 if (reg_containing_return_addr == -1)
9111 {
9112 /* The return address was popped into the lowest numbered register. */
5895f793 9113 regs_to_pop &= ~(1 << LR_REGNUM);
d5b7b3ae
RE
9114
9115 reg_containing_return_addr =
9116 number_of_first_bit_set (regs_available_for_popping);
9117
9118 /* Remove this register from the mask of available registers, so that
9119 the return address will not be corrupted by further pops. */
5895f793 9120 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
d5b7b3ae
RE
9121 }
9122
9123 /* If we popped other registers then handle them here. */
9124 if (regs_available_for_popping)
9125 {
9126 int frame_pointer;
9127
9128 /* Work out which register currently contains the frame pointer. */
9129 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9130
9131 /* Move it into the correct place. */
9132 asm_fprintf (f, "\tmov\t%r, %r\n",
9133 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9134
9135 /* (Temporarily) remove it from the mask of popped registers. */
5895f793
RE
9136 regs_available_for_popping &= ~(1 << frame_pointer);
9137 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
d5b7b3ae
RE
9138
9139 if (regs_available_for_popping)
9140 {
9141 int stack_pointer;
9142
9143 /* We popped the stack pointer as well;
9144 find the register that contains it. */
9145 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9146
9147 /* Move it into the stack register. */
9148 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
9149
9150 /* At this point we have popped all necessary registers, so
9151 do not worry about restoring regs_available_for_popping
9152 to its correct value:
9153
9154 assert (pops_needed == 0)
9155 assert (regs_available_for_popping == (1 << frame_pointer))
9156 assert (regs_to_pop == (1 << STACK_POINTER)) */
9157 }
9158 else
9159 {
9160 /* Since we have just moved the popped value into the frame
9161 pointer, the popping register is available for reuse, and
9162 we know that we still have the stack pointer left to pop. */
9163 regs_available_for_popping |= (1 << frame_pointer);
9164 }
9165 }
9166
9167 /* If we still have registers left on the stack, but we no longer have
9168 any registers into which we can pop them, then we must move the return
9169 address into the link register and make available the register that
9170 contained it. */
9171 if (regs_available_for_popping == 0 && pops_needed > 0)
9172 {
9173 regs_available_for_popping |= 1 << reg_containing_return_addr;
9174
9175 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
9176 reg_containing_return_addr);
9177
9178 reg_containing_return_addr = LR_REGNUM;
9179 }
9180
9181 /* If we have registers left on the stack then pop some more.
9182 We know that at most we will want to pop FP and SP. */
9183 if (pops_needed > 0)
9184 {
9185 int popped_into;
9186 int move_to;
9187
9188 thumb_pushpop (f, regs_available_for_popping, FALSE);
9189
9190 /* We have popped either FP or SP.
9191 Move whichever one it is into the correct register. */
9192 popped_into = number_of_first_bit_set (regs_available_for_popping);
9193 move_to = number_of_first_bit_set (regs_to_pop);
9194
9195 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
9196
5895f793 9197 regs_to_pop &= ~(1 << move_to);
d5b7b3ae 9198
5895f793 9199 --pops_needed;
d5b7b3ae
RE
9200 }
9201
9202 /* If we still have not popped everything then we must have only
9203 had one register available to us and we are now popping the SP. */
9204 if (pops_needed > 0)
9205 {
9206 int popped_into;
9207
9208 thumb_pushpop (f, regs_available_for_popping, FALSE);
9209
9210 popped_into = number_of_first_bit_set (regs_available_for_popping);
9211
9212 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
9213 /*
9214 assert (regs_to_pop == (1 << STACK_POINTER))
9215 assert (pops_needed == 1)
9216 */
9217 }
9218
9219 /* If necessary restore the a4 register. */
9220 if (restore_a4)
9221 {
9222 if (reg_containing_return_addr != LR_REGNUM)
9223 {
9224 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9225 reg_containing_return_addr = LR_REGNUM;
9226 }
9227
9228 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9229 }
9230
9231 if (eh_ofs)
9232 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9233
9234 /* Return to caller. */
9235 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9236}
9237
9238/* Emit code to push or pop registers to or from the stack. */
9239static void
9240thumb_pushpop (f, mask, push)
9241 FILE * f;
9242 int mask;
9243 int push;
9244{
9245 int regno;
9246 int lo_mask = mask & 0xFF;
9247
5895f793 9248 if (lo_mask == 0 && !push && (mask & (1 << 15)))
d5b7b3ae
RE
9249 {
9250 /* Special case. Do not generate a POP PC statement here; do it in
9251 thumb_exit () instead. */
9252 thumb_exit (f, -1, NULL_RTX);
9253 return;
9254 }
9255
9256 fprintf (f, "\t%s\t{", push ? "push" : "pop");
9257
9258 /* Look at the low registers first. */
5895f793 9259 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
d5b7b3ae
RE
9260 {
9261 if (lo_mask & 1)
9262 {
9263 asm_fprintf (f, "%r", regno);
9264
9265 if ((lo_mask & ~1) != 0)
9266 fprintf (f, ", ");
9267 }
9268 }
9269
9270 if (push && (mask & (1 << LR_REGNUM)))
9271 {
9272 /* Catch pushing the LR. */
9273 if (mask & 0xFF)
9274 fprintf (f, ", ");
9275
9276 asm_fprintf (f, "%r", LR_REGNUM);
9277 }
9278 else if (!push && (mask & (1 << PC_REGNUM)))
9279 {
9280 /* Catch popping the PC. */
9281 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9282 {
9283 /* The PC is never popped directly; instead
9284 it is popped into r3 and then BX is used. */
9285 fprintf (f, "}\n");
9286
9287 thumb_exit (f, -1, NULL_RTX);
9288
9289 return;
9290 }
9291 else
9292 {
9293 if (mask & 0xFF)
9294 fprintf (f, ", ");
9295
9296 asm_fprintf (f, "%r", PC_REGNUM);
9297 }
9298 }
9299
9300 fprintf (f, "}\n");
9301}
9302\f
9303void
9304thumb_final_prescan_insn (insn)
9305 rtx insn;
9306{
d5b7b3ae 9307 if (flag_print_asm_name)
9d98a694
AO
9308 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9309 INSN_ADDRESSES (INSN_UID (insn)));
d5b7b3ae
RE
9310}
9311
9312int
9313thumb_shiftable_const (val)
9314 unsigned HOST_WIDE_INT val;
9315{
9316 unsigned HOST_WIDE_INT mask = 0xff;
9317 int i;
9318
9319 if (val == 0) /* XXX */
9320 return 0;
9321
9322 for (i = 0; i < 25; i++)
9323 if ((val & (mask << i)) == val)
9324 return 1;
9325
9326 return 0;
9327}
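/* Worked example (editorial note): 0x4400 is 0x44 << 8, which fits
   inside a single eight bit window, so thumb_shiftable_const (0x4400)
   returns 1.  0x10001 needs bits 0 and 16 and so spans more than
   eight bits; it returns 0.  */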
9328
9329 /* Returns non-zero if the current function contains,
9330 or might contain, a far jump. */
9331int
9332thumb_far_jump_used_p (int in_prologue)
9333{
9334 rtx insn;
9335
9336 /* This test is only important for leaf functions. */
5895f793 9337 /* assert (!leaf_function_p ()); */
d5b7b3ae
RE
9338
9339 /* If we have already decided that far jumps may be used,
9340 do not bother checking again, and always return true even if
9341 it turns out that they are not being used. Once we have made
9342 the decision that far jumps are present (and that hence the link
9343 register will be pushed onto the stack) we cannot go back on it. */
9344 if (cfun->machine->far_jump_used)
9345 return 1;
9346
9347 /* If this function is not being called from the prologue/epilogue
9348 generation code then it must be being called from the
9349 INITIAL_ELIMINATION_OFFSET macro. */
5895f793 9350 if (!in_prologue)
d5b7b3ae
RE
9351 {
9352 /* In this case we know that we are being asked about the elimination
9353 of the arg pointer register. If that register is not being used,
9354 then there are no arguments on the stack, and we do not have to
9355 worry that a far jump might force the prologue to push the link
9356 register, changing the stack offsets. In this case we can just
9357 return false, since the presence of far jumps in the function will
9358 not affect stack offsets.
9359
9360 If the arg pointer is live (or if it was live, but has now been
9361 eliminated and so set to dead) then we do have to test to see if
9362 the function might contain a far jump. This test can lead to some
9363 false positives, since before reload is completed the length of
9364 branch instructions is not known, so gcc defaults to returning their
9365 longest length, which in turn sets the far jump attribute to true.
9366
9367 A false positive will not result in bad code being generated, but it
9368 will result in a needless push and pop of the link register. We
9369 hope that this does not occur too often. */
9370 if (regs_ever_live [ARG_POINTER_REGNUM])
9371 cfun->machine->arg_pointer_live = 1;
5895f793 9372 else if (!cfun->machine->arg_pointer_live)
d5b7b3ae
RE
9373 return 0;
9374 }
9375
9376 /* Check to see if the function contains a branch
9377 insn with the far jump attribute set. */
9378 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9379 {
9380 if (GET_CODE (insn) == JUMP_INSN
9381 /* Ignore tablejump patterns. */
9382 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9383 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9384 && get_attr_far_jump (insn) == FAR_JUMP_YES
9385 )
9386 {
9387 /* Record the fact that we have decided that
9388 the function does use far jumps. */
9389 cfun->machine->far_jump_used = 1;
9390 return 1;
9391 }
9392 }
9393
9394 return 0;
9395}
9396
9397/* Return non-zero if FUNC must be entered in ARM mode. */
9398int
9399is_called_in_ARM_mode (func)
9400 tree func;
9401{
9402 if (TREE_CODE (func) != FUNCTION_DECL)
9403 abort ();
9404
9405 /* Ignore the problem of functions whose address is taken. */
9406 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9407 return TRUE;
9408
9409#ifdef ARM_PE
9410 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
9411#else
9412 return FALSE;
9413#endif
9414}
9415
9416/* The bits which aren't usefully expanded as rtl. */
cd2b33d0 9417const char *
d5b7b3ae
RE
9418thumb_unexpanded_epilogue ()
9419{
9420 int regno;
9421 int live_regs_mask = 0;
9422 int high_regs_pushed = 0;
9423 int leaf_function = leaf_function_p ();
9424 int had_to_push_lr;
9425 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9426
9427 if (return_used_this_function)
9428 return "";
9429
9430 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
5895f793
RE
9431 if (regs_ever_live[regno] && !call_used_regs[regno]
9432 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
d5b7b3ae
RE
9433 live_regs_mask |= 1 << regno;
9434
9435 for (regno = 8; regno < 13; regno++)
9436 {
5895f793
RE
9437 if (regs_ever_live[regno] && !call_used_regs[regno]
9438 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9439 high_regs_pushed++;
d5b7b3ae
RE
9440 }
9441
9442 /* The prologue may have pushed some high registers to use as
9443 work registers. e.g. the testsuite file:
9444 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9445 compiles to produce:
9446 push {r4, r5, r6, r7, lr}
9447 mov r7, r9
9448 mov r6, r8
9449 push {r6, r7}
9450 as part of the prologue. We have to undo that pushing here. */
9451
9452 if (high_regs_pushed)
9453 {
9454 int mask = live_regs_mask;
9455 int next_hi_reg;
9456 int size;
9457 int mode;
9458
9459#ifdef RTX_CODE
9460 /* Deduce the registers used from the function's return value where
9461 possible. This is more reliable than examining regs_ever_live[] because that
9462 will be set if the register is ever used in the function, not just if
9463 the register is used to hold a return value. */
9464
9465 if (current_function_return_rtx != 0)
9466 mode = GET_MODE (current_function_return_rtx);
9467 else
9468#endif
9469 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9470
9471 size = GET_MODE_SIZE (mode);
9472
9473 /* Unless we are returning a type of size > 12 register r3 is
9474 available. */
9475 if (size < 13)
9476 mask |= 1 << 3;
9477
9478 if (mask == 0)
9479 /* Oh dear! We have no low registers into which we can pop
9480 high registers! */
9481 fatal ("No low registers available for popping high registers");
9482
9483 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
9484 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9485 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
9486 break;
9487
9488 while (high_regs_pushed)
9489 {
9490 /* Find lo register(s) into which the high register(s) can
9491 be popped. */
9492 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9493 {
9494 if (mask & (1 << regno))
9495 high_regs_pushed--;
9496 if (high_regs_pushed == 0)
9497 break;
9498 }
9499
9500 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
9501
9502 /* Pop the values into the low register(s). */
9503 thumb_pushpop (asm_out_file, mask, 0);
9504
9505 /* Move the value(s) into the high registers. */
9506 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9507 {
9508 if (mask & (1 << regno))
9509 {
9510 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9511 regno);
9512
9513 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
9514 if (regs_ever_live[next_hi_reg]
9515 && !call_used_regs[next_hi_reg]
9516 && !(TARGET_SINGLE_PIC_BASE
9517 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
9518 break;
9519 }
9520 }
9521 }
9522 }
9523
5895f793 9524 had_to_push_lr = (live_regs_mask || !leaf_function
d5b7b3ae
RE
9525 || thumb_far_jump_used_p (1));
9526
9527 if (TARGET_BACKTRACE
9528 && ((live_regs_mask & 0xFF) == 0)
9529 && regs_ever_live [LAST_ARG_REGNUM] != 0)
9530 {
9531 /* The stack backtrace structure creation code had to
9532 push R7 in order to get a work register, so we pop
9533 it now. */
9534 live_regs_mask |= (1 << LAST_LO_REGNUM);
9535 }
9536
9537 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
9538 {
9539 if (had_to_push_lr
5895f793
RE
9540 && !is_called_in_ARM_mode (current_function_decl)
9541 && !eh_ofs)
d5b7b3ae
RE
9542 live_regs_mask |= 1 << PC_REGNUM;
9543
9544 /* Either no argument registers were pushed or a backtrace
9545 structure was created which includes an adjusted stack
9546 pointer, so just pop everything. */
9547 if (live_regs_mask)
9548 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9549
9550 if (eh_ofs)
9551 thumb_exit (asm_out_file, 2, eh_ofs);
9552 /* We have either just popped the return address into the
9553 PC, or it was kept in LR for the entire function, or
9554 it is still on the stack because we do not want to
9555 return by doing a pop {pc}. */
9556 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
9557 thumb_exit (asm_out_file,
9558 (had_to_push_lr
9559 && is_called_in_ARM_mode (current_function_decl)) ?
9560 -1 : LR_REGNUM, NULL_RTX);
9561 }
9562 else
9563 {
9564 /* Pop everything but the return address. */
5895f793 9565 live_regs_mask &= ~(1 << PC_REGNUM);
d5b7b3ae
RE
9566
9567 if (live_regs_mask)
9568 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9569
9570 if (had_to_push_lr)
9571 /* Get the return address into a temporary register. */
9572 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
9573
9574 /* Remove the argument registers that were pushed onto the stack. */
9575 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
9576 SP_REGNUM, SP_REGNUM,
9577 current_function_pretend_args_size);
9578
9579 if (eh_ofs)
9580 thumb_exit (asm_out_file, 2, eh_ofs);
9581 else
9582 thumb_exit (asm_out_file,
9583 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
9584 }
9585
9586 return "";
9587}
9588
9589/* Functions to save and restore machine-specific function data. */
9590
9591static void
9592arm_mark_machine_status (p)
9593 struct function * p;
9594{
6d3d9133 9595 machine_function *machine = p->machine;
d5b7b3ae 9596
f7a80099
NC
9597 if (machine)
9598 {
9599 ggc_mark_rtx (machine->ra_rtx);
9600 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
9601 }
d5b7b3ae
RE
9602}
9603
9604static void
9605arm_init_machine_status (p)
9606 struct function * p;
9607{
9608 p->machine =
6d3d9133
NC
9609 (machine_function *) xcalloc (1, sizeof (machine_function));
9610
9611#if ARM_FT_UNKNOWN != 0
9612 ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
9613#endif
d5b7b3ae
RE
9614}
9615
f7a80099
NC
9616static void
9617arm_free_machine_status (p)
9618 struct function * p;
9619{
9620 if (p->machine)
9621 {
9622 free (p->machine);
9623 p->machine = NULL;
9624 }
9625}
9626
d5b7b3ae
RE
9627/* Return an RTX indicating where the return address to the
9628 calling function can be found. */
9629rtx
9630arm_return_addr (count, frame)
9631 int count;
9632 rtx frame ATTRIBUTE_UNUSED;
9633{
9634 rtx reg;
9635
9636 if (count != 0)
9637 return NULL_RTX;
9638
9639 reg = cfun->machine->ra_rtx;
9640
9641 if (reg == NULL)
9642 {
9643 rtx init;
9644
9645 /* No rtx yet. Invent one, and initialize it for r14 (lr) in
9646 the prologue. */
9647 reg = gen_reg_rtx (Pmode);
9648 cfun->machine->ra_rtx = reg;
9649
5895f793 9650 if (!TARGET_APCS_32)
d5b7b3ae
RE
9651 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
9652 GEN_INT (RETURN_ADDR_MASK26));
9653 else
9654 init = gen_rtx_REG (Pmode, LR_REGNUM);
9655
9656 init = gen_rtx_SET (VOIDmode, reg, init);
9657
9658 /* Emit the insn to the prologue with the other argument copies. */
9659 push_topmost_sequence ();
9660 emit_insn_after (init, get_insns ());
9661 pop_topmost_sequence ();
9662 }
9663
9664 return reg;
9665}
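/* Illustrative usage (editorial note): this is the hook behind
   __builtin_return_address, so a call such as

       void *ra = __builtin_return_address (0);

   arrives here with COUNT == 0 and receives the pseudo initialized
   from lr in the prologue; any other COUNT yields NULL_RTX.  */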
9666
9667/* Do anything needed before RTL is emitted for each function. */
9668void
9669arm_init_expanders ()
9670{
9671 /* Arrange to initialize and mark the machine per-function status. */
9672 init_machine_status = arm_init_machine_status;
9673 mark_machine_status = arm_mark_machine_status;
f7a80099 9674 free_machine_status = arm_free_machine_status;
d5b7b3ae
RE
9675}
9676
9677/* Generate the rest of a function's prologue. */
9678void
9679thumb_expand_prologue ()
9680{
9681 HOST_WIDE_INT amount = (get_frame_size ()
9682 + current_function_outgoing_args_size);
6d3d9133
NC
9683 unsigned long func_type;
9684
9685 func_type = arm_current_func_type ();
d5b7b3ae
RE
9686
9687 /* Naked functions don't have prologues. */
6d3d9133 9688 if (IS_NAKED (func_type))
d5b7b3ae
RE
9689 return;
9690
6d3d9133
NC
9691 if (IS_INTERRUPT (func_type))
9692 {
9693 error ("Interrupt Service Routines cannot be coded in Thumb mode.");
9694 return;
9695 }
9696
d5b7b3ae
RE
9697 if (frame_pointer_needed)
9698 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
9699
9700 if (amount)
9701 {
9702 amount = ROUND_UP (amount);
9703
9704 if (amount < 512)
9705 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5895f793 9706 GEN_INT (-amount)));
d5b7b3ae
RE
9707 else
9708 {
9709 int regno;
9710 rtx reg;
9711
9712 /* The stack decrement is too big for an immediate value in a single
9713 insn. In theory we could issue multiple subtracts, but after
9714 three of them it becomes more space efficient to place the full
9715 value in the constant pool and load into a register. (Also the
9716 ARM debugger really likes to see only one stack decrement per
9717 function). So instead we look for a scratch register into which
9718 we can load the decrement, and then we subtract this from the
9719 stack pointer. Unfortunately on the thumb the only available
9720 scratch registers are the argument registers, and we cannot use
9721 these as they may hold arguments to the function. Instead we
9722 attempt to locate a call preserved register which is used by this
9723 function. If we can find one, then we know that it will have
9724 been pushed at the start of the prologue and so we can corrupt
9725 it now. */
9726 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
9727 if (regs_ever_live[regno]
5895f793
RE
9728 && !call_used_regs[regno] /* Paranoia */
9729 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
9730 && !(frame_pointer_needed
9731 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
d5b7b3ae
RE
9732 break;
9733
9734 if (regno > LAST_LO_REGNUM) /* Very unlikely */
9735 {
9736 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
9737
9738 /* Choose an arbitrary, non-argument low register. */
9739 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
9740
9741 /* Save it by copying it into a high, scratch register. */
9742 emit_insn (gen_movsi (spare, reg));
9743
9744 /* Decrement the stack. */
5895f793 9745 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
d5b7b3ae
RE
9746 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9747 reg));
9748
9749 /* Restore the low register's original value. */
9750 emit_insn (gen_movsi (reg, spare));
9751
9752 /* Emit a USE of the restored scratch register, so that flow
9753 analysis will not consider the restore redundant. The
9754 register won't be used again in this function and isn't
9755 restored by the epilogue. */
9756 emit_insn (gen_rtx_USE (VOIDmode, reg));
9757 }
9758 else
9759 {
9760 reg = gen_rtx (REG, SImode, regno);
9761
5895f793 9762 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
d5b7b3ae
RE
9763 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9764 reg));
9765 }
9766 }
9767 }
9768
9769 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9770 emit_insn (gen_blockage ());
9771}
9772
9773void
9774thumb_expand_epilogue ()
9775{
9776 HOST_WIDE_INT amount = (get_frame_size ()
9777 + current_function_outgoing_args_size);
6d3d9133
NC
9778
9779 /* Naked functions don't have epilogues. */
9780 if (IS_NAKED (arm_current_func_type ()))
d5b7b3ae
RE
9781 return;
9782
9783 if (frame_pointer_needed)
9784 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
9785 else if (amount)
9786 {
9787 amount = ROUND_UP (amount);
9788
9789 if (amount < 512)
9790 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9791 GEN_INT (amount)));
9792 else
9793 {
9794 /* r3 is always free in the epilogue. */
9795 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
9796
9797 emit_insn (gen_movsi (reg, GEN_INT (amount)));
9798 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
9799 }
9800 }
9801
9802 /* Emit a USE (stack_pointer_rtx), so that
9803 the stack adjustment will not be deleted. */
9804 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9805
9806 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9807 emit_insn (gen_blockage ());
9808}
9809
9810void
9811output_thumb_prologue (f)
9812 FILE * f;
9813{
9814 int live_regs_mask = 0;
9815 int high_regs_pushed = 0;
d5b7b3ae
RE
9816 int regno;
9817
6d3d9133 9818 if (IS_NAKED (arm_current_func_type ()))
d5b7b3ae
RE
9819 return;
9820
9821 if (is_called_in_ARM_mode (current_function_decl))
9822 {
9823 const char * name;
9824
9825 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9826 abort ();
9827 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9828 abort ();
9829 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9830
9831 /* Generate code sequence to switch us into Thumb mode. */
9832 /* The .code 32 directive has already been emitted by
6d77b53e 9833 ASM_DECLARE_FUNCTION_NAME. */
d5b7b3ae
RE
9834 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9835 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9836
9837 /* Generate a label, so that the debugger will notice the
9838 change in instruction sets. This label is also used by
9839 the assembler to bypass the ARM code when this function
9840 is called from a Thumb encoded function elsewhere in the
9841 same file. Hence the definition of STUB_NAME here must
9842 agree with the definition in gas/config/tc-arm.c */
9843
9844#define STUB_NAME ".real_start_of"
9845
9846 asm_fprintf (f, "\t.code\t16\n");
9847#ifdef ARM_PE
9848 if (arm_dllexport_name_p (name))
e5951263 9849 name = arm_strip_name_encoding (name);
d5b7b3ae
RE
9850#endif
9851 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9852 asm_fprintf (f, "\t.thumb_func\n");
9853 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9854 }
9855
d5b7b3ae
RE
9856 if (current_function_pretend_args_size)
9857 {
6d3d9133 9858 if (current_function_anonymous_args)
d5b7b3ae
RE
9859 {
9860 int num_pushes;
9861
9862 asm_fprintf (f, "\tpush\t{");
9863
9864 num_pushes = NUM_INTS (current_function_pretend_args_size);
9865
9866 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9867 regno <= LAST_ARG_REGNUM;
5895f793 9868 regno++)
d5b7b3ae
RE
9869 asm_fprintf (f, "%r%s", regno,
9870 regno == LAST_ARG_REGNUM ? "" : ", ");
9871
9872 asm_fprintf (f, "}\n");
9873 }
9874 else
9875 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9876 SP_REGNUM, SP_REGNUM,
9877 current_function_pretend_args_size);
9878 }
9879
5895f793
RE
9880 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9881 if (regs_ever_live[regno] && !call_used_regs[regno]
9882 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
d5b7b3ae
RE
9883 live_regs_mask |= 1 << regno;
9884
5895f793 9885 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
d5b7b3ae
RE
9886 live_regs_mask |= 1 << LR_REGNUM;
9887
9888 if (TARGET_BACKTRACE)
9889 {
9890 int offset;
9891 int work_register = 0;
9892 int wr;
9893
9894 /* We have been asked to create a stack backtrace structure.
9895 The code looks like this:
9896
9897 0 .align 2
9898 0 func:
9899 0 sub SP, #16 Reserve space for 4 registers.
9900 2 push {R7} Get a work register.
9901 4 add R7, SP, #20 Get the stack pointer before the push.
9902 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9903 8 mov R7, PC Get hold of the start of this code plus 12.
9904 10 str R7, [SP, #16] Store it.
9905 12 mov R7, FP Get hold of the current frame pointer.
9906 14 str R7, [SP, #4] Store it.
9907 16 mov R7, LR Get hold of the current return address.
9908 18 str R7, [SP, #12] Store it.
9909 20 add R7, SP, #16 Point at the start of the backtrace structure.
9910 22 mov FP, R7 Put this value into the frame pointer. */
9911
9912 if ((live_regs_mask & 0xFF) == 0)
9913 {
9914 /* See if the a4 register is free. */
9915
9916 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9917 work_register = LAST_ARG_REGNUM;
9918 else /* We must push a register of our own */
9919 live_regs_mask |= (1 << LAST_LO_REGNUM);
9920 }
9921
9922 if (work_register == 0)
9923 {
9924 /* Select a register from the list that will be pushed to
9925 use as our work register. */
9926 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9927 if ((1 << work_register) & live_regs_mask)
9928 break;
9929 }
9930
9931 asm_fprintf
9932 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9933 SP_REGNUM, SP_REGNUM);
9934
9935 if (live_regs_mask)
9936 thumb_pushpop (f, live_regs_mask, 1);
9937
9938 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9939 if (wr & live_regs_mask)
9940 offset += 4;
9941
9942 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9943 offset + 16 + current_function_pretend_args_size);
9944
9945 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9946 offset + 4);
9947
9948 /* Make sure that the instruction fetching the PC is in the right place
9949 to calculate "start of backtrace creation code + 12". */
9950 if (live_regs_mask)
9951 {
9952 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9953 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9954 offset + 12);
9955 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9956 ARM_HARD_FRAME_POINTER_REGNUM);
9957 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9958 offset);
9959 }
9960 else
9961 {
9962 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9963 ARM_HARD_FRAME_POINTER_REGNUM);
9964 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9965 offset);
9966 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9967 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9968 offset + 12);
9969 }
9970
9971 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9972 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9973 offset + 8);
9974 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9975 offset + 12);
9976 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9977 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9978 }
9979 else if (live_regs_mask)
9980 thumb_pushpop (f, live_regs_mask, 1);
9981
9982 for (regno = 8; regno < 13; regno++)
9983 {
5895f793
RE
9984 if (regs_ever_live[regno] && !call_used_regs[regno]
9985 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9986 high_regs_pushed++;
d5b7b3ae
RE
9987 }
9988
9989 if (high_regs_pushed)
9990 {
9991 int pushable_regs = 0;
9992 int mask = live_regs_mask & 0xff;
9993 int next_hi_reg;
9994
9995 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9996 {
5895f793
RE
9997 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9998 && !(TARGET_SINGLE_PIC_BASE
9999 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
10000 break;
10001 }
10002
10003 pushable_regs = mask;
10004
10005 if (pushable_regs == 0)
10006 {
10007 /* Desperation time -- this probably will never happen. */
10008 if (regs_ever_live[LAST_ARG_REGNUM]
5895f793 10009 || !call_used_regs[LAST_ARG_REGNUM])
d5b7b3ae
RE
10010 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10011 mask = 1 << LAST_ARG_REGNUM;
10012 }
10013
10014 while (high_regs_pushed > 0)
10015 {
10016 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
10017 {
10018 if (mask & (1 << regno))
10019 {
10020 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
10021
5895f793 10022 high_regs_pushed--;
d5b7b3ae
RE
10023
10024 if (high_regs_pushed)
10025 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
10026 next_hi_reg--)
10027 {
10028 if (regs_ever_live[next_hi_reg]
5895f793
RE
10029 && !call_used_regs[next_hi_reg]
10030 && !(TARGET_SINGLE_PIC_BASE
10031 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
10032 break;
10033 }
10034 else
10035 {
5895f793 10036 mask &= ~((1 << regno) - 1);
d5b7b3ae
RE
10037 break;
10038 }
10039 }
10040 }
10041
10042 thumb_pushpop (f, mask, 1);
10043 }
10044
10045 if (pushable_regs == 0
10046 && (regs_ever_live[LAST_ARG_REGNUM]
5895f793 10047 || !call_used_regs[LAST_ARG_REGNUM]))
d5b7b3ae
RE
10048 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10049 }
10050}
10051
10052/* Handle the case of a double word load into a low register from
10053 a computed memory address. The computed address may involve a
10054 register which is overwritten by the load. */
10055
cd2b33d0 10056const char *
d5b7b3ae
RE
10057thumb_load_double_from_address (operands)
10058 rtx * operands;
10059{
10060 rtx addr;
10061 rtx base;
10062 rtx offset;
10063 rtx arg1;
10064 rtx arg2;
10065
10066 if (GET_CODE (operands[0]) != REG)
10067 fatal ("thumb_load_double_from_address: destination is not a register");
10068
10069 if (GET_CODE (operands[1]) != MEM)
10070 {
10071 debug_rtx (operands[1]);
10072 fatal ("thumb_load_double_from_address: source is not a computed memory address");
10073 }
10074
10075 /* Get the memory address. */
10076 addr = XEXP (operands[1], 0);
10077
10078 /* Work out how the memory address is computed. */
10079 switch (GET_CODE (addr))
10080 {
10081 case REG:
10082 operands[2] = gen_rtx (MEM, SImode,
10083 plus_constant (XEXP (operands[1], 0), 4));
10084
10085 if (REGNO (operands[0]) == REGNO (addr))
10086 {
10087 output_asm_insn ("ldr\t%H0, %2", operands);
10088 output_asm_insn ("ldr\t%0, %1", operands);
10089 }
10090 else
10091 {
10092 output_asm_insn ("ldr\t%0, %1", operands);
10093 output_asm_insn ("ldr\t%H0, %2", operands);
10094 }
10095 break;
10096
10097 case CONST:
10098 /* Compute <address> + 4 for the high order load. */
10099 operands[2] = gen_rtx (MEM, SImode,
10100 plus_constant (XEXP (operands[1], 0), 4));
10101
10102 output_asm_insn ("ldr\t%0, %1", operands);
10103 output_asm_insn ("ldr\t%H0, %2", operands);
10104 break;
10105
10106 case PLUS:
10107 arg1 = XEXP (addr, 0);
10108 arg2 = XEXP (addr, 1);
10109
10110 if (CONSTANT_P (arg1))
10111 base = arg2, offset = arg1;
10112 else
10113 base = arg1, offset = arg2;
10114
10115 if (GET_CODE (base) != REG)
10116 fatal ("thumb_load_double_from_address: base is not a register");
10117
10118 /* Catch the case of <address> = <reg> + <reg> */
10119 if (GET_CODE (offset) == REG)
10120 {
10121 int reg_offset = REGNO (offset);
10122 int reg_base = REGNO (base);
10123 int reg_dest = REGNO (operands[0]);
10124
10125 /* Add the base and offset registers together into the
10126 higher destination register. */
10127 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
10128 reg_dest + 1, reg_base, reg_offset);
10129
10130 /* Load the lower destination register from the address in
10131 the higher destination register. */
10132 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
10133 reg_dest, reg_dest + 1);
10134
10135 /* Load the higher destination register from its own address
10136 plus 4. */
10137 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
10138 reg_dest + 1, reg_dest + 1);
10139 }
10140 else
10141 {
10142 /* Compute <address> + 4 for the high order load. */
10143 operands[2] = gen_rtx (MEM, SImode,
10144 plus_constant (XEXP (operands[1], 0), 4));
10145
10146 /* If the computed address is held in the low order register
10147 then load the high order register first, otherwise always
10148 load the low order register first. */
10149 if (REGNO (operands[0]) == REGNO (base))
10150 {
10151 output_asm_insn ("ldr\t%H0, %2", operands);
10152 output_asm_insn ("ldr\t%0, %1", operands);
10153 }
10154 else
10155 {
10156 output_asm_insn ("ldr\t%0, %1", operands);
10157 output_asm_insn ("ldr\t%H0, %2", operands);
10158 }
10159 }
10160 break;
10161
10162 case LABEL_REF:
10163 /* With no registers to worry about we can just load the value
10164 directly. */
10165 operands[2] = gen_rtx (MEM, SImode,
10166 plus_constant (XEXP (operands[1], 0), 4));
10167
10168 output_asm_insn ("ldr\t%H0, %2", operands);
10169 output_asm_insn ("ldr\t%0, %1", operands);
10170 break;
10171
10172 default:
10173 debug_rtx (operands[1]);
10174 fatal ("thumb_load_double_from_address: Unhandled address calculation");
10175 break;
10176 }
10177
10178 return "";
10179}
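/* Worked example (editorial note): in the REG case above, with
   operands[0] == r0 and the address also held in r0, the high word
   is loaded first so that the base register is not clobbered:

       ldr r1, [r0, #4]
       ldr r0, [r0]

   When the destination does not overlap the base, the low word is
   loaded first instead.  */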
10180
10181
cd2b33d0 10182const char *
d5b7b3ae
RE
10183thumb_output_move_mem_multiple (n, operands)
10184 int n;
10185 rtx * operands;
10186{
10187 rtx tmp;
10188
10189 switch (n)
10190 {
10191 case 2:
ca356f3a 10192 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10193 {
ca356f3a
RE
10194 tmp = operands[4];
10195 operands[4] = operands[5];
10196 operands[5] = tmp;
d5b7b3ae 10197 }
ca356f3a
RE
10198 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10199 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
d5b7b3ae
RE
10200 break;
10201
10202 case 3:
ca356f3a 10203 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10204 {
ca356f3a
RE
10205 tmp = operands[4];
10206 operands[4] = operands[5];
10207 operands[5] = tmp;
d5b7b3ae 10208 }
ca356f3a 10209 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 10210 {
ca356f3a
RE
10211 tmp = operands[5];
10212 operands[5] = operands[6];
10213 operands[6] = tmp;
d5b7b3ae 10214 }
ca356f3a 10215 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 10216 {
ca356f3a
RE
10217 tmp = operands[4];
10218 operands[4] = operands[5];
10219 operands[5] = tmp;
d5b7b3ae
RE
10220 }
10221
ca356f3a
RE
10222 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10223 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
d5b7b3ae
RE
10224 break;
10225
10226 default:
10227 abort ();
10228 }
10229
10230 return "";
10231}
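/* Worked example (editorial note): for N == 2 with operands[4] == r5
   and operands[5] == r4, the swap above reorders the pair so that the
   emitted register list is ascending, as assemblers expect for
   ldm/stm:

       ldmia r1!, {r4, r5}
       stmia r0!, {r4, r5}  */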
10232
10233 /* Routines for generating RTL. */
10234
10235void
10236thumb_expand_movstrqi (operands)
10237 rtx * operands;
10238{
10239 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10240 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10241 HOST_WIDE_INT len = INTVAL (operands[2]);
10242 HOST_WIDE_INT offset = 0;
10243
10244 while (len >= 12)
10245 {
ca356f3a 10246 emit_insn (gen_movmem12b (out, in, out, in));
d5b7b3ae
RE
10247 len -= 12;
10248 }
10249
10250 if (len >= 8)
10251 {
ca356f3a 10252 emit_insn (gen_movmem8b (out, in, out, in));
d5b7b3ae
RE
10253 len -= 8;
10254 }
10255
10256 if (len >= 4)
10257 {
10258 rtx reg = gen_reg_rtx (SImode);
10259 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10260 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10261 len -= 4;
10262 offset += 4;
10263 }
10264
10265 if (len >= 2)
10266 {
10267 rtx reg = gen_reg_rtx (HImode);
10268 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10269 plus_constant (in, offset))));
10270 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10271 reg));
10272 len -= 2;
10273 offset += 2;
10274 }
10275
10276 if (len)
10277 {
10278 rtx reg = gen_reg_rtx (QImode);
10279 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10280 plus_constant (in, offset))));
10281 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10282 reg));
10283 }
10284}
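/* Worked example (editorial note): a 15 byte copy decomposes as one
   12 byte block move (which post-increments both pointers), then a
   halfword at offset 0 and a final byte at offset 2:
   len 15 -> movmem12b; len 3 -> movhi; len 1 -> movqi.  */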
10285
10286int
10287thumb_cmp_operand (op, mode)
10288 rtx op;
10289 enum machine_mode mode;
10290{
10291 return ((GET_CODE (op) == CONST_INT
10292 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10293 || register_operand (op, mode));
10294}
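/* Worked example (editorial note): const_int 255 is accepted (Thumb
   CMP immediates are eight bits wide), const_int 256 is rejected, and
   any register operand of the right mode is accepted.  */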
10295
cd2b33d0 10296static const char *
d5b7b3ae
RE
10297thumb_condition_code (x, invert)
10298 rtx x;
10299 int invert;
10300{
cd2b33d0 10301 static const char * conds[] =
d5b7b3ae
RE
10302 {
10303 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10304 "hi", "ls", "ge", "lt", "gt", "le"
10305 };
10306 int val;
10307
10308 switch (GET_CODE (x))
10309 {
10310 case EQ: val = 0; break;
10311 case NE: val = 1; break;
10312 case GEU: val = 2; break;
10313 case LTU: val = 3; break;
10314 case GTU: val = 8; break;
10315 case LEU: val = 9; break;
10316 case GE: val = 10; break;
10317 case LT: val = 11; break;
10318 case GT: val = 12; break;
10319 case LE: val = 13; break;
10320 default:
10321 abort ();
10322 }
10323
10324 return conds[val ^ invert];
10325}
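/* Worked example (editorial note): GE maps to index 10 ("ge") and its
   inverse sits beside it at index 11 ("lt"), so with INVERT set a GE
   comparison yields "lt"; the final XOR with INVERT flips between
   each adjacent pair in the table.  */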
10326
10327/* Handle storing a half-word to memory during reload. */
10328void
10329thumb_reload_out_hi (operands)
10330 rtx * operands;
10331{
10332 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10333}
10334
10335 /* Handle loading a half-word from memory during reload. */
10336void
10337thumb_reload_in_hi (operands)
10338 rtx * operands ATTRIBUTE_UNUSED;
10339{
10340 abort ();
10341}
10342
c27ba912
DM
10343/* Return the length of a function name prefix
10344 that starts with the character C. */
10345static int
10346arm_get_strip_length (char c)
10347{
10348 switch (c)
10349 {
10350 ARM_NAME_ENCODING_LENGTHS
10351 default: return 0;
10352 }
10353}
10354
10355/* Return a pointer to a function's name with any
10356 and all prefix encodings stripped from it. */
10357const char *
10358arm_strip_name_encoding (const char * name)
10359{
10360 int skip;
10361
10362 while ((skip = arm_get_strip_length (* name)))
10363 name += skip;
10364
10365 return name;
10366}
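/* Illustrative sketch (editorial note, using a hypothetical
   encoding): if ARM_NAME_ENCODING_LENGTHS were to map a leading '*'
   to a strip length of 1, then arm_strip_name_encoding ("*foo")
   would step past the prefix and return "foo".  Any character that
   yields length 0 stops the loop.  */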
10367
2b835d68 10368#ifdef AOF_ASSEMBLER
6354dc9b 10369/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 10370
32de079a
RE
10371rtx aof_pic_label = NULL_RTX;
10372struct pic_chain
10373{
62b10bbc
NC
10374 struct pic_chain * next;
10375 char * symname;
32de079a
RE
10376};
10377
62b10bbc 10378static struct pic_chain * aof_pic_chain = NULL;
32de079a
RE
10379
10380rtx
10381aof_pic_entry (x)
10382 rtx x;
10383{
62b10bbc 10384 struct pic_chain ** chainp;
32de079a
RE
10385 int offset;
10386
10387 if (aof_pic_label == NULL_RTX)
10388 {
92a432f4
RE
10389 /* We mark this here and not in arm_add_gc_roots() to avoid
10390 polluting even more code with ifdefs, and because it never
10391 contains anything useful until we assign to it here. */
5895f793 10392 ggc_add_rtx_root (&aof_pic_label, 1);
43cffd11 10393 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
10394 }
10395
10396 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10397 offset += 4, chainp = &(*chainp)->next)
10398 if ((*chainp)->symname == XSTR (x, 0))
10399 return plus_constant (aof_pic_label, offset);
10400
10401 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10402 (*chainp)->next = NULL;
10403 (*chainp)->symname = XSTR (x, 0);
10404 return plus_constant (aof_pic_label, offset);
10405}
10406
10407void
10408aof_dump_pic_table (f)
62b10bbc 10409 FILE * f;
32de079a 10410{
62b10bbc 10411 struct pic_chain * chain;
32de079a
RE
10412
10413 if (aof_pic_chain == NULL)
10414 return;
10415
dd18ae56
NC
10416 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10417 PIC_OFFSET_TABLE_REGNUM,
10418 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
10419 fputs ("|x$adcons|\n", f);
10420
10421 for (chain = aof_pic_chain; chain; chain = chain->next)
10422 {
10423 fputs ("\tDCD\t", f);
10424 assemble_name (f, chain->symname);
10425 fputs ("\n", f);
10426 }
10427}
10428
2b835d68
RE
10429int arm_text_section_count = 1;
10430
10431char *
84ed5e79 10432aof_text_section ()
2b835d68
RE
10433{
10434 static char buf[100];
2b835d68
RE
10435 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10436 arm_text_section_count++);
10437 if (flag_pic)
10438 strcat (buf, ", PIC, REENTRANT");
10439 return buf;
10440}
10441
10442static int arm_data_section_count = 1;
10443
10444char *
10445aof_data_section ()
10446{
10447 static char buf[100];
10448 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10449 return buf;
10450}
10451
10452/* The AOF assembler is religiously strict about declarations of
10453 imported and exported symbols, so that it is impossible to declare
956d6950 10454 a function as imported near the beginning of the file, and then to
2b835d68
RE
10455 export it later on. It is, however, possible to delay the decision
10456 until all the functions in the file have been compiled. To get
10457 around this, we maintain a list of the imports and exports, and
10458 delete from it any that are subsequently defined. At the end of
10459 compilation we spit the remainder of the list out before the END
10460 directive. */
10461
10462struct import
10463{
62b10bbc
NC
10464 struct import * next;
10465 char * name;
2b835d68
RE
10466};
10467
62b10bbc 10468static struct import * imports_list = NULL;
2b835d68
RE
10469
10470void
10471aof_add_import (name)
62b10bbc 10472 char * name;
2b835d68 10473{
62b10bbc 10474 struct import * new;
2b835d68
RE
10475
10476 for (new = imports_list; new; new = new->next)
10477 if (new->name == name)
10478 return;
10479
10480 new = (struct import *) xmalloc (sizeof (struct import));
10481 new->next = imports_list;
10482 imports_list = new;
10483 new->name = name;
10484}
10485
10486void
10487aof_delete_import (name)
62b10bbc 10488 char * name;
2b835d68 10489{
62b10bbc 10490 struct import ** old;
2b835d68
RE
10491
10492 for (old = &imports_list; *old; old = & (*old)->next)
10493 {
10494 if ((*old)->name == name)
10495 {
10496 *old = (*old)->next;
10497 return;
10498 }
10499 }
10500}
10501
10502int arm_main_function = 0;
10503
10504void
10505aof_dump_imports (f)
62b10bbc 10506 FILE * f;
2b835d68
RE
10507{
10508 /* The AOF assembler needs this to cause the startup code to be extracted
10509 from the library. Bringing in __main causes the whole thing to work
10510 automagically. */
10511 if (arm_main_function)
10512 {
10513 text_section ();
10514 fputs ("\tIMPORT __main\n", f);
10515 fputs ("\tDCD __main\n", f);
10516 }
10517
10518 /* Now dump the remaining imports. */
10519 while (imports_list)
10520 {
10521 fprintf (f, "\tIMPORT\t");
10522 assemble_name (f, imports_list->name);
10523 fputc ('\n', f);
10524 imports_list = imports_list->next;
10525 }
10526}
10527#endif /* AOF_ASSEMBLER */