/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "tm_p.h"

/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint  HOST_WIDE_INT
#define Mmode enum machine_mode
#define Ulong unsigned long

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static int arm_naked_function_p PARAMS ((tree));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
static const char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static const char * output_multi_immediate PARAMS ((rtx *, const char *, const char *, int, Hint));
static void print_multi_reg PARAMS ((FILE *, const char *, int, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static const char * shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static const char * thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
\f
#undef Hint
#undef Mmode
#undef Ulong

/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char * minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)   /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)   /* Fast multiply.  */
#define FL_MODE26     (1 << 2)   /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)   /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)   /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)   /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)   /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)   /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)   /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)   /* El Segundo extensions to v5.  */
#define FL_XSCALE     (1 << 10)  /* XScale.  */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm720",                     FL_MODE26 | FL_MODE32 },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                       FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",                  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"xscale",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_XSCALE | FL_ARCH5 },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
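
/* For example, with "-mcpu=arm710 -mtune=strongarm" the option parser
   fills in arm_select[0].string and arm_select[2].string, and
   arm_override_options below then takes insn_flags from the arm710
   entry of all_cores and tune_flags from the strongarm entry.  */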

/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
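
/* The loop above relies on (value & -value) isolating the least
   significant set bit, which the ~ and & then clear.  For example,
   bit_count (0x2C) clears bits 2, 3 and 5 in successive iterations
   and returns 3.  */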

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int cpu;
        const char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2, "arm2" },
        { TARGET_CPU_arm6, "arm6" },
        { TARGET_CPU_arm610, "arm610" },
        { TARGET_CPU_arm710, "arm710" },
        { TARGET_CPU_arm7m, "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi, "arm7tdmi" },
        { TARGET_CPU_arm8, "arm8" },
        { TARGET_CPU_arm810, "arm810" },
        { TARGET_CPU_arm9, "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale, "xscale" },
        { TARGET_CPU_generic, "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions.");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used.");  */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_HARD_FLOAT && (tune_flags & FL_XSCALE))
    {
      warning ("XScale does not support hardware FP instructions.");
      target_flags |= ARM_FLAG_SOFT_FLOAT;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && !TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_xscale     = (tune_flags & FL_XSCALE) != 0;
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
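
/* To illustrate the flag derivation above: "-mcpu=strongarm" selects
   FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED
   | FL_STRONG from all_cores, so arm_fast_multiply, arm_arch4,
   arm_ld_sched and arm_is_strong all become 1, and FL_LDSCHED also
   drops arm_constant_limit to 1.  */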

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
\f
/* Return 1 if it is possible to return using a single instruction.  */
int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed
      /* Or if the function is variadic.  */
      || current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || cfun->machine->eh_epilogue_sp_ofs != NULL
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && !call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
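
/* For example, once reload has completed, a leaf function that saves
   no registers and needs no frame, such as "int f (int x) { return
   x + 1; }", passes all of the tests above and can return with the
   single instruction "mov pc, lr".  */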

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~HOST_UINT (0xffffffff)) != 0
      && ((i & ~HOST_UINT (0xffffffff))
          != ((~HOST_UINT (0))
              & ~HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                       >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    } while (mask != ~HOST_UINT (0xFF));

  return FALSE;
}
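
/* An ARM data-processing immediate is an 8-bit value rotated right by
   an even number of bit positions; the rotating mask in the loop above
   tests exactly this.  So 0xFF, 0x3FC (0xFF << 2) and 0xFF000000 are
   all valid immediates, while 0x101 and 0xFF1 are not and must be
   synthesised from several instructions or loaded from memory.  */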

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
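
/* The PLUS and AND cases above work because the operation can be
   flipped: an add of -255 is not directly encodable, but 255 is, so
   the add can be emitted as a subtract of 255; similarly an AND with
   the complement of an encodable constant can be emitted as a BIC.  */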

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
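
/* For example, a SET of 0xFFFF cannot be done in one instruction
   (0xFFFF is not a valid immediate), so arm_gen_constant reports a
   cost of two and the value is built from two valid pieces, e.g.
   "mov rD, #0xFF00" followed by "add rD, rD, #0xFF".  */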

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do two or more insns' worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
                                       << (32 - clear_sign_bit_copies))
                                      & HOST_UINT (0xffffffff));

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }
  return insns;
}
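
/* To illustrate the final loop above: a SET of 0x00FF00FF is broken
   at its blocks of zeros into the two valid immediates 0x000000FF and
   0x00FF0000, so it is synthesised in two instructions (a mov of one
   piece, then an add of the other) rather than the four-insn worst
   case.  */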

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~(HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
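
/* For example, (GT x 0xFFF) is rewritten as (GE x 0x1000): 0xFFF is
   not a valid immediate but 0x1000 is, so the adjusted comparison
   needs no extra constant-loading instructions.  */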

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE

  if (int_size_in_bytes (type) > 4)
    /* All structures/unions bigger than one word are returned in memory.  */
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
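
/* Under these rules "struct { int i; }" is returned in a register,
   while "struct { float f; }" (first field a float),
   "struct { int a, b; }" (bigger than one word) and
   "struct { short a; short b; }" (a second addressable, non-bitfield
   field) are all returned in memory.  */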

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}
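
/* The initial nregs value of 1 for functions returning an aggregate
   reflects the APCS convention that a pointer to the returned object
   is passed as a hidden first argument in r0, leaving only r1-r3 for
   the visible arguments.  */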

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
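
/* Under the APCS, NUM_ARG_REGS is 4: the first four words of arguments
   travel in r0-r3 and the rest go on the stack.  So for
   "int f (int a, int b, int c, int d, int e)" the arguments A-D occupy
   r0-r3 while E is passed on the stack.  */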
\f
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,		/* No #pragma [no_]long_calls is in effect.  */
  LONG,		/* #pragma long_calls is in effect.  */
  SHORT		/* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (pfile)
     cpp_reader *pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}
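
/* A sketch of the intended source-level usage (assuming the pragmas
   are registered under the names the handlers above suggest):

       #pragma long_calls
       void far_away (void);   -- given the long_call attribute
       #pragma no_long_calls
       void nearby (void);     -- given the short_call attribute
       #pragma long_calls_off
       void ordinary (void);   -- back to the command-line default  */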

\f
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine
   specific attribute for TYPE.  The attributes in ATTRIBUTES have
   previously been assigned to TYPE.  */
int
arm_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  if (   TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  if (is_attribute_p ("long_call", identifier))
    return (args == NULL_TREE);

  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  if (is_attribute_p ("short_call", identifier))
    return (args == NULL_TREE);

  return 0;
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
arm_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same
	 attribute.  */
      if ((l1 != l2) || (s1 != s2))
	return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
	return 0;
    }

  return 1;
}
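
/* For example, a function type carrying long_call and a plain function
   type compare as incompatible here (L1 != L2, result 0), as do a
   long_call type and a short_call type (mixed attributes), while two
   types that both carry the same attribute compare as compatible
   (result 1).  */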

/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */
void
arm_encode_call_attribute (decl, flag)
     tree decl;
     int flag;
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int len = strlen (str);
  char * newstr;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  newstr = alloca (len + 2);
  newstr[0] = flag;
  strcpy (newstr + 1, str);

  newstr = ggc_alloc_string (newstr, len + 1);
  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}
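
/* Thus a function `foo' marked long_call has its assembler name
   rewritten in place to LONG_CALL_FLAG_CHAR followed by "foo"; the
   ENCODED_LONG_CALL_ATTR_P / ENCODED_SHORT_CALL_ATTR_P tests used
   elsewhere in this file just look for that one-character prefix.  */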

/* Assigns default attributes to newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */
void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions when inside
     #pragma long_calls, or __attribute__ ((short_call)) when inside
     #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
	attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
	attr_name = get_identifier ("short_call");
      else
	return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}
\f
/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */
static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition, however, then this may not be the
     real definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && !DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}

/* Return non-zero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
     or b.  is within the scope of a #pragma long_calls
     or c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
     or e.  is inside the scope of a #pragma no_long_calls
     or f.  has an __attribute__ ((section))
     or g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */
int
arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
     rtx sym_ref;
     int call_cookie;
     int call_symbol;
{
  if (!call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
	return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}
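
/* Note the ordering of the tests above: an explicit short_call always
   wins, and -mlong-calls combined with -ffunction-sections forces a
   long call even for functions defined in this compilation unit, since
   function sections may end up arbitrarily far apart at link time.  */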

/* Return non-zero if it is ok to make a tail-call to DECL.  */
int
arm_function_ok_for_sibcall (decl)
     tree decl;
{
  int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;

  /* Never tailcall something for which we have no decl, or if we
     are in Thumb mode.  */
  if (decl == NULL || TARGET_THUMB)
    return 0;

  /* Get the calling method.  */
  if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_SHORT;
  else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_LONG;

  /* Cannot tail-call to long calls, since these are out of range of
     a branch instruction.  However, if not compiling PIC, we know
     we can reach the symbol if it is in this compilation unit.  */
  if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
    return 0;

  /* If we are interworking and the function is not declared static
     then we can't tail-call it unless we know that it exists in this
     compilation unit (since it might be a Thumb routine).  */
  if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
    return 0;

  /* Everything else is ok.  */
  return 1;
}

\f
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
	  || (GET_CODE (x) == CONST
	      && GET_CODE (XEXP (x, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      if (TARGET_ARM)
	emit_insn (gen_pic_load_addr_arm (address, orig));
      else
	emit_insn (gen_pic_load_addr_thumb (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
			     gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					   address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  */
	  ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (!no_new_pseudos)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      current_function_uses_pic_offset_table = 1;

      if (NEED_GOT_RELOC)
	{
	  rtx pic_ref, address = gen_reg_rtx (Pmode);

	  if (TARGET_ARM)
	    emit_insn (gen_pic_load_addr_arm (address, orig));
	  else
	    emit_insn (gen_pic_load_addr_thumb (address, orig));

	  pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);

	  emit_move_insn (address, pic_ref);
	  return address;
	}
    }

  return orig;
}
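
/* For the SYMBOL_REF case this emits, roughly (a sketch of the ARM
   sequence; pic_load_addr_arm is a constant-pool load):

       ldr     rA, <pool entry holding sym's GOT offset>
       ldr     rD, [rPIC, rA]    @ rPIC = pic_offset_table_rtx

   i.e. the symbol's address is fetched through the global offset
   table.  */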

static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}

void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (!flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition, on the Thumb it is 'dot + 4'.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
			      gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  if (TARGET_ARM)
    {
      emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
    }
  else
    {
      emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
    }

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}

#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif

int
arm_rtx_costs (x, code, outer)
     rtx x;
     enum rtx_code code;
     enum rtx_code outer;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  if (TARGET_THUMB)
    {
      switch (code)
	{
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATERT:
	case PLUS:
	case MINUS:
	case COMPARE:
	case NEG:
	case NOT:
	  return COSTS_N_INSNS (1);

	case MULT:
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      int cycles = 0;
	      unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

	      while (i)
		{
		  i >>= 2;
		  cycles++;
		}
	      return COSTS_N_INSNS (2) + cycles;
	    }
	  return COSTS_N_INSNS (1) + 16;
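
	  /* A worked example of the loop above: multiplying by 85 (0x55)
	     shifts I through 85 -> 21 -> 5 -> 1 -> 0, so CYCLES is 4 and
	     the cost returned is COSTS_N_INSNS (2) + 4, modelling a
	     multiplier that retires roughly two bits per cycle.  */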

	case SET:
	  return (COSTS_N_INSNS (1)
		  + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
			 + GET_CODE (SET_DEST (x)) == MEM));

	case CONST_INT:
	  if (outer == SET)
	    {
	      if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
		return 0;
	      if (thumb_shiftable_const (INTVAL (x)))
		return COSTS_N_INSNS (2);
	      return COSTS_N_INSNS (3);
	    }
	  else if (outer == PLUS
		   && INTVAL (x) < 256 && INTVAL (x) > -256)
	    return 0;
	  else if (outer == COMPARE
		   && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
	    return 0;
	  else if (outer == ASHIFT || outer == ASHIFTRT
		   || outer == LSHIFTRT)
	    return 0;
	  return COSTS_N_INSNS (2);

	case CONST:
	case CONST_DOUBLE:
	case LABEL_REF:
	case SYMBOL_REF:
	  return COSTS_N_INSNS (3);

	case UDIV:
	case UMOD:
	case DIV:
	case MOD:
	  return 100;

	case TRUNCATE:
	  return 99;

	case AND:
	case XOR:
	case IOR:
	  /* XXX guess.  */
	  return 8;

	case ADDRESSOF:
	case MEM:
	  /* XXX another guess.  */
	  /* Memory costs quite a lot for the first word, but subsequent words
	     load at the equivalent of a single insn each.  */
	  return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
		  + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

	case IF_THEN_ELSE:
	  /* XXX a guess.  */
	  if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	    return 14;
	  return 2;

	case ZERO_EXTEND:
	  /* XXX still guessing.  */
	  switch (GET_MODE (XEXP (x, 0)))
	    {
	    case QImode:
	      return (1 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case HImode:
	      return (4 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case SImode:
	      return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    default:
	      return 99;
	    }

	default:
	  return 99;
#if 0
	case FFS:
	case FLOAT:
	case FIX:
	case UNSIGNED_FIX:
	  /* XXX guess */
	  fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
		   rtx_name[code]);
	  abort ();
#endif
	}
    }

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
			&& const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & HOST_UINT (0xffffffff));
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;

	  /* Tune as appropriate.  */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}
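
      /* A worked example of the loop above (assuming FL_FAST_MULT,
	 i.e. an 8-bit booth unit): for a multiply by 0x101, ADD_COST
	 starts at 8 (0x101 is not a valid ARM immediate), and two
	 8-bit steps empty I, giving a final cost of 12.  */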

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    case CONST_INT:
      if (const_ok_for_arm (INTVAL (x)))
	return outer == SET ? 2 : -1;
      else if (outer == AND
	       && const_ok_for_arm (~INTVAL (x)))
	return -1;
      else if ((outer == COMPARE
		|| outer == PLUS || outer == MINUS)
	       && const_ok_for_arm (-INTVAL (x)))
	return -1;
      else
	return 5;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 6;

    case CONST_DOUBLE:
      if (const_double_rtx_ok_for_fpu (x))
	return outer == SET ? 2 : -1;
      else if ((outer == COMPARE || outer == PLUS)
	       && neg_const_double_rtx_ok_for_fpu (x))
	return -1;
      return 7;

    default:
      return 99;
    }
}

int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  /* Some true dependencies can have a higher cost depending
     on precisely how certain input operands are used.  */
  if (arm_is_xscale
      && REG_NOTE_KIND (link) == 0
      && recog_memoized (insn) >= 0
      && recog_memoized (dep) >= 0)
    {
      int shift_opnum = get_attr_shift (insn);
      enum attr_type attr_type = get_attr_type (dep);

      /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
	 operand for INSN.  If we have a shifted input operand and the
	 instruction we depend on is another ALU instruction, then we may
	 have to account for an additional stall.  */
      if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
	{
	  rtx shifted_operand;
	  int opno;

	  /* Get the shifted operand.  */
	  extract_insn (insn);
	  shifted_operand = recog_data.operand[shift_opnum];

	  /* Iterate over all the operands in DEP.  If we write an operand
	     that overlaps with SHIFTED_OPERAND, then we have to increase
	     the cost of this dependency.  */
	  extract_insn (dep);
	  preprocess_constraints ();
	  for (opno = 0; opno < recog_data.n_operands; opno++)
	    {
	      /* We can ignore strict inputs.  */
	      if (recog_data.operand_type[opno] == OP_IN)
		continue;

	      if (reg_overlap_mentioned_p (recog_data.operand[opno],
					   shifted_operand))
		return 2;
	    }
	}
    }

  /* XXX This is not strictly true for the FPA.  */
  if (REG_NOTE_KIND (link) == REG_DEP_ANTI
      || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    return 0;

  /* Call insns don't incur a stall, even if they follow a load.  */
  if (REG_NOTE_KIND (link) == 0
      && GET_CODE (insn) == CALL_INSN)
    return 1;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store, there is no conflict if the load reads
	 from a cached area.  Assume that loads from the stack, and from the
	 constant pool are cached, and that others will miss.  This is a
	 hack.  */
      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (hard_frame_pointer_rtx,
			      XEXP (SET_SRC (i_pat), 0)))
	return 1;
    }

  return cost;
}

/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

static const char * strings_fpa[8] =
{
  "0",   "1",   "2",   "3",
  "4",   "5",   "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}

/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
\f
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  /* XXX might have to check for lo regs only for thumb ???  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
arm_reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int regno = true_regnum (op);

  return (!CONSTANT_P (op)
	  && (regno == -1
	      || (GET_CODE (op) == REG
		  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4).
   MODE is QImode if called when computing constraints, or VOIDmode when
   emitting patterns.  In this latter case we cannot use memory_operand()
   because it will fail on badly formed MEMs, which is precisely what we are
   trying to catch.  */
int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if 0
  if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
    return 0;
#endif
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad.  */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && (!s_register_operand (XEXP (op, 0), VOIDmode)
	  || (!s_register_operand (XEXP (op, 1), VOIDmode)
	      && GET_CODE (XEXP (op, 1)) != CONST_INT)))
    return 1;

  /* Big constants are also bad.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
	  || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so.  */
  return 0;
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
	  || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (TARGET_THUMB)
    return thumb_cmp_operand (op, mode);

  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */
int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
	  && GET_CODE (op) == MEM
	  && offsettable_address_p (reload_completed | reload_in_progress,
				    mode, XEXP (op, 0)));
}

/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */
int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
}

/* Similar to s_register_operand, but does not allow hard integer
   registers.  */
int
f_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}

/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return const_double_rtx_ok_for_fpu (op);

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
	    || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}

/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Like di_operand, but don't accept constants.  */
int
nonimmediate_di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DImode, XEXP (op, 0));

  return FALSE;
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Like soft_df_operand, but don't accept constants.  */
int
nonimmediate_soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DFmode, XEXP (op, 0));
  return FALSE;
}

/* Return TRUE for valid index operands.  */
int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (GET_CODE (op) != CONST_INT
		  || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (GET_CODE (op) != CONST_INT
		  || (INTVAL (op) < 32 && INTVAL (op) > 0))));
}

/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
	      || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for binary logical operators.  */

int
logical_binary_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
	return power_of_two_operand (XEXP (x, 1), mode);

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
	      || code == ROTATERT);
    }
}

/* Return TRUE if x is EQ or NE.  */
int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE if x is a comparison operator other than LTGT or UNEQ.  */
int
arm_comparison_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return (comparison_operator (x, mode)
	  && GET_CODE (x) != LTGT
	  && GET_CODE (x) != UNEQ);
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */
int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */
int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (   GET_MODE (x) == mode
      && GET_CODE (x) == REG
      && REGNO (x) == CC_REGNUM)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */
int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (   mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  return cc_register (x, mode);
}

/* Return TRUE if X references a SYMBOL_REF.  */
int
symbol_mentioned_p (x)
     rtx x;
{
  register const char * fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

/* Return TRUE if X references a LABEL_REF.  */
int
label_mentioned_p (x)
     rtx x;
{
  register const char * fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (label_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}

/* Return 1 if memory locations are adjacent.  */
int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
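
/* For example, MEMs addressed by [r4] and [r4, #4] are adjacent by
   this test: same base register, offsets exactly one word apart, in
   either order.  */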

/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */
int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
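
/* The write-back form of the PARALLEL accepted above looks roughly
   like this for a three-register load (a sketch):

     (parallel [(set (reg rb) (plus (reg rb) (const_int 12)))
		(set (reg rd)   (mem (reg rb)))
		(set (reg rd+1) (mem (plus (reg rb) (const_int 4))))
		(set (reg rd+2) (mem (plus (reg rb) (const_int 8))))])

   store_multiple_operation below checks the mirror image of this.  */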

/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */
int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
e2c671ba 3425
84ed5e79
RE
3426int
3427load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3428 rtx * operands;
84ed5e79 3429 int nops;
62b10bbc
NC
3430 int * regs;
3431 int * base;
3432 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3433{
3434 int unsorted_regs[4];
3435 HOST_WIDE_INT unsorted_offsets[4];
3436 int order[4];
ad076f4e 3437 int base_reg = -1;
84ed5e79
RE
3438 int i;
3439
3440 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3441 extended if required. */
3442 if (nops < 2 || nops > 4)
3443 abort ();
3444
3445 /* Loop over the operands and check that the memory references are
3446 suitable (ie immediate offsets from the same base register). At
3447 the same time, extract the target register, and the memory
3448 offsets. */
3449 for (i = 0; i < nops; i++)
3450 {
3451 rtx reg;
3452 rtx offset;
3453
56636818
JL
3454 /* Convert a subreg of a mem into the mem itself. */
3455 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3456 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3457
84ed5e79
RE
3458 if (GET_CODE (operands[nops + i]) != MEM)
3459 abort ();
3460
3461 /* Don't reorder volatile memory references; it doesn't seem worth
3462 looking for the case where the order is ok anyway. */
3463 if (MEM_VOLATILE_P (operands[nops + i]))
3464 return 0;
3465
3466 offset = const0_rtx;
3467
3468 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3469 || (GET_CODE (reg) == SUBREG
3470 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3471 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3472 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3473 == REG)
3474 || (GET_CODE (reg) == SUBREG
3475 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3476 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3477 == CONST_INT)))
3478 {
3479 if (i == 0)
3480 {
d5b7b3ae 3481 base_reg = REGNO (reg);
84ed5e79
RE
3482 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3483 ? REGNO (operands[i])
3484 : REGNO (SUBREG_REG (operands[i])));
3485 order[0] = 0;
3486 }
3487 else
3488 {
6354dc9b 3489 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3490 /* Not addressed from the same base register. */
3491 return 0;
3492
3493 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3494 ? REGNO (operands[i])
3495 : REGNO (SUBREG_REG (operands[i])));
3496 if (unsorted_regs[i] < unsorted_regs[order[0]])
3497 order[0] = i;
3498 }
3499
3500 /* If it isn't an integer register, or if it overwrites the
3501 base register but isn't the last insn in the list, then
3502 we can't do this. */
3503 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3504 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3505 return 0;
3506
3507 unsorted_offsets[i] = INTVAL (offset);
3508 }
3509 else
3510 /* Not a suitable memory address. */
3511 return 0;
3512 }
3513
3514 /* All the useful information has now been extracted from the
3515 operands into unsorted_regs and unsorted_offsets; additionally,
3516 order[0] has been set to the lowest numbered register in the
3517 list. Sort the registers into order, and check that the memory
3518 offsets are ascending and adjacent. */
3519
3520 for (i = 1; i < nops; i++)
3521 {
3522 int j;
3523
3524 order[i] = order[i - 1];
3525 for (j = 0; j < nops; j++)
3526 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3527 && (order[i] == order[i - 1]
3528 || unsorted_regs[j] < unsorted_regs[order[i]]))
3529 order[i] = j;
3530
3531 /* Have we found a suitable register? If not, one must be used more
3532 than once. */
3533 if (order[i] == order[i - 1])
3534 return 0;
3535
3536 /* Is the memory address adjacent and ascending? */
3537 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3538 return 0;
3539 }
3540
3541 if (base)
3542 {
3543 *base = base_reg;
3544
3545 for (i = 0; i < nops; i++)
3546 regs[i] = unsorted_regs[order[i]];
3547
3548 *load_offset = unsorted_offsets[order[0]];
3549 }
3550
3551 if (unsorted_offsets[order[0]] == 0)
3552 return 1; /* ldmia */
3553
3554 if (unsorted_offsets[order[0]] == 4)
3555 return 2; /* ldmib */
3556
3557 if (unsorted_offsets[order[nops - 1]] == 0)
3558 return 3; /* ldmda */
3559
3560 if (unsorted_offsets[order[nops - 1]] == -4)
3561 return 4; /* ldmdb */
3562
949d79eb
RE
3563 /* For the ARM8, ARM9 and StrongARM, 2 ldr instructions are faster than an ldm
3564 if the offset isn't small enough. The reason 2 ldrs are faster
3565 is because these ARMs are able to do more than one cache access
3566 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3567 whilst the ARM8 has a double bandwidth cache. This means that
3568 these cores can do both an instruction fetch and a data fetch in
3569 a single cycle, so the trick of calculating the address into a
3570 scratch register (one of the result regs) and then doing a load
3571 multiple actually becomes slower (and no smaller in code size).
3572 That is the transformation
6cc8c0b3
NC
3573
3574 ldr rd1, [rbase + offset]
3575 ldr rd2, [rbase + offset + 4]
3576
3577 to
3578
3579 add rd1, rbase, offset
3580 ldmia rd1, {rd1, rd2}
3581
949d79eb
RE
3582 produces worse code -- '3 cycles + any stalls on rd2' instead of
3583 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3584 access per cycle, the first sequence could never complete in less
3585 than 6 cycles, whereas the ldm sequence would only take 5 and
3586 would make better use of sequential accesses if not hitting the
3587 cache.
3588
3589 We cheat here and test 'arm_ld_sched' which we currently know to
3590 only be true for the ARM8, ARM9 and StrongARM. If this ever
3591 changes, then the test below needs to be reworked. */
f5a1b0d2 3592 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
3593 return 0;
3594
84ed5e79
RE
3595 /* Can't do it without setting up the offset; only do this if it takes
3596 no more than one insn. */
3597 return (const_ok_for_arm (unsorted_offsets[order[0]])
3598 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3599}
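/* An illustrative example (added for exposition; the register numbers
   are invented): given the two loads

	ldr	r0, [r4]
	ldr	r1, [r4, #4]

   load_multiple_sequence returns 1 (ldmia) with regs = {0, 1},
   *base = 4 and *load_offset = 0, and emit_ldm_seq below would then
   emit

	ldmia	r4, {r0, r1}	@ phole ldm  */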
3600
cd2b33d0 3601const char *
84ed5e79 3602emit_ldm_seq (operands, nops)
62b10bbc 3603 rtx * operands;
84ed5e79
RE
3604 int nops;
3605{
3606 int regs[4];
3607 int base_reg;
3608 HOST_WIDE_INT offset;
3609 char buf[100];
3610 int i;
3611
3612 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3613 {
3614 case 1:
3615 strcpy (buf, "ldm%?ia\t");
3616 break;
3617
3618 case 2:
3619 strcpy (buf, "ldm%?ib\t");
3620 break;
3621
3622 case 3:
3623 strcpy (buf, "ldm%?da\t");
3624 break;
3625
3626 case 4:
3627 strcpy (buf, "ldm%?db\t");
3628 break;
3629
3630 case 5:
3631 if (offset >= 0)
3632 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3633 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3634 (long) offset);
3635 else
3636 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3637 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3638 (long) -offset);
3639 output_asm_insn (buf, operands);
3640 base_reg = regs[0];
3641 strcpy (buf, "ldm%?ia\t");
3642 break;
3643
3644 default:
3645 abort ();
3646 }
3647
3648 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3649 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3650
3651 for (i = 1; i < nops; i++)
3652 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3653 reg_names[regs[i]]);
3654
3655 strcat (buf, "}\t%@ phole ldm");
3656
3657 output_asm_insn (buf, operands);
3658 return "";
3659}
3660
3661int
3662store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3663 rtx * operands;
84ed5e79 3664 int nops;
62b10bbc
NC
3665 int * regs;
3666 int * base;
3667 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3668{
3669 int unsorted_regs[4];
3670 HOST_WIDE_INT unsorted_offsets[4];
3671 int order[4];
ad076f4e 3672 int base_reg = -1;
84ed5e79
RE
3673 int i;
3674
3675 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3676 extended if required. */
3677 if (nops < 2 || nops > 4)
3678 abort ();
3679
3680 /* Loop over the operands and check that the memory references are
3681 suitable (i.e. immediate offsets from the same base register). At
3682 the same time, extract the target register and the memory
3683 offsets. */
3684 for (i = 0; i < nops; i++)
3685 {
3686 rtx reg;
3687 rtx offset;
3688
56636818
JL
3689 /* Convert a subreg of a mem into the mem itself. */
3690 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3691 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3692
84ed5e79
RE
3693 if (GET_CODE (operands[nops + i]) != MEM)
3694 abort ();
3695
3696 /* Don't reorder volatile memory references; it doesn't seem worth
3697 looking for the case where the order is ok anyway. */
3698 if (MEM_VOLATILE_P (operands[nops + i]))
3699 return 0;
3700
3701 offset = const0_rtx;
3702
3703 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3704 || (GET_CODE (reg) == SUBREG
3705 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3706 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3707 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3708 == REG)
3709 || (GET_CODE (reg) == SUBREG
3710 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3711 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3712 == CONST_INT)))
3713 {
3714 if (i == 0)
3715 {
62b10bbc 3716 base_reg = REGNO (reg);
84ed5e79
RE
3717 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3718 ? REGNO (operands[i])
3719 : REGNO (SUBREG_REG (operands[i])));
3720 order[0] = 0;
3721 }
3722 else
3723 {
6354dc9b 3724 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3725 /* Not addressed from the same base register. */
3726 return 0;
3727
3728 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3729 ? REGNO (operands[i])
3730 : REGNO (SUBREG_REG (operands[i])));
3731 if (unsorted_regs[i] < unsorted_regs[order[0]])
3732 order[0] = i;
3733 }
3734
3735 /* If it isn't an integer register, then we can't do this. */
3736 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3737 return 0;
3738
3739 unsorted_offsets[i] = INTVAL (offset);
3740 }
3741 else
3742 /* Not a suitable memory address. */
3743 return 0;
3744 }
3745
3746 /* All the useful information has now been extracted from the
3747 operands into unsorted_regs and unsorted_offsets; additionally,
3748 order[0] has been set to the lowest numbered register in the
3749 list. Sort the registers into order, and check that the memory
3750 offsets are ascending and adjacent. */
3751
3752 for (i = 1; i < nops; i++)
3753 {
3754 int j;
3755
3756 order[i] = order[i - 1];
3757 for (j = 0; j < nops; j++)
3758 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3759 && (order[i] == order[i - 1]
3760 || unsorted_regs[j] < unsorted_regs[order[i]]))
3761 order[i] = j;
3762
3763 /* Have we found a suitable register? If not, one must be used more
3764 than once. */
3765 if (order[i] == order[i - 1])
3766 return 0;
3767
3768 /* Is the memory address adjacent and ascending? */
3769 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3770 return 0;
3771 }
3772
3773 if (base)
3774 {
3775 *base = base_reg;
3776
3777 for (i = 0; i < nops; i++)
3778 regs[i] = unsorted_regs[order[i]];
3779
3780 *load_offset = unsorted_offsets[order[0]];
3781 }
3782
3783 if (unsorted_offsets[order[0]] == 0)
3784 return 1; /* stmia */
3785
3786 if (unsorted_offsets[order[0]] == 4)
3787 return 2; /* stmib */
3788
3789 if (unsorted_offsets[order[nops - 1]] == 0)
3790 return 3; /* stmda */
3791
3792 if (unsorted_offsets[order[nops - 1]] == -4)
3793 return 4; /* stmdb */
3794
3795 return 0;
3796}
3797
cd2b33d0 3798const char *
84ed5e79 3799emit_stm_seq (operands, nops)
62b10bbc 3800 rtx * operands;
84ed5e79
RE
3801 int nops;
3802{
3803 int regs[4];
3804 int base_reg;
3805 HOST_WIDE_INT offset;
3806 char buf[100];
3807 int i;
3808
3809 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3810 {
3811 case 1:
3812 strcpy (buf, "stm%?ia\t");
3813 break;
3814
3815 case 2:
3816 strcpy (buf, "stm%?ib\t");
3817 break;
3818
3819 case 3:
3820 strcpy (buf, "stm%?da\t");
3821 break;
3822
3823 case 4:
3824 strcpy (buf, "stm%?db\t");
3825 break;
3826
3827 default:
3828 abort ();
3829 }
3830
3831 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3832 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3833
3834 for (i = 1; i < nops; i++)
3835 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3836 reg_names[regs[i]]);
3837
3838 strcat (buf, "}\t%@ phole stm");
3839
3840 output_asm_insn (buf, operands);
3841 return "";
3842}
3843
e2c671ba
RE
3844int
3845multi_register_push (op, mode)
0a81f500 3846 rtx op;
74bbc178 3847 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3848{
3849 if (GET_CODE (op) != PARALLEL
3850 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3851 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3852 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3853 return 0;
3854
3855 return 1;
3856}
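/* For reference (a sketch, not quoted from the machine description):
   the PARALLEL accepted here is the one built for the push-multiple
   pattern, whose first element is a SET from an UNSPEC with index 2,
   roughly

	(parallel [(set (mem ...) (unspec [(reg ...)] 2))
		   ...])

   Any other shape is rejected.  */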
ff9940b0 3857\f
d7d01975 3858/* Routines for use with attributes. */
f3bb6135 3859
31fdb4d5 3860/* Return nonzero if ATTR is a valid attribute for DECL.
d7d01975
NC
3861 ARGS are the arguments supplied with ATTR; none of the
3862 supported attributes take any arguments.
31fdb4d5
DE
3863
3864 Supported attributes:
3865
d5b7b3ae
RE
3866 naked:
3867 don't output any prologue or epilogue code; the user is assumed
3868 to do the right thing.
3869
3870 interfacearm:
3871 Always assume that this function will be entered in ARM mode,
3872 not Thumb mode, and that the caller expects control to be
3873 returned in ARM mode. */
31fdb4d5 3874int
74bbc178 3875arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3876 tree decl;
31fdb4d5
DE
3877 tree attr;
3878 tree args;
3879{
3880 if (args != NULL_TREE)
3881 return 0;
3882
3883 if (is_attribute_p ("naked", attr))
3884 return TREE_CODE (decl) == FUNCTION_DECL;
d19fb8e3 3885
d5b7b3ae
RE
3886#ifdef ARM_PE
3887 if (is_attribute_p ("interfacearm", attr))
3888 return TREE_CODE (decl) == FUNCTION_DECL;
3889#endif /* ARM_PE */
3890
31fdb4d5
DE
3891 return 0;
3892}
3893
3894/* Return non-zero if FUNC is a naked function. */
31fdb4d5
DE
3895static int
3896arm_naked_function_p (func)
3897 tree func;
3898{
3899 tree a;
3900
3901 if (TREE_CODE (func) != FUNCTION_DECL)
3902 abort ();
2e943e99 3903
31fdb4d5
DE
3904 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3905 return a != NULL_TREE;
3906}
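/* An example of how the attribute reaches this code (illustrative
   only; the function name is made up):

	void handler (void) __attribute__ ((naked));

   lookup_attribute above then finds "naked" in HANDLER's
   DECL_MACHINE_ATTRIBUTES.  */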
f3bb6135 3907\f
6354dc9b 3908/* Routines for use in generating RTL. */
f3bb6135 3909rtx
56636818 3910arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3911 in_struct_p, scalar_p)
ff9940b0
RE
3912 int base_regno;
3913 int count;
3914 rtx from;
3915 int up;
3916 int write_back;
56636818
JL
3917 int unchanging_p;
3918 int in_struct_p;
c6df88cb 3919 int scalar_p;
ff9940b0
RE
3920{
3921 int i = 0, j;
3922 rtx result;
3923 int sign = up ? 1 : -1;
56636818 3924 rtx mem;
ff9940b0 3925
d19fb8e3
NC
3926 /* XScale has load-store double instructions, but they have stricter
3927 alignment requirements than load-store multiple, so we cannot
3928 use them.
3929
3930 For XScale ldm requires 2 + NREGS cycles to complete and blocks
3931 the pipeline until completion.
3932
3933 NREGS CYCLES
3934 1 3
3935 2 4
3936 3 5
3937 4 6
3938
3939 An ldr instruction takes 1-3 cycles, but does not block the
3940 pipeline.
3941
3942 NREGS CYCLES
3943 1 1-3
3944 2 2-6
3945 3 3-9
3946 4 4-12
3947
3948 In the best case ldr will always win. However, the more ldr instructions
3949 we issue, the less likely we are to be able to schedule them well.
3950 Using ldr instructions also increases code size.
3951
3952 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
3953 for counts of 3 or 4 regs. */
3954 if (arm_is_xscale && count <= 2 && ! optimize_size)
3955 {
3956 rtx seq;
3957
3958 start_sequence ();
3959
3960 for (i = 0; i < count; i++)
3961 {
3962 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
3963 RTX_UNCHANGING_P (mem) = unchanging_p;
3964 MEM_IN_STRUCT_P (mem) = in_struct_p;
3965 MEM_SCALAR_P (mem) = scalar_p;
3966 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
3967 }
3968
3969 if (write_back)
3970 emit_move_insn (from, plus_constant (from, count * 4 * sign));
3971
3972 seq = gen_sequence ();
3973 end_sequence ();
3974
3975 return seq;
3976 }
3977
43cffd11 3978 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 3979 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 3980 if (write_back)
f3bb6135 3981 {
ff9940b0 3982 XVECEXP (result, 0, 0)
43cffd11
RE
3983 = gen_rtx_SET (GET_MODE (from), from,
3984 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3985 i = 1;
3986 count++;
f3bb6135
RE
3987 }
3988
ff9940b0 3989 for (j = 0; i < count; i++, j++)
f3bb6135 3990 {
43cffd11 3991 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3992 RTX_UNCHANGING_P (mem) = unchanging_p;
3993 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3994 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3995 XVECEXP (result, 0, i)
3996 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3997 }
3998
ff9940b0
RE
3999 return result;
4000}
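/* By way of illustration (not part of the original sources): a call
   such as arm_gen_load_multiple (0, 2, from, TRUE, TRUE, ...) on a
   non-XScale target builds

	(parallel [(set from (plus from (const_int 8)))
		   (set (reg:SI 0) (mem:SI from))
		   (set (reg:SI 1) (mem:SI (plus from (const_int 4))))])

   i.e. a two-register ascending load with write-back.  */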
4001
f3bb6135 4002rtx
56636818 4003arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 4004 in_struct_p, scalar_p)
ff9940b0
RE
4005 int base_regno;
4006 int count;
4007 rtx to;
4008 int up;
4009 int write_back;
56636818
JL
4010 int unchanging_p;
4011 int in_struct_p;
c6df88cb 4012 int scalar_p;
ff9940b0
RE
4013{
4014 int i = 0, j;
4015 rtx result;
4016 int sign = up ? 1 : -1;
56636818 4017 rtx mem;
ff9940b0 4018
d19fb8e3
NC
4019 /* See arm_gen_load_multiple for discussion of
4020 the pros/cons of ldm/stm usage for XScale. */
4021 if (arm_is_xscale && count <= 2 && ! optimize_size)
4022 {
4023 rtx seq;
4024
4025 start_sequence ();
4026
4027 for (i = 0; i < count; i++)
4028 {
4029 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4030 RTX_UNCHANGING_P (mem) = unchanging_p;
4031 MEM_IN_STRUCT_P (mem) = in_struct_p;
4032 MEM_SCALAR_P (mem) = scalar_p;
4033 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4034 }
4035
4036 if (write_back)
4037 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4038
4039 seq = gen_sequence ();
4040 end_sequence ();
4041
4042 return seq;
4043 }
4044
43cffd11 4045 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 4046 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 4047 if (write_back)
f3bb6135 4048 {
ff9940b0 4049 XVECEXP (result, 0, 0)
43cffd11
RE
4050 = gen_rtx_SET (GET_MODE (to), to,
4051 plus_constant (to, count * 4 * sign));
ff9940b0
RE
4052 i = 1;
4053 count++;
f3bb6135
RE
4054 }
4055
ff9940b0 4056 for (j = 0; i < count; i++, j++)
f3bb6135 4057 {
43cffd11 4058 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
4059 RTX_UNCHANGING_P (mem) = unchanging_p;
4060 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 4061 MEM_SCALAR_P (mem) = scalar_p;
56636818 4062
43cffd11
RE
4063 XVECEXP (result, 0, i)
4064 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
4065 }
4066
ff9940b0
RE
4067 return result;
4068}
4069
880e2516
RE
4070int
4071arm_gen_movstrqi (operands)
62b10bbc 4072 rtx * operands;
880e2516
RE
4073{
4074 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 4075 int i;
880e2516 4076 rtx src, dst;
ad076f4e 4077 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 4078 rtx part_bytes_reg = NULL;
56636818
JL
4079 rtx mem;
4080 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 4081 int dst_scalar_p, src_scalar_p;
880e2516
RE
4082
4083 if (GET_CODE (operands[2]) != CONST_INT
4084 || GET_CODE (operands[3]) != CONST_INT
4085 || INTVAL (operands[2]) > 64
4086 || INTVAL (operands[3]) & 3)
4087 return 0;
4088
4089 st_dst = XEXP (operands[0], 0);
4090 st_src = XEXP (operands[1], 0);
56636818
JL
4091
4092 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4093 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 4094 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
4095 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4096 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 4097 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 4098
880e2516
RE
4099 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4100 fin_src = src = copy_to_mode_reg (SImode, st_src);
4101
d5b7b3ae 4102 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
880e2516
RE
4103 out_words_to_go = INTVAL (operands[2]) / 4;
4104 last_bytes = INTVAL (operands[2]) & 3;
4105
4106 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 4107 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
4108
4109 for (i = 0; in_words_to_go >= 2; i+=4)
4110 {
bd9c7e23 4111 if (in_words_to_go > 4)
56636818 4112 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
4113 src_unchanging_p,
4114 src_in_struct_p,
4115 src_scalar_p));
bd9c7e23
RE
4116 else
4117 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 4118 FALSE, src_unchanging_p,
c6df88cb 4119 src_in_struct_p, src_scalar_p));
bd9c7e23 4120
880e2516
RE
4121 if (out_words_to_go)
4122 {
bd9c7e23 4123 if (out_words_to_go > 4)
56636818
JL
4124 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4125 dst_unchanging_p,
c6df88cb
MM
4126 dst_in_struct_p,
4127 dst_scalar_p));
bd9c7e23
RE
4128 else if (out_words_to_go != 1)
4129 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4130 dst, TRUE,
4131 (last_bytes == 0
56636818
JL
4132 ? FALSE : TRUE),
4133 dst_unchanging_p,
c6df88cb
MM
4134 dst_in_struct_p,
4135 dst_scalar_p));
880e2516
RE
4136 else
4137 {
43cffd11 4138 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
4139 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4140 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4141 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4142 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
4143 if (last_bytes != 0)
4144 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
4145 }
4146 }
4147
4148 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4149 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4150 }
4151
4152 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4153 if (out_words_to_go)
62b10bbc
NC
4154 {
4155 rtx sreg;
4156
4157 mem = gen_rtx_MEM (SImode, src);
4158 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4159 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4160 MEM_SCALAR_P (mem) = src_scalar_p;
4161 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4162 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4163
4164 mem = gen_rtx_MEM (SImode, dst);
4165 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4166 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4167 MEM_SCALAR_P (mem) = dst_scalar_p;
4168 emit_move_insn (mem, sreg);
4169 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4170 in_words_to_go--;
4171
4172 if (in_words_to_go) /* Sanity check */
4173 abort ();
4174 }
880e2516
RE
4175
4176 if (in_words_to_go)
4177 {
4178 if (in_words_to_go < 0)
4179 abort ();
4180
43cffd11 4181 mem = gen_rtx_MEM (SImode, src);
56636818
JL
4182 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4183 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 4184 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 4185 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
4186 }
4187
d5b7b3ae
RE
4188 if (last_bytes && part_bytes_reg == NULL)
4189 abort ();
4190
880e2516
RE
4191 if (BYTES_BIG_ENDIAN && last_bytes)
4192 {
4193 rtx tmp = gen_reg_rtx (SImode);
4194
6354dc9b 4195 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
4196 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4197 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
4198 part_bytes_reg = tmp;
4199
4200 while (last_bytes)
4201 {
43cffd11 4202 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
4203 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4204 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4205 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4206 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 4207
880e2516
RE
4208 if (--last_bytes)
4209 {
4210 tmp = gen_reg_rtx (SImode);
4211 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4212 part_bytes_reg = tmp;
4213 }
4214 }
4215
4216 }
4217 else
4218 {
d5b7b3ae 4219 if (last_bytes > 1)
880e2516 4220 {
d5b7b3ae 4221 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
4222 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4223 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4224 MEM_SCALAR_P (mem) = dst_scalar_p;
d5b7b3ae
RE
4225 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4226 last_bytes -= 2;
4227 if (last_bytes)
880e2516
RE
4228 {
4229 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 4230
d5b7b3ae
RE
4231 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4232 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
4233 part_bytes_reg = tmp;
4234 }
4235 }
d5b7b3ae
RE
4236
4237 if (last_bytes)
4238 {
4239 mem = gen_rtx_MEM (QImode, dst);
4240 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4241 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4242 MEM_SCALAR_P (mem) = dst_scalar_p;
4243 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4244 }
880e2516
RE
4245 }
4246
4247 return 1;
4248}
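/* A worked example (added for clarity; assumes a word-aligned copy,
   little-endian, non-XScale): for a 10-byte copy,
   INTVAL (operands[2]) == 10 gives in_words_to_go == 3,
   out_words_to_go == 2 and last_bytes == 2.  The loop loads three
   words into r0-r2, stores two of them with write-back, and the
   remaining two bytes are then stored from part_bytes_reg (r2) as a
   single HImode store.  */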
4249
5165176d
RE
4250/* Generate a memory reference for a half word, such that it will be loaded
4251 into the top 16 bits of the word. We can assume that the address is
4252 known to be alignable and of the form reg, or plus (reg, const). */
4253rtx
d5b7b3ae 4254arm_gen_rotated_half_load (memref)
5165176d
RE
4255 rtx memref;
4256{
4257 HOST_WIDE_INT offset = 0;
4258 rtx base = XEXP (memref, 0);
4259
4260 if (GET_CODE (base) == PLUS)
4261 {
4262 offset = INTVAL (XEXP (base, 1));
4263 base = XEXP (base, 0);
4264 }
4265
956d6950 4266 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 4267 if (TARGET_MMU_TRAPS
5165176d
RE
4268 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4269 return NULL;
4270
43cffd11 4271 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
4272
4273 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4274 return base;
4275
43cffd11 4276 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
4277}
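/* Two illustrative cases (little-endian): for an address offset of 0
   the halfword lives in the bottom of the loaded word, so the result
   is (rotate (mem:SI base) 16); for an offset of 2 it is already in
   the top half and the plain (mem:SI base) is returned.  */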
4278
1646cf41
RE
4279/* Select a dominance comparison mode if possible. We support three forms.
4280 COND_OR == 0 => (X && Y)
4281 COND_OR == 1 => ((!X) || Y)
4282 COND_OR == 2 => (X || Y)
4283 If we are unable to support a dominance comparison we return CCmode.
4284 This will then fail to match for the RTL expressions that generate this
4285 call. */
d19fb8e3 4286
84ed5e79 4287static enum machine_mode
74bbc178 4288select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
4289 rtx x;
4290 rtx y;
4291 HOST_WIDE_INT cond_or;
4292{
4293 enum rtx_code cond1, cond2;
4294 int swapped = 0;
4295
4296 /* Currently we will probably get the wrong result if the individual
4297 comparisons are not simple. This also ensures that it is safe to
956d6950 4298 reverse a comparison if necessary. */
84ed5e79
RE
4299 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4300 != CCmode)
4301 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4302 != CCmode))
4303 return CCmode;
4304
1646cf41
RE
4305 /* The if_then_else variant of this tests the second condition if the
4306 first passes, but is true if the first fails. Reverse the first
4307 condition to get a true "inclusive-or" expression. */
4308 if (cond_or == 1)
84ed5e79
RE
4309 cond1 = reverse_condition (cond1);
4310
4311 /* If the comparisons are not equal, and one doesn't dominate the other,
4312 then we can't do this. */
4313 if (cond1 != cond2
5895f793
RE
4314 && !comparison_dominates_p (cond1, cond2)
4315 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
4316 return CCmode;
4317
4318 if (swapped)
4319 {
4320 enum rtx_code temp = cond1;
4321 cond1 = cond2;
4322 cond2 = temp;
4323 }
4324
4325 switch (cond1)
4326 {
4327 case EQ:
5895f793 4328 if (cond2 == EQ || !cond_or)
84ed5e79
RE
4329 return CC_DEQmode;
4330
4331 switch (cond2)
4332 {
4333 case LE: return CC_DLEmode;
4334 case LEU: return CC_DLEUmode;
4335 case GE: return CC_DGEmode;
4336 case GEU: return CC_DGEUmode;
ad076f4e 4337 default: break;
84ed5e79
RE
4338 }
4339
4340 break;
4341
4342 case LT:
5895f793 4343 if (cond2 == LT || !cond_or)
84ed5e79
RE
4344 return CC_DLTmode;
4345 if (cond2 == LE)
4346 return CC_DLEmode;
4347 if (cond2 == NE)
4348 return CC_DNEmode;
4349 break;
4350
4351 case GT:
5895f793 4352 if (cond2 == GT || !cond_or)
84ed5e79
RE
4353 return CC_DGTmode;
4354 if (cond2 == GE)
4355 return CC_DGEmode;
4356 if (cond2 == NE)
4357 return CC_DNEmode;
4358 break;
4359
4360 case LTU:
5895f793 4361 if (cond2 == LTU || !cond_or)
84ed5e79
RE
4362 return CC_DLTUmode;
4363 if (cond2 == LEU)
4364 return CC_DLEUmode;
4365 if (cond2 == NE)
4366 return CC_DNEmode;
4367 break;
4368
4369 case GTU:
5895f793 4370 if (cond2 == GTU || !cond_or)
84ed5e79
RE
4371 return CC_DGTUmode;
4372 if (cond2 == GEU)
4373 return CC_DGEUmode;
4374 if (cond2 == NE)
4375 return CC_DNEmode;
4376 break;
4377
4378 /* The remaining cases only occur when both comparisons are the
4379 same. */
4380 case NE:
4381 return CC_DNEmode;
4382
4383 case LE:
4384 return CC_DLEmode;
4385
4386 case GE:
4387 return CC_DGEmode;
4388
4389 case LEU:
4390 return CC_DLEUmode;
4391
4392 case GEU:
4393 return CC_DGEUmode;
ad076f4e
RE
4394
4395 default:
4396 break;
84ed5e79
RE
4397 }
4398
4399 abort ();
4400}
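/* An example (for exposition only): for the conjunction
   (a < b) && (c < d) both comparisons are signed LT, so COND_OR == 0
   and CC_DLTmode is returned; the insn patterns can then implement
   the pair as a compare followed by a conditional compare.  */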
4401
4402enum machine_mode
4403arm_select_cc_mode (op, x, y)
4404 enum rtx_code op;
4405 rtx x;
4406 rtx y;
4407{
4408 /* All floating point compares return CCFP if it is an equality
4409 comparison, and CCFPE otherwise. */
4410 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
4411 {
4412 switch (op)
4413 {
4414 case EQ:
4415 case NE:
4416 case UNORDERED:
4417 case ORDERED:
4418 case UNLT:
4419 case UNLE:
4420 case UNGT:
4421 case UNGE:
4422 case UNEQ:
4423 case LTGT:
4424 return CCFPmode;
4425
4426 case LT:
4427 case LE:
4428 case GT:
4429 case GE:
4430 return CCFPEmode;
4431
4432 default:
4433 abort ();
4434 }
4435 }
84ed5e79
RE
4436
4437 /* A compare with a shifted operand. Because of canonicalization, the
4438 comparison will have to be swapped when we emit the assembler. */
4439 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4440 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4441 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4442 || GET_CODE (x) == ROTATERT))
4443 return CC_SWPmode;
4444
956d6950
JL
4445 /* This is a special case that is used by combine to allow a
4446 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 4447 followed by a comparison of the shifted integer (only valid for
956d6950 4448 equalities and unsigned inequalities). */
84ed5e79
RE
4449 if (GET_MODE (x) == SImode
4450 && GET_CODE (x) == ASHIFT
4451 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4452 && GET_CODE (XEXP (x, 0)) == SUBREG
4453 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4454 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4455 && (op == EQ || op == NE
4456 || op == GEU || op == GTU || op == LTU || op == LEU)
4457 && GET_CODE (y) == CONST_INT)
4458 return CC_Zmode;
4459
1646cf41
RE
4460 /* A construct for a conditional compare: if the false arm contains
4461 0, then both conditions must be true; otherwise either condition
4462 must be true. Not all conditions are possible, so CCmode is
4463 returned if it can't be done. */
4464 if (GET_CODE (x) == IF_THEN_ELSE
4465 && (XEXP (x, 2) == const0_rtx
4466 || XEXP (x, 2) == const1_rtx)
4467 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4468 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4469 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4470 INTVAL (XEXP (x, 2)));
4471
4472 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4473 if (GET_CODE (x) == AND
4474 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4475 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4476 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4477
4478 if (GET_CODE (x) == IOR
4479 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4480 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4481 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4482
84ed5e79
RE
4483 /* For an operation that sets the condition codes as a side-effect, the
4484 V flag is not set correctly, so we can only use comparisons where
4485 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4486 instead.) */
4487 if (GET_MODE (x) == SImode
4488 && y == const0_rtx
4489 && (op == EQ || op == NE || op == LT || op == GE)
4490 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4491 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4492 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4493 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4494 || GET_CODE (x) == LSHIFTRT
4495 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4496 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4497 return CC_NOOVmode;
4498
84ed5e79
RE
4499 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4500 return CC_Zmode;
4501
bd9c7e23
RE
4502 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4503 && GET_CODE (x) == PLUS
4504 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4505 return CC_Cmode;
4506
84ed5e79
RE
4507 return CCmode;
4508}
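/* For instance (an added note): the unsigned overflow test
   "if (a + b < a)" compares (plus a b) against A using LTU, which the
   PLUS test just above the final return maps to CC_Cmode -- the carry
   flag set by the addition is all that is needed.  */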
4509
ff9940b0
RE
4510/* X and Y are two things to compare using CODE. Emit the compare insn and
4511 return the rtx for the CC register in the proper mode, as chosen
4512 by SELECT_CC_MODE. */
4513
4514rtx
d5b7b3ae 4515arm_gen_compare_reg (code, x, y)
ff9940b0
RE
4516 enum rtx_code code;
4517 rtx x, y;
4518{
4519 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 4520 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 4521
43cffd11
RE
4522 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4523 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4524
4525 return cc_reg;
4526}
4527
0a81f500
RE
4528void
4529arm_reload_in_hi (operands)
62b10bbc 4530 rtx * operands;
0a81f500 4531{
f9cc092a
RE
4532 rtx ref = operands[1];
4533 rtx base, scratch;
4534 HOST_WIDE_INT offset = 0;
4535
4536 if (GET_CODE (ref) == SUBREG)
4537 {
4538 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4539 if (BYTES_BIG_ENDIAN)
4540 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4541 - MIN (UNITS_PER_WORD,
4542 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4543 ref = SUBREG_REG (ref);
4544 }
4545
4546 if (GET_CODE (ref) == REG)
4547 {
4548 /* We have a pseudo which has been spilled onto the stack; there
4549 are two cases here: the first where there is a simple
4550 stack-slot replacement and a second where the stack-slot is
4551 out of range, or is used as a subreg. */
4552 if (reg_equiv_mem[REGNO (ref)])
4553 {
4554 ref = reg_equiv_mem[REGNO (ref)];
4555 base = find_replacement (&XEXP (ref, 0));
4556 }
4557 else
6354dc9b 4558 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4559 base = reg_equiv_address[REGNO (ref)];
4560 }
4561 else
4562 base = find_replacement (&XEXP (ref, 0));
0a81f500 4563
e5e809f4
JL
4564 /* Handle the case where the address is too complex to be offset by 1. */
4565 if (GET_CODE (base) == MINUS
4566 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4567 {
f9cc092a 4568 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 4569
43cffd11 4570 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
4571 base = base_plus;
4572 }
f9cc092a
RE
4573 else if (GET_CODE (base) == PLUS)
4574 {
6354dc9b 4575 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4576 HOST_WIDE_INT hi, lo;
4577
4578 offset += INTVAL (XEXP (base, 1));
4579 base = XEXP (base, 0);
4580
6354dc9b 4581 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4582 /* Valid range for lo is -4095 -> 4095. */
4583 lo = (offset >= 0
4584 ? (offset & 0xfff)
4585 : -((-offset) & 0xfff));
4586
4587 /* Corner case: if lo is the max offset then we would be out of range
4588 once we have added the additional 1 below, so bump the msb into the
4589 pre-loading insn(s). */
4590 if (lo == 4095)
4591 lo &= 0x7ff;
4592
e5951263
NC
4593 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4594 ^ HOST_INT (0x80000000))
4595 - HOST_INT (0x80000000));
f9cc092a
RE
4596
4597 if (hi + lo != offset)
4598 abort ();
4599
4600 if (hi != 0)
4601 {
4602 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4603
4604 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4605 that require more than one insn. */
f9cc092a
RE
4606 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4607 base = base_plus;
4608 offset = lo;
4609 }
4610 }
e5e809f4 4611
f9cc092a
RE
4612 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4613 emit_insn (gen_zero_extendqisi2 (scratch,
4614 gen_rtx_MEM (QImode,
4615 plus_constant (base,
4616 offset))));
43cffd11
RE
4617 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4618 gen_rtx_MEM (QImode,
f9cc092a
RE
4619 plus_constant (base,
4620 offset + 1))));
5895f793 4621 if (!BYTES_BIG_ENDIAN)
43cffd11
RE
4622 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4623 gen_rtx_IOR (SImode,
4624 gen_rtx_ASHIFT
4625 (SImode,
4626 gen_rtx_SUBREG (SImode, operands[0], 0),
4627 GEN_INT (8)),
f9cc092a 4628 scratch)));
0a81f500 4629 else
43cffd11
RE
4630 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4631 gen_rtx_IOR (SImode,
f9cc092a 4632 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
4633 GEN_INT (8)),
4634 gen_rtx_SUBREG (SImode, operands[0],
4635 0))));
0a81f500
RE
4636}
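/* A sketch of the little-endian output (register numbers invented):

	ldrb	r2, [rb, #off]		@ scratch <- low byte
	ldrb	r0, [rb, #off + 1]	@ dest    <- high byte
	orr	r0, r2, r0, lsl #8	@ combine the two halves

   On big-endian targets the roles of the two bytes are swapped.  */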
4637
f9cc092a
RE
4638/* Handle storing a half-word to memory during reload by synthesising it
4639 as two byte stores. Take care not to clobber the input values until after we
4640 have moved them somewhere safe. This code assumes that if the DImode
4641 scratch in operands[2] overlaps either the input value or output address
4642 in some way, then that value must die in this insn (we absolutely need
4643 two scratch registers for some corner cases). */
f3bb6135 4644void
af48348a 4645arm_reload_out_hi (operands)
62b10bbc 4646 rtx * operands;
af48348a 4647{
f9cc092a
RE
4648 rtx ref = operands[0];
4649 rtx outval = operands[1];
4650 rtx base, scratch;
4651 HOST_WIDE_INT offset = 0;
4652
4653 if (GET_CODE (ref) == SUBREG)
4654 {
4655 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4656 if (BYTES_BIG_ENDIAN)
4657 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4658 - MIN (UNITS_PER_WORD,
4659 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4660 ref = SUBREG_REG (ref);
4661 }
4662
4663
4664 if (GET_CODE (ref) == REG)
4665 {
4666 /* We have a pseudo which has been spilled onto the stack; there
4667 are two cases here: the first where there is a simple
4668 stack-slot replacement and a second where the stack-slot is
4669 out of range, or is used as a subreg. */
4670 if (reg_equiv_mem[REGNO (ref)])
4671 {
4672 ref = reg_equiv_mem[REGNO (ref)];
4673 base = find_replacement (&XEXP (ref, 0));
4674 }
4675 else
6354dc9b 4676 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4677 base = reg_equiv_address[REGNO (ref)];
4678 }
4679 else
4680 base = find_replacement (&XEXP (ref, 0));
4681
4682 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4683
4684 /* Handle the case where the address is too complex to be offset by 1. */
4685 if (GET_CODE (base) == MINUS
4686 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4687 {
4688 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4689
4690 /* Be careful not to destroy OUTVAL. */
4691 if (reg_overlap_mentioned_p (base_plus, outval))
4692 {
4693 /* Updating base_plus might destroy outval; see if we can
4694 swap the scratch and base_plus. */
5895f793 4695 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4696 {
4697 rtx tmp = scratch;
4698 scratch = base_plus;
4699 base_plus = tmp;
4700 }
4701 else
4702 {
4703 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4704
4705 /* Be conservative and copy OUTVAL into the scratch now;
4706 this should only be necessary if outval is a subreg
4707 of something larger than a word. */
4708 /* XXX Might this clobber base? I can't see how it can,
4709 since scratch is known to overlap with OUTVAL, and
4710 must be wider than a word. */
4711 emit_insn (gen_movhi (scratch_hi, outval));
4712 outval = scratch_hi;
4713 }
4714 }
4715
4716 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4717 base = base_plus;
4718 }
4719 else if (GET_CODE (base) == PLUS)
4720 {
6354dc9b 4721 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4722 HOST_WIDE_INT hi, lo;
4723
4724 offset += INTVAL (XEXP (base, 1));
4725 base = XEXP (base, 0);
4726
6354dc9b 4727 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4728 /* Valid range for lo is -4095 -> 4095. */
4729 lo = (offset >= 0
4730 ? (offset & 0xfff)
4731 : -((-offset) & 0xfff));
4732
4733 /* Corner case: if lo is the max offset then we would be out of range
4734 once we have added the additional 1 below, so bump the msb into the
4735 pre-loading insn(s). */
4736 if (lo == 4095)
4737 lo &= 0x7ff;
4738
e5951263
NC
4739 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4740 ^ HOST_INT (0x80000000))
5895f793 4741 - HOST_INT (0x80000000));
f9cc092a
RE
4742
4743 if (hi + lo != offset)
4744 abort ();
4745
4746 if (hi != 0)
4747 {
4748 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4749
4750 /* Be careful not to destroy OUTVAL. */
4751 if (reg_overlap_mentioned_p (base_plus, outval))
4752 {
4753 /* Updating base_plus might destroy outval; see if we
4754 can swap the scratch and base_plus. */
5895f793 4755 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4756 {
4757 rtx tmp = scratch;
4758 scratch = base_plus;
4759 base_plus = tmp;
4760 }
4761 else
4762 {
4763 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4764
4765 /* Be conservative and copy outval into scratch now;
4766 this should only be necessary if outval is a
4767 subreg of something larger than a word. */
4768 /* XXX Might this clobber base? I can't see how it
4769 can, since scratch is known to overlap with
4770 outval. */
4771 emit_insn (gen_movhi (scratch_hi, outval));
4772 outval = scratch_hi;
4773 }
4774 }
4775
4776 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4777 that require more than one insn. */
f9cc092a
RE
4778 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4779 base = base_plus;
4780 offset = lo;
4781 }
4782 }
af48348a 4783
b5cc037f
RE
4784 if (BYTES_BIG_ENDIAN)
4785 {
f9cc092a
RE
4786 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4787 plus_constant (base, offset + 1)),
4788 gen_rtx_SUBREG (QImode, outval, 0)));
4789 emit_insn (gen_lshrsi3 (scratch,
4790 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4791 GEN_INT (8)));
f9cc092a
RE
4792 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4793 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f
RE
4794 }
4795 else
4796 {
f9cc092a
RE
4797 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4798 gen_rtx_SUBREG (QImode, outval, 0)));
4799 emit_insn (gen_lshrsi3 (scratch,
4800 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4801 GEN_INT (8)));
f9cc092a
RE
4802 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4803 plus_constant (base, offset + 1)),
4804 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f 4805 }
af48348a 4806}
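/* The little-endian case above amounts to (a sketch only; register
   numbers invented):

	strb	r1, [rb, #off]		@ store the low byte
	mov	r2, r1, lsr #8		@ scratch <- high byte
	strb	r2, [rb, #off + 1]	@ store the high byte

   so only the single SImode scratch is needed once the address has
   been legitimized.  */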
2b835d68 4807\f
d5b7b3ae
RE
4808/* Print a symbolic form of X to the debug file, F. */
4809static void
4810arm_print_value (f, x)
4811 FILE * f;
4812 rtx x;
4813{
4814 switch (GET_CODE (x))
4815 {
4816 case CONST_INT:
4817 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
4818 return;
4819
4820 case CONST_DOUBLE:
4821 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
4822 return;
4823
4824 case CONST_STRING:
4825 fprintf (f, "\"%s\"", XSTR (x, 0));
4826 return;
4827
4828 case SYMBOL_REF:
4829 fprintf (f, "`%s'", XSTR (x, 0));
4830 return;
4831
4832 case LABEL_REF:
4833 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
4834 return;
4835
4836 case CONST:
4837 arm_print_value (f, XEXP (x, 0));
4838 return;
4839
4840 case PLUS:
4841 arm_print_value (f, XEXP (x, 0));
4842 fprintf (f, "+");
4843 arm_print_value (f, XEXP (x, 1));
4844 return;
4845
4846 case PC:
4847 fprintf (f, "pc");
4848 return;
4849
4850 default:
4851 fprintf (f, "????");
4852 return;
4853 }
4854}
4855\f
2b835d68 4856/* Routines for manipulation of the constant pool. */
2b835d68 4857
949d79eb
RE
4858/* Arm instructions cannot load a large constant directly into a
4859 register; they have to come from a pc relative load. The constant
4860 must therefore be placed in the addressable range of the pc
4861 relative load. Depending on the precise pc relative load
4862 instruction the range is somewhere between 256 bytes and 4k. This
4863 means that we often have to dump a constant inside a function, and
2b835d68
RE
4864 generate code to branch around it.
4865
949d79eb
RE
4866 It is important to minimize this, since the branches will slow
4867 things down and make the code larger.
2b835d68 4868
949d79eb
RE
4869 Normally we can hide the table after an existing unconditional
4870 branch so that there is no interruption of the flow, but in the
4871 worst case the code looks like this:
2b835d68
RE
4872
4873 ldr rn, L1
949d79eb 4874 ...
2b835d68
RE
4875 b L2
4876 align
4877 L1: .long value
4878 L2:
949d79eb 4879 ...
2b835d68 4880
2b835d68 4881 ldr rn, L3
949d79eb 4882 ...
2b835d68
RE
4883 b L4
4884 align
2b835d68
RE
4885 L3: .long value
4886 L4:
949d79eb
RE
4887 ...
4888
4889 We fix this by performing a scan after scheduling, which notices
4890 which instructions need to have their operands fetched from the
4891 constant table and builds the table.
4892
4893 The algorithm starts by building a table of all the constants that
4894 need fixing up and all the natural barriers in the function (places
4895 where a constant table can be dropped without breaking the flow).
4896 For each fixup we note how far the pc-relative replacement will be
4897 able to reach and the offset of the instruction into the function.
4898
4899 Having built the table we then group the fixes together to form
4900 tables that are as large as possible (subject to addressing
4901 constraints) and emit each table of constants after the last
4902 barrier that is within range of all the instructions in the group.
4903 If a group does not contain a barrier, then we forcibly create one
4904 by inserting a jump instruction into the flow. Once the table has
4905 been inserted, the insns are then modified to reference the
4906 relevant entry in the pool.
4907
6354dc9b 4908 Possible enhancements to the algorithm (not implemented) are:
949d79eb 4909
d5b7b3ae 4910 1) For some processors and object formats, there may be benefit in
949d79eb
RE
4911 aligning the pools to the start of cache lines; this alignment
4912 would need to be taken into account when calculating addressability
6354dc9b 4913 of a pool. */
2b835d68 4914
d5b7b3ae
RE
4915/* These typedefs are located at the start of this file, so that
4916 they can be used in the prototypes there. This comment is to
4917 remind readers of that fact so that the following structures
4918 can be understood more easily.
4919
4920 typedef struct minipool_node Mnode;
4921 typedef struct minipool_fixup Mfix; */
4922
4923struct minipool_node
4924{
4925 /* Doubly linked chain of entries. */
4926 Mnode * next;
4927 Mnode * prev;
4928 /* The maximum offset into the code at which this entry can be placed. While
4929 pushing fixes for forward references, all entries are sorted in order
4930 of increasing max_address. */
4931 HOST_WIDE_INT max_address;
4932 /* Similarly for an entry inserted for a backwards ref. */
4933 HOST_WIDE_INT min_address;
4934 /* The number of fixes referencing this entry. This can become zero
4935 if we "unpush" an entry. In this case we ignore the entry when we
4936 come to emit the code. */
4937 int refcount;
4938 /* The offset from the start of the minipool. */
4939 HOST_WIDE_INT offset;
4940 /* The value in the table. */
4941 rtx value;
4942 /* The mode of value. */
4943 enum machine_mode mode;
4944 int fix_size; /* Size in the pool, in bytes (padded to at least 4). */
4945};
4946
4947struct minipool_fixup
2b835d68 4948{
d5b7b3ae
RE
4949 Mfix * next; /* Singly linked chain of fixes. */
4950 rtx insn; /* The insn requiring the constant. */
4951 HOST_WIDE_INT address; /* The address of that insn. */
4952 rtx * loc; /* Location of the operand to patch. */
4953 enum machine_mode mode; /* The mode of the constant. */
4954 int fix_size; /* Its size in the pool, in bytes. */
4955 rtx value; /* The constant itself. */
4956 Mnode * minipool; /* The pool entry serving this fix. */
4957 HOST_WIDE_INT forwards; /* Maximum forward reach of the insn. */
4958 HOST_WIDE_INT backwards; /* Maximum backward reach of the insn. */
4959};
2b835d68 4960
d5b7b3ae
RE
4961/* Fixes less than a word need padding out to a word boundary. */
4962#define MINIPOOL_FIX_SIZE(mode) \
4963 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
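/* For example, MINIPOOL_FIX_SIZE (HImode) is 4 (padded up from 2),
   while MINIPOOL_FIX_SIZE (DImode) is 8.  */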
2b835d68 4964
d5b7b3ae
RE
4965static Mnode * minipool_vector_head; /* First entry in the current pool. */
4966static Mnode * minipool_vector_tail; /* Last entry in the current pool. */
4967static rtx minipool_vector_label; /* Label placed at the head of the pool. */
332072db 4968
d5b7b3ae
RE
4969/* The linked list of all minipool fixes required for this function. */
4970Mfix * minipool_fix_head;
4971Mfix * minipool_fix_tail;
4972/* The fix entry for the current minipool, once it has been placed. */
4973Mfix * minipool_barrier;
4974
4975/* Determines if INSN is the start of a jump table. Returns the end
4976 of the TABLE or NULL_RTX. */
4977static rtx
4978is_jump_table (insn)
4979 rtx insn;
2b835d68 4980{
d5b7b3ae 4981 rtx table;
da6558fd 4982
d5b7b3ae
RE
4983 if (GET_CODE (insn) == JUMP_INSN
4984 && JUMP_LABEL (insn) != NULL
4985 && ((table = next_real_insn (JUMP_LABEL (insn)))
4986 == next_real_insn (insn))
4987 && table != NULL
4988 && GET_CODE (table) == JUMP_INSN
4989 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4990 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
4991 return table;
4992
4993 return NULL_RTX;
2b835d68
RE
4994}
4995
d5b7b3ae
RE
4996static HOST_WIDE_INT
4997get_jump_table_size (insn)
4998 rtx insn;
2b835d68 4999{
d5b7b3ae
RE
5000 rtx body = PATTERN (insn);
5001 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 5002
d5b7b3ae
RE
5003 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5004}
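/* For instance (an added note): an ADDR_DIFF_VEC in SImode with eight
   entries occupies 4 * 8 == 32 bytes of table.  */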
2b835d68 5005
d5b7b3ae
RE
5006/* Move a minipool fix MP from its current location to before MAX_MP.
5007 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5008 constraints may need updating. */
5009static Mnode *
5010move_minipool_fix_forward_ref (mp, max_mp, max_address)
5011 Mnode * mp;
5012 Mnode * max_mp;
5013 HOST_WIDE_INT max_address;
5014{
5015 /* This should never be true and the code below assumes these are
5016 different. */
5017 if (mp == max_mp)
5018 abort ();
5019
5020 if (max_mp == NULL)
5021 {
5022 if (max_address < mp->max_address)
5023 mp->max_address = max_address;
5024 }
5025 else
2b835d68 5026 {
d5b7b3ae
RE
5027 if (max_address > max_mp->max_address - mp->fix_size)
5028 mp->max_address = max_mp->max_address - mp->fix_size;
5029 else
5030 mp->max_address = max_address;
2b835d68 5031
d5b7b3ae
RE
5032 /* Unlink MP from its current position. Since max_mp is non-null,
5033 mp->prev must be non-null. */
5034 mp->prev->next = mp->next;
5035 if (mp->next != NULL)
5036 mp->next->prev = mp->prev;
5037 else
5038 minipool_vector_tail = mp->prev;
2b835d68 5039
d5b7b3ae
RE
5040 /* Re-insert it before MAX_MP. */
5041 mp->next = max_mp;
5042 mp->prev = max_mp->prev;
5043 max_mp->prev = mp;
5044
5045 if (mp->prev != NULL)
5046 mp->prev->next = mp;
5047 else
5048 minipool_vector_head = mp;
5049 }
2b835d68 5050
d5b7b3ae
RE
5051 /* Save the new entry. */
5052 max_mp = mp;
5053
5054 /* Scan over the preceding entries and adjust their addresses as
5055 required. */
5056 while (mp->prev != NULL
5057 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5058 {
5059 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5060 mp = mp->prev;
2b835d68
RE
5061 }
5062
d5b7b3ae 5063 return max_mp;
2b835d68
RE
5064}
5065
d5b7b3ae
RE
5066/* Add a constant to the minipool for a forward reference. Returns the
5067 node added or NULL if the constant will not fit in this pool. */
5068static Mnode *
5069add_minipool_forward_ref (fix)
5070 Mfix * fix;
5071{
5072 /* If set, max_mp is the first pool_entry that has a lower
5073 constraint than the one we are trying to add. */
5074 Mnode * max_mp = NULL;
5075 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5076 Mnode * mp;
5077
5078 /* If this fix's address is greater than the address of the first
5079 entry, then we can't put the fix in this pool. We subtract the
5080 size of the current fix to ensure that if the table is fully
5081 packed we still have enough room to insert this value by shuffling
5082 the other fixes forwards. */
5083 if (minipool_vector_head &&
5084 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5085 return NULL;
2b835d68 5086
d5b7b3ae
RE
5087 /* Scan the pool to see if a constant with the same value has
5088 already been added. While we are doing this, also note the
5089 location where we must insert the constant if it doesn't already
5090 exist. */
5091 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5092 {
5093 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5094 && fix->mode == mp->mode
5095 && (GET_CODE (fix->value) != CODE_LABEL
5096 || (CODE_LABEL_NUMBER (fix->value)
5097 == CODE_LABEL_NUMBER (mp->value)))
5098 && rtx_equal_p (fix->value, mp->value))
5099 {
5100 /* More than one fix references this entry. */
5101 mp->refcount++;
5102 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5103 }
5104
5105 /* Note the insertion point if necessary. */
5106 if (max_mp == NULL
5107 && mp->max_address > max_address)
5108 max_mp = mp;
5109 }
5110
5111 /* The value is not currently in the minipool, so we need to create
5112 a new entry for it. If MAX_MP is NULL, the entry will be put on
5113 the end of the list since the placement is less constrained than
5114 any existing entry. Otherwise, we insert the new fix before
5115 MAX_MP and, if necessary, adjust the constraints on the other
5116 entries. */
5117 mp = xmalloc (sizeof (* mp));
5118 mp->fix_size = fix->fix_size;
5119 mp->mode = fix->mode;
5120 mp->value = fix->value;
5121 mp->refcount = 1;
5122 /* Not yet required for a backwards ref. */
5123 mp->min_address = -65536;
5124
5125 if (max_mp == NULL)
5126 {
5127 mp->max_address = max_address;
5128 mp->next = NULL;
5129 mp->prev = minipool_vector_tail;
5130
5131 if (mp->prev == NULL)
5132 {
5133 minipool_vector_head = mp;
5134 minipool_vector_label = gen_label_rtx ();
7551cbc7 5135 }
2b835d68 5136 else
d5b7b3ae 5137 mp->prev->next = mp;
2b835d68 5138
d5b7b3ae
RE
5139 minipool_vector_tail = mp;
5140 }
5141 else
5142 {
5143 if (max_address > max_mp->max_address - mp->fix_size)
5144 mp->max_address = max_mp->max_address - mp->fix_size;
5145 else
5146 mp->max_address = max_address;
5147
5148 mp->next = max_mp;
5149 mp->prev = max_mp->prev;
5150 max_mp->prev = mp;
5151 if (mp->prev != NULL)
5152 mp->prev->next = mp;
5153 else
5154 minipool_vector_head = mp;
5155 }
5156
5157 /* Save the new entry. */
5158 max_mp = mp;
5159
5160 /* Scan over the preceding entries and adjust their addresses as
5161 required. */
5162 while (mp->prev != NULL
5163 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5164 {
5165 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5166 mp = mp->prev;
2b835d68
RE
5167 }
5168
d5b7b3ae
RE
5169 return max_mp;
5170}
5171
5172static Mnode *
5173move_minipool_fix_backward_ref (mp, min_mp, min_address)
5174 Mnode * mp;
5175 Mnode * min_mp;
5176 HOST_WIDE_INT min_address;
5177{
5178 HOST_WIDE_INT offset;
5179
5180 /* This should never be true, and the code below assumes these are
5181 different. */
5182 if (mp == min_mp)
5183 abort ();
5184
5185 if (min_mp == NULL)
2b835d68 5186 {
d5b7b3ae
RE
5187 if (min_address > mp->min_address)
5188 mp->min_address = min_address;
5189 }
5190 else
5191 {
5192 /* We will adjust this below if it is too loose. */
5193 mp->min_address = min_address;
5194
5195 /* Unlink MP from its current position. Since min_mp is non-null,
5196 mp->next must be non-null. */
5197 mp->next->prev = mp->prev;
5198 if (mp->prev != NULL)
5199 mp->prev->next = mp->next;
5200 else
5201 minipool_vector_head = mp->next;
5202
5203 /* Reinsert it after MIN_MP. */
5204 mp->prev = min_mp;
5205 mp->next = min_mp->next;
5206 min_mp->next = mp;
5207 if (mp->next != NULL)
5208 mp->next->prev = mp;
2b835d68 5209 else
d5b7b3ae
RE
5210 minipool_vector_tail = mp;
5211 }
5212
5213 min_mp = mp;
5214
5215 offset = 0;
5216 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5217 {
5218 mp->offset = offset;
5219 if (mp->refcount > 0)
5220 offset += mp->fix_size;
5221
5222 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5223 mp->next->min_address = mp->min_address + mp->fix_size;
5224 }
5225
5226 return min_mp;
5227}
5228
5229/* Add a constant to the minipool for a backward reference. Returns the
5230 node added or NULL if the constant will not fit in this pool.
5231
5232 Note that the code for insertion for a backwards reference can be
5233 somewhat confusing because the calculated offsets for each fix do
5234 not take into account the size of the pool (which is still under
5235 construction). */
5236static Mnode *
5237add_minipool_backward_ref (fix)
5238 Mfix * fix;
5239{
5240 /* If set, min_mp is the last pool_entry that has a lower constraint
5241 than the one we are trying to add. */
5242 Mnode * min_mp = NULL;
5243 /* This can be negative, since it is only a constraint. */
5244 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5245 Mnode * mp;
5246
5247 /* If we can't reach the current pool from this insn, or if we can't
5248 insert this entry at the end of the pool without pushing other
5249 fixes out of range, then we don't try. This ensures that we
5250 can't fail later on. */
5251 if (min_address >= minipool_barrier->address
5252 || (minipool_vector_tail->min_address + fix->fix_size
5253 >= minipool_barrier->address))
5254 return NULL;
5255
5256 /* Scan the pool to see if a constant with the same value has
5257 already been added. While we are doing this, also note the
5258 location where we must insert the constant if it doesn't already
5259 exist. */
5260 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5261 {
5262 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5263 && fix->mode == mp->mode
5264 && (GET_CODE (fix->value) != CODE_LABEL
5265 || (CODE_LABEL_NUMBER (fix->value)
5266 == CODE_LABEL_NUMBER (mp->value)))
5267 && rtx_equal_p (fix->value, mp->value)
5268 /* Check that there is enough slack to move this entry to the
5269 end of the table (this is conservative). */
5270 && (mp->max_address
5271 > (minipool_barrier->address
5272 + minipool_vector_tail->offset
5273 + minipool_vector_tail->fix_size)))
5274 {
5275 mp->refcount++;
5276 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5277 }
5278
5279 if (min_mp != NULL)
5280 mp->min_address += fix->fix_size;
5281 else
5282 {
5283 /* Note the insertion point if necessary. */
5284 if (mp->min_address < min_address)
5285 min_mp = mp;
5286 else if (mp->max_address
5287 < minipool_barrier->address + mp->offset + fix->fix_size)
5288 {
5289 /* Inserting before this entry would push the fix beyond
5290 its maximum address (which can happen if we have
5291 re-located a forwards fix); force the new fix to come
5292 after it. */
5293 min_mp = mp;
5294 min_address = mp->min_address + fix->fix_size;
5295 }
5296 }
5297 }
5298
5299 /* We need to create a new entry. */
5300 mp = xmalloc (sizeof (* mp));
5301 mp->fix_size = fix->fix_size;
5302 mp->mode = fix->mode;
5303 mp->value = fix->value;
5304 mp->refcount = 1;
5305 mp->max_address = minipool_barrier->address + 65536;
5306
5307 mp->min_address = min_address;
5308
5309 if (min_mp == NULL)
5310 {
5311 mp->prev = NULL;
5312 mp->next = minipool_vector_head;
5313
5314 if (mp->next == NULL)
5315 {
5316 minipool_vector_tail = mp;
5317 minipool_vector_label = gen_label_rtx ();
5318 }
5319 else
5320 mp->next->prev = mp;
5321
5322 minipool_vector_head = mp;
5323 }
5324 else
5325 {
5326 mp->next = min_mp->next;
5327 mp->prev = min_mp;
5328 min_mp->next = mp;
da6558fd 5329
d5b7b3ae
RE
5330 if (mp->next != NULL)
5331 mp->next->prev = mp;
5332 else
5333 minipool_vector_tail = mp;
5334 }
5335
5336 /* Save the new entry. */
5337 min_mp = mp;
5338
5339 if (mp->prev)
5340 mp = mp->prev;
5341 else
5342 mp->offset = 0;
5343
5344 /* Scan over the following entries and adjust their offsets. */
5345 while (mp->next != NULL)
5346 {
5347 if (mp->next->min_address < mp->min_address + mp->fix_size)
5348 mp->next->min_address = mp->min_address + mp->fix_size;
5349
5350 if (mp->refcount)
5351 mp->next->offset = mp->offset + mp->fix_size;
5352 else
5353 mp->next->offset = mp->offset;
5354
5355 mp = mp->next;
5356 }
5357
5358 return min_mp;
5359}
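/* Worked example (illustrative; numbers chosen arbitrarily): an insn
   at address 5000 whose neg_pool_range attribute is 4084 gives
   min_address = 5000 - 4084 = 916, i.e. the constant must not end up
   below address 916 if the insn is to reach it backwards.  */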
5360
5361static void
5362assign_minipool_offsets (barrier)
5363 Mfix * barrier;
5364{
5365 HOST_WIDE_INT offset = 0;
5366 Mnode * mp;
5367
5368 minipool_barrier = barrier;
5369
5370 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5371 {
5372 mp->offset = offset;
da6558fd 5373
d5b7b3ae
RE
5374 if (mp->refcount > 0)
5375 offset += mp->fix_size;
5376 }
5377}
5378
 5379/* Output the literal table.  */
5380static void
5381dump_minipool (scan)
5382 rtx scan;
5383{
5384 Mnode * mp;
5385 Mnode * nmp;
5386
5387 if (rtl_dump_file)
5388 fprintf (rtl_dump_file,
5389 ";; Emitting minipool after insn %u; address %ld\n",
5390 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5391
5392 scan = emit_label_after (gen_label_rtx (), scan);
5393 scan = emit_insn_after (gen_align_4 (), scan);
5394 scan = emit_label_after (minipool_vector_label, scan);
5395
5396 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5397 {
5398 if (mp->refcount > 0)
5399 {
5400 if (rtl_dump_file)
5401 {
5402 fprintf (rtl_dump_file,
5403 ";; Offset %u, min %ld, max %ld ",
5404 (unsigned) mp->offset, (unsigned long) mp->min_address,
5405 (unsigned long) mp->max_address);
5406 arm_print_value (rtl_dump_file, mp->value);
5407 fputc ('\n', rtl_dump_file);
5408 }
5409
5410 switch (mp->fix_size)
5411 {
5412#ifdef HAVE_consttable_1
5413 case 1:
5414 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5415 break;
5416
5417#endif
5418#ifdef HAVE_consttable_2
5419 case 2:
5420 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5421 break;
5422
5423#endif
5424#ifdef HAVE_consttable_4
5425 case 4:
5426 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5427 break;
5428
5429#endif
5430#ifdef HAVE_consttable_8
5431 case 8:
5432 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5433 break;
5434
5435#endif
5436 default:
5437 abort ();
5438 break;
5439 }
5440 }
5441
5442 nmp = mp->next;
5443 free (mp);
2b835d68
RE
5444 }
5445
d5b7b3ae
RE
5446 minipool_vector_head = minipool_vector_tail = NULL;
5447 scan = emit_insn_after (gen_consttable_end (), scan);
5448 scan = emit_barrier_after (scan);
2b835d68
RE
5449}
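/* Sketch of what this emits (illustrative; the exact directives come
   from the consttable_* patterns in arm.md):

	.L5:			@ fresh label
	.align	2		@ from gen_align_4
	.L4:			@ minipool_vector_label
	.word	305419896	@ a 4-byte entry (0x12345678)
	.word	some_symbol	@ another 4-byte entry (name hypothetical)

   followed by the consttable_end pattern and a barrier.  */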
5450
d5b7b3ae
RE
5451/* Return the cost of forcibly inserting a barrier after INSN. */
5452static int
5453arm_barrier_cost (insn)
5454 rtx insn;
949d79eb 5455{
d5b7b3ae
RE
5456 /* Basing the location of the pool on the loop depth is preferable,
5457 but at the moment, the basic block information seems to be
 5458 corrupted by this stage of the compilation. */
5459 int base_cost = 50;
5460 rtx next = next_nonnote_insn (insn);
5461
5462 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5463 base_cost -= 20;
5464
5465 switch (GET_CODE (insn))
5466 {
5467 case CODE_LABEL:
5468 /* It will always be better to place the table before the label, rather
5469 than after it. */
5470 return 50;
949d79eb 5471
d5b7b3ae
RE
5472 case INSN:
5473 case CALL_INSN:
5474 return base_cost;
5475
5476 case JUMP_INSN:
5477 return base_cost - 10;
5478
5479 default:
5480 return base_cost + 10;
5481 }
5482}
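/* With the constants above, a plain INSN costs 50, a JUMP_INSN 40,
   and any insn immediately followed by a CODE_LABEL is 20 cheaper,
   so the caller (which keeps the lowest cost seen) steers barriers
   towards existing breaks in the instruction stream.  */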
5483
5484/* Find the best place in the insn stream in the range
5485 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5486 Create the barrier by inserting a jump and add a new fix entry for
5487 it. */
5488static Mfix *
5489create_fix_barrier (fix, max_address)
5490 Mfix * fix;
5491 HOST_WIDE_INT max_address;
5492{
5493 HOST_WIDE_INT count = 0;
5494 rtx barrier;
5495 rtx from = fix->insn;
5496 rtx selected = from;
5497 int selected_cost;
5498 HOST_WIDE_INT selected_address;
5499 Mfix * new_fix;
5500 HOST_WIDE_INT max_count = max_address - fix->address;
5501 rtx label = gen_label_rtx ();
5502
5503 selected_cost = arm_barrier_cost (from);
5504 selected_address = fix->address;
5505
5506 while (from && count < max_count)
5507 {
5508 rtx tmp;
5509 int new_cost;
5510
5511 /* This code shouldn't have been called if there was a natural barrier
5512 within range. */
5513 if (GET_CODE (from) == BARRIER)
5514 abort ();
5515
5516 /* Count the length of this insn. */
5517 count += get_attr_length (from);
5518
5519 /* If there is a jump table, add its length. */
5520 tmp = is_jump_table (from);
5521 if (tmp != NULL)
5522 {
5523 count += get_jump_table_size (tmp);
5524
5525 /* Jump tables aren't in a basic block, so base the cost on
5526 the dispatch insn. If we select this location, we will
5527 still put the pool after the table. */
5528 new_cost = arm_barrier_cost (from);
5529
5530 if (count < max_count && new_cost <= selected_cost)
5531 {
5532 selected = tmp;
5533 selected_cost = new_cost;
5534 selected_address = fix->address + count;
5535 }
5536
5537 /* Continue after the dispatch table. */
5538 from = NEXT_INSN (tmp);
5539 continue;
5540 }
5541
5542 new_cost = arm_barrier_cost (from);
5543
5544 if (count < max_count && new_cost <= selected_cost)
5545 {
5546 selected = from;
5547 selected_cost = new_cost;
5548 selected_address = fix->address + count;
5549 }
5550
5551 from = NEXT_INSN (from);
5552 }
5553
5554 /* Create a new JUMP_INSN that branches around a barrier. */
5555 from = emit_jump_insn_after (gen_jump (label), selected);
5556 JUMP_LABEL (from) = label;
5557 barrier = emit_barrier_after (from);
5558 emit_label_after (label, barrier);
5559
5560 /* Create a minipool barrier entry for the new barrier. */
c7319d87 5561 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
d5b7b3ae
RE
5562 new_fix->insn = barrier;
5563 new_fix->address = selected_address;
5564 new_fix->next = fix->next;
5565 fix->next = new_fix;
5566
5567 return new_fix;
5568}
5569
5570/* Record that there is a natural barrier in the insn stream at
5571 ADDRESS. */
949d79eb
RE
5572static void
5573push_minipool_barrier (insn, address)
2b835d68 5574 rtx insn;
d5b7b3ae 5575 HOST_WIDE_INT address;
2b835d68 5576{
c7319d87 5577 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 5578
949d79eb
RE
5579 fix->insn = insn;
5580 fix->address = address;
2b835d68 5581
949d79eb
RE
5582 fix->next = NULL;
5583 if (minipool_fix_head != NULL)
5584 minipool_fix_tail->next = fix;
5585 else
5586 minipool_fix_head = fix;
5587
5588 minipool_fix_tail = fix;
5589}
2b835d68 5590
d5b7b3ae
RE
5591/* Record INSN, which will need fixing up to load a value from the
5592 minipool. ADDRESS is the offset of the insn since the start of the
5593 function; LOC is a pointer to the part of the insn which requires
5594 fixing; VALUE is the constant that must be loaded, which is of type
5595 MODE. */
949d79eb
RE
5596static void
5597push_minipool_fix (insn, address, loc, mode, value)
5598 rtx insn;
d5b7b3ae
RE
5599 HOST_WIDE_INT address;
5600 rtx * loc;
949d79eb
RE
5601 enum machine_mode mode;
5602 rtx value;
5603{
c7319d87 5604 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
949d79eb
RE
5605
5606#ifdef AOF_ASSEMBLER
 5607 /* PIC symbol references need to be converted into offsets into the
5608 based area. */
d5b7b3ae
RE
5609 /* XXX This shouldn't be done here. */
5610 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
949d79eb
RE
5611 value = aof_pic_entry (value);
5612#endif /* AOF_ASSEMBLER */
5613
5614 fix->insn = insn;
5615 fix->address = address;
5616 fix->loc = loc;
5617 fix->mode = mode;
d5b7b3ae 5618 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 5619 fix->value = value;
d5b7b3ae
RE
5620 fix->forwards = get_attr_pool_range (insn);
5621 fix->backwards = get_attr_neg_pool_range (insn);
5622 fix->minipool = NULL;
949d79eb
RE
5623
5624 /* If an insn doesn't have a range defined for it, then it isn't
5625 expecting to be reworked by this code. Better to abort now than
5626 to generate duff assembly code. */
d5b7b3ae 5627 if (fix->forwards == 0 && fix->backwards == 0)
949d79eb
RE
5628 abort ();
5629
d5b7b3ae
RE
5630 if (rtl_dump_file)
5631 {
5632 fprintf (rtl_dump_file,
5633 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5634 GET_MODE_NAME (mode),
5635 INSN_UID (insn), (unsigned long) address,
5636 -1 * (long)fix->backwards, (long)fix->forwards);
5637 arm_print_value (rtl_dump_file, fix->value);
5638 fprintf (rtl_dump_file, "\n");
5639 }
5640
6354dc9b 5641 /* Add it to the chain of fixes. */
949d79eb 5642 fix->next = NULL;
d5b7b3ae 5643
949d79eb
RE
5644 if (minipool_fix_head != NULL)
5645 minipool_fix_tail->next = fix;
5646 else
5647 minipool_fix_head = fix;
5648
5649 minipool_fix_tail = fix;
5650}
5651
d5b7b3ae 5652/* Scan INSN and note any of its operands that need fixing. */
949d79eb
RE
5653static void
5654note_invalid_constants (insn, address)
5655 rtx insn;
d5b7b3ae 5656 HOST_WIDE_INT address;
949d79eb
RE
5657{
5658 int opno;
5659
d5b7b3ae 5660 extract_insn (insn);
949d79eb 5661
5895f793 5662 if (!constrain_operands (1))
949d79eb
RE
5663 fatal_insn_not_found (insn);
5664
d5b7b3ae
RE
5665 /* Fill in recog_op_alt with information about the constraints of this
5666 insn. */
949d79eb
RE
5667 preprocess_constraints ();
5668
1ccbefce 5669 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 5670 {
6354dc9b 5671 /* Things we need to fix can only occur in inputs. */
36ab44c7 5672 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
5673 continue;
5674
5675 /* If this alternative is a memory reference, then any mention
5676 of constants in this alternative is really to fool reload
5677 into allowing us to accept one there. We need to fix them up
5678 now so that we output the right code. */
5679 if (recog_op_alt[opno][which_alternative].memory_ok)
5680 {
1ccbefce 5681 rtx op = recog_data.operand[opno];
949d79eb
RE
5682
5683 if (CONSTANT_P (op))
1ccbefce
RH
5684 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5685 recog_data.operand_mode[opno], op);
d5b7b3ae
RE
5686#if 0
5687 /* RWE: Now we look correctly at the operands for the insn,
5688 this shouldn't be needed any more. */
949d79eb 5689#ifndef AOF_ASSEMBLER
d5b7b3ae 5690 /* XXX Is this still needed? */
949d79eb 5691 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
1ccbefce
RH
5692 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5693 recog_data.operand_mode[opno],
5694 XVECEXP (op, 0, 0));
949d79eb 5695#endif
d5b7b3ae
RE
5696#endif
5697 else if (GET_CODE (op) == MEM
949d79eb
RE
5698 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5699 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
1ccbefce
RH
5700 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5701 recog_data.operand_mode[opno],
949d79eb
RE
5702 get_pool_constant (XEXP (op, 0)));
5703 }
2b835d68 5704 }
2b835d68
RE
5705}
5706
5707void
5708arm_reorg (first)
5709 rtx first;
5710{
5711 rtx insn;
d5b7b3ae
RE
5712 HOST_WIDE_INT address = 0;
5713 Mfix * fix;
ad076f4e 5714
949d79eb 5715 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 5716
949d79eb
RE
5717 /* The first insn must always be a note, or the code below won't
5718 scan it properly. */
5719 if (GET_CODE (first) != NOTE)
5720 abort ();
5721
5722 /* Scan all the insns and record the operands that will need fixing. */
5723 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 5724 {
949d79eb 5725 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 5726 push_minipool_barrier (insn, address);
949d79eb
RE
5727 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5728 || GET_CODE (insn) == JUMP_INSN)
5729 {
5730 rtx table;
5731
5732 note_invalid_constants (insn, address);
5733 address += get_attr_length (insn);
d5b7b3ae 5734
949d79eb
RE
5735 /* If the insn is a vector jump, add the size of the table
5736 and skip the table. */
d5b7b3ae 5737 if ((table = is_jump_table (insn)) != NULL)
2b835d68 5738 {
d5b7b3ae 5739 address += get_jump_table_size (table);
949d79eb
RE
5740 insn = table;
5741 }
5742 }
5743 }
332072db 5744
d5b7b3ae
RE
5745 fix = minipool_fix_head;
5746
949d79eb 5747 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 5748 while (fix)
949d79eb 5749 {
d5b7b3ae
RE
5750 Mfix * ftmp;
5751 Mfix * fdel;
5752 Mfix * last_added_fix;
5753 Mfix * last_barrier = NULL;
5754 Mfix * this_fix;
949d79eb
RE
5755
5756 /* Skip any further barriers before the next fix. */
5757 while (fix && GET_CODE (fix->insn) == BARRIER)
5758 fix = fix->next;
5759
d5b7b3ae 5760 /* No more fixes. */
949d79eb
RE
5761 if (fix == NULL)
5762 break;
332072db 5763
d5b7b3ae 5764 last_added_fix = NULL;
2b835d68 5765
d5b7b3ae 5766 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 5767 {
949d79eb 5768 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 5769 {
d5b7b3ae
RE
5770 if (ftmp->address >= minipool_vector_head->max_address)
5771 break;
2b835d68 5772
d5b7b3ae 5773 last_barrier = ftmp;
2b835d68 5774 }
d5b7b3ae
RE
5775 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5776 break;
5777
5778 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 5779 }
949d79eb 5780
d5b7b3ae
RE
5781 /* If we found a barrier, drop back to that; any fixes that we
5782 could have reached but come after the barrier will now go in
5783 the next mini-pool. */
949d79eb
RE
5784 if (last_barrier != NULL)
5785 {
d5b7b3ae
RE
5786 /* Reduce the refcount for those fixes that won't go into this
5787 pool after all. */
5788 for (fdel = last_barrier->next;
5789 fdel && fdel != ftmp;
5790 fdel = fdel->next)
5791 {
5792 fdel->minipool->refcount--;
5793 fdel->minipool = NULL;
5794 }
5795
949d79eb
RE
5796 ftmp = last_barrier;
5797 }
5798 else
2bfa88dc 5799 {
d5b7b3ae
RE
 5800 /* ftmp is the first fix that we can't fit into this pool and
 5801 there are no natural barriers that we could use. Insert a
5802 new barrier in the code somewhere between the previous
5803 fix and this one, and arrange to jump around it. */
5804 HOST_WIDE_INT max_address;
5805
5806 /* The last item on the list of fixes must be a barrier, so
5807 we can never run off the end of the list of fixes without
5808 last_barrier being set. */
5809 if (ftmp == NULL)
5810 abort ();
5811
5812 max_address = minipool_vector_head->max_address;
2bfa88dc
RE
5813 /* Check that there isn't another fix that is in range that
5814 we couldn't fit into this pool because the pool was
5815 already too large: we need to put the pool before such an
5816 instruction. */
d5b7b3ae
RE
5817 if (ftmp->address < max_address)
5818 max_address = ftmp->address;
5819
5820 last_barrier = create_fix_barrier (last_added_fix, max_address);
5821 }
5822
5823 assign_minipool_offsets (last_barrier);
5824
5825 while (ftmp)
5826 {
5827 if (GET_CODE (ftmp->insn) != BARRIER
5828 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
5829 == NULL))
5830 break;
2bfa88dc 5831
d5b7b3ae 5832 ftmp = ftmp->next;
2bfa88dc 5833 }
949d79eb
RE
5834
5835 /* Scan over the fixes we have identified for this pool, fixing them
5836 up and adding the constants to the pool itself. */
d5b7b3ae 5837 for (this_fix = fix; this_fix && ftmp != this_fix;
949d79eb
RE
5838 this_fix = this_fix->next)
5839 if (GET_CODE (this_fix->insn) != BARRIER)
5840 {
949d79eb
RE
5841 rtx addr
5842 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
5843 minipool_vector_label),
d5b7b3ae 5844 this_fix->minipool->offset);
949d79eb
RE
5845 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
5846 }
5847
d5b7b3ae 5848 dump_minipool (last_barrier->insn);
949d79eb 5849 fix = ftmp;
2b835d68 5850 }
4b632bf1 5851
949d79eb
RE
5852 /* From now on we must synthesize any constants that we can't handle
5853 directly. This can happen if the RTL gets split during final
5854 instruction generation. */
4b632bf1 5855 after_arm_reorg = 1;
c7319d87
RE
5856
5857 /* Free the minipool memory. */
5858 obstack_free (&minipool_obstack, minipool_startobj);
2b835d68 5859}
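/* Net effect (illustrative): a load whose constant was spilled to the
   normal constant pool but is out of range, e.g.

	ldr	r0, .LC0	@ .LC0 too far away

   has its operand rewritten to (minipool label + offset), so the
   final assembly is a short pc-relative load such as

	ldr	r0, .L4+8

   where 8 is the offset assigned by assign_minipool_offsets and .L4
   stands for a hypothetical minipool_vector_label.  */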
cce8749e
CH
5860\f
5861/* Routines to output assembly language. */
5862
f3bb6135 5863/* If the rtx is the correct value then return the string of the number.
ff9940b0 5864 In this way we can ensure that valid double constants are generated even
6354dc9b 5865 when cross compiling. */
cd2b33d0 5866const char *
ff9940b0 5867fp_immediate_constant (x)
b5cc037f 5868 rtx x;
ff9940b0
RE
5869{
5870 REAL_VALUE_TYPE r;
5871 int i;
5872
5873 if (!fpa_consts_inited)
5874 init_fpa_table ();
5875
5876 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5877 for (i = 0; i < 8; i++)
5878 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
5879 return strings_fpa[i];
f3bb6135 5880
ff9940b0
RE
5881 abort ();
5882}
5883
9997d19d 5884/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 5885static const char *
9997d19d 5886fp_const_from_val (r)
62b10bbc 5887 REAL_VALUE_TYPE * r;
9997d19d
RE
5888{
5889 int i;
5890
5895f793 5891 if (!fpa_consts_inited)
9997d19d
RE
5892 init_fpa_table ();
5893
5894 for (i = 0; i < 8; i++)
5895 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
5896 return strings_fpa[i];
5897
5898 abort ();
5899}
ff9940b0 5900
cce8749e
CH
5901/* Output the operands of a LDM/STM instruction to STREAM.
5902 MASK is the ARM register set mask of which only bits 0-15 are important.
 5903 INSTR is the possibly suffixed base register. HAT is nonzero if a hat
5904 must follow the register list. */
5905
d5b7b3ae 5906static void
dd18ae56 5907print_multi_reg (stream, instr, reg, mask, hat)
62b10bbc 5908 FILE * stream;
cd2b33d0 5909 const char * instr;
dd18ae56
NC
5910 int reg;
5911 int mask;
5912 int hat;
cce8749e
CH
5913{
5914 int i;
5915 int not_first = FALSE;
5916
1d5473cb 5917 fputc ('\t', stream);
dd18ae56 5918 asm_fprintf (stream, instr, reg);
1d5473cb 5919 fputs (", {", stream);
62b10bbc 5920
d5b7b3ae 5921 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
5922 if (mask & (1 << i))
5923 {
5924 if (not_first)
5925 fprintf (stream, ", ");
62b10bbc 5926
dd18ae56 5927 asm_fprintf (stream, "%r", i);
cce8749e
CH
5928 not_first = TRUE;
5929 }
f3bb6135 5930
cce8749e 5931 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 5932}
cce8749e 5933
6354dc9b 5934/* Output a 'call' insn. */
cce8749e 5935
cd2b33d0 5936const char *
cce8749e 5937output_call (operands)
62b10bbc 5938 rtx * operands;
cce8749e 5939{
6354dc9b 5940 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 5941
62b10bbc 5942 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 5943 {
62b10bbc 5944 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 5945 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 5946 }
62b10bbc 5947
1d5473cb 5948 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 5949
6cfc7210 5950 if (TARGET_INTERWORK)
da6558fd
NC
5951 output_asm_insn ("bx%?\t%0", operands);
5952 else
5953 output_asm_insn ("mov%?\t%|pc, %0", operands);
5954
f3bb6135
RE
5955 return "";
5956}
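/* So a call through r2 emits (illustrative):

	mov	lr, pc
	mov	pc, r2		@ "bx r2" under TARGET_INTERWORK

   while a call through lr first copies lr into ip and indirects
   through that instead.  */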
cce8749e 5957
ff9940b0
RE
5958static int
5959eliminate_lr2ip (x)
62b10bbc 5960 rtx * x;
ff9940b0
RE
5961{
5962 int something_changed = 0;
62b10bbc 5963 rtx x0 = * x;
ff9940b0
RE
5964 int code = GET_CODE (x0);
5965 register int i, j;
6f7d635c 5966 register const char * fmt;
ff9940b0
RE
5967
5968 switch (code)
5969 {
5970 case REG:
62b10bbc 5971 if (REGNO (x0) == LR_REGNUM)
ff9940b0 5972 {
62b10bbc 5973 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
5974 return 1;
5975 }
5976 return 0;
5977 default:
6354dc9b 5978 /* Scan through the sub-elements and change any references there. */
ff9940b0 5979 fmt = GET_RTX_FORMAT (code);
62b10bbc 5980
ff9940b0
RE
5981 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5982 if (fmt[i] == 'e')
5983 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
5984 else if (fmt[i] == 'E')
5985 for (j = 0; j < XVECLEN (x0, i); j++)
5986 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 5987
ff9940b0
RE
5988 return something_changed;
5989 }
5990}
5991
6354dc9b 5992/* Output a 'call' insn that is a reference in memory. */
ff9940b0 5993
cd2b33d0 5994const char *
ff9940b0 5995output_call_mem (operands)
62b10bbc 5996 rtx * operands;
ff9940b0 5997{
6354dc9b
NC
5998 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
5999 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
ff9940b0 6000 if (eliminate_lr2ip (&operands[0]))
1d5473cb 6001 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 6002
6cfc7210 6003 if (TARGET_INTERWORK)
da6558fd
NC
6004 {
6005 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6006 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6007 output_asm_insn ("bx%?\t%|ip", operands);
6008 }
6009 else
6010 {
6011 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6012 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6013 }
6014
f3bb6135
RE
6015 return "";
6016}
ff9940b0
RE
6017
6018
 6019/* Output a move from arm registers to an fpu register.
6020 OPERANDS[0] is an fpu register.
6021 OPERANDS[1] is the first registers of an arm register pair. */
6022
cd2b33d0 6023const char *
ff9940b0 6024output_mov_long_double_fpu_from_arm (operands)
62b10bbc 6025 rtx * operands;
ff9940b0
RE
6026{
6027 int arm_reg0 = REGNO (operands[1]);
6028 rtx ops[3];
6029
62b10bbc
NC
6030 if (arm_reg0 == IP_REGNUM)
6031 abort ();
f3bb6135 6032
43cffd11
RE
6033 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6034 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6035 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6036
1d5473cb
RE
6037 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6038 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 6039
f3bb6135
RE
6040 return "";
6041}
ff9940b0
RE
6042
6043/* Output a move from an fpu register to arm registers.
6044 OPERANDS[0] is the first registers of an arm register pair.
6045 OPERANDS[1] is an fpu register. */
6046
cd2b33d0 6047const char *
ff9940b0 6048output_mov_long_double_arm_from_fpu (operands)
62b10bbc 6049 rtx * operands;
ff9940b0
RE
6050{
6051 int arm_reg0 = REGNO (operands[0]);
6052 rtx ops[3];
6053
62b10bbc
NC
6054 if (arm_reg0 == IP_REGNUM)
6055 abort ();
f3bb6135 6056
43cffd11
RE
6057 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6058 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6059 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 6060
1d5473cb
RE
6061 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6062 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
6063 return "";
6064}
ff9940b0
RE
6065
 6066/* Output a move from arm registers to arm registers of a long double.
6067 OPERANDS[0] is the destination.
6068 OPERANDS[1] is the source. */
cd2b33d0 6069const char *
ff9940b0 6070output_mov_long_double_arm_from_arm (operands)
62b10bbc 6071 rtx * operands;
ff9940b0 6072{
6354dc9b 6073 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
6074 int dest_start = REGNO (operands[0]);
6075 int src_start = REGNO (operands[1]);
6076 rtx ops[2];
6077 int i;
6078
6079 if (dest_start < src_start)
6080 {
6081 for (i = 0; i < 3; i++)
6082 {
43cffd11
RE
6083 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6084 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6085 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6086 }
6087 }
6088 else
6089 {
6090 for (i = 2; i >= 0; i--)
6091 {
43cffd11
RE
6092 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6093 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 6094 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
6095 }
6096 }
f3bb6135 6097
ff9940b0
RE
6098 return "";
6099}
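/* E.g. (illustrative) moving r1..r3 into r0..r2 copies ascending
   (mov r0, r1; mov r1, r2; mov r2, r3), whereas moving r0..r2 into
   r1..r3 copies descending, so no source register is clobbered
   before it has been read.  */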
6100
6101
cce8749e
CH
 6102/* Output a move from arm registers to an fpu register.
6103 OPERANDS[0] is an fpu register.
6104 OPERANDS[1] is the first registers of an arm register pair. */
6105
cd2b33d0 6106const char *
cce8749e 6107output_mov_double_fpu_from_arm (operands)
62b10bbc 6108 rtx * operands;
cce8749e
CH
6109{
6110 int arm_reg0 = REGNO (operands[1]);
6111 rtx ops[2];
6112
62b10bbc
NC
6113 if (arm_reg0 == IP_REGNUM)
6114 abort ();
6115
43cffd11
RE
6116 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6117 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6118 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6119 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
6120 return "";
6121}
cce8749e
CH
6122
6123/* Output a move from an fpu register to arm registers.
6124 OPERANDS[0] is the first registers of an arm register pair.
6125 OPERANDS[1] is an fpu register. */
6126
cd2b33d0 6127const char *
cce8749e 6128output_mov_double_arm_from_fpu (operands)
62b10bbc 6129 rtx * operands;
cce8749e
CH
6130{
6131 int arm_reg0 = REGNO (operands[0]);
6132 rtx ops[2];
6133
62b10bbc
NC
6134 if (arm_reg0 == IP_REGNUM)
6135 abort ();
f3bb6135 6136
43cffd11
RE
6137 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6138 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
6139 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6140 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
6141 return "";
6142}
cce8749e
CH
6143
6144/* Output a move between double words.
6145 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6146 or MEM<-REG and all MEMs must be offsettable addresses. */
6147
cd2b33d0 6148const char *
cce8749e 6149output_move_double (operands)
aec3cfba 6150 rtx * operands;
cce8749e
CH
6151{
6152 enum rtx_code code0 = GET_CODE (operands[0]);
6153 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 6154 rtx otherops[3];
cce8749e
CH
6155
6156 if (code0 == REG)
6157 {
6158 int reg0 = REGNO (operands[0]);
6159
43cffd11 6160 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 6161
cce8749e
CH
6162 if (code1 == REG)
6163 {
6164 int reg1 = REGNO (operands[1]);
62b10bbc
NC
6165 if (reg1 == IP_REGNUM)
6166 abort ();
f3bb6135 6167
6354dc9b 6168 /* Ensure the second source is not overwritten. */
c1c2bc04 6169 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 6170 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 6171 else
6cfc7210 6172 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
6173 }
6174 else if (code1 == CONST_DOUBLE)
6175 {
226a5051
RE
6176 if (GET_MODE (operands[1]) == DFmode)
6177 {
6178 long l[2];
6179 union real_extract u;
6180
4e135bdd 6181 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
226a5051 6182 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
d5b7b3ae
RE
6183 otherops[1] = GEN_INT (l[1]);
6184 operands[1] = GEN_INT (l[0]);
226a5051 6185 }
c1c2bc04
RE
6186 else if (GET_MODE (operands[1]) != VOIDmode)
6187 abort ();
6188 else if (WORDS_BIG_ENDIAN)
6189 {
6190
6191 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6192 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6193 }
226a5051
RE
6194 else
6195 {
c1c2bc04 6196
226a5051
RE
6197 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6198 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6199 }
6cfc7210 6200
c1c2bc04
RE
6201 output_mov_immediate (operands);
6202 output_mov_immediate (otherops);
cce8749e
CH
6203 }
6204 else if (code1 == CONST_INT)
6205 {
56636818
JL
6206#if HOST_BITS_PER_WIDE_INT > 32
6207 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6208 what the upper word is. */
6209 if (WORDS_BIG_ENDIAN)
6210 {
6211 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6212 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6213 }
6214 else
6215 {
6216 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6217 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6218 }
6219#else
6354dc9b 6220 /* Sign extend the intval into the high-order word. */
c1c2bc04
RE
6221 if (WORDS_BIG_ENDIAN)
6222 {
6223 otherops[1] = operands[1];
6224 operands[1] = (INTVAL (operands[1]) < 0
6225 ? constm1_rtx : const0_rtx);
6226 }
ff9940b0 6227 else
c1c2bc04 6228 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 6229#endif
c1c2bc04
RE
6230 output_mov_immediate (otherops);
6231 output_mov_immediate (operands);
cce8749e
CH
6232 }
6233 else if (code1 == MEM)
6234 {
ff9940b0 6235 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 6236 {
ff9940b0 6237 case REG:
9997d19d 6238 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 6239 break;
2b835d68 6240
ff9940b0 6241 case PRE_INC:
6354dc9b 6242 abort (); /* Should never happen now. */
ff9940b0 6243 break;
2b835d68 6244
ff9940b0 6245 case PRE_DEC:
2b835d68 6246 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 6247 break;
2b835d68 6248
ff9940b0 6249 case POST_INC:
9997d19d 6250 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 6251 break;
2b835d68 6252
ff9940b0 6253 case POST_DEC:
6354dc9b 6254 abort (); /* Should never happen now. */
ff9940b0 6255 break;
2b835d68
RE
6256
6257 case LABEL_REF:
6258 case CONST:
6259 output_asm_insn ("adr%?\t%0, %1", operands);
6260 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6261 break;
6262
ff9940b0 6263 default:
aec3cfba
NC
6264 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6265 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 6266 {
2b835d68
RE
6267 otherops[0] = operands[0];
6268 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6269 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6270 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6271 {
6272 if (GET_CODE (otherops[2]) == CONST_INT)
6273 {
6274 switch (INTVAL (otherops[2]))
6275 {
6276 case -8:
6277 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6278 return "";
6279 case -4:
6280 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6281 return "";
6282 case 4:
6283 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6284 return "";
6285 }
6286 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6287 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6288 else
6289 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6290 }
6291 else
6292 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6293 }
6294 else
6295 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 6296
2b835d68
RE
6297 return "ldm%?ia\t%0, %M0";
6298 }
6299 else
6300 {
6301 otherops[1] = adj_offsettable_operand (operands[1], 4);
6302 /* Take care of overlapping base/data reg. */
6303 if (reg_mentioned_p (operands[0], operands[1]))
6304 {
6305 output_asm_insn ("ldr%?\t%0, %1", otherops);
6306 output_asm_insn ("ldr%?\t%0, %1", operands);
6307 }
6308 else
6309 {
6310 output_asm_insn ("ldr%?\t%0, %1", operands);
6311 output_asm_insn ("ldr%?\t%0, %1", otherops);
6312 }
cce8749e
CH
6313 }
6314 }
6315 }
2b835d68 6316 else
6354dc9b 6317 abort (); /* Constraints should prevent this. */
cce8749e
CH
6318 }
6319 else if (code0 == MEM && code1 == REG)
6320 {
62b10bbc
NC
6321 if (REGNO (operands[1]) == IP_REGNUM)
6322 abort ();
2b835d68 6323
ff9940b0
RE
6324 switch (GET_CODE (XEXP (operands[0], 0)))
6325 {
6326 case REG:
9997d19d 6327 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 6328 break;
2b835d68 6329
ff9940b0 6330 case PRE_INC:
6354dc9b 6331 abort (); /* Should never happen now. */
ff9940b0 6332 break;
2b835d68 6333
ff9940b0 6334 case PRE_DEC:
2b835d68 6335 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 6336 break;
2b835d68 6337
ff9940b0 6338 case POST_INC:
9997d19d 6339 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 6340 break;
2b835d68 6341
ff9940b0 6342 case POST_DEC:
6354dc9b 6343 abort (); /* Should never happen now. */
ff9940b0 6344 break;
2b835d68
RE
6345
6346 case PLUS:
6347 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6348 {
6349 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6350 {
6351 case -8:
6352 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6353 return "";
6354
6355 case -4:
6356 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6357 return "";
6358
6359 case 4:
6360 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6361 return "";
6362 }
6363 }
6364 /* Fall through */
6365
ff9940b0 6366 default:
cce8749e 6367 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 6368 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
6369 output_asm_insn ("str%?\t%1, %0", operands);
6370 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
6371 }
6372 }
2b835d68 6373 else
62b10bbc 6374 abort (); /* Constraints should prevent this. */
cce8749e 6375
9997d19d
RE
6376 return "";
6377}
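/* Example for the REG<-CONST_INT case (illustrative): on a
   little-endian target, (set (reg:DI r0) (const_int 5)) emits

	mov	r1, #0		@ high word: sign extension of 5
	mov	r0, #5		@ low word

   with #-1 replacing #0 had the constant been negative.  */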
cce8749e
CH
6378
6379
6380/* Output an arbitrary MOV reg, #n.
6381 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6382
cd2b33d0 6383const char *
cce8749e 6384output_mov_immediate (operands)
62b10bbc 6385 rtx * operands;
cce8749e 6386{
f3bb6135 6387 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
6388 int n_ones = 0;
6389 int i;
6390
 6391 /* Try to use one MOV.  */
cce8749e 6392 if (const_ok_for_arm (n))
f3bb6135 6393 {
9997d19d 6394 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
6395 return "";
6396 }
cce8749e
CH
6397
 6398 /* Try to use one MVN.  */
f3bb6135 6399 if (const_ok_for_arm (~n))
cce8749e 6400 {
f3bb6135 6401 operands[1] = GEN_INT (~n);
9997d19d 6402 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 6403 return "";
cce8749e
CH
6404 }
6405
6354dc9b 6406 /* If all else fails, make it out of ORRs or BICs as appropriate. */
cce8749e
CH
6407
 6408 for (i = 0; i < 32; i++)
6409 if (n & 1 << i)
6410 n_ones++;
6411
6354dc9b 6412 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
e5951263 6413 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
cce8749e 6414 else
d5b7b3ae 6415 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
f3bb6135
RE
6416
6417 return "";
6418}
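/* Selection example (illustrative): n = 0xffffff00 fails
   const_ok_for_arm but ~n = 0xff passes, giving the single insn
   "mvn r0, #255"; n = 0x00ff00ff fails both tests and has exactly 16
   set bits, so it takes the MOV/ORR path through
   output_multi_immediate.  */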
cce8749e
CH
6419
6420
6421/* Output an ADD r, s, #n where n may be too big for one instruction. If
6422 adding zero to one register, output nothing. */
6423
cd2b33d0 6424const char *
cce8749e 6425output_add_immediate (operands)
62b10bbc 6426 rtx * operands;
cce8749e 6427{
f3bb6135 6428 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
6429
6430 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6431 {
6432 if (n < 0)
6433 output_multi_immediate (operands,
9997d19d
RE
6434 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6435 -n);
cce8749e
CH
6436 else
6437 output_multi_immediate (operands,
9997d19d
RE
6438 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6439 n);
cce8749e 6440 }
f3bb6135
RE
6441
6442 return "";
6443}
cce8749e 6444
cce8749e
CH
6445/* Output a multiple immediate operation.
6446 OPERANDS is the vector of operands referred to in the output patterns.
6447 INSTR1 is the output pattern to use for the first constant.
6448 INSTR2 is the output pattern to use for subsequent constants.
6449 IMMED_OP is the index of the constant slot in OPERANDS.
6450 N is the constant value. */
6451
cd2b33d0 6452static const char *
cce8749e 6453output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc 6454 rtx * operands;
cd2b33d0
NC
6455 const char * instr1;
6456 const char * instr2;
f3bb6135
RE
6457 int immed_op;
6458 HOST_WIDE_INT n;
cce8749e 6459{
f3bb6135 6460#if HOST_BITS_PER_WIDE_INT > 32
e5951263 6461 n &= HOST_UINT (0xffffffff);
f3bb6135
RE
6462#endif
6463
cce8749e
CH
6464 if (n == 0)
6465 {
6466 operands[immed_op] = const0_rtx;
6354dc9b 6467 output_asm_insn (instr1, operands); /* Quick and easy output. */
cce8749e
CH
6468 }
6469 else
6470 {
6471 int i;
cd2b33d0 6472 const char * instr = instr1;
cce8749e 6473
6354dc9b 6474 /* Note that n is never zero here (which would give no output). */
cce8749e
CH
6475 for (i = 0; i < 32; i += 2)
6476 {
6477 if (n & (3 << i))
6478 {
f3bb6135
RE
6479 operands[immed_op] = GEN_INT (n & (255 << i));
6480 output_asm_insn (instr, operands);
cce8749e
CH
6481 instr = instr2;
6482 i += 6;
6483 }
6484 }
6485 }
cd2b33d0 6486
f3bb6135 6487 return "";
9997d19d 6488}
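/* Worked example (illustrative): reached from output_mov_immediate
   with N = 0x00ff00ff, the scan finds the byte at bits 0-7 and emits
   "mov r0, #255" (INSTR1), then the byte at bits 16-23 and emits
   "orr r0, r0, #16711680" (INSTR2); each chunk fits in an 8-bit
   field at an even bit position and is therefore a valid ARM
   immediate.  */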
cce8749e
CH
6489
6490
6491/* Return the appropriate ARM instruction for the operation code.
6492 The returned result should not be overwritten. OP is the rtx of the
6493 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6494 was shifted. */
6495
cd2b33d0 6496const char *
cce8749e
CH
6497arithmetic_instr (op, shift_first_arg)
6498 rtx op;
f3bb6135 6499 int shift_first_arg;
cce8749e 6500{
9997d19d 6501 switch (GET_CODE (op))
cce8749e
CH
6502 {
6503 case PLUS:
f3bb6135
RE
6504 return "add";
6505
cce8749e 6506 case MINUS:
f3bb6135
RE
6507 return shift_first_arg ? "rsb" : "sub";
6508
cce8749e 6509 case IOR:
f3bb6135
RE
6510 return "orr";
6511
cce8749e 6512 case XOR:
f3bb6135
RE
6513 return "eor";
6514
cce8749e 6515 case AND:
f3bb6135
RE
6516 return "and";
6517
cce8749e 6518 default:
f3bb6135 6519 abort ();
cce8749e 6520 }
f3bb6135 6521}
cce8749e
CH
6522
6523
6524/* Ensure valid constant shifts and return the appropriate shift mnemonic
6525 for the operation code. The returned result should not be overwritten.
6526 OP is the rtx code of the shift.
9997d19d 6527 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
6354dc9b 6528 constant shift amount otherwise. */
cce8749e 6529
cd2b33d0 6530static const char *
9997d19d
RE
6531shift_op (op, amountp)
6532 rtx op;
6533 HOST_WIDE_INT *amountp;
cce8749e 6534{
cd2b33d0 6535 const char * mnem;
e2c671ba 6536 enum rtx_code code = GET_CODE (op);
cce8749e 6537
9997d19d
RE
6538 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6539 *amountp = -1;
6540 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6541 *amountp = INTVAL (XEXP (op, 1));
6542 else
6543 abort ();
6544
e2c671ba 6545 switch (code)
cce8749e
CH
6546 {
6547 case ASHIFT:
6548 mnem = "asl";
6549 break;
f3bb6135 6550
cce8749e
CH
6551 case ASHIFTRT:
6552 mnem = "asr";
cce8749e 6553 break;
f3bb6135 6554
cce8749e
CH
6555 case LSHIFTRT:
6556 mnem = "lsr";
cce8749e 6557 break;
f3bb6135 6558
9997d19d
RE
6559 case ROTATERT:
6560 mnem = "ror";
9997d19d
RE
6561 break;
6562
ff9940b0 6563 case MULT:
e2c671ba
RE
6564 /* We never have to worry about the amount being other than a
6565 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
6566 if (*amountp != -1)
6567 *amountp = int_log2 (*amountp);
6568 else
6569 abort ();
f3bb6135
RE
6570 return "asl";
6571
cce8749e 6572 default:
f3bb6135 6573 abort ();
cce8749e
CH
6574 }
6575
e2c671ba
RE
6576 if (*amountp != -1)
6577 {
6578 /* This is not 100% correct, but follows from the desire to merge
6579 multiplication by a power of 2 with the recognizer for a
6580 shift. >=32 is not a valid shift for "asl", so we must try and
6581 output a shift that produces the correct arithmetical result.
ddd5a7c1 6582 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
6583 is not set correctly if we set the flags; but we never use the
6584 carry bit from such an operation, so we can ignore that. */
6585 if (code == ROTATERT)
 6586 *amountp &= 31; /* Rotate is just modulo 32.  */
6587 else if (*amountp != (*amountp & 31))
6588 {
6589 if (code == ASHIFT)
6590 mnem = "lsr";
6591 *amountp = 32;
6592 }
6593
6594 /* Shifts of 0 are no-ops. */
6595 if (*amountp == 0)
6596 return NULL;
6597 }
6598
9997d19d
RE
6599 return mnem;
6600}
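/* Examples (illustrative): (mult x 8) returns "asl" with *AMOUNTP
   set to 3; (ashiftrt x 33) keeps "asr" but clamps the amount to 32,
   which still gives the right arithmetic result; (ashift x 33)
   becomes "lsr" #32 because asl #32 is not encodable; and
   (rotatert x 33) reduces modulo 32 to #1.  */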
cce8749e
CH
6601
6602
6354dc9b 6603/* Obtain the shift count from POWER, which must be a power of two.  */
18af7313 6604static HOST_WIDE_INT
cce8749e 6605int_log2 (power)
f3bb6135 6606 HOST_WIDE_INT power;
cce8749e 6607{
f3bb6135 6608 HOST_WIDE_INT shift = 0;
cce8749e 6609
e5951263 6610 while ((((HOST_INT (1)) << shift) & power) == 0)
cce8749e
CH
6611 {
6612 if (shift > 31)
f3bb6135 6613 abort ();
cce8749e
CH
6614 shift++;
6615 }
f3bb6135
RE
6616
6617 return shift;
6618}
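/* E.g. int_log2 (8) == 3; a POWER with no bit set in the low 32 bits
   hits the abort.  */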
cce8749e 6619
cce8749e
CH
6620/* Output a .ascii pseudo-op, keeping track of lengths. This is because
6621 /bin/as is horribly restrictive. */
6cfc7210 6622#define MAX_ASCII_LEN 51
cce8749e
CH
6623
6624void
6625output_ascii_pseudo_op (stream, p, len)
62b10bbc 6626 FILE * stream;
3cce094d 6627 const unsigned char * p;
cce8749e
CH
6628 int len;
6629{
6630 int i;
6cfc7210 6631 int len_so_far = 0;
cce8749e 6632
6cfc7210
NC
6633 fputs ("\t.ascii\t\"", stream);
6634
cce8749e
CH
6635 for (i = 0; i < len; i++)
6636 {
6637 register int c = p[i];
6638
6cfc7210 6639 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 6640 {
6cfc7210 6641 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 6642 len_so_far = 0;
cce8749e
CH
6643 }
6644
6cfc7210 6645 switch (c)
cce8749e 6646 {
6cfc7210
NC
6647 case TARGET_TAB:
6648 fputs ("\\t", stream);
6649 len_so_far += 2;
6650 break;
6651
6652 case TARGET_FF:
6653 fputs ("\\f", stream);
6654 len_so_far += 2;
6655 break;
6656
6657 case TARGET_BS:
6658 fputs ("\\b", stream);
6659 len_so_far += 2;
6660 break;
6661
6662 case TARGET_CR:
6663 fputs ("\\r", stream);
6664 len_so_far += 2;
6665 break;
6666
6667 case TARGET_NEWLINE:
6668 fputs ("\\n", stream);
6669 c = p [i + 1];
6670 if ((c >= ' ' && c <= '~')
6671 || c == TARGET_TAB)
6672 /* This is a good place for a line break. */
6673 len_so_far = MAX_ASCII_LEN;
6674 else
6675 len_so_far += 2;
6676 break;
6677
6678 case '\"':
6679 case '\\':
6680 putc ('\\', stream);
5895f793 6681 len_so_far++;
6cfc7210 6682 /* Drop through.  */
f3bb6135 6683
6cfc7210
NC
6684 default:
6685 if (c >= ' ' && c <= '~')
6686 {
6687 putc (c, stream);
5895f793 6688 len_so_far++;
6cfc7210
NC
6689 }
6690 else
6691 {
6692 fprintf (stream, "\\%03o", c);
6693 len_so_far += 4;
6694 }
6695 break;
cce8749e 6696 }
cce8749e 6697 }
f3bb6135 6698
cce8749e 6699 fputs ("\"\n", stream);
f3bb6135 6700}
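/* Example (illustrative): the 3-character string "a", TAB, "b" is
   emitted as

	.ascii	"a\tb"

   while longer strings are split over several .ascii directives,
   with a break forced after any newline that precedes a printable
   character.  */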
cce8749e 6701\f
ff9940b0 6702
cd2b33d0 6703const char *
84ed5e79 6704output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
6705 rtx operand;
6706 int really_return;
84ed5e79 6707 int reverse;
ff9940b0
RE
6708{
6709 char instr[100];
6710 int reg, live_regs = 0;
46406379 6711 int volatile_func = arm_volatile_func ();
e2c671ba 6712
d5b7b3ae
RE
6713 /* If a function is naked, don't use the "return" insn. */
6714 if (arm_naked_function_p (current_function_decl))
6715 return "";
6716
e2c671ba 6717 return_used_this_function = 1;
d5b7b3ae 6718
62b10bbc 6719 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6720 {
e2c671ba 6721 /* If this function was declared non-returning, and we have found a tail
3a5a4282
PB
6722 call, then we have to trust that the called function won't return. */
6723 if (really_return)
6724 {
6725 rtx ops[2];
6726
6727 /* Otherwise, trap an attempted return by aborting. */
6728 ops[0] = operand;
6729 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
6730 : "abort");
6731 assemble_external_libcall (ops[1]);
6732 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
6733 }
6734
e2c671ba
RE
6735 return "";
6736 }
6737
5895f793 6738 if (current_function_calls_alloca && !really_return)
62b10bbc 6739 abort ();
d5b7b3ae 6740
f3bb6135 6741 for (reg = 0; reg <= 10; reg++)
5895f793 6742 if (regs_ever_live[reg] && !call_used_regs[reg])
ff9940b0
RE
6743 live_regs++;
6744
5895f793
RE
6745 if (!TARGET_APCS_FRAME
6746 && !frame_pointer_needed
d5b7b3ae 6747 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6748 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6749 live_regs++;
6750
5895f793 6751 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6752 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6753 live_regs++;
6754
0616531f 6755 if (live_regs || regs_ever_live[LR_REGNUM])
ff9940b0
RE
6756 live_regs++;
6757
6758 if (frame_pointer_needed)
6759 live_regs += 4;
6760
3a5a4282
PB
6761 /* On some ARM architectures it is faster to use LDR rather than LDM to
6762 load a single register. On other architectures, the cost is the same. */
6763 if (live_regs == 1
6764 && regs_ever_live[LR_REGNUM]
5895f793 6765 && !really_return)
d5b7b3ae
RE
6766 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
6767 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
6768 else if (live_regs == 1
6769 && regs_ever_live[LR_REGNUM]
d5b7b3ae
RE
6770 && TARGET_APCS_32)
6771 output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
6772 : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
3a5a4282 6773 else if (live_regs)
ff9940b0 6774 {
5895f793 6775 if (!regs_ever_live[LR_REGNUM])
ff9940b0 6776 live_regs++;
f3bb6135 6777
ff9940b0 6778 if (frame_pointer_needed)
84ed5e79
RE
6779 strcpy (instr,
6780 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
ff9940b0 6781 else
84ed5e79
RE
6782 strcpy (instr,
6783 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
6784
6785 for (reg = 0; reg <= 10; reg++)
62b10bbc 6786 if (regs_ever_live[reg]
5895f793
RE
6787 && (!call_used_regs[reg]
6788 || (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6789 && reg == PIC_OFFSET_TABLE_REGNUM)))
ff9940b0 6790 {
1d5473cb 6791 strcat (instr, "%|");
ff9940b0
RE
6792 strcat (instr, reg_names[reg]);
6793 if (--live_regs)
6794 strcat (instr, ", ");
6795 }
f3bb6135 6796
ff9940b0
RE
6797 if (frame_pointer_needed)
6798 {
1d5473cb 6799 strcat (instr, "%|");
ff9940b0
RE
6800 strcat (instr, reg_names[11]);
6801 strcat (instr, ", ");
1d5473cb 6802 strcat (instr, "%|");
ff9940b0
RE
6803 strcat (instr, reg_names[13]);
6804 strcat (instr, ", ");
1d5473cb 6805 strcat (instr, "%|");
5895f793 6806 strcat (instr, TARGET_INTERWORK || (!really_return)
62b10bbc 6807 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
ff9940b0
RE
6808 }
6809 else
1d5473cb 6810 {
5895f793 6811 if (!TARGET_APCS_FRAME
d5b7b3ae 6812 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6813 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6814 {
6815 strcat (instr, "%|");
6816 strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
6817 strcat (instr, ", ");
6818 }
6819
1d5473cb 6820 strcat (instr, "%|");
d5b7b3ae 6821
6cfc7210 6822 if (TARGET_INTERWORK && really_return)
62b10bbc 6823 strcat (instr, reg_names[IP_REGNUM]);
da6558fd 6824 else
62b10bbc 6825 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
1d5473cb 6826 }
d5b7b3ae 6827
2b835d68 6828 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 6829 output_asm_insn (instr, &operand);
da6558fd 6830
6cfc7210 6831 if (TARGET_INTERWORK && really_return)
da6558fd
NC
6832 {
6833 strcpy (instr, "bx%?");
6834 strcat (instr, reverse ? "%D0" : "%d0");
6835 strcat (instr, "\t%|");
6836 strcat (instr, frame_pointer_needed ? "lr" : "ip");
6837
5895f793 6838 output_asm_insn (instr, &operand);
da6558fd 6839 }
ff9940b0
RE
6840 }
6841 else if (really_return)
6842 {
6cfc7210 6843 if (TARGET_INTERWORK)
25b1c156 6844 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
b111229a
RE
6845 else
6846 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
6847 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
da6558fd 6848
5895f793 6849 output_asm_insn (instr, &operand);
ff9940b0 6850 }
f3bb6135 6851
ff9940b0
RE
6852 return "";
6853}
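/* Typical results (illustrative): with a frame pointer the return is
   "ldmea fp, {..., fp, sp, pc}"; without one it is
   "ldmfd sp!, {..., pc}"; under TARGET_INTERWORK the return address
   is popped into lr or ip instead and the sequence finishes with a
   "bx".  */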
6854
e82ea128
DE
6855/* Return nonzero if optimizing and the current function is volatile.
6856 Such functions never return, and many memory cycles can be saved
6857 by not storing register values that will never be needed again.
6858 This optimization was added to speed up context switching in a
6354dc9b 6859 kernel application. */
e2c671ba
RE
6860int
6861arm_volatile_func ()
6862{
6354dc9b
NC
6863 return (optimize > 0
6864 && current_function_nothrow
46406379 6865 && TREE_THIS_VOLATILE (current_function_decl));
e2c671ba
RE
6866}
6867
ef179a26
NC
6868/* Write the function name into the code section, directly preceding
6869 the function prologue.
6870
6871 Code will be output similar to this:
6872 t0
6873 .ascii "arm_poke_function_name", 0
6874 .align
6875 t1
6876 .word 0xff000000 + (t1 - t0)
6877 arm_poke_function_name
6878 mov ip, sp
6879 stmfd sp!, {fp, ip, lr, pc}
6880 sub fp, ip, #4
6881
6882 When performing a stack backtrace, code can inspect the value
6883 of 'pc' stored at 'fp' + 0. If the trace function then looks
6884 at location pc - 12 and the top 8 bits are set, then we know
6885 that there is a function name embedded immediately preceding this
 6886 location, and that it has length ((pc[-3]) & 0xff000000).
6887
6888 We assume that pc is declared as a pointer to an unsigned long.
6889
6890 It is of no benefit to output the function name if we are assembling
6891 a leaf function. These function types will not contain a stack
6892 backtrace structure, therefore it is not possible to determine the
6893 function name. */
6894
6895void
6896arm_poke_function_name (stream, name)
6897 FILE * stream;
6898 char * name;
6899{
6900 unsigned long alignlength;
6901 unsigned long length;
6902 rtx x;
6903
d5b7b3ae
RE
6904 length = strlen (name) + 1;
6905 alignlength = ROUND_UP (length);
ef179a26 6906
949d79eb 6907 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 6908 ASM_OUTPUT_ALIGN (stream, 2);
e5951263 6909 x = GEN_INT (HOST_UINT (0xff000000) + alignlength);
ef179a26
NC
6910 ASM_OUTPUT_INT (stream, x);
6911}
6912
ff9940b0
RE
6913/* The amount of stack adjustment that happens here, in output_return and in
6914 output_epilogue must be exactly the same as was calculated during reload,
6915 or things will point to the wrong place. The only time we can safely
6916 ignore this constraint is when a function has no arguments on the stack,
 6917 no stack frame requirement and no live registers except for `lr'. If we
6918 can guarantee that by making all function calls into tail calls and that
6919 lr is not clobbered in any other way, then there is no need to push lr
6354dc9b 6920 onto the stack. */
cce8749e 6921void
d5b7b3ae 6922output_arm_prologue (f, frame_size)
6cfc7210 6923 FILE * f;
cce8749e
CH
6924 int frame_size;
6925{
f3bb6135 6926 int reg, live_regs_mask = 0;
46406379 6927 int volatile_func = arm_volatile_func ();
cce8749e 6928
cce8749e
CH
6929 /* Nonzero if we must stuff some register arguments onto the stack as if
6930 they were passed there. */
6931 int store_arg_regs = 0;
6932
abaa26e5 6933 if (arm_ccfsm_state || arm_target_insn)
6354dc9b 6934 abort (); /* Sanity check. */
31fdb4d5
DE
6935
6936 if (arm_naked_function_p (current_function_decl))
6937 return;
6938
ff9940b0 6939 return_used_this_function = 0;
ff9940b0 6940
dd18ae56
NC
6941 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
6942 current_function_args_size,
6943 current_function_pretend_args_size, frame_size);
6944 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
6945 frame_pointer_needed,
6946 current_function_anonymous_args);
cce8749e 6947
e2c671ba 6948 if (volatile_func)
dd18ae56 6949 asm_fprintf (f, "\t%@ Volatile function.\n");
e2c671ba 6950
68dfd979
NC
6951 if (current_function_needs_context)
6952 asm_fprintf (f, "\t%@ Nested function.\n");
6953
cce8749e
CH
6954 if (current_function_anonymous_args && current_function_pretend_args_size)
6955 store_arg_regs = 1;
6956
f3bb6135 6957 for (reg = 0; reg <= 10; reg++)
5895f793 6958 if (regs_ever_live[reg] && !call_used_regs[reg])
cce8749e
CH
6959 live_regs_mask |= (1 << reg);
6960
5895f793
RE
6961 if (!TARGET_APCS_FRAME
6962 && !frame_pointer_needed
d5b7b3ae 6963 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6964 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6965 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6966
5895f793 6967 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6968 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6969 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6970
ff9940b0 6971 if (frame_pointer_needed)
e2c671ba 6972 live_regs_mask |= 0xD800;
62b10bbc 6973 else if (regs_ever_live[LR_REGNUM])
ff9940b0 6974 {
62b10bbc 6975 live_regs_mask |= 1 << LR_REGNUM;
cce8749e
CH
6976 }
6977
0616531f
RE
6978 if (live_regs_mask)
6979 /* If a di mode load/store multiple is used, and the base register
6980 is r3, then r4 can become an ever live register without lr
6981 doing so, in this case we need to push lr as well, or we
6982 will fail to get a proper return. */
6983 live_regs_mask |= 1 << LR_REGNUM;
32de079a
RE
6984
6985#ifdef AOF_ASSEMBLER
6986 if (flag_pic)
dd18ae56 6987 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 6988#endif
f3bb6135 6989}
cce8749e 6990
cd2b33d0 6991const char *
0616531f
RE
6992arm_output_epilogue (really_return)
6993 int really_return;
cce8749e 6994{
949d79eb
RE
6995 int reg;
6996 int live_regs_mask = 0;
6354dc9b 6997 /* If we need this, then it will always be at least this much. */
b111229a 6998 int floats_offset = 12;
cce8749e 6999 rtx operands[3];
949d79eb 7000 int frame_size = get_frame_size ();
d5b7b3ae
RE
7001 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7002 FILE * f = asm_out_file;
e5951263 7003 int volatile_func = arm_volatile_func ();
d5b7b3ae 7004 int return_regnum;
cce8749e 7005
b36ba79f 7006 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 7007 return "";
cce8749e 7008
31fdb4d5
DE
7009 /* Naked functions don't have epilogues. */
7010 if (arm_naked_function_p (current_function_decl))
949d79eb 7011 return "";
31fdb4d5 7012
d5b7b3ae
RE
7013 /* If we are throwing an exception, the address we want to jump to is in
7014 R1; otherwise, it's in LR. */
7015 return_regnum = eh_ofs ? 2 : LR_REGNUM;
7016
0616531f
RE
7017 /* If we are throwing an exception, then we really must be doing a return,
7018 so we can't tail-call. */
5895f793 7019 if (eh_ofs && !really_return)
0616531f
RE
 7020 abort ();
7021
e2c671ba 7022 /* A volatile function should never return. Call abort. */
c11145f6 7023 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 7024 {
86efdc8e 7025 rtx op;
ed0e6530 7026 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 7027 assemble_external_libcall (op);
e2c671ba 7028 output_asm_insn ("bl\t%a0", &op);
949d79eb 7029 return "";
e2c671ba
RE
7030 }
7031
f3bb6135 7032 for (reg = 0; reg <= 10; reg++)
5895f793 7033 if (regs_ever_live[reg] && !call_used_regs[reg])
cce8749e 7034 {
ff9940b0
RE
7035 live_regs_mask |= (1 << reg);
7036 floats_offset += 4;
cce8749e
CH
7037 }
7038
d5b7b3ae 7039 /* Handle the frame pointer as a special case. */
5895f793
RE
7040 if (!TARGET_APCS_FRAME
7041 && !frame_pointer_needed
d5b7b3ae 7042 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 7043 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
7044 {
7045 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
7046 floats_offset += 4;
7047 }
7048
ed0e6530
PB
7049 /* If we aren't loading the PIC register, don't stack it even though it may
7050 be live. */
5895f793 7051 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 7052 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
7053 {
7054 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
7055 floats_offset += 4;
7056 }
7057
ff9940b0 7058 if (frame_pointer_needed)
cce8749e 7059 {
b111229a
RE
7060 if (arm_fpu_arch == FP_SOFT2)
7061 {
d5b7b3ae 7062 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 7063 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7064 {
7065 floats_offset += 12;
dd18ae56
NC
7066 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7067 reg, FP_REGNUM, floats_offset);
b111229a
RE
7068 }
7069 }
7070 else
7071 {
d5b7b3ae 7072 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7073
d5b7b3ae 7074 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 7075 {
5895f793 7076 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7077 {
7078 floats_offset += 12;
6cfc7210 7079
6354dc9b 7080 /* We can't unstack more than four registers at once. */
b111229a
RE
7081 if (start_reg - reg == 3)
7082 {
dd18ae56
NC
7083 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7084 reg, FP_REGNUM, floats_offset);
b111229a
RE
7085 start_reg = reg - 1;
7086 }
7087 }
7088 else
7089 {
7090 if (reg != start_reg)
dd18ae56
NC
7091 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7092 reg + 1, start_reg - reg,
7093 FP_REGNUM, floats_offset);
b111229a
RE
7094 start_reg = reg - 1;
7095 }
7096 }
7097
7098 /* Just in case the last register checked also needs unstacking. */
7099 if (reg != start_reg)
dd18ae56
NC
7100 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7101 reg + 1, start_reg - reg,
7102 FP_REGNUM, floats_offset);
b111229a 7103 }
da6558fd 7104
6cfc7210 7105 if (TARGET_INTERWORK)
b111229a
RE
7106 {
7107 live_regs_mask |= 0x6800;
dd18ae56 7108 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
d5b7b3ae
RE
7109 if (eh_ofs)
7110 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7111 REGNO (eh_ofs));
0616531f
RE
7112 if (really_return)
7113 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
d5b7b3ae 7114 }
5895f793 7115 else if (eh_ofs || !really_return)
d5b7b3ae
RE
7116 {
7117 live_regs_mask |= 0x6800;
7118 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
0616531f
RE
7119 if (eh_ofs)
7120 {
7121 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7122 REGNO (eh_ofs));
7123 /* Even in 26-bit mode we do a mov (rather than a movs)
7124 because we don't have the PSR bits set in the
7125 address. */
7126 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7127 }
b111229a
RE
7128 }
7129 else
7130 {
7131 live_regs_mask |= 0xA800;
dd18ae56 7132 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
7133 TARGET_APCS_32 ? FALSE : TRUE);
7134 }
cce8749e
CH
7135 }
7136 else
7137 {
d2288d8d 7138 /* Restore stack pointer if necessary. */
56636818 7139 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
7140 {
7141 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
7142 operands[2] = GEN_INT (frame_size
7143 + current_function_outgoing_args_size);
d2288d8d
TG
7144 output_add_immediate (operands);
7145 }
7146
b111229a
RE
7147 if (arm_fpu_arch == FP_SOFT2)
7148 {
d5b7b3ae 7149 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 7150 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
7151 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7152 reg, SP_REGNUM);
b111229a
RE
7153 }
7154 else
7155 {
d5b7b3ae 7156 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 7157
d5b7b3ae 7158 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 7159 {
5895f793 7160 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7161 {
7162 if (reg - start_reg == 3)
7163 {
dd18ae56
NC
7164 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7165 start_reg, SP_REGNUM);
b111229a
RE
7166 start_reg = reg + 1;
7167 }
7168 }
7169 else
7170 {
7171 if (reg != start_reg)
dd18ae56
NC
7172 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7173 start_reg, reg - start_reg,
7174 SP_REGNUM);
6cfc7210 7175
b111229a
RE
7176 start_reg = reg + 1;
7177 }
7178 }
7179
7180 /* Just in case the last register checked also needs unstacking. */
7181 if (reg != start_reg)
dd18ae56
NC
7182 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7183 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
7184 }
7185
62b10bbc 7186 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 7187 {
6cfc7210 7188 if (TARGET_INTERWORK)
b111229a 7189 {
0616531f 7190 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2 7191
d5b7b3ae
RE
7192 /* Handle LR on its own. */
7193 if (live_regs_mask == (1 << LR_REGNUM))
7194 {
7195 if (eh_ofs)
7196 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7197 SP_REGNUM);
7198 else
7199 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7200 SP_REGNUM);
7201 }
7202 else if (live_regs_mask != 0)
7203 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7204 FALSE);
7205
7206 if (eh_ofs)
7207 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7208 REGNO (eh_ofs));
7209
0616531f
RE
7210 if (really_return)
7211 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
b111229a 7212 }
d5b7b3ae
RE
7213 else if (eh_ofs)
7214 {
7215 if (live_regs_mask == 0)
7216 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7217 else
7218 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
7219 live_regs_mask | (1 << LR_REGNUM), FALSE);
7220
7221 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7222 REGNO (eh_ofs));
7223 /* Jump to the target; even in 26-bit mode. */
7224 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7225 }
5895f793 7226 else if (TARGET_APCS_32 && live_regs_mask == 0 && !really_return)
0616531f
RE
7227 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7228 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
d5b7b3ae 7229 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
5895f793 7230 else if (!really_return)
0616531f
RE
7231 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7232 live_regs_mask | (1 << LR_REGNUM), FALSE);
32de079a 7233 else
d5b7b3ae
RE
7234 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7235 live_regs_mask | (1 << PC_REGNUM),
32de079a 7236 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
7237 }
7238 else
7239 {
62b10bbc 7240 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 7241 {
6354dc9b 7242 /* Restore the integer regs, and the return address into lr. */
0616531f 7243 live_regs_mask |= 1 << LR_REGNUM;
32de079a 7244
d5b7b3ae
RE
7245 if (live_regs_mask == (1 << LR_REGNUM))
7246 {
7247 if (eh_ofs)
7248 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7249 SP_REGNUM);
7250 else
7251 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7252 SP_REGNUM);
7253 }
7254 else if (live_regs_mask != 0)
7255 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7256 FALSE);
cce8749e 7257 }
b111229a 7258
cce8749e
CH
7259 if (current_function_pretend_args_size)
7260 {
6354dc9b 7261 /* Unwind the pre-pushed regs. */
cce8749e 7262 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 7263 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
7264 output_add_immediate (operands);
7265 }
d5b7b3ae
RE
7266
7267 if (eh_ofs)
7268 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7269 REGNO (eh_ofs));
0616531f
RE
7270
7271 if (really_return)
7272 {
7273 /* And finally, go home. */
7274 if (TARGET_INTERWORK)
7275 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7276 else if (TARGET_APCS_32 || eh_ofs)
7277 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7278 else
7279 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7280 }
cce8749e
CH
7281 }
7282 }
f3bb6135 7283
949d79eb
RE
7284 return "";
7285}
7286
7287void
eb3921e8 7288output_func_epilogue (frame_size)
949d79eb
RE
7289 int frame_size;
7290{
d5b7b3ae
RE
7291 if (TARGET_THUMB)
7292 {
7293 /* ??? Probably not safe to set this here, since it assumes that a
7294 function will be emitted as assembly immediately after we generate
7295 RTL for it. This does not happen for inline functions. */
7296 return_used_this_function = 0;
7297 }
7298 else
7299 {
7300 if (use_return_insn (FALSE)
7301 && return_used_this_function
7302 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 7303 && !frame_pointer_needed)
d5b7b3ae 7304 abort ();
f3bb6135 7305
d5b7b3ae
RE
7306 /* Reset the ARM-specific per-function variables. */
7307 current_function_anonymous_args = 0;
7308 after_arm_reorg = 0;
7309 }
f3bb6135 7310}
e2c671ba 7311
2c849145
JM
7312/* Generate and emit an insn that we will recognize as a push_multi.
7313 Unfortunately, since this insn does not reflect very well the actual
7314 semantics of the operation, we need to annotate the insn for the benefit
7315 of DWARF2 frame unwind information. */
2c849145 7316static rtx
e2c671ba
RE
7317emit_multi_reg_push (mask)
7318 int mask;
7319{
7320 int num_regs = 0;
7321 int i, j;
7322 rtx par;
2c849145 7323 rtx dwarf;
87e27392 7324 int dwarf_par_index;
2c849145 7325 rtx tmp, reg;
e2c671ba 7326
d5b7b3ae 7327 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 7328 if (mask & (1 << i))
5895f793 7329 num_regs++;
e2c671ba
RE
7330
7331 if (num_regs == 0 || num_regs > 16)
7332 abort ();
7333
87e27392
NC
7334 /* For the body of the insn we are going to generate an UNSPEC in
7335 parallel with several USEs. This allows the insn to be recognised
7336 by the push_multi pattern in the arm.md file. The insn looks
7337 something like this:
7338
7339 (parallel [
7340 (set (mem:BLK (pre_dec:BLK (reg:SI sp))) (unspec:BLK [(reg:SI r4)] 2))
7341 (use (reg:SI 11 fp))
7342 (use (reg:SI 12 ip))
7343 (use (reg:SI 14 lr))
7344 (use (reg:SI 15 pc))
7345 ])
7346
7347 For the frame note however, we try to be more explicit and actually
7348 show each register being stored into the stack frame, plus a (single)
7349 decrement of the stack pointer. We do it this way in order to be
7350 friendly to the stack unwinding code, which only wants to see a single
7351 stack decrement per instruction. The RTL we generate for the note looks
7352 something like this:
7353
7354 (sequence [
7355 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7356 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7357 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7358 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7359 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7360 (set (mem:SI (plus:SI (reg:SI sp) (const_int 16))) (reg:SI pc))
7361 ])
7362
7363 This sequence is used both by the code to support stack unwinding for
7364 exception handlers and the code to generate dwarf2 frame debugging. */
7365
43cffd11 7366 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
87e27392 7367 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_regs + 1));
2c849145 7368 RTX_FRAME_RELATED_P (dwarf) = 1;
87e27392 7369 dwarf_par_index = 1;
e2c671ba 7370
d5b7b3ae 7371 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba
RE
7372 {
7373 if (mask & (1 << i))
7374 {
2c849145
JM
7375 reg = gen_rtx_REG (SImode, i);
7376
e2c671ba 7377 XVECEXP (par, 0, 0)
43cffd11
RE
7378 = gen_rtx_SET (VOIDmode,
7379 gen_rtx_MEM (BLKmode,
7380 gen_rtx_PRE_DEC (BLKmode,
7381 stack_pointer_rtx)),
7382 gen_rtx_UNSPEC (BLKmode,
2c849145 7383 gen_rtvec (1, reg),
43cffd11 7384 2));
2c849145
JM
7385
7386 tmp = gen_rtx_SET (VOIDmode,
87e27392 7387 gen_rtx_MEM (SImode, stack_pointer_rtx),
2c849145
JM
7388 reg);
7389 RTX_FRAME_RELATED_P (tmp) = 1;
87e27392
NC
7390 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7391 dwarf_par_index ++;
2c849145 7392
e2c671ba
RE
7393 break;
7394 }
7395 }
7396
7397 for (j = 1, i++; j < num_regs; i++)
7398 {
7399 if (mask & (1 << i))
7400 {
2c849145
JM
7401 reg = gen_rtx_REG (SImode, i);
7402
7403 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7404
7405 tmp = gen_rtx_SET (VOIDmode,
7406 gen_rtx_MEM (SImode,
87e27392
NC
7407 gen_rtx_PLUS (SImode,
7408 stack_pointer_rtx,
7409 GEN_INT (4 * j))),
2c849145
JM
7410 reg);
7411 RTX_FRAME_RELATED_P (tmp) = 1;
87e27392 7412 XVECEXP (dwarf, 0, dwarf_par_index ++) = tmp;
2c849145 7413
e2c671ba
RE
7414 j++;
7415 }
7416 }
b111229a 7417
2c849145 7418 par = emit_insn (par);
87e27392
NC
7419
7420 tmp = gen_rtx_SET (SImode,
7421 stack_pointer_rtx,
7422 gen_rtx_PLUS (SImode,
7423 stack_pointer_rtx,
7424 GEN_INT (-4 * num_regs)));
7425 RTX_FRAME_RELATED_P (tmp) = 1;
7426 XVECEXP (dwarf, 0, 0) = tmp;
7427
2c849145
JM
7428 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7429 REG_NOTES (par));
7430 return par;
b111229a
RE
7431}
7432
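/* Generate and emit an insn that we will recognize as a store-multiple
   of COUNT floating point registers starting at BASE_REG (a sketch of
   the intent; the pattern itself is matched in arm.md).  As with
   emit_multi_reg_push, the insn is annotated for the benefit of DWARF2
   frame unwind information.  */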
2c849145 7433static rtx
b111229a
RE
7434emit_sfm (base_reg, count)
7435 int base_reg;
7436 int count;
7437{
7438 rtx par;
2c849145
JM
7439 rtx dwarf;
7440 rtx tmp, reg;
b111229a
RE
7441 int i;
7442
43cffd11 7443 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
7444 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7445 RTX_FRAME_RELATED_P (dwarf) = 1;
7446
7447 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
7448
7449 XVECEXP (par, 0, 0)
7450 = gen_rtx_SET (VOIDmode,
7451 gen_rtx_MEM (BLKmode,
7452 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7453 gen_rtx_UNSPEC (BLKmode,
2c849145 7454 gen_rtvec (1, reg),
43cffd11 7455 2));
2c849145
JM
7456 tmp
7457 = gen_rtx_SET (VOIDmode,
7458 gen_rtx_MEM (XFmode,
7459 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7460 reg);
7461 RTX_FRAME_RELATED_P (tmp) = 1;
7462 XVECEXP (dwarf, 0, count - 1) = tmp;
7463
b111229a 7464 for (i = 1; i < count; i++)
2c849145
JM
7465 {
7466 reg = gen_rtx_REG (XFmode, base_reg++);
7467 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7468
7469 tmp = gen_rtx_SET (VOIDmode,
7470 gen_rtx_MEM (XFmode,
7471 gen_rtx_PRE_DEC (BLKmode,
7472 stack_pointer_rtx)),
7473 reg);
7474 RTX_FRAME_RELATED_P (tmp) = 1;
7475 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7476 }
b111229a 7477
2c849145
JM
7478 par = emit_insn (par);
7479 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7480 REG_NOTES (par));
7481 return par;
e2c671ba
RE
7482}
7483
7484void
7485arm_expand_prologue ()
7486{
7487 int reg;
56636818
JL
7488 rtx amount = GEN_INT (-(get_frame_size ()
7489 + current_function_outgoing_args_size));
e2c671ba
RE
7490 int live_regs_mask = 0;
7491 int store_arg_regs = 0;
949d79eb
RE
7492 /* If this function doesn't return, then there is no need to push
7493 the call-saved regs. */
46406379 7494 int volatile_func = arm_volatile_func ();
2c849145 7495 rtx insn;
68dfd979
NC
7496 rtx ip_rtx;
7497 int fp_offset = 0;
d19fb8e3 7498
e2c671ba 7499
31fdb4d5
DE
7500 /* Naked functions don't have prologues. */
7501 if (arm_naked_function_p (current_function_decl))
7502 return;
7503
e2c671ba
RE
7504 if (current_function_anonymous_args && current_function_pretend_args_size)
7505 store_arg_regs = 1;
7506
5895f793 7507 if (!volatile_func)
6ed30148
RE
7508 {
7509 for (reg = 0; reg <= 10; reg++)
5895f793 7510 if (regs_ever_live[reg] && !call_used_regs[reg])
6ed30148
RE
7511 live_regs_mask |= 1 << reg;
7512
5895f793
RE
7513 if (!TARGET_APCS_FRAME
7514 && !frame_pointer_needed
d5b7b3ae 7515 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 7516 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
7517 live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7518
6ed30148
RE
7519 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7520 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
e2c671ba 7521
62b10bbc
NC
7522 if (regs_ever_live[LR_REGNUM])
7523 live_regs_mask |= 1 << LR_REGNUM;
6ed30148 7524 }
e2c671ba 7525
68dfd979
NC
7526 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
7527
e2c671ba
RE
7528 if (frame_pointer_needed)
7529 {
68dfd979
NC
7530 if (current_function_needs_context)
7531 {
7532 /* The static chain register is the same as the IP register, which is
7533 used as a scratch register during stack frame creation. To get
7534 around this we need to find somewhere to store IP
7535 whilst the frame is being created. We try the following
7536 places in order:
7537
7538 1. An unused argument register.
7539 2. A slot on the stack above the frame. (This only
7540 works if the function is not a varargs function).
7541
7542 If neither of these places is available, we abort (for now). */
7543 if (regs_ever_live[3] == 0)
7544 {
7545 insn = gen_rtx_REG (SImode, 3);
7546 insn = gen_rtx_SET (SImode, insn, ip_rtx);
7547 insn = emit_insn (insn);
7548 RTX_FRAME_RELATED_P (insn) = 1;
7549 }
7550 else if (current_function_pretend_args_size == 0)
7551 {
7552 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
7553 insn = gen_rtx_MEM (SImode, insn);
7554 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
7555 insn = emit_insn (insn);
7556 RTX_FRAME_RELATED_P (insn) = 1;
7557 fp_offset = 4;
7558 }
7559 else
7560 /* FIXME - the way to handle this situation is to allow
7561 the pretend args to be dumped onto the stack, then
7562 reuse r3 to save IP. This would involve moving the
7563 copying of SP into IP until after the pretend args
7564 have been dumped, but this is not too hard. */
7565 error ("Unable to find a temporary location for static chain register");
7566 }
7567
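      /* 0xD800 == (1 << 11) | (1 << 12) | (1 << 14) | (1 << 15):
	 save FP, IP, LR and PC.  */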
e2c671ba 7568 live_regs_mask |= 0xD800;
68dfd979
NC
7569
7570 if (fp_offset)
7571 {
7572 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
7573 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7574 }
7575 else
7576 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
7577
7578 insn = emit_insn (insn);
2c849145 7579 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7580 }
7581
7582 if (current_function_pretend_args_size)
7583 {
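      /* (0xf0 >> (size / 4)) & 0xf selects the argument registers holding
	 the pretend args; for example a size of 8 gives
	 (0xf0 >> 2) & 0xf == 0xc, a push of {r2, r3}.  */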
7584 if (store_arg_regs)
2c849145
JM
7585 insn = emit_multi_reg_push
7586 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
e2c671ba 7587 else
2c849145
JM
7588 insn = emit_insn
7589 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7590 GEN_INT (-current_function_pretend_args_size)));
7591 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7592 }
7593
7594 if (live_regs_mask)
7595 {
7596 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 7597 we won't get a proper return. */
62b10bbc 7598 live_regs_mask |= 1 << LR_REGNUM;
2c849145
JM
7599 insn = emit_multi_reg_push (live_regs_mask);
7600 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7601 }
7602
d5b7b3ae
RE
7603 /* For now the integer regs are still pushed in output_arm_epilogue (). */
7604
5895f793 7605 if (!volatile_func)
b111229a
RE
7606 {
7607 if (arm_fpu_arch == FP_SOFT2)
7608 {
d5b7b3ae 7609 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
5895f793 7610 if (regs_ever_live[reg] && !call_used_regs[reg])
2c849145
JM
7611 {
7612 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7613 insn = gen_rtx_MEM (XFmode, insn);
7614 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7615 gen_rtx_REG (XFmode, reg)));
7616 RTX_FRAME_RELATED_P (insn) = 1;
7617 }
b111229a
RE
7618 }
7619 else
7620 {
d5b7b3ae 7621 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7622
d5b7b3ae 7623 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a 7624 {
5895f793 7625 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7626 {
7627 if (start_reg - reg == 3)
7628 {
2c849145
JM
7629 insn = emit_sfm (reg, 4);
7630 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
7631 start_reg = reg - 1;
7632 }
7633 }
7634 else
7635 {
7636 if (start_reg != reg)
2c849145
JM
7637 {
7638 insn = emit_sfm (reg + 1, start_reg - reg);
7639 RTX_FRAME_RELATED_P (insn) = 1;
7640 }
b111229a
RE
7641 start_reg = reg - 1;
7642 }
7643 }
7644
7645 if (start_reg != reg)
2c849145
JM
7646 {
7647 insn = emit_sfm (reg + 1, start_reg - reg);
7648 RTX_FRAME_RELATED_P (insn) = 1;
7649 }
b111229a
RE
7650 }
7651 }
e2c671ba
RE
7652
7653 if (frame_pointer_needed)
2c849145 7654 {
68dfd979
NC
7655 insn = GEN_INT (-(4 + current_function_pretend_args_size + fp_offset));
7656 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
2c849145 7657 RTX_FRAME_RELATED_P (insn) = 1;
68dfd979
NC
7658
7659 if (current_function_needs_context)
7660 {
7661 /* Recover the static chain register. */
7662 if (regs_ever_live [3] == 0)
7663 {
7664 insn = gen_rtx_REG (SImode, 3);
7665 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7666 insn = emit_insn (insn);
7667 RTX_FRAME_RELATED_P (insn) = 1;
7668 }
7669 else /* if (current_function_pretend_args_size == 0) */
7670 {
7671 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
7672 insn = gen_rtx_MEM (SImode, insn);
7673 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7674 insn = emit_insn (insn);
7675 RTX_FRAME_RELATED_P (insn) = 1;
7676 }
7677 }
2c849145 7678 }
e2c671ba
RE
7679
7680 if (amount != const0_rtx)
7681 {
2c849145
JM
7682 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7683 amount));
7684 RTX_FRAME_RELATED_P (insn) = 1;
e04c2d6c
RE
7685
7686 /* If the frame pointer is needed, emit a special barrier that
7687 will prevent the scheduler from moving stores to the frame
7688 before the stack adjustment. */
7689 if (frame_pointer_needed)
7690 {
7691 rtx unspec = gen_rtx_UNSPEC (SImode,
7692 gen_rtvec (2, stack_pointer_rtx,
7693 hard_frame_pointer_rtx), 4);
7694
7695 emit_insn (gen_rtx_CLOBBER (VOIDmode,
7696 gen_rtx_MEM (BLKmode, unspec)));
7697 }
e2c671ba
RE
7698 }
7699
7700 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
7701 the call to mcount. Similarly if the user has requested no
7702 scheduling in the prolog. */
7703 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba
RE
7704 emit_insn (gen_blockage ());
7705}
cce8749e 7706\f
9997d19d
RE
7707 /* If CODE is 'd', then X is a condition operand and the instruction
7708 should only be executed if the condition is true.
ddd5a7c1 7709 If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
7710 should only be executed if the condition is false: however, if the mode
7711 of the comparison is CCFPEmode, then always execute the instruction -- we
7712 do this because in these circumstances !GE does not necessarily imply LT;
7713 in these cases the instruction pattern will take care to make sure that
7714 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 7715 doing this instruction unconditionally.
9997d19d
RE
7716 If CODE is 'N' then X is a floating point operand that must be negated
7717 before output.
7718 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7719 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
7720
7721void
7722arm_print_operand (stream, x, code)
62b10bbc 7723 FILE * stream;
9997d19d
RE
7724 rtx x;
7725 int code;
7726{
7727 switch (code)
7728 {
7729 case '@':
f3139301 7730 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
7731 return;
7732
d5b7b3ae
RE
7733 case '_':
7734 fputs (user_label_prefix, stream);
7735 return;
7736
9997d19d 7737 case '|':
f3139301 7738 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
7739 return;
7740
7741 case '?':
7742 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
7743 {
7744 if (TARGET_THUMB || current_insn_predicate != NULL)
7745 abort ();
7746
7747 fputs (arm_condition_codes[arm_current_cc], stream);
7748 }
7749 else if (current_insn_predicate)
7750 {
7751 enum arm_cond_code code;
7752
7753 if (TARGET_THUMB)
7754 abort ();
7755
7756 code = get_arm_condition_code (current_insn_predicate);
7757 fputs (arm_condition_codes[code], stream);
7758 }
9997d19d
RE
7759 return;
7760
7761 case 'N':
7762 {
7763 REAL_VALUE_TYPE r;
7764 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7765 r = REAL_VALUE_NEGATE (r);
7766 fprintf (stream, "%s", fp_const_from_val (&r));
7767 }
7768 return;
7769
7770 case 'B':
7771 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
7772 {
7773 HOST_WIDE_INT val;
5895f793 7774 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 7775 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7776 }
9997d19d
RE
7777 else
7778 {
7779 putc ('~', stream);
7780 output_addr_const (stream, x);
7781 }
7782 return;
7783
7784 case 'i':
7785 fprintf (stream, "%s", arithmetic_instr (x, 1));
7786 return;
7787
7788 case 'I':
7789 fprintf (stream, "%s", arithmetic_instr (x, 0));
7790 return;
7791
7792 case 'S':
7793 {
7794 HOST_WIDE_INT val;
5895f793 7795 const char * shift = shift_op (x, &val);
9997d19d 7796
e2c671ba
RE
7797 if (shift)
7798 {
5895f793 7799 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
7800 if (val == -1)
7801 arm_print_operand (stream, XEXP (x, 1), 0);
7802 else
4bc74ece
NC
7803 {
7804 fputc ('#', stream);
36ba9cb8 7805 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7806 }
e2c671ba 7807 }
9997d19d
RE
7808 }
7809 return;
7810
d5b7b3ae
RE
7811 /* An explanation of the 'Q', 'R' and 'H' register operands:
7812
7813 In a pair of registers containing a DI or DF value the 'Q'
7814 operand returns the register number of the register containing
8815 the least significant part of the value. The 'R' operand returns
7816 the register number of the register containing the most
7817 significant part of the value.
7818
7819 The 'H' operand returns the higher of the two register numbers.
7820 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8821 same as the 'Q' operand, since the most significant part of the
7822 value is held in the lower number register. The reverse is true
7823 on systems where WORDS_BIG_ENDIAN is false.
7824
7825 The purpose of these operands is to distinguish between cases
7826 where the endian-ness of the values is important (for example
7827 when they are added together), and cases where the endian-ness
7828 is irrelevant, but the order of register operations is important.
7829 For example when loading a value from memory into a register
7830 pair, the endian-ness does not matter. Provided that the value
7831 from the lower memory address is put into the lower numbered
7832 register, and the value from the higher address is put into the
7833 higher numbered register, the load will work regardless of whether
7834 the value being loaded is big-wordian or little-wordian. The
7835 order of the two register loads can matter however, if the address
7836 of the memory location is actually held in one of the registers
7837 being overwritten by the load. */
c1c2bc04 7838 case 'Q':
d5b7b3ae 7839 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 7840 abort ();
d5b7b3ae 7841 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
7842 return;
7843
9997d19d 7844 case 'R':
d5b7b3ae 7845 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 7846 abort ();
d5b7b3ae
RE
7847 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
7848 return;
7849
7850 case 'H':
7851 if (REGNO (x) > LAST_ARM_REGNUM)
7852 abort ();
7853 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
7854 return;
7855
7856 case 'm':
d5b7b3ae
RE
7857 asm_fprintf (stream, "%r",
7858 GET_CODE (XEXP (x, 0)) == REG
7859 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
7860 return;
7861
7862 case 'M':
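      /* For example, a DImode value held in r0 prints as "{r0-r1}".  */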
dd18ae56 7863 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae
RE
7864 REGNO (x),
7865 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
7866 return;
7867
7868 case 'd':
5895f793 7869 if (!x)
d5b7b3ae
RE
7870 return;
7871
7872 if (TARGET_ARM)
9997d19d
RE
7873 fputs (arm_condition_codes[get_arm_condition_code (x)],
7874 stream);
d5b7b3ae
RE
7875 else
7876 fputs (thumb_condition_code (x, 0), stream);
9997d19d
RE
7877 return;
7878
7879 case 'D':
5895f793 7880 if (!x)
d5b7b3ae
RE
7881 return;
7882
7883 if (TARGET_ARM)
7884 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
7885 (get_arm_condition_code (x))],
9997d19d 7886 stream);
d5b7b3ae
RE
7887 else
7888 fputs (thumb_condition_code (x, 1), stream);
9997d19d
RE
7889 return;
7890
7891 default:
7892 if (x == 0)
7893 abort ();
7894
7895 if (GET_CODE (x) == REG)
d5b7b3ae 7896 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
7897 else if (GET_CODE (x) == MEM)
7898 {
7899 output_memory_reference_mode = GET_MODE (x);
7900 output_address (XEXP (x, 0));
7901 }
7902 else if (GET_CODE (x) == CONST_DOUBLE)
7903 fprintf (stream, "#%s", fp_immediate_constant (x));
7904 else if (GET_CODE (x) == NEG)
6354dc9b 7905 abort (); /* This should never happen now. */
9997d19d
RE
7906 else
7907 {
7908 fputc ('#', stream);
7909 output_addr_const (stream, x);
7910 }
7911 }
7912}
cce8749e
CH
7913\f
7914/* A finite state machine takes care of noticing whether or not instructions
7915 can be conditionally executed, and thus decrease execution time and code
7916 size by deleting branch instructions. The fsm is controlled by
7917 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
7918
7919/* The state of the fsm controlling condition codes are:
7920 0: normal, do nothing special
7921 1: make ASM_OUTPUT_OPCODE not output this instruction
7922 2: make ASM_OUTPUT_OPCODE not output this instruction
7923 3: make instructions conditional
7924 4: make instructions conditional
7925
7926 State transitions (state->state by whom under condition):
7927 0 -> 1 final_prescan_insn if the `target' is a label
7928 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
7929 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
7930 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
7931 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
7932 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
7933 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
7934 (the target insn is arm_target_insn).
7935
ff9940b0
RE
7936 If the jump clobbers the conditions then we use states 2 and 4.
7937
7938 A similar thing can be done with conditional return insns.
7939
cce8749e
CH
7940 XXX In case the `target' is an unconditional branch, this conditionalising
7941 of the instructions always reduces code size, but not always execution
7942 time. But then, I want to reduce the code size to somewhere near what
7943 /bin/cc produces. */
7944
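/* For example (a sketch of the intent, not actual compiler output), a
   branch around a single insn such as

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   can be rewritten by this fsm as

	cmp	r0, #0
	addne	r1, r1, #1  */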
cce8749e
CH
7945/* Returns the index of the ARM condition code string in
7946 `arm_condition_codes'. COMPARISON should be an rtx like
7947 `(eq (...) (...))'. */
7948
84ed5e79 7949static enum arm_cond_code
cce8749e
CH
7950get_arm_condition_code (comparison)
7951 rtx comparison;
7952{
5165176d 7953 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
7954 register int code;
7955 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
7956
7957 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 7958 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
7959 XEXP (comparison, 1));
7960
7961 switch (mode)
cce8749e 7962 {
84ed5e79
RE
7963 case CC_DNEmode: code = ARM_NE; goto dominance;
7964 case CC_DEQmode: code = ARM_EQ; goto dominance;
7965 case CC_DGEmode: code = ARM_GE; goto dominance;
7966 case CC_DGTmode: code = ARM_GT; goto dominance;
7967 case CC_DLEmode: code = ARM_LE; goto dominance;
7968 case CC_DLTmode: code = ARM_LT; goto dominance;
7969 case CC_DGEUmode: code = ARM_CS; goto dominance;
7970 case CC_DGTUmode: code = ARM_HI; goto dominance;
7971 case CC_DLEUmode: code = ARM_LS; goto dominance;
7972 case CC_DLTUmode: code = ARM_CC;
7973
7974 dominance:
7975 if (comp_code != EQ && comp_code != NE)
7976 abort ();
7977
7978 if (comp_code == EQ)
7979 return ARM_INVERSE_CONDITION_CODE (code);
7980 return code;
7981
5165176d 7982 case CC_NOOVmode:
84ed5e79 7983 switch (comp_code)
5165176d 7984 {
84ed5e79
RE
7985 case NE: return ARM_NE;
7986 case EQ: return ARM_EQ;
7987 case GE: return ARM_PL;
7988 case LT: return ARM_MI;
5165176d
RE
7989 default: abort ();
7990 }
7991
7992 case CC_Zmode:
84ed5e79 7993 switch (comp_code)
5165176d 7994 {
84ed5e79
RE
7995 case NE: return ARM_NE;
7996 case EQ: return ARM_EQ;
5165176d
RE
7997 default: abort ();
7998 }
7999
8000 case CCFPEmode:
e45b72c4
RE
8001 case CCFPmode:
8002 /* These encodings assume that AC=1 in the FPA system control
8003 byte. This allows us to handle all cases except UNEQ and
8004 LTGT. */
84ed5e79
RE
8005 switch (comp_code)
8006 {
8007 case GE: return ARM_GE;
8008 case GT: return ARM_GT;
8009 case LE: return ARM_LS;
8010 case LT: return ARM_MI;
e45b72c4
RE
8011 case NE: return ARM_NE;
8012 case EQ: return ARM_EQ;
8013 case ORDERED: return ARM_VC;
8014 case UNORDERED: return ARM_VS;
8015 case UNLT: return ARM_LT;
8016 case UNLE: return ARM_LE;
8017 case UNGT: return ARM_HI;
8018 case UNGE: return ARM_PL;
8019 /* UNEQ and LTGT do not have a representation. */
8020 case UNEQ: /* Fall through. */
8021 case LTGT: /* Fall through. */
84ed5e79
RE
8022 default: abort ();
8023 }
8024
8025 case CC_SWPmode:
8026 switch (comp_code)
8027 {
8028 case NE: return ARM_NE;
8029 case EQ: return ARM_EQ;
8030 case GE: return ARM_LE;
8031 case GT: return ARM_LT;
8032 case LE: return ARM_GE;
8033 case LT: return ARM_GT;
8034 case GEU: return ARM_LS;
8035 case GTU: return ARM_CC;
8036 case LEU: return ARM_CS;
8037 case LTU: return ARM_HI;
8038 default: abort ();
8039 }
8040
bd9c7e23
RE
8041 case CC_Cmode:
8042 switch (comp_code)
8043 {
8044 case LTU: return ARM_CS;
8045 case GEU: return ARM_CC;
8046 default: abort ();
8047 }
8048
5165176d 8049 case CCmode:
84ed5e79 8050 switch (comp_code)
5165176d 8051 {
84ed5e79
RE
8052 case NE: return ARM_NE;
8053 case EQ: return ARM_EQ;
8054 case GE: return ARM_GE;
8055 case GT: return ARM_GT;
8056 case LE: return ARM_LE;
8057 case LT: return ARM_LT;
8058 case GEU: return ARM_CS;
8059 case GTU: return ARM_HI;
8060 case LEU: return ARM_LS;
8061 case LTU: return ARM_CC;
5165176d
RE
8062 default: abort ();
8063 }
8064
cce8749e
CH
8065 default: abort ();
8066 }
84ed5e79
RE
8067
8068 abort ();
f3bb6135 8069}
cce8749e
CH
8070
8071
8072void
74bbc178 8073arm_final_prescan_insn (insn)
cce8749e 8074 rtx insn;
cce8749e
CH
8075{
8076 /* BODY will hold the body of INSN. */
8077 register rtx body = PATTERN (insn);
8078
8079 /* This will be 1 if trying to repeat the trick, and things need to be
8080 reversed if it appears to fail. */
8081 int reverse = 0;
8082
ff9940b0
RE
8083 /* JUMP_CLOBBERS will be one if the condition codes are clobbered when
8084 a branch is taken, even if the rtl suggests otherwise. It also
8085 means that we have to grub around within the jump expression to find
8086 out what the conditions are when the jump isn't taken. */
8087 int jump_clobbers = 0;
8088
6354dc9b 8089 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
8090 int seeking_return = 0;
8091
cce8749e
CH
8092 /* START_INSN will hold the insn from where we start looking. This is the
8093 first insn after the following code_label if REVERSE is true. */
8094 rtx start_insn = insn;
8095
8096 /* If in state 4, check if the target branch is reached, in order to
8097 change back to state 0. */
8098 if (arm_ccfsm_state == 4)
8099 {
8100 if (insn == arm_target_insn)
f5a1b0d2
NC
8101 {
8102 arm_target_insn = NULL;
8103 arm_ccfsm_state = 0;
8104 }
cce8749e
CH
8105 return;
8106 }
8107
8108 /* If in state 3, it is possible to repeat the trick, if this insn is an
8109 unconditional branch to a label, and immediately following this branch
8110 is the previous target label which is only used once, and the label this
8111 branch jumps to is not too far off. */
8112 if (arm_ccfsm_state == 3)
8113 {
8114 if (simplejump_p (insn))
8115 {
8116 start_insn = next_nonnote_insn (start_insn);
8117 if (GET_CODE (start_insn) == BARRIER)
8118 {
8119 /* XXX Isn't this always a barrier? */
8120 start_insn = next_nonnote_insn (start_insn);
8121 }
8122 if (GET_CODE (start_insn) == CODE_LABEL
8123 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8124 && LABEL_NUSES (start_insn) == 1)
8125 reverse = TRUE;
8126 else
8127 return;
8128 }
ff9940b0
RE
8129 else if (GET_CODE (body) == RETURN)
8130 {
8131 start_insn = next_nonnote_insn (start_insn);
8132 if (GET_CODE (start_insn) == BARRIER)
8133 start_insn = next_nonnote_insn (start_insn);
8134 if (GET_CODE (start_insn) == CODE_LABEL
8135 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8136 && LABEL_NUSES (start_insn) == 1)
8137 {
8138 reverse = TRUE;
8139 seeking_return = 1;
8140 }
8141 else
8142 return;
8143 }
cce8749e
CH
8144 else
8145 return;
8146 }
8147
8148 if (arm_ccfsm_state != 0 && !reverse)
8149 abort ();
8150 if (GET_CODE (insn) != JUMP_INSN)
8151 return;
8152
ddd5a7c1 8153 /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
8154 the jump should always come first. */
8155 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8156 body = XVECEXP (body, 0, 0);
8157
8158#if 0
8159 /* If this is a conditional return then we don't want to know. */
8160 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8161 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8162 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8163 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8164 return;
8165#endif
8166
cce8749e
CH
8167 if (reverse
8168 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8169 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8170 {
bd9c7e23
RE
8171 int insns_skipped;
8172 int fail = FALSE, succeed = FALSE;
cce8749e
CH
8173 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8174 int then_not_else = TRUE;
ff9940b0 8175 rtx this_insn = start_insn, label = 0;
cce8749e 8176
e45b72c4
RE
8177 /* If the jump cannot be done with one instruction, we cannot
8178 conditionally execute the instruction in the inverse case. */
ff9940b0 8179 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 8180 {
5bbe2d40
RE
8181 jump_clobbers = 1;
8182 return;
8183 }
ff9940b0 8184
cce8749e
CH
8185 /* Register the insn jumped to. */
8186 if (reverse)
ff9940b0
RE
8187 {
8188 if (!seeking_return)
8189 label = XEXP (SET_SRC (body), 0);
8190 }
cce8749e
CH
8191 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8192 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8193 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8194 {
8195 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8196 then_not_else = FALSE;
8197 }
ff9940b0
RE
8198 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8199 seeking_return = 1;
8200 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8201 {
8202 seeking_return = 1;
8203 then_not_else = FALSE;
8204 }
cce8749e
CH
8205 else
8206 abort ();
8207
8208 /* See how many insns this branch skips, and what kind of insns. If all
8209 insns are okay, and the label or unconditional branch to the same
8210 label is not too far away, succeed. */
8211 for (insns_skipped = 0;
b36ba79f 8212 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
8213 {
8214 rtx scanbody;
8215
8216 this_insn = next_nonnote_insn (this_insn);
8217 if (!this_insn)
8218 break;
8219
cce8749e
CH
8220 switch (GET_CODE (this_insn))
8221 {
8222 case CODE_LABEL:
8223 /* Succeed if it is the target label, otherwise fail since
8224 control falls in from somewhere else. */
8225 if (this_insn == label)
8226 {
ff9940b0
RE
8227 if (jump_clobbers)
8228 {
8229 arm_ccfsm_state = 2;
8230 this_insn = next_nonnote_insn (this_insn);
8231 }
8232 else
8233 arm_ccfsm_state = 1;
cce8749e
CH
8234 succeed = TRUE;
8235 }
8236 else
8237 fail = TRUE;
8238 break;
8239
ff9940b0 8240 case BARRIER:
cce8749e 8241 /* Succeed if the following insn is the target label.
ff9940b0
RE
8242 Otherwise fail.
8243 If return insns are used then the last insn in a function
6354dc9b 8244 will be a barrier. */
cce8749e 8245 this_insn = next_nonnote_insn (this_insn);
ff9940b0 8246 if (this_insn && this_insn == label)
cce8749e 8247 {
ff9940b0
RE
8248 if (jump_clobbers)
8249 {
8250 arm_ccfsm_state = 2;
8251 this_insn = next_nonnote_insn (this_insn);
8252 }
8253 else
8254 arm_ccfsm_state = 1;
cce8749e
CH
8255 succeed = TRUE;
8256 }
8257 else
8258 fail = TRUE;
8259 break;
8260
ff9940b0 8261 case CALL_INSN:
2b835d68 8262 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 8263 calls. */
2b835d68 8264 if (TARGET_APCS_32)
bd9c7e23
RE
8265 {
8266 /* Succeed if the following insn is the target label,
8267 or if the following two insns are a barrier and
8268 the target label. */
8269 this_insn = next_nonnote_insn (this_insn);
8270 if (this_insn && GET_CODE (this_insn) == BARRIER)
8271 this_insn = next_nonnote_insn (this_insn);
8272
8273 if (this_insn && this_insn == label
b36ba79f 8274 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
8275 {
8276 if (jump_clobbers)
8277 {
8278 arm_ccfsm_state = 2;
8279 this_insn = next_nonnote_insn (this_insn);
8280 }
8281 else
8282 arm_ccfsm_state = 1;
8283 succeed = TRUE;
8284 }
8285 else
8286 fail = TRUE;
8287 }
ff9940b0 8288 break;
2b835d68 8289
cce8749e
CH
8290 case JUMP_INSN:
8291 /* If this is an unconditional branch to the same label, succeed.
8292 If it is to another label, do nothing. If it is conditional,
8293 fail. */
914a3b8c 8294 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 8295
ed4c4348 8296 scanbody = PATTERN (this_insn);
ff9940b0
RE
8297 if (GET_CODE (scanbody) == SET
8298 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
8299 {
8300 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
8301 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
8302 {
8303 arm_ccfsm_state = 2;
8304 succeed = TRUE;
8305 }
8306 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
8307 fail = TRUE;
8308 }
b36ba79f
RE
8309 /* Fail if a conditional return is undesirable (eg on a
8310 StrongARM), but still allow this if optimizing for size. */
8311 else if (GET_CODE (scanbody) == RETURN
5895f793
RE
8312 && !use_return_insn (TRUE)
8313 && !optimize_size)
b36ba79f 8314 fail = TRUE;
ff9940b0
RE
8315 else if (GET_CODE (scanbody) == RETURN
8316 && seeking_return)
8317 {
8318 arm_ccfsm_state = 2;
8319 succeed = TRUE;
8320 }
8321 else if (GET_CODE (scanbody) == PARALLEL)
8322 {
8323 switch (get_attr_conds (this_insn))
8324 {
8325 case CONDS_NOCOND:
8326 break;
8327 default:
8328 fail = TRUE;
8329 break;
8330 }
8331 }
4e67550b
RE
8332 else
8333 fail = TRUE; /* Unrecognized jump (eg epilogue). */
8334
cce8749e
CH
8335 break;
8336
8337 case INSN:
ff9940b0
RE
8338 /* Instructions using or affecting the condition codes make it
8339 fail. */
ed4c4348 8340 scanbody = PATTERN (this_insn);
5895f793
RE
8341 if (!(GET_CODE (scanbody) == SET
8342 || GET_CODE (scanbody) == PARALLEL)
74641843 8343 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
8344 fail = TRUE;
8345 break;
8346
8347 default:
8348 break;
8349 }
8350 }
8351 if (succeed)
8352 {
ff9940b0 8353 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 8354 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
8355 else if (seeking_return || arm_ccfsm_state == 2)
8356 {
8357 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
8358 {
8359 this_insn = next_nonnote_insn (this_insn);
8360 if (this_insn && (GET_CODE (this_insn) == BARRIER
8361 || GET_CODE (this_insn) == CODE_LABEL))
8362 abort ();
8363 }
8364 if (!this_insn)
8365 {
8366 /* Oh, dear! We ran off the end... give up. */
8367 recog (PATTERN (insn), insn, NULL_PTR);
8368 arm_ccfsm_state = 0;
abaa26e5 8369 arm_target_insn = NULL;
ff9940b0
RE
8370 return;
8371 }
8372 arm_target_insn = this_insn;
8373 }
cce8749e
CH
8374 else
8375 abort ();
ff9940b0
RE
8376 if (jump_clobbers)
8377 {
8378 if (reverse)
8379 abort ();
8380 arm_current_cc =
8381 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
8382 0), 0), 1));
8383 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
8384 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8385 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
8386 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8387 }
8388 else
8389 {
8390 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
8391 what it was. */
8392 if (!reverse)
8393 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
8394 0));
8395 }
cce8749e 8396
cce8749e
CH
8397 if (reverse || then_not_else)
8398 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8399 }
d5b7b3ae 8400
1ccbefce 8401 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 8402 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 8403 across this call; since the insn has been recognized already we
b020fd92 8404 call recog directly). */
ff9940b0 8405 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 8406 }
f3bb6135 8407}
cce8749e 8408
d5b7b3ae
RE
8409int
8410arm_regno_class (regno)
8411 int regno;
8412{
8413 if (TARGET_THUMB)
8414 {
8415 if (regno == STACK_POINTER_REGNUM)
8416 return STACK_REG;
8417 if (regno == CC_REGNUM)
8418 return CC_REG;
8419 if (regno < 8)
8420 return LO_REGS;
8421 return HI_REGS;
8422 }
8423
8424 if ( regno <= LAST_ARM_REGNUM
8425 || regno == FRAME_POINTER_REGNUM
8426 || regno == ARG_POINTER_REGNUM)
8427 return GENERAL_REGS;
8428
8429 if (regno == CC_REGNUM)
8430 return NO_REGS;
8431
8432 return FPU_REGS;
8433}
8434
8435/* Handle a special case when computing the offset
8436 of an argument from the frame pointer. */
8437int
8438arm_debugger_arg_offset (value, addr)
8439 int value;
8440 rtx addr;
8441{
8442 rtx insn;
8443
8444 /* We are only interested if dbxout_parms() failed to compute the offset. */
8445 if (value != 0)
8446 return 0;
8447
8448 /* We can only cope with the case where the address is held in a register. */
8449 if (GET_CODE (addr) != REG)
8450 return 0;
8451
8452 /* If we are using the frame pointer to point at the argument, then
8453 an offset of 0 is correct. */
cd2b33d0 8454 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
8455 return 0;
8456
8457 /* If we are using the stack pointer to point at the
8458 argument, then an offset of 0 is correct. */
5895f793 8459 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
8460 && REGNO (addr) == SP_REGNUM)
8461 return 0;
8462
8463 /* Oh dear. The argument is pointed to by a register rather
8464 than being held in a register, or being stored at a known
8465 offset from the frame pointer. Since GDB only understands
8466 those two kinds of argument we must translate the address
8467 held in the register into an offset from the frame pointer.
8468 We do this by searching through the insns for the function
8469 looking to see where this register gets its value. If the
8470 register is initialised from the frame pointer plus an offset
8471 then we are in luck and we can continue, otherwise we give up.
8472
8473 This code is exercised by producing debugging information
8474 for a function with arguments like this:
8475
8476 double func (double a, double b, int c, double d) {return d;}
8477
8478 Without this code the stab for parameter 'd' will be set to
8479 an offset of 0 from the frame pointer, rather than 8. */
8480
8481 /* The if() statement says:
8482
8483 If the insn is a normal instruction
8484 and if the insn is setting the value in a register
8485 and if the register being set is the register holding the address of the argument
8486 and if the address is computed by an addition
8487 that involves adding to a register
8488 which is the frame pointer
8489 a constant integer
8490
8491 then... */
8492
8493 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8494 {
8495 if ( GET_CODE (insn) == INSN
8496 && GET_CODE (PATTERN (insn)) == SET
8497 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8498 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8499 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 8500 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
8501 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8502 )
8503 {
8504 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8505
8506 break;
8507 }
8508 }
8509
8510 if (value == 0)
8511 {
8512 debug_rtx (addr);
8513 warning ("Unable to compute real location of stacked parameter");
8514 value = 8; /* XXX magic hack */
8515 }
8516
8517 return value;
8518}
8519
d19fb8e3
NC
8520#define def_builtin(NAME, TYPE, CODE) \
8521 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL_PTR)
8522
8523void
8524arm_init_builtins ()
8525{
8526 tree endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
8527 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
8528 tree pchar_type_node = build_pointer_type (char_type_node);
8529
8530 tree int_ftype_int, void_ftype_pchar;
8531
8532 /* void func (void *) */
8533 void_ftype_pchar
8534 = build_function_type (void_type_node,
8535 tree_cons (NULL_TREE, pchar_type_node, endlink));
8536
8537 /* int func (int) */
8538 int_ftype_int
8539 = build_function_type (integer_type_node, int_endlink);
8540
8541 /* Initialize arm V5 builtins. */
8542 if (arm_arch5)
8543 {
8544 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
8545 def_builtin ("__builtin_prefetch", void_ftype_pchar,
8546 ARM_BUILTIN_PREFETCH);
8547 }
8548}
8549
8550/* Expand an expression EXP that calls a built-in function,
8551 with result going to TARGET if that's convenient
8552 (and in mode MODE if that's convenient).
8553 SUBTARGET may be used as the target for computing one of EXP's operands.
8554 IGNORE is nonzero if the value is to be ignored. */
8555
8556rtx
8557arm_expand_builtin (exp, target, subtarget, mode, ignore)
8558 tree exp;
8559 rtx target;
8560 rtx subtarget ATTRIBUTE_UNUSED;
8561 enum machine_mode mode ATTRIBUTE_UNUSED;
8562 int ignore ATTRIBUTE_UNUSED;
8563{
8564 enum insn_code icode;
8565 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8566 tree arglist = TREE_OPERAND (exp, 1);
8567 tree arg0;
8568 rtx op0, pat;
8569 enum machine_mode tmode, mode0;
8570 int fcode = DECL_FUNCTION_CODE (fndecl);
8571
8572 switch (fcode)
8573 {
8574 default:
8575 break;
8576
8577 case ARM_BUILTIN_CLZ:
8578 icode = CODE_FOR_clz;
8579 arg0 = TREE_VALUE (arglist);
8580 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8581 tmode = insn_data[icode].operand[0].mode;
8582 mode0 = insn_data[icode].operand[1].mode;
8583
8584 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8585 op0 = copy_to_mode_reg (mode0, op0);
8586 if (target == 0
8587 || GET_MODE (target) != tmode
8588 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8589 target = gen_reg_rtx (tmode);
8590 pat = GEN_FCN (icode) (target, op0);
8591 if (! pat)
8592 return 0;
8593 emit_insn (pat);
8594 return target;
8595
8596 case ARM_BUILTIN_PREFETCH:
8597 icode = CODE_FOR_prefetch;
8598 arg0 = TREE_VALUE (arglist);
8599 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8600
8601 op0 = gen_rtx_MEM (SImode, copy_to_mode_reg (Pmode, op0));
8602
8603 pat = GEN_FCN (icode) (op0);
8604 if (! pat)
8605 return 0;
8606 emit_insn (pat);
8607 return target;
8608 }
8609
8610 /* @@@ Should really do something sensible here. */
8611 return NULL_RTX;
8612}
d5b7b3ae
RE
8613\f
8614/* Recursively search through all of the blocks in a function
8615 checking to see if any of the variables created in that
8616 function match the RTX called 'orig'. If they do then
8617 replace them with the RTX called 'new'. */
8618
8619static void
8620replace_symbols_in_block (block, orig, new)
8621 tree block;
8622 rtx orig;
8623 rtx new;
8624{
8625 for (; block; block = BLOCK_CHAIN (block))
8626 {
8627 tree sym;
8628
5895f793 8629 if (!TREE_USED (block))
d5b7b3ae
RE
8630 continue;
8631
8632 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8633 {
8634 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8635 || DECL_IGNORED_P (sym)
8636 || TREE_CODE (sym) != VAR_DECL
8637 || DECL_EXTERNAL (sym)
5895f793 8638 || !rtx_equal_p (DECL_RTL (sym), orig)
d5b7b3ae
RE
8639 )
8640 continue;
8641
8642 DECL_RTL (sym) = new;
8643 }
8644
8645 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8646 }
8647}
8648
8649/* Return the number (counting from 0) of the least significant set
8650 bit in MASK. */
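/* For example, number_of_first_bit_set (0x6800) returns 11. MASK must
   have at least one bit set, otherwise the loop will not terminate.  */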
8651#ifdef __GNUC__
8652inline
8653#endif
8654static int
8655number_of_first_bit_set (mask)
8656 int mask;
8657{
8658 int bit;
8659
8660 for (bit = 0;
8661 (mask & (1 << bit)) == 0;
5895f793 8662 ++bit)
d5b7b3ae
RE
8663 continue;
8664
8665 return bit;
8666}
8667
8668/* Generate code to return from a thumb function.
8669 If 'reg_containing_return_addr' is -1, then the return address is
8670 actually on the stack, at the stack pointer. */
8671static void
8672thumb_exit (f, reg_containing_return_addr, eh_ofs)
8673 FILE * f;
8674 int reg_containing_return_addr;
8675 rtx eh_ofs;
8676{
8677 unsigned regs_available_for_popping;
8678 unsigned regs_to_pop;
8679 int pops_needed;
8680 unsigned available;
8681 unsigned required;
8682 int mode;
8683 int size;
8684 int restore_a4 = FALSE;
8685
8686 /* Compute the registers we need to pop. */
8687 regs_to_pop = 0;
8688 pops_needed = 0;
8689
8690 /* There is an assumption here that if eh_ofs is not NULL, the
8691 normal return address will have been pushed. */
8692 if (reg_containing_return_addr == -1 || eh_ofs)
8693 {
8694 /* When we are generating a return for __builtin_eh_return,
8695 reg_containing_return_addr must specify the return regno. */
8696 if (eh_ofs && reg_containing_return_addr == -1)
8697 abort ();
8698
8699 regs_to_pop |= 1 << LR_REGNUM;
5895f793 8700 ++pops_needed;
d5b7b3ae
RE
8701 }
8702
8703 if (TARGET_BACKTRACE)
8704 {
8705 /* Restore the (ARM) frame pointer and stack pointer. */
8706 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8707 pops_needed += 2;
8708 }
8709
8710 /* If there is nothing to pop then just emit the BX instruction and
8711 return. */
8712 if (pops_needed == 0)
8713 {
8714 if (eh_ofs)
8715 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8716
8717 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8718 return;
8719 }
8720 /* Otherwise if we are not supporting interworking and we have not created
8721 a backtrace structure and the function was not entered in ARM mode then
8722 just pop the return address straight into the PC. */
5895f793
RE
8723 else if (!TARGET_INTERWORK
8724 && !TARGET_BACKTRACE
8725 && !is_called_in_ARM_mode (current_function_decl))
d5b7b3ae
RE
8726 {
8727 if (eh_ofs)
8728 {
8729 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
8730 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8731 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8732 }
8733 else
8734 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
8735
8736 return;
8737 }
8738
8739 /* Find out how many of the (return) argument registers we can corrupt. */
8740 regs_available_for_popping = 0;
8741
8742 /* If returning via __builtin_eh_return, the bottom three registers
8743 all contain information needed for the return. */
8744 if (eh_ofs)
8745 size = 12;
8746 else
8747 {
8748#ifdef RTX_CODE
8749 /* We can deduce the registers used from the function's
8750 return value. This is more reliable than examining
8751 regs_ever_live[] because that will be set if the register is
8752 ever used in the function, not just if the register is used
8753 to hold a return value. */
8754
8755 if (current_function_return_rtx != 0)
8756 mode = GET_MODE (current_function_return_rtx);
8757 else
8758#endif
8759 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8760
8761 size = GET_MODE_SIZE (mode);
8762
8763 if (size == 0)
8764 {
8765 /* In a void function we can use any argument register.
8766 In a function that returns a structure on the stack
8767 we can use the second and third argument registers. */
8768 if (mode == VOIDmode)
8769 regs_available_for_popping =
8770 (1 << ARG_REGISTER (1))
8771 | (1 << ARG_REGISTER (2))
8772 | (1 << ARG_REGISTER (3));
8773 else
8774 regs_available_for_popping =
8775 (1 << ARG_REGISTER (2))
8776 | (1 << ARG_REGISTER (3));
8777 }
8778 else if (size <= 4)
8779 regs_available_for_popping =
8780 (1 << ARG_REGISTER (2))
8781 | (1 << ARG_REGISTER (3));
8782 else if (size <= 8)
8783 regs_available_for_popping =
8784 (1 << ARG_REGISTER (3));
8785 }
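
  /* So, for example, a function returning a double (size == 8) leaves
     only the third argument register free here; the fourth argument
     register is considered separately below.  */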
8786
8787 /* Match registers to be popped with registers into which we pop them. */
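  /* Note that (x & -x) isolates the least significant set bit of x, so
     each iteration below pairs off one register to pop with one register
     to pop it into.  */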
8788 for (available = regs_available_for_popping,
8789 required = regs_to_pop;
8790 required != 0 && available != 0;
8791 available &= ~(available & - available),
8792 required &= ~(required & - required))
8793 -- pops_needed;
8794
8795 /* If we have any popping registers left over, remove them. */
8796 if (available > 0)
5895f793 8797 regs_available_for_popping &= ~available;
d5b7b3ae
RE
8798
8799 /* Otherwise if we need another popping register we can use
8800 the fourth argument register. */
8801 else if (pops_needed)
8802 {
8803 /* If we have not found any free argument registers and
8804 reg a4 contains the return address, we must move it. */
8805 if (regs_available_for_popping == 0
8806 && reg_containing_return_addr == LAST_ARG_REGNUM)
8807 {
8808 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8809 reg_containing_return_addr = LR_REGNUM;
8810 }
8811 else if (size > 12)
8812 {
8813 /* Register a4 is being used to hold part of the return value,
8814 but we have dire need of a free, low register. */
8815 restore_a4 = TRUE;
8816
8817 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
8818 }
8819
8820 if (reg_containing_return_addr != LAST_ARG_REGNUM)
8821 {
8822 /* The fourth argument register is available. */
8823 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
8824
5895f793 8825 --pops_needed;
d5b7b3ae
RE
8826 }
8827 }
8828
8829 /* Pop as many registers as we can. */
8830 thumb_pushpop (f, regs_available_for_popping, FALSE);
8831
8832 /* Process the registers we popped. */
8833 if (reg_containing_return_addr == -1)
8834 {
8835 /* The return address was popped into the lowest numbered register. */
5895f793 8836 regs_to_pop &= ~(1 << LR_REGNUM);
d5b7b3ae
RE
8837
8838 reg_containing_return_addr =
8839 number_of_first_bit_set (regs_available_for_popping);
8840
8841 /* Remove this register from the mask of available registers, so that
8842 the return address will not be corrupted by further pops. */
5895f793 8843 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
d5b7b3ae
RE
8844 }
8845
8846 /* If we popped other registers then handle them here. */
8847 if (regs_available_for_popping)
8848 {
8849 int frame_pointer;
8850
8851 /* Work out which register currently contains the frame pointer. */
8852 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
8853
8854 /* Move it into the correct place. */
8855 asm_fprintf (f, "\tmov\t%r, %r\n",
8856 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
8857
8858 /* (Temporarily) remove it from the mask of popped registers. */
5895f793
RE
8859 regs_available_for_popping &= ~(1 << frame_pointer);
8860 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
d5b7b3ae
RE
8861
8862 if (regs_available_for_popping)
8863 {
8864 int stack_pointer;
8865
8866 /* We popped the stack pointer as well,
8867 find the register that contains it. */
8868 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
8869
8870 /* Move it into the stack register. */
8871 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
8872
8873 /* At this point we have popped all necessary registers, so
8874 do not worry about restoring regs_available_for_popping
8875 to its correct value:
8876
8877 assert (pops_needed == 0)
8878 assert (regs_available_for_popping == (1 << frame_pointer))
8879 assert (regs_to_pop == (1 << STACK_POINTER)) */
8880 }
8881 else
8882 {
8883 /* Since we have just moved the popped value into the frame
8884 pointer, the popping register is available for reuse, and
8885 we know that we still have the stack pointer left to pop. */
8886 regs_available_for_popping |= (1 << frame_pointer);
8887 }
8888 }
8889
8890 /* If we still have registers left on the stack, but we no longer have
8891 any registers into which we can pop them, then we must move the return
8892 address into the link register and make available the register that
8893 contained it. */
8894 if (regs_available_for_popping == 0 && pops_needed > 0)
8895 {
8896 regs_available_for_popping |= 1 << reg_containing_return_addr;
8897
8898 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
8899 reg_containing_return_addr);
8900
8901 reg_containing_return_addr = LR_REGNUM;
8902 }
8903
8904 /* If we have registers left on the stack then pop some more.
8905 We know that at most we will want to pop FP and SP. */
8906 if (pops_needed > 0)
8907 {
8908 int popped_into;
8909 int move_to;
8910
8911 thumb_pushpop (f, regs_available_for_popping, FALSE);
8912
8913 /* We have popped either FP or SP.
8914 Move whichever one it is into the correct register. */
8915 popped_into = number_of_first_bit_set (regs_available_for_popping);
8916 move_to = number_of_first_bit_set (regs_to_pop);
8917
8918 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
8919
5895f793 8920 regs_to_pop &= ~(1 << move_to);
d5b7b3ae 8921
5895f793 8922 --pops_needed;
d5b7b3ae
RE
8923 }
8924
8925 /* If we still have not popped everything then we must have only
8926 had one register available to us and we are now popping the SP. */
8927 if (pops_needed > 0)
8928 {
8929 int popped_into;
8930
8931 thumb_pushpop (f, regs_available_for_popping, FALSE);
8932
8933 popped_into = number_of_first_bit_set (regs_available_for_popping);
8934
8935 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
8936 /*
8937 assert (regs_to_pop == (1 << STACK_POINTER))
8938 assert (pops_needed == 1)
8939 */
8940 }
8941
8942 /* If necessary restore the a4 register. */
8943 if (restore_a4)
8944 {
8945 if (reg_containing_return_addr != LR_REGNUM)
8946 {
8947 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8948 reg_containing_return_addr = LR_REGNUM;
8949 }
8950
8951 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
8952 }
8953
8954 if (eh_ofs)
8955 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8956
8957 /* Return to caller. */
8958 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8959}
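
/* (Illustrative sketch, not actual compiler output.)  With interworking,
   an exit sequence produced by the code above might read:

	pop	{r1, r2}	@ saved FP and SP values land in free low regs
	mov	fp, r1		@ move the popped value into the frame pointer
	mov	sp, r2		@ and the other into the stack pointer
	bx	lr		@ return, restoring the caller's instruction set

   The registers actually used depend on which ones were found free. */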
8960
8961/* Emit code to push or pop registers to or from the stack. */
8962static void
8963thumb_pushpop (f, mask, push)
8964 FILE * f;
8965 int mask;
8966 int push;
8967{
8968 int regno;
8969 int lo_mask = mask & 0xFF;
8970
5895f793 8971 if (lo_mask == 0 && !push && (mask & (1 << 15)))
d5b7b3ae
RE
8972 {
8973 /* Special case. Do not generate a POP PC statement here; do it in
8974 thumb_exit() instead. */
8975 thumb_exit (f, -1, NULL_RTX);
8976 return;
8977 }
8978
8979 fprintf (f, "\t%s\t{", push ? "push" : "pop");
8980
8981 /* Look at the low registers first. */
5895f793 8982 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
d5b7b3ae
RE
8983 {
8984 if (lo_mask & 1)
8985 {
8986 asm_fprintf (f, "%r", regno);
8987
8988 if ((lo_mask & ~1) != 0)
8989 fprintf (f, ", ");
8990 }
8991 }
8992
8993 if (push && (mask & (1 << LR_REGNUM)))
8994 {
8995 /* Catch pushing the LR. */
8996 if (mask & 0xFF)
8997 fprintf (f, ", ");
8998
8999 asm_fprintf (f, "%r", LR_REGNUM);
9000 }
9001 else if (!push && (mask & (1 << PC_REGNUM)))
9002 {
9003 /* Catch popping the PC. */
9004 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9005 {
9006 /* The PC is never popped directly; instead
9007 it is popped into r3 and then BX is used. */
9008 fprintf (f, "}\n");
9009
9010 thumb_exit (f, -1, NULL_RTX);
9011
9012 return;
9013 }
9014 else
9015 {
9016 if (mask & 0xFF)
9017 fprintf (f, ", ");
9018
9019 asm_fprintf (f, "%r", PC_REGNUM);
9020 }
9021 }
9022
9023 fprintf (f, "}\n");
9024}
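
/* For example (mask illustrative): a prologue mask of 0x40F0 emits
   "push {r4, r5, r6, r7, lr}", and the matching epilogue mask with the
   PC bit set emits "pop {r4, r5, r6, r7, pc}" -- unless interworking or
   backtracing is enabled, in which case the pop-PC case is diverted
   through thumb_exit() as handled above. */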
9025\f
9026void
9027thumb_final_prescan_insn (insn)
9028 rtx insn;
9029{
d5b7b3ae 9030 if (flag_print_asm_name)
9d98a694
AO
9031 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9032 INSN_ADDRESSES (INSN_UID (insn)));
d5b7b3ae
RE
9033}
9034
9035int
9036thumb_shiftable_const (val)
9037 unsigned HOST_WIDE_INT val;
9038{
9039 unsigned HOST_WIDE_INT mask = 0xff;
9040 int i;
9041
9042 if (val == 0) /* XXX */
9043 return 0;
9044
9045 for (i = 0; i < 25; i++)
9046 if ((val & (mask << i)) == val)
9047 return 1;
9048
9049 return 0;
9050}
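
/* Illustrative examples for the predicate above: 0xFF, 0x1FE and
   0xFF000000 are accepted (an 8-bit value shifted left by 0, 1 and 24
   bits respectively), while 0x101 is rejected because its set bits do
   not fit within any single 8-bit window. */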
9051
9052/* Returns non-zero if the current function contains,
9053 or might contain, a far jump. */
9054int
9055thumb_far_jump_used_p (int in_prologue)
9056{
9057 rtx insn;
9058
9059 /* This test is only important for leaf functions. */
5895f793 9060 /* assert (!leaf_function_p ()); */
d5b7b3ae
RE
9061
9062 /* If we have already decided that far jumps may be used,
9063 do not bother checking again, and always return true even if
9064 it turns out that they are not being used. Once we have made
9065 the decision that far jumps are present (and that hence the link
9066 register will be pushed onto the stack) we cannot go back on it. */
9067 if (cfun->machine->far_jump_used)
9068 return 1;
9069
9070 /* If this function is not being called from the prologue/epilogue
9071 generation code then it must be being called from the
9072 INITIAL_ELIMINATION_OFFSET macro. */
5895f793 9073 if (!in_prologue)
d5b7b3ae
RE
9074 {
9075 /* In this case we know that we are being asked about the elimination
9076 of the arg pointer register. If that register is not being used,
9077 then there are no arguments on the stack, and we do not have to
9078 worry that a far jump might force the prologue to push the link
9079 register, changing the stack offsets. In this case we can just
9080 return false, since the presence of far jumps in the function will
9081 not affect stack offsets.
9082
9083 If the arg pointer is live (or if it was live, but has now been
9084 eliminated and so set to dead) then we do have to test to see if
9085 the function might contain a far jump. This test can lead to some
9086 false negatives, since before reload is completed the length of
9087 branch instructions is not known, so gcc defaults to returning their
9088 longest length, which in turn sets the far jump attribute to true.
9089
9090 A false negative will not result in bad code being generated, but it
9091 will result in a needless push and pop of the link register. We
9092 hope that this does not occur too often. */
9093 if (regs_ever_live [ARG_POINTER_REGNUM])
9094 cfun->machine->arg_pointer_live = 1;
5895f793 9095 else if (!cfun->machine->arg_pointer_live)
d5b7b3ae
RE
9096 return 0;
9097 }
9098
9099 /* Check to see if the function contains a branch
9100 insn with the far jump attribute set. */
9101 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9102 {
9103 if (GET_CODE (insn) == JUMP_INSN
9104 /* Ignore tablejump patterns. */
9105 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9106 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9107 && get_attr_far_jump (insn) == FAR_JUMP_YES
9108 )
9109 {
9110 /* Record the fact that we have decided that
9111 the function does use far jumps. */
9112 cfun->machine->far_jump_used = 1;
9113 return 1;
9114 }
9115 }
9116
9117 return 0;
9118}
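
/* Background note (added commentary): the Thumb unconditional branch
   only reaches about +/- 2K bytes, so a jump beyond that range has to
   be synthesised with a BL-style sequence, which clobbers the link
   register -- hence a function containing a far jump must push LR in
   its prologue. */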
9119
9120/* Return non-zero if FUNC must be entered in ARM mode. */
9121int
9122is_called_in_ARM_mode (func)
9123 tree func;
9124{
9125 if (TREE_CODE (func) != FUNCTION_DECL)
9126 abort ();
9127
9128 /* Ignore the problem of functions whose address is taken. */
9129 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9130 return TRUE;
9131
9132#ifdef ARM_PE
9133 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
9134#else
9135 return FALSE;
9136#endif
9137}
9138
9139/* The bits which aren't usefully expanded as rtl. */
cd2b33d0 9140const char *
d5b7b3ae
RE
9141thumb_unexpanded_epilogue ()
9142{
9143 int regno;
9144 int live_regs_mask = 0;
9145 int high_regs_pushed = 0;
9146 int leaf_function = leaf_function_p ();
9147 int had_to_push_lr;
9148 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9149
9150 if (return_used_this_function)
9151 return "";
9152
9153 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
5895f793
RE
9154 if (regs_ever_live[regno] && !call_used_regs[regno]
9155 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
d5b7b3ae
RE
9156 live_regs_mask |= 1 << regno;
9157
9158 for (regno = 8; regno < 13; regno++)
9159 {
5895f793
RE
9160 if (regs_ever_live[regno] && !call_used_regs[regno]
9161 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9162 high_regs_pushed++;
d5b7b3ae
RE
9163 }
9164
9165 /* The prologue may have pushed some high registers to use as
9166 work registers, e.g. the testsuite file:
9167 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9168 compiles to produce:
9169 push {r4, r5, r6, r7, lr}
9170 mov r7, r9
9171 mov r6, r8
9172 push {r6, r7}
9173 as part of the prologue. We have to undo that pushing here. */
9174
9175 if (high_regs_pushed)
9176 {
9177 int mask = live_regs_mask;
9178 int next_hi_reg;
9179 int size;
9180 int mode;
9181
9182#ifdef RTX_CODE
9183 /* Deduce the registers used from the function's return value.
9184 This is more reliable than examining regs_ever_live[] because that
9185 will be set if the register is ever used in the function, not just if
9186 the register is used to hold a return value. */
9187
9188 if (current_function_return_rtx != 0)
9189 mode = GET_MODE (current_function_return_rtx);
9190 else
9191#endif
9192 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9193
9194 size = GET_MODE_SIZE (mode);
9195
9196 /* Unless we are returning a type of size > 12, register r3 is
9197 available. */
9198 if (size < 13)
9199 mask |= 1 << 3;
9200
9201 if (mask == 0)
9202 /* Oh dear! We have no low registers into which we can pop
9203 high registers! */
9204 fatal ("No low registers available for popping high registers");
9205
9206 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
9207 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9208 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
9209 break;
9210
9211 while (high_regs_pushed)
9212 {
9213 /* Find lo register(s) into which the high register(s) can
9214 be popped. */
9215 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9216 {
9217 if (mask & (1 << regno))
9218 high_regs_pushed--;
9219 if (high_regs_pushed == 0)
9220 break;
9221 }
9222
9223 mask &= (2 << regno) - 1; /* A no-op if regno == 8. */
9224
9225 /* Pop the values into the low register(s). */
9226 thumb_pushpop (asm_out_file, mask, 0);
9227
9228 /* Move the value(s) into the high registers. */
9229 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9230 {
9231 if (mask & (1 << regno))
9232 {
9233 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9234 regno);
9235
9236 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
9237 if (regs_ever_live[next_hi_reg]
9238 && !call_used_regs[next_hi_reg]
9239 && !(TARGET_SINGLE_PIC_BASE
9240 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
9241 break;
9242 }
9243 }
9244 }
9245 }
9246
5895f793 9247 had_to_push_lr = (live_regs_mask || !leaf_function
d5b7b3ae
RE
9248 || thumb_far_jump_used_p (1));
9249
9250 if (TARGET_BACKTRACE
9251 && ((live_regs_mask & 0xFF) == 0)
9252 && regs_ever_live [LAST_ARG_REGNUM] != 0)
9253 {
9254 /* The stack backtrace structure creation code had to
9255 push R7 in order to get a work register, so we pop
9256 it now. */
9257 live_regs_mask |= (1 << LAST_LO_REGNUM);
9258 }
9259
9260 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
9261 {
9262 if (had_to_push_lr
5895f793
RE
9263 && !is_called_in_ARM_mode (current_function_decl)
9264 && !eh_ofs)
d5b7b3ae
RE
9265 live_regs_mask |= 1 << PC_REGNUM;
9266
9267 /* Either no argument registers were pushed or a backtrace
9268 structure was created which includes an adjusted stack
9269 pointer, so just pop everything. */
9270 if (live_regs_mask)
9271 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9272
9273 if (eh_ofs)
9274 thumb_exit (asm_out_file, 2, eh_ofs);
9275 /* We have either just popped the return address into the
9276 PC, or it was kept in LR for the entire function, or
9277 it is still on the stack because we do not want to
9278 return by doing a pop {pc}. */
9279 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
9280 thumb_exit (asm_out_file,
9281 (had_to_push_lr
9282 && is_called_in_ARM_mode (current_function_decl)) ?
9283 -1 : LR_REGNUM, NULL_RTX);
9284 }
9285 else
9286 {
9287 /* Pop everything but the return address. */
5895f793 9288 live_regs_mask &= ~(1 << PC_REGNUM);
d5b7b3ae
RE
9289
9290 if (live_regs_mask)
9291 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9292
9293 if (had_to_push_lr)
9294 /* Get the return address into a temporary register. */
9295 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
9296
9297 /* Remove the argument registers that were pushed onto the stack. */
9298 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
9299 SP_REGNUM, SP_REGNUM,
9300 current_function_pretend_args_size);
9301
9302 if (eh_ofs)
9303 thumb_exit (asm_out_file, 2, eh_ofs);
9304 else
9305 thumb_exit (asm_out_file,
9306 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
9307 }
9308
9309 return "";
9310}
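
/* To undo the example prologue shown earlier, the code above would
   emit something like this (registers illustrative):

	pop	{r3, r4}	@ pop the saved copies of r8 and r9
	mov	r8, r3		@ restore the high registers
	mov	r9, r4
	pop	{r4, r5, r6, r7, pc}
*/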
9311
9312/* Functions to save and restore machine-specific function data. */
9313
9314static void
9315arm_mark_machine_status (p)
9316 struct function * p;
9317{
9318 struct machine_function *machine = p->machine;
9319
9320 ggc_mark_rtx (machine->ra_rtx);
9321 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
9322}
9323
9324static void
9325arm_init_machine_status (p)
9326 struct function * p;
9327{
9328 p->machine =
9329 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
9330}
9331
9332/* Return an RTX indicating where the return address to the
9333 calling function can be found. */
9334rtx
9335arm_return_addr (count, frame)
9336 int count;
9337 rtx frame ATTRIBUTE_UNUSED;
9338{
9339 rtx reg;
9340
9341 if (count != 0)
9342 return NULL_RTX;
9343
9344 reg = cfun->machine->ra_rtx;
9345
9346 if (reg == NULL)
9347 {
9348 rtx init;
9349
9350 /* No rtx yet. Invent one, and initialize it for r14 (lr) in
9351 the prologue. */
9352 reg = gen_reg_rtx (Pmode);
9353 cfun->machine->ra_rtx = reg;
9354
5895f793 9355 if (!TARGET_APCS_32)
d5b7b3ae
RE
9356 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
9357 GEN_INT (RETURN_ADDR_MASK26));
9358 else
9359 init = gen_rtx_REG (Pmode, LR_REGNUM);
9360
9361 init = gen_rtx_SET (VOIDmode, reg, init);
9362
9363 /* Emit the insn to the prologue with the other argument copies. */
9364 push_topmost_sequence ();
9365 emit_insn_after (init, get_insns ());
9366 pop_topmost_sequence ();
9367 }
9368
9369 return reg;
9370}
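
/* Note (added commentary): under the 26-bit APCS the link register
   holds the condition flags in its top bits, so the AND with
   RETURN_ADDR_MASK26 above strips them, leaving a clean address;
   in 32-bit mode LR can be used as-is. */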
9371
9372/* Do anything needed before RTL is emitted for each function. */
9373void
9374arm_init_expanders ()
9375{
9376 /* Arrange to initialize and mark the machine per-function status. */
9377 init_machine_status = arm_init_machine_status;
9378 mark_machine_status = arm_mark_machine_status;
9379}
9380
9381/* Generate the rest of a function's prologue. */
9382void
9383thumb_expand_prologue ()
9384{
9385 HOST_WIDE_INT amount = (get_frame_size ()
9386 + current_function_outgoing_args_size);
9387
9388 /* Naked functions don't have prologues. */
9389 if (arm_naked_function_p (current_function_decl))
9390 return;
9391
9392 if (frame_pointer_needed)
9393 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
9394
9395 if (amount)
9396 {
9397 amount = ROUND_UP (amount);
9398
9399 if (amount < 512)
9400 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5895f793 9401 GEN_INT (-amount)));
d5b7b3ae
RE
9402 else
9403 {
9404 int regno;
9405 rtx reg;
9406
9407 /* The stack decrement is too big for an immediate value in a single
9408 insn. In theory we could issue multiple subtracts, but after
9409 three of them it becomes more space efficient to place the full
9410 value in the constant pool and load into a register. (Also the
9411 ARM debugger really likes to see only one stack decrement per
9412 function). So instead we look for a scratch register into which
9413 we can load the decrement, and then we subtract this from the
9414 stack pointer. Unfortunately on the thumb the only available
9415 scratch registers are the argument registers, and we cannot use
9416 these as they may hold arguments to the function. Instead we
9417 attempt to locate a call preserved register which is used by this
9418 function. If we can find one, then we know that it will have
9419 been pushed at the start of the prologue and so we can corrupt
9420 it now. */
9421 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
9422 if (regs_ever_live[regno]
5895f793
RE
9423 && !call_used_regs[regno] /* Paranoia */
9424 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
9425 && !(frame_pointer_needed
9426 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
d5b7b3ae
RE
9427 break;
9428
9429 if (regno > LAST_LO_REGNUM) /* Very unlikely */
9430 {
9431 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
9432
9433 /* Choose an arbitrary, non-argument low register. */
9434 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
9435
9436 /* Save it by copying it into a high, scratch register. */
9437 emit_insn (gen_movsi (spare, reg));
9438
9439 /* Decrement the stack. */
5895f793 9440 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
d5b7b3ae
RE
9441 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9442 reg));
9443
9444 /* Restore the low register's original value. */
9445 emit_insn (gen_movsi (reg, spare));
9446
9447 /* Emit a USE of the restored scratch register, so that flow
9448 analysis will not consider the restore redundant. The
9449 register won't be used again in this function and isn't
9450 restored by the epilogue. */
9451 emit_insn (gen_rtx_USE (VOIDmode, reg));
9452 }
9453 else
9454 {
9455 reg = gen_rtx (REG, SImode, regno);
9456
5895f793 9457 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
d5b7b3ae
RE
9458 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9459 reg));
9460 }
9461 }
9462 }
9463
9464 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9465 emit_insn (gen_blockage ());
9466}
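
/* For a frame too large for a single immediate, the fall-back path
   above produces something like this (register and constant are
   illustrative):

	ldr	r4, .L99	@ r4: a call-saved low reg already pushed
	add	sp, r4		@ sp -= 4096
	...
   .L99:
	.word	-4096
*/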
9467
9468void
9469thumb_expand_epilogue ()
9470{
9471 HOST_WIDE_INT amount = (get_frame_size ()
9472 + current_function_outgoing_args_size);
9473
9474 /* Naked functions don't have epilogues. */
9475 if (arm_naked_function_p (current_function_decl))
9476 return;
9477
9478 if (frame_pointer_needed)
9479 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
9480 else if (amount)
9481 {
9482 amount = ROUND_UP (amount);
9483
9484 if (amount < 512)
9485 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9486 GEN_INT (amount)));
9487 else
9488 {
9489 /* r3 is always free in the epilogue. */
9490 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
9491
9492 emit_insn (gen_movsi (reg, GEN_INT (amount)));
9493 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
9494 }
9495 }
9496
9497 /* Emit a USE (stack_pointer_rtx), so that
9498 the stack adjustment will not be deleted. */
9499 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9500
9501 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9502 emit_insn (gen_blockage ());
9503}
9504
9505void
9506output_thumb_prologue (f)
9507 FILE * f;
9508{
9509 int live_regs_mask = 0;
9510 int high_regs_pushed = 0;
9511 int store_arg_regs = 0;
9512 int regno;
9513
9514 if (arm_naked_function_p (current_function_decl))
9515 return;
9516
9517 if (is_called_in_ARM_mode (current_function_decl))
9518 {
9519 const char * name;
9520
9521 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9522 abort ();
9523 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9524 abort ();
9525 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9526
9527 /* Generate code sequence to switch us into Thumb mode. */
9528 /* The .code 32 directive has already been emitted by
6d77b53e 9529 ASM_DECLARE_FUNCTION_NAME. */
d5b7b3ae
RE
9530 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9531 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9532
9533 /* Generate a label, so that the debugger will notice the
9534 change in instruction sets. This label is also used by
9535 the assembler to bypass the ARM code when this function
9536 is called from a Thumb encoded function elsewhere in the
9537 same file. Hence the definition of STUB_NAME here must
9538 agree with the definition in gas/config/tc-arm.c. */
9539
9540#define STUB_NAME ".real_start_of"
9541
9542 asm_fprintf (f, "\t.code\t16\n");
9543#ifdef ARM_PE
9544 if (arm_dllexport_name_p (name))
e5951263 9545 name = arm_strip_name_encoding (name);
d5b7b3ae
RE
9546#endif
9547 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9548 asm_fprintf (f, "\t.thumb_func\n");
9549 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9550 }
9551
9552 if (current_function_anonymous_args && current_function_pretend_args_size)
9553 store_arg_regs = 1;
9554
9555 if (current_function_pretend_args_size)
9556 {
9557 if (store_arg_regs)
9558 {
9559 int num_pushes;
9560
9561 asm_fprintf (f, "\tpush\t{");
9562
9563 num_pushes = NUM_INTS (current_function_pretend_args_size);
9564
9565 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9566 regno <= LAST_ARG_REGNUM;
5895f793 9567 regno++)
d5b7b3ae
RE
9568 asm_fprintf (f, "%r%s", regno,
9569 regno == LAST_ARG_REGNUM ? "" : ", ");
9570
9571 asm_fprintf (f, "}\n");
9572 }
9573 else
9574 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9575 SP_REGNUM, SP_REGNUM,
9576 current_function_pretend_args_size);
9577 }
9578
5895f793
RE
9579 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9580 if (regs_ever_live[regno] && !call_used_regs[regno]
9581 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
d5b7b3ae
RE
9582 live_regs_mask |= 1 << regno;
9583
5895f793 9584 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
d5b7b3ae
RE
9585 live_regs_mask |= 1 << LR_REGNUM;
9586
9587 if (TARGET_BACKTRACE)
9588 {
9589 int offset;
9590 int work_register = 0;
9591 int wr;
9592
9593 /* We have been asked to create a stack backtrace structure.
9594 The code looks like this:
9595
9596 0 .align 2
9597 0 func:
9598 0 sub SP, #16 Reserve space for 4 registers.
9599 2 push {R7} Get a work register.
9600 4 add R7, SP, #20 Get the stack pointer before the push.
9601 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9602 8 mov R7, PC Get hold of the start of this code plus 12.
9603 10 str R7, [SP, #16] Store it.
9604 12 mov R7, FP Get hold of the current frame pointer.
9605 14 str R7, [SP, #4] Store it.
9606 16 mov R7, LR Get hold of the current return address.
9607 18 str R7, [SP, #12] Store it.
9608 20 add R7, SP, #16 Point at the start of the backtrace structure.
9609 22 mov FP, R7 Put this value into the frame pointer. */
9610
9611 if ((live_regs_mask & 0xFF) == 0)
9612 {
9613 /* See if the a4 register is free. */
9614
9615 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9616 work_register = LAST_ARG_REGNUM;
9617 else /* We must push a register of our own */
9618 live_regs_mask |= (1 << LAST_LO_REGNUM);
9619 }
9620
9621 if (work_register == 0)
9622 {
9623 /* Select a register from the list that will be pushed to
9624 use as our work register. */
9625 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9626 if ((1 << work_register) & live_regs_mask)
9627 break;
9628 }
9629
9630 asm_fprintf
9631 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9632 SP_REGNUM, SP_REGNUM);
9633
9634 if (live_regs_mask)
9635 thumb_pushpop (f, live_regs_mask, 1);
9636
9637 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9638 if (wr & live_regs_mask)
9639 offset += 4;
9640
9641 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9642 offset + 16 + current_function_pretend_args_size);
9643
9644 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9645 offset + 4);
9646
9647 /* Make sure that the instruction fetching the PC is in the right place
9648 to calculate "start of backtrace creation code + 12". */
9649 if (live_regs_mask)
9650 {
9651 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9652 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9653 offset + 12);
9654 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9655 ARM_HARD_FRAME_POINTER_REGNUM);
9656 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9657 offset);
9658 }
9659 else
9660 {
9661 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9662 ARM_HARD_FRAME_POINTER_REGNUM);
9663 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9664 offset);
9665 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9666 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9667 offset + 12);
9668 }
9669
9670 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9671 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9672 offset + 8);
9673 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9674 offset + 12);
9675 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9676 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9677 }
9678 else if (live_regs_mask)
9679 thumb_pushpop (f, live_regs_mask, 1);
9680
9681 for (regno = 8; regno < 13; regno++)
9682 {
5895f793
RE
9683 if (regs_ever_live[regno] && !call_used_regs[regno]
9684 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9685 high_regs_pushed++;
d5b7b3ae
RE
9686 }
9687
9688 if (high_regs_pushed)
9689 {
9690 int pushable_regs = 0;
9691 int mask = live_regs_mask & 0xff;
9692 int next_hi_reg;
9693
9694 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9695 {
5895f793
RE
9696 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9697 && !(TARGET_SINGLE_PIC_BASE
9698 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
9699 break;
9700 }
9701
9702 pushable_regs = mask;
9703
9704 if (pushable_regs == 0)
9705 {
9706 /* Desperation time -- this probably will never happen. */
9707 if (regs_ever_live[LAST_ARG_REGNUM]
5895f793 9708 || !call_used_regs[LAST_ARG_REGNUM])
d5b7b3ae
RE
9709 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9710 mask = 1 << LAST_ARG_REGNUM;
9711 }
9712
9713 while (high_regs_pushed > 0)
9714 {
9715 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
9716 {
9717 if (mask & (1 << regno))
9718 {
9719 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
9720
5895f793 9721 high_regs_pushed--;
d5b7b3ae
RE
9722
9723 if (high_regs_pushed)
9724 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
9725 next_hi_reg--)
9726 {
9727 if (regs_ever_live[next_hi_reg]
5895f793
RE
9728 && !call_used_regs[next_hi_reg]
9729 && !(TARGET_SINGLE_PIC_BASE
9730 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
9731 break;
9732 }
9733 else
9734 {
5895f793 9735 mask &= ~((1 << regno) - 1);
d5b7b3ae
RE
9736 break;
9737 }
9738 }
9739 }
9740
9741 thumb_pushpop (f, mask, 1);
9742 }
9743
9744 if (pushable_regs == 0
9745 && (regs_ever_live[LAST_ARG_REGNUM]
5895f793 9746 || !call_used_regs[LAST_ARG_REGNUM]))
d5b7b3ae
RE
9747 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9748 }
9749}
9750
9751/* Handle the case of a double word load into a low register from
9752 a computed memory address. The computed address may involve a
9753 register which is overwritten by the load. */
9754
cd2b33d0 9755const char *
d5b7b3ae
RE
9756thumb_load_double_from_address (operands)
9757 rtx * operands;
9758{
9759 rtx addr;
9760 rtx base;
9761 rtx offset;
9762 rtx arg1;
9763 rtx arg2;
9764
9765 if (GET_CODE (operands[0]) != REG)
9766 fatal ("thumb_load_double_from_address: destination is not a register");
9767
9768 if (GET_CODE (operands[1]) != MEM)
9769 {
9770 debug_rtx (operands[1]);
9771 fatal ("thumb_load_double_from_address: source is not a computed memory address");
9772 }
9773
9774 /* Get the memory address. */
9775 addr = XEXP (operands[1], 0);
9776
9777 /* Work out how the memory address is computed. */
9778 switch (GET_CODE (addr))
9779 {
9780 case REG:
9781 operands[2] = gen_rtx (MEM, SImode,
9782 plus_constant (XEXP (operands[1], 0), 4));
9783
9784 if (REGNO (operands[0]) == REGNO (addr))
9785 {
9786 output_asm_insn ("ldr\t%H0, %2", operands);
9787 output_asm_insn ("ldr\t%0, %1", operands);
9788 }
9789 else
9790 {
9791 output_asm_insn ("ldr\t%0, %1", operands);
9792 output_asm_insn ("ldr\t%H0, %2", operands);
9793 }
9794 break;
9795
9796 case CONST:
9797 /* Compute <address> + 4 for the high order load. */
9798 operands[2] = gen_rtx (MEM, SImode,
9799 plus_constant (XEXP (operands[1], 0), 4));
9800
9801 output_asm_insn ("ldr\t%0, %1", operands);
9802 output_asm_insn ("ldr\t%H0, %2", operands);
9803 break;
9804
9805 case PLUS:
9806 arg1 = XEXP (addr, 0);
9807 arg2 = XEXP (addr, 1);
9808
9809 if (CONSTANT_P (arg1))
9810 base = arg2, offset = arg1;
9811 else
9812 base = arg1, offset = arg2;
9813
9814 if (GET_CODE (base) != REG)
9815 fatal ("thumb_load_double_from_address: base is not a register");
9816
9817 /* Catch the case of <address> = <reg> + <reg> */
9818 if (GET_CODE (offset) == REG)
9819 {
9820 int reg_offset = REGNO (offset);
9821 int reg_base = REGNO (base);
9822 int reg_dest = REGNO (operands[0]);
9823
9824 /* Add the base and offset registers together into the
9825 higher destination register. */
9826 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
9827 reg_dest + 1, reg_base, reg_offset);
9828
9829 /* Load the lower destination register from the address in
9830 the higher destination register. */
9831 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
9832 reg_dest, reg_dest + 1);
9833
9834 /* Load the higher destination register from its own address
9835 plus 4. */
9836 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
9837 reg_dest + 1, reg_dest + 1);
9838 }
9839 else
9840 {
9841 /* Compute <address> + 4 for the high order load. */
9842 operands[2] = gen_rtx (MEM, SImode,
9843 plus_constant (XEXP (operands[1], 0), 4));
9844
9845 /* If the computed address is held in the low order register
9846 then load the high order register first, otherwise always
9847 load the low order register first. */
9848 if (REGNO (operands[0]) == REGNO (base))
9849 {
9850 output_asm_insn ("ldr\t%H0, %2", operands);
9851 output_asm_insn ("ldr\t%0, %1", operands);
9852 }
9853 else
9854 {
9855 output_asm_insn ("ldr\t%0, %1", operands);
9856 output_asm_insn ("ldr\t%H0, %2", operands);
9857 }
9858 }
9859 break;
9860
9861 case LABEL_REF:
9862 /* With no registers to worry about we can just load the value
9863 directly. */
9864 operands[2] = gen_rtx (MEM, SImode,
9865 plus_constant (XEXP (operands[1], 0), 4));
9866
9867 output_asm_insn ("ldr\t%H0, %2", operands);
9868 output_asm_insn ("ldr\t%0, %1", operands);
9869 break;
9870
9871 default:
9872 debug_rtx (operands[1]);
9873 fatal ("thumb_load_double_from_address: Unhandled address calculation");
9874 break;
9875 }
9876
9877 return "";
9878}
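
/* For instance (illustrative), loading a double word at [r2 + r3]
   into r0/r1 emits:

	add	r1, r2, r3	@ build the address in the high half
	ldr	r0, [r1, #0]	@ low word first
	ldr	r1, [r1, #4]	@ high word last, consuming the address

   The address is built in the *higher* destination register precisely
   because that register is overwritten last. */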
9879
9880
cd2b33d0 9881const char *
d5b7b3ae
RE
9882thumb_output_move_mem_multiple (n, operands)
9883 int n;
9884 rtx * operands;
9885{
9886 rtx tmp;
9887
9888 switch (n)
9889 {
9890 case 2:
ca356f3a 9891 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9892 {
ca356f3a
RE
9893 tmp = operands[4];
9894 operands[4] = operands[5];
9895 operands[5] = tmp;
d5b7b3ae 9896 }
ca356f3a
RE
9897 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
9898 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
d5b7b3ae
RE
9899 break;
9900
9901 case 3:
ca356f3a 9902 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9903 {
ca356f3a
RE
9904 tmp = operands[4];
9905 operands[4] = operands[5];
9906 operands[5] = tmp;
d5b7b3ae 9907 }
ca356f3a 9908 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 9909 {
ca356f3a
RE
9910 tmp = operands[5];
9911 operands[5] = operands[6];
9912 operands[6] = tmp;
d5b7b3ae 9913 }
ca356f3a 9914 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9915 {
ca356f3a
RE
9916 tmp = operands[4];
9917 operands[4] = operands[5];
9918 operands[5] = tmp;
d5b7b3ae
RE
9919 }
9920
ca356f3a
RE
9921 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
9922 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
d5b7b3ae
RE
9923 break;
9924
9925 default:
9926 abort ();
9927 }
9928
9929 return "";
9930}
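
/* The swaps above exist because LDMIA/STMIA require their register
   lists in ascending order.  E.g. (sketch) with operands 4..6 being
   r5, r3, r4, the sorting yields:

	ldmia	r1!, {r3, r4, r5}
	stmia	r0!, {r3, r4, r5}
*/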
9931
9932/* Routines for generating RTL. */
9933
9934void
9935thumb_expand_movstrqi (operands)
9936 rtx * operands;
9937{
9938 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
9939 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
9940 HOST_WIDE_INT len = INTVAL (operands[2]);
9941 HOST_WIDE_INT offset = 0;
9942
9943 while (len >= 12)
9944 {
ca356f3a 9945 emit_insn (gen_movmem12b (out, in, out, in));
d5b7b3ae
RE
9946 len -= 12;
9947 }
9948
9949 if (len >= 8)
9950 {
ca356f3a 9951 emit_insn (gen_movmem8b (out, in, out, in));
d5b7b3ae
RE
9952 len -= 8;
9953 }
9954
9955 if (len >= 4)
9956 {
9957 rtx reg = gen_reg_rtx (SImode);
9958 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
9959 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
9960 len -= 4;
9961 offset += 4;
9962 }
9963
9964 if (len >= 2)
9965 {
9966 rtx reg = gen_reg_rtx (HImode);
9967 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
9968 plus_constant (in, offset))));
9969 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
9970 reg));
9971 len -= 2;
9972 offset += 2;
9973 }
9974
9975 if (len)
9976 {
9977 rtx reg = gen_reg_rtx (QImode);
9978 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
9979 plus_constant (in, offset))));
9980 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
9981 reg));
9982 }
9983}
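
/* Worked example (sizes illustrative): a 23 byte copy decomposes as
   12 + 8 + 2 + 1 -- one movmem12b, one movmem8b, then a halfword and
   a byte move, with OFFSET tracking the bytes handled by the scalar
   tail copies. */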
9984
9985int
9986thumb_cmp_operand (op, mode)
9987 rtx op;
9988 enum machine_mode mode;
9989{
9990 return ((GET_CODE (op) == CONST_INT
9991 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
9992 || register_operand (op, mode));
9993}
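
/* The < 256 test above reflects the 8-bit unsigned immediate field
   of the Thumb CMP instruction. */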
9994
cd2b33d0 9995static const char *
d5b7b3ae
RE
9996thumb_condition_code (x, invert)
9997 rtx x;
9998 int invert;
9999{
cd2b33d0 10000 static const char * conds[] =
d5b7b3ae
RE
10001 {
10002 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10003 "hi", "ls", "ge", "lt", "gt", "le"
10004 };
10005 int val;
10006
10007 switch (GET_CODE (x))
10008 {
10009 case EQ: val = 0; break;
10010 case NE: val = 1; break;
10011 case GEU: val = 2; break;
10012 case LTU: val = 3; break;
10013 case GTU: val = 8; break;
10014 case LEU: val = 9; break;
10015 case GE: val = 10; break;
10016 case LT: val = 11; break;
10017 case GT: val = 12; break;
10018 case LE: val = 13; break;
10019 default:
10020 abort ();
10021 }
10022
10023 return conds[val ^ invert];
10024}
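
/* The conds[] table pairs each condition with its inverse, which is
   what makes the "val ^ invert" lookup work: e.g. EQ (index 0)
   inverts to NE (index 1), GE (10) to LT (11). */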
10025
10026/* Handle storing a half-word to memory during reload. */
10027void
10028thumb_reload_out_hi (operands)
10029 rtx * operands;
10030{
10031 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10032}
10033
10034/* Handle reading a half-word from memory during reload. */
10035void
10036thumb_reload_in_hi (operands)
10037 rtx * operands ATTRIBUTE_UNUSED;
10038{
10039 abort ();
10040}
10041
c27ba912
DM
10042/* Return the length of a function name prefix
10043 that starts with the character 'c'. */
10044static int
10045arm_get_strip_length (char c)
10046{
10047 switch (c)
10048 {
10049 ARM_NAME_ENCODING_LENGTHS
10050 default: return 0;
10051 }
10052}
10053
10054/* Return a pointer to a function's name with any
10055 and all prefix encodings stripped from it. */
10056const char *
10057arm_strip_name_encoding (const char * name)
10058{
10059 int skip;
10060
10061 while ((skip = arm_get_strip_length (* name)))
10062 name += skip;
10063
10064 return name;
10065}
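
/* Usage sketch, assuming a hypothetical ARM_NAME_ENCODING_LENGTHS in
   which '*' and '@' are one-character prefixes:

	arm_strip_name_encoding ("*@foo")  =>  "foo"

   Each loop iteration strips one recognised prefix. */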
10066
2b835d68 10067#ifdef AOF_ASSEMBLER
6354dc9b 10068/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 10069
32de079a
RE
10070rtx aof_pic_label = NULL_RTX;
10071struct pic_chain
10072{
62b10bbc
NC
10073 struct pic_chain * next;
10074 char * symname;
32de079a
RE
10075};
10076
62b10bbc 10077static struct pic_chain * aof_pic_chain = NULL;
32de079a
RE
10078
10079rtx
10080aof_pic_entry (x)
10081 rtx x;
10082{
62b10bbc 10083 struct pic_chain ** chainp;
32de079a
RE
10084 int offset;
10085
10086 if (aof_pic_label == NULL_RTX)
10087 {
92a432f4
RE
10088 /* We mark this here and not in arm_add_gc_roots() to avoid
10089 polluting even more code with ifdefs, and because it never
10090 contains anything useful until we assign to it here. */
5895f793 10091 ggc_add_rtx_root (&aof_pic_label, 1);
43cffd11 10092 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
10093 }
10094
10095 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10096 offset += 4, chainp = &(*chainp)->next)
10097 if ((*chainp)->symname == XSTR (x, 0))
10098 return plus_constant (aof_pic_label, offset);
10099
10100 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10101 (*chainp)->next = NULL;
10102 (*chainp)->symname = XSTR (x, 0);
10103 return plus_constant (aof_pic_label, offset);
10104}
10105
10106void
10107aof_dump_pic_table (f)
62b10bbc 10108 FILE * f;
32de079a 10109{
62b10bbc 10110 struct pic_chain * chain;
32de079a
RE
10111
10112 if (aof_pic_chain == NULL)
10113 return;
10114
dd18ae56
NC
10115 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10116 PIC_OFFSET_TABLE_REGNUM,
10117 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
10118 fputs ("|x$adcons|\n", f);
10119
10120 for (chain = aof_pic_chain; chain; chain = chain->next)
10121 {
10122 fputs ("\tDCD\t", f);
10123 assemble_name (f, chain->symname);
10124 fputs ("\n", f);
10125 }
10126}
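
/* The output of the function above might look like this (symbol names
   hypothetical, PIC register here assumed to print as sl):

	AREA |sl$$adcons|, BASED sl
|x$adcons|
	DCD	foo
	DCD	bar
*/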
10127
2b835d68
RE
10128int arm_text_section_count = 1;
10129
10130char *
84ed5e79 10131aof_text_section ()
2b835d68
RE
10132{
10133 static char buf[100];
2b835d68
RE
10134 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10135 arm_text_section_count++);
10136 if (flag_pic)
10137 strcat (buf, ", PIC, REENTRANT");
10138 return buf;
10139}
10140
10141static int arm_data_section_count = 1;
10142
10143char *
10144aof_data_section ()
10145{
10146 static char buf[100];
10147 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10148 return buf;
10149}
10150
10151/* The AOF assembler is religiously strict about declarations of
10152 imported and exported symbols, so that it is impossible to declare
956d6950 10153 a function as imported near the beginning of the file, and then to
2b835d68
RE
10154 export it later on. It is, however, possible to delay the decision
10155 until all the functions in the file have been compiled. To get
10156 around this, we maintain a list of the imports and exports, and
10157 delete from it any that are subsequently defined. At the end of
10158 compilation we spit the remainder of the list out before the END
10159 directive. */
10160
10161struct import
10162{
62b10bbc
NC
10163 struct import * next;
10164 char * name;
2b835d68
RE
10165};
10166
62b10bbc 10167static struct import * imports_list = NULL;
2b835d68
RE
10168
10169void
10170aof_add_import (name)
62b10bbc 10171 char * name;
2b835d68 10172{
62b10bbc 10173 struct import * new;
2b835d68
RE
10174
10175 for (new = imports_list; new; new = new->next)
10176 if (new->name == name)
10177 return;
10178
10179 new = (struct import *) xmalloc (sizeof (struct import));
10180 new->next = imports_list;
10181 imports_list = new;
10182 new->name = name;
10183}
10184
10185void
10186aof_delete_import (name)
62b10bbc 10187 char * name;
2b835d68 10188{
62b10bbc 10189 struct import ** old;
2b835d68
RE
10190
10191 for (old = &imports_list; *old; old = & (*old)->next)
10192 {
10193 if ((*old)->name == name)
10194 {
10195 *old = (*old)->next;
10196 return;
10197 }
10198 }
10199}
10200
10201int arm_main_function = 0;
10202
10203void
10204aof_dump_imports (f)
62b10bbc 10205 FILE * f;
2b835d68
RE
10206{
10207 /* The AOF assembler needs this to cause the startup code to be extracted
10208 from the library. Bringing in __main causes the whole thing to work
10209 automagically. */
10210 if (arm_main_function)
10211 {
10212 text_section ();
10213 fputs ("\tIMPORT __main\n", f);
10214 fputs ("\tDCD __main\n", f);
10215 }
10216
10217 /* Now dump the remaining imports. */
10218 while (imports_list)
10219 {
10220 fprintf (f, "\tIMPORT\t");
10221 assemble_name (f, imports_list->name);
10222 fputc ('\n', f);
10223 imports_list = imports_list->next;
10224 }
10225}
10226#endif /* AOF_ASSEMBLER */