/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "tm_p.h"
/* Forward definitions of types.  */
typedef struct minipool_node   Mnode;
typedef struct minipool_fixup  Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static int arm_naked_function_p PARAMS ((tree));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
static const char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static const char * output_multi_immediate PARAMS ((rtx *, const char *, const char *, int, Hint));
static void print_multi_reg PARAMS ((FILE *, const char *, int, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static const char * shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static const char * thumb_condition_code PARAMS ((rtx, int));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
\f
#undef Hint
#undef Mmode
#undef Ulong
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char * minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free  free

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;
/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must setup `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to zero at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};
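
/* Illustrative note: the table above lists each condition next to its
   inverse (eq/ne, cs/cc, mi/pl, vs/vc, hi/ls, ge/lt, gt/le, al/nv), so
   inverting a condition amounts to toggling the low bit of its index.
   For example, if EQ is encoded as 0, arm_condition_codes[0 ^ 1] yields
   "ne".  This assumes the arm_cond_code enumeration follows the same
   ordering as this table.  */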
#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */
struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",        FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",        FL_MODE26 | FL_MODE32 },
  {"arm720",        FL_MODE26 | FL_MODE32 },
  {"arm710c",       FL_MODE26 | FL_MODE32 },
  {"arm7100",       FL_MODE26 | FL_MODE32 },
  {"arm7500",       FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
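
/* Illustrative note: the loop above uses the classic trick that
   (value & -value) isolates the least significant set bit, so each
   iteration clears exactly one bit.  For example, for value == 0x16
   (binary 10110) the loop clears bits 1, 2 and 4 in turn and
   returns 3.  */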

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int          cpu;
        const char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && !TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched  = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong = (tune_flags & FL_STRONG) != 0;
  thumb_code    = (TARGET_ARM == 0);
  arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
                    && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
\f
/* Return 1 if it is possible to return using a single instruction.  */
int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed
      /* Or if the function is variadic.  */
      || current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || cfun->machine->eh_epilogue_sp_ofs != NULL
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && !call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
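
/* Illustrative note: when all of the checks above pass for a simple leaf
   function that saves no registers and makes no stack adjustment, the
   whole epilogue collapses to the single instruction "mov pc, lr" (or a
   conditionally executed form of it when ISCOND is set).  */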

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~HOST_UINT (0xffffffff)) != 0
      && ((i & ~HOST_UINT (0xffffffff))
          != ((~HOST_UINT (0))
              & ~HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                       >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    } while (mask != ~HOST_UINT (0xFF));

  return FALSE;
}
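
/* Illustrative examples of the rotation rule tested above: a valid ARM
   immediate is an 8-bit value rotated right by an even number of bits.
   So 0x000000ff, 0x00000ff0, 0xff000000 and 0xf000000f are all valid,
   while 0x00000101 and 0x00ffff00 are not and must be synthesised from
   several instructions (see arm_gen_constant below).  */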

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
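
/* Illustrative note: the PLUS case above accepts a constant whose
   negation is a valid immediate, because the compiler can use SUB
   instead of ADD (and vice versa); e.g. adding -255 can be emitted as
   "sub rD, rN, #255".  Similarly the AND case accepts ~i, because the
   BIC (bit clear) instruction computes rN & ~immediate.  */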

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
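
/* Illustrative example: loading the invalid immediate 0x00000fff as a
   SET can be synthesised in two instructions, e.g.

       mov     rD, #0xf00
       orr     rD, rD, #0xff

   both 0xf00 and 0xff being valid rotated immediates.  Whether this is
   done in-line, or the constant is instead placed in a constant pool,
   depends on arm_constant_limit and on whether arm_reorg has already
   run, as described in the comment above.  */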

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

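  /* Illustrative example of the four counts computed above: for
     remainder == 0x0000ff00, clear_sign_bit_copies is 16 (bits 31..16
     are clear), set_sign_bit_copies is 0, clear_zero_bit_copies is 8
     (bits 7..0 are clear) and set_zero_bit_copies is 0.  These counts
     drive the shift-based strategies in the switch below.  */
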
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

1122 case XOR:
7b64da89
RE
1123 /* If we have IOR or XOR, and the constant can be loaded in a
1124 single instruction, and we can find a temporary to put it in,
e2c671ba
RE
1125 then this can be done in two instructions instead of 3-4. */
1126 if (subtargets
d499463f 1127 /* TARGET can't be NULL if SUBTARGETS is 0 */
5895f793 1128 || (reload_completed && !reg_mentioned_p (target, source)))
e2c671ba 1129 {
5895f793 1130 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
e2c671ba 1131 {
2b835d68
RE
1132 if (generate)
1133 {
1134 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
e2c671ba 1135
43cffd11
RE
1136 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1137 emit_insn (gen_rtx_SET (VOIDmode, target,
1138 gen_rtx (code, mode, source, sub)));
2b835d68 1139 }
e2c671ba
RE
1140 return 2;
1141 }
1142 }
1143
1144 if (code == XOR)
1145 break;
1146
1147 if (set_sign_bit_copies > 8
1148 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1149 {
2b835d68
RE
1150 if (generate)
1151 {
1152 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1153 rtx shift = GEN_INT (set_sign_bit_copies);
1154
43cffd11
RE
1155 emit_insn (gen_rtx_SET (VOIDmode, sub,
1156 gen_rtx_NOT (mode,
1157 gen_rtx_ASHIFT (mode,
1158 source,
f5a1b0d2 1159 shift))));
43cffd11
RE
1160 emit_insn (gen_rtx_SET (VOIDmode, target,
1161 gen_rtx_NOT (mode,
1162 gen_rtx_LSHIFTRT (mode, sub,
1163 shift))));
2b835d68 1164 }
e2c671ba
RE
1165 return 2;
1166 }
1167
1168 if (set_zero_bit_copies > 8
1169 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1170 {
2b835d68
RE
1171 if (generate)
1172 {
1173 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1174 rtx shift = GEN_INT (set_zero_bit_copies);
1175
43cffd11
RE
1176 emit_insn (gen_rtx_SET (VOIDmode, sub,
1177 gen_rtx_NOT (mode,
1178 gen_rtx_LSHIFTRT (mode,
1179 source,
f5a1b0d2 1180 shift))));
43cffd11
RE
1181 emit_insn (gen_rtx_SET (VOIDmode, target,
1182 gen_rtx_NOT (mode,
1183 gen_rtx_ASHIFT (mode, sub,
f5a1b0d2 1184 shift))));
2b835d68 1185 }
e2c671ba
RE
1186 return 2;
1187 }
1188
5895f793 1189 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
e2c671ba 1190 {
2b835d68
RE
1191 if (generate)
1192 {
1193 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
1194 emit_insn (gen_rtx_SET (VOIDmode, sub,
1195 gen_rtx_NOT (mode, source)));
2b835d68
RE
1196 source = sub;
1197 if (subtargets)
1198 sub = gen_reg_rtx (mode);
43cffd11
RE
1199 emit_insn (gen_rtx_SET (VOIDmode, sub,
1200 gen_rtx_AND (mode, source,
1201 GEN_INT (temp1))));
1202 emit_insn (gen_rtx_SET (VOIDmode, target,
1203 gen_rtx_NOT (mode, sub)));
2b835d68 1204 }
e2c671ba
RE
1205 return 3;
1206 }
1207 break;
1208
1209 case AND:
1210 /* See if two shifts will do 2 or more insn's worth of work. */
1211 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1212 {
e5951263 1213 HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
e2c671ba 1214 << (32 - clear_sign_bit_copies))
e5951263 1215 & HOST_UINT (0xffffffff));
e2c671ba 1216
e5951263 1217 if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
e2c671ba 1218 {
2b835d68
RE
1219 if (generate)
1220 {
d499463f 1221 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68 1222 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1223 new_src, source, subtargets, 1);
1224 source = new_src;
2b835d68
RE
1225 }
1226 else
d499463f
RE
1227 {
1228 rtx targ = subtargets ? NULL_RTX : target;
1229 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1230 targ, source, subtargets, 0);
1231 }
2b835d68
RE
1232 }
1233
1234 if (generate)
1235 {
d499463f
RE
1236 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1237 rtx shift = GEN_INT (clear_sign_bit_copies);
1238
1239 emit_insn (gen_ashlsi3 (new_src, source, shift));
1240 emit_insn (gen_lshrsi3 (target, new_src, shift));
e2c671ba
RE
1241 }
1242
e2c671ba
RE
1243 return insns + 2;
1244 }
1245
1246 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1247 {
1248 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
e2c671ba 1249
e5951263 1250 if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
e2c671ba 1251 {
2b835d68
RE
1252 if (generate)
1253 {
d499463f
RE
1254 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1255
2b835d68 1256 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1257 new_src, source, subtargets, 1);
1258 source = new_src;
2b835d68
RE
1259 }
1260 else
d499463f
RE
1261 {
1262 rtx targ = subtargets ? NULL_RTX : target;
1263
1264 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1265 targ, source, subtargets, 0);
1266 }
2b835d68
RE
1267 }
1268
1269 if (generate)
1270 {
d499463f
RE
1271 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1272 rtx shift = GEN_INT (clear_zero_bit_copies);
1273
1274 emit_insn (gen_lshrsi3 (new_src, source, shift));
1275 emit_insn (gen_ashlsi3 (target, new_src, shift));
e2c671ba
RE
1276 }
1277
e2c671ba
RE
1278 return insns + 2;
1279 }
1280
1281 break;
1282
1283 default:
1284 break;
1285 }
1286
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }
  return insns;
}

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~(HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
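
/* Illustrative example: "x <= 0xffffff" cannot use 0xffffff directly
   (it is not a valid rotated immediate), but it is equivalent to
   "x < 0x1000000", and 0x1000000 is a valid immediate, so the LE
   comparison is rewritten as LT against i + 1.  The boundary checks
   above prevent the transformation from wrapping around at the ends
   of the value range.  */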

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE

  if (int_size_in_bytes (type) > 4)
    /* All structures/unions bigger than one word are returned in memory.  */
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
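
/* Illustrative examples (non-WinCE): "struct { int i; }" fits in one
   word and its only field is integral, so it is returned in a register;
   "struct { float f; }" is returned in memory because its first field
   is a float; "struct { int a, b; }" is returned in memory because it
   is larger than four bytes.  */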

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
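
/* Illustrative note: under the APCS the first NUM_ARG_REGS words of
   arguments (four, i.e. r0-r3) are passed in registers and the rest go
   on the stack.  When the return value is an aggregate returned in
   memory, r0 is reserved for the return-value pointer, which is why
   pcum->nregs starts at 1 in that case above.  */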
\f
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,          /* No #pragma [no_]long_calls is in effect.  */
  LONG,         /* #pragma long_calls is in effect.  */
  SHORT         /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}
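
/* Illustrative usage of the pragmas handled above (a sketch; the pragma
   names themselves are registered elsewhere by the target):

       #pragma long_calls
       void far_away (void);    -- gets the long_call attribute
       #pragma no_long_calls
       void near_by (void);     -- gets the short_call attribute
       #pragma long_calls_off
       void ordinary (void);    -- no call attribute added

   See arm_set_default_type_attributes below for where the state set
   here is applied.  */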

\f
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid
   machine-specific attribute for TYPE.  The attributes in ATTRIBUTES
   have previously been assigned to TYPE.  */
int
arm_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  if (   TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  if (is_attribute_p ("long_call", identifier))
    return (args == NULL_TREE);

  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  if (is_attribute_p ("short_call", identifier))
    return (args == NULL_TREE);

  return 0;
}
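
/* A usage sketch (user source): the attributes attach to function
   types and take no arguments --

	void remote_fn (void) __attribute__ ((long_call));
	void local_fn (void) __attribute__ ((short_call));

   -- a form with arguments, say __attribute__ ((long_call (1))), is
   rejected above because ARGS must be NULL_TREE.  */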

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
arm_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same
	 attribute.  */
      if ((l1 != l2) || (s1 != s2))
	return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
	return 0;
    }

  return 1;
}

/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */
void
arm_encode_call_attribute (decl, flag)
     tree decl;
     int flag;
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int          len = strlen (str);
  char *       newstr;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  /* Build the flagged name in a writable buffer, then save a
     permanent copy of it in GC-managed storage.  */
  newstr = alloca (len + 2);
  newstr[0] = flag;
  strcpy (newstr + 1, str);

  XSTR (XEXP (DECL_RTL (decl), 0), 0) = ggc_alloc_string (newstr, len + 1);
}
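
/* Encoding sketch: if "foo" carries the long_call attribute, its
   assembler name becomes LONG_CALL_FLAG_CHAR followed by "foo" -- one
   flag character prefixed to the symbol (the characters themselves
   are defined in the backend headers).  ENCODED_LONG_CALL_ATTR_P and
   ENCODED_SHORT_CALL_ATTR_P, used below, simply test that first
   character.  */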

/* Assign default attributes to a newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */
void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions, when
     inside #pragma long_calls, or __attribute__ ((short_call)),
     when inside #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
	attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
	attr_name = get_identifier ("short_call");
      else
	return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}
\f
/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */
static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition, however, then this may not be the
     real definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && !DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}

/* Return nonzero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
     or b.  is within the scope of a #pragma long_calls
     or c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
     or e.  is inside the scope of a #pragma no_long_calls
     or f.  has an __attribute__ ((section))
     or g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */
int
arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
     rtx sym_ref;
     int call_cookie;
     int call_symbol;
{
  if (!call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
	return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}
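
/* Decision sketch: compiling with -mlong-calls, a call to an external
   function carries CALL_LONG in its cookie and is answered with 1
   here, so the backend loads the full address and calls indirectly;
   a call to a function whose body has already been emitted in this
   file is caught by current_file_function_operand and stays a plain
   BL, the branch being known to be in range.  */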

/* Return nonzero if it is ok to make a tail-call to DECL.  */
int
arm_function_ok_for_sibcall (decl)
     tree decl;
{
  int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;

  /* Never tailcall something for which we have no decl, or if we
     are in Thumb mode.  */
  if (decl == NULL || TARGET_THUMB)
    return 0;

  /* Get the calling method.  */
  if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_SHORT;
  else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_LONG;

  /* Cannot tail-call to long calls, since these are out of range of
     a branch instruction.  However, if not compiling PIC, we know
     we can reach the symbol if it is in this compilation unit.  */
  if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
    return 0;

  /* If we are interworking and the function is not declared static
     then we can't tail-call it unless we know that it exists in this
     compilation unit (since it might be a Thumb routine).  */
  if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
    return 0;

  /* Everything else is ok.  */
  return 1;
}

\f
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
	  || (GET_CODE (x) == CONST
	      && GET_CODE (XEXP (x, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
			     gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					   address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  */
	  GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (!no_new_pseudos)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      current_function_uses_pic_offset_table = 1;

      if (NEED_GOT_RELOC)
	{
	  rtx pic_ref, address = gen_reg_rtx (Pmode);

	  emit_insn (gen_pic_load_addr (address, orig));
	  pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);

	  emit_move_insn (address, pic_ref);
	  return address;
	}
    }

  return orig;
}
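
/* A sketch (register numbers and label names illustrative, not
   mandated by the code above) of what the SYMBOL_REF branch turns
   into at the assembly level: a constant-pool load of the symbol's
   GOT offset, then a load through the PIC register,

	ldr	rN, .LCx		@ GOT offset of "sym"
	ldr	rN, [sl, rN]		@ address of "sym" via the GOT

   with the REG_EQUAL note letting the loop optimizer treat rN as
   equivalent to the original symbol.  */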

static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}

void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (!flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition, on the Thumb it is 'dot + 4'.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
			      gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  if (TARGET_ARM)
    emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
  else
    emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
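
/* Roughly, the sequence constructed above assembles to (ARM mode,
   where reading the pc yields 'dot + 8'; label names illustrative):

	ldr	sl, .LCx
   .LPIC0:
	add	sl, pc, sl
   ...
   .LCx:
	.word	_GLOBAL_OFFSET_TABLE_ - (.LPIC0 + 8)

   which leaves the address of the GOT in the PIC register.  */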

#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif

int
arm_rtx_costs (x, code, outer)
     rtx x;
     enum rtx_code code;
     enum rtx_code outer;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  if (TARGET_THUMB)
    {
      switch (code)
	{
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATERT:
	case PLUS:
	case MINUS:
	case COMPARE:
	case NEG:
	case NOT:
	  return COSTS_N_INSNS (1);

	case MULT:
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      int cycles = 0;
	      unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

	      while (i)
		{
		  i >>= 2;
		  cycles++;
		}
	      return COSTS_N_INSNS (2) + cycles;
	    }
	  return COSTS_N_INSNS (1) + 16;

	case SET:
	  return (COSTS_N_INSNS (1)
		  + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
			 + (GET_CODE (SET_DEST (x)) == MEM)));

	case CONST_INT:
	  if (outer == SET)
	    {
	      if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
		return 0;
	      if (thumb_shiftable_const (INTVAL (x)))
		return COSTS_N_INSNS (2);
	      return COSTS_N_INSNS (3);
	    }
	  else if (outer == PLUS
		   && INTVAL (x) < 256 && INTVAL (x) > -256)
	    return 0;
	  else if (outer == COMPARE
		   && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
	    return 0;
	  else if (outer == ASHIFT || outer == ASHIFTRT
		   || outer == LSHIFTRT)
	    return 0;
	  return COSTS_N_INSNS (2);

	case CONST:
	case CONST_DOUBLE:
	case LABEL_REF:
	case SYMBOL_REF:
	  return COSTS_N_INSNS (3);

	case UDIV:
	case UMOD:
	case DIV:
	case MOD:
	  return 100;

	case TRUNCATE:
	  return 99;

	case AND:
	case XOR:
	case IOR:
	  /* XXX guess.  */
	  return 8;

	case ADDRESSOF:
	case MEM:
	  /* XXX another guess.  */
	  /* Memory costs quite a lot for the first word, but subsequent words
	     load at the equivalent of a single insn each.  */
	  return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
		  + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

	case IF_THEN_ELSE:
	  /* XXX a guess.  */
	  if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	    return 14;
	  return 2;

	case ZERO_EXTEND:
	  /* XXX still guessing.  */
	  switch (GET_MODE (XEXP (x, 0)))
	    {
	    case QImode:
	      return (1 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case HImode:
	      return (4 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case SImode:
	      return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    default:
	      return 99;
	    }

	default:
	  return 99;
#if 0
	case FFS:
	case FLOAT:
	case FIX:
	case UNSIGNED_FIX:
	  /* XXX guess */
	  fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
		   rtx_name[code]);
	  abort ();
#endif
	}
    }

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
		       && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & HOST_UINT (0xffffffff));
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;

	  /* Tune as appropriate.  */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    case CONST_INT:
      if (const_ok_for_arm (INTVAL (x)))
	return outer == SET ? 2 : -1;
      else if (outer == AND
	       && const_ok_for_arm (~INTVAL (x)))
	return -1;
      else if ((outer == COMPARE
		|| outer == PLUS || outer == MINUS)
	       && const_ok_for_arm (-INTVAL (x)))
	return -1;
      else
	return 5;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 6;

    case CONST_DOUBLE:
      if (const_double_rtx_ok_for_fpu (x))
	return outer == SET ? 2 : -1;
      else if ((outer == COMPARE || outer == PLUS)
	       && neg_const_double_rtx_ok_for_fpu (x))
	return -1;
      return 7;

    default:
      return 99;
    }
}
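
/* A worked example of the Thumb MULT costing above: for a multiply by
   the constant 0x65 (binary 1100101) the loop shifts I right two bits
   at a time -- 0x65 -> 0x19 -> 0x6 -> 0x1 -> 0 -- so CYCLES is 4 and
   the cost returned is COSTS_N_INSNS (2) + 4, modelling a Booth-style
   multiplier that retires two bits of the operand per cycle.  */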

int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  /* XXX This is not strictly true for the FPA.  */
  if (REG_NOTE_KIND (link) == REG_DEP_ANTI
      || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    return 0;

  /* Call insns don't incur a stall, even if they follow a load.  */
  if (REG_NOTE_KIND (link) == 0
      && GET_CODE (insn) == CALL_INSN)
    return 1;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store; there is no conflict if the load reads
	 from a cached area.  Assume that loads from the stack, and from the
	 constant pool are cached, and that others will miss.  This is a
	 hack.  */

      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (hard_frame_pointer_rtx,
			      XEXP (SET_SRC (i_pat), 0)))
	return 1;
    }

  return cost;
}

/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

static const char * strings_fpa[8] =
{
  "0",   "1",   "2",   "3",
  "4",   "5",   "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
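
/* Example: the FPA encodes exactly the eight constants in strings_fpa
   above -- 0, 1, 2, 3, 4, 5, 0.5 and 10 -- as instruction immediates,
   so (const_double 4.0) is accepted here while 7.0 (or -0.0, rejected
   explicitly) must be loaded from memory instead.  */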

/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
\f
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  /* XXX might have to check for lo regs only for thumb ???  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
arm_reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int regno = true_regnum (op);

  return (!CONSTANT_P (op)
	  && (regno == -1
	      || (GET_CODE (op) == REG
		  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4).
   MODE is QImode if called when computing constraints, or VOIDmode when
   emitting patterns.  In this latter case we cannot use memory_operand()
   because it will fail on badly formed MEMs, which is precisely what we are
   trying to catch.  */
int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if 0
  if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
    return 0;
#endif
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad.  */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && (!s_register_operand (XEXP (op, 0), VOIDmode)
	  || (!s_register_operand (XEXP (op, 1), VOIDmode)
	      && GET_CODE (XEXP (op, 1)) != CONST_INT)))
    return 1;

  /* Big constants are also bad.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
	  || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so.  */
  return 0;
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
	  || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (TARGET_THUMB)
    return thumb_cmp_operand (op, mode);

  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */
int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
	  && GET_CODE (op) == MEM
	  && offsettable_address_p (reload_completed | reload_in_progress,
				    mode, XEXP (op, 0)));
}

/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */
int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
}

/* Similar to s_register_operand, but does not allow hard integer
   registers.  */
int
f_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}

/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return const_double_rtx_ok_for_fpu (op);

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
	    || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}

/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Like di_operand, but don't accept constants.  */
int
nonimmediate_di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DImode, XEXP (op, 0));

  return FALSE;
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Like soft_df_operand, but don't accept constants.  */
int
nonimmediate_soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DFmode, XEXP (op, 0));
  return FALSE;
}

/* Return TRUE for valid index operands.  */
int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (GET_CODE (op) != CONST_INT
		  || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (GET_CODE (op) != CONST_INT
		  || (INTVAL (op) < 32 && INTVAL (op) > 0))));
}

/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
	      || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for binary logical operators.  */

int
logical_binary_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
	return power_of_two_operand (XEXP (x, 1), mode);

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
	      || code == ROTATERT);
    }
}

/* Return TRUE if x is EQ or NE.  */
int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE if x is a comparison operator other than LTGT or UNEQ.  */
int
arm_comparison_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return (comparison_operator (x, mode)
	  && GET_CODE (x) != LTGT
	  && GET_CODE (x) != UNEQ);
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */
int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */
int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (   GET_MODE (x) == mode
      && GET_CODE (x) == REG
      && REGNO (x) == CC_REGNUM)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */
int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (   mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  return cc_register (x, mode);
}

/* Return TRUE if X references a SYMBOL_REF.  */
int
symbol_mentioned_p (x)
     rtx x;
{
  register const char * fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

/* Return TRUE if X references a LABEL_REF.  */
int
label_mentioned_p (x)
     rtx x;
{
  register const char * fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (label_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}

/* Return 1 if memory locations are adjacent.  */
int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
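
/* Example: (mem (plus (reg 4) (const_int 4))) and
   (mem (plus (reg 4) (const_int 8))) are adjacent -- same base
   register, offsets differing by exactly 4 in either order -- and a
   plain (mem (reg 4)) participates as offset 0, so it is adjacent to
   an offset-4 access from the same register.  */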

/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */
int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}

/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */
int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}

int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (i.e. immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg (operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != (int) REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, or if it overwrites the
	     base register but isn't the last insn in the list, then
	     we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register?  If not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending?  */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
     if the offset isn't small enough.  The reason 2 ldrs are faster
     is because these ARMs are able to do more than one cache access
     in a single cycle.  The ARM9 and StrongARM have Harvard caches,
     whilst the ARM8 has a double bandwidth cache.  This means that
     these cores can do both an instruction fetch and a data fetch in
     a single cycle, so the trick of calculating the address into a
     scratch register (one of the result regs) and then doing a load
     multiple actually becomes slower (and no smaller in code size).
     That is the transformation

	ldr	rd1, [rbase + offset]
	ldr	rd2, [rbase + offset + 4]

     to

	add	rd1, rbase, offset
	ldmia	rd1, {rd1, rd2}

     produces worse code -- '3 cycles + any stalls on rd2' instead of
     '2 cycles + any stalls on rd2'.  On ARMs with only one cache
     access per cycle, the first sequence could never complete in less
     than 6 cycles, whereas the ldm sequence would only take 5 and
     would make better use of sequential accesses if not hitting the
     cache.

     We cheat here and test 'arm_ld_sched' which we currently know to
     only be true for the ARM8, ARM9 and StrongARM.  If this ever
     changes, then the test below needs to be reworked.  */
  if (nops == 2 && arm_ld_sched)
    return 0;

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}

const char *
emit_ldm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      if (offset >= 0)
	sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) offset);
      else
	sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
3588
3589int
3590store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3591 rtx * operands;
84ed5e79 3592 int nops;
62b10bbc
NC
3593 int * regs;
3594 int * base;
3595 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3596{
3597 int unsorted_regs[4];
3598 HOST_WIDE_INT unsorted_offsets[4];
3599 int order[4];
ad076f4e 3600 int base_reg = -1;
84ed5e79
RE
3601 int i;
3602
3603 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3604 extended if required. */
3605 if (nops < 2 || nops > 4)
3606 abort ();
3607
3608 /* Loop over the operands and check that the memory references are
3609 suitable (i.e. immediate offsets from the same base register). At
3610 the same time, extract the target register, and the memory
3611 offsets. */
3612 for (i = 0; i < nops; i++)
3613 {
3614 rtx reg;
3615 rtx offset;
3616
56636818
JL
3617 /* Convert a subreg of a mem into the mem itself. */
3618 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3619 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3620
84ed5e79
RE
3621 if (GET_CODE (operands[nops + i]) != MEM)
3622 abort ();
3623
3624 /* Don't reorder volatile memory references; it doesn't seem worth
3625 looking for the case where the order is ok anyway. */
3626 if (MEM_VOLATILE_P (operands[nops + i]))
3627 return 0;
3628
3629 offset = const0_rtx;
3630
3631 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3632 || (GET_CODE (reg) == SUBREG
3633 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3634 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3635 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3636 == REG)
3637 || (GET_CODE (reg) == SUBREG
3638 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3639 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3640 == CONST_INT)))
3641 {
3642 if (i == 0)
3643 {
62b10bbc 3644 base_reg = REGNO (reg);
84ed5e79
RE
3645 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3646 ? REGNO (operands[i])
3647 : REGNO (SUBREG_REG (operands[i])));
3648 order[0] = 0;
3649 }
3650 else
3651 {
6354dc9b 3652 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3653 /* Not addressed from the same base register. */
3654 return 0;
3655
3656 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3657 ? REGNO (operands[i])
3658 : REGNO (SUBREG_REG (operands[i])));
3659 if (unsorted_regs[i] < unsorted_regs[order[0]])
3660 order[0] = i;
3661 }
3662
3663 /* If it isn't an integer register, then we can't do this. */
3664 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3665 return 0;
3666
3667 unsorted_offsets[i] = INTVAL (offset);
3668 }
3669 else
3670 /* Not a suitable memory address. */
3671 return 0;
3672 }
3673
3674 /* All the useful information has now been extracted from the
3675 operands into unsorted_regs and unsorted_offsets; additionally,
3676 order[0] has been set to the lowest numbered register in the
3677 list. Sort the registers into order, and check that the memory
3678 offsets are ascending and adjacent. */
3679
3680 for (i = 1; i < nops; i++)
3681 {
3682 int j;
3683
3684 order[i] = order[i - 1];
3685 for (j = 0; j < nops; j++)
3686 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3687 && (order[i] == order[i - 1]
3688 || unsorted_regs[j] < unsorted_regs[order[i]]))
3689 order[i] = j;
3690
3691 /* Have we found a suitable register? If not, one must be used more
3692 than once. */
3693 if (order[i] == order[i - 1])
3694 return 0;
3695
3696 /* Is the memory address adjacent and ascending? */
3697 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3698 return 0;
3699 }
3700
3701 if (base)
3702 {
3703 *base = base_reg;
3704
3705 for (i = 0; i < nops; i++)
3706 regs[i] = unsorted_regs[order[i]];
3707
3708 *load_offset = unsorted_offsets[order[0]];
3709 }
3710
3711 if (unsorted_offsets[order[0]] == 0)
3712 return 1; /* stmia */
3713
3714 if (unsorted_offsets[order[0]] == 4)
3715 return 2; /* stmib */
3716
3717 if (unsorted_offsets[order[nops - 1]] == 0)
3718 return 3; /* stmda */
3719
3720 if (unsorted_offsets[order[nops - 1]] == -4)
3721 return 4; /* stmdb */
3722
3723 return 0;
3724}
3725
cd2b33d0 3726const char *
84ed5e79 3727emit_stm_seq (operands, nops)
62b10bbc 3728 rtx * operands;
84ed5e79
RE
3729 int nops;
3730{
3731 int regs[4];
3732 int base_reg;
3733 HOST_WIDE_INT offset;
3734 char buf[100];
3735 int i;
3736
3737 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3738 {
3739 case 1:
3740 strcpy (buf, "stm%?ia\t");
3741 break;
3742
3743 case 2:
3744 strcpy (buf, "stm%?ib\t");
3745 break;
3746
3747 case 3:
3748 strcpy (buf, "stm%?da\t");
3749 break;
3750
3751 case 4:
3752 strcpy (buf, "stm%?db\t");
3753 break;
3754
3755 default:
3756 abort ();
3757 }
3758
3759 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3760 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3761
3762 for (i = 1; i < nops; i++)
3763 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3764 reg_names[regs[i]]);
3765
3766 strcat (buf, "}\t%@ phole stm");
3767
3768 output_asm_insn (buf, operands);
3769 return "";
3770}
3771
e2c671ba
RE
3772int
3773multi_register_push (op, mode)
0a81f500 3774 rtx op;
74bbc178 3775 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3776{
3777 if (GET_CODE (op) != PARALLEL
3778 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3779 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3780 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3781 return 0;
3782
3783 return 1;
3784}
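/* That is, multi_register_push accepts a PARALLEL whose first element
   is a SET with an UNSPEC of code 2 as its source -- the shape used
   by the multi-register push patterns.  A minimal sketch of an
   operand it accepts:

     (parallel [(set (mem ...) (unspec [(reg ...)] 2))
                ...])  */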
ff9940b0 3785\f
d7d01975 3786/* Routines for use with attributes. */
f3bb6135 3787
31fdb4d5 3788/* Return nonzero if ATTR is a valid attribute for DECL.
d7d01975
NC
3789 ATTRIBUTES are any existing attributes and ARGS are
3790 the arguments supplied with ATTR.
31fdb4d5
DE
3791
3792 Supported attributes:
3793
d5b7b3ae
RE
3794 naked:
3795 don't output any prologue or epilogue code; the user is assumed
3796 to do the right thing.
3797
3798 interfacearm:
3799 Always assume that this function will be entered in ARM mode,
3800 not Thumb mode, and that the caller wishes to be returned to in
3801 ARM mode. */
31fdb4d5 3802int
74bbc178 3803arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3804 tree decl;
31fdb4d5
DE
3805 tree attr;
3806 tree args;
3807{
3808 if (args != NULL_TREE)
3809 return 0;
3810
3811 if (is_attribute_p ("naked", attr))
3812 return TREE_CODE (decl) == FUNCTION_DECL;
d5b7b3ae
RE
3813
3814#ifdef ARM_PE
3815 if (is_attribute_p ("interfacearm", attr))
3816 return TREE_CODE (decl) == FUNCTION_DECL;
3817#endif /* ARM_PE */
3818
31fdb4d5
DE
3819 return 0;
3820}
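/* For example (an illustrative declaration, not taken from this
   file), a function is marked naked with:

     void start_kernel (void) __attribute__ ((naked));

   after which the compiler emits neither prologue nor epilogue code
   for its body.  */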
3821
3822/* Return non-zero if FUNC is a naked function. */
31fdb4d5
DE
3823static int
3824arm_naked_function_p (func)
3825 tree func;
3826{
3827 tree a;
3828
3829 if (TREE_CODE (func) != FUNCTION_DECL)
3830 abort ();
2e943e99 3831
31fdb4d5
DE
3832 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3833 return a != NULL_TREE;
3834}
f3bb6135 3835\f
6354dc9b 3836/* Routines for use in generating RTL. */
f3bb6135 3837rtx
56636818 3838arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3839 in_struct_p, scalar_p)
ff9940b0
RE
3840 int base_regno;
3841 int count;
3842 rtx from;
3843 int up;
3844 int write_back;
56636818
JL
3845 int unchanging_p;
3846 int in_struct_p;
c6df88cb 3847 int scalar_p;
ff9940b0
RE
3848{
3849 int i = 0, j;
3850 rtx result;
3851 int sign = up ? 1 : -1;
56636818 3852 rtx mem;
ff9940b0 3853
43cffd11 3854 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 3855 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 3856 if (write_back)
f3bb6135 3857 {
ff9940b0 3858 XVECEXP (result, 0, 0)
43cffd11
RE
3859 = gen_rtx_SET (GET_MODE (from), from,
3860 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3861 i = 1;
3862 count++;
f3bb6135
RE
3863 }
3864
ff9940b0 3865 for (j = 0; i < count; i++, j++)
f3bb6135 3866 {
43cffd11 3867 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3868 RTX_UNCHANGING_P (mem) = unchanging_p;
3869 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3870 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3871 XVECEXP (result, 0, i)
3872 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3873 }
3874
ff9940b0
RE
3875 return result;
3876}
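/* Sketch of the PARALLEL built above for base_regno == 0, count == 2,
   with UP and WRITE_BACK set (FROM holds the base address):

     (parallel [(set from (plus from (const_int 8)))
                (set (reg:SI 0) (mem:SI from))
                (set (reg:SI 1) (mem:SI (plus from (const_int 4))))])

   arm_gen_store_multiple below builds the mirror image, with the
   memory references on the left-hand side of each SET.  */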
3877
f3bb6135 3878rtx
56636818 3879arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3880 in_struct_p, scalar_p)
ff9940b0
RE
3881 int base_regno;
3882 int count;
3883 rtx to;
3884 int up;
3885 int write_back;
56636818
JL
3886 int unchanging_p;
3887 int in_struct_p;
c6df88cb 3888 int scalar_p;
ff9940b0
RE
3889{
3890 int i = 0, j;
3891 rtx result;
3892 int sign = up ? 1 : -1;
56636818 3893 rtx mem;
ff9940b0 3894
43cffd11 3895 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 3896 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 3897 if (write_back)
f3bb6135 3898 {
ff9940b0 3899 XVECEXP (result, 0, 0)
43cffd11
RE
3900 = gen_rtx_SET (GET_MODE (to), to,
3901 plus_constant (to, count * 4 * sign));
ff9940b0
RE
3902 i = 1;
3903 count++;
f3bb6135
RE
3904 }
3905
ff9940b0 3906 for (j = 0; i < count; i++, j++)
f3bb6135 3907 {
43cffd11 3908 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
3909 RTX_UNCHANGING_P (mem) = unchanging_p;
3910 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3911 MEM_SCALAR_P (mem) = scalar_p;
56636818 3912
43cffd11
RE
3913 XVECEXP (result, 0, i)
3914 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
3915 }
3916
ff9940b0
RE
3917 return result;
3918}
3919
880e2516
RE
3920int
3921arm_gen_movstrqi (operands)
62b10bbc 3922 rtx * operands;
880e2516
RE
3923{
3924 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 3925 int i;
880e2516 3926 rtx src, dst;
ad076f4e 3927 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 3928 rtx part_bytes_reg = NULL;
56636818
JL
3929 rtx mem;
3930 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 3931 int dst_scalar_p, src_scalar_p;
880e2516
RE
3932
3933 if (GET_CODE (operands[2]) != CONST_INT
3934 || GET_CODE (operands[3]) != CONST_INT
3935 || INTVAL (operands[2]) > 64
3936 || INTVAL (operands[3]) & 3)
3937 return 0;
3938
3939 st_dst = XEXP (operands[0], 0);
3940 st_src = XEXP (operands[1], 0);
56636818
JL
3941
3942 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3943 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 3944 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
3945 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3946 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 3947 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 3948
880e2516
RE
3949 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3950 fin_src = src = copy_to_mode_reg (SImode, st_src);
3951
d5b7b3ae 3952 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
880e2516
RE
3953 out_words_to_go = INTVAL (operands[2]) / 4;
3954 last_bytes = INTVAL (operands[2]) & 3;
3955
3956 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 3957 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
3958
3959 for (i = 0; in_words_to_go >= 2; i+=4)
3960 {
bd9c7e23 3961 if (in_words_to_go > 4)
56636818 3962 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
3963 src_unchanging_p,
3964 src_in_struct_p,
3965 src_scalar_p));
bd9c7e23
RE
3966 else
3967 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 3968 FALSE, src_unchanging_p,
c6df88cb 3969 src_in_struct_p, src_scalar_p));
bd9c7e23 3970
880e2516
RE
3971 if (out_words_to_go)
3972 {
bd9c7e23 3973 if (out_words_to_go > 4)
56636818
JL
3974 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3975 dst_unchanging_p,
c6df88cb
MM
3976 dst_in_struct_p,
3977 dst_scalar_p));
bd9c7e23
RE
3978 else if (out_words_to_go != 1)
3979 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3980 dst, TRUE,
3981 (last_bytes == 0
56636818
JL
3982 ? FALSE : TRUE),
3983 dst_unchanging_p,
c6df88cb
MM
3984 dst_in_struct_p,
3985 dst_scalar_p));
880e2516
RE
3986 else
3987 {
43cffd11 3988 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
3989 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3990 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3991 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3992 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
3993 if (last_bytes != 0)
3994 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
3995 }
3996 }
3997
3998 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3999 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4000 }
4001
4002 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4003 if (out_words_to_go)
62b10bbc
NC
4004 {
4005 rtx sreg;
4006
4007 mem = gen_rtx_MEM (SImode, src);
4008 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4009 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4010 MEM_SCALAR_P (mem) = src_scalar_p;
4011 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4012 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4013
4014 mem = gen_rtx_MEM (SImode, dst);
4015 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4016 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4017 MEM_SCALAR_P (mem) = dst_scalar_p;
4018 emit_move_insn (mem, sreg);
4019 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4020 in_words_to_go--;
4021
4022 if (in_words_to_go) /* Sanity check */
4023 abort ();
4024 }
880e2516
RE
4025
4026 if (in_words_to_go)
4027 {
4028 if (in_words_to_go < 0)
4029 abort ();
4030
43cffd11 4031 mem = gen_rtx_MEM (SImode, src);
56636818
JL
4032 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4033 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 4034 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 4035 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
4036 }
4037
d5b7b3ae
RE
4038 if (last_bytes && part_bytes_reg == NULL)
4039 abort ();
4040
880e2516
RE
4041 if (BYTES_BIG_ENDIAN && last_bytes)
4042 {
4043 rtx tmp = gen_reg_rtx (SImode);
4044
6354dc9b 4045 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
4046 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4047 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
4048 part_bytes_reg = tmp;
4049
4050 while (last_bytes)
4051 {
43cffd11 4052 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
4053 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4054 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4055 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4056 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 4057
880e2516
RE
4058 if (--last_bytes)
4059 {
4060 tmp = gen_reg_rtx (SImode);
4061 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4062 part_bytes_reg = tmp;
4063 }
4064 }
4065
4066 }
4067 else
4068 {
d5b7b3ae 4069 if (last_bytes > 1)
880e2516 4070 {
d5b7b3ae 4071 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
4072 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4073 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4074 MEM_SCALAR_P (mem) = dst_scalar_p;
d5b7b3ae
RE
4075 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4076 last_bytes -= 2;
4077 if (last_bytes)
880e2516
RE
4078 {
4079 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 4080
d5b7b3ae
RE
4081 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4082 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
4083 part_bytes_reg = tmp;
4084 }
4085 }
d5b7b3ae
RE
4086
4087 if (last_bytes)
4088 {
4089 mem = gen_rtx_MEM (QImode, dst);
4090 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4091 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4092 MEM_SCALAR_P (mem) = dst_scalar_p;
4093 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4094 }
880e2516
RE
4095 }
4096
4097 return 1;
4098}
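/* Example expansion, little-endian: a 7 byte copy loads two words
   with a single load-multiple, stores the first word whole and
   advances dst by 4, then stores the trailing three bytes from
   part_bytes_reg as an HImode store, an add of #2 to dst, and a
   final QImode store of the remaining byte.  */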
4099
5165176d
RE
4100/* Generate a memory reference for a half word, such that it will be loaded
4101 into the top 16 bits of the word. We can assume that the address is
4102 known to be alignable and of the form reg, or plus (reg, const). */
4103rtx
d5b7b3ae 4104arm_gen_rotated_half_load (memref)
5165176d
RE
4105 rtx memref;
4106{
4107 HOST_WIDE_INT offset = 0;
4108 rtx base = XEXP (memref, 0);
4109
4110 if (GET_CODE (base) == PLUS)
4111 {
4112 offset = INTVAL (XEXP (base, 1));
4113 base = XEXP (base, 0);
4114 }
4115
956d6950 4116 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 4117 if (TARGET_MMU_TRAPS
5165176d
RE
4118 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4119 return NULL;
4120
43cffd11 4121 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
4122
4123 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4124 return base;
4125
43cffd11 4126 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
4127}
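/* For instance, on a little-endian target a halfword at [rb, #6]
   already occupies the top half of the word at [rb, #4], so the
   SImode MEM is returned unrotated; a halfword at [rb, #4] sits in
   the bottom half, so a (rotate ... 16) is wrapped around the load
   to move it into the top 16 bits.  */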
4128
84ed5e79 4129static enum machine_mode
74bbc178 4130select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
4131 rtx x;
4132 rtx y;
4133 HOST_WIDE_INT cond_or;
4134{
4135 enum rtx_code cond1, cond2;
4136 int swapped = 0;
4137
4138 /* Currently we will probably get the wrong result if the individual
4139 comparisons are not simple. This also ensures that it is safe to
956d6950 4140 reverse a comparison if necessary. */
84ed5e79
RE
4141 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4142 != CCmode)
4143 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4144 != CCmode))
4145 return CCmode;
4146
4147 if (cond_or)
4148 cond1 = reverse_condition (cond1);
4149
4150 /* If the comparisons are not equal, and one doesn't dominate the other,
4151 then we can't do this. */
4152 if (cond1 != cond2
5895f793
RE
4153 && !comparison_dominates_p (cond1, cond2)
4154 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
4155 return CCmode;
4156
4157 if (swapped)
4158 {
4159 enum rtx_code temp = cond1;
4160 cond1 = cond2;
4161 cond2 = temp;
4162 }
4163
4164 switch (cond1)
4165 {
4166 case EQ:
5895f793 4167 if (cond2 == EQ || !cond_or)
84ed5e79
RE
4168 return CC_DEQmode;
4169
4170 switch (cond2)
4171 {
4172 case LE: return CC_DLEmode;
4173 case LEU: return CC_DLEUmode;
4174 case GE: return CC_DGEmode;
4175 case GEU: return CC_DGEUmode;
ad076f4e 4176 default: break;
84ed5e79
RE
4177 }
4178
4179 break;
4180
4181 case LT:
5895f793 4182 if (cond2 == LT || !cond_or)
84ed5e79
RE
4183 return CC_DLTmode;
4184 if (cond2 == LE)
4185 return CC_DLEmode;
4186 if (cond2 == NE)
4187 return CC_DNEmode;
4188 break;
4189
4190 case GT:
5895f793 4191 if (cond2 == GT || !cond_or)
84ed5e79
RE
4192 return CC_DGTmode;
4193 if (cond2 == GE)
4194 return CC_DGEmode;
4195 if (cond2 == NE)
4196 return CC_DNEmode;
4197 break;
4198
4199 case LTU:
5895f793 4200 if (cond2 == LTU || !cond_or)
84ed5e79
RE
4201 return CC_DLTUmode;
4202 if (cond2 == LEU)
4203 return CC_DLEUmode;
4204 if (cond2 == NE)
4205 return CC_DNEmode;
4206 break;
4207
4208 case GTU:
5895f793 4209 if (cond2 == GTU || !cond_or)
84ed5e79
RE
4210 return CC_DGTUmode;
4211 if (cond2 == GEU)
4212 return CC_DGEUmode;
4213 if (cond2 == NE)
4214 return CC_DNEmode;
4215 break;
4216
4217 /* The remaining cases only occur when both comparisons are the
4218 same. */
4219 case NE:
4220 return CC_DNEmode;
4221
4222 case LE:
4223 return CC_DLEmode;
4224
4225 case GE:
4226 return CC_DGEmode;
4227
4228 case LEU:
4229 return CC_DLEUmode;
4230
4231 case GEU:
4232 return CC_DGEUmode;
ad076f4e
RE
4233
4234 default:
4235 break;
84ed5e79
RE
4236 }
4237
4238 abort ();
4239}
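/* Dominance example: for (and (lt x y) (ne x y)) the LT test
   dominates, since LT succeeding implies that NE succeeds too, so
   CC_DLTmode is returned and only the LT test needs to be
   performed.  */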
4240
4241enum machine_mode
4242arm_select_cc_mode (op, x, y)
4243 enum rtx_code op;
4244 rtx x;
4245 rtx y;
4246{
4247 /* All floating point compares return CCFP if it is an equality
4248 comparison, and CCFPE otherwise. */
4249 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
4250 {
4251 switch (op)
4252 {
4253 case EQ:
4254 case NE:
4255 case UNORDERED:
4256 case ORDERED:
4257 case UNLT:
4258 case UNLE:
4259 case UNGT:
4260 case UNGE:
4261 case UNEQ:
4262 case LTGT:
4263 return CCFPmode;
4264
4265 case LT:
4266 case LE:
4267 case GT:
4268 case GE:
4269 return CCFPEmode;
4270
4271 default:
4272 abort ();
4273 }
4274 }
84ed5e79
RE
4275
4276 /* A compare with a shifted operand. Because of canonicalization, the
4277 comparison will have to be swapped when we emit the assembler. */
4278 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4279 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4280 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4281 || GET_CODE (x) == ROTATERT))
4282 return CC_SWPmode;
4283
956d6950
JL
4284 /* This is a special case that is used by combine to allow a
4285 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 4286 followed by a comparison of the shifted integer (only valid for
956d6950 4287 equalities and unsigned inequalities). */
84ed5e79
RE
4288 if (GET_MODE (x) == SImode
4289 && GET_CODE (x) == ASHIFT
4290 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4291 && GET_CODE (XEXP (x, 0)) == SUBREG
4292 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4293 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4294 && (op == EQ || op == NE
4295 || op == GEU || op == GTU || op == LTU || op == LEU)
4296 && GET_CODE (y) == CONST_INT)
4297 return CC_Zmode;
4298
4299 /* For an operation that sets the condition codes as a side-effect, the
4300 V flag is not set correctly, so we can only use comparisons where
4301 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4302 instead.) */
4303 if (GET_MODE (x) == SImode
4304 && y == const0_rtx
4305 && (op == EQ || op == NE || op == LT || op == GE)
4306 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4307 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4308 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4309 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4310 || GET_CODE (x) == LSHIFTRT
4311 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4312 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4313 return CC_NOOVmode;
4314
4315 /* A construct for a conditional compare: if the false arm contains
4316 0, then both conditions must be true; otherwise either condition
4317 must be true. Not all conditions are possible, so CCmode is
4318 returned if it can't be done. */
4319 if (GET_CODE (x) == IF_THEN_ELSE
4320 && (XEXP (x, 2) == const0_rtx
4321 || XEXP (x, 2) == const1_rtx)
4322 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4323 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
74bbc178 4324 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
84ed5e79
RE
4325 INTVAL (XEXP (x, 2)));
4326
4327 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4328 return CC_Zmode;
4329
bd9c7e23
RE
4330 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4331 && GET_CODE (x) == PLUS
4332 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4333 return CC_Cmode;
4334
84ed5e79
RE
4335 return CCmode;
4336}
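/* The CC_Cmode case above matches the usual idiom for detecting
   carry-out from an addition: (ltu (plus a b) a) is true exactly
   when a + b wraps around, so only the carry flag needs to be
   valid.  */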
4337
ff9940b0
RE
4338/* X and Y are two things to compare using CODE. Emit the compare insn and
4339 return the rtx for register 0 in the proper mode. FP means this is a
4340 floating point compare: I don't think that it is needed on the ARM. */
4341
4342rtx
d5b7b3ae 4343arm_gen_compare_reg (code, x, y)
ff9940b0
RE
4344 enum rtx_code code;
4345 rtx x, y;
4346{
4347 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 4348 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 4349
43cffd11
RE
4350 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4351 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4352
4353 return cc_reg;
4354}
4355
0a81f500
RE
4356void
4357arm_reload_in_hi (operands)
62b10bbc 4358 rtx * operands;
0a81f500 4359{
f9cc092a
RE
4360 rtx ref = operands[1];
4361 rtx base, scratch;
4362 HOST_WIDE_INT offset = 0;
4363
4364 if (GET_CODE (ref) == SUBREG)
4365 {
4366 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4367 if (BYTES_BIG_ENDIAN)
4368 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4369 - MIN (UNITS_PER_WORD,
4370 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4371 ref = SUBREG_REG (ref);
4372 }
4373
4374 if (GET_CODE (ref) == REG)
4375 {
4376 /* We have a pseudo which has been spilt onto the stack; there
4377 are two cases here: the first where there is a simple
4378 stack-slot replacement and a second where the stack-slot is
4379 out of range, or is used as a subreg. */
4380 if (reg_equiv_mem[REGNO (ref)])
4381 {
4382 ref = reg_equiv_mem[REGNO (ref)];
4383 base = find_replacement (&XEXP (ref, 0));
4384 }
4385 else
6354dc9b 4386 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4387 base = reg_equiv_address[REGNO (ref)];
4388 }
4389 else
4390 base = find_replacement (&XEXP (ref, 0));
0a81f500 4391
e5e809f4
JL
4392 /* Handle the case where the address is too complex to be offset by 1. */
4393 if (GET_CODE (base) == MINUS
4394 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4395 {
f9cc092a 4396 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 4397
43cffd11 4398 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
4399 base = base_plus;
4400 }
f9cc092a
RE
4401 else if (GET_CODE (base) == PLUS)
4402 {
6354dc9b 4403 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4404 HOST_WIDE_INT hi, lo;
4405
4406 offset += INTVAL (XEXP (base, 1));
4407 base = XEXP (base, 0);
4408
6354dc9b 4409 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4410 /* Valid range for lo is -4095 -> 4095 */
4411 lo = (offset >= 0
4412 ? (offset & 0xfff)
4413 : -((-offset) & 0xfff));
4414
4415 /* Corner case, if lo is the max offset then we would be out of range
4416 once we have added the additional 1 below, so bump the msb into the
4417 pre-loading insn(s). */
4418 if (lo == 4095)
4419 lo &= 0x7ff;
4420
e5951263
NC
4421 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4422 ^ HOST_INT (0x80000000))
4423 - HOST_INT (0x80000000));
f9cc092a
RE
4424
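 /* For example, offset 0x10ff3 splits into lo = 0xff3 and
    hi = 0x10000, while offset -0x10ff3 splits into lo = -0xff3 and
    hi = -0x10000; in both cases hi + lo == offset and lo is within
    the reach of a single byte load.  */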
4425 if (hi + lo != offset)
4426 abort ();
4427
4428 if (hi != 0)
4429 {
4430 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4431
4432 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4433 that require more than one insn. */
f9cc092a
RE
4434 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4435 base = base_plus;
4436 offset = lo;
4437 }
4438 }
e5e809f4 4439
f9cc092a
RE
4440 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4441 emit_insn (gen_zero_extendqisi2 (scratch,
4442 gen_rtx_MEM (QImode,
4443 plus_constant (base,
4444 offset))));
43cffd11
RE
4445 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4446 gen_rtx_MEM (QImode,
f9cc092a
RE
4447 plus_constant (base,
4448 offset + 1))));
5895f793 4449 if (!BYTES_BIG_ENDIAN)
43cffd11
RE
4450 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4451 gen_rtx_IOR (SImode,
4452 gen_rtx_ASHIFT
4453 (SImode,
4454 gen_rtx_SUBREG (SImode, operands[0], 0),
4455 GEN_INT (8)),
f9cc092a 4456 scratch)));
0a81f500 4457 else
43cffd11
RE
4458 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4459 gen_rtx_IOR (SImode,
f9cc092a 4460 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
4461 GEN_INT (8)),
4462 gen_rtx_SUBREG (SImode, operands[0],
4463 0))));
0a81f500
RE
4464}
4465
f9cc092a
RE
4466/* Handle storing a half-word to memory during reload by synthesising it as two
4467 byte stores. Take care not to clobber the input values until after we
4468 have moved them somewhere safe. This code assumes that if the DImode
4469 scratch in operands[2] overlaps either the input value or output address
4470 in some way, then that value must die in this insn (we absolutely need
4471 two scratch registers for some corner cases). */
f3bb6135 4472void
af48348a 4473arm_reload_out_hi (operands)
62b10bbc 4474 rtx * operands;
af48348a 4475{
f9cc092a
RE
4476 rtx ref = operands[0];
4477 rtx outval = operands[1];
4478 rtx base, scratch;
4479 HOST_WIDE_INT offset = 0;
4480
4481 if (GET_CODE (ref) == SUBREG)
4482 {
4483 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4484 if (BYTES_BIG_ENDIAN)
4485 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4486 - MIN (UNITS_PER_WORD,
4487 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4488 ref = SUBREG_REG (ref);
4489 }
4490
4491
4492 if (GET_CODE (ref) == REG)
4493 {
4494 /* We have a pseudo which has been spilt onto the stack; there
4495 are two cases here: the first where there is a simple
4496 stack-slot replacement and a second where the stack-slot is
4497 out of range, or is used as a subreg. */
4498 if (reg_equiv_mem[REGNO (ref)])
4499 {
4500 ref = reg_equiv_mem[REGNO (ref)];
4501 base = find_replacement (&XEXP (ref, 0));
4502 }
4503 else
6354dc9b 4504 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4505 base = reg_equiv_address[REGNO (ref)];
4506 }
4507 else
4508 base = find_replacement (&XEXP (ref, 0));
4509
4510 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4511
4512 /* Handle the case where the address is too complex to be offset by 1. */
4513 if (GET_CODE (base) == MINUS
4514 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4515 {
4516 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4517
4518 /* Be careful not to destroy OUTVAL. */
4519 if (reg_overlap_mentioned_p (base_plus, outval))
4520 {
4521 /* Updating base_plus might destroy outval, see if we can
4522 swap the scratch and base_plus. */
5895f793 4523 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4524 {
4525 rtx tmp = scratch;
4526 scratch = base_plus;
4527 base_plus = tmp;
4528 }
4529 else
4530 {
4531 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4532
4533 /* Be conservative and copy OUTVAL into the scratch now,
4534 this should only be necessary if outval is a subreg
4535 of something larger than a word. */
4536 /* XXX Might this clobber base? I can't see how it can,
4537 since scratch is known to overlap with OUTVAL, and
4538 must be wider than a word. */
4539 emit_insn (gen_movhi (scratch_hi, outval));
4540 outval = scratch_hi;
4541 }
4542 }
4543
4544 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4545 base = base_plus;
4546 }
4547 else if (GET_CODE (base) == PLUS)
4548 {
6354dc9b 4549 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4550 HOST_WIDE_INT hi, lo;
4551
4552 offset += INTVAL (XEXP (base, 1));
4553 base = XEXP (base, 0);
4554
6354dc9b 4555 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4556 /* Valid range for lo is -4095 -> 4095 */
4557 lo = (offset >= 0
4558 ? (offset & 0xfff)
4559 : -((-offset) & 0xfff));
4560
4561 /* Corner case, if lo is the max offset then we would be out of range
4562 once we have added the additional 1 below, so bump the msb into the
4563 pre-loading insn(s). */
4564 if (lo == 4095)
4565 lo &= 0x7ff;
4566
e5951263
NC
4567 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4568 ^ HOST_INT (0x80000000))
5895f793 4569 - HOST_INT (0x80000000));
f9cc092a
RE
4570
4571 if (hi + lo != offset)
4572 abort ();
4573
4574 if (hi != 0)
4575 {
4576 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4577
4578 /* Be careful not to destroy OUTVAL. */
4579 if (reg_overlap_mentioned_p (base_plus, outval))
4580 {
4581 /* Updating base_plus might destroy outval, see if we
4582 can swap the scratch and base_plus. */
5895f793 4583 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4584 {
4585 rtx tmp = scratch;
4586 scratch = base_plus;
4587 base_plus = tmp;
4588 }
4589 else
4590 {
4591 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4592
4593 /* Be conservative and copy outval into scratch now,
4594 this should only be necessary if outval is a
4595 subreg of something larger than a word. */
4596 /* XXX Might this clobber base? I can't see how it
4597 can, since scratch is known to overlap with
4598 outval. */
4599 emit_insn (gen_movhi (scratch_hi, outval));
4600 outval = scratch_hi;
4601 }
4602 }
4603
4604 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4605 that require more than one insn. */
f9cc092a
RE
4606 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4607 base = base_plus;
4608 offset = lo;
4609 }
4610 }
af48348a 4611
b5cc037f
RE
4612 if (BYTES_BIG_ENDIAN)
4613 {
f9cc092a
RE
4614 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4615 plus_constant (base, offset + 1)),
4616 gen_rtx_SUBREG (QImode, outval, 0)));
4617 emit_insn (gen_lshrsi3 (scratch,
4618 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4619 GEN_INT (8)));
f9cc092a
RE
4620 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4621 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f
RE
4622 }
4623 else
4624 {
f9cc092a
RE
4625 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4626 gen_rtx_SUBREG (QImode, outval, 0)));
4627 emit_insn (gen_lshrsi3 (scratch,
4628 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4629 GEN_INT (8)));
f9cc092a
RE
4630 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4631 plus_constant (base, offset + 1)),
4632 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f 4633 }
af48348a 4634}
2b835d68 4635\f
d5b7b3ae
RE
4636/* Print a symbolic form of X to the debug file, F. */
4637static void
4638arm_print_value (f, x)
4639 FILE * f;
4640 rtx x;
4641{
4642 switch (GET_CODE (x))
4643 {
4644 case CONST_INT:
4645 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
4646 return;
4647
4648 case CONST_DOUBLE:
4649 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
4650 return;
4651
4652 case CONST_STRING:
4653 fprintf (f, "\"%s\"", XSTR (x, 0));
4654 return;
4655
4656 case SYMBOL_REF:
4657 fprintf (f, "`%s'", XSTR (x, 0));
4658 return;
4659
4660 case LABEL_REF:
4661 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
4662 return;
4663
4664 case CONST:
4665 arm_print_value (f, XEXP (x, 0));
4666 return;
4667
4668 case PLUS:
4669 arm_print_value (f, XEXP (x, 0));
4670 fprintf (f, "+");
4671 arm_print_value (f, XEXP (x, 1));
4672 return;
4673
4674 case PC:
4675 fprintf (f, "pc");
4676 return;
4677
4678 default:
4679 fprintf (f, "????");
4680 return;
4681 }
4682}
4683\f
2b835d68 4684/* Routines for manipulation of the constant pool. */
2b835d68 4685
949d79eb
RE
4686/* Arm instructions cannot load a large constant directly into a
4687 register; they have to come from a pc relative load. The constant
4688 must therefore be placed in the addressable range of the pc
4689 relative load. Depending on the precise pc relative load
4690 instruction the range is somewhere between 256 bytes and 4k. This
4691 means that we often have to dump a constant inside a function, and
2b835d68
RE
4692 generate code to branch around it.
4693
949d79eb
RE
4694 It is important to minimize this, since the branches will slow
4695 things down and make the code larger.
2b835d68 4696
949d79eb
RE
4697 Normally we can hide the table after an existing unconditional
4698 branch so that there is no interruption of the flow, but in the
4699 worst case the code looks like this:
2b835d68
RE
4700
4701 ldr rn, L1
949d79eb 4702 ...
2b835d68
RE
4703 b L2
4704 align
4705 L1: .long value
4706 L2:
949d79eb 4707 ...
2b835d68 4708
2b835d68 4709 ldr rn, L3
949d79eb 4710 ...
2b835d68
RE
4711 b L4
4712 align
2b835d68
RE
4713 L3: .long value
4714 L4:
949d79eb
RE
4715 ...
4716
4717 We fix this by performing a scan after scheduling, which notices
4718 which instructions need to have their operands fetched from the
4719 constant table and builds the table.
4720
4721 The algorithm starts by building a table of all the constants that
4722 need fixing up and all the natural barriers in the function (places
4723 where a constant table can be dropped without breaking the flow).
4724 For each fixup we note how far the pc-relative replacement will be
4725 able to reach and the offset of the instruction into the function.
4726
4727 Having built the table we then group the fixes together to form
4728 tables that are as large as possible (subject to addressing
4729 constraints) and emit each table of constants after the last
4730 barrier that is within range of all the instructions in the group.
4731 If a group does not contain a barrier, then we forcibly create one
4732 by inserting a jump instruction into the flow. Once the table has
4733 been inserted, the insns are then modified to reference the
4734 relevant entry in the pool.
4735
6354dc9b 4736 Possible enhancements to the algorithm (not implemented) are:
949d79eb 4737
d5b7b3ae 4738 1) For some processors and object formats, there may be benefit in
949d79eb
RE
4739 aligning the pools to the start of cache lines; this alignment
4740 would need to be taken into account when calculating addressability
6354dc9b 4741 of a pool. */
2b835d68 4742
d5b7b3ae
RE
4743/* These typedefs are located at the start of this file, so that
4744 they can be used in the prototypes there. This comment is to
4745 remind readers of that fact so that the following structures
4746 can be understood more easily.
4747
4748 typedef struct minipool_node Mnode;
4749 typedef struct minipool_fixup Mfix; */
4750
4751struct minipool_node
4752{
4753 /* Doubly linked chain of entries. */
4754 Mnode * next;
4755 Mnode * prev;
4756 /* The maximum offset into the code that this entry can be placed. While
4757 pushing fixes for forward references, all entries are sorted in order
4758 of increasing max_address. */
4759 HOST_WIDE_INT max_address;
4760 /* Similarly for an entry inserted for a backwards ref. */
4761 HOST_WIDE_INT min_address;
4762 /* The number of fixes referencing this entry. This can become zero
4763 if we "unpush" an entry. In this case we ignore the entry when we
4764 come to emit the code. */
4765 int refcount;
4766 /* The offset from the start of the minipool. */
4767 HOST_WIDE_INT offset;
4768 /* The value in table. */
4769 rtx value;
4770 /* The mode of value. */
4771 enum machine_mode mode;
4772 int fix_size;
4773};
4774
4775struct minipool_fixup
2b835d68 4776{
d5b7b3ae
RE
4777 Mfix * next;
4778 rtx insn;
4779 HOST_WIDE_INT address;
4780 rtx * loc;
4781 enum machine_mode mode;
4782 int fix_size;
4783 rtx value;
4784 Mnode * minipool;
4785 HOST_WIDE_INT forwards;
4786 HOST_WIDE_INT backwards;
4787};
2b835d68 4788
d5b7b3ae
RE
4789/* Fixes less than a word need padding out to a word boundary. */
4790#define MINIPOOL_FIX_SIZE(mode) \
4791 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
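/* Thus a QImode or HImode fix still occupies 4 bytes in the pool,
   while a DImode fix occupies 8.  */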
2b835d68 4792
d5b7b3ae
RE
4793static Mnode * minipool_vector_head;
4794static Mnode * minipool_vector_tail;
4795static rtx minipool_vector_label;
332072db 4796
d5b7b3ae
RE
4797/* The linked list of all minipool fixes required for this function. */
4798Mfix * minipool_fix_head;
4799Mfix * minipool_fix_tail;
4800/* The fix entry for the current minipool, once it has been placed. */
4801Mfix * minipool_barrier;
4802
4803/* Determines if INSN is the start of a jump table. Returns the end
4804 of the TABLE or NULL_RTX. */
4805static rtx
4806is_jump_table (insn)
4807 rtx insn;
2b835d68 4808{
d5b7b3ae 4809 rtx table;
da6558fd 4810
d5b7b3ae
RE
4811 if (GET_CODE (insn) == JUMP_INSN
4812 && JUMP_LABEL (insn) != NULL
4813 && ((table = next_real_insn (JUMP_LABEL (insn)))
4814 == next_real_insn (insn))
4815 && table != NULL
4816 && GET_CODE (table) == JUMP_INSN
4817 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4818 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
4819 return table;
4820
4821 return NULL_RTX;
2b835d68
RE
4822}
4823
d5b7b3ae
RE
4824static HOST_WIDE_INT
4825get_jump_table_size (insn)
4826 rtx insn;
2b835d68 4827{
d5b7b3ae
RE
4828 rtx body = PATTERN (insn);
4829 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 4830
d5b7b3ae
RE
4831 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
4832}
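/* E.g. an ADDR_DIFF_VEC in SImode with ten elements contributes ten
   4-byte entries -- 40 bytes -- to the addresses computed during the
   minipool scan.  */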
2b835d68 4833
d5b7b3ae
RE
4834/* Move a minipool fix MP from its current location to before MAX_MP.
4835 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
4836 constraints may need updating. */
4837static Mnode *
4838move_minipool_fix_forward_ref (mp, max_mp, max_address)
4839 Mnode * mp;
4840 Mnode * max_mp;
4841 HOST_WIDE_INT max_address;
4842{
4843 /* This should never be true and the code below assumes these are
4844 different. */
4845 if (mp == max_mp)
4846 abort ();
4847
4848 if (max_mp == NULL)
4849 {
4850 if (max_address < mp->max_address)
4851 mp->max_address = max_address;
4852 }
4853 else
2b835d68 4854 {
d5b7b3ae
RE
4855 if (max_address > max_mp->max_address - mp->fix_size)
4856 mp->max_address = max_mp->max_address - mp->fix_size;
4857 else
4858 mp->max_address = max_address;
2b835d68 4859
d5b7b3ae
RE
4860 /* Unlink MP from its current position. Since max_mp is non-null,
4861 mp->prev must be non-null. */
4862 mp->prev->next = mp->next;
4863 if (mp->next != NULL)
4864 mp->next->prev = mp->prev;
4865 else
4866 minipool_vector_tail = mp->prev;
2b835d68 4867
d5b7b3ae
RE
4868 /* Re-insert it before MAX_MP. */
4869 mp->next = max_mp;
4870 mp->prev = max_mp->prev;
4871 max_mp->prev = mp;
4872
4873 if (mp->prev != NULL)
4874 mp->prev->next = mp;
4875 else
4876 minipool_vector_head = mp;
4877 }
2b835d68 4878
d5b7b3ae
RE
4879 /* Save the new entry. */
4880 max_mp = mp;
4881
4882 /* Scan over the preceding entries and adjust their addresses as
4883 required. */
4884 while (mp->prev != NULL
4885 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4886 {
4887 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4888 mp = mp->prev;
2b835d68
RE
4889 }
4890
d5b7b3ae 4891 return max_mp;
2b835d68
RE
4892}
4893
d5b7b3ae
RE
4894/* Add a constant to the minipool for a forward reference. Returns the
4895 node added or NULL if the constant will not fit in this pool. */
4896static Mnode *
4897add_minipool_forward_ref (fix)
4898 Mfix * fix;
4899{
4900 /* If set, max_mp is the first pool_entry that has a lower
4901 constraint than the one we are trying to add. */
4902 Mnode * max_mp = NULL;
4903 HOST_WIDE_INT max_address = fix->address + fix->forwards;
4904 Mnode * mp;
4905
4906 /* If this fix's address is greater than the address of the first
4907 entry, then we can't put the fix in this pool. We subtract the
4908 size of the current fix to ensure that if the table is fully
4909 packed we still have enough room to insert this value by shuffling
4910 the other fixes forwards. */
4911 if (minipool_vector_head &&
4912 fix->address >= minipool_vector_head->max_address - fix->fix_size)
4913 return NULL;
2b835d68 4914
d5b7b3ae
RE
4915 /* Scan the pool to see if a constant with the same value has
4916 already been added. While we are doing this, also note the
4917 location where we must insert the constant if it doesn't already
4918 exist. */
4919 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4920 {
4921 if (GET_CODE (fix->value) == GET_CODE (mp->value)
4922 && fix->mode == mp->mode
4923 && (GET_CODE (fix->value) != CODE_LABEL
4924 || (CODE_LABEL_NUMBER (fix->value)
4925 == CODE_LABEL_NUMBER (mp->value)))
4926 && rtx_equal_p (fix->value, mp->value))
4927 {
4928 /* More than one fix references this entry. */
4929 mp->refcount++;
4930 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
4931 }
4932
4933 /* Note the insertion point if necessary. */
4934 if (max_mp == NULL
4935 && mp->max_address > max_address)
4936 max_mp = mp;
4937 }
4938
4939 /* The value is not currently in the minipool, so we need to create
4940 a new entry for it. If MAX_MP is NULL, the entry will be put on
4941 the end of the list since the placement is less constrained than
4942 any existing entry. Otherwise, we insert the new fix before
4943 MAX_MP and, if necessary, adjust the constraints on the other
4944 entries. */
4945 mp = xmalloc (sizeof (* mp));
4946 mp->fix_size = fix->fix_size;
4947 mp->mode = fix->mode;
4948 mp->value = fix->value;
4949 mp->refcount = 1;
4950 /* Not yet required for a backwards ref. */
4951 mp->min_address = -65536;
4952
4953 if (max_mp == NULL)
4954 {
4955 mp->max_address = max_address;
4956 mp->next = NULL;
4957 mp->prev = minipool_vector_tail;
4958
4959 if (mp->prev == NULL)
4960 {
4961 minipool_vector_head = mp;
4962 minipool_vector_label = gen_label_rtx ();
7551cbc7 4963 }
2b835d68 4964 else
d5b7b3ae 4965 mp->prev->next = mp;
2b835d68 4966
d5b7b3ae
RE
4967 minipool_vector_tail = mp;
4968 }
4969 else
4970 {
4971 if (max_address > max_mp->max_address - mp->fix_size)
4972 mp->max_address = max_mp->max_address - mp->fix_size;
4973 else
4974 mp->max_address = max_address;
4975
4976 mp->next = max_mp;
4977 mp->prev = max_mp->prev;
4978 max_mp->prev = mp;
4979 if (mp->prev != NULL)
4980 mp->prev->next = mp;
4981 else
4982 minipool_vector_head = mp;
4983 }
4984
4985 /* Save the new entry. */
4986 max_mp = mp;
4987
4988 /* Scan over the preceding entries and adjust their addresses as
4989 required. */
4990 while (mp->prev != NULL
4991 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4992 {
4993 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4994 mp = mp->prev;
2b835d68
RE
4995 }
4996
d5b7b3ae
RE
4997 return max_mp;
4998}
4999
5000static Mnode *
5001move_minipool_fix_backward_ref (mp, min_mp, min_address)
5002 Mnode * mp;
5003 Mnode * min_mp;
5004 HOST_WIDE_INT min_address;
5005{
5006 HOST_WIDE_INT offset;
5007
5008 /* This should never be true, and the code below assumes these are
5009 different. */
5010 if (mp == min_mp)
5011 abort ();
5012
5013 if (min_mp == NULL)
2b835d68 5014 {
d5b7b3ae
RE
5015 if (min_address > mp->min_address)
5016 mp->min_address = min_address;
5017 }
5018 else
5019 {
5020 /* We will adjust this below if it is too loose. */
5021 mp->min_address = min_address;
5022
5023 /* Unlink MP from its current position. Since min_mp is non-null,
5024 mp->next must be non-null. */
5025 mp->next->prev = mp->prev;
5026 if (mp->prev != NULL)
5027 mp->prev->next = mp->next;
5028 else
5029 minipool_vector_head = mp->next;
5030
5031 /* Reinsert it after MIN_MP. */
5032 mp->prev = min_mp;
5033 mp->next = min_mp->next;
5034 min_mp->next = mp;
5035 if (mp->next != NULL)
5036 mp->next->prev = mp;
2b835d68 5037 else
d5b7b3ae
RE
5038 minipool_vector_tail = mp;
5039 }
5040
5041 min_mp = mp;
5042
5043 offset = 0;
5044 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5045 {
5046 mp->offset = offset;
5047 if (mp->refcount > 0)
5048 offset += mp->fix_size;
5049
5050 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5051 mp->next->min_address = mp->min_address + mp->fix_size;
5052 }
5053
5054 return min_mp;
5055}
5056
5057/* Add a constant to the minipool for a backward reference. Returns the
5058 node added or NULL if the constant will not fit in this pool.
5059
5060 Note that the code for insertion for a backwards reference can be
5061 somewhat confusing because the calculated offsets for each fix do
5062 not take into account the size of the pool (which is still under
5063 construction). */
5064static Mnode *
5065add_minipool_backward_ref (fix)
5066 Mfix * fix;
5067{
5068 /* If set, min_mp is the last pool_entry that has a lower constraint
5069 than the one we are trying to add. */
5070 Mnode * min_mp = NULL;
5071 /* This can be negative, since it is only a constraint. */
5072 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5073 Mnode * mp;
5074
5075 /* If we can't reach the current pool from this insn, or if we can't
5076 insert this entry at the end of the pool without pushing other
5077 fixes out of range, then we don't try. This ensures that we
5078 can't fail later on. */
5079 if (min_address >= minipool_barrier->address
5080 || (minipool_vector_tail->min_address + fix->fix_size
5081 >= minipool_barrier->address))
5082 return NULL;
5083
5084 /* Scan the pool to see if a constant with the same value has
5085 already been added. While we are doing this, also note the
5086 location where we must insert the constant if it doesn't already
5087 exist. */
5088 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5089 {
5090 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5091 && fix->mode == mp->mode
5092 && (GET_CODE (fix->value) != CODE_LABEL
5093 || (CODE_LABEL_NUMBER (fix->value)
5094 == CODE_LABEL_NUMBER (mp->value)))
5095 && rtx_equal_p (fix->value, mp->value)
5096 /* Check that there is enough slack to move this entry to the
5097 end of the table (this is conservative). */
5098 && (mp->max_address
5099 > (minipool_barrier->address
5100 + minipool_vector_tail->offset
5101 + minipool_vector_tail->fix_size)))
5102 {
5103 mp->refcount++;
5104 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5105 }
5106
5107 if (min_mp != NULL)
5108 mp->min_address += fix->fix_size;
5109 else
5110 {
5111 /* Note the insertion point if necessary. */
5112 if (mp->min_address < min_address)
5113 min_mp = mp;
5114 else if (mp->max_address
5115 < minipool_barrier->address + mp->offset + fix->fix_size)
5116 {
5117 /* Inserting before this entry would push the fix beyond
5118 its maximum address (which can happen if we have
5119 re-located a forwards fix); force the new fix to come
5120 after it. */
5121 min_mp = mp;
5122 min_address = mp->min_address + fix->fix_size;
5123 }
5124 }
5125 }
5126
5127 /* We need to create a new entry. */
5128 mp = xmalloc (sizeof (* mp));
5129 mp->fix_size = fix->fix_size;
5130 mp->mode = fix->mode;
5131 mp->value = fix->value;
5132 mp->refcount = 1;
5133 mp->max_address = minipool_barrier->address + 65536;
5134
5135 mp->min_address = min_address;
5136
5137 if (min_mp == NULL)
5138 {
5139 mp->prev = NULL;
5140 mp->next = minipool_vector_head;
5141
5142 if (mp->next == NULL)
5143 {
5144 minipool_vector_tail = mp;
5145 minipool_vector_label = gen_label_rtx ();
5146 }
5147 else
5148 mp->next->prev = mp;
5149
5150 minipool_vector_head = mp;
5151 }
5152 else
5153 {
5154 mp->next = min_mp->next;
5155 mp->prev = min_mp;
5156 min_mp->next = mp;
da6558fd 5157
d5b7b3ae
RE
5158 if (mp->next != NULL)
5159 mp->next->prev = mp;
5160 else
5161 minipool_vector_tail = mp;
5162 }
5163
5164 /* Save the new entry. */
5165 min_mp = mp;
5166
5167 if (mp->prev)
5168 mp = mp->prev;
5169 else
5170 mp->offset = 0;
5171
5172 /* Scan over the following entries and adjust their offsets. */
5173 while (mp->next != NULL)
5174 {
5175 if (mp->next->min_address < mp->min_address + mp->fix_size)
5176 mp->next->min_address = mp->min_address + mp->fix_size;
5177
5178 if (mp->refcount)
5179 mp->next->offset = mp->offset + mp->fix_size;
5180 else
5181 mp->next->offset = mp->offset;
5182
5183 mp = mp->next;
5184 }
5185
5186 return min_mp;
5187}
5188
5189static void
5190assign_minipool_offsets (barrier)
5191 Mfix * barrier;
5192{
5193 HOST_WIDE_INT offset = 0;
5194 Mnode * mp;
5195
5196 minipool_barrier = barrier;
5197
5198 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5199 {
5200 mp->offset = offset;
da6558fd 5201
d5b7b3ae
RE
5202 if (mp->refcount > 0)
5203 offset += mp->fix_size;
5204 }
5205}
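/* For example, a pool holding a live 4-byte entry followed by an
   8-byte entry assigns them offsets 0 and 4; an entry whose refcount
   has dropped to zero takes up no space and simply inherits the
   running offset.  */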
5206
5207/* Output the literal table. */
5208static void
5209dump_minipool (scan)
5210 rtx scan;
5211{
5212 Mnode * mp;
5213 Mnode * nmp;
5214
5215 if (rtl_dump_file)
5216 fprintf (rtl_dump_file,
5217 ";; Emitting minipool after insn %u; address %ld\n",
5218 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5219
5220 scan = emit_label_after (gen_label_rtx (), scan);
5221 scan = emit_insn_after (gen_align_4 (), scan);
5222 scan = emit_label_after (minipool_vector_label, scan);
5223
5224 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5225 {
5226 if (mp->refcount > 0)
5227 {
5228 if (rtl_dump_file)
5229 {
5230 fprintf (rtl_dump_file,
5231 ";; Offset %u, min %ld, max %ld ",
5232 (unsigned) mp->offset, (unsigned long) mp->min_address,
5233 (unsigned long) mp->max_address);
5234 arm_print_value (rtl_dump_file, mp->value);
5235 fputc ('\n', rtl_dump_file);
5236 }
5237
5238 switch (mp->fix_size)
5239 {
5240#ifdef HAVE_consttable_1
5241 case 1:
5242 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5243 break;
5244
5245#endif
5246#ifdef HAVE_consttable_2
5247 case 2:
5248 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5249 break;
5250
5251#endif
5252#ifdef HAVE_consttable_4
5253 case 4:
5254 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5255 break;
5256
5257#endif
5258#ifdef HAVE_consttable_8
5259 case 8:
5260 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5261 break;
5262
5263#endif
5264 default:
5265 abort ();
5266 break;
5267 }
5268 }
5269
5270 nmp = mp->next;
5271 free (mp);
2b835d68
RE
5272 }
5273
d5b7b3ae
RE
5274 minipool_vector_head = minipool_vector_tail = NULL;
5275 scan = emit_insn_after (gen_consttable_end (), scan);
5276 scan = emit_barrier_after (scan);
2b835d68
RE
5277}
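/* The emitted pool looks roughly like this in the assembly output (a
   sketch; the exact directives come from the consttable patterns in
   the machine description):

	.align	2
     .LCxx:
	.word	constant1
	.word	constant2  */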
5278
d5b7b3ae
RE
5279/* Return the cost of forcibly inserting a barrier after INSN. */
5280static int
5281arm_barrier_cost (insn)
5282 rtx insn;
949d79eb 5283{
d5b7b3ae
RE
5284 /* Basing the location of the pool on the loop depth is preferable,
5285 but at the moment, the basic block information seems to be
5286 corrupt by this stage of the compilation. */
5287 int base_cost = 50;
5288 rtx next = next_nonnote_insn (insn);
5289
5290 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5291 base_cost -= 20;
5292
5293 switch (GET_CODE (insn))
5294 {
5295 case CODE_LABEL:
5296 /* It will always be better to place the table before the label, rather
5297 than after it. */
5298 return 50;
949d79eb 5299
d5b7b3ae
RE
5300 case INSN:
5301 case CALL_INSN:
5302 return base_cost;
5303
5304 case JUMP_INSN:
5305 return base_cost - 10;
5306
5307 default:
5308 return base_cost + 10;
5309 }
5310}
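/* For example, a JUMP_INSN immediately followed by a CODE_LABEL
   costs 50 - 20 - 10 = 20, the cheapest ordinary placement, while an
   insn with no label following costs the full base of 50.  */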
5311
5312/* Find the best place in the insn stream in the range
5313 (FIX->address, MAX_ADDRESS) to forcibly insert a minipool barrier.
5314 Create the barrier by inserting a jump, and add a new fix entry for
5315 it. */
5316static Mfix *
5317create_fix_barrier (fix, max_address)
5318 Mfix * fix;
5319 HOST_WIDE_INT max_address;
5320{
5321 HOST_WIDE_INT count = 0;
5322 rtx barrier;
5323 rtx from = fix->insn;
5324 rtx selected = from;
5325 int selected_cost;
5326 HOST_WIDE_INT selected_address;
5327 Mfix * new_fix;
5328 HOST_WIDE_INT max_count = max_address - fix->address;
5329 rtx label = gen_label_rtx ();
5330
5331 selected_cost = arm_barrier_cost (from);
5332 selected_address = fix->address;
5333
5334 while (from && count < max_count)
5335 {
5336 rtx tmp;
5337 int new_cost;
5338
5339 /* This code shouldn't have been called if there was a natural barrier
5340 within range. */
5341 if (GET_CODE (from) == BARRIER)
5342 abort ();
5343
5344 /* Count the length of this insn. */
5345 count += get_attr_length (from);
5346
5347 /* If there is a jump table, add its length. */
5348 tmp = is_jump_table (from);
5349 if (tmp != NULL)
5350 {
5351 count += get_jump_table_size (tmp);
5352
5353 /* Jump tables aren't in a basic block, so base the cost on
5354 the dispatch insn. If we select this location, we will
5355 still put the pool after the table. */
5356 new_cost = arm_barrier_cost (from);
5357
5358 if (count < max_count && new_cost <= selected_cost)
5359 {
5360 selected = tmp;
5361 selected_cost = new_cost;
5362 selected_address = fix->address + count;
5363 }
5364
5365 /* Continue after the dispatch table. */
5366 from = NEXT_INSN (tmp);
5367 continue;
5368 }
5369
5370 new_cost = arm_barrier_cost (from);
5371
5372 if (count < max_count && new_cost <= selected_cost)
5373 {
5374 selected = from;
5375 selected_cost = new_cost;
5376 selected_address = fix->address + count;
5377 }
5378
5379 from = NEXT_INSN (from);
5380 }
5381
5382 /* Create a new JUMP_INSN that branches around a barrier. */
5383 from = emit_jump_insn_after (gen_jump (label), selected);
5384 JUMP_LABEL (from) = label;
5385 barrier = emit_barrier_after (from);
5386 emit_label_after (label, barrier);
5387
5388 /* Create a minipool barrier entry for the new barrier. */
c7319d87 5389 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
d5b7b3ae
RE
5390 new_fix->insn = barrier;
5391 new_fix->address = selected_address;
5392 new_fix->next = fix->next;
5393 fix->next = new_fix;
5394
5395 return new_fix;
5396}

/* Record that there is a natural barrier in the insn stream at
   ADDRESS.  */
static void
push_minipool_barrier (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

  fix->insn = insn;
  fix->address = address;

  fix->next = NULL;
  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}

/* Record INSN, which will need fixing up to load a value from the
   minipool.  ADDRESS is the offset of the insn since the start of the
   function; LOC is a pointer to the part of the insn which requires
   fixing; VALUE is the constant that must be loaded, which is of type
   MODE.  */
static void
push_minipool_fix (insn, address, loc, mode, value)
     rtx insn;
     HOST_WIDE_INT address;
     rtx * loc;
     enum machine_mode mode;
     rtx value;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

#ifdef AOF_ASSEMBLER
  /* PIC symbol references need to be converted into offsets into the
     based area.  */
  /* XXX This shouldn't be done here.  */
  if (flag_pic && GET_CODE (value) == SYMBOL_REF)
    value = aof_pic_entry (value);
#endif /* AOF_ASSEMBLER */

  fix->insn = insn;
  fix->address = address;
  fix->loc = loc;
  fix->mode = mode;
  fix->fix_size = MINIPOOL_FIX_SIZE (mode);
  fix->value = value;
  fix->forwards = get_attr_pool_range (insn);
  fix->backwards = get_attr_neg_pool_range (insn);
  fix->minipool = NULL;

  /* If an insn doesn't have a range defined for it, then it isn't
     expecting to be reworked by this code.  Better to abort now than
     to generate duff assembly code.  */
  if (fix->forwards == 0 && fix->backwards == 0)
    abort ();

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file,
               ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
               GET_MODE_NAME (mode),
               INSN_UID (insn), (unsigned long) address,
               -1 * (long)fix->backwards, (long)fix->forwards);
      arm_print_value (rtl_dump_file, fix->value);
      fprintf (rtl_dump_file, "\n");
    }

  /* Add it to the chain of fixes.  */
  fix->next = NULL;

  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
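
/* For illustration: FORWARDS and BACKWARDS come from the pool_range and
   neg_pool_range insn attributes in arm.md.  An ARM-mode word load, for
   instance, can only address a pool entry within roughly 4k bytes of
   the insn, so a constant needed further away than that will force a
   new minipool to be dumped closer to the use.  */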

/* Scan INSN and note any of its operands that need fixing.  */
static void
note_invalid_constants (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  int opno;

  extract_insn (insn);

  if (!constrain_operands (1))
    fatal_insn_not_found (insn);

  /* Fill in recog_op_alt with information about the constraints of this
     insn.  */
  preprocess_constraints ();

  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things we need to fix can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
        continue;

      /* If this alternative is a memory reference, then any mention
         of constants in this alternative is really to fool reload
         into allowing us to accept one there.  We need to fix them up
         now so that we output the right code.  */
      if (recog_op_alt[opno][which_alternative].memory_ok)
        {
          rtx op = recog_data.operand[opno];

          if (CONSTANT_P (op))
            push_minipool_fix (insn, address, recog_data.operand_loc[opno],
                               recog_data.operand_mode[opno], op);
#if 0
          /* RWE: Now we look correctly at the operands for the insn,
             this shouldn't be needed any more.  */
#ifndef AOF_ASSEMBLER
          /* XXX Is this still needed?  */
          else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
            push_minipool_fix (insn, address, recog_data.operand_loc[opno],
                               recog_data.operand_mode[opno],
                               XVECEXP (op, 0, 0));
#endif
#endif
          else if (GET_CODE (op) == MEM
                   && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
                   && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
            push_minipool_fix (insn, address, recog_data.operand_loc[opno],
                               recog_data.operand_mode[opno],
                               get_pool_constant (XEXP (op, 0)));
        }
    }
}

void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  HOST_WIDE_INT address = 0;
  Mfix * fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  if (GET_CODE (first) != NOTE)
    abort ();

  /* Scan all the insns and record the operands that will need fixing.  */
  for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
    {
      if (GET_CODE (insn) == BARRIER)
        push_minipool_barrier (insn, address);
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
               || GET_CODE (insn) == JUMP_INSN)
        {
          rtx table;

          note_invalid_constants (insn, address);
          address += get_attr_length (insn);

          /* If the insn is a vector jump, add the size of the table
             and skip the table.  */
          if ((table = is_jump_table (insn)) != NULL)
            {
              address += get_jump_table_size (table);
              insn = table;
            }
        }
    }

  fix = minipool_fix_head;

  /* Now scan the fixups and perform the required changes.  */
  while (fix)
    {
      Mfix * ftmp;
      Mfix * fdel;
      Mfix * last_added_fix;
      Mfix * last_barrier = NULL;
      Mfix * this_fix;

      /* Skip any further barriers before the next fix.  */
      while (fix && GET_CODE (fix->insn) == BARRIER)
        fix = fix->next;

      /* No more fixes.  */
      if (fix == NULL)
        break;

      last_added_fix = NULL;

      for (ftmp = fix; ftmp; ftmp = ftmp->next)
        {
          if (GET_CODE (ftmp->insn) == BARRIER)
            {
              if (ftmp->address >= minipool_vector_head->max_address)
                break;

              last_barrier = ftmp;
            }
          else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
            break;

          last_added_fix = ftmp;  /* Keep track of the last fix added.  */
        }

      /* If we found a barrier, drop back to that; any fixes that we
         could have reached but come after the barrier will now go in
         the next mini-pool.  */
      if (last_barrier != NULL)
        {
          /* Reduce the refcount for those fixes that won't go into this
             pool after all.  */
          for (fdel = last_barrier->next;
               fdel && fdel != ftmp;
               fdel = fdel->next)
            {
              fdel->minipool->refcount--;
              fdel->minipool = NULL;
            }

          ftmp = last_barrier;
        }
      else
        {
          /* ftmp is the first fix that we can't fit into this pool and
             there are no natural barriers that we could use.  Insert a
             new barrier in the code somewhere between the previous
             fix and this one, and arrange to jump around it.  */
          HOST_WIDE_INT max_address;

          /* The last item on the list of fixes must be a barrier, so
             we can never run off the end of the list of fixes without
             last_barrier being set.  */
          if (ftmp == NULL)
            abort ();

          max_address = minipool_vector_head->max_address;
          /* Check that there isn't another fix that is in range that
             we couldn't fit into this pool because the pool was
             already too large: we need to put the pool before such an
             instruction.  */
          if (ftmp->address < max_address)
            max_address = ftmp->address;

          last_barrier = create_fix_barrier (last_added_fix, max_address);
        }

      assign_minipool_offsets (last_barrier);

      while (ftmp)
        {
          if (GET_CODE (ftmp->insn) != BARRIER
              && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
                  == NULL))
            break;

          ftmp = ftmp->next;
        }

      /* Scan over the fixes we have identified for this pool, fixing them
         up and adding the constants to the pool itself.  */
      for (this_fix = fix; this_fix && ftmp != this_fix;
           this_fix = this_fix->next)
        if (GET_CODE (this_fix->insn) != BARRIER)
          {
            rtx addr
              = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
                                                  minipool_vector_label),
                               this_fix->minipool->offset);
            *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
          }

      dump_minipool (last_barrier->insn);
      fix = ftmp;
    }

  /* From now on we must synthesize any constants that we can't handle
     directly.  This can happen if the RTL gets split during final
     instruction generation.  */
  after_arm_reorg = 1;

  /* Free the minipool memory.  */
  obstack_free (&minipool_obstack, minipool_startobj);
}
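
/* For illustration (register names and labels invented): after this
   pass, a fixed-up load refers to a pool dumped after a barrier, so
   the eventual output resembles

        ldr     r0, .LP0        @ load of a constant too awkward for mov
        ...
        b       .L1             @ branch around the minipool
   .LP0:
        .word   0x12345678      @ the pooled constant
   .L1:
   */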

/* Routines to output assembly language.  */

/* If X holds one of the valid FPA immediate constants, return the
   string representing its value.  In this way we can ensure that valid
   double constants are generated even when cross compiling.  */
const char *
fp_immediate_constant (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}

/* As for fp_immediate_constant, but value is passed directly, not in rtx.  */
static const char *
fp_const_from_val (r)
     REAL_VALUE_TYPE * r;
{
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}
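
/* For reference: the eight FPA immediates set up by init_fpa_table are
   0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and 10.0, so e.g. a DFmode constant
   of 10.0 comes back as the string "10", suitable for an instruction
   such as "mvfd f0, #10".  */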

/* Output the operands of a LDM/STM instruction to STREAM.
   MASK is the ARM register set mask of which only bits 0-15 are important.
   INSTR is the instruction format string to output, into which REG (the
   base register) is substituted.  HAT is nonzero if a hat (^) must
   follow the register list.  */

static void
print_multi_reg (stream, instr, reg, mask, hat)
     FILE * stream;
     const char * instr;
     int reg;
     int mask;
     int hat;
{
  int i;
  int not_first = FALSE;

  fputc ('\t', stream);
  asm_fprintf (stream, instr, reg);
  fputs (", {", stream);

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      {
        if (not_first)
          fprintf (stream, ", ");

        asm_fprintf (stream, "%r", i);
        not_first = TRUE;
      }

  fprintf (stream, "}%s\n", hat ? "^" : "");
}
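
/* For example, the call
     print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 0x4010, FALSE);
   (mask 0x4010 selects r4 and lr) writes out
        ldmfd   sp!, {r4, lr}
   */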

/* Output a 'call' insn.  */

const char *
output_call (operands)
     rtx * operands;
{
  /* Handle calls to lr using ip (which may be clobbered in subr anyway).  */

  if (REGNO (operands[0]) == LR_REGNUM)
    {
      operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }

  output_asm_insn ("mov%?\t%|lr, %|pc", operands);

  if (TARGET_INTERWORK)
    output_asm_insn ("bx%?\t%0", operands);
  else
    output_asm_insn ("mov%?\t%|pc, %0", operands);

  return "";
}
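
/* For example, a call through r3 comes out as
        mov     lr, pc
        mov     pc, r3
   or, when interworking, "mov lr, pc" followed by "bx r3".  The return
   address left in lr points just past the pc-move because reading the
   pc on the ARM yields the address two instructions ahead.  */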

/* Rewrite any reference to LR_REGNUM inside *X to use IP_REGNUM
   instead.  Returns nonzero if anything was changed.  */
static int
eliminate_lr2ip (x)
     rtx * x;
{
  int something_changed = 0;
  rtx x0 = * x;
  int code = GET_CODE (x0);
  register int i, j;
  register const char * fmt;

  switch (code)
    {
    case REG:
      if (REGNO (x0) == LR_REGNUM)
        {
          *x = gen_rtx_REG (SImode, IP_REGNUM);
          return 1;
        }
      return 0;
    default:
      /* Scan through the sub-elements and change any references there.  */
      fmt = GET_RTX_FORMAT (code);

      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (fmt[i] == 'e')
          something_changed |= eliminate_lr2ip (&XEXP (x0, i));
        else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x0, i); j++)
            something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));

      return something_changed;
    }
}

/* Output a 'call' insn that is a reference in memory.  */

const char *
output_call_mem (operands)
     rtx * operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful.  */
  /* Handle calls using lr by using ip (which may be clobbered in subr
     anyway).  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  if (TARGET_INTERWORK)
    {
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else
    {
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}


/* Output a move from arm registers to an fpu register.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first of three arm registers holding the value.  */

const char *
output_mov_long_double_fpu_from_arm (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[3];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
  output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);

  return "";
}

/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first of three arm registers to receive the value.
   OPERANDS[1] is an fpu register.  */

const char *
output_mov_long_double_arm_from_fpu (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[3];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
  return "";
}

/* Output a move from arm registers to arm registers of a long double.
   OPERANDS[0] is the destination.
   OPERANDS[1] is the source.  */
const char *
output_mov_long_double_arm_from_arm (operands)
     rtx * operands;
{
  /* We have to be careful here because the two might overlap.  */
  int dest_start = REGNO (operands[0]);
  int src_start = REGNO (operands[1]);
  rtx ops[2];
  int i;

  if (dest_start < src_start)
    {
      for (i = 0; i < 3; i++)
        {
          ops[0] = gen_rtx_REG (SImode, dest_start + i);
          ops[1] = gen_rtx_REG (SImode, src_start + i);
          output_asm_insn ("mov%?\t%0, %1", ops);
        }
    }
  else
    {
      for (i = 2; i >= 0; i--)
        {
          ops[0] = gen_rtx_REG (SImode, dest_start + i);
          ops[1] = gen_rtx_REG (SImode, src_start + i);
          output_asm_insn ("mov%?\t%0, %1", ops);
        }
    }

  return "";
}


/* Output a move from arm registers to an fpu register.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first register of an arm register pair.  */

const char *
output_mov_double_fpu_from_arm (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[2];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
  output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
  return "";
}

/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first register of an arm register pair.
   OPERANDS[1] is an fpu register.  */

const char *
output_mov_double_arm_from_fpu (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[2];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
  return "";
}

/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

const char *
output_move_double (operands)
     rtx * operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
        {
          int reg1 = REGNO (operands[1]);
          if (reg1 == IP_REGNUM)
            abort ();

          /* Ensure the second source is not overwritten.  */
          if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
            output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
          else
            output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
        }
      else if (code1 == CONST_DOUBLE)
        {
          if (GET_MODE (operands[1]) == DFmode)
            {
              long l[2];
              union real_extract u;

              memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
              REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
              otherops[1] = GEN_INT (l[1]);
              operands[1] = GEN_INT (l[0]);
            }
          else if (GET_MODE (operands[1]) != VOIDmode)
            abort ();
          else if (WORDS_BIG_ENDIAN)
            {
              otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
              operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
            }
          else
            {
              otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
              operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
            }

          output_mov_immediate (operands);
          output_mov_immediate (otherops);
        }
      else if (code1 == CONST_INT)
        {
#if HOST_BITS_PER_WIDE_INT > 32
          /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
             what the upper word is.  */
          if (WORDS_BIG_ENDIAN)
            {
              otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
              operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
            }
          else
            {
              otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
              operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
            }
#else
          /* Sign extend the intval into the high-order word.  */
          if (WORDS_BIG_ENDIAN)
            {
              otherops[1] = operands[1];
              operands[1] = (INTVAL (operands[1]) < 0
                             ? constm1_rtx : const0_rtx);
            }
          else
            otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
          output_mov_immediate (otherops);
          output_mov_immediate (operands);
        }
      else if (code1 == MEM)
        {
          switch (GET_CODE (XEXP (operands[1], 0)))
            {
            case REG:
              output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
              break;

            case PRE_INC:
              abort (); /* Should never happen now.  */
              break;

            case PRE_DEC:
              output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
              break;

            case POST_INC:
              output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
              break;

            case POST_DEC:
              abort (); /* Should never happen now.  */
              break;

            case LABEL_REF:
            case CONST:
              output_asm_insn ("adr%?\t%0, %1", operands);
              output_asm_insn ("ldm%?ia\t%0, %M0", operands);
              break;

            default:
              if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
                                   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
                {
                  otherops[0] = operands[0];
                  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
                  otherops[2] = XEXP (XEXP (operands[1], 0), 1);
                  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
                    {
                      if (GET_CODE (otherops[2]) == CONST_INT)
                        {
                          switch (INTVAL (otherops[2]))
                            {
                            case -8:
                              output_asm_insn ("ldm%?db\t%1, %M0", otherops);
                              return "";
                            case -4:
                              output_asm_insn ("ldm%?da\t%1, %M0", otherops);
                              return "";
                            case 4:
                              output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
                              return "";
                            }
                          if (!(const_ok_for_arm (INTVAL (otherops[2]))))
                            output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
                          else
                            output_asm_insn ("add%?\t%0, %1, %2", otherops);
                        }
                      else
                        output_asm_insn ("add%?\t%0, %1, %2", otherops);
                    }
                  else
                    output_asm_insn ("sub%?\t%0, %1, %2", otherops);

                  return "ldm%?ia\t%0, %M0";
                }
              else
                {
                  otherops[1] = adj_offsettable_operand (operands[1], 4);
                  /* Take care of overlapping base/data reg.  */
                  if (reg_mentioned_p (operands[0], operands[1]))
                    {
                      output_asm_insn ("ldr%?\t%0, %1", otherops);
                      output_asm_insn ("ldr%?\t%0, %1", operands);
                    }
                  else
                    {
                      output_asm_insn ("ldr%?\t%0, %1", operands);
                      output_asm_insn ("ldr%?\t%0, %1", otherops);
                    }
                }
            }
        }
      else
        abort (); /* Constraints should prevent this.  */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == IP_REGNUM)
        abort ();

      switch (GET_CODE (XEXP (operands[0], 0)))
        {
        case REG:
          output_asm_insn ("stm%?ia\t%m0, %M1", operands);
          break;

        case PRE_INC:
          abort (); /* Should never happen now.  */
          break;

        case PRE_DEC:
          output_asm_insn ("stm%?db\t%m0!, %M1", operands);
          break;

        case POST_INC:
          output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
          break;

        case POST_DEC:
          abort (); /* Should never happen now.  */
          break;

        case PLUS:
          if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
            {
              switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
                {
                case -8:
                  output_asm_insn ("stm%?db\t%m0, %M1", operands);
                  return "";

                case -4:
                  output_asm_insn ("stm%?da\t%m0, %M1", operands);
                  return "";

                case 4:
                  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
                  return "";
                }
            }
          /* Fall through */

        default:
          otherops[0] = adj_offsettable_operand (operands[0], 4);
          otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
          output_asm_insn ("str%?\t%1, %0", operands);
          output_asm_insn ("str%?\t%1, %0", otherops);
        }
    }
  else
    abort (); /* Constraints should prevent this.  */

  return "";
}
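
/* For example, a DImode load from a plain register address, say
   (mem:DI (reg:SI r2)) into r0/r1, comes out as
        ldmia   r2, {r0, r1}
   while a store of r0/r1 to an address 4 bytes above the base register
   uses the increment-before form, "stmib r2, {r0, r1}".  */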


/* Output an arbitrary MOV reg, #n.
   OPERANDS[0] is a register.  OPERANDS[1] is a const_int.  */

const char *
output_mov_immediate (operands)
     rtx * operands;
{
  HOST_WIDE_INT n = INTVAL (operands[1]);
  int n_ones = 0;
  int i;

  /* Try to use one MOV.  */
  if (const_ok_for_arm (n))
    {
      output_asm_insn ("mov%?\t%0, %1", operands);
      return "";
    }

  /* Try to use one MVN.  */
  if (const_ok_for_arm (~n))
    {
      operands[1] = GEN_INT (~n);
      output_asm_insn ("mvn%?\t%0, %1", operands);
      return "";
    }

  /* If all else fails, make it out of ORRs or BICs as appropriate.  */

  for (i = 0; i < 32; i++)
    if (n & 1 << i)
      n_ones++;

  if (n_ones > 16)  /* Shorter to use MVN with BIC in this case.  */
    output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
  else
    output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);

  return "";
}
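
/* Worked example: n = 0x456 is not a valid ARM immediate (its set bits
   span more than 8 bits even allowing rotation), and neither is ~n, so
   output_multi_immediate builds it one byte group at a time:
        mov     r0, #86         @ 0x56
        orr     r0, r0, #1024   @ 0x400
   */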


/* Output an ADD r, s, #n where n may be too big for one instruction.  If
   adding zero to one register, output nothing.  */

const char *
output_add_immediate (operands)
     rtx * operands;
{
  HOST_WIDE_INT n = INTVAL (operands[2]);

  if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
    {
      if (n < 0)
        output_multi_immediate (operands,
                                "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
                                -n);
      else
        output_multi_immediate (operands,
                                "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
                                n);
    }

  return "";
}

/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.  */

static const char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx * operands;
     const char * instr1;
     const char * instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  n &= HOST_UINT (0xffffffff);
#endif

  if (n == 0)
    {
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands); /* Quick and easy output.  */
    }
  else
    {
      int i;
      const char * instr = instr1;

      /* Note that n is never zero here (which would give no output).  */
      for (i = 0; i < 32; i += 2)
        {
          if (n & (3 << i))
            {
              operands[immed_op] = GEN_INT (n & (255 << i));
              output_asm_insn (instr, operands);
              instr = instr2;
              i += 6;
            }
        }
    }

  return "";
}


/* Return the appropriate ARM instruction for the operation code.
   The returned result should not be overwritten.  OP is the rtx of the
   operation.  SHIFT_FIRST_ARG is TRUE if the first argument of the operator
   was shifted.  */

const char *
arithmetic_instr (op, shift_first_arg)
     rtx op;
     int shift_first_arg;
{
  switch (GET_CODE (op))
    {
    case PLUS:
      return "add";

    case MINUS:
      return shift_first_arg ? "rsb" : "sub";

    case IOR:
      return "orr";

    case XOR:
      return "eor";

    case AND:
      return "and";

    default:
      abort ();
    }
}


/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or the
   constant shift amount otherwise.  */

static const char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  const char * mnem;
  enum rtx_code code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
         power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
        *amountp = int_log2 (*amountp);
      else
        abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
         multiplication by a power of 2 with the recognizer for a
         shift.  >=32 is not a valid shift for "asl", so we must try and
         output a shift that produces the correct arithmetical result.
         Using lsr #32 is identical except for the fact that the carry bit
         is not set correctly if we set the flags; but we never use the
         carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
        *amountp &= 31;         /* Rotate is just modulo 32.  */
      else if (*amountp != (*amountp & 31))
        {
          if (code == ASHIFT)
            mnem = "lsr";
          *amountp = 32;
        }

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
        return NULL;
    }

  return mnem;
}
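
/* For example, (mult:SI (reg) (const_int 8)) comes back as "asl" with
   *AMOUNTP set to 3, so the caller can print "asl #3"; a rotate right
   by 33 is reduced to "ror" with an amount of 1.  */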


/* Obtain the shift from the POWER of two.  */
static HOST_WIDE_INT
int_log2 (power)
     HOST_WIDE_INT power;
{
  HOST_WIDE_INT shift = 0;

  while ((((HOST_INT (1)) << shift) & power) == 0)
    {
      if (shift > 31)
        abort ();
      shift++;
    }

  return shift;
}
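
/* E.g. int_log2 (64) is 6.  Note that this actually returns the index
   of the lowest set bit, which equals the logarithm only for powers of
   two; a value with no bit set in the low 32 bits hits the abort.  */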

/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  */
#define MAX_ASCII_LEN 51

void
output_ascii_pseudo_op (stream, p, len)
     FILE * stream;
     const unsigned char * p;
     int len;
{
  int i;
  int len_so_far = 0;

  fputs ("\t.ascii\t\"", stream);

  for (i = 0; i < len; i++)
    {
      register int c = p[i];

      if (len_so_far >= MAX_ASCII_LEN)
        {
          fputs ("\"\n\t.ascii\t\"", stream);
          len_so_far = 0;
        }

      switch (c)
        {
        case TARGET_TAB:
          fputs ("\\t", stream);
          len_so_far += 2;
          break;

        case TARGET_FF:
          fputs ("\\f", stream);
          len_so_far += 2;
          break;

        case TARGET_BS:
          fputs ("\\b", stream);
          len_so_far += 2;
          break;

        case TARGET_CR:
          fputs ("\\r", stream);
          len_so_far += 2;
          break;

        case TARGET_NEWLINE:
          fputs ("\\n", stream);
          c = p[i + 1];
          if ((c >= ' ' && c <= '~')
              || c == TARGET_TAB)
            /* This is a good place for a line break.  */
            len_so_far = MAX_ASCII_LEN;
          else
            len_so_far += 2;
          break;

        case '\"':
        case '\\':
          putc ('\\', stream);
          len_so_far++;
          /* Drop through.  */

        default:
          if (c >= ' ' && c <= '~')
            {
              putc (c, stream);
              len_so_far++;
            }
          else
            {
              fprintf (stream, "\\%03o", c);
              len_so_far += 4;
            }
          break;
        }
    }

  fputs ("\"\n", stream);
}
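
/* For example, the three bytes 'a', TAB, '"' are emitted as
        .ascii  "a\t\""
   with a fresh .ascii directive started whenever the current line
   approaches MAX_ASCII_LEN characters.  */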

/* Generate a function exit sequence as a string.  OPERAND is the
   condition for a conditional return; REVERSE is nonzero if that
   condition should be reversed.  REALLY_RETURN is zero if we only want
   to restore the saved registers (e.g. before a tail call), nonzero if
   we should actually return.  */
const char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char instr[100];
  int reg, live_regs = 0;
  int volatile_func = arm_volatile_func ();

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return "";

  return_used_this_function = 1;

  if (TARGET_ABORT_NORETURN && volatile_func)
    {
      /* If this function was declared non-returning, and we have found a tail
         call, then we have to trust that the called function won't return.  */
      if (really_return)
        {
          rtx ops[2];

          /* Otherwise, trap an attempted return by aborting.  */
          ops[0] = operand;
          ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
                                       : "abort");
          assemble_external_libcall (ops[1]);
          output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
        }

      return "";
    }

  if (current_function_calls_alloca && !really_return)
    abort ();

  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && !call_used_regs[reg])
      live_regs++;

  if (!TARGET_APCS_FRAME
      && !frame_pointer_needed
      && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
      && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
    live_regs++;

  if (flag_pic && !TARGET_SINGLE_PIC_BASE
      && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    live_regs++;

  if (live_regs || regs_ever_live[LR_REGNUM])
    live_regs++;

  if (frame_pointer_needed)
    live_regs += 4;

  /* On some ARM architectures it is faster to use LDR rather than LDM to
     load a single register.  On other architectures, the cost is the same.  */
  if (live_regs == 1
      && regs_ever_live[LR_REGNUM]
      && !really_return)
    output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
                     : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
  else if (live_regs == 1
           && regs_ever_live[LR_REGNUM]
           && TARGET_APCS_32)
    output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
                     : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
  else if (live_regs)
    {
      if (!regs_ever_live[LR_REGNUM])
        live_regs++;

      if (frame_pointer_needed)
        strcpy (instr,
                reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
      else
        strcpy (instr,
                reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");

      for (reg = 0; reg <= 10; reg++)
        if (regs_ever_live[reg]
            && (!call_used_regs[reg]
                || (flag_pic && !TARGET_SINGLE_PIC_BASE
                    && reg == PIC_OFFSET_TABLE_REGNUM)))
          {
            strcat (instr, "%|");
            strcat (instr, reg_names[reg]);
            if (--live_regs)
              strcat (instr, ", ");
          }

      if (frame_pointer_needed)
        {
          strcat (instr, "%|");
          strcat (instr, reg_names[11]);
          strcat (instr, ", ");
          strcat (instr, "%|");
          strcat (instr, reg_names[13]);
          strcat (instr, ", ");
          strcat (instr, "%|");
          strcat (instr, TARGET_INTERWORK || (!really_return)
                  ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM]);
        }
      else
        {
          if (!TARGET_APCS_FRAME
              && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
              && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
            {
              strcat (instr, "%|");
              strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
              strcat (instr, ", ");
            }

          strcat (instr, "%|");

          if (TARGET_INTERWORK && really_return)
            strcat (instr, reg_names[IP_REGNUM]);
          else
            strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
        }

      strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);

      if (TARGET_INTERWORK && really_return)
        {
          strcpy (instr, "bx%?");
          strcat (instr, reverse ? "%D0" : "%d0");
          strcat (instr, "\t%|");
          strcat (instr, frame_pointer_needed ? "lr" : "ip");

          output_asm_insn (instr, &operand);
        }
    }
  else if (really_return)
    {
      if (TARGET_INTERWORK)
        sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
      else
        sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
                 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");

      output_asm_insn (instr, &operand);
    }

  return "";
}

/* Return nonzero if optimizing and the current function is volatile.
   Such functions never return, and many memory cycles can be saved
   by not storing register values that will never be needed again.
   This optimization was added to speed up context switching in a
   kernel application.  */
int
arm_volatile_func ()
{
  return (optimize > 0
          && current_function_nothrow
          && TREE_THIS_VOLATILE (current_function_decl));
}

/* Write the function name into the code section, directly preceding
   the function prologue.

   Code will be output similar to this:
     t0
         .ascii "arm_poke_function_name", 0
         .align
     t1
         .word 0xff000000 + (t1 - t0)
     arm_poke_function_name
         mov     ip, sp
         stmfd   sp!, {fp, ip, lr, pc}
         sub     fp, ip, #4

   When performing a stack backtrace, code can inspect the value
   of 'pc' stored at 'fp' + 0.  If the trace function then looks
   at location pc - 12 and the top 8 bits are set, then we know
   that there is a function name embedded immediately preceding this
   location, whose length is ((pc[-3]) & ~0xff000000).

   We assume that pc is declared as a pointer to an unsigned long.

   It is of no benefit to output the function name if we are assembling
   a leaf function.  These function types will not contain a stack
   backtrace structure, therefore it is not possible to determine the
   function name.  */

void
arm_poke_function_name (stream, name)
     FILE * stream;
     char * name;
{
  unsigned long alignlength;
  unsigned long length;
  rtx x;

  length = strlen (name) + 1;
  alignlength = ROUND_UP (length);

  ASM_OUTPUT_ASCII (stream, name, length);
  ASM_OUTPUT_ALIGN (stream, 2);
  x = GEN_INT (HOST_UINT (0xff000000) + alignlength);
  ASM_OUTPUT_INT (stream, x);
}

/* The amount of stack adjustment that happens here, in output_return and in
   output_epilogue must be exactly the same as was calculated during reload,
   or things will point to the wrong place.  The only time we can safely
   ignore this constraint is when a function has no arguments on the stack,
   no stack frame requirement and no live registers except for `lr'.  If we
   can guarantee that by making all function calls into tail calls and that
   lr is not clobbered in any other way, then there is no need to push lr
   onto the stack.  */
void
output_arm_prologue (f, frame_size)
     FILE * f;
     int frame_size;
{
  int reg, live_regs_mask = 0;
  int volatile_func = arm_volatile_func ();

  /* Nonzero if we must stuff some register arguments onto the stack as if
     they were passed there.  */
  int store_arg_regs = 0;

  if (arm_ccfsm_state || arm_target_insn)
    abort ();                           /* Sanity check.  */

  if (arm_naked_function_p (current_function_decl))
    return;

  return_used_this_function = 0;

  asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
               current_function_args_size,
               current_function_pretend_args_size, frame_size);
  asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
               frame_pointer_needed,
               current_function_anonymous_args);

  if (volatile_func)
    asm_fprintf (f, "\t%@ Volatile function.\n");

  if (current_function_needs_context)
    asm_fprintf (f, "\t%@ Nested function.\n");

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && !call_used_regs[reg])
      live_regs_mask |= (1 << reg);

  if (!TARGET_APCS_FRAME
      && !frame_pointer_needed
      && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
      && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
    live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);

  if (flag_pic && !TARGET_SINGLE_PIC_BASE
      && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);

  if (frame_pointer_needed)
    live_regs_mask |= 0xD800;
  else if (regs_ever_live[LR_REGNUM])
    live_regs_mask |= 1 << LR_REGNUM;

  if (live_regs_mask)
    /* If a di mode load/store multiple is used, and the base register
       is r3, then r4 can become an ever live register without lr
       doing so, in this case we need to push lr as well, or we
       will fail to get a proper return.  */
    live_regs_mask |= 1 << LR_REGNUM;

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
#endif
}
f3bb6135 6818}
cce8749e 6819
cd2b33d0 6820const char *
0616531f
RE
6821arm_output_epilogue (really_return)
6822 int really_return;
cce8749e 6823{
949d79eb
RE
6824 int reg;
6825 int live_regs_mask = 0;
6354dc9b 6826 /* If we need this, then it will always be at least this much. */
b111229a 6827 int floats_offset = 12;
cce8749e 6828 rtx operands[3];
949d79eb 6829 int frame_size = get_frame_size ();
d5b7b3ae
RE
6830 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
6831 FILE * f = asm_out_file;
e5951263 6832 int volatile_func = arm_volatile_func ();
d5b7b3ae 6833 int return_regnum;
cce8749e 6834
b36ba79f 6835 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 6836 return "";
cce8749e 6837
31fdb4d5
DE
6838 /* Naked functions don't have epilogues. */
6839 if (arm_naked_function_p (current_function_decl))
949d79eb 6840 return "";
31fdb4d5 6841
d5b7b3ae
RE
6842 /* If we are throwing an exception, the address we want to jump to is in
6843 R1; otherwise, it's in LR. */
6844 return_regnum = eh_ofs ? 2 : LR_REGNUM;
6845
0616531f
RE
6846 /* If we are throwing an exception, then we really must be doing a return,
6847 so we can't tail-call. */
5895f793 6848 if (eh_ofs && !really_return)
0616531f
RE
6849 abort();
6850
e2c671ba 6851 /* A volatile function should never return. Call abort. */
c11145f6 6852 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6853 {
86efdc8e 6854 rtx op;
ed0e6530 6855 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 6856 assemble_external_libcall (op);
e2c671ba 6857 output_asm_insn ("bl\t%a0", &op);
949d79eb 6858 return "";
e2c671ba
RE
6859 }
6860
f3bb6135 6861 for (reg = 0; reg <= 10; reg++)
5895f793 6862 if (regs_ever_live[reg] && !call_used_regs[reg])
cce8749e 6863 {
ff9940b0
RE
6864 live_regs_mask |= (1 << reg);
6865 floats_offset += 4;
cce8749e
CH
6866 }
6867
d5b7b3ae 6868 /* Handle the frame pointer as a special case. */
5895f793
RE
6869 if (!TARGET_APCS_FRAME
6870 && !frame_pointer_needed
d5b7b3ae 6871 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6872 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6873 {
6874 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6875 floats_offset += 4;
6876 }
6877
ed0e6530
PB
6878 /* If we aren't loading the PIC register, don't stack it even though it may
6879 be live. */
5895f793 6880 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6881 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6882 {
6883 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6884 floats_offset += 4;
6885 }
6886
ff9940b0 6887 if (frame_pointer_needed)
cce8749e 6888 {
b111229a
RE
6889 if (arm_fpu_arch == FP_SOFT2)
6890 {
d5b7b3ae 6891 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 6892 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6893 {
6894 floats_offset += 12;
dd18ae56
NC
6895 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6896 reg, FP_REGNUM, floats_offset);
b111229a
RE
6897 }
6898 }
6899 else
6900 {
d5b7b3ae 6901 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 6902
d5b7b3ae 6903 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 6904 {
5895f793 6905 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6906 {
6907 floats_offset += 12;
6cfc7210 6908
6354dc9b 6909 /* We can't unstack more than four registers at once. */
b111229a
RE
6910 if (start_reg - reg == 3)
6911 {
dd18ae56
NC
6912 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6913 reg, FP_REGNUM, floats_offset);
b111229a
RE
6914 start_reg = reg - 1;
6915 }
6916 }
6917 else
6918 {
6919 if (reg != start_reg)
dd18ae56
NC
6920 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6921 reg + 1, start_reg - reg,
6922 FP_REGNUM, floats_offset);
b111229a
RE
6923 start_reg = reg - 1;
6924 }
6925 }
6926
6927 /* Just in case the last register checked also needs unstacking. */
6928 if (reg != start_reg)
dd18ae56
NC
6929 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6930 reg + 1, start_reg - reg,
6931 FP_REGNUM, floats_offset);
b111229a 6932 }
da6558fd 6933
6cfc7210 6934 if (TARGET_INTERWORK)
b111229a
RE
6935 {
6936 live_regs_mask |= 0x6800;
dd18ae56 6937 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
d5b7b3ae
RE
6938 if (eh_ofs)
6939 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6940 REGNO (eh_ofs));
0616531f
RE
6941 if (really_return)
6942 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
d5b7b3ae 6943 }
5895f793 6944 else if (eh_ofs || !really_return)
d5b7b3ae
RE
6945 {
6946 live_regs_mask |= 0x6800;
6947 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
0616531f
RE
6948 if (eh_ofs)
6949 {
6950 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6951 REGNO (eh_ofs));
6952 /* Even in 26-bit mode we do a mov (rather than a movs)
6953 because we don't have the PSR bits set in the
6954 address. */
6955 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6956 }
b111229a
RE
6957 }
6958 else
6959 {
6960 live_regs_mask |= 0xA800;
dd18ae56 6961 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
6962 TARGET_APCS_32 ? FALSE : TRUE);
6963 }
cce8749e
CH
6964 }
6965 else
6966 {
d2288d8d 6967 /* Restore stack pointer if necessary. */
56636818 6968 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
6969 {
6970 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
6971 operands[2] = GEN_INT (frame_size
6972 + current_function_outgoing_args_size);
d2288d8d
TG
6973 output_add_immediate (operands);
6974 }
6975
b111229a
RE
6976 if (arm_fpu_arch == FP_SOFT2)
6977 {
d5b7b3ae 6978 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 6979 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
6980 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6981 reg, SP_REGNUM);
b111229a
RE
6982 }
6983 else
6984 {
d5b7b3ae 6985 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 6986
d5b7b3ae 6987 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 6988 {
5895f793 6989 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6990 {
6991 if (reg - start_reg == 3)
6992 {
dd18ae56
NC
6993 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6994 start_reg, SP_REGNUM);
b111229a
RE
6995 start_reg = reg + 1;
6996 }
6997 }
6998 else
6999 {
7000 if (reg != start_reg)
dd18ae56
NC
7001 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7002 start_reg, reg - start_reg,
7003 SP_REGNUM);
6cfc7210 7004
b111229a
RE
7005 start_reg = reg + 1;
7006 }
7007 }
7008
7009 /* Just in case the last register checked also needs unstacking. */
7010 if (reg != start_reg)
dd18ae56
NC
7011 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7012 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
7013 }
7014
62b10bbc 7015 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 7016 {
6cfc7210 7017 if (TARGET_INTERWORK)
b111229a 7018 {
0616531f 7019 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2 7020
d5b7b3ae
RE
7021 /* Handle LR on its own. */
7022 if (live_regs_mask == (1 << LR_REGNUM))
7023 {
7024 if (eh_ofs)
7025 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7026 SP_REGNUM);
7027 else
7028 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7029 SP_REGNUM);
7030 }
7031 else if (live_regs_mask != 0)
7032 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7033 FALSE);
7034
7035 if (eh_ofs)
7036 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7037 REGNO (eh_ofs));
7038
0616531f
RE
7039 if (really_return)
7040 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
b111229a 7041 }
d5b7b3ae
RE
7042 else if (eh_ofs)
7043 {
7044 if (live_regs_mask == 0)
7045 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7046 else
7047 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
7048 live_regs_mask | (1 << LR_REGNUM), FALSE);
7049
7050 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7051 REGNO (eh_ofs));
7052 /* Jump to the target; even in 26-bit mode. */
7053 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7054 }
5895f793 7055 else if (TARGET_APCS_32 && live_regs_mask == 0 && !really_return)
0616531f
RE
7056 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7057 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
d5b7b3ae 7058 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
5895f793 7059 else if (!really_return)
0616531f
RE
7060 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7061 live_regs_mask | (1 << LR_REGNUM), FALSE);
32de079a 7062 else
d5b7b3ae
RE
7063 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7064 live_regs_mask | (1 << PC_REGNUM),
32de079a 7065 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
7066 }
7067 else
7068 {
62b10bbc 7069 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 7070 {
6354dc9b 7071 /* Restore the integer regs, and the return address into lr. */
0616531f 7072 live_regs_mask |= 1 << LR_REGNUM;
32de079a 7073
d5b7b3ae
RE
7074 if (live_regs_mask == (1 << LR_REGNUM))
7075 {
7076 if (eh_ofs)
7077 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7078 SP_REGNUM);
7079 else
7080 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7081 SP_REGNUM);
7082 }
7083 else if (live_regs_mask != 0)
7084 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7085 FALSE);
cce8749e 7086 }
b111229a 7087
cce8749e
CH
7088 if (current_function_pretend_args_size)
7089 {
6354dc9b 7090 /* Unwind the pre-pushed regs. */
cce8749e 7091 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 7092 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
7093 output_add_immediate (operands);
7094 }
d5b7b3ae
RE
7095
7096 if (eh_ofs)
7097 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7098 REGNO (eh_ofs));
0616531f
RE
7099
7100 if (really_return)
7101 {
7102 /* And finally, go home. */
7103 if (TARGET_INTERWORK)
7104 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7105 else if (TARGET_APCS_32 || eh_ofs)
7106 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7107 else
7108 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7109 }
cce8749e
CH
7110 }
7111 }
f3bb6135 7112
949d79eb
RE
7113 return "";
7114}

void
output_func_epilogue (frame_size)
     int frame_size;
{
  if (TARGET_THUMB)
    {
      /* ??? Probably not safe to set this here, since it assumes that a
         function will be emitted as assembly immediately after we generate
         RTL for it.  This does not happen for inline functions.  */
      return_used_this_function = 0;
    }
  else
    {
      if (use_return_insn (FALSE)
          && return_used_this_function
          && (frame_size + current_function_outgoing_args_size) != 0
          && !frame_pointer_needed)
        abort ();

      /* Reset the ARM-specific per-function variables.  */
      current_function_anonymous_args = 0;
      after_arm_reorg = 0;
    }
}
e2c671ba 7140
2c849145
JM
7141/* Generate and emit an insn that we will recognize as a push_multi.
7142 Unfortunately, since this insn does not reflect very well the actual
7143 semantics of the operation, we need to annotate the insn for the benefit
7144 of DWARF2 frame unwind information. */
2c849145 7145static rtx
7146emit_multi_reg_push (mask)
7147 int mask;
7148{
7149 int num_regs = 0;
7150 int i, j;
7151 rtx par;
2c849145 7152 rtx dwarf;
87e27392 7153 int dwarf_par_index;
2c849145 7154 rtx tmp, reg;
e2c671ba 7155
d5b7b3ae 7156 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 7157 if (mask & (1 << i))
5895f793 7158 num_regs++;
7159
7160 if (num_regs == 0 || num_regs > 16)
7161 abort ();
7162
7163 /* For the body of the insn we are going to generate an UNSPEC in
7164 parallel with several USEs. This allows the insn to be recognised
7165 by the push_multi pattern in the arm.md file. The insn looks
7166 something like this:
7167
7168 (parallel [
7169 (set (mem:BLK (pre_dec:BLK (reg:SI sp))) (unspec:BLK [(reg:SI r4)] 2))
7170 (use (reg:SI 11 fp))
7171 (use (reg:SI 12 ip))
7172 (use (reg:SI 14 lr))
7173 (use (reg:SI 15 pc))
7174 ])
7175
7176 For the frame note however, we try to be more explicit and actually
7177 show each register being stored into the stack frame, plus a (single)
7178 decrement of the stack pointer. We do it this way in order to be
7179 friendly to the stack unwinding code, which only wants to see a single
7180 stack decrement per instruction. The RTL we generate for the note looks
7181 something like this:
7182
7183 (sequence [
7184 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7185 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7186 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7187 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7188 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7189 (set (mem:SI (plus:SI (reg:SI sp) (const_int 16))) (reg:SI pc))
7190 ])
7191
7192 This sequence is used both by the code to support stack unwinding for
7193 exception handlers and the code to generate dwarf2 frame debugging. */
7194
43cffd11 7195 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
87e27392 7196 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_regs + 1));
2c849145 7197 RTX_FRAME_RELATED_P (dwarf) = 1;
87e27392 7198 dwarf_par_index = 1;
e2c671ba 7199
d5b7b3ae 7200 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7201 {
7202 if (mask & (1 << i))
7203 {
7204 reg = gen_rtx_REG (SImode, i);
7205
e2c671ba 7206 XVECEXP (par, 0, 0)
7207 = gen_rtx_SET (VOIDmode,
7208 gen_rtx_MEM (BLKmode,
7209 gen_rtx_PRE_DEC (BLKmode,
7210 stack_pointer_rtx)),
7211 gen_rtx_UNSPEC (BLKmode,
2c849145 7212 gen_rtvec (1, reg),
43cffd11 7213 2));
7214
7215 tmp = gen_rtx_SET (VOIDmode,
87e27392 7216 gen_rtx_MEM (SImode, stack_pointer_rtx),
7217 reg);
7218 RTX_FRAME_RELATED_P (tmp) = 1;
7219 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7220 dwarf_par_index ++;
2c849145 7221
7222 break;
7223 }
7224 }
7225
7226 for (j = 1, i++; j < num_regs; i++)
7227 {
7228 if (mask & (1 << i))
7229 {
7230 reg = gen_rtx_REG (SImode, i);
7231
7232 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7233
7234 tmp = gen_rtx_SET (VOIDmode,
7235 gen_rtx_MEM (SImode,
7236 gen_rtx_PLUS (SImode,
7237 stack_pointer_rtx,
7238 GEN_INT (4 * j))),
7239 reg);
7240 RTX_FRAME_RELATED_P (tmp) = 1;
87e27392 7241 XVECEXP (dwarf, 0, dwarf_par_index ++) = tmp;
2c849145 7242
7243 j++;
7244 }
7245 }
b111229a 7246
2c849145 7247 par = emit_insn (par);
7248
7249 tmp = gen_rtx_SET (SImode,
7250 stack_pointer_rtx,
7251 gen_rtx_PLUS (SImode,
7252 stack_pointer_rtx,
7253 GEN_INT (-4 * num_regs)));
7254 RTX_FRAME_RELATED_P (tmp) = 1;
7255 XVECEXP (dwarf, 0, 0) = tmp;
7256
7257 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7258 REG_NOTES (par));
7259 return par;
7260}
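/* Added commentary (an illustration, not from the original sources):
   a call such as emit_multi_reg_push (0x4010) pushes {r4, lr} with a
   single stmfd, while the attached REG_FRAME_RELATED_EXPR note
   re-describes it as one 8-byte stack decrement followed by two
   individual register stores, which is the form the unwind code wants. */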
7261
2c849145 7262static rtx
7263emit_sfm (base_reg, count)
7264 int base_reg;
7265 int count;
7266{
7267 rtx par;
7268 rtx dwarf;
7269 rtx tmp, reg;
7270 int i;
7271
43cffd11 7272 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7273 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7274 RTX_FRAME_RELATED_P (dwarf) = 1;
7275
7276 reg = gen_rtx_REG (XFmode, base_reg++);
7277
7278 XVECEXP (par, 0, 0)
7279 = gen_rtx_SET (VOIDmode,
7280 gen_rtx_MEM (BLKmode,
7281 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7282 gen_rtx_UNSPEC (BLKmode,
2c849145 7283 gen_rtvec (1, reg),
43cffd11 7284 2));
7285 tmp
7286 = gen_rtx_SET (VOIDmode,
7287 gen_rtx_MEM (XFmode,
7288 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7289 reg);
7290 RTX_FRAME_RELATED_P (tmp) = 1;
7291 XVECEXP (dwarf, 0, count - 1) = tmp;
7292
b111229a 7293 for (i = 1; i < count; i++)
7294 {
7295 reg = gen_rtx_REG (XFmode, base_reg++);
7296 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7297
7298 tmp = gen_rtx_SET (VOIDmode,
7299 gen_rtx_MEM (XFmode,
7300 gen_rtx_PRE_DEC (BLKmode,
7301 stack_pointer_rtx)),
7302 reg);
7303 RTX_FRAME_RELATED_P (tmp) = 1;
7304 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7305 }
b111229a 7306
7307 par = emit_insn (par);
7308 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7309 REG_NOTES (par));
7310 return par;
7311}
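/* Added commentary: emit_sfm (REG, COUNT), as used by arm_expand_prologue
   below, emits a single FPA store-multiple (sfmfd) of COUNT consecutive
   registers starting at REG, while its dwarf note records each
   pre-decremented XFmode store separately for the unwinder. */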
7312
7313void
7314arm_expand_prologue ()
7315{
7316 int reg;
7317 rtx amount = GEN_INT (-(get_frame_size ()
7318 + current_function_outgoing_args_size));
7319 int live_regs_mask = 0;
7320 int store_arg_regs = 0;
7321 /* If this function doesn't return, then there is no need to push
7322 the call-saved regs. */
46406379 7323 int volatile_func = arm_volatile_func ();
2c849145 7324 rtx insn;
7325 rtx ip_rtx;
7326 int fp_offset = 0;
7327
e2c671ba 7328
7329 /* Naked functions don't have prologues. */
7330 if (arm_naked_function_p (current_function_decl))
7331 return;
7332
7333 if (current_function_anonymous_args && current_function_pretend_args_size)
7334 store_arg_regs = 1;
7335
5895f793 7336 if (!volatile_func)
7337 {
7338 for (reg = 0; reg <= 10; reg++)
5895f793 7339 if (regs_ever_live[reg] && !call_used_regs[reg])
7340 live_regs_mask |= 1 << reg;
7341
7342 if (!TARGET_APCS_FRAME
7343 && !frame_pointer_needed
d5b7b3ae 7344 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 7345 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
7346 live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7347
7348 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7349 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
e2c671ba 7350
7351 if (regs_ever_live[LR_REGNUM])
7352 live_regs_mask |= 1 << LR_REGNUM;
6ed30148 7353 }
e2c671ba 7354
7355 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
7356
7357 if (frame_pointer_needed)
7358 {
7359 if (current_function_needs_context)
7360 {
7361 /* The static chain register is the same as the IP register
7362 used as a scratch register during stack frame creation.
7363 To get around this, we need to find somewhere to store IP
7364 whilst the frame is being created. We try the following
7365 places in order:
7366
7367 1. An unused argument register.
7368 2. A slot on the stack above the frame. (This only
7369 works if the function is not a varargs function).
7370
7371 If neither of these places is available, we abort (for now). */
7372 if (regs_ever_live[3] == 0)
7373 {
7374 insn = gen_rtx_REG (SImode, 3);
7375 insn = gen_rtx_SET (SImode, insn, ip_rtx);
7376 insn = emit_insn (insn);
7377 RTX_FRAME_RELATED_P (insn) = 1;
7378 }
7379 else if (current_function_pretend_args_size == 0)
7380 {
7381 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
7382 insn = gen_rtx_MEM (SImode, insn);
7383 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
7384 insn = emit_insn (insn);
7385 RTX_FRAME_RELATED_P (insn) = 1;
7386 fp_offset = 4;
7387 }
7388 else
7389 /* FIXME - the way to handle this situation is to allow
7390 the pretend args to be dumped onto the stack, then
7391 reuse r3 to save IP. This would involve moving the
7392 copying of SP into IP until after the pretend args
7393 have been dumped, but this is not too hard. */
7394 error ("Unable to find a temporary location for static chain register");
7395 }
7396
e2c671ba 7397 live_regs_mask |= 0xD800;
7398
7399 if (fp_offset)
7400 {
7401 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
7402 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7403 }
7404 else
7405 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
7406
7407 insn = emit_insn (insn);
2c849145 7408 RTX_FRAME_RELATED_P (insn) = 1;
7409 }
7410
7411 if (current_function_pretend_args_size)
7412 {
7413 if (store_arg_regs)
7414 insn = emit_multi_reg_push
7415 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
e2c671ba 7416 else
7417 insn = emit_insn
7418 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7419 GEN_INT (-current_function_pretend_args_size)));
7420 RTX_FRAME_RELATED_P (insn) = 1;
7421 }
7422
7423 if (live_regs_mask)
7424 {
7425 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 7426 we won't get a proper return. */
62b10bbc 7427 live_regs_mask |= 1 << LR_REGNUM;
7428 insn = emit_multi_reg_push (live_regs_mask);
7429 RTX_FRAME_RELATED_P (insn) = 1;
7430 }
7431
7432 /* For now the integer regs are still popped in output_arm_epilogue (). */
7433
5895f793 7434 if (!volatile_func)
7435 {
7436 if (arm_fpu_arch == FP_SOFT2)
7437 {
d5b7b3ae 7438 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
5895f793 7439 if (regs_ever_live[reg] && !call_used_regs[reg])
7440 {
7441 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7442 insn = gen_rtx_MEM (XFmode, insn);
7443 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7444 gen_rtx_REG (XFmode, reg)));
7445 RTX_FRAME_RELATED_P (insn) = 1;
7446 }
7447 }
7448 else
7449 {
d5b7b3ae 7450 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7451
d5b7b3ae 7452 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a 7453 {
5895f793 7454 if (regs_ever_live[reg] && !call_used_regs[reg])
7455 {
7456 if (start_reg - reg == 3)
7457 {
7458 insn = emit_sfm (reg, 4);
7459 RTX_FRAME_RELATED_P (insn) = 1;
7460 start_reg = reg - 1;
7461 }
7462 }
7463 else
7464 {
7465 if (start_reg != reg)
7466 {
7467 insn = emit_sfm (reg + 1, start_reg - reg);
7468 RTX_FRAME_RELATED_P (insn) = 1;
7469 }
7470 start_reg = reg - 1;
7471 }
7472 }
7473
7474 if (start_reg != reg)
7475 {
7476 insn = emit_sfm (reg + 1, start_reg - reg);
7477 RTX_FRAME_RELATED_P (insn) = 1;
7478 }
7479 }
7480 }
7481
7482 if (frame_pointer_needed)
2c849145 7483 {
7484 insn = GEN_INT (-(4 + current_function_pretend_args_size + fp_offset));
7485 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
2c849145 7486 RTX_FRAME_RELATED_P (insn) = 1;
7487
7488 if (current_function_needs_context)
7489 {
7490 /* Recover the static chain register. */
7491 if (regs_ever_live [3] == 0)
7492 {
7493 insn = gen_rtx_REG (SImode, 3);
7494 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7495 insn = emit_insn (insn);
7496 RTX_FRAME_RELATED_P (insn) = 1;
7497 }
7498 else /* if (current_function_pretend_args_size == 0) */
7499 {
7500 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
7501 insn = gen_rtx_MEM (SImode, insn);
7502 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7503 insn = emit_insn (insn);
7504 RTX_FRAME_RELATED_P (insn) = 1;
7505 }
7506 }
2c849145 7507 }
7508
7509 if (amount != const0_rtx)
7510 {
7511 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7512 amount));
7513 RTX_FRAME_RELATED_P (insn) = 1;
7514
7515 /* If the frame pointer is needed, emit a special barrier that
7516 will prevent the scheduler from moving stores to the frame
7517 before the stack adjustment. */
7518 if (frame_pointer_needed)
7519 {
7520 rtx unspec = gen_rtx_UNSPEC (SImode,
7521 gen_rtvec (2, stack_pointer_rtx,
7522 hard_frame_pointer_rtx), 4);
7523
7524 emit_insn (gen_rtx_CLOBBER (VOIDmode,
7525 gen_rtx_MEM (BLKmode, unspec)));
7526 }
7527 }
7528
7529 /* If we are profiling, make sure no instructions are scheduled before
7530 the call to mcount. Similarly if the user has requested no
7531 scheduling in the prolog. */
7532 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
7533 emit_insn (gen_blockage ());
7534}
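/* Added commentary (a sketch, not generated output): for a typical
   APCS-32 frame the insns emitted above assemble to something like

	mov	ip, sp
	stmfd	sp!, {r4, fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #<size of locals>

   with every frame-related insn annotated for the dwarf2 unwinder. */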
cce8749e 7535\f
7536/* If CODE is 'd', then X is a condition operand and the instruction
7537 should only be executed if the condition is true.
ddd5a7c1 7538 If CODE is 'D', then X is a condition operand and the instruction
7539 should only be executed if the condition is false: however, if the mode
7540 of the comparison is CCFPEmode, then always execute the instruction -- we
7541 do this because in these circumstances !GE does not necessarily imply LT;
7542 in these cases the instruction pattern will take care to make sure that
7543 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 7544 doing this instruction unconditionally.
7545 If CODE is 'N' then X is a floating point operand that must be negated
7546 before output.
7547 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7548 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
7549
7550void
7551arm_print_operand (stream, x, code)
62b10bbc 7552 FILE * stream;
7553 rtx x;
7554 int code;
7555{
7556 switch (code)
7557 {
7558 case '@':
f3139301 7559 fputs (ASM_COMMENT_START, stream);
7560 return;
7561
7562 case '_':
7563 fputs (user_label_prefix, stream);
7564 return;
7565
9997d19d 7566 case '|':
f3139301 7567 fputs (REGISTER_PREFIX, stream);
7568 return;
7569
7570 case '?':
7571 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
7572 {
7573 if (TARGET_THUMB || current_insn_predicate != NULL)
7574 abort ();
7575
7576 fputs (arm_condition_codes[arm_current_cc], stream);
7577 }
7578 else if (current_insn_predicate)
7579 {
7580 enum arm_cond_code code;
7581
7582 if (TARGET_THUMB)
7583 abort ();
7584
7585 code = get_arm_condition_code (current_insn_predicate);
7586 fputs (arm_condition_codes[code], stream);
7587 }
7588 return;
7589
7590 case 'N':
7591 {
7592 REAL_VALUE_TYPE r;
7593 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7594 r = REAL_VALUE_NEGATE (r);
7595 fprintf (stream, "%s", fp_const_from_val (&r));
7596 }
7597 return;
7598
7599 case 'B':
7600 if (GET_CODE (x) == CONST_INT)
7601 {
7602 HOST_WIDE_INT val;
5895f793 7603 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 7604 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7605 }
7606 else
7607 {
7608 putc ('~', stream);
7609 output_addr_const (stream, x);
7610 }
7611 return;
7612
7613 case 'i':
7614 fprintf (stream, "%s", arithmetic_instr (x, 1));
7615 return;
7616
7617 case 'I':
7618 fprintf (stream, "%s", arithmetic_instr (x, 0));
7619 return;
7620
7621 case 'S':
7622 {
7623 HOST_WIDE_INT val;
5895f793 7624 const char * shift = shift_op (x, &val);
9997d19d 7625
7626 if (shift)
7627 {
5895f793 7628 fprintf (stream, ", %s ", shift);
7629 if (val == -1)
7630 arm_print_operand (stream, XEXP (x, 1), 0);
7631 else
7632 {
7633 fputc ('#', stream);
36ba9cb8 7634 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7635 }
e2c671ba 7636 }
7637 }
7638 return;
7639
7640 /* An explanation of the 'Q', 'R' and 'H' register operands:
7641
7642 In a pair of registers containing a DI or DF value the 'Q'
7643 operand returns the register number of the register containing
7644 the least significant part of the value. The 'R' operand returns
7645 the register number of the register containing the most
7646 significant part of the value.
7647
7648 The 'H' operand returns the higher of the two register numbers.
7649 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
7650 same as the 'Q' operand, since the most significant part of the
7651 value is held in the lower number register. The reverse is true
7652 on systems where WORDS_BIG_ENDIAN is false.
7653
7654 The purpose of these operands is to distinguish between cases
7655 where the endian-ness of the values is important (for example
7656 when they are added together), and cases where the endian-ness
7657 is irrelevant, but the order of register operations is important.
7658 For example when loading a value from memory into a register
7659 pair, the endian-ness does not matter. Provided that the value
7660 from the lower memory address is put into the lower numbered
7661 register, and the value from the higher address is put into the
7662 higher numbered register, the load will work regardless of whether
7663 the value being loaded is big-wordian or little-wordian. The
7664 order of the two register loads can matter however, if the address
7665 of the memory location is actually held in one of the registers
7666 being overwritten by the load. */
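 /* Added commentary: for a DImode value held in {r0, r1} on a
 little-endian target, '%Q' prints r0, '%R' prints r1 and '%H' also
 prints r1; with WORDS_BIG_ENDIAN the senses of '%Q' and '%R' swap,
 but '%H' still names the higher register number. */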
c1c2bc04 7667 case 'Q':
d5b7b3ae 7668 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 7669 abort ();
d5b7b3ae 7670 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
7671 return;
7672
9997d19d 7673 case 'R':
d5b7b3ae 7674 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 7675 abort ();
7676 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
7677 return;
7678
7679 case 'H':
7680 if (REGNO (x) > LAST_ARM_REGNUM)
7681 abort ();
7682 asm_fprintf (stream, "%r", REGNO (x) + 1);
7683 return;
7684
7685 case 'm':
7686 asm_fprintf (stream, "%r",
7687 GET_CODE (XEXP (x, 0)) == REG
7688 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
7689 return;
7690
7691 case 'M':
dd18ae56 7692 asm_fprintf (stream, "{%r-%r}",
7693 REGNO (x),
7694 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
7695 return;
7696
7697 case 'd':
5895f793 7698 if (!x)
7699 return;
7700
7701 if (TARGET_ARM)
7702 fputs (arm_condition_codes[get_arm_condition_code (x)],
7703 stream);
7704 else
7705 fputs (thumb_condition_code (x, 0), stream);
7706 return;
7707
7708 case 'D':
5895f793 7709 if (!x)
7710 return;
7711
7712 if (TARGET_ARM)
7713 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
7714 (get_arm_condition_code (x))],
9997d19d 7715 stream);
7716 else
7717 fputs (thumb_condition_code (x, 1), stream);
7718 return;
7719
7720 default:
7721 if (x == 0)
7722 abort ();
7723
7724 if (GET_CODE (x) == REG)
d5b7b3ae 7725 asm_fprintf (stream, "%r", REGNO (x));
7726 else if (GET_CODE (x) == MEM)
7727 {
7728 output_memory_reference_mode = GET_MODE (x);
7729 output_address (XEXP (x, 0));
7730 }
7731 else if (GET_CODE (x) == CONST_DOUBLE)
7732 fprintf (stream, "#%s", fp_immediate_constant (x));
7733 else if (GET_CODE (x) == NEG)
6354dc9b 7734 abort (); /* This should never happen now. */
7735 else
7736 {
7737 fputc ('#', stream);
7738 output_addr_const (stream, x);
7739 }
7740 }
7741}
7742\f
7743/* A finite state machine takes care of noticing whether or not instructions
7744 can be conditionally executed, and thus decrease execution time and code
7745 size by deleting branch instructions. The fsm is controlled by
7746 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
7747
7748/* The states of the fsm controlling condition codes are:
7749 0: normal, do nothing special
7750 1: make ASM_OUTPUT_OPCODE not output this instruction
7751 2: make ASM_OUTPUT_OPCODE not output this instruction
7752 3: make instructions conditional
7753 4: make instructions conditional
7754
7755 State transitions (state->state by whom under condition):
7756 0 -> 1 final_prescan_insn if the `target' is a label
7757 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
7758 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
7759 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
7760 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
7761 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
7762 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
7763 (the target insn is arm_target_insn).
7764
7765 If the jump clobbers the conditions then we use states 2 and 4.
7766
7767 A similar thing can be done with conditional return insns.
7768
7769 XXX In case the `target' is an unconditional branch, this conditionalising
7770 of the instructions always reduces code size, but not always execution
7771 time. But then, I want to reduce the code size to somewhere near what
7772 /bin/cc produces. */
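/* Worked example (added commentary, a sketch only):

	cmp	r0, #0
	beq	.L1
	mov	r1, #1
   .L1:

   is rewritten by this fsm as

	cmp	r0, #0
	movne	r1, #1

   i.e. the branch is not output (state 1) and the skipped insn is
   conditionalised with the inverse condition (state 3). */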
7773
7774/* Returns the index of the ARM condition code string in
7775 `arm_condition_codes'. COMPARISON should be an rtx like
7776 `(eq (...) (...))'. */
7777
84ed5e79 7778static enum arm_cond_code
7779get_arm_condition_code (comparison)
7780 rtx comparison;
7781{
5165176d 7782 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
7783 register int code;
7784 register enum rtx_code comp_code = GET_CODE (comparison);
7785
7786 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 7787 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
7788 XEXP (comparison, 1));
7789
7790 switch (mode)
cce8749e 7791 {
7792 case CC_DNEmode: code = ARM_NE; goto dominance;
7793 case CC_DEQmode: code = ARM_EQ; goto dominance;
7794 case CC_DGEmode: code = ARM_GE; goto dominance;
7795 case CC_DGTmode: code = ARM_GT; goto dominance;
7796 case CC_DLEmode: code = ARM_LE; goto dominance;
7797 case CC_DLTmode: code = ARM_LT; goto dominance;
7798 case CC_DGEUmode: code = ARM_CS; goto dominance;
7799 case CC_DGTUmode: code = ARM_HI; goto dominance;
7800 case CC_DLEUmode: code = ARM_LS; goto dominance;
7801 case CC_DLTUmode: code = ARM_CC;
7802
7803 dominance:
7804 if (comp_code != EQ && comp_code != NE)
7805 abort ();
7806
7807 if (comp_code == EQ)
7808 return ARM_INVERSE_CONDITION_CODE (code);
7809 return code;
7810
5165176d 7811 case CC_NOOVmode:
84ed5e79 7812 switch (comp_code)
5165176d 7813 {
7814 case NE: return ARM_NE;
7815 case EQ: return ARM_EQ;
7816 case GE: return ARM_PL;
7817 case LT: return ARM_MI;
7818 default: abort ();
7819 }
7820
7821 case CC_Zmode:
84ed5e79 7822 switch (comp_code)
5165176d 7823 {
7824 case NE: return ARM_NE;
7825 case EQ: return ARM_EQ;
7826 default: abort ();
7827 }
7828
7829 case CCFPEmode:
7830 case CCFPmode:
7831 /* These encodings assume that AC=1 in the FPA system control
7832 byte. This allows us to handle all cases except UNEQ and
7833 LTGT. */
7834 switch (comp_code)
7835 {
7836 case GE: return ARM_GE;
7837 case GT: return ARM_GT;
7838 case LE: return ARM_LS;
7839 case LT: return ARM_MI;
7840 case NE: return ARM_NE;
7841 case EQ: return ARM_EQ;
7842 case ORDERED: return ARM_VC;
7843 case UNORDERED: return ARM_VS;
7844 case UNLT: return ARM_LT;
7845 case UNLE: return ARM_LE;
7846 case UNGT: return ARM_HI;
7847 case UNGE: return ARM_PL;
7848 /* UNEQ and LTGT do not have a representation. */
7849 case UNEQ: /* Fall through. */
7850 case LTGT: /* Fall through. */
7851 default: abort ();
7852 }
7853
7854 case CC_SWPmode:
7855 switch (comp_code)
7856 {
7857 case NE: return ARM_NE;
7858 case EQ: return ARM_EQ;
7859 case GE: return ARM_LE;
7860 case GT: return ARM_LT;
7861 case LE: return ARM_GE;
7862 case LT: return ARM_GT;
7863 case GEU: return ARM_LS;
7864 case GTU: return ARM_CC;
7865 case LEU: return ARM_CS;
7866 case LTU: return ARM_HI;
7867 default: abort ();
7868 }
7869
7870 case CC_Cmode:
7871 switch (comp_code)
7872 {
7873 case LTU: return ARM_CS;
7874 case GEU: return ARM_CC;
7875 default: abort ();
7876 }
7877
5165176d 7878 case CCmode:
84ed5e79 7879 switch (comp_code)
5165176d 7880 {
7881 case NE: return ARM_NE;
7882 case EQ: return ARM_EQ;
7883 case GE: return ARM_GE;
7884 case GT: return ARM_GT;
7885 case LE: return ARM_LE;
7886 case LT: return ARM_LT;
7887 case GEU: return ARM_CS;
7888 case GTU: return ARM_HI;
7889 case LEU: return ARM_LS;
7890 case LTU: return ARM_CC;
7891 default: abort ();
7892 }
7893
7894 default: abort ();
7895 }
7896
7897 abort ();
f3bb6135 7898}
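/* Added commentary: the CC_SWPmode table above is deliberately inverted
   because the comparison operands were swapped when the flags were set;
   a GT test, for example, must then be output as LT, and GEU as LS. */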
7899
7900
7901void
74bbc178 7902arm_final_prescan_insn (insn)
cce8749e 7903 rtx insn;
7904{
7905 /* BODY will hold the body of INSN. */
7906 register rtx body = PATTERN (insn);
7907
7908 /* This will be 1 if trying to repeat the trick, and things need to be
7909 reversed if it appears to fail. */
7910 int reverse = 0;
7911
7912 /* If JUMP_CLOBBERS is one then the condition codes are clobbered if a
7913 branch is taken, even if the rtl suggests otherwise. It also
7914 means that we have to grub around within the jump expression to find
7915 out what the conditions are when the jump isn't taken. */
7916 int jump_clobbers = 0;
7917
6354dc9b 7918 /* If we start with a return insn, we only succeed if we find another one. */
7919 int seeking_return = 0;
7920
7921 /* START_INSN will hold the insn from where we start looking. This is the
7922 first insn after the following code_label if REVERSE is true. */
7923 rtx start_insn = insn;
7924
7925 /* If in state 4, check if the target branch is reached, in order to
7926 change back to state 0. */
7927 if (arm_ccfsm_state == 4)
7928 {
7929 if (insn == arm_target_insn)
7930 {
7931 arm_target_insn = NULL;
7932 arm_ccfsm_state = 0;
7933 }
7934 return;
7935 }
7936
7937 /* If in state 3, it is possible to repeat the trick, if this insn is an
7938 unconditional branch to a label, and immediately following this branch
7939 is the previous target label which is only used once, and the label this
7940 branch jumps to is not too far off. */
7941 if (arm_ccfsm_state == 3)
7942 {
7943 if (simplejump_p (insn))
7944 {
7945 start_insn = next_nonnote_insn (start_insn);
7946 if (GET_CODE (start_insn) == BARRIER)
7947 {
7948 /* XXX Isn't this always a barrier? */
7949 start_insn = next_nonnote_insn (start_insn);
7950 }
7951 if (GET_CODE (start_insn) == CODE_LABEL
7952 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7953 && LABEL_NUSES (start_insn) == 1)
7954 reverse = TRUE;
7955 else
7956 return;
7957 }
7958 else if (GET_CODE (body) == RETURN)
7959 {
7960 start_insn = next_nonnote_insn (start_insn);
7961 if (GET_CODE (start_insn) == BARRIER)
7962 start_insn = next_nonnote_insn (start_insn);
7963 if (GET_CODE (start_insn) == CODE_LABEL
7964 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7965 && LABEL_NUSES (start_insn) == 1)
7966 {
7967 reverse = TRUE;
7968 seeking_return = 1;
7969 }
7970 else
7971 return;
7972 }
7973 else
7974 return;
7975 }
7976
7977 if (arm_ccfsm_state != 0 && !reverse)
7978 abort ();
7979 if (GET_CODE (insn) != JUMP_INSN)
7980 return;
7981
ddd5a7c1 7982 /* This jump might be paralleled with a clobber of the condition codes;
7983 the jump should always come first. */
7984 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
7985 body = XVECEXP (body, 0, 0);
7986
7987#if 0
7988 /* If this is a conditional return then we don't want to know */
7989 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7990 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
7991 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
7992 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
7993 return;
7994#endif
7995
7996 if (reverse
7997 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7998 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
7999 {
8000 int insns_skipped;
8001 int fail = FALSE, succeed = FALSE;
8002 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8003 int then_not_else = TRUE;
ff9940b0 8004 rtx this_insn = start_insn, label = 0;
cce8749e 8005
8006 /* If the jump cannot be done with one instruction, we cannot
8007 conditionally execute the instruction in the inverse case. */
ff9940b0 8008 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 8009 {
8010 jump_clobbers = 1;
8011 return;
8012 }
ff9940b0 8013
8014 /* Register the insn jumped to. */
8015 if (reverse)
8016 {
8017 if (!seeking_return)
8018 label = XEXP (SET_SRC (body), 0);
8019 }
8020 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8021 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8022 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8023 {
8024 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8025 then_not_else = FALSE;
8026 }
8027 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8028 seeking_return = 1;
8029 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8030 {
8031 seeking_return = 1;
8032 then_not_else = FALSE;
8033 }
8034 else
8035 abort ();
8036
8037 /* See how many insns this branch skips, and what kind of insns. If all
8038 insns are okay, and the label or unconditional branch to the same
8039 label is not too far away, succeed. */
8040 for (insns_skipped = 0;
b36ba79f 8041 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
8042 {
8043 rtx scanbody;
8044
8045 this_insn = next_nonnote_insn (this_insn);
8046 if (!this_insn)
8047 break;
8048
8049 switch (GET_CODE (this_insn))
8050 {
8051 case CODE_LABEL:
8052 /* Succeed if it is the target label, otherwise fail since
8053 control falls in from somewhere else. */
8054 if (this_insn == label)
8055 {
8056 if (jump_clobbers)
8057 {
8058 arm_ccfsm_state = 2;
8059 this_insn = next_nonnote_insn (this_insn);
8060 }
8061 else
8062 arm_ccfsm_state = 1;
8063 succeed = TRUE;
8064 }
8065 else
8066 fail = TRUE;
8067 break;
8068
ff9940b0 8069 case BARRIER:
cce8749e 8070 /* Succeed if the following insn is the target label.
8071 Otherwise fail.
8072 If return insns are used then the last insn in a function
6354dc9b 8073 will be a barrier. */
cce8749e 8074 this_insn = next_nonnote_insn (this_insn);
ff9940b0 8075 if (this_insn && this_insn == label)
cce8749e 8076 {
8077 if (jump_clobbers)
8078 {
8079 arm_ccfsm_state = 2;
8080 this_insn = next_nonnote_insn (this_insn);
8081 }
8082 else
8083 arm_ccfsm_state = 1;
8084 succeed = TRUE;
8085 }
8086 else
8087 fail = TRUE;
8088 break;
8089
ff9940b0 8090 case CALL_INSN:
2b835d68 8091 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 8092 calls. */
2b835d68 8093 if (TARGET_APCS_32)
8094 {
8095 /* Succeed if the following insn is the target label,
8096 or if the following two insns are a barrier and
8097 the target label. */
8098 this_insn = next_nonnote_insn (this_insn);
8099 if (this_insn && GET_CODE (this_insn) == BARRIER)
8100 this_insn = next_nonnote_insn (this_insn);
8101
8102 if (this_insn && this_insn == label
b36ba79f 8103 && insns_skipped < max_insns_skipped)
8104 {
8105 if (jump_clobbers)
8106 {
8107 arm_ccfsm_state = 2;
8108 this_insn = next_nonnote_insn (this_insn);
8109 }
8110 else
8111 arm_ccfsm_state = 1;
8112 succeed = TRUE;
8113 }
8114 else
8115 fail = TRUE;
8116 }
ff9940b0 8117 break;
2b835d68 8118
8119 case JUMP_INSN:
8120 /* If this is an unconditional branch to the same label, succeed.
8121 If it is to another label, do nothing. If it is conditional,
8122 fail. */
914a3b8c 8123 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 8124
ed4c4348 8125 scanbody = PATTERN (this_insn);
8126 if (GET_CODE (scanbody) == SET
8127 && GET_CODE (SET_DEST (scanbody)) == PC)
8128 {
8129 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
8130 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
8131 {
8132 arm_ccfsm_state = 2;
8133 succeed = TRUE;
8134 }
8135 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
8136 fail = TRUE;
8137 }
8138 /* Fail if a conditional return is undesirable (e.g. on a
8139 StrongARM), but still allow this if optimizing for size. */
8140 else if (GET_CODE (scanbody) == RETURN
8141 && !use_return_insn (TRUE)
8142 && !optimize_size)
b36ba79f 8143 fail = TRUE;
8144 else if (GET_CODE (scanbody) == RETURN
8145 && seeking_return)
8146 {
8147 arm_ccfsm_state = 2;
8148 succeed = TRUE;
8149 }
8150 else if (GET_CODE (scanbody) == PARALLEL)
8151 {
8152 switch (get_attr_conds (this_insn))
8153 {
8154 case CONDS_NOCOND:
8155 break;
8156 default:
8157 fail = TRUE;
8158 break;
8159 }
8160 }
8161 else
8162 fail = TRUE; /* Unrecognized jump (e.g. epilogue). */
8163
8164 break;
8165
8166 case INSN:
8167 /* Instructions using or affecting the condition codes make it
8168 fail. */
ed4c4348 8169 scanbody = PATTERN (this_insn);
8170 if (!(GET_CODE (scanbody) == SET
8171 || GET_CODE (scanbody) == PARALLEL)
74641843 8172 || get_attr_conds (this_insn) != CONDS_NOCOND)
8173 fail = TRUE;
8174 break;
8175
8176 default:
8177 break;
8178 }
8179 }
8180 if (succeed)
8181 {
ff9940b0 8182 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 8183 arm_target_label = CODE_LABEL_NUMBER (label);
8184 else if (seeking_return || arm_ccfsm_state == 2)
8185 {
8186 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
8187 {
8188 this_insn = next_nonnote_insn (this_insn);
8189 if (this_insn && (GET_CODE (this_insn) == BARRIER
8190 || GET_CODE (this_insn) == CODE_LABEL))
8191 abort ();
8192 }
8193 if (!this_insn)
8194 {
8195 /* Oh dear! We ran off the end... give up. */
8196 recog (PATTERN (insn), insn, NULL_PTR);
8197 arm_ccfsm_state = 0;
abaa26e5 8198 arm_target_insn = NULL;
8199 return;
8200 }
8201 arm_target_insn = this_insn;
8202 }
8203 else
8204 abort ();
8205 if (jump_clobbers)
8206 {
8207 if (reverse)
8208 abort ();
8209 arm_current_cc =
8210 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
8211 0), 0), 1));
8212 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
8213 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8214 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
8215 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8216 }
8217 else
8218 {
8219 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
8220 what it was. */
8221 if (!reverse)
8222 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
8223 0));
8224 }
cce8749e 8225
8226 if (reverse || then_not_else)
8227 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8228 }
d5b7b3ae 8229
1ccbefce 8230 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 8231 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 8232 across this call; since the insn has been recognized already we
b020fd92 8233 call recog direct). */
ff9940b0 8234 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 8235 }
f3bb6135 8236}
cce8749e 8237
8238int
8239arm_regno_class (regno)
8240 int regno;
8241{
8242 if (TARGET_THUMB)
8243 {
8244 if (regno == STACK_POINTER_REGNUM)
8245 return STACK_REG;
8246 if (regno == CC_REGNUM)
8247 return CC_REG;
8248 if (regno < 8)
8249 return LO_REGS;
8250 return HI_REGS;
8251 }
8252
8253 if ( regno <= LAST_ARM_REGNUM
8254 || regno == FRAME_POINTER_REGNUM
8255 || regno == ARG_POINTER_REGNUM)
8256 return GENERAL_REGS;
8257
8258 if (regno == CC_REGNUM)
8259 return NO_REGS;
8260
8261 return FPU_REGS;
8262}
8263
8264/* Handle a special case when computing the offset
8265 of an argument from the frame pointer. */
8266int
8267arm_debugger_arg_offset (value, addr)
8268 int value;
8269 rtx addr;
8270{
8271 rtx insn;
8272
8273 /* We are only interested if dbxout_parms() failed to compute the offset. */
8274 if (value != 0)
8275 return 0;
8276
8277 /* We can only cope with the case where the address is held in a register. */
8278 if (GET_CODE (addr) != REG)
8279 return 0;
8280
8281 /* If we are using the frame pointer to point at the argument, then
8282 an offset of 0 is correct. */
cd2b33d0 8283 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
8284 return 0;
8285
8286 /* If we are using the stack pointer to point at the
8287 argument, then an offset of 0 is correct. */
5895f793 8288 if ((TARGET_THUMB || !frame_pointer_needed)
8289 && REGNO (addr) == SP_REGNUM)
8290 return 0;
8291
8292 /* Oh dear. The argument is pointed to by a register rather
8293 than being held in a register, or being stored at a known
8294 offset from the frame pointer. Since GDB only understands
8295 those two kinds of argument we must translate the address
8296 held in the register into an offset from the frame pointer.
8297 We do this by searching through the insns for the function
8298 looking to see where this register gets its value. If the
8299 register is initialised from the frame pointer plus an offset
8300 then we are in luck and we can continue, otherwise we give up.
8301
8302 This code is exercised by producing debugging information
8303 for a function with arguments like this:
8304
8305 double func (double a, double b, int c, double d) {return d;}
8306
8307 Without this code the stab for parameter 'd' will be set to
8308 an offset of 0 from the frame pointer, rather than 8. */
8309
8310 /* The if() statement says:
8311
8312 If the insn is a normal instruction
8313 and if the insn is setting the value in a register
8314 and if the register being set is the register holding the address of the argument
8315 and if the address is computed by an addition
8316 that involves adding to a register
8317 which is the frame pointer
8318 a constant integer
8319
8320 then... */
8321
8322 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8323 {
8324 if ( GET_CODE (insn) == INSN
8325 && GET_CODE (PATTERN (insn)) == SET
8326 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8327 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8328 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 8329 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
8330 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8331 )
8332 {
8333 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8334
8335 break;
8336 }
8337 }
8338
8339 if (value == 0)
8340 {
8341 debug_rtx (addr);
8342 warning ("Unable to compute real location of stacked parameter");
8343 value = 8; /* XXX magic hack */
8344 }
8345
8346 return value;
8347}
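/* Added commentary: the insn matched by the loop above has the shape

	(set (reg Rn) (plus (reg hard-frame-pointer) (const_int OFFSET)))

   and OFFSET is the value handed back to the debugger. */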
8348
8349\f
8350/* Recursively search through all of the blocks in a function
8351 checking to see if any of the variables created in that
8352 function match the RTX called 'orig'. If they do then
8353 replace them with the RTX called 'new'. */
8354
8355static void
8356replace_symbols_in_block (block, orig, new)
8357 tree block;
8358 rtx orig;
8359 rtx new;
8360{
8361 for (; block; block = BLOCK_CHAIN (block))
8362 {
8363 tree sym;
8364
5895f793 8365 if (!TREE_USED (block))
8366 continue;
8367
8368 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8369 {
8370 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8371 || DECL_IGNORED_P (sym)
8372 || TREE_CODE (sym) != VAR_DECL
8373 || DECL_EXTERNAL (sym)
5895f793 8374 || !rtx_equal_p (DECL_RTL (sym), orig)
8375 )
8376 continue;
8377
8378 DECL_RTL (sym) = new;
8379 }
8380
8381 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8382 }
8383}
8384
8385/* Return the number (counting from 0) of the least significant set
8386 bit in MASK. */
8387#ifdef __GNUC__
8388inline
8389#endif
8390static int
8391number_of_first_bit_set (mask)
8392 int mask;
8393{
8394 int bit;
8395
8396 for (bit = 0;
8397 (mask & (1 << bit)) == 0;
5895f793 8398 ++bit)
8399 continue;
8400
8401 return bit;
8402}
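/* Example (added commentary): number_of_first_bit_set (0x18) is 3, since
   bit 3 is the lowest bit set in 0x18. Note that the loop never
   terminates for a zero mask, so callers must not pass one. */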
8403
8404/* Generate code to return from a thumb function.
8405 If 'reg_containing_return_addr' is -1, then the return address is
8406 actually on the stack, at the stack pointer. */
8407static void
8408thumb_exit (f, reg_containing_return_addr, eh_ofs)
8409 FILE * f;
8410 int reg_containing_return_addr;
8411 rtx eh_ofs;
8412{
8413 unsigned regs_available_for_popping;
8414 unsigned regs_to_pop;
8415 int pops_needed;
8416 unsigned available;
8417 unsigned required;
8418 int mode;
8419 int size;
8420 int restore_a4 = FALSE;
8421
8422 /* Compute the registers we need to pop. */
8423 regs_to_pop = 0;
8424 pops_needed = 0;
8425
8426 /* There is an assumption here that if eh_ofs is not NULL, the
8427 normal return address will have been pushed. */
8428 if (reg_containing_return_addr == -1 || eh_ofs)
8429 {
8430 /* When we are generating a return for __builtin_eh_return,
8431 reg_containing_return_addr must specify the return regno. */
8432 if (eh_ofs && reg_containing_return_addr == -1)
8433 abort ();
8434
8435 regs_to_pop |= 1 << LR_REGNUM;
5895f793 8436 ++pops_needed;
8437 }
8438
8439 if (TARGET_BACKTRACE)
8440 {
8441 /* Restore the (ARM) frame pointer and stack pointer. */
8442 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8443 pops_needed += 2;
8444 }
8445
8446 /* If there is nothing to pop then just emit the BX instruction and
8447 return. */
8448 if (pops_needed == 0)
8449 {
8450 if (eh_ofs)
8451 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8452
8453 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8454 return;
8455 }
8456 /* Otherwise if we are not supporting interworking and we have not created
8457 a backtrace structure and the function was not entered in ARM mode then
8458 just pop the return address straight into the PC. */
8459 else if (!TARGET_INTERWORK
8460 && !TARGET_BACKTRACE
8461 && !is_called_in_ARM_mode (current_function_decl))
8462 {
8463 if (eh_ofs)
8464 {
8465 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
8466 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8467 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8468 }
8469 else
8470 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
8471
8472 return;
8473 }
8474
8475 /* Find out how many of the (return) argument registers we can corrupt. */
8476 regs_available_for_popping = 0;
8477
8478 /* If returning via __builtin_eh_return, the bottom three registers
8479 all contain information needed for the return. */
8480 if (eh_ofs)
8481 size = 12;
8482 else
8483 {
8484#ifdef RTX_CODE
8485 /* Try to deduce the registers used from the function's
8486 return value. This is more reliable than examining
8487 regs_ever_live[] because that will be set if the register is
8488 ever used in the function, not just if the register is used
8489 to hold a return value. */
8490
8491 if (current_function_return_rtx != 0)
8492 mode = GET_MODE (current_function_return_rtx);
8493 else
8494#endif
8495 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8496
8497 size = GET_MODE_SIZE (mode);
8498
8499 if (size == 0)
8500 {
8501 /* In a void function we can use any argument register.
8502 In a function that returns a structure on the stack
8503 we can use the second and third argument registers. */
8504 if (mode == VOIDmode)
8505 regs_available_for_popping =
8506 (1 << ARG_REGISTER (1))
8507 | (1 << ARG_REGISTER (2))
8508 | (1 << ARG_REGISTER (3));
8509 else
8510 regs_available_for_popping =
8511 (1 << ARG_REGISTER (2))
8512 | (1 << ARG_REGISTER (3));
8513 }
8514 else if (size <= 4)
8515 regs_available_for_popping =
8516 (1 << ARG_REGISTER (2))
8517 | (1 << ARG_REGISTER (3));
8518 else if (size <= 8)
8519 regs_available_for_popping =
8520 (1 << ARG_REGISTER (3));
8521 }
8522
8523 /* Match registers to be popped with registers into which we pop them. */
8524 for (available = regs_available_for_popping,
8525 required = regs_to_pop;
8526 required != 0 && available != 0;
8527 available &= ~(available & - available),
8528 required &= ~(required & - required))
8529 -- pops_needed;
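 /* Added commentary: (x & -x) isolates the lowest set bit of x, so each
 iteration of the loop above pairs off the lowest remaining register in
 each mask and retires one needed pop. */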
8530
8531 /* If we have any popping registers left over, remove them. */
8532 if (available > 0)
5895f793 8533 regs_available_for_popping &= ~available;
8534
8535 /* Otherwise if we need another popping register we can use
8536 the fourth argument register. */
8537 else if (pops_needed)
8538 {
8539 /* If we have not found any free argument registers and
8540 reg a4 contains the return address, we must move it. */
8541 if (regs_available_for_popping == 0
8542 && reg_containing_return_addr == LAST_ARG_REGNUM)
8543 {
8544 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8545 reg_containing_return_addr = LR_REGNUM;
8546 }
8547 else if (size > 12)
8548 {
8549 /* Register a4 is being used to hold part of the return value,
8550 but we have dire need of a free, low register. */
8551 restore_a4 = TRUE;
8552
8553 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
8554 }
8555
8556 if (reg_containing_return_addr != LAST_ARG_REGNUM)
8557 {
8558 /* The fourth argument register is available. */
8559 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
8560
5895f793 8561 --pops_needed;
8562 }
8563 }
8564
8565 /* Pop as many registers as we can. */
8566 thumb_pushpop (f, regs_available_for_popping, FALSE);
8567
8568 /* Process the registers we popped. */
8569 if (reg_containing_return_addr == -1)
8570 {
8571 /* The return address was popped into the lowest numbered register. */
5895f793 8572 regs_to_pop &= ~(1 << LR_REGNUM);
8573
8574 reg_containing_return_addr =
8575 number_of_first_bit_set (regs_available_for_popping);
8576
8577 /* Remove this register from the mask of available registers, so that
8578 the return address will not be corrupted by further pops. */
5895f793 8579 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
8580 }
8581
8582 /* If we popped other registers then handle them here. */
8583 if (regs_available_for_popping)
8584 {
8585 int frame_pointer;
8586
8587 /* Work out which register currently contains the frame pointer. */
8588 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
8589
8590 /* Move it into the correct place. */
8591 asm_fprintf (f, "\tmov\t%r, %r\n",
8592 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
8593
8594 /* (Temporarily) remove it from the mask of popped registers. */
8595 regs_available_for_popping &= ~(1 << frame_pointer);
8596 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
8597
8598 if (regs_available_for_popping)
8599 {
8600 int stack_pointer;
8601
8602 /* We popped the stack pointer as well,
8603 find the register that contains it. */
8604 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
8605
8606 /* Move it into the stack register. */
8607 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
8608
8609 /* At this point we have popped all necessary registers, so
8610 do not worry about restoring regs_available_for_popping
8611 to its correct value:
8612
8613 assert (pops_needed == 0)
8614 assert (regs_available_for_popping == (1 << frame_pointer))
8615 assert (regs_to_pop == (1 << STACK_POINTER)) */
8616 }
8617 else
8618 {
8619 /* Since we have just moved the popped value into the frame
8620 pointer, the popping register is available for reuse, and
8621 we know that we still have the stack pointer left to pop. */
8622 regs_available_for_popping |= (1 << frame_pointer);
8623 }
8624 }
8625
8626 /* If we still have registers left on the stack, but we no longer have
8627 any registers into which we can pop them, then we must move the return
8628 address into the link register and make available the register that
8629 contained it. */
8630 if (regs_available_for_popping == 0 && pops_needed > 0)
8631 {
8632 regs_available_for_popping |= 1 << reg_containing_return_addr;
8633
8634 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
8635 reg_containing_return_addr);
8636
8637 reg_containing_return_addr = LR_REGNUM;
8638 }
8639
8640 /* If we have registers left on the stack then pop some more.
8641 We know that at most we will want to pop FP and SP. */
8642 if (pops_needed > 0)
8643 {
8644 int popped_into;
8645 int move_to;
8646
8647 thumb_pushpop (f, regs_available_for_popping, FALSE);
8648
8649 /* We have popped either FP or SP.
8650 Move whichever one it is into the correct register. */
8651 popped_into = number_of_first_bit_set (regs_available_for_popping);
8652 move_to = number_of_first_bit_set (regs_to_pop);
8653
8654 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
8655
5895f793 8656 regs_to_pop &= ~(1 << move_to);
d5b7b3ae 8657
5895f793 8658 --pops_needed;
8659 }
8660
8661 /* If we still have not popped everything then we must have only
8662 had one register available to us and we are now popping the SP. */
8663 if (pops_needed > 0)
8664 {
8665 int popped_into;
8666
8667 thumb_pushpop (f, regs_available_for_popping, FALSE);
8668
8669 popped_into = number_of_first_bit_set (regs_available_for_popping);
8670
8671 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
8672 /*
8673 assert (regs_to_pop == (1 << STACK_POINTER))
8674 assert (pops_needed == 1)
8675 */
8676 }
8677
8678 /* If necessary restore the a4 register. */
8679 if (restore_a4)
8680 {
8681 if (reg_containing_return_addr != LR_REGNUM)
8682 {
8683 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8684 reg_containing_return_addr = LR_REGNUM;
8685 }
8686
8687 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
8688 }
8689
8690 if (eh_ofs)
8691 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8692
8693 /* Return to caller. */
8694 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8695}
8696
8697/* Emit code to push or pop registers to or from the stack. */
8698static void
8699thumb_pushpop (f, mask, push)
8700 FILE * f;
8701 int mask;
8702 int push;
8703{
8704 int regno;
8705 int lo_mask = mask & 0xFF;
8706
5895f793 8707 if (lo_mask == 0 && !push && (mask & (1 << 15)))
8708 {
8709 /* Special case. Do not generate a POP PC statement here, do it in
8710 thumb_exit (). */
8711 thumb_exit (f, -1, NULL_RTX);
8712 return;
8713 }
8714
8715 fprintf (f, "\t%s\t{", push ? "push" : "pop");
8716
8717 /* Look at the low registers first. */
5895f793 8718 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
8719 {
8720 if (lo_mask & 1)
8721 {
8722 asm_fprintf (f, "%r", regno);
8723
8724 if ((lo_mask & ~1) != 0)
8725 fprintf (f, ", ");
8726 }
8727 }
8728
8729 if (push && (mask & (1 << LR_REGNUM)))
8730 {
8731 /* Catch pushing the LR. */
8732 if (mask & 0xFF)
8733 fprintf (f, ", ");
8734
8735 asm_fprintf (f, "%r", LR_REGNUM);
8736 }
8737 else if (!push && (mask & (1 << PC_REGNUM)))
8738 {
8739 /* Catch popping the PC. */
8740 if (TARGET_INTERWORK || TARGET_BACKTRACE)
8741 {
8742 /* The PC is never popped directly, instead
8743 it is popped into r3 and then BX is used. */
8744 fprintf (f, "}\n");
8745
8746 thumb_exit (f, -1, NULL_RTX);
8747
8748 return;
8749 }
8750 else
8751 {
8752 if (mask & 0xFF)
8753 fprintf (f, ", ");
8754
8755 asm_fprintf (f, "%r", PC_REGNUM);
8756 }
8757 }
8758
8759 fprintf (f, "}\n");
8760}
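/* Example (added commentary): thumb_pushpop (f, 0x90, 1) emits
   "push {r4, r7}". A pop whose mask includes the PC is redirected
   through thumb_exit () when interworking or backtracing is enabled,
   since the return must then go through BX. */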
8761\f
8762void
8763thumb_final_prescan_insn (insn)
8764 rtx insn;
8765{
d5b7b3ae 8766 if (flag_print_asm_name)
8767 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
8768 INSN_ADDRESSES (INSN_UID (insn)));
8769}
8770
8771int
8772thumb_shiftable_const (val)
8773 unsigned HOST_WIDE_INT val;
8774{
8775 unsigned HOST_WIDE_INT mask = 0xff;
8776 int i;
8777
8778 if (val == 0) /* XXX */
8779 return 0;
8780
8781 for (i = 0; i < 25; i++)
8782 if ((val & (mask << i)) == val)
8783 return 1;
8784
8785 return 0;
8786}
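/* Examples (added commentary): 0x00FF0000 is accepted (0xff << 16),
   while 0x101 is rejected because its set bits span more than eight
   contiguous bit positions. */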
8787
8788/* Returns non-zero if the current function contains,
8789 or might contain a far jump. */
8790int
8791thumb_far_jump_used_p (int in_prologue)
8792{
8793 rtx insn;
8794
8795 /* This test is only important for leaf functions. */
5895f793 8796 /* assert (!leaf_function_p ()); */
8797
8798 /* If we have already decided that far jumps may be used,
8799 do not bother checking again, and always return true even if
8800 it turns out that they are not being used. Once we have made
8801 the decision that far jumps are present (and that hence the link
8802 register will be pushed onto the stack) we cannot go back on it. */
8803 if (cfun->machine->far_jump_used)
8804 return 1;
8805
8806 /* If this function is not being called from the prologue/epilogue
8807 generation code then it must be being called from the
8808 INITIAL_ELIMINATION_OFFSET macro. */
5895f793 8809 if (!in_prologue)
8810 {
8811 /* In this case we know that we are being asked about the elimination
8812 of the arg pointer register. If that register is not being used,
8813 then there are no arguments on the stack, and we do not have to
8814 worry that a far jump might force the prologue to push the link
8815 register, changing the stack offsets. In this case we can just
8816 return false, since the presence of far jumps in the function will
8817 not affect stack offsets.
8818
8819 If the arg pointer is live (or if it was live, but has now been
8820 eliminated and so set to dead) then we do have to test to see if
8821 the function might contain a far jump. This test can lead to some
8822 false negatives, since before reload is completed the length of
8823 branch instructions is not known, so gcc defaults to returning their
8824 longest length, which in turn sets the far jump attribute to true.
8825
8826 A false negative will not result in bad code being generated, but it
8827 will result in a needless push and pop of the link register. We
8828 hope that this does not occur too often. */
8829 if (regs_ever_live [ARG_POINTER_REGNUM])
8830 cfun->machine->arg_pointer_live = 1;
5895f793 8831 else if (!cfun->machine->arg_pointer_live)
8832 return 0;
8833 }
8834
8835 /* Check to see if the function contains a branch
8836 insn with the far jump attribute set. */
8837 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8838 {
8839 if (GET_CODE (insn) == JUMP_INSN
8840 /* Ignore tablejump patterns. */
8841 && GET_CODE (PATTERN (insn)) != ADDR_VEC
8842 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
8843 && get_attr_far_jump (insn) == FAR_JUMP_YES
8844 )
8845 {
8846 /* Record the fact that we have decided that
8847 the function does use far jumps. */
8848 cfun->machine->far_jump_used = 1;
8849 return 1;
8850 }
8851 }
8852
8853 return 0;
8854}
8855
8856/* Return non-zero if FUNC must be entered in ARM mode. */
8857int
8858is_called_in_ARM_mode (func)
8859 tree func;
8860{
8861 if (TREE_CODE (func) != FUNCTION_DECL)
8862 abort ();
8863
8864 /* Ignore the problem about functions whose address is taken. */
8865 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
8866 return TRUE;
8867
8868#ifdef ARM_PE
8869 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
8870#else
8871 return FALSE;
8872#endif
8873}
8874
8875/* The bits which aren't usefully expanded as rtl. */
cd2b33d0 8876const char *
8877thumb_unexpanded_epilogue ()
8878{
8879 int regno;
8880 int live_regs_mask = 0;
8881 int high_regs_pushed = 0;
8882 int leaf_function = leaf_function_p ();
8883 int had_to_push_lr;
8884 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8885
8886 if (return_used_this_function)
8887 return "";
8888
8889 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8890 if (regs_ever_live[regno] && !call_used_regs[regno]
8891 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8892 live_regs_mask |= 1 << regno;
8893
8894 for (regno = 8; regno < 13; regno++)
8895 {
8896 if (regs_ever_live[regno] && !call_used_regs[regno]
8897 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8898 high_regs_pushed++;
8899 }
8900
8901 /* The prolog may have pushed some high registers to use as
8902 work registers, e.g. the testsuite file:
8903 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
8904 compiles to produce:
8905 push {r4, r5, r6, r7, lr}
8906 mov r7, r9
8907 mov r6, r8
8908 push {r6, r7}
8909 as part of the prolog. We have to undo that pushing here. */
8910
8911 if (high_regs_pushed)
8912 {
8913 int mask = live_regs_mask;
8914 int next_hi_reg;
8915 int size;
8916 int mode;
8917
8918#ifdef RTX_CODE
8919 /* Try to deduce the registers used from the function's return value.
8920 This is more reliable than examining regs_ever_live[] because that
8921 will be set if the register is ever used in the function, not just if
8922 the register is used to hold a return value. */
8923
8924 if (current_function_return_rtx != 0)
8925 mode = GET_MODE (current_function_return_rtx);
8926 else
8927#endif
8928 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8929
8930 size = GET_MODE_SIZE (mode);
8931
8932 /* Unless we are returning a type of size > 12, register r3 is
8933 available. */
8934 if (size < 13)
8935 mask |= 1 << 3;
8936
8937 if (mask == 0)
8938 /* Oh dear! We have no low registers into which we can pop
8939 high registers! */
8940 fatal ("No low registers available for popping high registers");
8941
8942 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
8943 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
8944 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
8945 break;
8946
8947 while (high_regs_pushed)
8948 {
8949 /* Find lo register(s) into which the high register(s) can
8950 be popped. */
8951 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8952 {
8953 if (mask & (1 << regno))
8954 high_regs_pushed--;
8955 if (high_regs_pushed == 0)
8956 break;
8957 }
8958
8959 mask &= (2 << regno) - 1; /* Keep only bits 0..regno; a no-op if regno == 8. */
8960
8961 /* Pop the values into the low register(s). */
8962 thumb_pushpop (asm_out_file, mask, 0);
8963
8964 /* Move the value(s) into the high registers. */
8965 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8966 {
8967 if (mask & (1 << regno))
8968 {
8969 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
8970 regno);
8971
8972 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
8973 if (regs_ever_live[next_hi_reg]
8974 && !call_used_regs[next_hi_reg]
8975 && !(TARGET_SINGLE_PIC_BASE
8976 && (next_hi_reg == arm_pic_register)))
8977 break;
8978 }
8979 }
8980 }
8981 }
8982
5895f793 8983 had_to_push_lr = (live_regs_mask || !leaf_function
8984 || thumb_far_jump_used_p (1));
8985
8986 if (TARGET_BACKTRACE
8987 && ((live_regs_mask & 0xFF) == 0)
8988 && regs_ever_live [LAST_ARG_REGNUM] != 0)
8989 {
8990 /* The stack backtrace structure creation code had to
8991 push R7 in order to get a work register, so we pop
8992 it now. */
8993 live_regs_mask |= (1 << LAST_LO_REGNUM);
8994 }
8995
8996 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
8997 {
8998 if (had_to_push_lr
8999 && !is_called_in_ARM_mode (current_function_decl)
9000 && !eh_ofs)
9001 live_regs_mask |= 1 << PC_REGNUM;
9002
9003 /* Either no argument registers were pushed or a backtrace
9004 structure was created which includes an adjusted stack
9005 pointer, so just pop everything. */
9006 if (live_regs_mask)
9007 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9008
9009 if (eh_ofs)
9010 thumb_exit (asm_out_file, 2, eh_ofs);
9011 /* We have either just popped the return address into the
9012 PC, or it was kept in LR for the entire function, or
9013 it is still on the stack because we do not want to
9014 return by doing a pop {pc}. */
9015 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
9016 thumb_exit (asm_out_file,
9017 (had_to_push_lr
9018 && is_called_in_ARM_mode (current_function_decl)) ?
9019 -1 : LR_REGNUM, NULL_RTX);
9020 }
9021 else
9022 {
9023 /* Pop everything but the return address. */
5895f793 9024 live_regs_mask &= ~(1 << PC_REGNUM);
9025
9026 if (live_regs_mask)
9027 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9028
9029 if (had_to_push_lr)
9030 /* Get the return address into a temporary register. */
9031 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
9032
9033 /* Remove the argument registers that were pushed onto the stack. */
9034 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
9035 SP_REGNUM, SP_REGNUM,
9036 current_function_pretend_args_size);
9037
9038 if (eh_ofs)
9039 thumb_exit (asm_out_file, 2, eh_ofs);
9040 else
9041 thumb_exit (asm_out_file,
9042 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
9043 }
9044
9045 return "";
9046}
9047
9048/* Functions to save and restore machine-specific function data. */
9049
9050static void
9051arm_mark_machine_status (p)
9052 struct function * p;
9053{
9054 struct machine_function *machine = p->machine;
9055
9056 ggc_mark_rtx (machine->ra_rtx);
9057 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
9058}
9059
9060static void
9061arm_init_machine_status (p)
9062 struct function * p;
9063{
9064 p->machine =
9065 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
9066}
9067
9068/* Return an RTX indicating where the return address to the
9069 calling function can be found. */
9070rtx
9071arm_return_addr (count, frame)
9072 int count;
9073 rtx frame ATTRIBUTE_UNUSED;
9074{
9075 rtx reg;
9076
9077 if (count != 0)
9078 return NULL_RTX;
9079
9080 reg = cfun->machine->ra_rtx;
9081
9082 if (reg == NULL)
9083 {
9084 rtx init;
9085
9086 /* No rtx yet. Invent one, and initialize it for r14 (lr) in
9087 the prologue. */
9088 reg = gen_reg_rtx (Pmode);
9089 cfun->machine->ra_rtx = reg;
9090
5895f793 9091 if (!TARGET_APCS_32)
9092 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
9093 GEN_INT (RETURN_ADDR_MASK26));
9094 else
9095 init = gen_rtx_REG (Pmode, LR_REGNUM);
9096
9097 init = gen_rtx_SET (VOIDmode, reg, init);
9098
9099 /* Emit the insn to the prologue with the other argument copies. */
9100 push_topmost_sequence ();
9101 emit_insn_after (init, get_insns ());
9102 pop_topmost_sequence ();
9103 }
9104
9105 return reg;
9106}
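/* For example, __builtin_return_address (0) should end up in
   arm_return_addr above with COUNT == 0 and receive the pseudo
   initialised from LR in the prologue; any deeper frame (COUNT > 0)
   yields NULL_RTX, so the builtin evaluates to a null pointer.  */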
9107
9108/* Do anything needed before RTL is emitted for each function. */
9109void
9110arm_init_expanders ()
9111{
9112 /* Arrange to initialize and mark the machine per-function status. */
9113 init_machine_status = arm_init_machine_status;
9114 mark_machine_status = arm_mark_machine_status;
9115}
9116
9117/* Generate the rest of a function's prologue. */
9118void
9119thumb_expand_prologue ()
9120{
9121 HOST_WIDE_INT amount = (get_frame_size ()
9122 + current_function_outgoing_args_size);
9123
9124 /* Naked functions don't have prologues. */
9125 if (arm_naked_function_p (current_function_decl))
9126 return;
9127
9128 if (frame_pointer_needed)
9129 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
9130
9131 if (amount)
9132 {
9133 amount = ROUND_UP (amount);
9134
9135 if (amount < 512)
9136 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5895f793 9137 GEN_INT (-amount)));
9138 else
9139 {
9140 int regno;
9141 rtx reg;
9142
9143 /* The stack decrement is too big for an immediate value in a single
9144 insn. In theory we could issue multiple subtracts, but after
9145 three of them it becomes more space efficient to place the full
9146 value in the constant pool and load into a register. (Also the
9147 ARM debugger really likes to see only one stack decrement per
9148 function). So instead we look for a scratch register into which
9149 we can load the decrement, and then we subtract this from the
9150 stack pointer. Unfortunately on the thumb the only available
9151 scratch registers are the argument registers, and we cannot use
9152 these as they may hold arguments to the function. Instead we
9153 attempt to locate a call preserved register which is used by this
9154 function. If we can find one, then we know that it will have
9155 been pushed at the start of the prologue and so we can corrupt
9156 it now. */
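/* An illustrative sketch of the two resulting sequences; the register
   numbers are assumptions, not the literal compiler output.  With a
   free call-saved low register, say r4:
        ldr     r4, =-<amount>
        add     sp, sp, r4
   With no such register, saving r7 in the high register ip:
        mov     ip, r7
        ldr     r7, =-<amount>
        add     sp, sp, r7
        mov     r7, ip  */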
9157 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
9158 if (regs_ever_live[regno]
9159 && !call_used_regs[regno] /* Paranoia */
9160 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
9161 && !(frame_pointer_needed
9162 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
9163 break;
9164
9165 if (regno > LAST_LO_REGNUM) /* Very unlikely */
9166 {
9167 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
9168
9169 /* Choose an arbitrary, non-argument low register. */
9170 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
9171
9172 /* Save it by copying it into a high, scratch register. */
9173 emit_insn (gen_movsi (spare, reg));
9174
9175 /* Decrement the stack. */
5895f793 9176 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9177 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9178 reg));
9179
9180 /* Restore the low register's original value. */
9181 emit_insn (gen_movsi (reg, spare));
9182
9183 /* Emit a USE of the restored scratch register, so that flow
9184 analysis will not consider the restore redundant. The
9185 register won't be used again in this function and isn't
9186 restored by the epilogue. */
9187 emit_insn (gen_rtx_USE (VOIDmode, reg));
9188 }
9189 else
9190 {
9191 reg = gen_rtx (REG, SImode, regno);
9192
5895f793 9193 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9194 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9195 reg));
9196 }
9197 }
9198 }
9199
9200 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9201 emit_insn (gen_blockage ());
9202}
9203
9204void
9205thumb_expand_epilogue ()
9206{
9207 HOST_WIDE_INT amount = (get_frame_size ()
9208 + current_function_outgoing_args_size);
9209
9210 /* Naked functions don't have epilogues. */
9211 if (arm_naked_function_p (current_function_decl))
9212 return;
9213
9214 if (frame_pointer_needed)
9215 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
9216 else if (amount)
9217 {
9218 amount = ROUND_UP (amount);
9219
9220 if (amount < 512)
9221 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9222 GEN_INT (amount)));
9223 else
9224 {
9225 /* r3 is always free in the epilogue. */
9226 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
9227
9228 emit_insn (gen_movsi (reg, GEN_INT (amount)));
9229 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
9230 }
9231 }
9232
9233 /* Emit a USE (stack_pointer_rtx), so that
9234 the stack adjustment will not be deleted. */
9235 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9236
9237 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9238 emit_insn (gen_blockage ());
9239}
9240
9241void
9242output_thumb_prologue (f)
9243 FILE * f;
9244{
9245 int live_regs_mask = 0;
9246 int high_regs_pushed = 0;
9247 int store_arg_regs = 0;
9248 int regno;
9249
9250 if (arm_naked_function_p (current_function_decl))
9251 return;
9252
9253 if (is_called_in_ARM_mode (current_function_decl))
9254 {
9255 const char * name;
9256
9257 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9258 abort ();
9259 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9260 abort ();
9261 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9262
9263 /* Generate code sequence to switch us into Thumb mode. */
9264 /* The .code 32 directive has already been emitted by
6d77b53e 9265 ASM_DECLARE_FUNCTION_NAME. */
9266 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9267 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9268
9269 /* Generate a label, so that the debugger will notice the
9270 change in instruction sets. This label is also used by
9271 the assembler to bypass the ARM code when this function
9272 is called from a Thumb encoded function elsewhere in the
9273 same file. Hence the definition of STUB_NAME here must
9274 agree with the definition in gas/config/tc-arm.c */
9275
9276#define STUB_NAME ".real_start_of"
9277
9278 asm_fprintf (f, "\t.code\t16\n");
9279#ifdef ARM_PE
9280 if (arm_dllexport_name_p (name))
e5951263 9281 name = arm_strip_name_encoding (name);
9282#endif
9283 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9284 asm_fprintf (f, "\t.thumb_func\n");
9285 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9286 }
9287
9288 if (current_function_anonymous_args && current_function_pretend_args_size)
9289 store_arg_regs = 1;
9290
9291 if (current_function_pretend_args_size)
9292 {
9293 if (store_arg_regs)
9294 {
9295 int num_pushes;
9296
9297 asm_fprintf (f, "\tpush\t{");
9298
9299 num_pushes = NUM_INTS (current_function_pretend_args_size);
9300
9301 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9302 regno <= LAST_ARG_REGNUM;
5895f793 9303 regno++)
9304 asm_fprintf (f, "%r%s", regno,
9305 regno == LAST_ARG_REGNUM ? "" : ", ");
9306
9307 asm_fprintf (f, "}\n");
9308 }
9309 else
9310 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9311 SP_REGNUM, SP_REGNUM,
9312 current_function_pretend_args_size);
9313 }
9314
9315 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9316 if (regs_ever_live[regno] && !call_used_regs[regno]
9317 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9318 live_regs_mask |= 1 << regno;
9319
5895f793 9320 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
9321 live_regs_mask |= 1 << LR_REGNUM;
9322
9323 if (TARGET_BACKTRACE)
9324 {
9325 int offset;
9326 int work_register = 0;
9327 int wr;
9328
9329 /* We have been asked to create a stack backtrace structure.
9330 The code looks like this:
9331
9332 0 .align 2
9333 0 func:
9334 0 sub SP, #16 Reserve space for 4 registers.
9335 2 push {R7} Get a work register.
9336 4 add R7, SP, #20 Get the stack pointer before the push.
9337 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9338 8 mov R7, PC Get hold of the start of this code plus 12.
9339 10 str R7, [SP, #16] Store it.
9340 12 mov R7, FP Get hold of the current frame pointer.
9341 14 str R7, [SP, #4] Store it.
9342 16 mov R7, LR Get hold of the current return address.
9343 18 str R7, [SP, #12] Store it.
9344 20 add R7, SP, #16 Point at the start of the backtrace structure.
9345 22 mov FP, R7 Put this value into the frame pointer. */
9346
9347 if ((live_regs_mask & 0xFF) == 0)
9348 {
9349 /* See if the a4 register is free. */
9350
9351 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9352 work_register = LAST_ARG_REGNUM;
9353 else /* We must push a register of our own */
9354 live_regs_mask |= (1 << LAST_LO_REGNUM);
9355 }
9356
9357 if (work_register == 0)
9358 {
9359 /* Select a register from the list that will be pushed to
9360 use as our work register. */
9361 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9362 if ((1 << work_register) & live_regs_mask)
9363 break;
9364 }
9365
9366 asm_fprintf
9367 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9368 SP_REGNUM, SP_REGNUM);
9369
9370 if (live_regs_mask)
9371 thumb_pushpop (f, live_regs_mask, 1);
9372
9373 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9374 if (wr & live_regs_mask)
9375 offset += 4;
9376
9377 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9378 offset + 16 + current_function_pretend_args_size);
9379
9380 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9381 offset + 4);
9382
9383 /* Make sure that the instruction fetching the PC is in the right place
9384 to calculate "start of backtrace creation code + 12". */
9385 if (live_regs_mask)
9386 {
9387 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9388 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9389 offset + 12);
9390 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9391 ARM_HARD_FRAME_POINTER_REGNUM);
9392 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9393 offset);
9394 }
9395 else
9396 {
9397 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9398 ARM_HARD_FRAME_POINTER_REGNUM);
9399 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9400 offset);
9401 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9402 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9403 offset + 12);
9404 }
9405
9406 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9407 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9408 offset + 8);
9409 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9410 offset + 12);
9411 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9412 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9413 }
9414 else if (live_regs_mask)
9415 thumb_pushpop (f, live_regs_mask, 1);
9416
9417 for (regno = 8; regno < 13; regno++)
9418 {
9419 if (regs_ever_live[regno] && !call_used_regs[regno]
9420 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9421 high_regs_pushed++;
9422 }
9423
9424 if (high_regs_pushed)
9425 {
9426 int pushable_regs = 0;
9427 int mask = live_regs_mask & 0xff;
9428 int next_hi_reg;
9429
9430 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9431 {
9432 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9433 && !(TARGET_SINGLE_PIC_BASE
9434 && (next_hi_reg == arm_pic_register)))
9435 break;
9436 }
9437
9438 pushable_regs = mask;
9439
9440 if (pushable_regs == 0)
9441 {
9442 /* Desperation time -- this probably will never happen. */
9443 if (regs_ever_live[LAST_ARG_REGNUM]
5895f793 9444 || !call_used_regs[LAST_ARG_REGNUM])
9445 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9446 mask = 1 << LAST_ARG_REGNUM;
9447 }
9448
9449 while (high_regs_pushed > 0)
9450 {
9451 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
9452 {
9453 if (mask & (1 << regno))
9454 {
9455 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
9456
5895f793 9457 high_regs_pushed--;
9458
9459 if (high_regs_pushed)
9460 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
9461 next_hi_reg--)
9462 {
9463 if (regs_ever_live[next_hi_reg]
9464 && !call_used_regs[next_hi_reg]
9465 && !(TARGET_SINGLE_PIC_BASE
9466 && (next_hi_reg == arm_pic_register)))
9467 break;
9468 }
9469 else
9470 {
5895f793 9471 mask &= ~((1 << regno) - 1);
9472 break;
9473 }
9474 }
9475 }
9476
9477 thumb_pushpop (f, mask, 1);
9478 }
9479
9480 if (pushable_regs == 0
9481 && (regs_ever_live[LAST_ARG_REGNUM]
5895f793 9482 || !call_used_regs[LAST_ARG_REGNUM]))
9483 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9484 }
9485}
9486
9487/* Handle the case of a double word load into a low register from
9488 a computed memory address. The computed address may involve a
9489 register which is overwritten by the load. */
9490
cd2b33d0 9491const char *
9492thumb_load_double_from_address (operands)
9493 rtx * operands;
9494{
9495 rtx addr;
9496 rtx base;
9497 rtx offset;
9498 rtx arg1;
9499 rtx arg2;
9500
9501 if (GET_CODE (operands[0]) != REG)
9502 fatal ("thumb_load_double_from_address: destination is not a register");
9503
9504 if (GET_CODE (operands[1]) != MEM)
9505 {
9506 debug_rtx (operands[1]);
9507 fatal ("thumb_load_double_from_address: source is not a computed memory address");
9508 }
9509
9510 /* Get the memory address. */
9511 addr = XEXP (operands[1], 0);
9512
9513 /* Work out how the memory address is computed. */
9514 switch (GET_CODE (addr))
9515 {
9516 case REG:
9517 operands[2] = gen_rtx (MEM, SImode,
9518 plus_constant (XEXP (operands[1], 0), 4));
9519
9520 if (REGNO (operands[0]) == REGNO (addr))
9521 {
9522 output_asm_insn ("ldr\t%H0, %2", operands);
9523 output_asm_insn ("ldr\t%0, %1", operands);
9524 }
9525 else
9526 {
9527 output_asm_insn ("ldr\t%0, %1", operands);
9528 output_asm_insn ("ldr\t%H0, %2", operands);
9529 }
9530 break;
9531
9532 case CONST:
9533 /* Compute <address> + 4 for the high order load. */
9534 operands[2] = gen_rtx (MEM, SImode,
9535 plus_constant (XEXP (operands[1], 0), 4));
9536
9537 output_asm_insn ("ldr\t%0, %1", operands);
9538 output_asm_insn ("ldr\t%H0, %2", operands);
9539 break;
9540
9541 case PLUS:
9542 arg1 = XEXP (addr, 0);
9543 arg2 = XEXP (addr, 1);
9544
9545 if (CONSTANT_P (arg1))
9546 base = arg2, offset = arg1;
9547 else
9548 base = arg1, offset = arg2;
9549
9550 if (GET_CODE (base) != REG)
9551 fatal ("thumb_load_double_from_address: base is not a register");
9552
9553 /* Catch the case of <address> = <reg> + <reg> */
9554 if (GET_CODE (offset) == REG)
9555 {
9556 int reg_offset = REGNO (offset);
9557 int reg_base = REGNO (base);
9558 int reg_dest = REGNO (operands[0]);
9559
9560 /* Add the base and offset registers together into the
9561 higher destination register. */
9562 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
9563 reg_dest + 1, reg_base, reg_offset);
9564
9565 /* Load the lower destination register from the address in
9566 the higher destination register. */
9567 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
9568 reg_dest, reg_dest + 1);
9569
9570 /* Load the higher destination register from its own address
9571 plus 4. */
9572 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
9573 reg_dest + 1, reg_dest + 1);
9574 }
9575 else
9576 {
9577 /* Compute <address> + 4 for the high order load. */
9578 operands[2] = gen_rtx (MEM, SImode,
9579 plus_constant (XEXP (operands[1], 0), 4));
9580
9581 /* If the computed address is held in the low order register
9582 then load the high order register first, otherwise always
9583 load the low order register first. */
9584 if (REGNO (operands[0]) == REGNO (base))
9585 {
9586 output_asm_insn ("ldr\t%H0, %2", operands);
9587 output_asm_insn ("ldr\t%0, %1", operands);
9588 }
9589 else
9590 {
9591 output_asm_insn ("ldr\t%0, %1", operands);
9592 output_asm_insn ("ldr\t%H0, %2", operands);
9593 }
9594 }
9595 break;
9596
9597 case LABEL_REF:
9598 /* With no registers to worry about we can just load the value
9599 directly. */
9600 operands[2] = gen_rtx (MEM, SImode,
9601 plus_constant (XEXP (operands[1], 0), 4));
9602
9603 output_asm_insn ("ldr\t%H0, %2", operands);
9604 output_asm_insn ("ldr\t%0, %1", operands);
9605 break;
9606
9607 default:
9608 debug_rtx (operands[1]);
9609 fatal ("thumb_load_double_from_address: Unhandled address calculation");
9610 break;
9611 }
9612
9613 return "";
9614}
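/* For instance, with the destination in r2 and the address also held
   in r2 (the REG case with an overlap), the high word is fetched
   first so that the base is not clobbered before the second load:
        ldr     r3, [r2, #4]
        ldr     r2, [r2]
   This overlap test is what the REG and PLUS cases above implement.  */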
9615
9616
cd2b33d0 9617const char *
9618thumb_output_move_mem_multiple (n, operands)
9619 int n;
9620 rtx * operands;
9621{
9622 rtx tmp;
9623
9624 switch (n)
9625 {
9626 case 2:
ca356f3a 9627 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9628 {
9629 tmp = operands[4];
9630 operands[4] = operands[5];
9631 operands[5] = tmp;
d5b7b3ae 9632 }
9633 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
9634 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
9635 break;
9636
9637 case 3:
ca356f3a 9638 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9639 {
9640 tmp = operands[4];
9641 operands[4] = operands[5];
9642 operands[5] = tmp;
d5b7b3ae 9643 }
ca356f3a 9644 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 9645 {
9646 tmp = operands[5];
9647 operands[5] = operands[6];
9648 operands[6] = tmp;
d5b7b3ae 9649 }
ca356f3a 9650 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9651 {
9652 tmp = operands[4];
9653 operands[4] = operands[5];
9654 operands[5] = tmp;
9655 }
9656
9657 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
9658 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
9659 break;
9660
9661 default:
9662 abort ();
9663 }
9664
9665 return "";
9666}
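/* The swaps above exist because ldmia/stmia register lists must be in
   ascending register-number order; once the scratch registers are
   sorted, a single load-multiple/store-multiple pair does the copy,
   e.g. (register numbers illustrative):
        ldmia   r1!, {r3, r4, r5}
        stmia   r0!, {r3, r4, r5}  */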
9667
9668/* Routines for generating rtl */
9669
9670void
9671thumb_expand_movstrqi (operands)
9672 rtx * operands;
9673{
9674 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
9675 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
9676 HOST_WIDE_INT len = INTVAL (operands[2]);
9677 HOST_WIDE_INT offset = 0;
9678
9679 while (len >= 12)
9680 {
ca356f3a 9681 emit_insn (gen_movmem12b (out, in, out, in));
9682 len -= 12;
9683 }
9684
9685 if (len >= 8)
9686 {
ca356f3a 9687 emit_insn (gen_movmem8b (out, in, out, in));
9688 len -= 8;
9689 }
9690
9691 if (len >= 4)
9692 {
9693 rtx reg = gen_reg_rtx (SImode);
9694 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
9695 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
9696 len -= 4;
9697 offset += 4;
9698 }
9699
9700 if (len >= 2)
9701 {
9702 rtx reg = gen_reg_rtx (HImode);
9703 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
9704 plus_constant (in, offset))));
9705 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
9706 reg));
9707 len -= 2;
9708 offset += 2;
9709 }
9710
9711 if (len)
9712 {
9713 rtx reg = gen_reg_rtx (QImode);
9714 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
9715 plus_constant (in, offset))));
9716 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
9717 reg));
9718 }
9719}
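/* As a worked example, a 15-byte copy expands to one 12-byte block
   move (gen_movmem12b, which post-increments both pointers), then a
   halfword copy at offset 0 and a byte copy at offset 2.  */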
9720
9721int
9722thumb_cmp_operand (op, mode)
9723 rtx op;
9724 enum machine_mode mode;
9725{
9726 return ((GET_CODE (op) == CONST_INT
9727 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
9728 || register_operand (op, mode));
9729}
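/* This mirrors the Thumb CMP immediate encoding: an 8-bit unsigned
   constant or a register.  So a comparison against 255 can be emitted
   directly as "cmp rN, #255", while a comparison against 256 forces
   the constant into a register first.  */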
9730
cd2b33d0 9731static const char *
9732thumb_condition_code (x, invert)
9733 rtx x;
9734 int invert;
9735{
cd2b33d0 9736 static const char * conds[] =
9737 {
9738 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
9739 "hi", "ls", "ge", "lt", "gt", "le"
9740 };
9741 int val;
9742
9743 switch (GET_CODE (x))
9744 {
9745 case EQ: val = 0; break;
9746 case NE: val = 1; break;
9747 case GEU: val = 2; break;
9748 case LTU: val = 3; break;
9749 case GTU: val = 8; break;
9750 case LEU: val = 9; break;
9751 case GE: val = 10; break;
9752 case LT: val = 11; break;
9753 case GT: val = 12; break;
9754 case LE: val = 13; break;
9755 default:
9756 abort ();
9757 }
9758
9759 return conds[val ^ invert];
9760}
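/* The table is laid out so that each condition is adjacent to its
   logical inverse, which is why XOR-ing the index with INVERT works:
   e.g. GE selects "ge", and the same lookup with INVERT set yields
   "lt".  */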
9761
9762/* Handle storing a half-word to memory during reload. */
9763void
9764thumb_reload_out_hi (operands)
9765 rtx * operands;
9766{
9767 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
9768}
9769
9770/* Handle loading a half-word from memory during reload. */
9771void
9772thumb_reload_in_hi (operands)
9773 rtx * operands ATTRIBUTE_UNUSED;
9774{
9775 abort ();
9776}
9777
9778/* Return the length of a function name prefix
9779 that starts with the character 'c'. */
9780static int
9781arm_get_strip_length (char c)
9782{
9783 switch (c)
9784 {
9785 ARM_NAME_ENCODING_LENGTHS
9786 default: return 0;
9787 }
9788}
9789
9790/* Return a pointer to a function's name with any
9791 and all prefix encodings stripped from it. */
9792const char *
9793arm_strip_name_encoding (const char * name)
9794{
9795 int skip;
9796
9797 while ((skip = arm_get_strip_length (* name)))
9798 name += skip;
9799
9800 return name;
9801}
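/* For example, with a hypothetical ARM_NAME_ENCODING_LENGTHS entry
   mapping '*' to a length of 1, a NAME of "*foo" is returned as
   "foo"; since the loop repeats, stacked prefixes are all stripped.  */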
9802
2b835d68 9803#ifdef AOF_ASSEMBLER
6354dc9b 9804/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 9805
9806rtx aof_pic_label = NULL_RTX;
9807struct pic_chain
9808{
9809 struct pic_chain * next;
9810 char * symname;
9811};
9812
62b10bbc 9813static struct pic_chain * aof_pic_chain = NULL;
9814
9815rtx
9816aof_pic_entry (x)
9817 rtx x;
9818{
62b10bbc 9819 struct pic_chain ** chainp;
9820 int offset;
9821
9822 if (aof_pic_label == NULL_RTX)
9823 {
9824 /* We mark this here and not in arm_add_gc_roots() to avoid
9825 polluting even more code with ifdefs, and because it never
9826 contains anything useful until we assign to it here. */
5895f793 9827 ggc_add_rtx_root (&aof_pic_label, 1);
43cffd11 9828 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
9829 }
9830
9831 for (offset = 0, chainp = &aof_pic_chain; *chainp;
9832 offset += 4, chainp = &(*chainp)->next)
9833 if ((*chainp)->symname == XSTR (x, 0))
9834 return plus_constant (aof_pic_label, offset);
9835
9836 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
9837 (*chainp)->next = NULL;
9838 (*chainp)->symname = XSTR (x, 0);
9839 return plus_constant (aof_pic_label, offset);
9840}
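/* In effect, each distinct symbol gets one 4-byte slot in the
   x$adcons area: the first request for a symbol appends a chain
   entry, and every later request returns the same
   aof_pic_label + offset expression.  */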
9841
9842void
9843aof_dump_pic_table (f)
62b10bbc 9844 FILE * f;
32de079a 9845{
62b10bbc 9846 struct pic_chain * chain;
9847
9848 if (aof_pic_chain == NULL)
9849 return;
9850
9851 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
9852 PIC_OFFSET_TABLE_REGNUM,
9853 PIC_OFFSET_TABLE_REGNUM);
9854 fputs ("|x$adcons|\n", f);
9855
9856 for (chain = aof_pic_chain; chain; chain = chain->next)
9857 {
9858 fputs ("\tDCD\t", f);
9859 assemble_name (f, chain->symname);
9860 fputs ("\n", f);
9861 }
9862}
9863
9864int arm_text_section_count = 1;
9865
9866char *
84ed5e79 9867aof_text_section ()
9868{
9869 static char buf[100];
9870 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
9871 arm_text_section_count++);
9872 if (flag_pic)
9873 strcat (buf, ", PIC, REENTRANT");
9874 return buf;
9875}
9876
9877static int arm_data_section_count = 1;
9878
9879char *
9880aof_data_section ()
9881{
9882 static char buf[100];
9883 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
9884 return buf;
9885}
9886
9887/* The AOF assembler is religiously strict about declarations of
9888 imported and exported symbols, so that it is impossible to declare
956d6950 9889 a function as imported near the beginning of the file, and then to
9890 export it later on. It is, however, possible to delay the decision
9891 until all the functions in the file have been compiled. To get
9892 around this, we maintain a list of the imports and exports, and
9893 delete from it any that are subsequently defined. At the end of
9894 compilation we spit the remainder of the list out before the END
9895 directive. */
9896
9897struct import
9898{
9899 struct import * next;
9900 char * name;
9901};
9902
62b10bbc 9903static struct import * imports_list = NULL;
9904
9905void
9906aof_add_import (name)
62b10bbc 9907 char * name;
2b835d68 9908{
62b10bbc 9909 struct import * new;
9910
9911 for (new = imports_list; new; new = new->next)
9912 if (new->name == name)
9913 return;
9914
9915 new = (struct import *) xmalloc (sizeof (struct import));
9916 new->next = imports_list;
9917 imports_list = new;
9918 new->name = name;
9919}
9920
9921void
9922aof_delete_import (name)
62b10bbc 9923 char * name;
2b835d68 9924{
62b10bbc 9925 struct import ** old;
9926
9927 for (old = &imports_list; *old; old = & (*old)->next)
9928 {
9929 if ((*old)->name == name)
9930 {
9931 *old = (*old)->next;
9932 return;
9933 }
9934 }
9935}
9936
9937int arm_main_function = 0;
9938
9939void
9940aof_dump_imports (f)
62b10bbc 9941 FILE * f;
9942{
9943 /* The AOF assembler needs this to cause the startup code to be extracted
9944 from the library. Bringing in __main causes the whole thing to work
9945 automagically. */
9946 if (arm_main_function)
9947 {
9948 text_section ();
9949 fputs ("\tIMPORT __main\n", f);
9950 fputs ("\tDCD __main\n", f);
9951 }
9952
9953 /* Now dump the remaining imports. */
9954 while (imports_list)
9955 {
9956 fprintf (f, "\tIMPORT\t");
9957 assemble_name (f, imports_list->name);
9958 fputc ('\n', f);
9959 imports_list = imports_list->next;
9960 }
9961}
9962#endif /* AOF_ASSEMBLER */