/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "tm_p.h"

/* Forward definitions of types.  */
typedef struct minipool_node Mnode;
typedef struct minipool_fixup Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint HOST_WIDE_INT
#define Mmode enum machine_mode
#define Ulong unsigned long

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static int arm_naked_function_p PARAMS ((tree));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
static const char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static const char * output_multi_immediate PARAMS ((rtx *, const char *, const char *, int, Hint));
static void print_multi_reg PARAMS ((FILE *, const char *, int, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static const char * shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static const char * thumb_condition_code PARAMS ((rtx, int));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
\f
#undef Hint
#undef Mmode
#undef Ulong

/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char * minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)  /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)  /* Fast multiply.  */
#define FL_MODE26     (1 << 2)  /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)  /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)  /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)  /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)  /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)  /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)  /* StrongARM.  */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2", FL_CO_PROC | FL_MODE26 },
  {"arm250", FL_CO_PROC | FL_MODE26 },
  {"arm3", FL_CO_PROC | FL_MODE26 },
  {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610", FL_MODE26 | FL_MODE32 },
  {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710", FL_MODE26 | FL_MODE32 },
  {"arm720", FL_MODE26 | FL_MODE32 },
  {"arm710c", FL_MODE26 | FL_MODE32 },
  {"arm7100", FL_MODE26 | FL_MODE32 },
  {"arm7500", FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2", FL_CO_PROC | FL_MODE26 },
  { "armv2a", FL_CO_PROC | FL_MODE26 },
  { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name            processors  */
  { NULL, "-mcpu=", all_cores },
  { NULL, "-march=", all_architectures },
  { NULL, "-mtune=", all_cores }
};

/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
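
/* Example (a sketch of the loop above): each pass clears the lowest
   set bit of VALUE, so the body runs once per set bit.  For
   value == 0x29 (binary 101001):

     0x29 -> 0x28 -> 0x20 -> 0x00,  giving count == 3.  */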

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
	{
	  const struct processors * sel;

	  for (sel = ptr->processors; sel->name != NULL; sel++)
	    if (streq (ptr->string, sel->name))
	      {
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
	int cpu;
	const char * name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2, "arm2" },
	{ TARGET_CPU_arm6, "arm6" },
	{ TARGET_CPU_arm610, "arm610" },
	{ TARGET_CPU_arm710, "arm710" },
	{ TARGET_CPU_arm7m, "arm7m" },
	{ TARGET_CPU_arm7500fe, "arm7500fe" },
	{ TARGET_CPU_arm7tdmi, "arm7tdmi" },
	{ TARGET_CPU_arm8, "arm8" },
	{ TARGET_CPU_arm810, "arm810" },
	{ TARGET_CPU_arm9, "arm9" },
	{ TARGET_CPU_strongarm, "strongarm" },
	{ TARGET_CPU_generic, "arm" },
	{ 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
	 switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There are no ARM processors that support both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~FL_MODE26;
	}
      else if (!TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.  */
	  for (sel = all_cores; sel->name != NULL; sel++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned int current_bit_count = 0;
	      struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned int count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions.");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && !TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4 = (insn_flags & FL_ARCH4) != 0;
  arm_arch5 = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong = (tune_flags & FL_STRONG) != 0;
  thumb_code = (TARGET_ARM == 0);
  arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
		    && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FP_SOFT3;
      else
	fatal ("Invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (!flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
	  || pic_register == HARD_FRAME_POINTER_REGNUM
	  || pic_register == STACK_POINTER_REGNUM
	  || pic_register >= PC_REGNUM)
	error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
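
/* Example (sketch): the reconciliation above prefers the CPU over the
   architecture.  A command line such as

       gcc -mcpu=arm710 -march=armv4t ...

   keeps the arm710's flags (which lack FL_ARCH4 and FL_THUMB) and
   only warns that the -mcpu= switch conflicts with the -march=
   switch.  */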

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
\f
/* Return 1 if it is possible to return using a single instruction.  */
int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed
      /* Or if the function is variadic.  */
      || current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || cfun->machine->eh_epilogue_sp_ofs != NULL
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
	  && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
	if (regs_ever_live[regno] && !call_used_regs[regno])
	  return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
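
/* Example (sketch): once reload has run, a small leaf function such
   as

       int add (int a, int b) { return a + b; }

   passes every check above, so its epilogue collapses into a single
   return instruction.  */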

/* Return TRUE if int I is a valid immediate ARM constant.  */
int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~HOST_UINT (0xffffffff)) != 0
      && ((i & ~HOST_UINT (0xffffffff))
	  != ((~HOST_UINT (0))
	      & ~HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
	return TRUE;
      mask =
	(mask << 2) | ((mask & HOST_UINT (0xffffffff))
		       >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    } while (mask != ~HOST_UINT (0xFF));

  return FALSE;
}
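
/* Examples (sketch): the loop above slides an 8-bit window around the
   word two bits at a time, matching the ARM data-processing immediate
   format (an 8-bit value rotated right by an even amount):

     const_ok_for_arm (0xff)       => TRUE   (8-bit value, no rotation)
     const_ok_for_arm (0x3fc)      => TRUE   (0xff shifted left by 2)
     const_ok_for_arm (0xff000000) => TRUE   (0xff rotated right by 8)
     const_ok_for_arm (0x101)      => FALSE  (the set bits span 9 bits)  */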

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
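
/* Example (sketch): some constants that fail const_ok_for_arm still
   pass const_ok_for_op because the operation can be inverted.
   0xffffff00 is not a valid immediate, but its complement 0xff is,
   so const_ok_for_op (0xffffff00, AND) returns 1 and the AND can be
   emitted as a BIC with #0xff.  */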

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */
int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesise
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are dyadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx_MINUS (mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == HOST_UINT (0xffffffff))
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      if (remainder == HOST_UINT (0xffffffff))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
	{
	  temp1 = remainder & HOST_UINT (0xffff0000);
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i))
		    & HOST_UINT (0xffffffff)) == remainder)
		  && !const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && !const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0.  */
	  || (reload_completed && !reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do two or more insns' worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
				       << (32 - clear_sign_bit_copies))
				      & HOST_UINT (0xffffffff));

	  if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (!(remainder & (3 << i)))
	  {
	    while ((i < 32) && !(remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src;

		if (code == SET)
		  emit_insn (gen_rtx_SET (VOIDmode,
					  new_src = (subtargets
						     ? gen_reg_rtx (mode)
						     : target),
					  GEN_INT (can_invert
						   ? ~temp1 : temp1)));
		else if (code == MINUS)
		  emit_insn (gen_rtx_SET (VOIDmode,
					  new_src = (subtargets
						     ? gen_reg_rtx (mode)
						     : target),
					  gen_rtx (code, mode, GEN_INT (temp1),
						   source)));
		else
		  emit_insn (gen_rtx_SET (VOIDmode,
					  new_src = (remainder
						     ? (subtargets
							? gen_reg_rtx (mode)
							: target)
						     : target),
					  gen_rtx (code, mode, source,
						   GEN_INT (can_invert ? ~temp1
							    : (can_negate
							       ? -temp1
							       : temp1)))));
		source = new_src;
	      }

	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      } while (remainder);
  }

  return insns;
}
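
/* Example (sketch): the constant 0x00ff00ff is not a valid immediate,
   but it decomposes into two that are, so a SET of it costs two
   insns, roughly:

       mov     rD, #0x00ff0000
       orr     rD, rD, #0x000000ff

   arm_split_constant compares counts like this (computed with
   GENERATE clear) against arm_constant_limit when choosing between
   in-line synthesis and a load from the constant pool.  */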

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~(HOST_UINT (0))
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
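
/* Example (sketch): for integer operands, (GT x 0xfff) is equivalent
   to (GE x 0x1000).  0xfff is not a valid ARM immediate but 0x1000
   is, so the function above rewrites the comparison into the form
   whose constant can be loaded directly.  */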

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE

  if (int_size_in_bytes (type) > 4)
    /* All structures/unions bigger than one word are returned in memory.  */
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non-bit-field element
	 and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (!DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
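
/* Examples (sketch) of the APCS rules implemented above, for a
   non-wince target:

       struct s1 { int i; };         -- returned in a register
       struct s2 { int i; int j; };  -- in memory (bigger than one word)
       struct s3 { float f; };       -- in memory (float member)  */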

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_LONG;
    }
}
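
/* Example (sketch): when a function returns an aggregate in memory,
   e.g.

       struct big { int x[4]; } f (int a);

   the hidden return-value pointer occupies r0; that is why nregs
   starts at 1 above, moving the first real argument A into r1.  */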

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
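
/* Example (sketch, assuming the usual ARM value NUM_ARG_REGS == 4):
   the first four words of named arguments are passed in r0-r3 and
   the rest go on the stack, so for

       int f (int a, int b, int c, int d, int e);

   the arguments A..D travel in registers and E is pushed.  */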
\f
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,		/* No #pragma [no_]long_calls is in effect.  */
  LONG,		/* #pragma long_calls is in effect.  */
  SHORT		/* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}
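
/* Usage sketch for the three handlers above:

       #pragma long_calls
       void far_away (void);   -- will get the long_call attribute
       #pragma no_long_calls
       void nearby (void);     -- will get the short_call attribute
       #pragma long_calls_off
       void normal (void);     -- back to the command line default

   The attributes themselves are attached by
   arm_set_default_type_attributes below.  */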
8b97c5f8 1645
82e9d970
PB
1646\f
1647/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
1648 attribute for TYPE. The attributes in ATTRIBUTES have previously been
1649 assigned to TYPE. */
1650int
1651arm_valid_type_attribute_p (type, attributes, identifier, args)
1652 tree type;
1653 tree attributes ATTRIBUTE_UNUSED;
1654 tree identifier;
1655 tree args;
1656{
1657 if ( TREE_CODE (type) != FUNCTION_TYPE
1658 && TREE_CODE (type) != METHOD_TYPE
1659 && TREE_CODE (type) != FIELD_DECL
1660 && TREE_CODE (type) != TYPE_DECL)
1661 return 0;
1662
1663 /* Function calls made to this symbol must be done indirectly, because
1664 it may lie outside of the 26 bit addressing range of a normal function
1665 call. */
1666 if (is_attribute_p ("long_call", identifier))
1667 return (args == NULL_TREE);
c27ba912 1668
82e9d970
PB
1669 /* These functions, by contrast, are always known to reside within the
1670 26 bit addressing range.  */
1671 if (is_attribute_p ("short_call", identifier))
1672 return (args == NULL_TREE);
1673
1674 return 0;
1675}
1676
1677/* Return 0 if the attributes for two types are incompatible, 1 if they
1678 are compatible, and 2 if they are nearly compatible (which causes a
1679 warning to be generated). */
1680int
1681arm_comp_type_attributes (type1, type2)
1682 tree type1;
1683 tree type2;
1684{
1cb8d58a 1685 int l1, l2, s1, s2;
bd7fc26f 1686
82e9d970
PB
1687 /* Check for mismatch of non-default calling convention. */
1688 if (TREE_CODE (type1) != FUNCTION_TYPE)
1689 return 1;
1690
1691 /* Check for mismatched call attributes. */
1cb8d58a
NC
1692 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
1693 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
1694 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
1695 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
1696
1697 /* Only bother to check if an attribute is defined. */
1698 if (l1 | l2 | s1 | s2)
1699 {
1700 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 1701 if ((l1 != l2) || (s1 != s2))
bd7fc26f 1702 return 0;
82e9d970 1703
bd7fc26f
NC
1704 /* Disallow mixed attributes. */
1705 if ((l1 & s2) || (l2 & s1))
1706 return 0;
1707 }
1708
1709 return 1;
82e9d970
PB
1710}
1711
c27ba912
DM
1712/* Encode long_call or short_call attribute by prefixing
1713 symbol name in DECL with a special character FLAG. */
1714void
1715arm_encode_call_attribute (decl, flag)
1716 tree decl;
cd2b33d0 1717 int flag;
c27ba912 1718{
3cce094d 1719 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b
NC
1720 int len = strlen (str);
1721 char * newstr;
c27ba912
DM
1722
1723 if (TREE_CODE (decl) != FUNCTION_DECL)
1724 return;
1725
1726 /* Do not allow weak functions to be treated as short call. */
1727 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
1728 return;
1729
1f8f4a0b 1730 newstr = ggc_alloc_string (NULL, len + 2);
c27ba912
DM
1731
1732 sprintf (newstr, "%c%s", flag, str);
1733
1734 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
1735}
1736
1737/* Assigns default attributes to newly defined type. This is used to
1738 set short_call/long_call attributes for function types of
1739 functions defined inside corresponding #pragma scopes. */
1740void
1741arm_set_default_type_attributes (type)
1742 tree type;
1743{
1744 /* Add __attribute__ ((long_call)) to all functions, when
1745 inside #pragma long_calls or __attribute__ ((short_call)),
1746 when inside #pragma no_long_calls. */
1747 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1748 {
1749 tree type_attr_list, attr_name;
1750 type_attr_list = TYPE_ATTRIBUTES (type);
1751
1752 if (arm_pragma_long_calls == LONG)
1753 attr_name = get_identifier ("long_call");
1754 else if (arm_pragma_long_calls == SHORT)
1755 attr_name = get_identifier ("short_call");
1756 else
1757 return;
1758
1759 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
1760 TYPE_ATTRIBUTES (type) = type_attr_list;
1761 }
1762}
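/* Illustrative sketch, not part of the original source; the function
   names below are hypothetical:

       #pragma long_calls
       void far_func (void);
       #pragma no_long_calls
       void near_func (void);
       #pragma long_calls_off
       void plain_func (void);

   Under the handlers above, far_func's type receives the "long_call"
   attribute, near_func's type receives "short_call", and plain_func's
   type is left alone; arm_set_default_type_attributes performs the
   marking as each function type is created inside the corresponding
   pragma scope.  */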
1763\f
1764/* Return 1 if the operand is a SYMBOL_REF for a function known to be
1765 defined within the current compilation unit.  If this cannot be
1766 determined, then 0 is returned. */
1767static int
1768current_file_function_operand (sym_ref)
1769 rtx sym_ref;
1770{
1771 /* This is a bit of a fib. A function will have a short call flag
1772 applied to its name if it has the short call attribute, or it has
1773 already been defined within the current compilation unit. */
1774 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
1775 return 1;
1776
6d77b53e 1777 /* The current function is always defined within the current compilation
c27ba912
DM
1778 unit.  If it is a weak definition, however, then this may not be the real
1779 definition of the function, and so we have to say no.  */
1780 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
5895f793 1781 && !DECL_WEAK (current_function_decl))
c27ba912
DM
1782 return 1;
1783
1784 /* We cannot make the determination - default to returning 0. */
1785 return 0;
1786}
1787
1788/* Return non-zero if a 32 bit "long_call" should be generated for
1789 this call. We generate a long_call if the function:
1790
1791 a. has an __attribute__ ((long_call))
1792 or b. is within the scope of a #pragma long_calls
1793 or c. the -mlong-calls command line switch has been specified
1794
1795 However we do not generate a long call if the function:
1796
1797 d. has an __attribute__ ((short_call))
1798 or e. is inside the scope of a #pragma no_long_calls
1799 or f. has an __attribute__ ((section))
1800 or g. is defined within the current compilation unit.
1801
1802 This function will be called by C fragments contained in the machine
1803 description file. CALL_REF and CALL_COOKIE correspond to the matched
1804 rtl operands. CALL_SYMBOL is used to distinguish between
1805 two different callers of the function. It is set to 1 in the
1806 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
1807 and "call_value" patterns. This is because of the difference in the
1808 SYM_REFs passed by these patterns. */
1809int
1810arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
1811 rtx sym_ref;
1812 int call_cookie;
1813 int call_symbol;
1814{
5895f793 1815 if (!call_symbol)
c27ba912
DM
1816 {
1817 if (GET_CODE (sym_ref) != MEM)
1818 return 0;
1819
1820 sym_ref = XEXP (sym_ref, 0);
1821 }
1822
1823 if (GET_CODE (sym_ref) != SYMBOL_REF)
1824 return 0;
1825
1826 if (call_cookie & CALL_SHORT)
1827 return 0;
1828
1829 if (TARGET_LONG_CALLS && flag_function_sections)
1830 return 1;
1831
87e27392 1832 if (current_file_function_operand (sym_ref))
c27ba912
DM
1833 return 0;
1834
1835 return (call_cookie & CALL_LONG)
1836 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
1837 || TARGET_LONG_CALLS;
1838}
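/* Worked example, not part of the original source (the symbol names
   are hypothetical).  Compiling with -mlong-calls in effect:

       extern void ext_func (void);
       void __attribute__ ((short_call)) sc_func (void);
       static void local_func (void) {}
       void caller (void) { local_func (); }

   A call to ext_func gets the 32 bit sequence (rule c); a call to
   sc_func does not (rule d overrides the switch); and the call to
   local_func is a normal BL, because local_func is already defined
   in this compilation unit (rule g).  */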
f99fce0c
RE
1839
1840/* Return non-zero if it is ok to make a tail-call to DECL. */
1841int
1842arm_function_ok_for_sibcall (decl)
1843 tree decl;
1844{
1845 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
1846
1847 /* Never tailcall something for which we have no decl, or if we
1848 are in Thumb mode. */
1849 if (decl == NULL || TARGET_THUMB)
1850 return 0;
1851
1852 /* Get the calling method. */
1853 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
1854 call_type = CALL_SHORT;
1855 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
1856 call_type = CALL_LONG;
1857
1858 /* Cannot tail-call to long calls, since these are out of range of
1859 a branch instruction. However, if not compiling PIC, we know
1860 we can reach the symbol if it is in this compilation unit. */
5895f793 1861 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
f99fce0c
RE
1862 return 0;
1863
1864 /* If we are interworking and the function is not declared static
1865 then we can't tail-call it unless we know that it exists in this
1866 compilation unit (since it might be a Thumb routine). */
5895f793 1867 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
f99fce0c
RE
1868 return 0;
1869
1870 /* Everything else is ok. */
1871 return 1;
1872}
1873
82e9d970 1874\f
32de079a
RE
1875int
1876legitimate_pic_operand_p (x)
1877 rtx x;
1878{
d5b7b3ae
RE
1879 if (CONSTANT_P (x)
1880 && flag_pic
32de079a
RE
1881 && (GET_CODE (x) == SYMBOL_REF
1882 || (GET_CODE (x) == CONST
1883 && GET_CODE (XEXP (x, 0)) == PLUS
1884 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1885 return 0;
1886
1887 return 1;
1888}
1889
1890rtx
1891legitimize_pic_address (orig, mode, reg)
1892 rtx orig;
1893 enum machine_mode mode;
1894 rtx reg;
1895{
1896 if (GET_CODE (orig) == SYMBOL_REF)
1897 {
1898 rtx pic_ref, address;
1899 rtx insn;
1900 int subregs = 0;
1901
1902 if (reg == 0)
1903 {
893f3d5b 1904 if (no_new_pseudos)
32de079a
RE
1905 abort ();
1906 else
1907 reg = gen_reg_rtx (Pmode);
1908
1909 subregs = 1;
1910 }
1911
1912#ifdef AOF_ASSEMBLER
1913 /* The AOF assembler can generate relocations for these directly, and
6354dc9b 1914 understands that the PIC register has to be added into the offset. */
32de079a
RE
1915 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
1916#else
1917 if (subregs)
1918 address = gen_reg_rtx (Pmode);
1919 else
1920 address = reg;
1921
1922 emit_insn (gen_pic_load_addr (address, orig));
1923
43cffd11
RE
1924 pic_ref = gen_rtx_MEM (Pmode,
1925 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1926 address));
32de079a
RE
1927 RTX_UNCHANGING_P (pic_ref) = 1;
1928 insn = emit_move_insn (reg, pic_ref);
1929#endif
1930 current_function_uses_pic_offset_table = 1;
1931 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1932 by the loop optimizer.  */
43cffd11
RE
1933 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
1934 REG_NOTES (insn));
32de079a
RE
1935 return reg;
1936 }
1937 else if (GET_CODE (orig) == CONST)
1938 {
1939 rtx base, offset;
1940
1941 if (GET_CODE (XEXP (orig, 0)) == PLUS
1942 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
1943 return orig;
1944
1945 if (reg == 0)
1946 {
893f3d5b 1947 if (no_new_pseudos)
32de079a
RE
1948 abort ();
1949 else
1950 reg = gen_reg_rtx (Pmode);
1951 }
1952
1953 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1954 {
1955 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
1956 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1957 base == reg ? 0 : reg);
1958 }
1959 else
1960 abort ();
1961
1962 if (GET_CODE (offset) == CONST_INT)
1963 {
1964 /* The base register doesn't really matter, we only want to
1965 test the index for the appropriate mode. */
1966 GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
1967
5895f793 1968 if (!no_new_pseudos)
32de079a
RE
1969 offset = force_reg (Pmode, offset);
1970 else
1971 abort ();
1972
1973 win:
1974 if (GET_CODE (offset) == CONST_INT)
1975 return plus_constant_for_output (base, INTVAL (offset));
1976 }
1977
1978 if (GET_MODE_SIZE (mode) > 4
1979 && (GET_MODE_CLASS (mode) == MODE_INT
1980 || TARGET_SOFT_FLOAT))
1981 {
1982 emit_insn (gen_addsi3 (reg, base, offset));
1983 return reg;
1984 }
1985
43cffd11 1986 return gen_rtx_PLUS (Pmode, base, offset);
32de079a
RE
1987 }
1988 else if (GET_CODE (orig) == LABEL_REF)
82e9d970
PB
1989 {
1990 current_function_uses_pic_offset_table = 1;
1991
1992 if (NEED_GOT_RELOC)
d5b7b3ae
RE
1993 {
1994 rtx pic_ref, address = gen_reg_rtx (Pmode);
1995
1996 emit_insn (gen_pic_load_addr (address, orig));
1997 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
1998
1999 emit_move_insn (address, pic_ref);
2000 return address;
2001 }
82e9d970 2002 }
32de079a
RE
2003
2004 return orig;
2005}
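/* Sketch of the non-AOF code built above for a global symbol under
   -fpic; illustrative only, not part of the original source.  "sl"
   stands for whichever register holds pic_offset_table_rtx:

       ldr     rA, .Loff       @ gen_pic_load_addr: the symbol's GOT
                               @ offset, loaded from the literal pool
       ldr     rD, [sl, rA]    @ the MEM built from pic_offset_table_rtx
                               @ plus rA: fetches the symbol's address

   The REG_EQUAL note added afterwards records that rD equals the
   original symbol, so the loop optimizer can hoist the load.  */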
2006
2007static rtx pic_rtx;
2008
2009int
62b10bbc 2010is_pic (x)
32de079a
RE
2011 rtx x;
2012{
2013 if (x == pic_rtx)
2014 return 1;
2015 return 0;
2016}
2017
2018void
2019arm_finalize_pic ()
2020{
2021#ifndef AOF_ASSEMBLER
2022 rtx l1, pic_tmp, pic_tmp2, seq;
2023 rtx global_offset_table;
2024
ed0e6530 2025 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
2026 return;
2027
5895f793 2028 if (!flag_pic)
32de079a
RE
2029 abort ();
2030
2031 start_sequence ();
2032 l1 = gen_label_rtx ();
2033
43cffd11 2034 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768 2035 /* On the ARM the PC register contains 'dot + 8' at the time of the
d5b7b3ae
RE
2036 addition, on the Thumb it is 'dot + 4'. */
2037 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
84306176
PB
2038 if (GOT_PCREL)
2039 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 2040 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
2041 else
2042 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
2043
2044 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 2045
32de079a 2046 emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
d5b7b3ae
RE
2047 if (TARGET_ARM)
2048 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2049 else
2050 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
32de079a
RE
2051
2052 seq = gen_sequence ();
2053 end_sequence ();
2054 emit_insn_after (seq, get_insns ());
2055
2056 /* Need to emit this whether or not we obey regdecls,
2057 since setjmp/longjmp can cause life info to screw up. */
43cffd11 2058 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
2059#endif /* AOF_ASSEMBLER */
2060}
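/* The sequence emitted above corresponds roughly to this assembly;
   a sketch, not part of the original source (label names invented):

           ldr     sl, .Lpool      @ pic_load_addr
       .Lbase:
           add     sl, pc, sl      @ pic_add_dot_plus_eight
           ...
       .Lpool:
           .word   _GLOBAL_OFFSET_TABLE_ - (.Lbase + 8)

   The +8 reflects the PC reading 'dot + 8' on ARM; the Thumb variant
   uses pic_add_dot_plus_four instead.  */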
2061
e2c671ba
RE
2062#define REG_OR_SUBREG_REG(X) \
2063 (GET_CODE (X) == REG \
2064 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2065
2066#define REG_OR_SUBREG_RTX(X) \
2067 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2068
d5b7b3ae
RE
2069#ifndef COSTS_N_INSNS
2070#define COSTS_N_INSNS(N) ((N) * 4 - 2)
2071#endif
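/* Worked example of the fallback definition, added for clarity and
   not part of the original source: COSTS_N_INSNS (1) == 2,
   COSTS_N_INSNS (2) == 6 and COSTS_N_INSNS (3) == 10, so each
   additional insn adds 4 to the cost.  */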
e2c671ba
RE
2072
2073int
d5b7b3ae 2074arm_rtx_costs (x, code, outer)
e2c671ba 2075 rtx x;
74bbc178 2076 enum rtx_code code;
d5b7b3ae 2077 enum rtx_code outer;
e2c671ba
RE
2078{
2079 enum machine_mode mode = GET_MODE (x);
2080 enum rtx_code subcode;
2081 int extra_cost;
2082
d5b7b3ae
RE
2083 if (TARGET_THUMB)
2084 {
2085 switch (code)
2086 {
2087 case ASHIFT:
2088 case ASHIFTRT:
2089 case LSHIFTRT:
2090 case ROTATERT:
2091 case PLUS:
2092 case MINUS:
2093 case COMPARE:
2094 case NEG:
2095 case NOT:
2096 return COSTS_N_INSNS (1);
2097
2098 case MULT:
2099 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2100 {
2101 int cycles = 0;
2102 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2103
2104 while (i)
2105 {
2106 i >>= 2;
5895f793 2107 cycles++;
d5b7b3ae
RE
2108 }
2109 return COSTS_N_INSNS (2) + cycles;
2110 }
2111 return COSTS_N_INSNS (1) + 16;
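	/* Worked example for the constant case above, added for clarity
	   and not part of the original source: multiplying by 100
	   (binary 1100100) shifts 100 -> 25 -> 6 -> 1 -> 0, so cycles
	   == 4 and the returned cost is COSTS_N_INSNS (2) + 4,
	   modelling a multiplier that retires two bits of the constant
	   per cycle.  */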
2112
2113 case SET:
2114 return (COSTS_N_INSNS (1)
2115 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2116 + (GET_CODE (SET_DEST (x)) == MEM)));
2117
2118 case CONST_INT:
2119 if (outer == SET)
2120 {
2121 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2122 return 0;
2123 if (thumb_shiftable_const (INTVAL (x)))
2124 return COSTS_N_INSNS (2);
2125 return COSTS_N_INSNS (3);
2126 }
2127 else if (outer == PLUS
2128 && INTVAL (x) < 256 && INTVAL (x) > -256)
2129 return 0;
2130 else if (outer == COMPARE
2131 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2132 return 0;
2133 else if (outer == ASHIFT || outer == ASHIFTRT
2134 || outer == LSHIFTRT)
2135 return 0;
2136 return COSTS_N_INSNS (2);
2137
2138 case CONST:
2139 case CONST_DOUBLE:
2140 case LABEL_REF:
2141 case SYMBOL_REF:
2142 return COSTS_N_INSNS (3);
2143
2144 case UDIV:
2145 case UMOD:
2146 case DIV:
2147 case MOD:
2148 return 100;
2149
2150 case TRUNCATE:
2151 return 99;
2152
2153 case AND:
2154 case XOR:
2155 case IOR:
2156 /* XXX guess. */
2157 return 8;
2158
2159 case ADDRESSOF:
2160 case MEM:
2161 /* XXX another guess. */
2162 /* Memory costs quite a lot for the first word, but subsequent words
2163 load at the equivalent of a single insn each. */
2164 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2165 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2166
2167 case IF_THEN_ELSE:
2168 /* XXX a guess. */
2169 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2170 return 14;
2171 return 2;
2172
2173 case ZERO_EXTEND:
2174 /* XXX still guessing. */
2175 switch (GET_MODE (XEXP (x, 0)))
2176 {
2177 case QImode:
2178 return (1 + (mode == DImode ? 4 : 0)
2179 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2180
2181 case HImode:
2182 return (4 + (mode == DImode ? 4 : 0)
2183 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2184
2185 case SImode:
2186 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2187
2188 default:
2189 return 99;
2190 }
2191
2192 default:
2193 return 99;
2194#if 0
2195 case FFS:
2196 case FLOAT:
2197 case FIX:
2198 case UNSIGNED_FIX:
2199 /* XXX guess */
2200 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2201 rtx_name[code]);
2202 abort ();
2203#endif
2204 }
2205 }
2206
e2c671ba
RE
2207 switch (code)
2208 {
2209 case MEM:
2210 /* Memory costs quite a lot for the first word, but subsequent words
2211 load at the equivalent of a single insn each. */
2212 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2213 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2214
2215 case DIV:
2216 case MOD:
2217 return 100;
2218
2219 case ROTATE:
2220 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2221 return 4;
2222 /* Fall through */
2223 case ROTATERT:
2224 if (mode != SImode)
2225 return 8;
2226 /* Fall through */
2227 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2228 if (mode == DImode)
2229 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2230 + ((GET_CODE (XEXP (x, 0)) == REG
2231 || (GET_CODE (XEXP (x, 0)) == SUBREG
2232 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2233 ? 0 : 8));
2234 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2235 || (GET_CODE (XEXP (x, 0)) == SUBREG
2236 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2237 ? 0 : 4)
2238 + ((GET_CODE (XEXP (x, 1)) == REG
2239 || (GET_CODE (XEXP (x, 1)) == SUBREG
2240 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2241 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2242 ? 0 : 4));
2243
2244 case MINUS:
2245 if (mode == DImode)
2246 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2247 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2248 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2249 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2250 ? 0 : 8));
2251
2252 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2253 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2254 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2255 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2256 ? 0 : 8)
2257 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2258 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2259 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2260 ? 0 : 8));
2261
2262 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2263 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2264 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2265 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2266 || subcode == ASHIFTRT || subcode == LSHIFTRT
2267 || subcode == ROTATE || subcode == ROTATERT
2268 || (subcode == MULT
2269 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2270 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2271 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2272 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2273 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2274 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2275 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2276 return 1;
2277 /* Fall through */
2278
2279 case PLUS:
2280 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2281 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2282 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2283 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2284 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2285 ? 0 : 8));
2286
2287 /* Fall through */
2288 case AND: case XOR: case IOR:
2289 extra_cost = 0;
2290
2291 /* Normally the frame registers will be split into reg+const during
2292 reload, so it is a bad idea to combine them with other instructions,
2293 since then they might not be moved outside of loops. As a compromise
2294 we allow integration with ops that have a constant as their second
2295 operand. */
2296 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2297 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2298 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2299 || (REG_OR_SUBREG_REG (XEXP (x, 1))
2300 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
2301 extra_cost = 4;
2302
2303 if (mode == DImode)
2304 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2305 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2306 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2307 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2308 ? 0 : 8));
2309
2310 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2311 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2312 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2313 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2314 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2315 ? 0 : 4));
2316
2317 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2318 return (1 + extra_cost
2319 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2320 || subcode == LSHIFTRT || subcode == ASHIFTRT
2321 || subcode == ROTATE || subcode == ROTATERT
2322 || (subcode == MULT
2323 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2324 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 2325 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
2326 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2327 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 2328 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
2329 ? 0 : 4));
2330
2331 return 8;
2332
2333 case MULT:
b111229a 2334 /* There is no point basing this on the tuning, since it is always the
6354dc9b 2335 fast variant if it exists at all. */
2b835d68
RE
2336 if (arm_fast_multiply && mode == DImode
2337 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2338 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2339 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2340 return 8;
2341
e2c671ba
RE
2342 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2343 || mode == DImode)
2344 return 30;
2345
2346 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2347 {
2b835d68 2348 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
e5951263 2349 & HOST_UINT (0xffffffff));
e2c671ba
RE
2350 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2351 int j;
6354dc9b
NC
2352
2353 /* Tune as appropriate. */
aec3cfba 2354 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 2355
2b835d68 2356 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 2357 {
2b835d68 2358 i >>= booth_unit_size;
e2c671ba
RE
2359 add_cost += 2;
2360 }
2361
2362 return add_cost;
2363 }
2364
aec3cfba 2365 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 2366 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
2367 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2368
56636818
JL
2369 case TRUNCATE:
2370 if (arm_fast_multiply && mode == SImode
2371 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2372 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2373 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2374 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2375 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2376 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2377 return 8;
2378 return 99;
2379
e2c671ba
RE
2380 case NEG:
2381 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2382 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2383 /* Fall through */
2384 case NOT:
2385 if (mode == DImode)
2386 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2387
2388 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2389
2390 case IF_THEN_ELSE:
2391 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2392 return 14;
2393 return 2;
2394
2395 case COMPARE:
2396 return 1;
2397
2398 case ABS:
2399 return 4 + (mode == DImode ? 4 : 0);
2400
2401 case SIGN_EXTEND:
2402 if (GET_MODE (XEXP (x, 0)) == QImode)
2403 return (4 + (mode == DImode ? 4 : 0)
2404 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2405 /* Fall through */
2406 case ZERO_EXTEND:
2407 switch (GET_MODE (XEXP (x, 0)))
2408 {
2409 case QImode:
2410 return (1 + (mode == DImode ? 4 : 0)
2411 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2412
2413 case HImode:
2414 return (4 + (mode == DImode ? 4 : 0)
2415 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2416
2417 case SImode:
2418 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
2419
2420 default:
2421 break;
e2c671ba
RE
2422 }
2423 abort ();
2424
d5b7b3ae
RE
2425 case CONST_INT:
2426 if (const_ok_for_arm (INTVAL (x)))
2427 return outer == SET ? 2 : -1;
2428 else if (outer == AND
5895f793 2429 && const_ok_for_arm (~INTVAL (x)))
d5b7b3ae
RE
2430 return -1;
2431 else if ((outer == COMPARE
2432 || outer == PLUS || outer == MINUS)
5895f793 2433 && const_ok_for_arm (-INTVAL (x)))
d5b7b3ae
RE
2434 return -1;
2435 else
2436 return 5;
2437
2438 case CONST:
2439 case LABEL_REF:
2440 case SYMBOL_REF:
2441 return 6;
2442
2443 case CONST_DOUBLE:
2444 if (const_double_rtx_ok_for_fpu (x))
2445 return outer == SET ? 2 : -1;
2446 else if ((outer == COMPARE || outer == PLUS)
2447 && neg_const_double_rtx_ok_for_fpu (x))
2448 return -1;
2449 return 7;
2450
e2c671ba
RE
2451 default:
2452 return 99;
2453 }
2454}
32de079a
RE
2455
2456int
2457arm_adjust_cost (insn, link, dep, cost)
2458 rtx insn;
2459 rtx link;
2460 rtx dep;
2461 int cost;
2462{
2463 rtx i_pat, d_pat;
2464
6354dc9b 2465 /* XXX This is not strictly true for the FPA. */
d5b7b3ae
RE
2466 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2467 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
b36ba79f
RE
2468 return 0;
2469
d5b7b3ae
RE
2470 /* Call insns don't incur a stall, even if they follow a load. */
2471 if (REG_NOTE_KIND (link) == 0
2472 && GET_CODE (insn) == CALL_INSN)
2473 return 1;
2474
32de079a
RE
2475 if ((i_pat = single_set (insn)) != NULL
2476 && GET_CODE (SET_SRC (i_pat)) == MEM
2477 && (d_pat = single_set (dep)) != NULL
2478 && GET_CODE (SET_DEST (d_pat)) == MEM)
2479 {
2480 /* This is a load after a store, there is no conflict if the load reads
2481 from a cached area. Assume that loads from the stack, and from the
2482 constant pool are cached, and that others will miss. This is a
6354dc9b 2483 hack. */
32de079a 2484
32de079a
RE
2485 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2486 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2487 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2488 || reg_mentioned_p (hard_frame_pointer_rtx,
2489 XEXP (SET_SRC (i_pat), 0)))
949d79eb 2490 return 1;
32de079a
RE
2491 }
2492
2493 return cost;
2494}
2495
6354dc9b 2496/* This code has been fixed for cross compilation. */
ff9940b0
RE
2497
2498static int fpa_consts_inited = 0;
2499
cd2b33d0 2500static const char * strings_fpa[8] =
62b10bbc 2501{
2b835d68
RE
2502 "0", "1", "2", "3",
2503 "4", "5", "0.5", "10"
2504};
ff9940b0
RE
2505
2506static REAL_VALUE_TYPE values_fpa[8];
2507
2508static void
2509init_fpa_table ()
2510{
2511 int i;
2512 REAL_VALUE_TYPE r;
2513
2514 for (i = 0; i < 8; i++)
2515 {
2516 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2517 values_fpa[i] = r;
2518 }
f3bb6135 2519
ff9940b0
RE
2520 fpa_consts_inited = 1;
2521}
2522
6354dc9b 2523/* Return TRUE if rtx X is a valid immediate FPU constant. */
cce8749e
CH
2524
2525int
2526const_double_rtx_ok_for_fpu (x)
2527 rtx x;
2528{
ff9940b0
RE
2529 REAL_VALUE_TYPE r;
2530 int i;
2531
2532 if (!fpa_consts_inited)
2533 init_fpa_table ();
2534
2535 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2536 if (REAL_VALUE_MINUS_ZERO (r))
2537 return 0;
f3bb6135 2538
ff9940b0
RE
2539 for (i = 0; i < 8; i++)
2540 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2541 return 1;
f3bb6135 2542
ff9940b0 2543 return 0;
f3bb6135 2544}
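/* For example (illustration only, not part of the original source):
   2.0 and 0.5 match entries in values_fpa[] and so are valid FPA
   immediates, whereas 0.3 and -0.0 do not, and must be loaded from
   the constant pool instead.  */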
ff9940b0 2545
6354dc9b 2546/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */
ff9940b0
RE
2547
2548int
2549neg_const_double_rtx_ok_for_fpu (x)
2550 rtx x;
2551{
2552 REAL_VALUE_TYPE r;
2553 int i;
2554
2555 if (!fpa_consts_inited)
2556 init_fpa_table ();
2557
2558 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2559 r = REAL_VALUE_NEGATE (r);
2560 if (REAL_VALUE_MINUS_ZERO (r))
2561 return 0;
f3bb6135 2562
ff9940b0
RE
2563 for (i = 0; i < 8; i++)
2564 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2565 return 1;
f3bb6135 2566
ff9940b0 2567 return 0;
f3bb6135 2568}
cce8749e
CH
2569\f
2570/* Predicates for `match_operand' and `match_operator'. */
2571
ff9940b0 2572/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
2573 (SUBREG (MEM)...).
2574
2575 This function exists because at the time it was put in it led to better
2576 code. SUBREG(MEM) always needs a reload in the places where
2577 s_register_operand is used, and this seemed to lead to excessive
2578 reloading. */
ff9940b0
RE
2579
2580int
2581s_register_operand (op, mode)
2582 register rtx op;
2583 enum machine_mode mode;
2584{
2585 if (GET_MODE (op) != mode && mode != VOIDmode)
2586 return 0;
2587
2588 if (GET_CODE (op) == SUBREG)
f3bb6135 2589 op = SUBREG_REG (op);
ff9940b0
RE
2590
2591 /* We don't consider registers whose class is NO_REGS
2592 to be a register operand. */
d5b7b3ae 2593 /* XXX might have to check for lo regs only for thumb ??? */
ff9940b0
RE
2594 return (GET_CODE (op) == REG
2595 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2596 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2597}
2598
e2c671ba
RE
2599/* Only accept reg, subreg(reg), const_int. */
2600
2601int
2602reg_or_int_operand (op, mode)
2603 register rtx op;
2604 enum machine_mode mode;
2605{
2606 if (GET_CODE (op) == CONST_INT)
2607 return 1;
2608
2609 if (GET_MODE (op) != mode && mode != VOIDmode)
2610 return 0;
2611
2612 if (GET_CODE (op) == SUBREG)
2613 op = SUBREG_REG (op);
2614
2615 /* We don't consider registers whose class is NO_REGS
2616 to be a register operand. */
2617 return (GET_CODE (op) == REG
2618 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2619 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2620}
2621
ff9940b0
RE
2622/* Return 1 if OP is an item in memory, given that we are in reload. */
2623
2624int
d5b7b3ae 2625arm_reload_memory_operand (op, mode)
ff9940b0 2626 rtx op;
74bbc178 2627 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2628{
2629 int regno = true_regnum (op);
2630
5895f793 2631 return (!CONSTANT_P (op)
ff9940b0
RE
2632 && (regno == -1
2633 || (GET_CODE (op) == REG
2634 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2635}
2636
4d818c85 2637/* Return 1 if OP is a valid memory address, but not valid for a signed byte
d5b7b3ae
RE
2638 memory access (architecture V4).
2639 MODE is QImode if called when computing constraints, or VOIDmode when
2640 emitting patterns.  In this latter case we cannot use memory_operand()
2641 because it will fail on badly formed MEMs, which is precisely what we are
2642 trying to catch. */
4d818c85
RE
2643int
2644bad_signed_byte_operand (op, mode)
2645 rtx op;
d5b7b3ae 2646 enum machine_mode mode ATTRIBUTE_UNUSED;
4d818c85 2647{
d5b7b3ae 2648#if 0
5895f793 2649 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
d5b7b3ae
RE
2650 return 0;
2651#endif
2652 if (GET_CODE (op) != MEM)
4d818c85
RE
2653 return 0;
2654
2655 op = XEXP (op, 0);
2656
6354dc9b 2657 /* A sum of anything more complex than reg + reg or reg + const is bad. */
4d818c85 2658 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
5895f793
RE
2659 && (!s_register_operand (XEXP (op, 0), VOIDmode)
2660 || (!s_register_operand (XEXP (op, 1), VOIDmode)
9c8cc54f 2661 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
2662 return 1;
2663
6354dc9b 2664 /* Big constants are also bad. */
4d818c85
RE
2665 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2666 && (INTVAL (XEXP (op, 1)) > 0xff
2667 || -INTVAL (XEXP (op, 1)) > 0xff))
2668 return 1;
2669
6354dc9b 2670 /* Everything else is good, or will automatically be made so.  */
4d818c85
RE
2671 return 0;
2672}
2673
cce8749e
CH
2674/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2675
2676int
2677arm_rhs_operand (op, mode)
2678 rtx op;
2679 enum machine_mode mode;
2680{
ff9940b0 2681 return (s_register_operand (op, mode)
cce8749e 2682 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2683}
cce8749e 2684
ff9940b0
RE
2685/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
2686 */
2687
2688int
2689arm_rhsm_operand (op, mode)
2690 rtx op;
2691 enum machine_mode mode;
2692{
2693 return (s_register_operand (op, mode)
2694 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2695 || memory_operand (op, mode));
f3bb6135 2696}
ff9940b0
RE
2697
2698/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
2699 constant that is valid when negated. */
2700
2701int
2702arm_add_operand (op, mode)
2703 rtx op;
2704 enum machine_mode mode;
2705{
d5b7b3ae
RE
2706 if (TARGET_THUMB)
2707 return thumb_cmp_operand (op, mode);
2708
ff9940b0
RE
2709 return (s_register_operand (op, mode)
2710 || (GET_CODE (op) == CONST_INT
2711 && (const_ok_for_arm (INTVAL (op))
2712 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2713}
ff9940b0
RE
2714
2715int
2716arm_not_operand (op, mode)
2717 rtx op;
2718 enum machine_mode mode;
2719{
2720 return (s_register_operand (op, mode)
2721 || (GET_CODE (op) == CONST_INT
2722 && (const_ok_for_arm (INTVAL (op))
2723 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 2724}
ff9940b0 2725
5165176d
RE
2726/* Return TRUE if the operand is a memory reference which contains an
2727 offsettable address. */
2728int
2729offsettable_memory_operand (op, mode)
2730 register rtx op;
2731 enum machine_mode mode;
2732{
2733 if (mode == VOIDmode)
2734 mode = GET_MODE (op);
2735
2736 return (mode == GET_MODE (op)
2737 && GET_CODE (op) == MEM
2738 && offsettable_address_p (reload_completed | reload_in_progress,
2739 mode, XEXP (op, 0)));
2740}
2741
2742/* Return TRUE if the operand is a memory reference which is, or can be
2743 made word aligned by adjusting the offset. */
2744int
2745alignable_memory_operand (op, mode)
2746 register rtx op;
2747 enum machine_mode mode;
2748{
2749 rtx reg;
2750
2751 if (mode == VOIDmode)
2752 mode = GET_MODE (op);
2753
2754 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2755 return 0;
2756
2757 op = XEXP (op, 0);
2758
2759 return ((GET_CODE (reg = op) == REG
2760 || (GET_CODE (op) == SUBREG
2761 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2762 || (GET_CODE (op) == PLUS
2763 && GET_CODE (XEXP (op, 1)) == CONST_INT
2764 && (GET_CODE (reg = XEXP (op, 0)) == REG
2765 || (GET_CODE (XEXP (op, 0)) == SUBREG
2766 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
bdb429a5 2767 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
5165176d
RE
2768}
2769
b111229a
RE
2770/* Similar to s_register_operand, but does not allow hard integer
2771 registers. */
2772int
2773f_register_operand (op, mode)
2774 register rtx op;
2775 enum machine_mode mode;
2776{
2777 if (GET_MODE (op) != mode && mode != VOIDmode)
2778 return 0;
2779
2780 if (GET_CODE (op) == SUBREG)
2781 op = SUBREG_REG (op);
2782
2783 /* We don't consider registers whose class is NO_REGS
2784 to be a register operand. */
2785 return (GET_CODE (op) == REG
2786 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2787 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2788}
2789
cce8749e
CH
2790/* Return TRUE for valid operands for the rhs of an FPU instruction. */
2791
2792int
2793fpu_rhs_operand (op, mode)
2794 rtx op;
2795 enum machine_mode mode;
2796{
ff9940b0 2797 if (s_register_operand (op, mode))
f3bb6135 2798 return TRUE;
9ce71c6f
BS
2799
2800 if (GET_MODE (op) != mode && mode != VOIDmode)
2801 return FALSE;
2802
2803 if (GET_CODE (op) == CONST_DOUBLE)
2804 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
2805
2806 return FALSE;
2807}
cce8749e 2808
ff9940b0
RE
2809int
2810fpu_add_operand (op, mode)
2811 rtx op;
2812 enum machine_mode mode;
2813{
2814 if (s_register_operand (op, mode))
f3bb6135 2815 return TRUE;
9ce71c6f
BS
2816
2817 if (GET_MODE (op) != mode && mode != VOIDmode)
2818 return FALSE;
2819
2820 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
2821 return (const_double_rtx_ok_for_fpu (op)
2822 || neg_const_double_rtx_ok_for_fpu (op));
2823
2824 return FALSE;
ff9940b0
RE
2825}
2826
cce8749e
CH
2827/* Return nonzero if OP is a constant power of two. */
2828
2829int
2830power_of_two_operand (op, mode)
2831 rtx op;
74bbc178 2832 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
2833{
2834 if (GET_CODE (op) == CONST_INT)
2835 {
d5b7b3ae 2836 HOST_WIDE_INT value = INTVAL (op);
f3bb6135 2837 return value != 0 && (value & (value - 1)) == 0;
cce8749e 2838 }
f3bb6135
RE
2839 return FALSE;
2840}
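/* The (value & (value - 1)) test above clears the lowest set bit, so
   the result is zero exactly when a single bit was set.  For example
   8 & 7 == 0 (a power of two) while 12 & 11 == 8 (not).  Worked
   example added for clarity; not part of the original source.  */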
cce8749e
CH
2841
2842/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 2843 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
2844 Note that this disallows MEM(REG+REG), but allows
2845 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
2846
2847int
2848di_operand (op, mode)
2849 rtx op;
2850 enum machine_mode mode;
2851{
ff9940b0 2852 if (s_register_operand (op, mode))
f3bb6135 2853 return TRUE;
cce8749e 2854
9ce71c6f
BS
2855 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2856 return FALSE;
2857
e9c6b69b
NC
2858 if (GET_CODE (op) == SUBREG)
2859 op = SUBREG_REG (op);
2860
cce8749e
CH
2861 switch (GET_CODE (op))
2862 {
2863 case CONST_DOUBLE:
2864 case CONST_INT:
f3bb6135
RE
2865 return TRUE;
2866
cce8749e 2867 case MEM:
f3bb6135
RE
2868 return memory_address_p (DImode, XEXP (op, 0));
2869
cce8749e 2870 default:
f3bb6135 2871 return FALSE;
cce8749e 2872 }
f3bb6135 2873}
cce8749e 2874
d5b7b3ae
RE
2875/* Like di_operand, but don't accept constants. */
2876int
2877nonimmediate_di_operand (op, mode)
2878 rtx op;
2879 enum machine_mode mode;
2880{
2881 if (s_register_operand (op, mode))
2882 return TRUE;
2883
2884 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2885 return FALSE;
2886
2887 if (GET_CODE (op) == SUBREG)
2888 op = SUBREG_REG (op);
2889
2890 if (GET_CODE (op) == MEM)
2891 return memory_address_p (DImode, XEXP (op, 0));
2892
2893 return FALSE;
2894}
2895
f3139301 2896/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 2897 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
2898 Note that this disallows MEM(REG+REG), but allows
2899 MEM(PRE/POST_INC/DEC(REG)). */
2900
2901int
2902soft_df_operand (op, mode)
2903 rtx op;
2904 enum machine_mode mode;
2905{
2906 if (s_register_operand (op, mode))
2907 return TRUE;
2908
9ce71c6f
BS
2909 if (mode != VOIDmode && GET_MODE (op) != mode)
2910 return FALSE;
2911
37b80d2e
BS
2912 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
2913 return FALSE;
2914
e9c6b69b
NC
2915 if (GET_CODE (op) == SUBREG)
2916 op = SUBREG_REG (op);
9ce71c6f 2917
f3139301
DE
2918 switch (GET_CODE (op))
2919 {
2920 case CONST_DOUBLE:
2921 return TRUE;
2922
2923 case MEM:
2924 return memory_address_p (DFmode, XEXP (op, 0));
2925
2926 default:
2927 return FALSE;
2928 }
2929}
2930
d5b7b3ae
RE
2931/* Like soft_df_operand, but don't accept constants. */
2932int
2933nonimmediate_soft_df_operand (op, mode)
2934 rtx op;
2935 enum machine_mode mode;
2936{
2937 if (s_register_operand (op, mode))
2938 return TRUE;
2939
2940 if (mode != VOIDmode && GET_MODE (op) != mode)
2941 return FALSE;
2942
2943 if (GET_CODE (op) == SUBREG)
2944 op = SUBREG_REG (op);
2945
2946 if (GET_CODE (op) == MEM)
2947 return memory_address_p (DFmode, XEXP (op, 0));
2948 return FALSE;
2949}
cce8749e 2950
d5b7b3ae 2951/* Return TRUE for valid index operands. */
cce8749e
CH
2952int
2953index_operand (op, mode)
2954 rtx op;
2955 enum machine_mode mode;
2956{
d5b7b3ae 2957 return (s_register_operand (op, mode)
ff9940b0 2958 || (immediate_operand (op, mode)
d5b7b3ae
RE
2959 && (GET_CODE (op) != CONST_INT
2960 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 2961}
cce8749e 2962
ff9940b0
RE
2963/* Return TRUE for valid shifts by a constant. This also accepts any
2964 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 2965 shift operator in this case was a mult. */
ff9940b0
RE
2966
2967int
2968const_shift_operand (op, mode)
2969 rtx op;
2970 enum machine_mode mode;
2971{
2972 return (power_of_two_operand (op, mode)
2973 || (immediate_operand (op, mode)
d5b7b3ae
RE
2974 && (GET_CODE (op) != CONST_INT
2975 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 2976}
ff9940b0 2977
cce8749e
CH
2978/* Return TRUE for arithmetic operators which can be combined with a multiply
2979 (shift). */
2980
2981int
2982shiftable_operator (x, mode)
2983 rtx x;
2984 enum machine_mode mode;
2985{
2986 if (GET_MODE (x) != mode)
2987 return FALSE;
2988 else
2989 {
2990 enum rtx_code code = GET_CODE (x);
2991
2992 return (code == PLUS || code == MINUS
2993 || code == IOR || code == XOR || code == AND);
2994 }
f3bb6135 2995}
cce8749e 2996
6ab589e0
JL
2997/* Return TRUE for binary logical operators. */
2998
2999int
3000logical_binary_operator (x, mode)
3001 rtx x;
3002 enum machine_mode mode;
3003{
3004 if (GET_MODE (x) != mode)
3005 return FALSE;
3006 else
3007 {
3008 enum rtx_code code = GET_CODE (x);
3009
3010 return (code == IOR || code == XOR || code == AND);
3011 }
3012}
3013
6354dc9b 3014/* Return TRUE for shift operators. */
cce8749e
CH
3015
3016int
3017shift_operator (x, mode)
3018 rtx x;
3019 enum machine_mode mode;
3020{
3021 if (GET_MODE (x) != mode)
3022 return FALSE;
3023 else
3024 {
3025 enum rtx_code code = GET_CODE (x);
3026
ff9940b0 3027 if (code == MULT)
aec3cfba 3028 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 3029
e2c671ba
RE
3030 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3031 || code == ROTATERT);
cce8749e 3032 }
f3bb6135 3033}
ff9940b0 3034
6354dc9b
NC
3035/* Return TRUE if x is EQ or NE. */
3036int
3037equality_operator (x, mode)
f3bb6135 3038 rtx x;
74bbc178 3039 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3040{
f3bb6135 3041 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
3042}
3043
e45b72c4
RE
3044/* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3045int
3046arm_comparison_operator (x, mode)
3047 rtx x;
3048 enum machine_mode mode;
3049{
3050 return (comparison_operator (x, mode)
3051 && GET_CODE (x) != LTGT
3052 && GET_CODE (x) != UNEQ);
3053}
3054
6354dc9b 3055/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
ff9940b0
RE
3056int
3057minmax_operator (x, mode)
3058 rtx x;
3059 enum machine_mode mode;
3060{
3061 enum rtx_code code = GET_CODE (x);
3062
3063 if (GET_MODE (x) != mode)
3064 return FALSE;
f3bb6135 3065
ff9940b0 3066 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 3067}
ff9940b0 3068
ff9940b0 3069/* Return TRUE if this is the condition code register, if we aren't given
6354dc9b 3070 a mode, accept any class CCmode register. */
ff9940b0
RE
3071int
3072cc_register (x, mode)
f3bb6135
RE
3073 rtx x;
3074 enum machine_mode mode;
ff9940b0
RE
3075{
3076 if (mode == VOIDmode)
3077 {
3078 mode = GET_MODE (x);
d5b7b3ae 3079
ff9940b0
RE
3080 if (GET_MODE_CLASS (mode) != MODE_CC)
3081 return FALSE;
3082 }
f3bb6135 3083
d5b7b3ae
RE
3084 if ( GET_MODE (x) == mode
3085 && GET_CODE (x) == REG
3086 && REGNO (x) == CC_REGNUM)
ff9940b0 3087 return TRUE;
f3bb6135 3088
ff9940b0
RE
3089 return FALSE;
3090}
5bbe2d40
RE
3091
3092/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
3093 a mode, accept any class CCmode register which indicates a dominance
3094 expression. */
5bbe2d40 3095int
84ed5e79 3096dominant_cc_register (x, mode)
5bbe2d40
RE
3097 rtx x;
3098 enum machine_mode mode;
3099{
3100 if (mode == VOIDmode)
3101 {
3102 mode = GET_MODE (x);
d5b7b3ae 3103
84ed5e79 3104 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
3105 return FALSE;
3106 }
3107
d5b7b3ae 3108 if ( mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
3109 && mode != CC_DLEmode && mode != CC_DLTmode
3110 && mode != CC_DGEmode && mode != CC_DGTmode
3111 && mode != CC_DLEUmode && mode != CC_DLTUmode
3112 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3113 return FALSE;
3114
d5b7b3ae 3115 return cc_register (x, mode);
5bbe2d40
RE
3116}
3117
2b835d68
RE
3118/* Return TRUE if X references a SYMBOL_REF. */
3119int
3120symbol_mentioned_p (x)
3121 rtx x;
3122{
6f7d635c 3123 register const char * fmt;
2b835d68
RE
3124 register int i;
3125
3126 if (GET_CODE (x) == SYMBOL_REF)
3127 return 1;
3128
3129 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 3130
2b835d68
RE
3131 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3132 {
3133 if (fmt[i] == 'E')
3134 {
3135 register int j;
3136
3137 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3138 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3139 return 1;
3140 }
3141 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3142 return 1;
3143 }
3144
3145 return 0;
3146}
3147
3148/* Return TRUE if X references a LABEL_REF. */
3149int
3150label_mentioned_p (x)
3151 rtx x;
3152{
6f7d635c 3153 register const char * fmt;
2b835d68
RE
3154 register int i;
3155
3156 if (GET_CODE (x) == LABEL_REF)
3157 return 1;
3158
3159 fmt = GET_RTX_FORMAT (GET_CODE (x));
3160 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3161 {
3162 if (fmt[i] == 'E')
3163 {
3164 register int j;
3165
3166 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3167 if (label_mentioned_p (XVECEXP (x, i, j)))
3168 return 1;
3169 }
3170 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3171 return 1;
3172 }
3173
3174 return 0;
3175}
3176
ff9940b0
RE
3177enum rtx_code
3178minmax_code (x)
f3bb6135 3179 rtx x;
ff9940b0
RE
3180{
3181 enum rtx_code code = GET_CODE (x);
3182
3183 if (code == SMAX)
3184 return GE;
f3bb6135 3185 else if (code == SMIN)
ff9940b0 3186 return LE;
f3bb6135 3187 else if (code == UMIN)
ff9940b0 3188 return LEU;
f3bb6135 3189 else if (code == UMAX)
ff9940b0 3190 return GEU;
f3bb6135 3191
ff9940b0
RE
3192 abort ();
3193}
3194
6354dc9b 3195/* Return 1 if memory locations are adjacent. */
f3bb6135 3196int
ff9940b0
RE
3197adjacent_mem_locations (a, b)
3198 rtx a, b;
3199{
3200 int val0 = 0, val1 = 0;
3201 int reg0, reg1;
3202
3203 if ((GET_CODE (XEXP (a, 0)) == REG
3204 || (GET_CODE (XEXP (a, 0)) == PLUS
3205 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3206 && (GET_CODE (XEXP (b, 0)) == REG
3207 || (GET_CODE (XEXP (b, 0)) == PLUS
3208 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3209 {
3210 if (GET_CODE (XEXP (a, 0)) == PLUS)
3211 {
3212 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3213 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3214 }
3215 else
3216 reg0 = REGNO (XEXP (a, 0));
3217 if (GET_CODE (XEXP (b, 0)) == PLUS)
3218 {
3219 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3220 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3221 }
3222 else
3223 reg1 = REGNO (XEXP (b, 0));
3224 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3225 }
3226 return 0;
3227}
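/* For instance (illustration only, not part of the original source),
   the MEMs for [r4] and [r4, #4] are adjacent in either order, while
   [r4] and [r5, #4] or [r4] and [r4, #8] are not.  Adjacent word
   accesses are candidates for combining into ldm/stm sequences.  */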
3228
3229/* Return 1 if OP is a load multiple operation. It is known to be
6354dc9b 3230 parallel and the first section will be tested. */
f3bb6135 3231int
ff9940b0
RE
3232load_multiple_operation (op, mode)
3233 rtx op;
74bbc178 3234 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3235{
f3bb6135 3236 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3237 int dest_regno;
3238 rtx src_addr;
f3bb6135 3239 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3240 rtx elt;
3241
3242 if (count <= 1
3243 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3244 return 0;
3245
6354dc9b 3246 /* Check to see if this might be a write-back. */
ff9940b0
RE
3247 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3248 {
3249 i++;
3250 base = 1;
3251
6354dc9b 3252 /* Now check it more carefully. */
ff9940b0
RE
3253 if (GET_CODE (SET_DEST (elt)) != REG
3254 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3255 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3256 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3257 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3258 return 0;
ff9940b0
RE
3259 }
3260
3261 /* Perform a quick check so we don't blow up below. */
3262 if (count <= i
3263 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3264 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3265 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3266 return 0;
3267
3268 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3269 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3270
3271 for (; i < count; i++)
3272 {
ed4c4348 3273 elt = XVECEXP (op, 0, i);
ff9940b0
RE
3274
3275 if (GET_CODE (elt) != SET
3276 || GET_CODE (SET_DEST (elt)) != REG
3277 || GET_MODE (SET_DEST (elt)) != SImode
6354dc9b 3278 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
ff9940b0
RE
3279 || GET_CODE (SET_SRC (elt)) != MEM
3280 || GET_MODE (SET_SRC (elt)) != SImode
3281 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5895f793 3282 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
ff9940b0
RE
3283 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3284 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3285 return 0;
3286 }
3287
3288 return 1;
3289}
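/* A PARALLEL accepted by the function above looks like this sketch
   (not taken from the original source); the optional leading SET is
   the write-back of the base register:

       (parallel
        [(set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 8)))
         (set (reg:SI 4) (mem:SI (reg:SI 0)))
         (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0) (const_int 4))))])

   i.e. an "ldmia r0!, {r4, r5}": destination registers ascending and
   offsets increasing by 4.  */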
3290
3291/* Return 1 if OP is a store multiple operation. It is known to be
6354dc9b 3292 parallel and the first section will be tested. */
f3bb6135 3293int
ff9940b0
RE
3294store_multiple_operation (op, mode)
3295 rtx op;
74bbc178 3296 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3297{
f3bb6135 3298 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3299 int src_regno;
3300 rtx dest_addr;
f3bb6135 3301 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3302 rtx elt;
3303
3304 if (count <= 1
3305 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3306 return 0;
3307
6354dc9b 3308 /* Check to see if this might be a write-back. */
ff9940b0
RE
3309 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3310 {
3311 i++;
3312 base = 1;
3313
6354dc9b 3314 /* Now check it more carefully. */
ff9940b0
RE
3315 if (GET_CODE (SET_DEST (elt)) != REG
3316 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3317 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3318 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3319 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3320 return 0;
ff9940b0
RE
3321 }
3322
3323 /* Perform a quick check so we don't blow up below. */
3324 if (count <= i
3325 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3326 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3327 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3328 return 0;
3329
3330 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3331 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3332
3333 for (; i < count; i++)
3334 {
3335 elt = XVECEXP (op, 0, i);
3336
3337 if (GET_CODE (elt) != SET
3338 || GET_CODE (SET_SRC (elt)) != REG
3339 || GET_MODE (SET_SRC (elt)) != SImode
6354dc9b 3340 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
ff9940b0
RE
3341 || GET_CODE (SET_DEST (elt)) != MEM
3342 || GET_MODE (SET_DEST (elt)) != SImode
3343 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5895f793 3344 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
ff9940b0
RE
3345 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3346 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3347 return 0;
3348 }
3349
3350 return 1;
3351}
e2c671ba 3352
84ed5e79
RE
3353int
3354load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3355 rtx * operands;
84ed5e79 3356 int nops;
62b10bbc
NC
3357 int * regs;
3358 int * base;
3359 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3360{
3361 int unsorted_regs[4];
3362 HOST_WIDE_INT unsorted_offsets[4];
3363 int order[4];
ad076f4e 3364 int base_reg = -1;
84ed5e79
RE
3365 int i;
3366
3367 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3368 extended if required. */
3369 if (nops < 2 || nops > 4)
3370 abort ();
3371
3372 /* Loop over the operands and check that the memory references are
3373 suitable (i.e. immediate offsets from the same base register).  At
3374 the same time, extract the target register, and the memory
3375 offsets. */
3376 for (i = 0; i < nops; i++)
3377 {
3378 rtx reg;
3379 rtx offset;
3380
56636818
JL
3381 /* Convert a subreg of a mem into the mem itself. */
3382 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3383 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3384
84ed5e79
RE
3385 if (GET_CODE (operands[nops + i]) != MEM)
3386 abort ();
3387
3388 /* Don't reorder volatile memory references; it doesn't seem worth
3389 looking for the case where the order is ok anyway. */
3390 if (MEM_VOLATILE_P (operands[nops + i]))
3391 return 0;
3392
3393 offset = const0_rtx;
3394
3395 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3396 || (GET_CODE (reg) == SUBREG
3397 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3398 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3399 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3400 == REG)
3401 || (GET_CODE (reg) == SUBREG
3402 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3403 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3404 == CONST_INT)))
3405 {
3406 if (i == 0)
3407 {
d5b7b3ae 3408 base_reg = REGNO (reg);
84ed5e79
RE
3409 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3410 ? REGNO (operands[i])
3411 : REGNO (SUBREG_REG (operands[i])));
3412 order[0] = 0;
3413 }
3414 else
3415 {
6354dc9b 3416 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3417 /* Not addressed from the same base register. */
3418 return 0;
3419
3420 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3421 ? REGNO (operands[i])
3422 : REGNO (SUBREG_REG (operands[i])));
3423 if (unsorted_regs[i] < unsorted_regs[order[0]])
3424 order[0] = i;
3425 }
3426
3427 /* If it isn't an integer register, or if it overwrites the
3428 base register but isn't the last insn in the list, then
3429 we can't do this. */
3430 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3431 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3432 return 0;
3433
3434 unsorted_offsets[i] = INTVAL (offset);
3435 }
3436 else
3437 /* Not a suitable memory address. */
3438 return 0;
3439 }
3440
3441 /* All the useful information has now been extracted from the
3442 operands into unsorted_regs and unsorted_offsets; additionally,
3443 order[0] has been set to the lowest numbered register in the
3444 list. Sort the registers into order, and check that the memory
3445 offsets are ascending and adjacent. */
3446
3447 for (i = 1; i < nops; i++)
3448 {
3449 int j;
3450
3451 order[i] = order[i - 1];
3452 for (j = 0; j < nops; j++)
3453 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3454 && (order[i] == order[i - 1]
3455 || unsorted_regs[j] < unsorted_regs[order[i]]))
3456 order[i] = j;
3457
3458 /* Have we found a suitable register?  If not, one must be used more
3459 than once. */
3460 if (order[i] == order[i - 1])
3461 return 0;
3462
3463 /* Is the memory address adjacent and ascending? */
3464 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3465 return 0;
3466 }
3467
3468 if (base)
3469 {
3470 *base = base_reg;
3471
3472 for (i = 0; i < nops; i++)
3473 regs[i] = unsorted_regs[order[i]];
3474
3475 *load_offset = unsorted_offsets[order[0]];
3476 }
3477
3478 if (unsorted_offsets[order[0]] == 0)
3479 return 1; /* ldmia */
3480
3481 if (unsorted_offsets[order[0]] == 4)
3482 return 2; /* ldmib */
3483
3484 if (unsorted_offsets[order[nops - 1]] == 0)
3485 return 3; /* ldmda */
3486
3487 if (unsorted_offsets[order[nops - 1]] == -4)
3488 return 4; /* ldmdb */
3489
949d79eb
RE
3490 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3491 if the offset isn't small enough. The reason 2 ldrs are faster
3492 is because these ARMs are able to do more than one cache access
3493 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3494 whilst the ARM8 has a double bandwidth cache. This means that
3495 these cores can do both an instruction fetch and a data fetch in
3496 a single cycle, so the trick of calculating the address into a
3497 scratch register (one of the result regs) and then doing a load
3498 multiple actually becomes slower (and no smaller in code size).
3499 That is the transformation
6cc8c0b3
NC
3500
3501 ldr rd1, [rbase + offset]
3502 ldr rd2, [rbase + offset + 4]
3503
3504 to
3505
3506 add rd1, rbase, offset
3507 ldmia rd1, {rd1, rd2}
3508
949d79eb
RE
3509 produces worse code -- '3 cycles + any stalls on rd2' instead of
3510 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3511 access per cycle, the first sequence could never complete in less
3512 than 6 cycles, whereas the ldm sequence would only take 5 and
3513 would make better use of sequential accesses if not hitting the
3514 cache.
3515
3516 We cheat here and test 'arm_ld_sched' which we currently know to
3517 only be true for the ARM8, ARM9 and StrongARM. If this ever
3518 changes, then the test below needs to be reworked. */
f5a1b0d2 3519 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
3520 return 0;
3521
84ed5e79
RE
3522 /* Can't do it without setting up the offset, only do this if it takes
3523 no more than one insn. */
3524 return (const_ok_for_arm (unsorted_offsets[order[0]])
3525 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3526}
3527
cd2b33d0 3528const char *
84ed5e79 3529emit_ldm_seq (operands, nops)
62b10bbc 3530 rtx * operands;
84ed5e79
RE
3531 int nops;
3532{
3533 int regs[4];
3534 int base_reg;
3535 HOST_WIDE_INT offset;
3536 char buf[100];
3537 int i;
3538
3539 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3540 {
3541 case 1:
3542 strcpy (buf, "ldm%?ia\t");
3543 break;
3544
3545 case 2:
3546 strcpy (buf, "ldm%?ib\t");
3547 break;
3548
3549 case 3:
3550 strcpy (buf, "ldm%?da\t");
3551 break;
3552
3553 case 4:
3554 strcpy (buf, "ldm%?db\t");
3555 break;
3556
3557 case 5:
3558 if (offset >= 0)
3559 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3560 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3561 (long) offset);
3562 else
3563 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3564 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3565 (long) -offset);
3566 output_asm_insn (buf, operands);
3567 base_reg = regs[0];
3568 strcpy (buf, "ldm%?ia\t");
3569 break;
3570
3571 default:
3572 abort ();
3573 }
3574
3575 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3576 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3577
3578 for (i = 1; i < nops; i++)
3579 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3580 reg_names[regs[i]]);
3581
3582 strcat (buf, "}\t%@ phole ldm");
3583
3584 output_asm_insn (buf, operands);
3585 return "";
3586}
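/* Illustrative example (an editorial addition, not in the original
   source): for a pair of loads r0 <- [r4] and r1 <- [r4, #4],
   load_multiple_sequence returns 1 and, assuming an empty
   REGISTER_PREFIX, the template produced above is

     "ldm%?ia\tr4, {r0, r1}\t%@ phole ldm"

   where %? expands to the current condition code and %@ to the
   assembler comment character. */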
3587
3588int
3589store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3590 rtx * operands;
84ed5e79 3591 int nops;
62b10bbc
NC
3592 int * regs;
3593 int * base;
3594 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3595{
3596 int unsorted_regs[4];
3597 HOST_WIDE_INT unsorted_offsets[4];
3598 int order[4];
ad076f4e 3599 int base_reg = -1;
84ed5e79
RE
3600 int i;
3601
3602 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3603 extended if required. */
3604 if (nops < 2 || nops > 4)
3605 abort ();
3606
3607 /* Loop over the operands and check that the memory references are
3608 suitable (i.e. immediate offsets from the same base register). At
3609 the same time, extract the target register, and the memory
3610 offsets. */
3611 for (i = 0; i < nops; i++)
3612 {
3613 rtx reg;
3614 rtx offset;
3615
56636818
JL
3616 /* Convert a subreg of a mem into the mem itself. */
3617 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3618 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3619
84ed5e79
RE
3620 if (GET_CODE (operands[nops + i]) != MEM)
3621 abort ();
3622
3623 /* Don't reorder volatile memory references; it doesn't seem worth
3624 looking for the case where the order is ok anyway. */
3625 if (MEM_VOLATILE_P (operands[nops + i]))
3626 return 0;
3627
3628 offset = const0_rtx;
3629
3630 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3631 || (GET_CODE (reg) == SUBREG
3632 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3633 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3634 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3635 == REG)
3636 || (GET_CODE (reg) == SUBREG
3637 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3638 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3639 == CONST_INT)))
3640 {
3641 if (i == 0)
3642 {
62b10bbc 3643 base_reg = REGNO (reg);
84ed5e79
RE
3644 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3645 ? REGNO (operands[i])
3646 : REGNO (SUBREG_REG (operands[i])));
3647 order[0] = 0;
3648 }
3649 else
3650 {
6354dc9b 3651 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3652 /* Not addressed from the same base register. */
3653 return 0;
3654
3655 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3656 ? REGNO (operands[i])
3657 : REGNO (SUBREG_REG (operands[i])));
3658 if (unsorted_regs[i] < unsorted_regs[order[0]])
3659 order[0] = i;
3660 }
3661
3662 /* If it isn't an integer register, then we can't do this. */
3663 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3664 return 0;
3665
3666 unsorted_offsets[i] = INTVAL (offset);
3667 }
3668 else
3669 /* Not a suitable memory address. */
3670 return 0;
3671 }
3672
3673 /* All the useful information has now been extracted from the
3674 operands into unsorted_regs and unsorted_offsets; additionally,
3675 order[0] has been set to the lowest numbered register in the
3676 list. Sort the registers into order, and check that the memory
3677 offsets are ascending and adjacent. */
3678
3679 for (i = 1; i < nops; i++)
3680 {
3681 int j;
3682
3683 order[i] = order[i - 1];
3684 for (j = 0; j < nops; j++)
3685 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3686 && (order[i] == order[i - 1]
3687 || unsorted_regs[j] < unsorted_regs[order[i]]))
3688 order[i] = j;
3689
3690 /* Have we found a suitable register?  If not, one must be used more
3691 than once. */
3692 if (order[i] == order[i - 1])
3693 return 0;
3694
3695 /* Is the memory address adjacent and ascending? */
3696 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3697 return 0;
3698 }
3699
3700 if (base)
3701 {
3702 *base = base_reg;
3703
3704 for (i = 0; i < nops; i++)
3705 regs[i] = unsorted_regs[order[i]];
3706
3707 *load_offset = unsorted_offsets[order[0]];
3708 }
3709
3710 if (unsorted_offsets[order[0]] == 0)
3711 return 1; /* stmia */
3712
3713 if (unsorted_offsets[order[0]] == 4)
3714 return 2; /* stmib */
3715
3716 if (unsorted_offsets[order[nops - 1]] == 0)
3717 return 3; /* stmda */
3718
3719 if (unsorted_offsets[order[nops - 1]] == -4)
3720 return 4; /* stmdb */
3721
3722 return 0;
3723}
3724
cd2b33d0 3725const char *
84ed5e79 3726emit_stm_seq (operands, nops)
62b10bbc 3727 rtx * operands;
84ed5e79
RE
3728 int nops;
3729{
3730 int regs[4];
3731 int base_reg;
3732 HOST_WIDE_INT offset;
3733 char buf[100];
3734 int i;
3735
3736 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3737 {
3738 case 1:
3739 strcpy (buf, "stm%?ia\t");
3740 break;
3741
3742 case 2:
3743 strcpy (buf, "stm%?ib\t");
3744 break;
3745
3746 case 3:
3747 strcpy (buf, "stm%?da\t");
3748 break;
3749
3750 case 4:
3751 strcpy (buf, "stm%?db\t");
3752 break;
3753
3754 default:
3755 abort ();
3756 }
3757
3758 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3759 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3760
3761 for (i = 1; i < nops; i++)
3762 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3763 reg_names[regs[i]]);
3764
3765 strcat (buf, "}\t%@ phole stm");
3766
3767 output_asm_insn (buf, operands);
3768 return "";
3769}
3770
e2c671ba
RE
3771int
3772multi_register_push (op, mode)
0a81f500 3773 rtx op;
74bbc178 3774 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3775{
3776 if (GET_CODE (op) != PARALLEL
3777 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3778 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3779 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3780 return 0;
3781
3782 return 1;
3783}
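/* Illustrative note (an editorial addition): the predicate above
   accepts RTL of the shape

     (parallel [(set ... (unspec [...] 2)) ...])

   i.e. a PARALLEL whose first element sets something from an UNSPEC
   with number 2 -- presumably the marker attached to this port's
   multi-register push pattern in arm.md. */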
ff9940b0 3784\f
d7d01975 3785/* Routines for use with attributes. */
f3bb6135 3786
31fdb4d5 3787/* Return nonzero if ATTR is a valid attribute for DECL.
d7d01975
NC
3788 ATTRIBUTES are any existing attributes and ARGS are
3789 the arguments supplied with ATTR.
31fdb4d5
DE
3790
3791 Supported attributes:
3792
d5b7b3ae
RE
3793 naked:
3794 don't output any prologue or epilogue code, the user is assumed
3795 to do the right thing.
3796
3797 interfacearm:
3798 Always assume that this function will be entered in ARM mode,
3799 not Thumb mode, and that the caller wishes to be returned to in
3800 ARM mode. */
31fdb4d5 3801int
74bbc178 3802arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3803 tree decl;
31fdb4d5
DE
3804 tree attr;
3805 tree args;
3806{
3807 if (args != NULL_TREE)
3808 return 0;
3809
3810 if (is_attribute_p ("naked", attr))
3811 return TREE_CODE (decl) == FUNCTION_DECL;
d5b7b3ae
RE
3812
3813#ifdef ARM_PE
3814 if (is_attribute_p ("interfacearm", attr))
3815 return TREE_CODE (decl) == FUNCTION_DECL;
3816#endif /* ARM_PE */
3817
31fdb4d5
DE
3818 return 0;
3819}
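/* Illustrative usage (an editorial addition, assuming the usual GCC
   attribute syntax): source code that the checks above would accept.
   With "naked" no prologue or epilogue is emitted, so the body must
   stand alone, e.g. as inline assembly:

     void handler (void) __attribute__ ((naked));
     void handler (void)
     {
       __asm__ volatile ("mov\tpc, lr");
     }
*/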
3820
3821/* Return non-zero if FUNC is a naked function. */
31fdb4d5
DE
3822static int
3823arm_naked_function_p (func)
3824 tree func;
3825{
3826 tree a;
3827
3828 if (TREE_CODE (func) != FUNCTION_DECL)
3829 abort ();
2e943e99 3830
31fdb4d5
DE
3831 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3832 return a != NULL_TREE;
3833}
f3bb6135 3834\f
6354dc9b 3835/* Routines for use in generating RTL. */
f3bb6135 3836rtx
56636818 3837arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3838 in_struct_p, scalar_p)
ff9940b0
RE
3839 int base_regno;
3840 int count;
3841 rtx from;
3842 int up;
3843 int write_back;
56636818
JL
3844 int unchanging_p;
3845 int in_struct_p;
c6df88cb 3846 int scalar_p;
ff9940b0
RE
3847{
3848 int i = 0, j;
3849 rtx result;
3850 int sign = up ? 1 : -1;
56636818 3851 rtx mem;
ff9940b0 3852
43cffd11 3853 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 3854 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 3855 if (write_back)
f3bb6135 3856 {
ff9940b0 3857 XVECEXP (result, 0, 0)
43cffd11
RE
3858 = gen_rtx_SET (GET_MODE (from), from,
3859 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3860 i = 1;
3861 count++;
f3bb6135
RE
3862 }
3863
ff9940b0 3864 for (j = 0; i < count; i++, j++)
f3bb6135 3865 {
43cffd11 3866 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3867 RTX_UNCHANGING_P (mem) = unchanging_p;
3868 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3869 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3870 XVECEXP (result, 0, i)
3871 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3872 }
3873
ff9940b0
RE
3874 return result;
3875}
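/* Illustrative note (an editorial addition): with base_regno = 0,
   count = 2, UP and write_back true, the PARALLEL built above is

     (parallel [(set from (plus from (const_int 8)))
                (set (reg:SI 0) (mem:SI from))
                (set (reg:SI 1) (mem:SI (plus from (const_int 4))))])

   which is intended to match a load-multiple pattern and so emit a
   single ldmia with write-back. */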
3876
f3bb6135 3877rtx
56636818 3878arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3879 in_struct_p, scalar_p)
ff9940b0
RE
3880 int base_regno;
3881 int count;
3882 rtx to;
3883 int up;
3884 int write_back;
56636818
JL
3885 int unchanging_p;
3886 int in_struct_p;
c6df88cb 3887 int scalar_p;
ff9940b0
RE
3888{
3889 int i = 0, j;
3890 rtx result;
3891 int sign = up ? 1 : -1;
56636818 3892 rtx mem;
ff9940b0 3893
43cffd11 3894 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 3895 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 3896 if (write_back)
f3bb6135 3897 {
ff9940b0 3898 XVECEXP (result, 0, 0)
43cffd11
RE
3899 = gen_rtx_SET (GET_MODE (to), to,
3900 plus_constant (to, count * 4 * sign));
ff9940b0
RE
3901 i = 1;
3902 count++;
f3bb6135
RE
3903 }
3904
ff9940b0 3905 for (j = 0; i < count; i++, j++)
f3bb6135 3906 {
43cffd11 3907 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
3908 RTX_UNCHANGING_P (mem) = unchanging_p;
3909 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3910 MEM_SCALAR_P (mem) = scalar_p;
56636818 3911
43cffd11
RE
3912 XVECEXP (result, 0, i)
3913 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
3914 }
3915
ff9940b0
RE
3916 return result;
3917}
3918
880e2516
RE
3919int
3920arm_gen_movstrqi (operands)
62b10bbc 3921 rtx * operands;
880e2516
RE
3922{
3923 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 3924 int i;
880e2516 3925 rtx src, dst;
ad076f4e 3926 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 3927 rtx part_bytes_reg = NULL;
56636818
JL
3928 rtx mem;
3929 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 3930 int dst_scalar_p, src_scalar_p;
880e2516
RE
3931
3932 if (GET_CODE (operands[2]) != CONST_INT
3933 || GET_CODE (operands[3]) != CONST_INT
3934 || INTVAL (operands[2]) > 64
3935 || INTVAL (operands[3]) & 3)
3936 return 0;
3937
3938 st_dst = XEXP (operands[0], 0);
3939 st_src = XEXP (operands[1], 0);
56636818
JL
3940
3941 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3942 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 3943 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
3944 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3945 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 3946 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 3947
880e2516
RE
3948 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3949 fin_src = src = copy_to_mode_reg (SImode, st_src);
3950
d5b7b3ae 3951 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
880e2516
RE
3952 out_words_to_go = INTVAL (operands[2]) / 4;
3953 last_bytes = INTVAL (operands[2]) & 3;
3954
3955 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 3956 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
3957
3958 for (i = 0; in_words_to_go >= 2; i+=4)
3959 {
bd9c7e23 3960 if (in_words_to_go > 4)
56636818 3961 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
3962 src_unchanging_p,
3963 src_in_struct_p,
3964 src_scalar_p));
bd9c7e23
RE
3965 else
3966 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 3967 FALSE, src_unchanging_p,
c6df88cb 3968 src_in_struct_p, src_scalar_p));
bd9c7e23 3969
880e2516
RE
3970 if (out_words_to_go)
3971 {
bd9c7e23 3972 if (out_words_to_go > 4)
56636818
JL
3973 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3974 dst_unchanging_p,
c6df88cb
MM
3975 dst_in_struct_p,
3976 dst_scalar_p));
bd9c7e23
RE
3977 else if (out_words_to_go != 1)
3978 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3979 dst, TRUE,
3980 (last_bytes == 0
56636818
JL
3981 ? FALSE : TRUE),
3982 dst_unchanging_p,
c6df88cb
MM
3983 dst_in_struct_p,
3984 dst_scalar_p));
880e2516
RE
3985 else
3986 {
43cffd11 3987 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
3988 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3989 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3990 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3991 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
3992 if (last_bytes != 0)
3993 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
3994 }
3995 }
3996
3997 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3998 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
3999 }
4000
4001 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4002 if (out_words_to_go)
62b10bbc
NC
4003 {
4004 rtx sreg;
4005
4006 mem = gen_rtx_MEM (SImode, src);
4007 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4008 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4009 MEM_SCALAR_P (mem) = src_scalar_p;
4010 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4011 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4012
4013 mem = gen_rtx_MEM (SImode, dst);
4014 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4015 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4016 MEM_SCALAR_P (mem) = dst_scalar_p;
4017 emit_move_insn (mem, sreg);
4018 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4019 in_words_to_go--;
4020
4021 if (in_words_to_go) /* Sanity check */
4022 abort ();
4023 }
880e2516
RE
4024
4025 if (in_words_to_go)
4026 {
4027 if (in_words_to_go < 0)
4028 abort ();
4029
43cffd11 4030 mem = gen_rtx_MEM (SImode, src);
56636818
JL
4031 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4032 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 4033 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 4034 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
4035 }
4036
d5b7b3ae
RE
4037 if (last_bytes && part_bytes_reg == NULL)
4038 abort ();
4039
880e2516
RE
4040 if (BYTES_BIG_ENDIAN && last_bytes)
4041 {
4042 rtx tmp = gen_reg_rtx (SImode);
4043
6354dc9b 4044 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
4045 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4046 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
4047 part_bytes_reg = tmp;
4048
4049 while (last_bytes)
4050 {
43cffd11 4051 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
4052 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4053 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4054 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4055 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 4056
880e2516
RE
4057 if (--last_bytes)
4058 {
4059 tmp = gen_reg_rtx (SImode);
4060 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4061 part_bytes_reg = tmp;
4062 }
4063 }
4064
4065 }
4066 else
4067 {
d5b7b3ae 4068 if (last_bytes > 1)
880e2516 4069 {
d5b7b3ae 4070 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
4071 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4072 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4073 MEM_SCALAR_P (mem) = dst_scalar_p;
d5b7b3ae
RE
4074 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4075 last_bytes -= 2;
4076 if (last_bytes)
880e2516
RE
4077 {
4078 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 4079
d5b7b3ae
RE
4080 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4081 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
4082 part_bytes_reg = tmp;
4083 }
4084 }
d5b7b3ae
RE
4085
4086 if (last_bytes)
4087 {
4088 mem = gen_rtx_MEM (QImode, dst);
4089 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4090 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4091 MEM_SCALAR_P (mem) = dst_scalar_p;
4092 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4093 }
880e2516
RE
4094 }
4095
4096 return 1;
4097}
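/* Illustrative example (an editorial addition, assuming NUM_INTS
   rounds the byte count up to whole words): a 10-byte copy gives
   in_words_to_go = 3, out_words_to_go = 2 and last_bytes = 2, so the
   loop above loads three words and stores two whole words, and the
   little-endian tail code finishes with a single halfword store (a
   trailing byte store would follow if last_bytes were odd). */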
4098
5165176d
RE
4099/* Generate a memory reference for a half word, such that it will be loaded
4100 into the top 16 bits of the word. We can assume that the address is
4101 known to be alignable and of the form reg, or plus (reg, const). */
4102rtx
d5b7b3ae 4103arm_gen_rotated_half_load (memref)
5165176d
RE
4104 rtx memref;
4105{
4106 HOST_WIDE_INT offset = 0;
4107 rtx base = XEXP (memref, 0);
4108
4109 if (GET_CODE (base) == PLUS)
4110 {
4111 offset = INTVAL (XEXP (base, 1));
4112 base = XEXP (base, 0);
4113 }
4114
956d6950 4115 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 4116 if (TARGET_MMU_TRAPS
5165176d
RE
4117 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4118 return NULL;
4119
43cffd11 4120 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
4121
4122 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4123 return base;
4124
43cffd11 4125 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
4126}
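/* Illustrative example (an editorial addition): on a little-endian
   target a halfword at [r0, #2] already lies in the top half of the
   word at [r0, #0], so the SImode MEM is returned unrotated; a
   halfword at [r0, #0] is returned as (rotate (mem:SI r0) 16) so that
   the loaded value ends up in the top 16 bits. */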
4127
84ed5e79 4128static enum machine_mode
74bbc178 4129select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
4130 rtx x;
4131 rtx y;
4132 HOST_WIDE_INT cond_or;
4133{
4134 enum rtx_code cond1, cond2;
4135 int swapped = 0;
4136
4137 /* Currently we will probably get the wrong result if the individual
4138 comparisons are not simple. This also ensures that it is safe to
956d6950 4139 reverse a comparison if necessary. */
84ed5e79
RE
4140 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4141 != CCmode)
4142 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4143 != CCmode))
4144 return CCmode;
4145
4146 if (cond_or)
4147 cond1 = reverse_condition (cond1);
4148
4149 /* If the comparisons are not equal, and one doesn't dominate the other,
4150 then we can't do this. */
4151 if (cond1 != cond2
5895f793
RE
4152 && !comparison_dominates_p (cond1, cond2)
4153 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
84ed5e79
RE
4154 return CCmode;
4155
4156 if (swapped)
4157 {
4158 enum rtx_code temp = cond1;
4159 cond1 = cond2;
4160 cond2 = temp;
4161 }
4162
4163 switch (cond1)
4164 {
4165 case EQ:
5895f793 4166 if (cond2 == EQ || !cond_or)
84ed5e79
RE
4167 return CC_DEQmode;
4168
4169 switch (cond2)
4170 {
4171 case LE: return CC_DLEmode;
4172 case LEU: return CC_DLEUmode;
4173 case GE: return CC_DGEmode;
4174 case GEU: return CC_DGEUmode;
ad076f4e 4175 default: break;
84ed5e79
RE
4176 }
4177
4178 break;
4179
4180 case LT:
5895f793 4181 if (cond2 == LT || !cond_or)
84ed5e79
RE
4182 return CC_DLTmode;
4183 if (cond2 == LE)
4184 return CC_DLEmode;
4185 if (cond2 == NE)
4186 return CC_DNEmode;
4187 break;
4188
4189 case GT:
5895f793 4190 if (cond2 == GT || !cond_or)
84ed5e79
RE
4191 return CC_DGTmode;
4192 if (cond2 == GE)
4193 return CC_DGEmode;
4194 if (cond2 == NE)
4195 return CC_DNEmode;
4196 break;
4197
4198 case LTU:
5895f793 4199 if (cond2 == LTU || !cond_or)
84ed5e79
RE
4200 return CC_DLTUmode;
4201 if (cond2 == LEU)
4202 return CC_DLEUmode;
4203 if (cond2 == NE)
4204 return CC_DNEmode;
4205 break;
4206
4207 case GTU:
5895f793 4208 if (cond2 == GTU || !cond_or)
84ed5e79
RE
4209 return CC_DGTUmode;
4210 if (cond2 == GEU)
4211 return CC_DGEUmode;
4212 if (cond2 == NE)
4213 return CC_DNEmode;
4214 break;
4215
4216 /* The remaining cases only occur when both comparisons are the
4217 same. */
4218 case NE:
4219 return CC_DNEmode;
4220
4221 case LE:
4222 return CC_DLEmode;
4223
4224 case GE:
4225 return CC_DGEmode;
4226
4227 case LEU:
4228 return CC_DLEUmode;
4229
4230 case GEU:
4231 return CC_DGEUmode;
ad076f4e
RE
4232
4233 default:
4234 break;
84ed5e79
RE
4235 }
4236
4237 abort ();
4238}
4239
4240enum machine_mode
4241arm_select_cc_mode (op, x, y)
4242 enum rtx_code op;
4243 rtx x;
4244 rtx y;
4245{
4246 /* All floating point compares return CCFP if it is an equality
4247 comparison, and CCFPE otherwise. */
4248 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
e45b72c4
RE
4249 {
4250 switch (op)
4251 {
4252 case EQ:
4253 case NE:
4254 case UNORDERED:
4255 case ORDERED:
4256 case UNLT:
4257 case UNLE:
4258 case UNGT:
4259 case UNGE:
4260 case UNEQ:
4261 case LTGT:
4262 return CCFPmode;
4263
4264 case LT:
4265 case LE:
4266 case GT:
4267 case GE:
4268 return CCFPEmode;
4269
4270 default:
4271 abort ();
4272 }
4273 }
84ed5e79
RE
4274
4275 /* A compare with a shifted operand. Because of canonicalization, the
4276 comparison will have to be swapped when we emit the assembler. */
4277 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4278 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4279 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4280 || GET_CODE (x) == ROTATERT))
4281 return CC_SWPmode;
4282
956d6950
JL
4283 /* This is a special case that is used by combine to allow a
4284 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 4285 followed by a comparison of the shifted integer (only valid for
956d6950 4286 equalities and unsigned inequalities). */
84ed5e79
RE
4287 if (GET_MODE (x) == SImode
4288 && GET_CODE (x) == ASHIFT
4289 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4290 && GET_CODE (XEXP (x, 0)) == SUBREG
4291 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4292 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4293 && (op == EQ || op == NE
4294 || op == GEU || op == GTU || op == LTU || op == LEU)
4295 && GET_CODE (y) == CONST_INT)
4296 return CC_Zmode;
4297
4298 /* For an operation that sets the condition codes as a side-effect,
4299 the V flag is not set correctly, so we can only use comparisons where
4300 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4301 instead.) */
4302 if (GET_MODE (x) == SImode
4303 && y == const0_rtx
4304 && (op == EQ || op == NE || op == LT || op == GE)
4305 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4306 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4307 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4308 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4309 || GET_CODE (x) == LSHIFTRT
4310 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4311 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4312 return CC_NOOVmode;
4313
4314 /* A construct for a conditional compare: if the false arm contains
4315 0, then both conditions must be true; otherwise either condition
4316 must be true. Not all conditions are possible, so CCmode is
4317 returned if it can't be done. */
4318 if (GET_CODE (x) == IF_THEN_ELSE
4319 && (XEXP (x, 2) == const0_rtx
4320 || XEXP (x, 2) == const1_rtx)
4321 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4322 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
74bbc178 4323 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
84ed5e79
RE
4324 INTVAL (XEXP (x, 2)));
4325
4326 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4327 return CC_Zmode;
4328
bd9c7e23
RE
4329 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4330 && GET_CODE (x) == PLUS
4331 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4332 return CC_Cmode;
4333
84ed5e79
RE
4334 return CCmode;
4335}
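/* Illustrative examples (an editorial addition): an SImode test of
   (a + b) == 0 selects CC_NOOVmode above, since only flags unaffected
   by overflow are trusted after an adds; an unsigned test
   (a + b) < a selects CC_Cmode, reducing the comparison to a test of
   the carry flag. */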
4336
ff9940b0
RE
4337/* X and Y are two things to compare using CODE. Emit the compare insn and
4338 return the rtx for register 0 in the proper mode. FP means this is a
4339 floating point compare: I don't think that it is needed on the arm. */
4340
4341rtx
d5b7b3ae 4342arm_gen_compare_reg (code, x, y)
ff9940b0
RE
4343 enum rtx_code code;
4344 rtx x, y;
4345{
4346 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 4347 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 4348
43cffd11
RE
4349 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4350 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4351
4352 return cc_reg;
4353}
4354
0a81f500
RE
4355void
4356arm_reload_in_hi (operands)
62b10bbc 4357 rtx * operands;
0a81f500 4358{
f9cc092a
RE
4359 rtx ref = operands[1];
4360 rtx base, scratch;
4361 HOST_WIDE_INT offset = 0;
4362
4363 if (GET_CODE (ref) == SUBREG)
4364 {
4365 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4366 if (BYTES_BIG_ENDIAN)
4367 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4368 - MIN (UNITS_PER_WORD,
4369 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4370 ref = SUBREG_REG (ref);
4371 }
4372
4373 if (GET_CODE (ref) == REG)
4374 {
4375 /* We have a pseudo which has been spilt onto the stack; there
4376 are two cases here: the first where there is a simple
4377 stack-slot replacement and a second where the stack-slot is
4378 out of range, or is used as a subreg. */
4379 if (reg_equiv_mem[REGNO (ref)])
4380 {
4381 ref = reg_equiv_mem[REGNO (ref)];
4382 base = find_replacement (&XEXP (ref, 0));
4383 }
4384 else
6354dc9b 4385 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4386 base = reg_equiv_address[REGNO (ref)];
4387 }
4388 else
4389 base = find_replacement (&XEXP (ref, 0));
0a81f500 4390
e5e809f4
JL
4391 /* Handle the case where the address is too complex to be offset by 1. */
4392 if (GET_CODE (base) == MINUS
4393 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4394 {
f9cc092a 4395 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 4396
43cffd11 4397 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
4398 base = base_plus;
4399 }
f9cc092a
RE
4400 else if (GET_CODE (base) == PLUS)
4401 {
6354dc9b 4402 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4403 HOST_WIDE_INT hi, lo;
4404
4405 offset += INTVAL (XEXP (base, 1));
4406 base = XEXP (base, 0);
4407
6354dc9b 4408 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4409 /* Valid range for lo is -4095 -> 4095 */
4410 lo = (offset >= 0
4411 ? (offset & 0xfff)
4412 : -((-offset) & 0xfff));
4413
4414 /* Corner case, if lo is the max offset then we would be out of range
4415 once we have added the additional 1 below, so bump the msb into the
4416 pre-loading insn(s). */
4417 if (lo == 4095)
4418 lo &= 0x7ff;
4419
e5951263
NC
4420 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4421 ^ HOST_INT (0x80000000))
4422 - HOST_INT (0x80000000));
f9cc092a
RE
4423
4424 if (hi + lo != offset)
4425 abort ();
4426
4427 if (hi != 0)
4428 {
4429 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4430
4431 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4432 that require more than one insn. */
f9cc092a
RE
4433 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4434 base = base_plus;
4435 offset = lo;
4436 }
4437 }
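  /* Illustrative arithmetic (an editorial addition, not in the
     original source): for offset = 4096 the split above yields
     lo = 0, hi = 4096; for offset = -4100 it yields lo = -4,
     hi = -4096.  In each case hi + lo == offset, lo stays within the
     +/-4095 range of a byte load, and hi is materialised by addsi3,
     which can split constants needing more than one insn. */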
e5e809f4 4438
f9cc092a
RE
4439 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4440 emit_insn (gen_zero_extendqisi2 (scratch,
4441 gen_rtx_MEM (QImode,
4442 plus_constant (base,
4443 offset))));
43cffd11
RE
4444 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4445 gen_rtx_MEM (QImode,
f9cc092a
RE
4446 plus_constant (base,
4447 offset + 1))));
5895f793 4448 if (!BYTES_BIG_ENDIAN)
43cffd11
RE
4449 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4450 gen_rtx_IOR (SImode,
4451 gen_rtx_ASHIFT
4452 (SImode,
4453 gen_rtx_SUBREG (SImode, operands[0], 0),
4454 GEN_INT (8)),
f9cc092a 4455 scratch)));
0a81f500 4456 else
43cffd11
RE
4457 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4458 gen_rtx_IOR (SImode,
f9cc092a 4459 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
4460 GEN_INT (8)),
4461 gen_rtx_SUBREG (SImode, operands[0],
4462 0))));
0a81f500
RE
4463}
4464
f9cc092a
RE
4465/* Handle storing a half-word to memory during reload by synthesising as two
4466 byte stores. Take care not to clobber the input values until after we
4467 have moved them somewhere safe. This code assumes that if the DImode
4468 scratch in operands[2] overlaps either the input value or output address
4469 in some way, then that value must die in this insn (we absolutely need
4470 two scratch registers for some corner cases). */
f3bb6135 4471void
af48348a 4472arm_reload_out_hi (operands)
62b10bbc 4473 rtx * operands;
af48348a 4474{
f9cc092a
RE
4475 rtx ref = operands[0];
4476 rtx outval = operands[1];
4477 rtx base, scratch;
4478 HOST_WIDE_INT offset = 0;
4479
4480 if (GET_CODE (ref) == SUBREG)
4481 {
4482 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4483 if (BYTES_BIG_ENDIAN)
4484 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4485 - MIN (UNITS_PER_WORD,
4486 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4487 ref = SUBREG_REG (ref);
4488 }
4489
4490
4491 if (GET_CODE (ref) == REG)
4492 {
4493 /* We have a pseudo which has been spilt onto the stack; there
4494 are two cases here: the first where there is a simple
4495 stack-slot replacement and a second where the stack-slot is
4496 out of range, or is used as a subreg. */
4497 if (reg_equiv_mem[REGNO (ref)])
4498 {
4499 ref = reg_equiv_mem[REGNO (ref)];
4500 base = find_replacement (&XEXP (ref, 0));
4501 }
4502 else
6354dc9b 4503 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4504 base = reg_equiv_address[REGNO (ref)];
4505 }
4506 else
4507 base = find_replacement (&XEXP (ref, 0));
4508
4509 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4510
4511 /* Handle the case where the address is too complex to be offset by 1. */
4512 if (GET_CODE (base) == MINUS
4513 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4514 {
4515 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4516
4517 /* Be careful not to destroy OUTVAL. */
4518 if (reg_overlap_mentioned_p (base_plus, outval))
4519 {
4520 /* Updating base_plus might destroy outval, see if we can
4521 swap the scratch and base_plus. */
5895f793 4522 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4523 {
4524 rtx tmp = scratch;
4525 scratch = base_plus;
4526 base_plus = tmp;
4527 }
4528 else
4529 {
4530 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4531
4532 /* Be conservative and copy OUTVAL into the scratch now,
4533 this should only be necessary if outval is a subreg
4534 of something larger than a word. */
4535 /* XXX Might this clobber base? I can't see how it can,
4536 since scratch is known to overlap with OUTVAL, and
4537 must be wider than a word. */
4538 emit_insn (gen_movhi (scratch_hi, outval));
4539 outval = scratch_hi;
4540 }
4541 }
4542
4543 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4544 base = base_plus;
4545 }
4546 else if (GET_CODE (base) == PLUS)
4547 {
6354dc9b 4548 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4549 HOST_WIDE_INT hi, lo;
4550
4551 offset += INTVAL (XEXP (base, 1));
4552 base = XEXP (base, 0);
4553
6354dc9b 4554 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4555 /* Valid range for lo is -4095 -> 4095 */
4556 lo = (offset >= 0
4557 ? (offset & 0xfff)
4558 : -((-offset) & 0xfff));
4559
4560 /* Corner case, if lo is the max offset then we would be out of range
4561 once we have added the additional 1 below, so bump the msb into the
4562 pre-loading insn(s). */
4563 if (lo == 4095)
4564 lo &= 0x7ff;
4565
e5951263
NC
4566 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4567 ^ HOST_INT (0x80000000))
5895f793 4568 - HOST_INT (0x80000000));
f9cc092a
RE
4569
4570 if (hi + lo != offset)
4571 abort ();
4572
4573 if (hi != 0)
4574 {
4575 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4576
4577 /* Be careful not to destroy OUTVAL. */
4578 if (reg_overlap_mentioned_p (base_plus, outval))
4579 {
4580 /* Updating base_plus might destroy outval, see if we
4581 can swap the scratch and base_plus. */
5895f793 4582 if (!reg_overlap_mentioned_p (scratch, outval))
f9cc092a
RE
4583 {
4584 rtx tmp = scratch;
4585 scratch = base_plus;
4586 base_plus = tmp;
4587 }
4588 else
4589 {
4590 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4591
4592 /* Be conservative and copy outval into scratch now,
4593 this should only be necessary if outval is a
4594 subreg of something larger than a word. */
4595 /* XXX Might this clobber base? I can't see how it
4596 can, since scratch is known to overlap with
4597 outval. */
4598 emit_insn (gen_movhi (scratch_hi, outval));
4599 outval = scratch_hi;
4600 }
4601 }
4602
4603 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4604 that require more than one insn. */
f9cc092a
RE
4605 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4606 base = base_plus;
4607 offset = lo;
4608 }
4609 }
af48348a 4610
b5cc037f
RE
4611 if (BYTES_BIG_ENDIAN)
4612 {
f9cc092a
RE
4613 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4614 plus_constant (base, offset + 1)),
4615 gen_rtx_SUBREG (QImode, outval, 0)));
4616 emit_insn (gen_lshrsi3 (scratch,
4617 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4618 GEN_INT (8)));
f9cc092a
RE
4619 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4620 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f
RE
4621 }
4622 else
4623 {
f9cc092a
RE
4624 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4625 gen_rtx_SUBREG (QImode, outval, 0)));
4626 emit_insn (gen_lshrsi3 (scratch,
4627 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4628 GEN_INT (8)));
f9cc092a
RE
4629 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4630 plus_constant (base, offset + 1)),
4631 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f 4632 }
af48348a 4633}
2b835d68 4634\f
d5b7b3ae
RE
4635/* Print a symbolic form of X to the debug file, F. */
4636static void
4637arm_print_value (f, x)
4638 FILE * f;
4639 rtx x;
4640{
4641 switch (GET_CODE (x))
4642 {
4643 case CONST_INT:
4644 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
4645 return;
4646
4647 case CONST_DOUBLE:
4648 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
4649 return;
4650
4651 case CONST_STRING:
4652 fprintf (f, "\"%s\"", XSTR (x, 0));
4653 return;
4654
4655 case SYMBOL_REF:
4656 fprintf (f, "`%s'", XSTR (x, 0));
4657 return;
4658
4659 case LABEL_REF:
4660 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
4661 return;
4662
4663 case CONST:
4664 arm_print_value (f, XEXP (x, 0));
4665 return;
4666
4667 case PLUS:
4668 arm_print_value (f, XEXP (x, 0));
4669 fprintf (f, "+");
4670 arm_print_value (f, XEXP (x, 1));
4671 return;
4672
4673 case PC:
4674 fprintf (f, "pc");
4675 return;
4676
4677 default:
4678 fprintf (f, "????");
4679 return;
4680 }
4681}
4682\f
2b835d68 4683/* Routines for manipulation of the constant pool. */
2b835d68 4684
949d79eb
RE
4685/* Arm instructions cannot load a large constant directly into a
4686 register; they have to come from a pc relative load. The constant
4687 must therefore be placed in the addressable range of the pc
4688 relative load. Depending on the precise pc relative load
4689 instruction the range is somewhere between 256 bytes and 4k. This
4690 means that we often have to dump a constant inside a function, and
2b835d68
RE
4691 generate code to branch around it.
4692
949d79eb
RE
4693 It is important to minimize this, since the branches will slow
4694 things down and make the code larger.
2b835d68 4695
949d79eb
RE
4696 Normally we can hide the table after an existing unconditional
4697 branch so that there is no interruption of the flow, but in the
4698 worst case the code looks like this:
2b835d68
RE
4699
4700 ldr rn, L1
949d79eb 4701 ...
2b835d68
RE
4702 b L2
4703 align
4704 L1: .long value
4705 L2:
949d79eb 4706 ...
2b835d68 4707
2b835d68 4708 ldr rn, L3
949d79eb 4709 ...
2b835d68
RE
4710 b L4
4711 align
2b835d68
RE
4712 L3: .long value
4713 L4:
949d79eb
RE
4714 ...
4715
4716 We fix this by performing a scan after scheduling, which notices
4717 which instructions need to have their operands fetched from the
4718 constant table and builds the table.
4719
4720 The algorithm starts by building a table of all the constants that
4721 need fixing up and all the natural barriers in the function (places
4722 where a constant table can be dropped without breaking the flow).
4723 For each fixup we note how far the pc-relative replacement will be
4724 able to reach and the offset of the instruction into the function.
4725
4726 Having built the table we then group the fixes together to form
4727 tables that are as large as possible (subject to addressing
4728 constraints) and emit each table of constants after the last
4729 barrier that is within range of all the instructions in the group.
4730 If a group does not contain a barrier, then we forcibly create one
4731 by inserting a jump instruction into the flow. Once the table has
4732 been inserted, the insns are then modified to reference the
4733 relevant entry in the pool.
4734
6354dc9b 4735 Possible enhancements to the algorithm (not implemented) are:
949d79eb 4736
d5b7b3ae 4737 1) For some processors and object formats, there may be benefit in
949d79eb
RE
4738 aligning the pools to the start of cache lines; this alignment
4739 would need to be taken into account when calculating addressability
6354dc9b 4740 of a pool. */
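/* Worked example (an editorial addition): suppose two SImode fixes
   with a 4k forward range sit at addresses 0x100 and 0x900.  Each
   could individually use any pool placed up to 0x100 + 4k and
   0x900 + 4k respectively, so grouping them restricts the pool to
   the intersection of those windows: it is emitted after the last
   barrier below 0x100 + 4k, or a branch-around barrier is
   manufactured if no natural one exists in that window. */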
2b835d68 4741
d5b7b3ae
RE
4742/* These typedefs are located at the start of this file, so that
4743 they can be used in the prototypes there. This comment is to
4744 remind readers of that fact so that the following structures
4745 can be understood more easily.
4746
4747 typedef struct minipool_node Mnode;
4748 typedef struct minipool_fixup Mfix; */
4749
4750struct minipool_node
4751{
4752 /* Doubly linked chain of entries. */
4753 Mnode * next;
4754 Mnode * prev;
4755 /* The maximum offset into the code that this entry can be placed. While
4756 pushing fixes for forward references, all entries are sorted in order
4757 of increasing max_address. */
4758 HOST_WIDE_INT max_address;
4759 /* Similarly for an entry inserted for a backwards ref. */
4760 HOST_WIDE_INT min_address;
4761 /* The number of fixes referencing this entry. This can become zero
4762 if we "unpush" an entry. In this case we ignore the entry when we
4763 come to emit the code. */
4764 int refcount;
4765 /* The offset from the start of the minipool. */
4766 HOST_WIDE_INT offset;
4767 /* The value in table. */
4768 rtx value;
4769 /* The mode of value. */
4770 enum machine_mode mode;
4771 int fix_size;
4772};
4773
4774struct minipool_fixup
2b835d68 4775{
d5b7b3ae
RE
4776 Mfix * next;
4777 rtx insn;
4778 HOST_WIDE_INT address;
4779 rtx * loc;
4780 enum machine_mode mode;
4781 int fix_size;
4782 rtx value;
4783 Mnode * minipool;
4784 HOST_WIDE_INT forwards;
4785 HOST_WIDE_INT backwards;
4786};
2b835d68 4787
d5b7b3ae
RE
4788/* Fixes less than a word need padding out to a word boundary. */
4789#define MINIPOOL_FIX_SIZE(mode) \
4790 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
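/* For example (an editorial addition): an HImode fix (2 bytes) is
   padded out to 4 bytes, while a DImode fix keeps its natural 8. */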
2b835d68 4791
d5b7b3ae
RE
4792static Mnode * minipool_vector_head;
4793static Mnode * minipool_vector_tail;
4794static rtx minipool_vector_label;
332072db 4795
d5b7b3ae
RE
4796/* The linked list of all minipool fixes required for this function. */
4797Mfix * minipool_fix_head;
4798Mfix * minipool_fix_tail;
4799/* The fix entry for the current minipool, once it has been placed. */
4800Mfix * minipool_barrier;
4801
4802/* Determines if INSN is the start of a jump table. Returns the end
4803 of the TABLE or NULL_RTX. */
4804static rtx
4805is_jump_table (insn)
4806 rtx insn;
2b835d68 4807{
d5b7b3ae 4808 rtx table;
da6558fd 4809
d5b7b3ae
RE
4810 if (GET_CODE (insn) == JUMP_INSN
4811 && JUMP_LABEL (insn) != NULL
4812 && ((table = next_real_insn (JUMP_LABEL (insn)))
4813 == next_real_insn (insn))
4814 && table != NULL
4815 && GET_CODE (table) == JUMP_INSN
4816 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4817 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
4818 return table;
4819
4820 return NULL_RTX;
2b835d68
RE
4821}
4822
d5b7b3ae
RE
4823static HOST_WIDE_INT
4824get_jump_table_size (insn)
4825 rtx insn;
2b835d68 4826{
d5b7b3ae
RE
4827 rtx body = PATTERN (insn);
4828 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 4829
d5b7b3ae
RE
4830 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
4831}
2b835d68 4832
d5b7b3ae
RE
4833/* Move a minipool fix MP from its current location to before MAX_MP.
4834 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
4835 constraints may need updating. */
4836static Mnode *
4837move_minipool_fix_forward_ref (mp, max_mp, max_address)
4838 Mnode * mp;
4839 Mnode * max_mp;
4840 HOST_WIDE_INT max_address;
4841{
4842 /* This should never be true and the code below assumes these are
4843 different. */
4844 if (mp == max_mp)
4845 abort ();
4846
4847 if (max_mp == NULL)
4848 {
4849 if (max_address < mp->max_address)
4850 mp->max_address = max_address;
4851 }
4852 else
2b835d68 4853 {
d5b7b3ae
RE
4854 if (max_address > max_mp->max_address - mp->fix_size)
4855 mp->max_address = max_mp->max_address - mp->fix_size;
4856 else
4857 mp->max_address = max_address;
2b835d68 4858
d5b7b3ae
RE
4859 /* Unlink MP from its current position. Since max_mp is non-null,
4860 mp->prev must be non-null. */
4861 mp->prev->next = mp->next;
4862 if (mp->next != NULL)
4863 mp->next->prev = mp->prev;
4864 else
4865 minipool_vector_tail = mp->prev;
2b835d68 4866
d5b7b3ae
RE
4867 /* Re-insert it before MAX_MP. */
4868 mp->next = max_mp;
4869 mp->prev = max_mp->prev;
4870 max_mp->prev = mp;
4871
4872 if (mp->prev != NULL)
4873 mp->prev->next = mp;
4874 else
4875 minipool_vector_head = mp;
4876 }
2b835d68 4877
d5b7b3ae
RE
4878 /* Save the new entry. */
4879 max_mp = mp;
4880
4881 /* Scan over the preceding entries and adjust their addresses as
4882 required. */
4883 while (mp->prev != NULL
4884 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4885 {
4886 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4887 mp = mp->prev;
2b835d68
RE
4888 }
4889
d5b7b3ae 4890 return max_mp;
2b835d68
RE
4891}
4892
d5b7b3ae
RE
4893/* Add a constant to the minipool for a forward reference. Returns the
4894 node added or NULL if the constant will not fit in this pool. */
4895static Mnode *
4896add_minipool_forward_ref (fix)
4897 Mfix * fix;
4898{
4899 /* If set, max_mp is the first pool_entry that has a lower
4900 constraint than the one we are trying to add. */
4901 Mnode * max_mp = NULL;
4902 HOST_WIDE_INT max_address = fix->address + fix->forwards;
4903 Mnode * mp;
4904
4905 /* If this fix's address is greater than the address of the first
4906 entry, then we can't put the fix in this pool. We subtract the
4907 size of the current fix to ensure that if the table is fully
4908 packed we still have enough room to insert this value by shuffling
4909 the other fixes forwards. */
4910 if (minipool_vector_head &&
4911 fix->address >= minipool_vector_head->max_address - fix->fix_size)
4912 return NULL;
2b835d68 4913
d5b7b3ae
RE
4914 /* Scan the pool to see if a constant with the same value has
4915 already been added. While we are doing this, also note the
4916 location where we must insert the constant if it doesn't already
4917 exist. */
4918 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4919 {
4920 if (GET_CODE (fix->value) == GET_CODE (mp->value)
4921 && fix->mode == mp->mode
4922 && (GET_CODE (fix->value) != CODE_LABEL
4923 || (CODE_LABEL_NUMBER (fix->value)
4924 == CODE_LABEL_NUMBER (mp->value)))
4925 && rtx_equal_p (fix->value, mp->value))
4926 {
4927 /* More than one fix references this entry. */
4928 mp->refcount++;
4929 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
4930 }
4931
4932 /* Note the insertion point if necessary. */
4933 if (max_mp == NULL
4934 && mp->max_address > max_address)
4935 max_mp = mp;
4936 }
4937
4938 /* The value is not currently in the minipool, so we need to create
4939 a new entry for it. If MAX_MP is NULL, the entry will be put on
4940 the end of the list since the placement is less constrained than
4941 any existing entry. Otherwise, we insert the new fix before
4942 MAX_MP and, if necessary, adjust the constraints on the other
4943 entries. */
4944 mp = xmalloc (sizeof (* mp));
4945 mp->fix_size = fix->fix_size;
4946 mp->mode = fix->mode;
4947 mp->value = fix->value;
4948 mp->refcount = 1;
4949 /* Not yet required for a backwards ref. */
4950 mp->min_address = -65536;
4951
4952 if (max_mp == NULL)
4953 {
4954 mp->max_address = max_address;
4955 mp->next = NULL;
4956 mp->prev = minipool_vector_tail;
4957
4958 if (mp->prev == NULL)
4959 {
4960 minipool_vector_head = mp;
4961 minipool_vector_label = gen_label_rtx ();
7551cbc7 4962 }
2b835d68 4963 else
d5b7b3ae 4964 mp->prev->next = mp;
2b835d68 4965
d5b7b3ae
RE
4966 minipool_vector_tail = mp;
4967 }
4968 else
4969 {
4970 if (max_address > max_mp->max_address - mp->fix_size)
4971 mp->max_address = max_mp->max_address - mp->fix_size;
4972 else
4973 mp->max_address = max_address;
4974
4975 mp->next = max_mp;
4976 mp->prev = max_mp->prev;
4977 max_mp->prev = mp;
4978 if (mp->prev != NULL)
4979 mp->prev->next = mp;
4980 else
4981 minipool_vector_head = mp;
4982 }
4983
4984 /* Save the new entry. */
4985 max_mp = mp;
4986
4987 /* Scan over the preceding entries and adjust their addresses as
4988 required. */
4989 while (mp->prev != NULL
4990 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4991 {
4992 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4993 mp = mp->prev;
2b835d68
RE
4994 }
4995
d5b7b3ae
RE
4996 return max_mp;
4997}
4998
4999static Mnode *
5000move_minipool_fix_backward_ref (mp, min_mp, min_address)
5001 Mnode * mp;
5002 Mnode * min_mp;
5003 HOST_WIDE_INT min_address;
5004{
5005 HOST_WIDE_INT offset;
5006
5007 /* This should never be true, and the code below assumes these are
5008 different. */
5009 if (mp == min_mp)
5010 abort ();
5011
5012 if (min_mp == NULL)
2b835d68 5013 {
d5b7b3ae
RE
5014 if (min_address > mp->min_address)
5015 mp->min_address = min_address;
5016 }
5017 else
5018 {
5019 /* We will adjust this below if it is too loose. */
5020 mp->min_address = min_address;
5021
5022 /* Unlink MP from its current position. Since min_mp is non-null,
5023 mp->next must be non-null. */
5024 mp->next->prev = mp->prev;
5025 if (mp->prev != NULL)
5026 mp->prev->next = mp->next;
5027 else
5028 minipool_vector_head = mp->next;
5029
5030 /* Reinsert it after MIN_MP. */
5031 mp->prev = min_mp;
5032 mp->next = min_mp->next;
5033 min_mp->next = mp;
5034 if (mp->next != NULL)
5035 mp->next->prev = mp;
2b835d68 5036 else
d5b7b3ae
RE
5037 minipool_vector_tail = mp;
5038 }
5039
5040 min_mp = mp;
5041
5042 offset = 0;
5043 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5044 {
5045 mp->offset = offset;
5046 if (mp->refcount > 0)
5047 offset += mp->fix_size;
5048
5049 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5050 mp->next->min_address = mp->min_address + mp->fix_size;
5051 }
5052
5053 return min_mp;
5054}
5055
5056/* Add a constant to the minipool for a backward reference. Returns the
5057 node added or NULL if the constant will not fit in this pool.
5058
5059 Note that the code for insertion for a backwards reference can be
5060 somewhat confusing because the calculated offsets for each fix do
5061 not take into account the size of the pool (which is still under
5062 construction). */
5063static Mnode *
5064add_minipool_backward_ref (fix)
5065 Mfix * fix;
5066{
5067 /* If set, min_mp is the last pool_entry that has a lower constraint
5068 than the one we are trying to add. */
5069 Mnode * min_mp = NULL;
5070 /* This can be negative, since it is only a constraint. */
5071 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5072 Mnode * mp;
5073
5074 /* If we can't reach the current pool from this insn, or if we can't
5075 insert this entry at the end of the pool without pushing other
5076 fixes out of range, then we don't try. This ensures that we
5077 can't fail later on. */
5078 if (min_address >= minipool_barrier->address
5079 || (minipool_vector_tail->min_address + fix->fix_size
5080 >= minipool_barrier->address))
5081 return NULL;
5082
5083 /* Scan the pool to see if a constant with the same value has
5084 already been added. While we are doing this, also note the
5085 location where we must insert the constant if it doesn't already
5086 exist. */
5087 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5088 {
5089 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5090 && fix->mode == mp->mode
5091 && (GET_CODE (fix->value) != CODE_LABEL
5092 || (CODE_LABEL_NUMBER (fix->value)
5093 == CODE_LABEL_NUMBER (mp->value)))
5094 && rtx_equal_p (fix->value, mp->value)
5095 /* Check that there is enough slack to move this entry to the
5096 end of the table (this is conservative). */
5097 && (mp->max_address
5098 > (minipool_barrier->address
5099 + minipool_vector_tail->offset
5100 + minipool_vector_tail->fix_size)))
5101 {
5102 mp->refcount++;
5103 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5104 }
5105
5106 if (min_mp != NULL)
5107 mp->min_address += fix->fix_size;
5108 else
5109 {
5110 /* Note the insertion point if necessary. */
5111 if (mp->min_address < min_address)
5112 min_mp = mp;
5113 else if (mp->max_address
5114 < minipool_barrier->address + mp->offset + fix->fix_size)
5115 {
5116 /* Inserting before this entry would push the fix beyond
5117 its maximum address (which can happen if we have
5118 re-located a forwards fix); force the new fix to come
5119 after it. */
5120 min_mp = mp;
5121 min_address = mp->min_address + fix->fix_size;
5122 }
5123 }
5124 }
5125
5126 /* We need to create a new entry. */
5127 mp = xmalloc (sizeof (* mp));
5128 mp->fix_size = fix->fix_size;
5129 mp->mode = fix->mode;
5130 mp->value = fix->value;
5131 mp->refcount = 1;
5132 mp->max_address = minipool_barrier->address + 65536;
5133
5134 mp->min_address = min_address;
5135
5136 if (min_mp == NULL)
5137 {
5138 mp->prev = NULL;
5139 mp->next = minipool_vector_head;
5140
5141 if (mp->next == NULL)
5142 {
5143 minipool_vector_tail = mp;
5144 minipool_vector_label = gen_label_rtx ();
5145 }
5146 else
5147 mp->next->prev = mp;
5148
5149 minipool_vector_head = mp;
5150 }
5151 else
5152 {
5153 mp->next = min_mp->next;
5154 mp->prev = min_mp;
5155 min_mp->next = mp;
da6558fd 5156
d5b7b3ae
RE
5157 if (mp->next != NULL)
5158 mp->next->prev = mp;
5159 else
5160 minipool_vector_tail = mp;
5161 }
5162
5163 /* Save the new entry. */
5164 min_mp = mp;
5165
5166 if (mp->prev)
5167 mp = mp->prev;
5168 else
5169 mp->offset = 0;
5170
5171 /* Scan over the following entries and adjust their offsets. */
5172 while (mp->next != NULL)
5173 {
5174 if (mp->next->min_address < mp->min_address + mp->fix_size)
5175 mp->next->min_address = mp->min_address + mp->fix_size;
5176
5177 if (mp->refcount)
5178 mp->next->offset = mp->offset + mp->fix_size;
5179 else
5180 mp->next->offset = mp->offset;
5181
5182 mp = mp->next;
5183 }
5184
5185 return min_mp;
5186}
5187
5188static void
5189assign_minipool_offsets (barrier)
5190 Mfix * barrier;
5191{
5192 HOST_WIDE_INT offset = 0;
5193 Mnode * mp;
5194
5195 minipool_barrier = barrier;
5196
5197 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5198 {
5199 mp->offset = offset;
da6558fd 5200
d5b7b3ae
RE
5201 if (mp->refcount > 0)
5202 offset += mp->fix_size;
5203 }
5204}
5205
5206/* Output the literal table. */
5207static void
5208dump_minipool (scan)
5209 rtx scan;
5210{
5211 Mnode * mp;
5212 Mnode * nmp;
5213
5214 if (rtl_dump_file)
5215 fprintf (rtl_dump_file,
5216 ";; Emitting minipool after insn %u; address %ld\n",
5217 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5218
5219 scan = emit_label_after (gen_label_rtx (), scan);
5220 scan = emit_insn_after (gen_align_4 (), scan);
5221 scan = emit_label_after (minipool_vector_label, scan);
5222
5223 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5224 {
5225 if (mp->refcount > 0)
5226 {
5227 if (rtl_dump_file)
5228 {
5229 fprintf (rtl_dump_file,
5230 ";; Offset %u, min %ld, max %ld ",
5231 (unsigned) mp->offset, (unsigned long) mp->min_address,
5232 (unsigned long) mp->max_address);
5233 arm_print_value (rtl_dump_file, mp->value);
5234 fputc ('\n', rtl_dump_file);
5235 }
5236
5237 switch (mp->fix_size)
5238 {
5239#ifdef HAVE_consttable_1
5240 case 1:
5241 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5242 break;
5243
5244#endif
5245#ifdef HAVE_consttable_2
5246 case 2:
5247 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5248 break;
5249
5250#endif
5251#ifdef HAVE_consttable_4
5252 case 4:
5253 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5254 break;
5255
5256#endif
5257#ifdef HAVE_consttable_8
5258 case 8:
5259 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5260 break;
5261
5262#endif
5263 default:
5264 abort ();
5265 break;
5266 }
5267 }
5268
5269 nmp = mp->next;
5270 free (mp);
2b835d68
RE
5271 }
5272
d5b7b3ae
RE
5273 minipool_vector_head = minipool_vector_tail = NULL;
5274 scan = emit_insn_after (gen_consttable_end (), scan);
5275 scan = emit_barrier_after (scan);
2b835d68
RE
5276}
5277
d5b7b3ae
RE
5278/* Return the cost of forcibly inserting a barrier after INSN. */
5279static int
5280arm_barrier_cost (insn)
5281 rtx insn;
949d79eb 5282{
d5b7b3ae
RE
5283 /* Basing the location of the pool on the loop depth is preferable,
5284 but at the moment, the basic block information seems to be
5285 corrupted by this stage of the compilation. */
5286 int base_cost = 50;
5287 rtx next = next_nonnote_insn (insn);
5288
5289 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5290 base_cost -= 20;
5291
5292 switch (GET_CODE (insn))
5293 {
5294 case CODE_LABEL:
5295 /* It will always be better to place the table before the label, rather
5296 than after it. */
5297 return 50;
949d79eb 5298
d5b7b3ae
RE
5299 case INSN:
5300 case CALL_INSN:
5301 return base_cost;
5302
5303 case JUMP_INSN:
5304 return base_cost - 10;
5305
5306 default:
5307 return base_cost + 10;
5308 }
5309}
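/* Illustrative note (an editorial addition): the cheapest place this
   heuristic can find is a JUMP_INSN immediately followed by a
   CODE_LABEL (50 - 20 - 10 = 20); a CODE_LABEL itself always costs
   50, implementing the preference for placing the table before a
   label rather than after it. */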
5310
5311/* Find the best place in the insn stream in the range
5312 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5313 Create the barrier by inserting a jump and add a new fix entry for
5314 it. */
5315static Mfix *
5316create_fix_barrier (fix, max_address)
5317 Mfix * fix;
5318 HOST_WIDE_INT max_address;
5319{
5320 HOST_WIDE_INT count = 0;
5321 rtx barrier;
5322 rtx from = fix->insn;
5323 rtx selected = from;
5324 int selected_cost;
5325 HOST_WIDE_INT selected_address;
5326 Mfix * new_fix;
5327 HOST_WIDE_INT max_count = max_address - fix->address;
5328 rtx label = gen_label_rtx ();
5329
5330 selected_cost = arm_barrier_cost (from);
5331 selected_address = fix->address;
5332
5333 while (from && count < max_count)
5334 {
5335 rtx tmp;
5336 int new_cost;
5337
5338 /* This code shouldn't have been called if there was a natural barrier
5339 within range. */
5340 if (GET_CODE (from) == BARRIER)
5341 abort ();
5342
5343 /* Count the length of this insn. */
5344 count += get_attr_length (from);
5345
5346 /* If there is a jump table, add its length. */
5347 tmp = is_jump_table (from);
5348 if (tmp != NULL)
5349 {
5350 count += get_jump_table_size (tmp);
5351
5352 /* Jump tables aren't in a basic block, so base the cost on
5353 the dispatch insn. If we select this location, we will
5354 still put the pool after the table. */
5355 new_cost = arm_barrier_cost (from);
5356
5357 if (count < max_count && new_cost <= selected_cost)
5358 {
5359 selected = tmp;
5360 selected_cost = new_cost;
5361 selected_address = fix->address + count;
5362 }
5363
5364 /* Continue after the dispatch table. */
5365 from = NEXT_INSN (tmp);
5366 continue;
5367 }
5368
5369 new_cost = arm_barrier_cost (from);
5370
5371 if (count < max_count && new_cost <= selected_cost)
5372 {
5373 selected = from;
5374 selected_cost = new_cost;
5375 selected_address = fix->address + count;
5376 }
5377
5378 from = NEXT_INSN (from);
5379 }
5380
5381 /* Create a new JUMP_INSN that branches around a barrier. */
5382 from = emit_jump_insn_after (gen_jump (label), selected);
5383 JUMP_LABEL (from) = label;
5384 barrier = emit_barrier_after (from);
5385 emit_label_after (label, barrier);
5386
5387 /* Create a minipool barrier entry for the new barrier. */
c7319d87 5388 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
d5b7b3ae
RE
5389 new_fix->insn = barrier;
5390 new_fix->address = selected_address;
5391 new_fix->next = fix->next;
5392 fix->next = new_fix;
5393
5394 return new_fix;
5395}
5396
5397/* Record that there is a natural barrier in the insn stream at
5398 ADDRESS. */
949d79eb
RE
5399static void
5400push_minipool_barrier (insn, address)
2b835d68 5401 rtx insn;
d5b7b3ae 5402 HOST_WIDE_INT address;
2b835d68 5403{
c7319d87 5404 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 5405
949d79eb
RE
5406 fix->insn = insn;
5407 fix->address = address;
2b835d68 5408
949d79eb
RE
5409 fix->next = NULL;
5410 if (minipool_fix_head != NULL)
5411 minipool_fix_tail->next = fix;
5412 else
5413 minipool_fix_head = fix;
5414
5415 minipool_fix_tail = fix;
5416}
2b835d68 5417
d5b7b3ae
RE
5418/* Record INSN, which will need fixing up to load a value from the
5419 minipool. ADDRESS is the offset of the insn since the start of the
5420 function; LOC is a pointer to the part of the insn which requires
5421 fixing; VALUE is the constant that must be loaded, which is of type
5422 MODE. */
949d79eb
RE
5423static void
5424push_minipool_fix (insn, address, loc, mode, value)
5425 rtx insn;
d5b7b3ae
RE
5426 HOST_WIDE_INT address;
5427 rtx * loc;
949d79eb
RE
5428 enum machine_mode mode;
5429 rtx value;
5430{
c7319d87 5431 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
949d79eb
RE
5432
5433#ifdef AOF_ASSEMBLER
5434 /* PIC symbol refereneces need to be converted into offsets into the
5435 based area. */
d5b7b3ae
RE
5436 /* XXX This shouldn't be done here. */
5437 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
949d79eb
RE
5438 value = aof_pic_entry (value);
5439#endif /* AOF_ASSEMBLER */
5440
5441 fix->insn = insn;
5442 fix->address = address;
5443 fix->loc = loc;
5444 fix->mode = mode;
d5b7b3ae 5445 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 5446 fix->value = value;
d5b7b3ae
RE
5447 fix->forwards = get_attr_pool_range (insn);
5448 fix->backwards = get_attr_neg_pool_range (insn);
5449 fix->minipool = NULL;
949d79eb
RE
5450
5451 /* If an insn doesn't have a range defined for it, then it isn't
5452 expecting to be reworked by this code. Better to abort now than
5453 to generate duff assembly code. */
d5b7b3ae 5454 if (fix->forwards == 0 && fix->backwards == 0)
949d79eb
RE
5455 abort ();
5456
d5b7b3ae
RE
5457 if (rtl_dump_file)
5458 {
5459 fprintf (rtl_dump_file,
5460 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5461 GET_MODE_NAME (mode),
5462 INSN_UID (insn), (unsigned long) address,
5463 -1 * (long)fix->backwards, (long)fix->forwards);
5464 arm_print_value (rtl_dump_file, fix->value);
5465 fprintf (rtl_dump_file, "\n");
5466 }
5467
6354dc9b 5468 /* Add it to the chain of fixes. */
949d79eb 5469 fix->next = NULL;
d5b7b3ae 5470
949d79eb
RE
5471 if (minipool_fix_head != NULL)
5472 minipool_fix_tail->next = fix;
5473 else
5474 minipool_fix_head = fix;
5475
5476 minipool_fix_tail = fix;
5477}
5478
d5b7b3ae 5479/* Scan INSN and note any of its operands that need fixing. */
949d79eb
RE
5480static void
5481note_invalid_constants (insn, address)
5482 rtx insn;
d5b7b3ae 5483 HOST_WIDE_INT address;
949d79eb
RE
5484{
5485 int opno;
5486
d5b7b3ae 5487 extract_insn (insn);
949d79eb 5488
5895f793 5489 if (!constrain_operands (1))
949d79eb
RE
5490 fatal_insn_not_found (insn);
5491
d5b7b3ae
RE
5492 /* Fill in recog_op_alt with information about the constraints of this
5493 insn. */
949d79eb
RE
5494 preprocess_constraints ();
5495
1ccbefce 5496 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 5497 {
6354dc9b 5498 /* Things we need to fix can only occur in inputs. */
36ab44c7 5499 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
5500 continue;
5501
5502 /* If this alternative is a memory reference, then any mention
5503 of constants in this alternative is really to fool reload
5504 into allowing us to accept one there. We need to fix them up
5505 now so that we output the right code. */
5506 if (recog_op_alt[opno][which_alternative].memory_ok)
5507 {
1ccbefce 5508 rtx op = recog_data.operand[opno];
949d79eb
RE
5509
5510 if (CONSTANT_P (op))
1ccbefce
RH
5511 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5512 recog_data.operand_mode[opno], op);
d5b7b3ae
RE
5513#if 0
5514 /* RWE: Now we look correctly at the operands for the insn,
5515 this shouldn't be needed any more. */
949d79eb 5516#ifndef AOF_ASSEMBLER
d5b7b3ae 5517 /* XXX Is this still needed? */
949d79eb 5518 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
1ccbefce
RH
5519 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5520 recog_data.operand_mode[opno],
5521 XVECEXP (op, 0, 0));
949d79eb 5522#endif
d5b7b3ae
RE
5523#endif
5524 else if (GET_CODE (op) == MEM
949d79eb
RE
5525 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5526 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
1ccbefce
RH
5527 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5528 recog_data.operand_mode[opno],
949d79eb
RE
5529 get_pool_constant (XEXP (op, 0)));
5530 }
2b835d68 5531 }
2b835d68
RE
5532}
5533
5534void
5535arm_reorg (first)
5536 rtx first;
5537{
5538 rtx insn;
d5b7b3ae
RE
5539 HOST_WIDE_INT address = 0;
5540 Mfix * fix;
ad076f4e 5541
949d79eb 5542 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 5543
949d79eb
RE
5544 /* The first insn must always be a note, or the code below won't
5545 scan it properly. */
5546 if (GET_CODE (first) != NOTE)
5547 abort ();
5548
5549 /* Scan all the insns and record the operands that will need fixing. */
5550 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 5551 {
2b835d68 5552
949d79eb 5553 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 5554 push_minipool_barrier (insn, address);
949d79eb
RE
5555 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5556 || GET_CODE (insn) == JUMP_INSN)
5557 {
5558 rtx table;
5559
5560 note_invalid_constants (insn, address);
5561 address += get_attr_length (insn);
d5b7b3ae 5562
949d79eb
RE
5563 /* If the insn is a vector jump, add the size of the table
5564 and skip the table. */
d5b7b3ae 5565 if ((table = is_jump_table (insn)) != NULL)
2b835d68 5566 {
d5b7b3ae 5567 address += get_jump_table_size (table);
949d79eb
RE
5568 insn = table;
5569 }
5570 }
5571 }
332072db 5572
d5b7b3ae
RE
5573 fix = minipool_fix_head;
5574
949d79eb 5575 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 5576 while (fix)
949d79eb 5577 {
d5b7b3ae
RE
5578 Mfix * ftmp;
5579 Mfix * fdel;
5580 Mfix * last_added_fix;
5581 Mfix * last_barrier = NULL;
5582 Mfix * this_fix;
949d79eb
RE
5583
5584 /* Skip any further barriers before the next fix. */
5585 while (fix && GET_CODE (fix->insn) == BARRIER)
5586 fix = fix->next;
5587
d5b7b3ae 5588 /* No more fixes. */
949d79eb
RE
5589 if (fix == NULL)
5590 break;
332072db 5591
d5b7b3ae 5592 last_added_fix = NULL;
2b835d68 5593
d5b7b3ae 5594 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 5595 {
949d79eb 5596 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 5597 {
d5b7b3ae
RE
5598 if (ftmp->address >= minipool_vector_head->max_address)
5599 break;
2b835d68 5600
d5b7b3ae 5601 last_barrier = ftmp;
2b835d68 5602 }
d5b7b3ae
RE
5603 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5604 break;
5605
5606 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 5607 }
949d79eb 5608
d5b7b3ae
RE
5609 /* If we found a barrier, drop back to that; any fixes that we
5610 could have reached but come after the barrier will now go in
5611 the next mini-pool. */
949d79eb
RE
5612 if (last_barrier != NULL)
5613 {
d5b7b3ae
RE
5614 /* Reduce the refcount for those fixes that won't go into this
5615 pool after all. */
5616 for (fdel = last_barrier->next;
5617 fdel && fdel != ftmp;
5618 fdel = fdel->next)
5619 {
5620 fdel->minipool->refcount--;
5621 fdel->minipool = NULL;
5622 }
5623
949d79eb
RE
5624 ftmp = last_barrier;
5625 }
5626 else
2bfa88dc 5627 {
d5b7b3ae
RE
5628 /* ftmp is first fix that we can't fit into this pool and
5629 there no natural barriers that we could use. Insert a
5630 new barrier in the code somewhere between the previous
5631 fix and this one, and arrange to jump around it. */
5632 HOST_WIDE_INT max_address;
5633
5634 /* The last item on the list of fixes must be a barrier, so
5635 we can never run off the end of the list of fixes without
5636 last_barrier being set. */
5637 if (ftmp == NULL)
5638 abort ();
5639
5640 max_address = minipool_vector_head->max_address;
2bfa88dc
RE
5641 /* Check that there isn't another fix that is in range that
5642 we couldn't fit into this pool because the pool was
5643 already too large: we need to put the pool before such an
5644 instruction. */
d5b7b3ae
RE
5645 if (ftmp->address < max_address)
5646 max_address = ftmp->address;
5647
5648 last_barrier = create_fix_barrier (last_added_fix, max_address);
5649 }
5650
5651 assign_minipool_offsets (last_barrier);
5652
5653 while (ftmp)
5654 {
5655 if (GET_CODE (ftmp->insn) != BARRIER
5656 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
5657 == NULL))
5658 break;
2bfa88dc 5659
d5b7b3ae 5660 ftmp = ftmp->next;
2bfa88dc 5661 }
949d79eb
RE
5662
5663 /* Scan over the fixes we have identified for this pool, fixing them
5664 up and adding the constants to the pool itself. */
d5b7b3ae 5665 for (this_fix = fix; this_fix && ftmp != this_fix;
949d79eb
RE
5666 this_fix = this_fix->next)
5667 if (GET_CODE (this_fix->insn) != BARRIER)
5668 {
949d79eb
RE
5669 rtx addr
5670 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
5671 minipool_vector_label),
d5b7b3ae 5672 this_fix->minipool->offset);
949d79eb
RE
5673 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
5674 }
5675
d5b7b3ae 5676 dump_minipool (last_barrier->insn);
949d79eb 5677 fix = ftmp;
2b835d68 5678 }
4b632bf1 5679
949d79eb
RE
5680 /* From now on we must synthesize any constants that we can't handle
5681 directly. This can happen if the RTL gets split during final
5682 instruction generation. */
4b632bf1 5683 after_arm_reorg = 1;
c7319d87
RE
5684
5685 /* Free the minipool memory. */
5686 obstack_free (&minipool_obstack, minipool_startobj);
2b835d68 5687}
cce8749e
CH
5688\f
5689/* Routines to output assembly language. */
5690
f3bb6135 5691/* If the rtx is the correct value then return the string of the number.
ff9940b0 5692 In this way we can ensure that valid double constants are generated even
6354dc9b 5693 when cross compiling. */
cd2b33d0 5694const char *
ff9940b0 5695fp_immediate_constant (x)
b5cc037f 5696 rtx x;
ff9940b0
RE
5697{
5698 REAL_VALUE_TYPE r;
5699 int i;
5700
5701 if (!fpa_consts_inited)
5702 init_fpa_table ();
5703
5704 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5705 for (i = 0; i < 8; i++)
5706 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
5707 return strings_fpa[i];
f3bb6135 5708
ff9940b0
RE
5709 abort ();
5710}
5711
9997d19d 5712/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 5713static const char *
9997d19d 5714fp_const_from_val (r)
62b10bbc 5715 REAL_VALUE_TYPE * r;
9997d19d
RE
5716{
5717 int i;
5718
5895f793 5719 if (!fpa_consts_inited)
9997d19d
RE
5720 init_fpa_table ();
5721
5722 for (i = 0; i < 8; i++)
5723 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
5724 return strings_fpa[i];
5725
5726 abort ();
5727}
ff9940b0 5728
cce8749e
CH
5729/* Output the operands of a LDM/STM instruction to STREAM.
5730 MASK is the ARM register set mask of which only bits 0-15 are important.
5731 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
5732 must follow the register list. */
5733
d5b7b3ae 5734static void
dd18ae56 5735print_multi_reg (stream, instr, reg, mask, hat)
62b10bbc 5736 FILE * stream;
cd2b33d0 5737 const char * instr;
dd18ae56
NC
5738 int reg;
5739 int mask;
5740 int hat;
cce8749e
CH
5741{
5742 int i;
5743 int not_first = FALSE;
5744
1d5473cb 5745 fputc ('\t', stream);
dd18ae56 5746 asm_fprintf (stream, instr, reg);
1d5473cb 5747 fputs (", {", stream);
62b10bbc 5748
d5b7b3ae 5749 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
5750 if (mask & (1 << i))
5751 {
5752 if (not_first)
5753 fprintf (stream, ", ");
62b10bbc 5754
dd18ae56 5755 asm_fprintf (stream, "%r", i);
cce8749e
CH
5756 not_first = TRUE;
5757 }
f3bb6135 5758
cce8749e 5759 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 5760}
cce8749e 5761
6354dc9b 5762/* Output a 'call' insn. */
cce8749e 5763
cd2b33d0 5764const char *
cce8749e 5765output_call (operands)
62b10bbc 5766 rtx * operands;
cce8749e 5767{
6354dc9b 5768 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 5769
62b10bbc 5770 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 5771 {
62b10bbc 5772 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 5773 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 5774 }
62b10bbc 5775
1d5473cb 5776 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 5777
6cfc7210 5778 if (TARGET_INTERWORK)
da6558fd
NC
5779 output_asm_insn ("bx%?\t%0", operands);
5780 else
5781 output_asm_insn ("mov%?\t%|pc, %0", operands);
5782
f3bb6135
RE
5783 return "";
5784}
cce8749e 5785
ff9940b0
RE
5786static int
5787eliminate_lr2ip (x)
62b10bbc 5788 rtx * x;
ff9940b0
RE
5789{
5790 int something_changed = 0;
62b10bbc 5791 rtx x0 = * x;
ff9940b0
RE
5792 int code = GET_CODE (x0);
5793 register int i, j;
6f7d635c 5794 register const char * fmt;
ff9940b0
RE
5795
5796 switch (code)
5797 {
5798 case REG:
62b10bbc 5799 if (REGNO (x0) == LR_REGNUM)
ff9940b0 5800 {
62b10bbc 5801 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
5802 return 1;
5803 }
5804 return 0;
5805 default:
6354dc9b 5806 /* Scan through the sub-elements and change any references there. */
ff9940b0 5807 fmt = GET_RTX_FORMAT (code);
62b10bbc 5808
ff9940b0
RE
5809 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5810 if (fmt[i] == 'e')
5811 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
5812 else if (fmt[i] == 'E')
5813 for (j = 0; j < XVECLEN (x0, i); j++)
5814 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 5815
ff9940b0
RE
5816 return something_changed;
5817 }
5818}
5819
6354dc9b 5820/* Output a 'call' insn that is a reference in memory. */
ff9940b0 5821
cd2b33d0 5822const char *
ff9940b0 5823output_call_mem (operands)
62b10bbc 5824 rtx * operands;
ff9940b0 5825{
6354dc9b
NC
5826 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
5827 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
ff9940b0 5828 if (eliminate_lr2ip (&operands[0]))
1d5473cb 5829 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 5830
6cfc7210 5831 if (TARGET_INTERWORK)
da6558fd
NC
5832 {
5833 output_asm_insn ("ldr%?\t%|ip, %0", operands);
5834 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5835 output_asm_insn ("bx%?\t%|ip", operands);
5836 }
5837 else
5838 {
5839 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5840 output_asm_insn ("ldr%?\t%|pc, %0", operands);
5841 }
5842
f3bb6135
RE
5843 return "";
5844}
ff9940b0
RE
5845
5846
5847/* Output a move from arm registers to an fpu registers.
5848 OPERANDS[0] is an fpu register.
5849 OPERANDS[1] is the first registers of an arm register pair. */
5850
cd2b33d0 5851const char *
ff9940b0 5852output_mov_long_double_fpu_from_arm (operands)
62b10bbc 5853 rtx * operands;
ff9940b0
RE
5854{
5855 int arm_reg0 = REGNO (operands[1]);
5856 rtx ops[3];
5857
62b10bbc
NC
5858 if (arm_reg0 == IP_REGNUM)
5859 abort ();
f3bb6135 5860
43cffd11
RE
5861 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5862 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5863 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 5864
1d5473cb
RE
5865 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
5866 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 5867
f3bb6135
RE
5868 return "";
5869}
ff9940b0
RE
5870
5871/* Output a move from an fpu register to arm registers.
5872 OPERANDS[0] is the first registers of an arm register pair.
5873 OPERANDS[1] is an fpu register. */
5874
cd2b33d0 5875const char *
ff9940b0 5876output_mov_long_double_arm_from_fpu (operands)
62b10bbc 5877 rtx * operands;
ff9940b0
RE
5878{
5879 int arm_reg0 = REGNO (operands[0]);
5880 rtx ops[3];
5881
62b10bbc
NC
5882 if (arm_reg0 == IP_REGNUM)
5883 abort ();
f3bb6135 5884
43cffd11
RE
5885 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5886 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5887 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 5888
1d5473cb
RE
5889 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
5890 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
5891 return "";
5892}
ff9940b0
RE
5893
5894/* Output a move from arm registers to arm registers of a long double
5895 OPERANDS[0] is the destination.
5896 OPERANDS[1] is the source. */
cd2b33d0 5897const char *
ff9940b0 5898output_mov_long_double_arm_from_arm (operands)
62b10bbc 5899 rtx * operands;
ff9940b0 5900{
6354dc9b 5901 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
5902 int dest_start = REGNO (operands[0]);
5903 int src_start = REGNO (operands[1]);
5904 rtx ops[2];
5905 int i;
5906
5907 if (dest_start < src_start)
5908 {
5909 for (i = 0; i < 3; i++)
5910 {
43cffd11
RE
5911 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5912 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5913 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5914 }
5915 }
5916 else
5917 {
5918 for (i = 2; i >= 0; i--)
5919 {
43cffd11
RE
5920 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5921 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5922 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5923 }
5924 }
f3bb6135 5925
ff9940b0
RE
5926 return "";
5927}
5928
5929
cce8749e
CH
5930/* Output a move from arm registers to an fpu registers.
5931 OPERANDS[0] is an fpu register.
5932 OPERANDS[1] is the first registers of an arm register pair. */
5933
cd2b33d0 5934const char *
cce8749e 5935output_mov_double_fpu_from_arm (operands)
62b10bbc 5936 rtx * operands;
cce8749e
CH
5937{
5938 int arm_reg0 = REGNO (operands[1]);
5939 rtx ops[2];
5940
62b10bbc
NC
5941 if (arm_reg0 == IP_REGNUM)
5942 abort ();
5943
43cffd11
RE
5944 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5945 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5946 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
5947 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
5948 return "";
5949}
cce8749e
CH
5950
5951/* Output a move from an fpu register to arm registers.
5952 OPERANDS[0] is the first registers of an arm register pair.
5953 OPERANDS[1] is an fpu register. */
5954
cd2b33d0 5955const char *
cce8749e 5956output_mov_double_arm_from_fpu (operands)
62b10bbc 5957 rtx * operands;
cce8749e
CH
5958{
5959 int arm_reg0 = REGNO (operands[0]);
5960 rtx ops[2];
5961
62b10bbc
NC
5962 if (arm_reg0 == IP_REGNUM)
5963 abort ();
f3bb6135 5964
43cffd11
RE
5965 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5966 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5967 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
5968 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
5969 return "";
5970}
cce8749e
CH
5971
5972/* Output a move between double words.
5973 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
5974 or MEM<-REG and all MEMs must be offsettable addresses. */
5975
cd2b33d0 5976const char *
cce8749e 5977output_move_double (operands)
aec3cfba 5978 rtx * operands;
cce8749e
CH
5979{
5980 enum rtx_code code0 = GET_CODE (operands[0]);
5981 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 5982 rtx otherops[3];
cce8749e
CH
5983
5984 if (code0 == REG)
5985 {
5986 int reg0 = REGNO (operands[0]);
5987
43cffd11 5988 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 5989
cce8749e
CH
5990 if (code1 == REG)
5991 {
5992 int reg1 = REGNO (operands[1]);
62b10bbc
NC
5993 if (reg1 == IP_REGNUM)
5994 abort ();
f3bb6135 5995
6354dc9b 5996 /* Ensure the second source is not overwritten. */
c1c2bc04 5997 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 5998 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 5999 else
6cfc7210 6000 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
6001 }
6002 else if (code1 == CONST_DOUBLE)
6003 {
226a5051
RE
6004 if (GET_MODE (operands[1]) == DFmode)
6005 {
6006 long l[2];
6007 union real_extract u;
6008
6009 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
6010 sizeof (u));
6011 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
d5b7b3ae
RE
6012 otherops[1] = GEN_INT (l[1]);
6013 operands[1] = GEN_INT (l[0]);
226a5051 6014 }
c1c2bc04
RE
6015 else if (GET_MODE (operands[1]) != VOIDmode)
6016 abort ();
6017 else if (WORDS_BIG_ENDIAN)
6018 {
6019
6020 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6021 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6022 }
226a5051
RE
6023 else
6024 {
c1c2bc04 6025
226a5051
RE
6026 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6027 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6028 }
6cfc7210 6029
c1c2bc04
RE
6030 output_mov_immediate (operands);
6031 output_mov_immediate (otherops);
cce8749e
CH
6032 }
6033 else if (code1 == CONST_INT)
6034 {
56636818
JL
6035#if HOST_BITS_PER_WIDE_INT > 32
6036 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6037 what the upper word is. */
6038 if (WORDS_BIG_ENDIAN)
6039 {
6040 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6041 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6042 }
6043 else
6044 {
6045 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6046 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6047 }
6048#else
6354dc9b 6049 /* Sign extend the intval into the high-order word. */
c1c2bc04
RE
6050 if (WORDS_BIG_ENDIAN)
6051 {
6052 otherops[1] = operands[1];
6053 operands[1] = (INTVAL (operands[1]) < 0
6054 ? constm1_rtx : const0_rtx);
6055 }
ff9940b0 6056 else
c1c2bc04 6057 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 6058#endif
c1c2bc04
RE
6059 output_mov_immediate (otherops);
6060 output_mov_immediate (operands);
cce8749e
CH
6061 }
6062 else if (code1 == MEM)
6063 {
ff9940b0 6064 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 6065 {
ff9940b0 6066 case REG:
9997d19d 6067 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 6068 break;
2b835d68 6069
ff9940b0 6070 case PRE_INC:
6354dc9b 6071 abort (); /* Should never happen now. */
ff9940b0 6072 break;
2b835d68 6073
ff9940b0 6074 case PRE_DEC:
2b835d68 6075 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 6076 break;
2b835d68 6077
ff9940b0 6078 case POST_INC:
9997d19d 6079 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 6080 break;
2b835d68 6081
ff9940b0 6082 case POST_DEC:
6354dc9b 6083 abort (); /* Should never happen now. */
ff9940b0 6084 break;
2b835d68
RE
6085
6086 case LABEL_REF:
6087 case CONST:
6088 output_asm_insn ("adr%?\t%0, %1", operands);
6089 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6090 break;
6091
ff9940b0 6092 default:
aec3cfba
NC
6093 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6094 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 6095 {
2b835d68
RE
6096 otherops[0] = operands[0];
6097 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6098 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6099 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6100 {
6101 if (GET_CODE (otherops[2]) == CONST_INT)
6102 {
6103 switch (INTVAL (otherops[2]))
6104 {
6105 case -8:
6106 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6107 return "";
6108 case -4:
6109 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6110 return "";
6111 case 4:
6112 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6113 return "";
6114 }
6115 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6116 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6117 else
6118 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6119 }
6120 else
6121 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6122 }
6123 else
6124 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 6125
2b835d68
RE
6126 return "ldm%?ia\t%0, %M0";
6127 }
6128 else
6129 {
6130 otherops[1] = adj_offsettable_operand (operands[1], 4);
6131 /* Take care of overlapping base/data reg. */
6132 if (reg_mentioned_p (operands[0], operands[1]))
6133 {
6134 output_asm_insn ("ldr%?\t%0, %1", otherops);
6135 output_asm_insn ("ldr%?\t%0, %1", operands);
6136 }
6137 else
6138 {
6139 output_asm_insn ("ldr%?\t%0, %1", operands);
6140 output_asm_insn ("ldr%?\t%0, %1", otherops);
6141 }
cce8749e
CH
6142 }
6143 }
6144 }
2b835d68 6145 else
6354dc9b 6146 abort (); /* Constraints should prevent this. */
cce8749e
CH
6147 }
6148 else if (code0 == MEM && code1 == REG)
6149 {
62b10bbc
NC
6150 if (REGNO (operands[1]) == IP_REGNUM)
6151 abort ();
2b835d68 6152
ff9940b0
RE
6153 switch (GET_CODE (XEXP (operands[0], 0)))
6154 {
6155 case REG:
9997d19d 6156 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 6157 break;
2b835d68 6158
ff9940b0 6159 case PRE_INC:
6354dc9b 6160 abort (); /* Should never happen now. */
ff9940b0 6161 break;
2b835d68 6162
ff9940b0 6163 case PRE_DEC:
2b835d68 6164 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 6165 break;
2b835d68 6166
ff9940b0 6167 case POST_INC:
9997d19d 6168 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 6169 break;
2b835d68 6170
ff9940b0 6171 case POST_DEC:
6354dc9b 6172 abort (); /* Should never happen now. */
ff9940b0 6173 break;
2b835d68
RE
6174
6175 case PLUS:
6176 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6177 {
6178 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6179 {
6180 case -8:
6181 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6182 return "";
6183
6184 case -4:
6185 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6186 return "";
6187
6188 case 4:
6189 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6190 return "";
6191 }
6192 }
6193 /* Fall through */
6194
ff9940b0 6195 default:
cce8749e 6196 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 6197 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
6198 output_asm_insn ("str%?\t%1, %0", operands);
6199 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
6200 }
6201 }
2b835d68 6202 else
62b10bbc 6203 abort (); /* Constraints should prevent this */
cce8749e 6204
9997d19d
RE
6205 return "";
6206}
cce8749e
CH
6207
6208
6209/* Output an arbitrary MOV reg, #n.
6210 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6211
cd2b33d0 6212const char *
cce8749e 6213output_mov_immediate (operands)
62b10bbc 6214 rtx * operands;
cce8749e 6215{
f3bb6135 6216 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
6217 int n_ones = 0;
6218 int i;
6219
6220 /* Try to use one MOV */
cce8749e 6221 if (const_ok_for_arm (n))
f3bb6135 6222 {
9997d19d 6223 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
6224 return "";
6225 }
cce8749e
CH
6226
6227 /* Try to use one MVN */
f3bb6135 6228 if (const_ok_for_arm (~n))
cce8749e 6229 {
f3bb6135 6230 operands[1] = GEN_INT (~n);
9997d19d 6231 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 6232 return "";
cce8749e
CH
6233 }
6234
6354dc9b 6235 /* If all else fails, make it out of ORRs or BICs as appropriate. */
cce8749e
CH
6236
6237 for (i=0; i < 32; i++)
6238 if (n & 1 << i)
6239 n_ones++;
6240
6354dc9b 6241 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
e5951263 6242 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
cce8749e 6243 else
d5b7b3ae 6244 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
f3bb6135
RE
6245
6246 return "";
6247}
cce8749e
CH
6248
6249
6250/* Output an ADD r, s, #n where n may be too big for one instruction. If
6251 adding zero to one register, output nothing. */
6252
cd2b33d0 6253const char *
cce8749e 6254output_add_immediate (operands)
62b10bbc 6255 rtx * operands;
cce8749e 6256{
f3bb6135 6257 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
6258
6259 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6260 {
6261 if (n < 0)
6262 output_multi_immediate (operands,
9997d19d
RE
6263 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6264 -n);
cce8749e
CH
6265 else
6266 output_multi_immediate (operands,
9997d19d
RE
6267 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6268 n);
cce8749e 6269 }
f3bb6135
RE
6270
6271 return "";
6272}
cce8749e 6273
cce8749e
CH
6274/* Output a multiple immediate operation.
6275 OPERANDS is the vector of operands referred to in the output patterns.
6276 INSTR1 is the output pattern to use for the first constant.
6277 INSTR2 is the output pattern to use for subsequent constants.
6278 IMMED_OP is the index of the constant slot in OPERANDS.
6279 N is the constant value. */
6280
cd2b33d0 6281static const char *
cce8749e 6282output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc 6283 rtx * operands;
cd2b33d0
NC
6284 const char * instr1;
6285 const char * instr2;
f3bb6135
RE
6286 int immed_op;
6287 HOST_WIDE_INT n;
cce8749e 6288{
f3bb6135 6289#if HOST_BITS_PER_WIDE_INT > 32
e5951263 6290 n &= HOST_UINT (0xffffffff);
f3bb6135
RE
6291#endif
6292
cce8749e
CH
6293 if (n == 0)
6294 {
6295 operands[immed_op] = const0_rtx;
6354dc9b 6296 output_asm_insn (instr1, operands); /* Quick and easy output. */
cce8749e
CH
6297 }
6298 else
6299 {
6300 int i;
cd2b33d0 6301 const char * instr = instr1;
cce8749e 6302
6354dc9b 6303 /* Note that n is never zero here (which would give no output). */
cce8749e
CH
6304 for (i = 0; i < 32; i += 2)
6305 {
6306 if (n & (3 << i))
6307 {
f3bb6135
RE
6308 operands[immed_op] = GEN_INT (n & (255 << i));
6309 output_asm_insn (instr, operands);
cce8749e
CH
6310 instr = instr2;
6311 i += 6;
6312 }
6313 }
6314 }
cd2b33d0 6315
f3bb6135 6316 return "";
9997d19d 6317}
cce8749e
CH
6318
6319
6320/* Return the appropriate ARM instruction for the operation code.
6321 The returned result should not be overwritten. OP is the rtx of the
6322 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6323 was shifted. */
6324
cd2b33d0 6325const char *
cce8749e
CH
6326arithmetic_instr (op, shift_first_arg)
6327 rtx op;
f3bb6135 6328 int shift_first_arg;
cce8749e 6329{
9997d19d 6330 switch (GET_CODE (op))
cce8749e
CH
6331 {
6332 case PLUS:
f3bb6135
RE
6333 return "add";
6334
cce8749e 6335 case MINUS:
f3bb6135
RE
6336 return shift_first_arg ? "rsb" : "sub";
6337
cce8749e 6338 case IOR:
f3bb6135
RE
6339 return "orr";
6340
cce8749e 6341 case XOR:
f3bb6135
RE
6342 return "eor";
6343
cce8749e 6344 case AND:
f3bb6135
RE
6345 return "and";
6346
cce8749e 6347 default:
f3bb6135 6348 abort ();
cce8749e 6349 }
f3bb6135 6350}
cce8749e
CH
6351
6352
6353/* Ensure valid constant shifts and return the appropriate shift mnemonic
6354 for the operation code. The returned result should not be overwritten.
6355 OP is the rtx code of the shift.
9997d19d 6356 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
6354dc9b 6357 shift. */
cce8749e 6358
cd2b33d0 6359static const char *
9997d19d
RE
6360shift_op (op, amountp)
6361 rtx op;
6362 HOST_WIDE_INT *amountp;
cce8749e 6363{
cd2b33d0 6364 const char * mnem;
e2c671ba 6365 enum rtx_code code = GET_CODE (op);
cce8749e 6366
9997d19d
RE
6367 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6368 *amountp = -1;
6369 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6370 *amountp = INTVAL (XEXP (op, 1));
6371 else
6372 abort ();
6373
e2c671ba 6374 switch (code)
cce8749e
CH
6375 {
6376 case ASHIFT:
6377 mnem = "asl";
6378 break;
f3bb6135 6379
cce8749e
CH
6380 case ASHIFTRT:
6381 mnem = "asr";
cce8749e 6382 break;
f3bb6135 6383
cce8749e
CH
6384 case LSHIFTRT:
6385 mnem = "lsr";
cce8749e 6386 break;
f3bb6135 6387
9997d19d
RE
6388 case ROTATERT:
6389 mnem = "ror";
9997d19d
RE
6390 break;
6391
ff9940b0 6392 case MULT:
e2c671ba
RE
6393 /* We never have to worry about the amount being other than a
6394 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
6395 if (*amountp != -1)
6396 *amountp = int_log2 (*amountp);
6397 else
6398 abort ();
f3bb6135
RE
6399 return "asl";
6400
cce8749e 6401 default:
f3bb6135 6402 abort ();
cce8749e
CH
6403 }
6404
e2c671ba
RE
6405 if (*amountp != -1)
6406 {
6407 /* This is not 100% correct, but follows from the desire to merge
6408 multiplication by a power of 2 with the recognizer for a
6409 shift. >=32 is not a valid shift for "asl", so we must try and
6410 output a shift that produces the correct arithmetical result.
ddd5a7c1 6411 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
6412 is not set correctly if we set the flags; but we never use the
6413 carry bit from such an operation, so we can ignore that. */
6414 if (code == ROTATERT)
6415 *amountp &= 31; /* Rotate is just modulo 32 */
6416 else if (*amountp != (*amountp & 31))
6417 {
6418 if (code == ASHIFT)
6419 mnem = "lsr";
6420 *amountp = 32;
6421 }
6422
6423 /* Shifts of 0 are no-ops. */
6424 if (*amountp == 0)
6425 return NULL;
6426 }
6427
9997d19d
RE
6428 return mnem;
6429}
cce8749e
CH
6430
6431
6354dc9b 6432/* Obtain the shift from the POWER of two. */
18af7313 6433static HOST_WIDE_INT
cce8749e 6434int_log2 (power)
f3bb6135 6435 HOST_WIDE_INT power;
cce8749e 6436{
f3bb6135 6437 HOST_WIDE_INT shift = 0;
cce8749e 6438
e5951263 6439 while ((((HOST_INT (1)) << shift) & power) == 0)
cce8749e
CH
6440 {
6441 if (shift > 31)
f3bb6135 6442 abort ();
cce8749e
CH
6443 shift++;
6444 }
f3bb6135
RE
6445
6446 return shift;
6447}
cce8749e 6448
cce8749e
CH
6449/* Output a .ascii pseudo-op, keeping track of lengths. This is because
6450 /bin/as is horribly restrictive. */
6cfc7210 6451#define MAX_ASCII_LEN 51
cce8749e
CH
6452
6453void
6454output_ascii_pseudo_op (stream, p, len)
62b10bbc 6455 FILE * stream;
3cce094d 6456 const unsigned char * p;
cce8749e
CH
6457 int len;
6458{
6459 int i;
6cfc7210 6460 int len_so_far = 0;
cce8749e 6461
6cfc7210
NC
6462 fputs ("\t.ascii\t\"", stream);
6463
cce8749e
CH
6464 for (i = 0; i < len; i++)
6465 {
6466 register int c = p[i];
6467
6cfc7210 6468 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 6469 {
6cfc7210 6470 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 6471 len_so_far = 0;
cce8749e
CH
6472 }
6473
6cfc7210 6474 switch (c)
cce8749e 6475 {
6cfc7210
NC
6476 case TARGET_TAB:
6477 fputs ("\\t", stream);
6478 len_so_far += 2;
6479 break;
6480
6481 case TARGET_FF:
6482 fputs ("\\f", stream);
6483 len_so_far += 2;
6484 break;
6485
6486 case TARGET_BS:
6487 fputs ("\\b", stream);
6488 len_so_far += 2;
6489 break;
6490
6491 case TARGET_CR:
6492 fputs ("\\r", stream);
6493 len_so_far += 2;
6494 break;
6495
6496 case TARGET_NEWLINE:
6497 fputs ("\\n", stream);
6498 c = p [i + 1];
6499 if ((c >= ' ' && c <= '~')
6500 || c == TARGET_TAB)
6501 /* This is a good place for a line break. */
6502 len_so_far = MAX_ASCII_LEN;
6503 else
6504 len_so_far += 2;
6505 break;
6506
6507 case '\"':
6508 case '\\':
6509 putc ('\\', stream);
5895f793 6510 len_so_far++;
6cfc7210 6511 /* drop through. */
f3bb6135 6512
6cfc7210
NC
6513 default:
6514 if (c >= ' ' && c <= '~')
6515 {
6516 putc (c, stream);
5895f793 6517 len_so_far++;
6cfc7210
NC
6518 }
6519 else
6520 {
6521 fprintf (stream, "\\%03o", c);
6522 len_so_far += 4;
6523 }
6524 break;
cce8749e 6525 }
cce8749e 6526 }
f3bb6135 6527
cce8749e 6528 fputs ("\"\n", stream);
f3bb6135 6529}
cce8749e 6530\f
ff9940b0 6531
cd2b33d0 6532const char *
84ed5e79 6533output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
6534 rtx operand;
6535 int really_return;
84ed5e79 6536 int reverse;
ff9940b0
RE
6537{
6538 char instr[100];
6539 int reg, live_regs = 0;
46406379 6540 int volatile_func = arm_volatile_func ();
e2c671ba 6541
d5b7b3ae
RE
6542 /* If a function is naked, don't use the "return" insn. */
6543 if (arm_naked_function_p (current_function_decl))
6544 return "";
6545
e2c671ba 6546 return_used_this_function = 1;
d5b7b3ae 6547
62b10bbc 6548 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6549 {
e2c671ba 6550 /* If this function was declared non-returning, and we have found a tail
3a5a4282
PB
6551 call, then we have to trust that the called function won't return. */
6552 if (really_return)
6553 {
6554 rtx ops[2];
6555
6556 /* Otherwise, trap an attempted return by aborting. */
6557 ops[0] = operand;
6558 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
6559 : "abort");
6560 assemble_external_libcall (ops[1]);
6561 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
6562 }
6563
e2c671ba
RE
6564 return "";
6565 }
6566
5895f793 6567 if (current_function_calls_alloca && !really_return)
62b10bbc 6568 abort ();
d5b7b3ae 6569
f3bb6135 6570 for (reg = 0; reg <= 10; reg++)
5895f793 6571 if (regs_ever_live[reg] && !call_used_regs[reg])
ff9940b0
RE
6572 live_regs++;
6573
5895f793
RE
6574 if (!TARGET_APCS_FRAME
6575 && !frame_pointer_needed
d5b7b3ae 6576 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6577 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6578 live_regs++;
6579
5895f793 6580 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6581 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6582 live_regs++;
6583
0616531f 6584 if (live_regs || regs_ever_live[LR_REGNUM])
ff9940b0
RE
6585 live_regs++;
6586
6587 if (frame_pointer_needed)
6588 live_regs += 4;
6589
3a5a4282
PB
6590 /* On some ARM architectures it is faster to use LDR rather than LDM to
6591 load a single register. On other architectures, the cost is the same. */
6592 if (live_regs == 1
6593 && regs_ever_live[LR_REGNUM]
5895f793 6594 && !really_return)
d5b7b3ae
RE
6595 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
6596 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
6597 else if (live_regs == 1
6598 && regs_ever_live[LR_REGNUM]
d5b7b3ae
RE
6599 && TARGET_APCS_32)
6600 output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
6601 : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
3a5a4282 6602 else if (live_regs)
ff9940b0 6603 {
5895f793 6604 if (!regs_ever_live[LR_REGNUM])
ff9940b0 6605 live_regs++;
f3bb6135 6606
ff9940b0 6607 if (frame_pointer_needed)
84ed5e79
RE
6608 strcpy (instr,
6609 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
ff9940b0 6610 else
84ed5e79
RE
6611 strcpy (instr,
6612 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
6613
6614 for (reg = 0; reg <= 10; reg++)
62b10bbc 6615 if (regs_ever_live[reg]
5895f793
RE
6616 && (!call_used_regs[reg]
6617 || (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6618 && reg == PIC_OFFSET_TABLE_REGNUM)))
ff9940b0 6619 {
1d5473cb 6620 strcat (instr, "%|");
ff9940b0
RE
6621 strcat (instr, reg_names[reg]);
6622 if (--live_regs)
6623 strcat (instr, ", ");
6624 }
f3bb6135 6625
ff9940b0
RE
6626 if (frame_pointer_needed)
6627 {
1d5473cb 6628 strcat (instr, "%|");
ff9940b0
RE
6629 strcat (instr, reg_names[11]);
6630 strcat (instr, ", ");
1d5473cb 6631 strcat (instr, "%|");
ff9940b0
RE
6632 strcat (instr, reg_names[13]);
6633 strcat (instr, ", ");
1d5473cb 6634 strcat (instr, "%|");
5895f793 6635 strcat (instr, TARGET_INTERWORK || (!really_return)
62b10bbc 6636 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
ff9940b0
RE
6637 }
6638 else
1d5473cb 6639 {
5895f793 6640 if (!TARGET_APCS_FRAME
d5b7b3ae 6641 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6642 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6643 {
6644 strcat (instr, "%|");
6645 strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
6646 strcat (instr, ", ");
6647 }
6648
1d5473cb 6649 strcat (instr, "%|");
d5b7b3ae 6650
6cfc7210 6651 if (TARGET_INTERWORK && really_return)
62b10bbc 6652 strcat (instr, reg_names[IP_REGNUM]);
da6558fd 6653 else
62b10bbc 6654 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
1d5473cb 6655 }
d5b7b3ae 6656
2b835d68 6657 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 6658 output_asm_insn (instr, &operand);
da6558fd 6659
6cfc7210 6660 if (TARGET_INTERWORK && really_return)
da6558fd
NC
6661 {
6662 strcpy (instr, "bx%?");
6663 strcat (instr, reverse ? "%D0" : "%d0");
6664 strcat (instr, "\t%|");
6665 strcat (instr, frame_pointer_needed ? "lr" : "ip");
6666
5895f793 6667 output_asm_insn (instr, &operand);
da6558fd 6668 }
ff9940b0
RE
6669 }
6670 else if (really_return)
6671 {
6cfc7210 6672 if (TARGET_INTERWORK)
25b1c156 6673 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
b111229a
RE
6674 else
6675 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
6676 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
da6558fd 6677
5895f793 6678 output_asm_insn (instr, &operand);
ff9940b0 6679 }
f3bb6135 6680
ff9940b0
RE
6681 return "";
6682}
6683
e82ea128
DE
6684/* Return nonzero if optimizing and the current function is volatile.
6685 Such functions never return, and many memory cycles can be saved
6686 by not storing register values that will never be needed again.
6687 This optimization was added to speed up context switching in a
6354dc9b 6688 kernel application. */
e2c671ba
RE
6689int
6690arm_volatile_func ()
6691{
6354dc9b
NC
6692 return (optimize > 0
6693 && current_function_nothrow
46406379 6694 && TREE_THIS_VOLATILE (current_function_decl));
e2c671ba
RE
6695}
6696
ef179a26
NC
6697/* Write the function name into the code section, directly preceding
6698 the function prologue.
6699
6700 Code will be output similar to this:
6701 t0
6702 .ascii "arm_poke_function_name", 0
6703 .align
6704 t1
6705 .word 0xff000000 + (t1 - t0)
6706 arm_poke_function_name
6707 mov ip, sp
6708 stmfd sp!, {fp, ip, lr, pc}
6709 sub fp, ip, #4
6710
6711 When performing a stack backtrace, code can inspect the value
6712 of 'pc' stored at 'fp' + 0. If the trace function then looks
6713 at location pc - 12 and the top 8 bits are set, then we know
6714 that there is a function name embedded immediately preceding this
6715 location and has length ((pc[-3]) & 0xff000000).
6716
6717 We assume that pc is declared as a pointer to an unsigned long.
6718
6719 It is of no benefit to output the function name if we are assembling
6720 a leaf function. These function types will not contain a stack
6721 backtrace structure, therefore it is not possible to determine the
6722 function name. */
6723
6724void
6725arm_poke_function_name (stream, name)
6726 FILE * stream;
6727 char * name;
6728{
6729 unsigned long alignlength;
6730 unsigned long length;
6731 rtx x;
6732
d5b7b3ae
RE
6733 length = strlen (name) + 1;
6734 alignlength = ROUND_UP (length);
ef179a26 6735
949d79eb 6736 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 6737 ASM_OUTPUT_ALIGN (stream, 2);
e5951263 6738 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
ef179a26
NC
6739 ASM_OUTPUT_INT (stream, x);
6740}
6741
ff9940b0
RE
6742/* The amount of stack adjustment that happens here, in output_return and in
6743 output_epilogue must be exactly the same as was calculated during reload,
6744 or things will point to the wrong place. The only time we can safely
6745 ignore this constraint is when a function has no arguments on the stack,
6746 no stack frame requirement and no live registers execpt for `lr'. If we
6747 can guarantee that by making all function calls into tail calls and that
6748 lr is not clobbered in any other way, then there is no need to push lr
6354dc9b 6749 onto the stack. */
cce8749e 6750void
d5b7b3ae 6751output_arm_prologue (f, frame_size)
6cfc7210 6752 FILE * f;
cce8749e
CH
6753 int frame_size;
6754{
f3bb6135 6755 int reg, live_regs_mask = 0;
46406379 6756 int volatile_func = arm_volatile_func ();
cce8749e 6757
cce8749e
CH
6758 /* Nonzero if we must stuff some register arguments onto the stack as if
6759 they were passed there. */
6760 int store_arg_regs = 0;
6761
abaa26e5 6762 if (arm_ccfsm_state || arm_target_insn)
6354dc9b 6763 abort (); /* Sanity check. */
31fdb4d5
DE
6764
6765 if (arm_naked_function_p (current_function_decl))
6766 return;
6767
ff9940b0 6768 return_used_this_function = 0;
ff9940b0 6769
dd18ae56
NC
6770 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
6771 current_function_args_size,
6772 current_function_pretend_args_size, frame_size);
6773 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
6774 frame_pointer_needed,
6775 current_function_anonymous_args);
cce8749e 6776
e2c671ba 6777 if (volatile_func)
dd18ae56 6778 asm_fprintf (f, "\t%@ Volatile function.\n");
e2c671ba 6779
cce8749e
CH
6780 if (current_function_anonymous_args && current_function_pretend_args_size)
6781 store_arg_regs = 1;
6782
f3bb6135 6783 for (reg = 0; reg <= 10; reg++)
5895f793 6784 if (regs_ever_live[reg] && !call_used_regs[reg])
cce8749e
CH
6785 live_regs_mask |= (1 << reg);
6786
5895f793
RE
6787 if (!TARGET_APCS_FRAME
6788 && !frame_pointer_needed
d5b7b3ae 6789 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6790 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6791 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6792
5895f793 6793 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6794 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6795 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6796
ff9940b0 6797 if (frame_pointer_needed)
e2c671ba 6798 live_regs_mask |= 0xD800;
62b10bbc 6799 else if (regs_ever_live[LR_REGNUM])
ff9940b0 6800 {
62b10bbc 6801 live_regs_mask |= 1 << LR_REGNUM;
cce8749e
CH
6802 }
6803
0616531f
RE
6804 if (live_regs_mask)
6805 /* If a di mode load/store multiple is used, and the base register
6806 is r3, then r4 can become an ever live register without lr
6807 doing so, in this case we need to push lr as well, or we
6808 will fail to get a proper return. */
6809 live_regs_mask |= 1 << LR_REGNUM;
32de079a
RE
6810
6811#ifdef AOF_ASSEMBLER
6812 if (flag_pic)
dd18ae56 6813 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 6814#endif
f3bb6135 6815}
cce8749e 6816
cd2b33d0 6817const char *
0616531f
RE
6818arm_output_epilogue (really_return)
6819 int really_return;
cce8749e 6820{
949d79eb
RE
6821 int reg;
6822 int live_regs_mask = 0;
6354dc9b 6823 /* If we need this, then it will always be at least this much. */
b111229a 6824 int floats_offset = 12;
cce8749e 6825 rtx operands[3];
949d79eb 6826 int frame_size = get_frame_size ();
d5b7b3ae
RE
6827 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
6828 FILE * f = asm_out_file;
e5951263 6829 int volatile_func = arm_volatile_func ();
d5b7b3ae 6830 int return_regnum;
cce8749e 6831
b36ba79f 6832 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 6833 return "";
cce8749e 6834
31fdb4d5
DE
6835 /* Naked functions don't have epilogues. */
6836 if (arm_naked_function_p (current_function_decl))
949d79eb 6837 return "";
31fdb4d5 6838
d5b7b3ae
RE
6839 /* If we are throwing an exception, the address we want to jump to is in
6840 R1; otherwise, it's in LR. */
6841 return_regnum = eh_ofs ? 2 : LR_REGNUM;
6842
0616531f
RE
6843 /* If we are throwing an exception, then we really must be doing a return,
6844 so we can't tail-call. */
5895f793 6845 if (eh_ofs && !really_return)
0616531f
RE
6846 abort();
6847
e2c671ba 6848 /* A volatile function should never return. Call abort. */
c11145f6 6849 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6850 {
86efdc8e 6851 rtx op;
ed0e6530 6852 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 6853 assemble_external_libcall (op);
e2c671ba 6854 output_asm_insn ("bl\t%a0", &op);
949d79eb 6855 return "";
e2c671ba
RE
6856 }
6857
f3bb6135 6858 for (reg = 0; reg <= 10; reg++)
5895f793 6859 if (regs_ever_live[reg] && !call_used_regs[reg])
cce8749e 6860 {
ff9940b0
RE
6861 live_regs_mask |= (1 << reg);
6862 floats_offset += 4;
cce8749e
CH
6863 }
6864
d5b7b3ae 6865 /* Handle the frame pointer as a special case. */
5895f793
RE
6866 if (!TARGET_APCS_FRAME
6867 && !frame_pointer_needed
d5b7b3ae 6868 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6869 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6870 {
6871 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6872 floats_offset += 4;
6873 }
6874
ed0e6530
PB
6875 /* If we aren't loading the PIC register, don't stack it even though it may
6876 be live. */
5895f793 6877 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6878 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6879 {
6880 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6881 floats_offset += 4;
6882 }
6883
ff9940b0 6884 if (frame_pointer_needed)
cce8749e 6885 {
b111229a
RE
6886 if (arm_fpu_arch == FP_SOFT2)
6887 {
d5b7b3ae 6888 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 6889 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6890 {
6891 floats_offset += 12;
dd18ae56
NC
6892 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6893 reg, FP_REGNUM, floats_offset);
b111229a
RE
6894 }
6895 }
6896 else
6897 {
d5b7b3ae 6898 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 6899
d5b7b3ae 6900 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 6901 {
5895f793 6902 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6903 {
6904 floats_offset += 12;
6cfc7210 6905
6354dc9b 6906 /* We can't unstack more than four registers at once. */
b111229a
RE
6907 if (start_reg - reg == 3)
6908 {
dd18ae56
NC
6909 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6910 reg, FP_REGNUM, floats_offset);
b111229a
RE
6911 start_reg = reg - 1;
6912 }
6913 }
6914 else
6915 {
6916 if (reg != start_reg)
dd18ae56
NC
6917 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6918 reg + 1, start_reg - reg,
6919 FP_REGNUM, floats_offset);
b111229a
RE
6920 start_reg = reg - 1;
6921 }
6922 }
6923
6924 /* Just in case the last register checked also needs unstacking. */
6925 if (reg != start_reg)
dd18ae56
NC
6926 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6927 reg + 1, start_reg - reg,
6928 FP_REGNUM, floats_offset);
b111229a 6929 }
da6558fd 6930
6cfc7210 6931 if (TARGET_INTERWORK)
b111229a
RE
6932 {
6933 live_regs_mask |= 0x6800;
dd18ae56 6934 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
d5b7b3ae
RE
6935 if (eh_ofs)
6936 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6937 REGNO (eh_ofs));
0616531f
RE
6938 if (really_return)
6939 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
d5b7b3ae 6940 }
5895f793 6941 else if (eh_ofs || !really_return)
d5b7b3ae
RE
6942 {
6943 live_regs_mask |= 0x6800;
6944 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
0616531f
RE
6945 if (eh_ofs)
6946 {
6947 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6948 REGNO (eh_ofs));
6949 /* Even in 26-bit mode we do a mov (rather than a movs)
6950 because we don't have the PSR bits set in the
6951 address. */
6952 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6953 }
b111229a
RE
6954 }
6955 else
6956 {
6957 live_regs_mask |= 0xA800;
dd18ae56 6958 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
6959 TARGET_APCS_32 ? FALSE : TRUE);
6960 }
cce8749e
CH
6961 }
6962 else
6963 {
d2288d8d 6964 /* Restore stack pointer if necessary. */
56636818 6965 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
6966 {
6967 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
6968 operands[2] = GEN_INT (frame_size
6969 + current_function_outgoing_args_size);
d2288d8d
TG
6970 output_add_immediate (operands);
6971 }
6972
b111229a
RE
6973 if (arm_fpu_arch == FP_SOFT2)
6974 {
d5b7b3ae 6975 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 6976 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
6977 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6978 reg, SP_REGNUM);
b111229a
RE
6979 }
6980 else
6981 {
d5b7b3ae 6982 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 6983
d5b7b3ae 6984 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 6985 {
5895f793 6986 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6987 {
6988 if (reg - start_reg == 3)
6989 {
dd18ae56
NC
6990 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6991 start_reg, SP_REGNUM);
b111229a
RE
6992 start_reg = reg + 1;
6993 }
6994 }
6995 else
6996 {
6997 if (reg != start_reg)
dd18ae56
NC
6998 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6999 start_reg, reg - start_reg,
7000 SP_REGNUM);
6cfc7210 7001
b111229a
RE
7002 start_reg = reg + 1;
7003 }
7004 }
7005
7006 /* Just in case the last register checked also needs unstacking. */
7007 if (reg != start_reg)
dd18ae56
NC
7008 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7009 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
7010 }
7011
62b10bbc 7012 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 7013 {
6cfc7210 7014 if (TARGET_INTERWORK)
b111229a 7015 {
0616531f 7016 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2 7017
d5b7b3ae
RE
7018 /* Handle LR on its own. */
7019 if (live_regs_mask == (1 << LR_REGNUM))
7020 {
7021 if (eh_ofs)
7022 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7023 SP_REGNUM);
7024 else
7025 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7026 SP_REGNUM);
7027 }
7028 else if (live_regs_mask != 0)
7029 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7030 FALSE);
7031
7032 if (eh_ofs)
7033 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7034 REGNO (eh_ofs));
7035
0616531f
RE
7036 if (really_return)
7037 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
b111229a 7038 }
d5b7b3ae
RE
7039 else if (eh_ofs)
7040 {
7041 if (live_regs_mask == 0)
7042 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7043 else
7044 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
7045 live_regs_mask | (1 << LR_REGNUM), FALSE);
7046
7047 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7048 REGNO (eh_ofs));
7049 /* Jump to the target; even in 26-bit mode. */
7050 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7051 }
5895f793 7052 else if (TARGET_APCS_32 && live_regs_mask == 0 && !really_return)
0616531f
RE
7053 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7054 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
d5b7b3ae 7055 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
5895f793 7056 else if (!really_return)
0616531f
RE
7057 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7058 live_regs_mask | (1 << LR_REGNUM), FALSE);
32de079a 7059 else
d5b7b3ae
RE
7060 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7061 live_regs_mask | (1 << PC_REGNUM),
32de079a 7062 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
7063 }
7064 else
7065 {
62b10bbc 7066 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 7067 {
6354dc9b 7068 /* Restore the integer regs, and the return address into lr. */
0616531f 7069 live_regs_mask |= 1 << LR_REGNUM;
32de079a 7070
d5b7b3ae
RE
7071 if (live_regs_mask == (1 << LR_REGNUM))
7072 {
7073 if (eh_ofs)
7074 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7075 SP_REGNUM);
7076 else
7077 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7078 SP_REGNUM);
7079 }
7080 else if (live_regs_mask != 0)
7081 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7082 FALSE);
cce8749e 7083 }
b111229a 7084
cce8749e
CH
7085 if (current_function_pretend_args_size)
7086 {
6354dc9b 7087 /* Unwind the pre-pushed regs. */
cce8749e 7088 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 7089 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
7090 output_add_immediate (operands);
7091 }
d5b7b3ae
RE
7092
7093 if (eh_ofs)
7094 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7095 REGNO (eh_ofs));
0616531f
RE
7096
7097 if (really_return)
7098 {
7099 /* And finally, go home. */
7100 if (TARGET_INTERWORK)
7101 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7102 else if (TARGET_APCS_32 || eh_ofs)
7103 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7104 else
7105 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7106 }
cce8749e
CH
7107 }
7108 }
f3bb6135 7109
949d79eb
RE
7110 return "";
7111}
7112
7113void
eb3921e8 7114output_func_epilogue (frame_size)
949d79eb
RE
7115 int frame_size;
7116{
d5b7b3ae
RE
7117 if (TARGET_THUMB)
7118 {
7119 /* ??? Probably not safe to set this here, since it assumes that a
7120 function will be emitted as assembly immediately after we generate
7121 RTL for it. This does not happen for inline functions. */
7122 return_used_this_function = 0;
7123 }
7124 else
7125 {
7126 if (use_return_insn (FALSE)
7127 && return_used_this_function
7128 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 7129 && !frame_pointer_needed)
d5b7b3ae 7130 abort ();
f3bb6135 7131
d5b7b3ae
RE
7132 /* Reset the ARM-specific per-function variables. */
7133 current_function_anonymous_args = 0;
7134 after_arm_reorg = 0;
7135 }
f3bb6135 7136}
e2c671ba 7137

/* Generate and emit an insn that we will recognize as a push_multi.
   Unfortunately, since this insn does not reflect very well the actual
   semantics of the operation, we need to annotate the insn for the benefit
   of DWARF2 frame unwind information.  */
static rtx
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int i, j;
  rtx par;
  rtx dwarf;
  int dwarf_par_index;
  rtx tmp, reg;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  /* For the body of the insn we are going to generate an UNSPEC in
     parallel with several USEs.  This allows the insn to be recognised
     by the push_multi pattern in the arm.md file.  The insn looks
     something like this:

       (parallel [
	   (set (mem:BLK (pre_dec:BLK (reg:SI sp))) (unspec:BLK [(reg:SI r4)] 2))
	   (use (reg:SI 11 fp))
	   (use (reg:SI 12 ip))
	   (use (reg:SI 14 lr))
	   (use (reg:SI 15 pc))
	])

     For the frame note however, we try to be more explicit and actually
     show each register being stored into the stack frame, plus a (single)
     decrement of the stack pointer.  We do it this way in order to be
     friendly to the stack unwinding code, which only wants to see a single
     stack decrement per instruction.  The RTL we generate for the note looks
     something like this:

      (sequence [
	 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
	 (set (mem:SI (reg:SI sp)) (reg:SI r4))
	 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
	 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
	 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
	 (set (mem:SI (plus:SI (reg:SI sp) (const_int 16))) (reg:SI pc))
      ])

     This sequence is used both by the code to support stack unwinding for
     exception handlers and the code to generate dwarf2 frame debugging.  */
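
  /* As an illustration (not a guarantee of the exact output): with the
     example MASK above the single instruction emitted for this insn should
     come out as:  stmfd sp!, {r4, fp, ip, lr, pc}.  */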

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
  dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_regs + 1));
  RTX_FRAME_RELATED_P (dwarf) = 1;
  dwarf_par_index = 1;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   gen_rtx_MEM (BLKmode,
					gen_rtx_PRE_DEC (BLKmode,
							 stack_pointer_rtx)),
			   gen_rtx_UNSPEC (BLKmode,
					   gen_rtvec (1, reg),
					   2));

	  tmp = gen_rtx_SET (VOIDmode,
			     gen_rtx_MEM (SImode, stack_pointer_rtx),
			     reg);
	  RTX_FRAME_RELATED_P (tmp) = 1;
	  XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
	  dwarf_par_index++;

	  break;
	}
    }

  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);

	  tmp = gen_rtx_SET (VOIDmode,
			     gen_rtx_MEM (SImode,
					  gen_rtx_PLUS (SImode,
							stack_pointer_rtx,
							GEN_INT (4 * j))),
			     reg);
	  RTX_FRAME_RELATED_P (tmp) = 1;
	  XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;

	  j++;
	}
    }

  par = emit_insn (par);

  tmp = gen_rtx_SET (SImode,
		     stack_pointer_rtx,
		     gen_rtx_PLUS (SImode,
				   stack_pointer_rtx,
				   GEN_INT (-4 * num_regs)));
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 0) = tmp;

  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
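
/* A sketch of the intent (inferred from the body below): generate and emit
   an insn pushing the COUNT floating point registers starting at BASE_REG
   onto a descending stack — an FPA store-float-multiple, hence "sfm" —
   together with a DWARF2 frame note describing each individual store.  */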
static rtx
emit_sfm (base_reg, count)
     int base_reg;
     int count;
{
  rtx par;
  rtx dwarf;
  rtx tmp, reg;
  int i;

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  RTX_FRAME_RELATED_P (dwarf) = 1;

  reg = gen_rtx_REG (XFmode, base_reg++);

  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (BLKmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   gen_rtx_UNSPEC (BLKmode,
				   gen_rtvec (1, reg),
				   2));
  tmp
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (XFmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   reg);
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, count - 1) = tmp;

  for (i = 1; i < count; i++)
    {
      reg = gen_rtx_REG (XFmode, base_reg++);
      XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);

      tmp = gen_rtx_SET (VOIDmode,
			 gen_rtx_MEM (XFmode,
				      gen_rtx_PRE_DEC (BLKmode,
						       stack_pointer_rtx)),
			 reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (dwarf, 0, count - i - 1) = tmp;
    }

  par = emit_insn (par);
  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}

void
arm_expand_prologue ()
{
  int reg;
  rtx amount = GEN_INT (-(get_frame_size ()
			  + current_function_outgoing_args_size));
  int live_regs_mask = 0;
  int store_arg_regs = 0;
  /* If this function doesn't return, then there is no need to push
     the call-saved regs.  */
  int volatile_func = arm_volatile_func ();
  rtx insn;

  /* Naked functions don't have prologues.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  if (!volatile_func)
    {
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && !call_used_regs[reg])
	  live_regs_mask |= 1 << reg;

      if (!TARGET_APCS_FRAME
	  && !frame_pointer_needed
	  && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
	  && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
	live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;

      if (regs_ever_live[LR_REGNUM])
	live_regs_mask |= 1 << LR_REGNUM;
    }

  if (frame_pointer_needed)
    {
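      /* 0xD800 selects fp (bit 11), ip (bit 12), lr (bit 14) and
	 pc (bit 15).  */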
      live_regs_mask |= 0xD800;
      insn = emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
				   stack_pointer_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

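  /* The mask expression below selects the argument registers that were
     pushed as pretend args.  For example, with 8 bytes of pretend args,
     (0xf0 >> (8 / 4)) & 0xf == 0xc, i.e. push {r2, r3}.  */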
  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
	insn = emit_multi_reg_push
	  ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
      else
	insn = emit_insn
	  (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
		       GEN_INT (-current_function_pretend_args_size)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (live_regs_mask)
    {
      /* If we have to push any regs, then we must push lr as well, or
	 we won't get a proper return.  */
      live_regs_mask |= 1 << LR_REGNUM;
      insn = emit_multi_reg_push (live_regs_mask);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* For now the integer regs are still pushed in output_arm_epilogue ().  */

  if (!volatile_func)
    {
      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
		insn = gen_rtx_MEM (XFmode, insn);
		insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
					       gen_rtx_REG (XFmode, reg)));
		RTX_FRAME_RELATED_P (insn) = 1;
	      }
	}
      else
	{
	  int start_reg = LAST_ARM_FP_REGNUM;

	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  if (start_reg - reg == 3)
		    {
		      insn = emit_sfm (reg, 4);
		      RTX_FRAME_RELATED_P (insn) = 1;
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (start_reg != reg)
		    {
		      insn = emit_sfm (reg + 1, start_reg - reg);
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  start_reg = reg - 1;
		}
	    }

	  if (start_reg != reg)
	    {
	      insn = emit_sfm (reg + 1, start_reg - reg);
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
    }

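  /* Point the frame pointer at the saved PC slot, as the APCS expects:
     ip captured sp before the pretend args and register saves, so
     fp = ip - 4 - pretend_args_size.  */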
  if (frame_pointer_needed)
    {
      insn = GEN_INT (-(4 + current_function_pretend_args_size));
      insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx,
				    gen_rtx_REG (SImode, IP_REGNUM),
				    insn));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (amount != const0_rtx)
    {
      insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				    amount));
      RTX_FRAME_RELATED_P (insn) = 1;

      /* If the frame pointer is needed, emit a special barrier that
	 will prevent the scheduler from moving stores to the frame
	 before the stack adjustment.  */
      if (frame_pointer_needed)
	{
	  rtx unspec = gen_rtx_UNSPEC (SImode,
				       gen_rtvec (2, stack_pointer_rtx,
						  hard_frame_pointer_rtx), 4);

	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode, unspec)));
	}
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  Similarly if the user has requested no
     scheduling in the prologue.  */
  if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
\f
/* If CODE is 'd', then X is a condition operand and the instruction
   should only be executed if the condition is true.
   If CODE is 'D', then X is a condition operand and the instruction
   should only be executed if the condition is false: however, if the mode
   of the comparison is CCFPEmode, then always execute the instruction -- we
   do this because in these circumstances !GE does not necessarily imply LT;
   in these cases the instruction pattern will take care to make sure that
   an instruction containing %d will follow, thereby undoing the effects of
   doing this instruction unconditionally.
   If CODE is 'N' then X is a floating point operand that must be negated
   before output.
   If CODE is 'B' then output a bitwise inverted value of X (a const int).
   If X is a REG and CODE is `M', output a ldm/stm style multi-reg.  */
void
arm_print_operand (stream, x, code)
     FILE * stream;
     rtx x;
     int code;
{
  switch (code)
    {
    case '@':
      fputs (ASM_COMMENT_START, stream);
      return;

    case '_':
      fputs (user_label_prefix, stream);
      return;

    case '|':
      fputs (REGISTER_PREFIX, stream);
      return;

    case '?':
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	{
	  if (TARGET_THUMB || current_insn_predicate != NULL)
	    abort ();

	  fputs (arm_condition_codes[arm_current_cc], stream);
	}
      else if (current_insn_predicate)
	{
	  enum arm_cond_code code;

	  if (TARGET_THUMB)
	    abort ();

	  code = get_arm_condition_code (current_insn_predicate);
	  fputs (arm_condition_codes[code], stream);
	}
      return;

    case 'N':
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':
      if (GET_CODE (x) == CONST_INT)
	{
	  HOST_WIDE_INT val;
	  val = ARM_SIGN_EXTEND (~INTVAL (x));
	  fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
	}
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':
      {
	HOST_WIDE_INT val;
	const char * shift = shift_op (x, &val);

	if (shift)
	  {
	    fprintf (stream, ", %s ", shift_op (x, &val));
	    if (val == -1)
	      arm_print_operand (stream, XEXP (x, 1), 0);
	    else
	      {
		fputc ('#', stream);
		fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
	      }
	  }
      }
      return;

      /* An explanation of the 'Q', 'R' and 'H' register operands:

	 In a pair of registers containing a DI or DF value the 'Q'
	 operand returns the register number of the register containing
	 the least significant part of the value.  The 'R' operand returns
	 the register number of the register containing the most
	 significant part of the value.

	 The 'H' operand returns the higher of the two register numbers.
	 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
	 same as the 'Q' operand, since the most significant part of the
	 value is held in the lower numbered register.  The reverse is true
	 on systems where WORDS_BIG_ENDIAN is false.

	 The purpose of these operands is to distinguish between cases
	 where the endian-ness of the values is important (for example
	 when they are added together), and cases where the endian-ness
	 is irrelevant, but the order of register operations is important.
	 For example when loading a value from memory into a register
	 pair, the endian-ness does not matter.  Provided that the value
	 from the lower memory address is put into the lower numbered
	 register, and the value from the higher address is put into the
	 higher numbered register, the load will work regardless of whether
	 the value being loaded is big-wordian or little-wordian.  The
	 order of the two register loads can matter however, if the address
	 of the memory location is actually held in one of the registers
	 being overwritten by the load.  */
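
      /* For example, on a little-endian target (WORDS_BIG_ENDIAN false) a
	 DImode value held in {r0, r1} has %Q = r0 (least significant word),
	 %R = r1 (most significant word) and %H = r1.  */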
    case 'Q':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
      return;

    case 'R':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
      return;

    case 'H':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + 1);
      return;

    case 'm':
      asm_fprintf (stream, "%r",
		   GET_CODE (XEXP (x, 0)) == REG
		   ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
      return;

    case 'M':
      asm_fprintf (stream, "{%r-%r}",
		   REGNO (x),
		   REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
      return;

    case 'd':
      if (!x)
	return;

      if (TARGET_ARM)
	fputs (arm_condition_codes[get_arm_condition_code (x)],
	       stream);
      else
	fputs (thumb_condition_code (x, 0), stream);
      return;

    case 'D':
      if (!x)
	return;

      if (TARGET_ARM)
	fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				   (get_arm_condition_code (x))],
	       stream);
      else
	fputs (thumb_condition_code (x, 1), stream);
      return;

    default:
      if (x == 0)
	abort ();

      if (GET_CODE (x) == REG)
	asm_fprintf (stream, "%r", REGNO (x));
      else if (GET_CODE (x) == MEM)
	{
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
	abort (); /* This should never happen now.  */
      else
	{
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	}
    }
}
\f
/* A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decrease execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE.  */

/* The states of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: make ASM_OUTPUT_OPCODE not output this instruction
   2: make ASM_OUTPUT_OPCODE not output this instruction
   3: make instructions conditional
   4: make instructions conditional

   State transitions (state->state by whom under condition):
   0 -> 1 final_prescan_insn if the `target' is a label
   0 -> 2 final_prescan_insn if the `target' is an unconditional branch
   1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
   2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
   3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
	  (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
   4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
	  (the target insn is arm_target_insn).

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   XXX In case the `target' is an unconditional branch, this conditionalising
   of the instructions always reduces code size, but not always execution
   time.  But then, I want to reduce the code size to somewhere near what
   /bin/cc produces.  */

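/* To illustrate the transformation (an illustrative sketch, not literal
   compiler output): a sequence such as

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   is emitted instead as

	cmp	r0, #0
	addne	r1, r1, #1  */
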
/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static enum arm_cond_code
get_arm_condition_code (comparison)
     rtx comparison;
{
  enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
  register int code;
  register enum rtx_code comp_code = GET_CODE (comparison);

  if (GET_MODE_CLASS (mode) != MODE_CC)
    mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
			   XEXP (comparison, 1));

  switch (mode)
    {
    case CC_DNEmode: code = ARM_NE; goto dominance;
    case CC_DEQmode: code = ARM_EQ; goto dominance;
    case CC_DGEmode: code = ARM_GE; goto dominance;
    case CC_DGTmode: code = ARM_GT; goto dominance;
    case CC_DLEmode: code = ARM_LE; goto dominance;
    case CC_DLTmode: code = ARM_LT; goto dominance;
    case CC_DGEUmode: code = ARM_CS; goto dominance;
    case CC_DGTUmode: code = ARM_HI; goto dominance;
    case CC_DLEUmode: code = ARM_LS; goto dominance;
    case CC_DLTUmode: code = ARM_CC;

    dominance:
      if (comp_code != EQ && comp_code != NE)
	abort ();

      if (comp_code == EQ)
	return ARM_INVERSE_CONDITION_CODE (code);
      return code;

    case CC_NOOVmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_PL;
	case LT: return ARM_MI;
	default: abort ();
	}

    case CC_Zmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	default: abort ();
	}

    case CCFPEmode:
    case CCFPmode:
      /* These encodings assume that AC=1 in the FPA system control
	 byte.  This allows us to handle all cases except UNEQ and
	 LTGT.  */
      switch (comp_code)
	{
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LS;
	case LT: return ARM_MI;
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case ORDERED: return ARM_VC;
	case UNORDERED: return ARM_VS;
	case UNLT: return ARM_LT;
	case UNLE: return ARM_LE;
	case UNGT: return ARM_HI;
	case UNGE: return ARM_PL;
	/* UNEQ and LTGT do not have a representation.  */
	case UNEQ: /* Fall through.  */
	case LTGT: /* Fall through.  */
	default: abort ();
	}

    case CC_SWPmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_LE;
	case GT: return ARM_LT;
	case LE: return ARM_GE;
	case LT: return ARM_GT;
	case GEU: return ARM_LS;
	case GTU: return ARM_CC;
	case LEU: return ARM_CS;
	case LTU: return ARM_HI;
	default: abort ();
	}

    case CC_Cmode:
      switch (comp_code)
	{
	case LTU: return ARM_CS;
	case GEU: return ARM_CC;
	default: abort ();
	}

    case CCmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LE;
	case LT: return ARM_LT;
	case GEU: return ARM_CS;
	case GTU: return ARM_HI;
	case LEU: return ARM_LS;
	case LTU: return ARM_CC;
	default: abort ();
	}

    default: abort ();
    }

  abort ();
}

void
arm_final_prescan_insn (insn)
     rtx insn;
{
  /* BODY will hold the body of INSN.  */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick, and things need to be
     reversed if it appears to fail.  */
  int reverse = 0;

  /* If JUMP_CLOBBERS is one, the conditions are clobbered when the branch
     is taken, even if the rtl suggests otherwise.  It also means that we
     have to grub around within the jump expression to find out what the
     conditions are when the jump isn't taken.  */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know.  */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
	  || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      /* If the jump cannot be done with one instruction, we cannot
	 conditionally execute the instruction in the inverse case.  */
      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to.  */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped++ < max_insns_skipped;)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* If using 32-bit addresses the cc is not preserved over
		 calls.  */
	      if (TARGET_APCS_32)
		{
		  /* Succeed if the following insn is the target label,
		     or if the following two insns are a barrier and
		     the target label.  */
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && GET_CODE (this_insn) == BARRIER)
		    this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && this_insn == label
		      && insns_skipped < max_insns_skipped)
		    {
		      if (jump_clobbers)
			{
			  arm_ccfsm_state = 2;
			  this_insn = next_nonnote_insn (this_insn);
			}
		      else
			arm_ccfsm_state = 1;
		      succeed = TRUE;
		    }
		  else
		    fail = TRUE;
		}
	      break;

	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* XXX Probably, the tests for SET and the PC are unnecessary.  */

	      scanbody = PATTERN (this_insn);
	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      /* Fail if a conditional return is undesirable (e.g. on a
		 StrongARM), but still allow this if optimizing for size.  */
	      else if (GET_CODE (scanbody) == RETURN
		       && !use_return_insn (TRUE)
		       && !optimize_size)
		fail = TRUE;
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      else
		fail = TRUE;	/* Unrecognized jump (e.g. epilogue).  */

	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail.  */
	      scanbody = PATTERN (this_insn);
	      if (!(GET_CODE (scanbody) == SET
		    || GET_CODE (scanbody) == PARALLEL)
		  || get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}
      if (succeed)
	{
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh dear!  We ran off the end... give up.  */
		  recog (PATTERN (insn), insn, NULL_PTR);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
		}
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();
	  if (jump_clobbers)
	    {
	      if (reverse)
		abort ();
	      arm_current_cc =
		get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							  0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was.  */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));
	    }

	  if (reverse || then_not_else)
	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	}

      /* Restore recog_data (getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call; since the insn has been recognized already we
	 call recog directly).  */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}

int
arm_regno_class (regno)
     int regno;
{
  if (TARGET_THUMB)
    {
      if (regno == STACK_POINTER_REGNUM)
	return STACK_REG;
      if (regno == CC_REGNUM)
	return CC_REG;
      if (regno < 8)
	return LO_REGS;
      return HI_REGS;
    }

  if (   regno <= LAST_ARM_REGNUM
      || regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    return GENERAL_REGS;

  if (regno == CC_REGNUM)
    return NO_REGS;

  return FPU_REGS;
}

/* Handle a special case when computing the offset
   of an argument from the frame pointer.  */
int
arm_debugger_arg_offset (value, addr)
     int value;
     rtx addr;
{
  rtx insn;

  /* We are only interested if dbxout_parms() failed to compute the offset.  */
  if (value != 0)
    return 0;

  /* We can only cope with the case where the address is held in a register.  */
  if (GET_CODE (addr) != REG)
    return 0;

  /* If we are using the frame pointer to point at the argument, then
     an offset of 0 is correct.  */
  if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
    return 0;

  /* If we are using the stack pointer to point at the
     argument, then an offset of 0 is correct.  */
  if ((TARGET_THUMB || !frame_pointer_needed)
      && REGNO (addr) == SP_REGNUM)
    return 0;

  /* Oh dear.  The argument is pointed to by a register rather
     than being held in a register, or being stored at a known
     offset from the frame pointer.  Since GDB only understands
     those two kinds of argument we must translate the address
     held in the register into an offset from the frame pointer.
     We do this by searching through the insns for the function
     looking to see where this register gets its value.  If the
     register is initialised from the frame pointer plus an offset
     then we are in luck and we can continue, otherwise we give up.

     This code is exercised by producing debugging information
     for a function with arguments like this:

	   double func (double a, double b, int c, double d) {return d;}

     Without this code the stab for parameter 'd' will be set to
     an offset of 0 from the frame pointer, rather than 8.  */

  /* The if() statement says:

     If the insn is a normal instruction
     and if the insn is setting the value in a register
     and if the register being set is the register holding the address
       of the argument
     and if the address is computed by an addition
     that involves adding to a register
     which is the frame pointer
     a constant integer

     then...  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (   GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
	  && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
	  && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
	  && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
	  && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
	  )
	{
	  value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));

	  break;
	}
    }

  if (value == 0)
    {
      debug_rtx (addr);
      warning ("Unable to compute real location of stacked parameter");
      value = 8; /* XXX magic hack */
    }

  return value;
}

\f
/* Recursively search through all of the blocks in a function
   checking to see if any of the variables created in that
   function match the RTX called 'orig'.  If they do then
   replace them with the RTX called 'new'.  */
static void
replace_symbols_in_block (block, orig, new)
     tree block;
     rtx orig;
     rtx new;
{
  for (; block; block = BLOCK_CHAIN (block))
    {
      tree sym;

      if (!TREE_USED (block))
	continue;

      for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
	{
	  if (   (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
	      || DECL_IGNORED_P (sym)
	      || TREE_CODE (sym) != VAR_DECL
	      || DECL_EXTERNAL (sym)
	      || !rtx_equal_p (DECL_RTL (sym), orig)
	      )
	    continue;

	  DECL_RTL (sym) = new;
	}

      replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
    }
}

/* Return the number (counting from 0) of the least significant set
   bit in MASK.  */
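/* For example, number_of_first_bit_set (0x14) is 2.  MASK must have at
   least one bit set, otherwise the loop below will not terminate.  */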
#ifdef __GNUC__
inline
#endif
static int
number_of_first_bit_set (mask)
     int mask;
{
  int bit;

  for (bit = 0;
       (mask & (1 << bit)) == 0;
       ++bit)
    continue;

  return bit;
}

/* Generate code to return from a thumb function.
   If 'reg_containing_return_addr' is -1, then the return address is
   actually on the stack, at the stack pointer.  */
static void
thumb_exit (f, reg_containing_return_addr, eh_ofs)
     FILE * f;
     int reg_containing_return_addr;
     rtx eh_ofs;
{
  unsigned regs_available_for_popping;
  unsigned regs_to_pop;
  int pops_needed;
  unsigned available;
  unsigned required;
  int mode;
  int size;
  int restore_a4 = FALSE;

  /* Compute the registers we need to pop.  */
  regs_to_pop = 0;
  pops_needed = 0;

  /* There is an assumption here, that if eh_ofs is not NULL, the
     normal return address will have been pushed.  */
  if (reg_containing_return_addr == -1 || eh_ofs)
    {
      /* When we are generating a return for __builtin_eh_return,
	 reg_containing_return_addr must specify the return regno.  */
      if (eh_ofs && reg_containing_return_addr == -1)
	abort ();

      regs_to_pop |= 1 << LR_REGNUM;
      ++pops_needed;
    }

  if (TARGET_BACKTRACE)
    {
      /* Restore the (ARM) frame pointer and stack pointer.  */
      regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
      pops_needed += 2;
    }

  /* If there is nothing to pop then just emit the BX instruction and
     return.  */
  if (pops_needed == 0)
    {
      if (eh_ofs)
	asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

      asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
      return;
    }
  /* Otherwise if we are not supporting interworking and we have not created
     a backtrace structure and the function was not entered in ARM mode then
     just pop the return address straight into the PC.  */
  else if (!TARGET_INTERWORK
	   && !TARGET_BACKTRACE
	   && !is_called_in_ARM_mode (current_function_decl))
    {
      if (eh_ofs)
	{
	  asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
	  asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
	  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
	}
      else
	asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);

      return;
    }

  /* Find out how many of the (return) argument registers we can corrupt.  */
  regs_available_for_popping = 0;

  /* If returning via __builtin_eh_return, the bottom three registers
     all contain information needed for the return.  */
  if (eh_ofs)
    size = 12;
  else
    {
#ifdef RTX_CODE
      /* We can deduce the registers used from the function's return value.
	 This is more reliable than examining regs_ever_live[] because that
	 will be set if the register is ever used in the function, not just
	 if the register is used to hold a return value.  */
      if (current_function_return_rtx != 0)
	mode = GET_MODE (current_function_return_rtx);
      else
#endif
	mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      if (size == 0)
	{
	  /* In a void function we can use any argument register.
	     In a function that returns a structure on the stack
	     we can use the second and third argument registers.  */
	  if (mode == VOIDmode)
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (1))
	      | (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	  else
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	}
      else if (size <= 4)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (2))
	  | (1 << ARG_REGISTER (3));
      else if (size <= 8)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (3));
    }

  /* Match registers to be popped with registers into which we pop them.  */
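  /* (X & -X isolates the lowest set bit of X, so each loop iteration
     below retires one register from each mask: POPS_NEEDED is decremented
     once for every popped register that has somewhere to go.)  */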
  for (available = regs_available_for_popping,
       required  = regs_to_pop;
       required != 0 && available != 0;
       available &= ~(available & - available),
       required  &= ~(required  & - required))
    --pops_needed;

  /* If we have any popping registers left over, remove them.  */
  if (available > 0)
    regs_available_for_popping &= ~available;
  /* Otherwise if we need another popping register we can use
     the fourth argument register.  */
  else if (pops_needed)
    {
      /* If we have not found any free argument registers and
	 reg a4 contains the return address, we must move it.  */
      if (regs_available_for_popping == 0
	  && reg_containing_return_addr == LAST_ARG_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}
      else if (size > 12)
	{
	  /* Register a4 is being used to hold part of the return value,
	     but we have dire need of a free, low register.  */
	  restore_a4 = TRUE;

	  asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	}

      if (reg_containing_return_addr != LAST_ARG_REGNUM)
	{
	  /* The fourth argument register is available.  */
	  regs_available_for_popping |= 1 << LAST_ARG_REGNUM;

	  --pops_needed;
	}
    }

  /* Pop as many registers as we can.  */
  thumb_pushpop (f, regs_available_for_popping, FALSE);

  /* Process the registers we popped.  */
  if (reg_containing_return_addr == -1)
    {
      /* The return address was popped into the lowest numbered register.  */
      regs_to_pop &= ~(1 << LR_REGNUM);

      reg_containing_return_addr =
	number_of_first_bit_set (regs_available_for_popping);

      /* Remove this register from the mask of available registers, so that
	 the return address will not be corrupted by further pops.  */
      regs_available_for_popping &= ~(1 << reg_containing_return_addr);
    }

  /* If we popped other registers then handle them here.  */
  if (regs_available_for_popping)
    {
      int frame_pointer;

      /* Work out which register currently contains the frame pointer.  */
      frame_pointer = number_of_first_bit_set (regs_available_for_popping);

      /* Move it into the correct place.  */
      asm_fprintf (f, "\tmov\t%r, %r\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);

      /* (Temporarily) remove it from the mask of popped registers.  */
      regs_available_for_popping &= ~(1 << frame_pointer);
      regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);

      if (regs_available_for_popping)
	{
	  int stack_pointer;

	  /* We popped the stack pointer as well,
	     find the register that contains it.  */
	  stack_pointer = number_of_first_bit_set (regs_available_for_popping);

	  /* Move it into the stack register.  */
	  asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);

	  /* At this point we have popped all necessary registers, so
	     do not worry about restoring regs_available_for_popping
	     to its correct value:

	     assert (pops_needed == 0)
	     assert (regs_available_for_popping == (1 << frame_pointer))
	     assert (regs_to_pop == (1 << STACK_POINTER))  */
	}
      else
	{
	  /* Since we have just moved the popped value into the frame
	     pointer, the popping register is available for reuse, and
	     we know that we still have the stack pointer left to pop.  */
	  regs_available_for_popping |= (1 << frame_pointer);
	}
    }

  /* If we still have registers left on the stack, but we no longer have
     any registers into which we can pop them, then we must move the return
     address into the link register and make available the register that
     contained it.  */
  if (regs_available_for_popping == 0 && pops_needed > 0)
    {
      regs_available_for_popping |= 1 << reg_containing_return_addr;

      asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
		   reg_containing_return_addr);

      reg_containing_return_addr = LR_REGNUM;
    }

  /* If we have registers left on the stack then pop some more.
     We know that at most we will want to pop FP and SP.  */
  if (pops_needed > 0)
    {
      int popped_into;
      int move_to;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      /* We have popped either FP or SP.
	 Move whichever one it is into the correct register.  */
      popped_into = number_of_first_bit_set (regs_available_for_popping);
      move_to = number_of_first_bit_set (regs_to_pop);

      asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);

      regs_to_pop &= ~(1 << move_to);

      --pops_needed;
    }

  /* If we still have not popped everything then we must have only
     had one register available to us and we are now popping the SP.  */
  if (pops_needed > 0)
    {
      int popped_into;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      popped_into = number_of_first_bit_set (regs_available_for_popping);

      asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
      /*
	assert (regs_to_pop == (1 << STACK_POINTER))
	assert (pops_needed == 1)
      */
    }

  /* If necessary restore the a4 register.  */
  if (restore_a4)
    {
      if (reg_containing_return_addr != LR_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }

  if (eh_ofs)
    asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

  /* Return to caller.  */
  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
}

/* Emit code to push or pop registers to or from the stack.  */
static void
thumb_pushpop (f, mask, push)
     FILE * f;
     int mask;
     int push;
{
  int regno;
  int lo_mask = mask & 0xFF;

  if (lo_mask == 0 && !push && (mask & (1 << 15)))
    {
      /* Special case.  Do not generate a POP PC statement here; do it in
	 thumb_exit().  */
      thumb_exit (f, -1, NULL_RTX);
      return;
    }

  fprintf (f, "\t%s\t{", push ? "push" : "pop");

  /* Look at the low registers first.  */
  for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
    {
      if (lo_mask & 1)
	{
	  asm_fprintf (f, "%r", regno);

	  if ((lo_mask & ~1) != 0)
	    fprintf (f, ", ");
	}
    }

  if (push && (mask & (1 << LR_REGNUM)))
    {
      /* Catch pushing the LR.  */
      if (mask & 0xFF)
	fprintf (f, ", ");

      asm_fprintf (f, "%r", LR_REGNUM);
    }
  else if (!push && (mask & (1 << PC_REGNUM)))
    {
      /* Catch popping the PC.  */
      if (TARGET_INTERWORK || TARGET_BACKTRACE)
	{
	  /* The PC is never popped directly; instead
	     it is popped into r3 and then BX is used.  */
	  fprintf (f, "}\n");

	  thumb_exit (f, -1, NULL_RTX);

	  return;
	}
      else
	{
	  if (mask & 0xFF)
	    fprintf (f, ", ");

	  asm_fprintf (f, "%r", PC_REGNUM);
	}
    }

  fprintf (f, "}\n");
}
\f
void
thumb_final_prescan_insn (insn)
     rtx insn;
{
  if (flag_print_asm_name)
    asm_fprintf (asm_out_file, "%@ 0x%04x\n",
		 INSN_ADDRESSES (INSN_UID (insn)));
}

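/* Return non-zero if VAL can be expressed as an 8-bit constant shifted
   left by 0..24 bits, e.g. 0x4400 == 0x44 << 8 (a description inferred
   from the loop below).  */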
int
thumb_shiftable_const (val)
     unsigned HOST_WIDE_INT val;
{
  unsigned HOST_WIDE_INT mask = 0xff;
  int i;

  if (val == 0) /* XXX */
    return 0;

  for (i = 0; i < 25; i++)
    if ((val & (mask << i)) == val)
      return 1;

  return 0;
}

/* Returns non-zero if the current function contains,
   or might contain, a far jump.  */
int
thumb_far_jump_used_p (int in_prologue)
{
  rtx insn;

  /* This test is only important for leaf functions.  */
  /* assert (!leaf_function_p ()); */

  /* If we have already decided that far jumps may be used,
     do not bother checking again, and always return true even if
     it turns out that they are not being used.  Once we have made
     the decision that far jumps are present (and that hence the link
     register will be pushed onto the stack) we cannot go back on it.  */
  if (cfun->machine->far_jump_used)
    return 1;

  /* If this function is not being called from the prologue/epilogue
     generation code then it must be being called from the
     INITIAL_ELIMINATION_OFFSET macro.  */
  if (!in_prologue)
    {
      /* In this case we know that we are being asked about the elimination
	 of the arg pointer register.  If that register is not being used,
	 then there are no arguments on the stack, and we do not have to
	 worry that a far jump might force the prologue to push the link
	 register, changing the stack offsets.  In this case we can just
	 return false, since the presence of far jumps in the function will
	 not affect stack offsets.

	 If the arg pointer is live (or if it was live, but has now been
	 eliminated and so set to dead) then we do have to test to see if
	 the function might contain a far jump.  This test can lead to some
	 false negatives, since before reload is completed the length of
	 branch instructions is not known, so gcc defaults to returning their
	 longest length, which in turn sets the far jump attribute to true.

	 A false negative will not result in bad code being generated, but it
	 will result in a needless push and pop of the link register.  We
	 hope that this does not occur too often.  */
      if (regs_ever_live [ARG_POINTER_REGNUM])
	cfun->machine->arg_pointer_live = 1;
      else if (!cfun->machine->arg_pointer_live)
	return 0;
    }

  /* Check to see if the function contains a branch
     insn with the far jump attribute set.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN
	  /* Ignore tablejump patterns.  */
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
	  && get_attr_far_jump (insn) == FAR_JUMP_YES
	  )
	{
	  /* Record the fact that we have decided that
	     the function does use far jumps.  */
	  cfun->machine->far_jump_used = 1;
	  return 1;
	}
    }

  return 0;
}

/* Return non-zero if FUNC must be entered in ARM mode.  */
int
is_called_in_ARM_mode (func)
     tree func;
{
  if (TREE_CODE (func) != FUNCTION_DECL)
    abort ();

  /* Ignore the problem about functions whose address is taken.  */
  if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
    return TRUE;

#ifdef ARM_PE
  return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
#else
  return FALSE;
#endif
}

/* The bits which aren't usefully expanded as rtl.  */
const char *
thumb_unexpanded_epilogue ()
{
  int regno;
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int leaf_function = leaf_function_p ();
  int had_to_push_lr;
  rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;

  if (return_used_this_function)
    return "";

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
	&& !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  for (regno = 8; regno < 13; regno++)
    {
      if (regs_ever_live[regno] && !call_used_regs[regno]
	  && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
	high_regs_pushed++;
    }

  /* The prologue may have pushed some high registers to use as
     work registers.  e.g. the testsuite file:
     gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
     compiles to produce:
	push	{r4, r5, r6, r7, lr}
	mov	r7, r9
	mov	r6, r8
	push	{r6, r7}
     as part of the prologue.  We have to undo that pushing here.  */

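  /* For that example the matching undo sequence here would be something
     like:
	pop	{r6, r7}
	mov	r8, r6
	mov	r9, r7
     i.e. pop into free low registers, then move the values back up into
     the high registers.  */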
8839 if (high_regs_pushed)
8840 {
8841 int mask = live_regs_mask;
8842 int next_hi_reg;
8843 int size;
8844 int mode;
8845
8846#ifdef RTX_CODE
8847 /* If we can deduce the registers used from the function's return value.
8848 This is more reliable that examining regs_ever_live[] because that
8849 will be set if the register is ever used in the function, not just if
8850 the register is used to hold a return value. */
8851
8852 if (current_function_return_rtx != 0)
8853 mode = GET_MODE (current_function_return_rtx);
8854 else
8855#endif
8856 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8857
8858 size = GET_MODE_SIZE (mode);
8859
8860 /* Unless we are returning a type of size > 12 register r3 is
8861 available. */
8862 if (size < 13)
8863 mask |= 1 << 3;
8864
8865 if (mask == 0)
8866 /* Oh dear! We have no low registers into which we can pop
8867 high registers! */
8868 fatal ("No low registers available for popping high registers");
8869
8870 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
8871 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
8872 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
8873 break;
8874
8875 while (high_regs_pushed)
8876 {
8877 /* Find lo register(s) into which the high register(s) can
8878 be popped. */
8879 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8880 {
8881 if (mask & (1 << regno))
8882 high_regs_pushed--;
8883 if (high_regs_pushed == 0)
8884 break;
8885 }
8886
8887 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
8888
8889 /* Pop the values into the low register(s). */
8890 thumb_pushpop (asm_out_file, mask, 0);
8891
8892 /* Move the value(s) into the high registers. */
8893 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8894 {
8895 if (mask & (1 << regno))
8896 {
8897 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
8898 regno);
8899
8900 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
8901 if (regs_ever_live[next_hi_reg]
8902 && !call_used_regs[next_hi_reg]
8903 && !(TARGET_SINGLE_PIC_BASE
8904 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
8905 break;
8906 }
8907 }
8908 }
8909 }

  had_to_push_lr = (live_regs_mask || !leaf_function
		    || thumb_far_jump_used_p (1));

  if (TARGET_BACKTRACE
      && ((live_regs_mask & 0xFF) == 0)
      && regs_ever_live [LAST_ARG_REGNUM] != 0)
    {
      /* The stack backtrace structure creation code had to
	 push R7 in order to get a work register, so we pop
	 it now.  */
      live_regs_mask |= (1 << LAST_LO_REGNUM);
    }

  if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
    {
      if (had_to_push_lr
	  && !is_called_in_ARM_mode (current_function_decl)
	  && !eh_ofs)
	live_regs_mask |= 1 << PC_REGNUM;

      /* Either no argument registers were pushed or a backtrace
	 structure was created which includes an adjusted stack
	 pointer, so just pop everything.  */
      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (eh_ofs)
	thumb_exit (asm_out_file, 2, eh_ofs);
      /* We have either just popped the return address into the
	 PC, or it was kept in LR for the entire function, or it
	 is still on the stack because we do not want to
	 return by doing a pop {pc}.  */
      else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
	thumb_exit (asm_out_file,
		    (had_to_push_lr
		     && is_called_in_ARM_mode (current_function_decl)) ?
		    -1 : LR_REGNUM, NULL_RTX);
    }
  else
    {
      /* Pop everything but the return address.  */
      live_regs_mask &= ~(1 << PC_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (had_to_push_lr)
	/* Get the return address into a temporary register.  */
	thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);

      /* Remove the argument registers that were pushed onto the stack.  */
      asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
		   SP_REGNUM, SP_REGNUM,
		   current_function_pretend_args_size);

      if (eh_ofs)
	thumb_exit (asm_out_file, 2, eh_ofs);
      else
	thumb_exit (asm_out_file,
		    had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
    }

  return "";
}

/* Functions to save and restore machine-specific function data.  */

static void
arm_mark_machine_status (p)
     struct function * p;
{
  struct machine_function * machine = p->machine;

  ggc_mark_rtx (machine->ra_rtx);
  ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
}

static void
arm_init_machine_status (p)
     struct function * p;
{
  p->machine =
    (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
}

/* Return an RTX indicating where the return address to the
   calling function can be found.  */
rtx
arm_return_addr (count, frame)
     int count;
     rtx frame ATTRIBUTE_UNUSED;
{
  rtx reg;

  if (count != 0)
    return NULL_RTX;

  reg = cfun->machine->ra_rtx;

  if (reg == NULL)
    {
      rtx init;

      /* No rtx yet.  Invent one, and initialize it for r14 (lr) in
	 the prologue.  */
      reg = gen_reg_rtx (Pmode);
      cfun->machine->ra_rtx = reg;

      if (!TARGET_APCS_32)
	init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
			    GEN_INT (RETURN_ADDR_MASK26));
      else
	init = gen_rtx_REG (Pmode, LR_REGNUM);

      init = gen_rtx_SET (VOIDmode, reg, init);

      /* Emit the insn to the prologue with the other argument copies.  */
      push_topmost_sequence ();
      emit_insn_after (init, get_insns ());
      pop_topmost_sequence ();
    }

  return reg;
}
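
/* Note (illustrative): in 26-bit APCS mode the condition flags live in
   the top bits of LR alongside the return address, which is why the AND
   with RETURN_ADDR_MASK26 above is needed.  For example, assuming the
   usual mask value of 0x03fffffc, a saved LR of 0x84000100 (flag bits
   set) yields a return address of 0x00000100.  In 32-bit mode LR
   already holds a plain address and can be used directly.  */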

/* Do anything needed before RTL is emitted for each function.  */
void
arm_init_expanders ()
{
  /* Arrange to initialize and mark the machine per-function status.  */
  init_machine_status = arm_init_machine_status;
  mark_machine_status = arm_mark_machine_status;
}

/* Generate the rest of a function's prologue.  */
void
thumb_expand_prologue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);

  /* Naked functions don't have prologues.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (frame_pointer_needed)
    emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));

  if (amount)
    {
      amount = ROUND_UP (amount);

      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (-amount)));
      else
	{
	  int regno;
	  rtx reg;

	  /* The stack decrement is too big for an immediate value in a
	     single insn.  In theory we could issue multiple subtracts,
	     but after three of them it becomes more space-efficient to
	     place the full value in the constant pool and load it into
	     a register.  (Also the ARM debugger really likes to see only
	     one stack decrement per function.)  So instead we look for a
	     scratch register into which we can load the decrement, and
	     then we subtract this from the stack pointer.  Unfortunately
	     on the Thumb the only available scratch registers are the
	     argument registers, and we cannot use these as they may hold
	     arguments to the function.  Instead we attempt to locate a
	     call-preserved register which is used by this function.  If
	     we can find one, then we know that it will have been pushed
	     at the start of the prologue and so we can corrupt
	     it now.  */
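	  /* An illustrative sketch of the resulting code for a
	     1024-byte frame, assuming call-preserved r4 was pushed in
	     the prologue and can be corrupted here:
		ldr	r4, .Ln		@ .Ln: .word -1024
		add	sp, r4
	     The register actually chosen depends on the search below.  */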
	  for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
	    if (regs_ever_live[regno]
		&& !call_used_regs[regno] /* Paranoia */
		&& !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
		&& !(frame_pointer_needed
		     && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
	      break;

	  if (regno > LAST_LO_REGNUM) /* Very unlikely */
	    {
	      rtx spare = gen_rtx (REG, SImode, IP_REGNUM);

	      /* Choose an arbitrary, non-argument low register.  */
	      reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);

	      /* Save it by copying it into a high, scratch register.  */
	      emit_insn (gen_movsi (spare, reg));

	      /* Decrement the stack.  */
	      emit_insn (gen_movsi (reg, GEN_INT (-amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));

	      /* Restore the low register's original value.  */
	      emit_insn (gen_movsi (reg, spare));

	      /* Emit a USE of the restored scratch register, so that flow
		 analysis will not consider the restore redundant.  The
		 register won't be used again in this function and isn't
		 restored by the epilogue.  */
	      emit_insn (gen_rtx_USE (VOIDmode, reg));
	    }
	  else
	    {
	      reg = gen_rtx (REG, SImode, regno);

	      emit_insn (gen_movsi (reg, GEN_INT (-amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));
	    }
	}
    }

  if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}

void
thumb_expand_epilogue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);

  /* Naked functions don't have epilogues.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (frame_pointer_needed)
    emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
  else if (amount)
    {
      amount = ROUND_UP (amount);

      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (amount)));
      else
	{
	  /* r3 is always free in the epilogue.  */
	  rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);

	  emit_insn (gen_movsi (reg, GEN_INT (amount)));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }

  /* Emit a USE (stack_pointer_rtx), so that
     the stack adjustment will not be deleted.  */
  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));

  if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}

void
output_thumb_prologue (f)
     FILE * f;
{
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int store_arg_regs = 0;
  int regno;

  if (arm_naked_function_p (current_function_decl))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
	abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
	abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate the code sequence to switch us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
	 ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
	 change in instruction sets.  This label is also used by
	 the assembler to bypass the ARM code when this function
	 is called from a Thumb encoded function elsewhere in the
	 same file.  Hence the definition of STUB_NAME here must
	 agree with the definition in gas/config/tc-arm.c.  */

#define STUB_NAME ".real_start_of"

      asm_fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
	name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      asm_fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }
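
  /* An illustrative sketch of the stub emitted above for a function
     `foo' (assuming an empty user-label prefix; the stub label is
     STUB_NAME immediately followed by the function's assembler name):
	.code	32
     foo:
	orr	r12, pc, #1
	bx	r12
	.code	16
	.globl	.real_start_offoo
	.thumb_func
     .real_start_offoo:
     In ARM state PC reads as the orr's own address plus 8, i.e. the
     first Thumb instruction; setting the low bit makes the bx switch
     the processor into Thumb state when it branches there.  */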

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
	{
	  int num_pushes;

	  asm_fprintf (f, "\tpush\t{");

	  num_pushes = NUM_INTS (current_function_pretend_args_size);

	  for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
	       regno <= LAST_ARG_REGNUM;
	       regno++)
	    asm_fprintf (f, "%r%s", regno,
			 regno == LAST_ARG_REGNUM ? "" : ", ");

	  asm_fprintf (f, "}\n");
	}
      else
	asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
		     SP_REGNUM, SP_REGNUM,
		     current_function_pretend_args_size);
    }

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
	&& !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
    live_regs_mask |= 1 << LR_REGNUM;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register = 0;
      int wr;

      /* We have been asked to create a stack backtrace structure.
	 The code looks like this:

	 0   .align 2
	 0   func:
	 0     sub   SP, #16		Reserve space for 4 registers.
	 2     push  {R7}		Get a work register.
	 4     add   R7, SP, #20	Get the stack pointer before the push.
	 6     str   R7, [SP, #8]	Store the stack pointer (before reserving the space).
	 8     mov   R7, PC		Get hold of the start of this code plus 12.
	 10    str   R7, [SP, #16]	Store it.
	 12    mov   R7, FP		Get hold of the current frame pointer.
	 14    str   R7, [SP, #4]	Store it.
	 16    mov   R7, LR		Get hold of the current return address.
	 18    str   R7, [SP, #12]	Store it.
	 20    add   R7, SP, #16	Point at the start of the backtrace structure.
	 22    mov   FP, R7		Put this value into the frame pointer.  */

      if ((live_regs_mask & 0xFF) == 0)
	{
	  /* See if the a4 register is free.  */

	  if (regs_ever_live [LAST_ARG_REGNUM] == 0)
	    work_register = LAST_ARG_REGNUM;
	  else	/* We must push a register of our own.  */
	    live_regs_mask |= (1 << LAST_LO_REGNUM);
	}

      if (work_register == 0)
	{
	  /* Select a register from the list that will be pushed to
	     use as our work register.  */
	  for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
	    if ((1 << work_register) & live_regs_mask)
	      break;
	}

      asm_fprintf
	(f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
	 SP_REGNUM, SP_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (f, live_regs_mask, 1);

      for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
	if (wr & live_regs_mask)
	  offset += 4;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
	 to calculate "start of backtrace creation code + 12".  */
      if (live_regs_mask)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	}
      else
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (live_regs_mask)
    thumb_pushpop (f, live_regs_mask, 1);

  for (regno = 8; regno < 13; regno++)
    {
      if (regs_ever_live[regno] && !call_used_regs[regno]
	  && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
	high_regs_pushed++;
    }

  if (high_regs_pushed)
    {
      int pushable_regs = 0;
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
	{
	  if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
	      && !(TARGET_SINGLE_PIC_BASE
		   && (next_hi_reg == arm_pic_register)))
	    break;
	}

      pushable_regs = mask;

      if (pushable_regs == 0)
	{
	  /* Desperation time -- this probably will never happen.  */
	  if (regs_ever_live[LAST_ARG_REGNUM]
	      || !call_used_regs[LAST_ARG_REGNUM])
	    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	  mask = 1 << LAST_ARG_REGNUM;
	}

      while (high_regs_pushed > 0)
	{
	  for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

		  high_regs_pushed--;

		  if (high_regs_pushed)
		    for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
			 next_hi_reg--)
		      {
			if (regs_ever_live[next_hi_reg]
			    && !call_used_regs[next_hi_reg]
			    && !(TARGET_SINGLE_PIC_BASE
				 && (next_hi_reg == arm_pic_register)))
			  break;
		      }
		  else
		    {
		      mask &= ~((1 << regno) - 1);
		      break;
		    }
		}
	    }

	  thumb_pushpop (f, mask, 1);
	}

      if (pushable_regs == 0
	  && (regs_ever_live[LAST_ARG_REGNUM]
	      || !call_used_regs[LAST_ARG_REGNUM]))
	asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }
}

/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  */

const char *
thumb_load_double_from_address (operands)
     rtx * operands;
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    fatal ("thumb_load_double_from_address: destination is not a register");

  if (GET_CODE (operands[1]) != MEM)
    {
      debug_rtx (operands[1]);
      fatal ("thumb_load_double_from_address: source is not a computed memory address");
    }

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
	{
	  output_asm_insn ("ldr\t%H0, %2", operands);
	  output_asm_insn ("ldr\t%0, %1", operands);
	}
      else
	{
	  output_asm_insn ("ldr\t%0, %1", operands);
	  output_asm_insn ("ldr\t%H0, %2", operands);
	}
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
	base = arg2, offset = arg1;
      else
	base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
	fatal ("thumb_load_double_from_address: base is not a register");

      /* Catch the case of <address> = <reg> + <reg>.  */
      if (GET_CODE (offset) == REG)
	{
	  int reg_offset = REGNO (offset);
	  int reg_base = REGNO (base);
	  int reg_dest = REGNO (operands[0]);

	  /* Add the base and offset registers together into the
	     higher destination register.  */
	  asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r\n",
		       reg_dest + 1, reg_base, reg_offset);

	  /* Load the lower destination register from the address in
	     the higher destination register.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]\n",
		       reg_dest, reg_dest + 1);

	  /* Load the higher destination register from its own address
	     plus 4.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]\n",
		       reg_dest + 1, reg_dest + 1);
	}
      else
	{
	  /* Compute <address> + 4 for the high order load.  */
	  operands[2] = gen_rtx (MEM, SImode,
				 plus_constant (XEXP (operands[1], 0), 4));

	  /* If the computed address is held in the low order register
	     then load the high order register first, otherwise always
	     load the low order register first.  */
	  if (REGNO (operands[0]) == REGNO (base))
	    {
	      output_asm_insn ("ldr\t%H0, %2", operands);
	      output_asm_insn ("ldr\t%0, %1", operands);
	    }
	  else
	    {
	      output_asm_insn ("ldr\t%0, %1", operands);
	      output_asm_insn ("ldr\t%H0, %2", operands);
	    }
	}
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
	 directly.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      debug_rtx (operands[1]);
      fatal ("thumb_load_double_from_address: Unhandled address calculation");
      break;
    }

  return "";
}
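
/* Example (illustrative): for a load of the pair r0/r1 from [r0], the
   address register is also the low destination, so the high word must
   be fetched first:
	ldr	r1, [r0, #4]
	ldr	r0, [r0]
   Loading the low word first would overwrite the base address before
   the second load could use it.  */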

const char *
thumb_output_move_mem_multiple (n, operands)
     int n;
     rtx * operands;
{
  rtx tmp;

  switch (n)
    {
    case 2:
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}
      output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
      break;

    case 3:
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}
      if (REGNO (operands[5]) > REGNO (operands[6]))
	{
	  tmp = operands[5];
	  operands[5] = operands[6];
	  operands[6] = tmp;
	}
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}

      output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
      break;

    default:
      abort ();
    }

  return "";
}
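
/* The three compare-and-swap steps in the 3-register case above form a
   sorting network: the registers in an ldmia/stmia list must be named
   in ascending order.  A minimal sketch of the same idea on plain ints
   (illustrative only, not compiled into the backend):  */
#if 0
static void
sort3 (int * a, int * b, int * c)
{
  int t;
  if (*a > *b) { t = *a; *a = *b; *b = t; }	/* Order the first pair.  */
  if (*b > *c) { t = *b; *b = *c; *c = t; }	/* Largest is now in *c.  */
  if (*a > *b) { t = *a; *a = *b; *b = t; }	/* Order the remaining pair.  */
}
#endif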

/* Routines for generating RTL.  */

void
thumb_expand_movstrqi (operands)
     rtx * operands;
{
  rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
  rtx in  = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
  HOST_WIDE_INT len = INTVAL (operands[2]);
  HOST_WIDE_INT offset = 0;

  while (len >= 12)
    {
      emit_insn (gen_movmem12b (out, in, out, in));
      len -= 12;
    }

  if (len >= 8)
    {
      emit_insn (gen_movmem8b (out, in, out, in));
      len -= 8;
    }

  if (len >= 4)
    {
      rtx reg = gen_reg_rtx (SImode);
      emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
      emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
      len -= 4;
      offset += 4;
    }

  if (len >= 2)
    {
      rtx reg = gen_reg_rtx (HImode);
      emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
			    reg));
      len -= 2;
      offset += 2;
    }

  if (len)
    {
      rtx reg = gen_reg_rtx (QImode);
      emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
			    reg));
    }
}
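
/* Worked example (illustrative): a 27-byte copy is emitted as two
   12-byte ldmia/stmia blocks (which advance the pointer registers
   themselves, so `offset' stays zero), then a halfword copy at
   offset 0 and a byte copy at offset 2 for the 3-byte tail.  */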

int
thumb_cmp_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((GET_CODE (op) == CONST_INT
	   && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
	  || register_operand (op, mode));
}
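
/* The constant bound reflects the Thumb CMP immediate encoding, which
   only has an 8-bit unsigned field: `cmp r0, #255' can be encoded
   directly, while a comparison against 256 needs the value in a
   register first.  */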

static const char *
thumb_condition_code (x, invert)
     rtx x;
     int invert;
{
  static const char * conds[] =
  {
    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
    "hi", "ls", "ge", "lt", "gt", "le"
  };
  int val;

  switch (GET_CODE (x))
    {
    case EQ: val = 0; break;
    case NE: val = 1; break;
    case GEU: val = 2; break;
    case LTU: val = 3; break;
    case GTU: val = 8; break;
    case LEU: val = 9; break;
    case GE: val = 10; break;
    case LT: val = 11; break;
    case GT: val = 12; break;
    case LE: val = 13; break;
    default:
      abort ();
    }

  return conds[val ^ invert];
}
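
/* conds[] pairs each condition with its inverse at the adjacent index,
   so XORing the index with invert == 1 flips the sense: e.g. GT maps
   to "gt" normally and to "le" when inverted.  */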

/* Handle storing a half-word to memory during reload.  */
void
thumb_reload_out_hi (operands)
     rtx * operands;
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
}

/* Handle reading a half-word from memory during reload.  */
void
thumb_reload_in_hi (operands)
     rtx * operands ATTRIBUTE_UNUSED;
{
  abort ();
}

/* Return the length of a function name prefix
   that starts with the character C.  */
static int
arm_get_strip_length (char c)
{
  switch (c)
    {
    ARM_NAME_ENCODING_LENGTHS
    default: return 0;
    }
}

/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */
const char *
arm_strip_name_encoding (const char * name)
{
  int skip;

  while ((skip = arm_get_strip_length (* name)))
    name += skip;

  return name;
}
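
/* For example (assuming '*' appears in ARM_NAME_ENCODING_LENGTHS with a
   length of 1), arm_strip_name_encoding ("*foo") returns "foo"; the
   loop stops as soon as arm_get_strip_length sees a character that is
   not a recognised prefix, since the default case returns 0.  */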

#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

rtx aof_pic_label = NULL_RTX;
struct pic_chain
{
  struct pic_chain * next;
  char * symname;
};

static struct pic_chain * aof_pic_chain = NULL;

rtx
aof_pic_entry (x)
     rtx x;
{
  struct pic_chain ** chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    {
      /* We mark this here and not in arm_add_gc_roots() to avoid
	 polluting even more code with ifdefs, and because it never
	 contains anything useful until we assign to it here.  */
      ggc_add_rtx_root (&aof_pic_label, 1);
      aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
    }

  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}
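
/* Each chain entry occupies one 4-byte slot in the x$adcons pool, so
   the first symbol requested gets offset 0, the next offset 4, and so
   on; asking for a symbol already in the chain returns its existing
   slot rather than allocating a new one.  */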

void
aof_dump_pic_table (f)
     FILE * f;
{
  struct pic_chain * chain;

  if (aof_pic_chain == NULL)
    return;

  asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
	       PIC_OFFSET_TABLE_REGNUM,
	       PIC_OFFSET_TABLE_REGNUM);
  fputs ("|x$adcons|\n", f);

  for (chain = aof_pic_chain; chain; chain = chain->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, chain->symname);
      fputs ("\n", f);
    }
}

int arm_text_section_count = 1;

char *
aof_text_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
	   arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}

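/* For instance, the first code section emitted is
	AREA |C$$code1|, CODE, READONLY
   with ", PIC, REENTRANT" appended when compiling with -fpic.  */
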
static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}

/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

struct import
{
  struct import * next;
  char * name;
};

static struct import * imports_list = NULL;

void
aof_add_import (name)
     char * name;
{
  struct import * new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}

void
aof_delete_import (name)
     char * name;
{
  struct import ** old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
	{
	  *old = (*old)->next;
	  return;
	}
    }
}

int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE * f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */