/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "tm_p.h"

/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long

/* Forward function declarations.  */
static void arm_add_gc_roots            PARAMS ((void));
static int arm_gen_constant             PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static int arm_naked_function_p         PARAMS ((tree));
static Ulong bit_count                  PARAMS ((signed int));
static int const_ok_for_op              PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip              PARAMS ((rtx *));
static rtx emit_multi_reg_push          PARAMS ((int));
static rtx emit_sfm                     PARAMS ((int, int));
static const char * fp_const_from_val   PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code    PARAMS ((rtx));
static void init_fpa_table              PARAMS ((void));
static Hint int_log2                    PARAMS ((Hint));
static rtx is_jump_table                PARAMS ((rtx));
static const char * output_multi_immediate PARAMS ((rtx *, const char *, const char *, int, Hint));
static void print_multi_reg             PARAMS ((FILE *, const char *, int, int, int));
static Mmode select_dominance_cc_mode   PARAMS ((rtx, rtx, Hint));
static const char * shift_op            PARAMS ((rtx, Hint *));
static void arm_init_machine_status     PARAMS ((struct function *));
static void arm_mark_machine_status     PARAMS ((struct function *));
static int number_of_first_bit_set      PARAMS ((int));
static void replace_symbols_in_block    PARAMS ((tree, rtx, rtx));
static void thumb_exit                  PARAMS ((FILE *, int, rtx));
static void thumb_pushpop               PARAMS ((FILE *, int, int));
static const char * thumb_condition_code PARAMS ((rtx, int));
static Hint get_jump_table_size         PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets     PARAMS ((Mfix *));
static void arm_print_value             PARAMS ((FILE *, rtx));
static void dump_minipool               PARAMS ((rtx));
static int arm_barrier_cost             PARAMS ((rtx));
static Mfix * create_fix_barrier        PARAMS ((Mfix *, Hint));
static void push_minipool_barrier       PARAMS ((rtx, Hint));
static void push_minipool_fix           PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants      PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
\f
#undef Hint
#undef Mmode
#undef Ulong

/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free  free

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",      FL_CO_PROC | FL_MODE26 },
  {"arm250",    FL_CO_PROC | FL_MODE26 },
  {"arm3",      FL_CO_PROC | FL_MODE26 },
  {"arm6",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                 FL_MODE26 | FL_MODE32 },
  {"arm620",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D (and I); those
     don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                 FL_MODE26 | FL_MODE32 },
  {"arm720",                 FL_MODE26 | FL_MODE32 },
  {"arm710c",                FL_MODE26 | FL_MODE32 },
  {"arm7100",                FL_MODE26 | FL_MODE32 },
  {"arm7500",                FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",      FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",    FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",  FL_CO_PROC | FL_MODE26 },
  { "armv2a", FL_CO_PROC | FL_MODE26 },
  { "armv3",  FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name         processors  */
  { NULL,      "-mcpu=",     all_cores  },
  { NULL,      "-march=",    all_architectures },
  { NULL,      "-mtune=",    all_cores }
};

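/* For example, given "-mcpu=arm710 -mtune=strongarm" on the command
   line, arm_select[0].string becomes "arm710" and arm_select[2].string
   becomes "strongarm"; arm_override_options below then looks each
   string up in the processors table that its entry points at.  */
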
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
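
/* For example, bit_count (0x29) proceeds 0x29 -> 0x28 -> 0x20 -> 0,
   clearing the lowest set bit on each iteration, and returns 3.  */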

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int          cpu;
        const char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int        current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions.");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && !TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched  = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong = (tune_flags & FL_STRONG) != 0;
  thumb_code    = (TARGET_ARM == 0);
  arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
                    && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
cce8749e 677\f
6354dc9b 678/* Return 1 if it is possible to return using a single instruction. */
ff9940b0 679int
b36ba79f
RE
680use_return_insn (iscond)
681 int iscond;
ff9940b0
RE
682{
683 int regno;
684
d5b7b3ae 685 /* Never use a return instruction before reload has run. */
5895f793 686 if (!reload_completed
d5b7b3ae 687 /* Or if the function is variadic. */
f5a1b0d2 688 || current_function_pretend_args_size
ff9940b0 689 || current_function_anonymous_args
d5b7b3ae
RE
690 /* Of if the function calls __builtin_eh_return () */
691 || cfun->machine->eh_epilogue_sp_ofs != NULL
692 /* Or if there is no frame pointer and there is a stack adjustment. */
56636818 693 || ((get_frame_size () + current_function_outgoing_args_size != 0)
5895f793 694 && !frame_pointer_needed))
ff9940b0
RE
695 return 0;
696
b111229a 697 /* Can't be done if interworking with Thumb, and any registers have been
b36ba79f
RE
698 stacked. Similarly, on StrongARM, conditional returns are expensive
699 if they aren't taken and registers have been stacked. */
f5a1b0d2 700 if (iscond && arm_is_strong && frame_pointer_needed)
b36ba79f 701 return 0;
d5b7b3ae 702
f5a1b0d2 703 if ((iscond && arm_is_strong)
6cfc7210 704 || TARGET_INTERWORK)
6ed30148 705 {
d5b7b3ae 706 for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
5895f793 707 if (regs_ever_live[regno] && !call_used_regs[regno])
6ed30148
RE
708 return 0;
709
710 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
b111229a 711 return 0;
6ed30148 712 }
b111229a 713
ff9940b0 714 /* Can't be done if any of the FPU regs are pushed, since this also
6354dc9b 715 requires an insn. */
d5b7b3ae
RE
716 if (TARGET_HARD_FLOAT)
717 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
5895f793 718 if (regs_ever_live[regno] && !call_used_regs[regno])
d5b7b3ae 719 return 0;
ff9940b0 720
31fdb4d5
DE
721 /* If a function is naked, don't use the "return" insn. */
722 if (arm_naked_function_p (current_function_decl))
723 return 0;
724
ff9940b0
RE
725 return 1;
726}
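
/* As an illustration: under typical APCS code generation, a leaf
   function that saves no call-saved registers and needs no stack
   adjustment can return with a single "mov pc, lr" (or equivalent),
   whereas a function that must first pop saved registers cannot, and
   so fails the checks above.  */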

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~HOST_UINT (0xffffffff)) != 0
      && ((i & ~HOST_UINT (0xffffffff))
          != ((~HOST_UINT (0))
              & ~HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask = (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                            >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    }
  while (mask != ~HOST_UINT (0xFF));

  return FALSE;
}
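
/* Put another way: a valid immediate is an 8-bit value rotated right by
   an even amount within the 32-bit word.  For example 0x000000FF,
   0x0000FF00 and 0xFF000000 are all representable, while 0x00000101 is
   not, since its set bits do not fit in any 8-bit window at an even
   rotation, and so it must be synthesised with more than one insn.  */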

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:  /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
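
/* For example, (plus x -2) cannot be encoded directly, since -2 is not a
   valid immediate, but its negation 2 is, so the PLUS case above accepts
   it and the operation can be emitted as a subtract.  Likewise an AND
   with 0xFFFFFF00 can become a bit-clear with 0xFF, since the inverted
   constant is valid.  */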

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
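
/* Illustrative example: for code == SET and val == 0x12345678 the
   synthesis below needs four instructions, which exceeds the default
   arm_constant_limit of 3, so before arm_reorg has run the constant is
   emitted as a plain move and later fixed up into a load from memory;
   once after_arm_reorg is set it has to be synthesised in-line instead.  */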

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
                                       << (32 - clear_sign_bit_copies))
                                      & HOST_UINT (0xffffffff));

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
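
/* Worked example of the chunking loop above: for SET with
   val == 0x12345678 it peels off 8-bit chunks aligned on 2-bit
   boundaries from the top down (0x12000000, 0x344000, 0x1640 and 0x38),
   emitting a move for the first chunk and adds for the rest; the SET is
   rewritten as PLUS once the first piece is out, giving four insns.  */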

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~(HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
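
/* For example, (GT x 0xFFFFFF) would need 0xFFFFFF loading into a
   register, but the equivalent (GE x 0x1000000) can use a single
   compare with immediate, since 0x1000000 is a valid ARM constant.  */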

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE

  if (int_size_in_bytes (type) > 4)
    /* All structures/unions bigger than one word are returned in memory.  */
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
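
/* For example, on non-WinCE targets "struct { char c; }" and
   "struct { int i : 8; int j : 24; }" are returned in a register,
   while "struct { int i; int j; }" (bigger than one word) and
   "struct { float f; }" are returned in memory.  */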

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
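
/* Under the APCS the first NUM_ARG_REGS argument words are passed in
   registers starting at r0, so for "int f (int a, int b)" both
   arguments arrive in registers.  A function whose aggregate return
   value goes in memory starts with nregs == 1, because r0 is already
   taken by the hidden pointer to the result.  */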
\f
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,   /* No #pragma [no_]long_calls is in effect.  */
  LONG,  /* #pragma long_calls is in effect.  */
  SHORT  /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}

82e9d970
PB
1646\f
1647/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
1648 attribute for TYPE. The attributes in ATTRIBUTES have previously been
1649 assigned to TYPE. */
1650int
1651arm_valid_type_attribute_p (type, attributes, identifier, args)
1652 tree type;
1653 tree attributes ATTRIBUTE_UNUSED;
1654 tree identifier;
1655 tree args;
1656{
1657 if ( TREE_CODE (type) != FUNCTION_TYPE
1658 && TREE_CODE (type) != METHOD_TYPE
1659 && TREE_CODE (type) != FIELD_DECL
1660 && TREE_CODE (type) != TYPE_DECL)
1661 return 0;
1662
1663 /* Function calls made to this symbol must be done indirectly, because
1664 it may lie outside of the 26 bit addressing range of a normal function
1665 call. */
1666 if (is_attribute_p ("long_call", identifier))
1667 return (args == NULL_TREE);
c27ba912 1668
82e9d970
PB
1669 /* Whereas these functions are always known to reside within the 26 bit
1670 addressing range. */
1671 if (is_attribute_p ("short_call", identifier))
1672 return (args == NULL_TREE);
1673
1674 return 0;
1675}
1676
1677/* Return 0 if the attributes for two types are incompatible, 1 if they
1678 are compatible, and 2 if they are nearly compatible (which causes a
1679 warning to be generated). */
1680int
1681arm_comp_type_attributes (type1, type2)
1682 tree type1;
1683 tree type2;
1684{
1cb8d58a 1685 int l1, l2, s1, s2;
bd7fc26f 1686
82e9d970
PB
1687 /* Check for mismatch of non-default calling convention. */
1688 if (TREE_CODE (type1) != FUNCTION_TYPE)
1689 return 1;
1690
1691 /* Check for mismatched call attributes. */
1cb8d58a
NC
1692 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
1693 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
1694 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
1695 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
1696
1697 /* Only bother to check if an attribute is defined. */
1698 if (l1 | l2 | s1 | s2)
1699 {
1700 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 1701 if ((l1 != l2) || (s1 != s2))
bd7fc26f 1702 return 0;
82e9d970 1703
bd7fc26f
NC
1704 /* Disallow mixed attributes. */
1705 if ((l1 & s2) || (l2 & s1))
1706 return 0;
1707 }
1708
1709 return 1;
82e9d970
PB
1710}
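
/* Worked example (illustrative; the typedef names are invented):

       typedef void fn_t (void);
       typedef void lfn_t (void) __attribute__ ((long_call));

   Comparing fn_t against lfn_t returns 0 here -- one side carries
   long_call and the other does not -- so mixing pointers to the two
   types is diagnosed, while comparing lfn_t against itself returns 1.  */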

/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */
void
arm_encode_call_attribute (decl, flag)
     tree decl;
     int flag;
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int len = strlen (str);
  char * newstr;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  newstr = ggc_alloc_string (NULL, len + 2);

  sprintf (newstr, "%c%s", flag, str);

  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}
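
/* Sketch of the effect (the actual flag characters are the
   SHORT_CALL_FLAG_CHAR and LONG_CALL_FLAG_CHAR definitions elsewhere):
   a function whose assembler name is "foo" has that name rewritten to
   the one-character-longer string "<flag>foo"; the ENCODED_*_CALL_ATTR_P
   macros later simply test for that leading character.  */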

/* Assign default attributes to a newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */
void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions when inside
     #pragma long_calls, or __attribute__ ((short_call)) when inside
     #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
        attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
        attr_name = get_identifier ("short_call");
      else
        return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}
\f
/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */
static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition, however, then this may not be the
     real definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && !DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}

/* Return non-zero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
     or b.  is within the scope of a #pragma long_calls
     or c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
     or e.  is inside the scope of a #pragma no_long_calls
     or f.  has an __attribute__ ((section))
     or g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */
int
arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
     rtx sym_ref;
     int call_cookie;
     int call_symbol;
{
  if (!call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
        return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}
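
/* Usage sketch (illustrative only; the exact instruction sequence is
   target and option dependent): given

       extern void helper (void) __attribute__ ((long_call));

   a call to helper is emitted as a register-indirect sequence along
   the lines of

       ldr     ip, =helper
       mov     lr, pc
       mov     pc, ip

   rather than a single "bl helper", so the target may lie outside the
   26 bit range of the branch instruction.  */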

/* Return non-zero if it is ok to make a tail-call to DECL.  */
int
arm_function_ok_for_sibcall (decl)
     tree decl;
{
  int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;

  /* Never tailcall something for which we have no decl, or if we
     are in Thumb mode.  */
  if (decl == NULL || TARGET_THUMB)
    return 0;

  /* Get the calling method.  */
  if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_SHORT;
  else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_LONG;

  /* Cannot tail-call to long calls, since these are out of range of
     a branch instruction.  However, if not compiling PIC, we know
     we can reach the symbol if it is in this compilation unit.  */
  if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
    return 0;

  /* If we are interworking and the function is not declared static
     then we can't tail-call it unless we know that it exists in this
     compilation unit (since it might be a Thumb routine).  */
  if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
    return 0;

  /* Everything else is ok.  */
  return 1;
}

\f
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
        {
          if (no_new_pseudos)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);

          subregs = 1;
        }

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
        address = gen_reg_rtx (Pmode);
      else
        address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
                             gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                                           address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
                                            REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        {
          if (no_new_pseudos)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
        }

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

          if (!no_new_pseudos)
            offset = force_reg (Pmode, offset);
          else
            abort ();

        win:
          if (GET_CODE (offset) == CONST_INT)
            return plus_constant_for_output (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      current_function_uses_pic_offset_table = 1;

      if (NEED_GOT_RELOC)
        {
          rtx pic_ref, address = gen_reg_rtx (Pmode);

          emit_insn (gen_pic_load_addr (address, orig));
          pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);

          emit_move_insn (address, pic_ref);
          return address;
        }
    }

  return orig;
}
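
/* Illustrative sketch of the SYMBOL_REF case above: with -fpic, a
   reference to a global "x" is expanded into roughly

       address = <load of x's GOT offset>                 (gen_pic_load_addr)
       reg     = (mem (plus pic_offset_table_rtx address))

   i.e. one indirection through the GOT via the PIC base register, with
   a REG_EQUAL note recording the original SYMBOL_REF.  */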

static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}

void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (!flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition, on the Thumb it is 'dot + 4'.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
                              gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  if (TARGET_ARM)
    emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
  else
    emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
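
/* Conceptual result (an illustrative sketch; the register choice and
   label names are invented):

       ldr     sl, .Lpcrel
   .L1: add     sl, pc, sl       @ pc reads as .L1 + 8 in ARM state

   where .Lpcrel holds _GLOBAL_OFFSET_TABLE_ - (.L1 + 8), leaving the
   GOT address in the PIC base register.  */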

#define REG_OR_SUBREG_REG(X)                                            \
  (GET_CODE (X) == REG                                                  \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)                    \
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif

int
arm_rtx_costs (x, code, outer)
     rtx x;
     enum rtx_code code;
     enum rtx_code outer;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  if (TARGET_THUMB)
    {
      switch (code)
        {
        case ASHIFT:
        case ASHIFTRT:
        case LSHIFTRT:
        case ROTATERT:
        case PLUS:
        case MINUS:
        case COMPARE:
        case NEG:
        case NOT:
          return COSTS_N_INSNS (1);

        case MULT:
          if (GET_CODE (XEXP (x, 1)) == CONST_INT)
            {
              int cycles = 0;
              unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

              while (i)
                {
                  i >>= 2;
                  cycles++;
                }
              return COSTS_N_INSNS (2) + cycles;
            }
          return COSTS_N_INSNS (1) + 16;

        case SET:
          return (COSTS_N_INSNS (1)
                  + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
                         + GET_CODE (SET_DEST (x)) == MEM));

        case CONST_INT:
          if (outer == SET)
            {
              if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
                return 0;
              if (thumb_shiftable_const (INTVAL (x)))
                return COSTS_N_INSNS (2);
              return COSTS_N_INSNS (3);
            }
          else if (outer == PLUS
                   && INTVAL (x) < 256 && INTVAL (x) > -256)
            return 0;
          else if (outer == COMPARE
                   && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
            return 0;
          else if (outer == ASHIFT || outer == ASHIFTRT
                   || outer == LSHIFTRT)
            return 0;
          return COSTS_N_INSNS (2);

        case CONST:
        case CONST_DOUBLE:
        case LABEL_REF:
        case SYMBOL_REF:
          return COSTS_N_INSNS (3);

        case UDIV:
        case UMOD:
        case DIV:
        case MOD:
          return 100;

        case TRUNCATE:
          return 99;

        case AND:
        case XOR:
        case IOR:
          /* XXX guess.  */
          return 8;

        case ADDRESSOF:
        case MEM:
          /* XXX another guess.  */
          /* Memory costs quite a lot for the first word, but subsequent words
             load at the equivalent of a single insn each.  */
          return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
                  + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

        case IF_THEN_ELSE:
          /* XXX a guess.  */
          if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
            return 14;
          return 2;

        case ZERO_EXTEND:
          /* XXX still guessing.  */
          switch (GET_MODE (XEXP (x, 0)))
            {
            case QImode:
              return (1 + (mode == DImode ? 4 : 0)
                      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            case HImode:
              return (4 + (mode == DImode ? 4 : 0)
                      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            case SImode:
              return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            default:
              return 99;
            }

        default:
          return 99;
#if 0
        case FFS:
        case FLOAT:
        case FIX:
        case UNSIGNED_FIX:
          /* XXX guess */
          fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
                   rtx_name[code]);
          abort ();
#endif
        }
    }

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be split into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
                    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                        || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
                   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
         fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
          && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
          && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
        return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          || mode == DImode)
        return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
                                      & HOST_UINT (0xffffffff));
          int add_cost = const_ok_for_arm (i) ? 4 : 8;
          int j;

          /* Tune as appropriate.  */
          int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

          for (j = 0; i && j < 32; j += booth_unit_size)
            {
              i >>= booth_unit_size;
              add_cost += 2;
            }

          return add_cost;
        }

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
              + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
              + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      if (arm_fast_multiply && mode == SImode
          && GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
              == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
        return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
        return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
        return (4 + (mode == DImode ? 4 : 0)
                + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          return (1 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case HImode:
          return (4 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case SImode:
          return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        default:
          break;
        }
      abort ();

    case CONST_INT:
      if (const_ok_for_arm (INTVAL (x)))
        return outer == SET ? 2 : -1;
      else if (outer == AND
               && const_ok_for_arm (~INTVAL (x)))
        return -1;
      else if ((outer == COMPARE
                || outer == PLUS || outer == MINUS)
               && const_ok_for_arm (-INTVAL (x)))
        return -1;
      else
        return 5;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 6;

    case CONST_DOUBLE:
      if (const_double_rtx_ok_for_fpu (x))
        return outer == SET ? 2 : -1;
      else if ((outer == COMPARE || outer == PLUS)
               && neg_const_double_rtx_ok_for_fpu (x))
        return -1;
      return 7;

    default:
      return 99;
    }
}
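
/* Worked example (illustrative): on Thumb, costing
   (mult (reg) (const_int 0x55)) walks the constant two bits per
   iteration; 0x55 needs four shifts to reach zero, so the result is
   COSTS_N_INSNS (2) + 4 = 10 with the fallback COSTS_N_INSNS
   definition above ((N) * 4 - 2).  */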

int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  /* XXX This is not strictly true for the FPA.  */
  if (REG_NOTE_KIND (link) == REG_DEP_ANTI
      || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    return 0;

  /* Call insns don't incur a stall, even if they follow a load.  */
  if (REG_NOTE_KIND (link) == 0
      && GET_CODE (insn) == CALL_INSN)
    return 1;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store; there is no conflict if the load reads
         from a cached area.  Assume that loads from the stack, and from the
         constant pool are cached, and that others will miss.  This is a
         hack.  */

      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (hard_frame_pointer_rtx,
                              XEXP (SET_SRC (i_pat), 0)))
        return 1;
    }

  return cost;
}

/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

static const char * strings_fpa[8] =
{
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
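
/* Example (from the table above): 1.0 and 10.0 are valid FPA
   immediates, so a CONST_DOUBLE of 1.0 satisfies this predicate,
   whereas 1.5 is not in the table and -0.0 is rejected outright;
   both of those have to be loaded from memory instead.  */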

/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
\f
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  /* XXX might have to check for lo regs only for thumb ??? */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
arm_reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int regno = true_regnum (op);

  return (!CONSTANT_P (op)
          && (regno == -1
              || (GET_CODE (op) == REG
                  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4).
   MODE is QImode if called when computing constraints, or VOIDmode when
   emitting patterns.  In this latter case we cannot use memory_operand()
   because it will fail on badly formed MEMs, which is precisely what we are
   trying to catch.  */
int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if 0
  if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
    return 0;
#endif
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad.  */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && (!s_register_operand (XEXP (op, 0), VOIDmode)
          || (!s_register_operand (XEXP (op, 1), VOIDmode)
              && GET_CODE (XEXP (op, 1)) != CONST_INT)))
    return 1;

  /* Big constants are also bad.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
          || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so.  */
  return 0;
}
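
/* Example (illustrative): on architecture V4, [r1, #12] is a valid
   address for a signed byte load, but [r1, #256] is bad because the
   signed-byte addressing mode only has an 8 bit immediate offset, and
   [r1, r2, lsl #2] is bad because it is more complex than reg + reg.  */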

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
          || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (TARGET_THUMB)
    return thumb_cmp_operand (op, mode);

  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */
int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
          && GET_CODE (op) == MEM
          && offsettable_address_p (reload_completed | reload_in_progress,
                                    mode, XEXP (op, 0)));
}

/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */
int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
           || (GET_CODE (op) == SUBREG
               && GET_CODE (reg = SUBREG_REG (op)) == REG)
           || (GET_CODE (op) == PLUS
               && GET_CODE (XEXP (op, 1)) == CONST_INT
               && (GET_CODE (reg = XEXP (op, 0)) == REG
                   || (GET_CODE (XEXP (op, 0)) == SUBREG
                       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
          && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
}

/* Similar to s_register_operand, but does not allow hard integer
   registers.  */
int
f_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}

/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return const_double_rtx_ok_for_fpu (op);

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
            || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}

/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Like di_operand, but don't accept constants.  */
int
nonimmediate_di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DImode, XEXP (op, 0));

  return FALSE;
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Like soft_df_operand, but don't accept constants.  */
int
nonimmediate_soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DFmode, XEXP (op, 0));
  return FALSE;
}

/* Return TRUE for valid index operands.  */
int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (immediate_operand (op, mode)
              && (GET_CODE (op) != CONST_INT
                  || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
          || (immediate_operand (op, mode)
              && (GET_CODE (op) != CONST_INT
                  || (INTVAL (op) < 32 && INTVAL (op) > 0))));
}

/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
              || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for binary logical operators.  */

int
logical_binary_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
        return power_of_two_operand (XEXP (x, 1), mode);

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
              || code == ROTATERT);
    }
}

/* Return TRUE if x is EQ or NE.  */
int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE if x is a comparison operator other than LTGT or UNEQ.  */
int
arm_comparison_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return (comparison_operator (x, mode)
          && GET_CODE (x) != LTGT
          && GET_CODE (x) != UNEQ);
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */
int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */
int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (   GET_MODE (x) == mode
      && GET_CODE (x) == REG
      && REGNO (x) == CC_REGNUM)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */
int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (   mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  return cc_register (x, mode);
}

/* Return TRUE if X references a SYMBOL_REF.  */
int
symbol_mentioned_p (x)
     rtx x;
{
  register const char * fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}

/* Return TRUE if X references a LABEL_REF.  */
int
label_mentioned_p (x)
     rtx x;
{
  register const char * fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (label_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}

enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}

/* Return 1 if memory locations are adjacent.  */
int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
          || (GET_CODE (XEXP (b, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
        {
          reg0 = REGNO (XEXP (XEXP (a, 0), 0));
          val0 = INTVAL (XEXP (XEXP (a, 0), 1));
        }
      else
        reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
        {
          reg1 = REGNO (XEXP (XEXP (b, 0), 0));
          val1 = INTVAL (XEXP (XEXP (b, 0), 1));
        }
      else
        reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
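
/* Example (illustrative): (mem (reg 4)) and
   (mem (plus (reg 4) (const_int 4))) are adjacent, since they share a
   base register and their offsets differ by exactly one word; with
   (const_int 8) instead, the distance is two words and the predicate
   returns 0.  */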

/* Return 1 if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  */
int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
        return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != SImode
          || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != SImode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}
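
/* Shape accepted (an illustrative sketch, without write-back):

       (parallel [(set (reg:SI 4) (mem:SI (reg:SI 0)))
                  (set (reg:SI 5)
                       (mem:SI (plus:SI (reg:SI 0) (const_int 4))))])

   i.e. consecutively numbered registers loaded from consecutive words
   off a single base register.  */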

/* Return 1 if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  */
int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
        return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != SImode
          || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != SImode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}

int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (i.e. immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
        operands[nops + i] = alter_subreg (operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
        abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
         looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
        return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
           || (GET_CODE (reg) == SUBREG
               && GET_CODE (reg = SUBREG_REG (reg)) == REG))
          || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
              && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
                   == REG)
                  || (GET_CODE (reg) == SUBREG
                      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
              && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
                  == CONST_INT)))
        {
          if (i == 0)
            {
              base_reg = REGNO (reg);
              unsorted_regs[0] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              order[0] = 0;
            }
          else
            {
              if (base_reg != (int) REGNO (reg))
                /* Not addressed from the same base register.  */
                return 0;

              unsorted_regs[i] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              if (unsorted_regs[i] < unsorted_regs[order[0]])
                order[0] = i;
            }

          /* If it isn't an integer register, or if it overwrites the
             base register but isn't the last insn in the list, then
             we can't do this.  */
          if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
              || (i != nops - 1 && unsorted_regs[i] == base_reg))
            return 0;

          unsorted_offsets[i] = INTVAL (offset);
        }
      else
        /* Not a suitable memory address.  */
        return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
        if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
            && (order[i] == order[i - 1]
                || unsorted_regs[j] < unsorted_regs[order[i]]))
          order[i] = j;

      /* Have we found a suitable register?  If not, one must be used more
         than once.  */
      if (order[i] == order[i - 1])
        return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
        return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
        regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
     if the offset isn't small enough.  The reason 2 ldrs are faster
     is because these ARMs are able to do more than one cache access
     in a single cycle.  The ARM9 and StrongARM have Harvard caches,
     whilst the ARM8 has a double bandwidth cache.  This means that
     these cores can do both an instruction fetch and a data fetch in
     a single cycle, so the trick of calculating the address into a
     scratch register (one of the result regs) and then doing a load
     multiple actually becomes slower (and no smaller in code size).
     That is the transformation

         ldr     rd1, [rbase + offset]
         ldr     rd2, [rbase + offset + 4]

     to

         add     rd1, rbase, offset
         ldmia   rd1, {rd1, rd2}

     produces worse code -- '3 cycles + any stalls on rd2' instead of
     '2 cycles + any stalls on rd2'.  On ARMs with only one cache
     access per cycle, the first sequence could never complete in less
     than 6 cycles, whereas the ldm sequence would only take 5 and
     would make better use of sequential accesses if not hitting the
     cache.

     We cheat here and test 'arm_ld_sched' which we currently know to
     only be true for the ARM8, ARM9 and StrongARM.  If this ever
     changes, then the test below needs to be reworked.  */
  if (nops == 2 && arm_ld_sched)
    return 0;

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
          || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}

const char *
emit_ldm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      if (offset >= 0)
        sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
                 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
                 (long) offset);
      else
        sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
                 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
                 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
           reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
             reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
3587
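/* Analyse, in the same way as load_multiple_sequence above, whether the
   NOPS stores described by OPERANDS can be combined into a single
   store-multiple.  Returns 1-4 for stmia, stmib, stmda and stmdb
   respectively, or 0 if they cannot be combined; REGS, BASE and
   LOAD_OFFSET are filled in as for the load case.  */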
int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg (operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != (int) REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, then we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register?  If not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending?  */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}

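/* Emit the assembler text for a store-multiple; the counterpart of
   emit_ldm_seq above.  There is no case 5 fallback here, since
   store_multiple_sequence never asks for the offset to be set up in a
   scratch register first.  */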
const char *
emit_stm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}

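/* Predicate for the multi-register push pattern: return 1 if OP is a
   PARALLEL whose first element is a SET from an UNSPEC with index 2
   (which appears to be the form built when pushing several registers
   at once), and 0 otherwise.  MODE is ignored.  */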
int
multi_register_push (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != PARALLEL
      || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
      || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
      || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
    return 0;

  return 1;
}
\f
/* Routines for use with attributes.  */

/* Return nonzero if ATTR is a valid attribute for DECL.
   ARGS are the arguments supplied with ATTR.

   Supported attributes:

   naked:
     don't output any prologue or epilogue code, the user is assumed
     to do the right thing.

   interfacearm:
     Always assume that this function will be entered in ARM mode,
     not Thumb mode, and that the caller wishes to be returned to in
     ARM mode.  */
int
arm_valid_machine_decl_attribute (decl, attr, args)
     tree decl;
     tree attr;
     tree args;
{
  if (args != NULL_TREE)
    return 0;

  if (is_attribute_p ("naked", attr))
    return TREE_CODE (decl) == FUNCTION_DECL;

#ifdef ARM_PE
  if (is_attribute_p ("interfacearm", attr))
    return TREE_CODE (decl) == FUNCTION_DECL;
#endif /* ARM_PE */

  return 0;
}

/* Return non-zero if FUNC is a naked function.  */
static int
arm_naked_function_p (func)
     tree func;
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    abort ();

  a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
  return a != NULL_TREE;
}
\f
/* Routines for use in generating RTL.  */

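/* Generate a load-multiple as a PARALLEL of COUNT consecutive SImode
   register loads, starting at register BASE_REGNO, from successive
   words at address FROM.  UP selects ascending (non-zero) or
   descending addresses, and WRITE_BACK adds a final update of the base
   address register.  The UNCHANGING_P, IN_STRUCT_P and SCALAR_P flags
   are copied onto each memory reference created.  */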
rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
		       in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  result = gen_rtx_PARALLEL (VOIDmode,
			     rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx_SET (GET_MODE (from), from,
		       plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;
      XVECEXP (result, 0, i)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
    }

  return result;
}

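/* Generate a store-multiple; the mirror image of arm_gen_load_multiple
   above, storing registers BASE_REGNO upwards into successive words at
   address TO.  */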
rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
			in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  result = gen_rtx_PARALLEL (VOIDmode,
			     rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx_SET (GET_MODE (to), to,
		       plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;

      XVECEXP (result, 0, i)
	= gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
    }

  return result;
}

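/* Expand a block move (movstrqi).  OPERANDS are the destination and
   source memory references, the byte count and the alignment.  Only a
   constant count of at most 64 bytes with word alignment is handled;
   returns 0 if those conditions do not hold, or 1 once the move has
   been expanded.  */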
int
arm_gen_movstrqi (operands)
     rtx * operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
  int dst_scalar_p, src_scalar_p;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  dst_scalar_p = MEM_SCALAR_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
  src_scalar_p = MEM_SCALAR_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = NUM_INTS (INTVAL (operands[2]));
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i += 4)
    {
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  src_unchanging_p,
					  src_in_struct_p,
					  src_scalar_p));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
					  FALSE, src_unchanging_p,
					  src_in_struct_p, src_scalar_p));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE,
					       (last_bytes == 0
						? FALSE : TRUE),
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else
	    {
	      mem = gen_rtx_MEM (SImode, dst);
	      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	      MEM_SCALAR_P (mem) = dst_scalar_p;
	      emit_move_insn (mem, gen_rtx_REG (SImode, 0));
	      if (last_bytes != 0)
		emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx_MEM (SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      MEM_SCALAR_P (mem) = dst_scalar_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check.  */
	abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (last_bytes && part_bytes_reg == NULL)
    abort ();

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      /* The bytes we want are in the top end of the word.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
	{
	  mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));

	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }
  else
    {
      if (last_bytes > 1)
	{
	  mem = gen_rtx_MEM (HImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
	  last_bytes -= 2;
	  if (last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
	      part_bytes_reg = tmp;
	    }
	}

      if (last_bytes)
	{
	  mem = gen_rtx_MEM (QImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
	}
    }

  return 1;
}

/* Generate a memory reference for a half word, such that it will be loaded
   into the top 16 bits of the word.  We can assume that the address is
   known to be alignable and of the form reg, or plus (reg, const).  */
rtx
arm_gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail.  */
  if (TARGET_MMU_TRAPS
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));

  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
}

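/* Choose a dominance CC mode for combining the two comparisons X and Y
   of a conditional compare.  COND_OR is zero when both conditions must
   hold and non-zero when either may hold; in the latter case X's
   condition is reversed before the dominance check.  Returns CCmode if
   the two comparisons cannot be combined.  */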
static enum machine_mode
select_dominance_cc_mode (x, y, cond_or)
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
	  != CCmode))
    return CCmode;

  if (cond_or)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this.  */
  if (cond1 != cond2
      && !comparison_dominates_p (cond1, cond2)
      && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || !cond_or)
	return CC_DEQmode;

      switch (cond2)
	{
	case LE: return CC_DLEmode;
	case LEU: return CC_DLEUmode;
	case GE: return CC_DGEmode;
	case GEU: return CC_DGEUmode;
	default: break;
	}

      break;

    case LT:
      if (cond2 == LT || !cond_or)
	return CC_DLTmode;
      if (cond2 == LE)
	return CC_DLEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || !cond_or)
	return CC_DGTmode;
      if (cond2 == GE)
	return CC_DGEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || !cond_or)
	return CC_DLTUmode;
      if (cond2 == LEU)
	return CC_DLEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || !cond_or)
	return CC_DGTUmode;
      if (cond2 == GEU)
	return CC_DGEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same.  */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}

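/* Return the condition code mode that should be used when comparing X
   and Y with operator OP; CCmode unless one of the more restricted
   modes applies.  Reached via the SELECT_CC_MODE macro (see
   arm_gen_compare_reg below).  */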
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      switch (op)
	{
	case EQ:
	case NE:
	case UNORDERED:
	case ORDERED:
	case UNLT:
	case UNLE:
	case UNGT:
	case UNGE:
	case UNEQ:
	case LTGT:
	  return CCFPmode;

	case LT:
	case LE:
	case GT:
	case GE:
	  return CCFPEmode;

	default:
	  abort ();
	}
    }

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* An operation that sets the condition codes as a side-effect does
     not set the V flag correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.)  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
				     INTVAL (XEXP (x, 2)));

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for the condition-code register in the proper mode.  */
rtx
arm_gen_compare_reg (code, x, y)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);

  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
			  gen_rtx_COMPARE (mode, x, y)));

  return cc_reg;
}

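/* Handle loading a half-word from memory during reload by synthesising
   it as two byte loads; the counterpart of arm_reload_out_hi below.
   OPERANDS are the destination, the memory reference, and a DImode
   scratch whose two halves are available for address computation.  */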
void
arm_reload_in_hi (operands)
     rtx * operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
		   - MIN (UNITS_PER_WORD,
			  GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      hi = ((((offset - lo) & HOST_INT (0xffffffff))
	     ^ HOST_INT (0x80000000))
	    - HOST_INT (0x80000000));

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  if (!BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT
					 (SImode,
					  gen_rtx_SUBREG (SImode, operands[0], 0),
					  GEN_INT (8)),
					 scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT (SImode, scratch,
							 GEN_INT (8)),
					 gen_rtx_SUBREG (SImode, operands[0],
							 0))));
}

/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */
void
arm_reload_out_hi (operands)
     rtx * operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
		   - MIN (UNITS_PER_WORD,
			  GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Updating base_plus might destroy outval, see if we can
	     swap the scratch and base_plus.  */
	  if (!reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      /* Be conservative and copy OUTVAL into the scratch now,
		 this should only be necessary if outval is a subreg
		 of something larger than a word.  */
	      /* XXX Might this clobber base?  I can't see how it can,
		 since scratch is known to overlap with OUTVAL, and
		 must be wider than a word.  */
	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      hi = ((((offset - lo) & HOST_INT (0xffffffff))
	     ^ HOST_INT (0x80000000))
	    - HOST_INT (0x80000000));

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Be careful not to destroy OUTVAL.  */
	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Updating base_plus might destroy outval, see if we
		 can swap the scratch and base_plus.  */
	      if (!reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  /* Be conservative and copy outval into scratch now,
		     this should only be necessary if outval is a
		     subreg of something larger than a word.  */
		  /* XXX Might this clobber base?  I can't see how it
		     can, since scratch is known to overlap with
		     outval.  */
		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_rtx_SUBREG (QImode, scratch, 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_rtx_SUBREG (QImode, scratch, 0)));
    }
}
\f
/* Print a symbolic form of X to the debug file, F.  */
static void
arm_print_value (f, x)
     FILE * f;
     rtx x;
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case CONST_DOUBLE:
      fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
      return;

    case CONST_STRING:
      fprintf (f, "\"%s\"", XSTR (x, 0));
      return;

    case SYMBOL_REF:
      fprintf (f, "`%s'", XSTR (x, 0));
      return;

    case LABEL_REF:
      fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
      return;

    case CONST:
      arm_print_value (f, XEXP (x, 0));
      return;

    case PLUS:
      arm_print_value (f, XEXP (x, 0));
      fprintf (f, "+");
      arm_print_value (f, XEXP (x, 1));
      return;

    case PC:
      fprintf (f, "pc");
      return;

    default:
      fprintf (f, "????");
      return;
    }
}
\f
/* Routines for manipulation of the constant pool.  */

/* Arm instructions cannot load a large constant directly into a
   register; they have to come from a pc relative load.  The constant
   must therefore be placed in the addressable range of the pc
   relative load.  Depending on the precise pc relative load
   instruction the range is somewhere between 256 bytes and 4k.  This
   means that we often have to dump a constant inside a function, and
   generate code to branch around it.

   It is important to minimize this, since the branches will slow
   things down and make the code larger.

   Normally we can hide the table after an existing unconditional
   branch so that there is no interruption of the flow, but in the
   worst case the code looks like this:

	ldr	rn, L1
	...
	b	L2
	align
	L1:	.long value
	L2:
	...

	ldr	rn, L3
	...
	b	L4
	align
	L3:	.long value
	L4:
	...

   We fix this by performing a scan after scheduling, which notices
   which instructions need to have their operands fetched from the
   constant table and builds the table.

   The algorithm starts by building a table of all the constants that
   need fixing up and all the natural barriers in the function (places
   where a constant table can be dropped without breaking the flow).
   For each fixup we note how far the pc-relative replacement will be
   able to reach and the offset of the instruction into the function.

   Having built the table we then group the fixes together to form
   tables that are as large as possible (subject to addressing
   constraints) and emit each table of constants after the last
   barrier that is within range of all the instructions in the group.
   If a group does not contain a barrier, then we forcibly create one
   by inserting a jump instruction into the flow.  Once the table has
   been inserted, the insns are then modified to reference the
   relevant entry in the pool.

   Possible enhancements to the algorithm (not implemented) are:

   1) For some processors and object formats, there may be benefit in
   aligning the pools to the start of cache lines; this alignment
   would need to be taken into account when calculating addressability
   of a pool.  */

/* These typedefs are located at the start of this file, so that
   they can be used in the prototypes there.  This comment is to
   remind readers of that fact so that the following structures
   can be understood more easily.

     typedef struct minipool_node    Mnode;
     typedef struct minipool_fixup   Mfix;  */

struct minipool_node
{
  /* Doubly linked chain of entries.  */
  Mnode * next;
  Mnode * prev;
  /* The maximum offset into the code that this entry can be placed.  While
     pushing fixes for forward references, all entries are sorted in order
     of increasing max_address.  */
  HOST_WIDE_INT max_address;
  /* Similarly for an entry inserted for a backwards ref.  */
  HOST_WIDE_INT min_address;
  /* The number of fixes referencing this entry.  This can become zero
     if we "unpush" an entry.  In this case we ignore the entry when we
     come to emit the code.  */
  int refcount;
  /* The offset from the start of the minipool.  */
  HOST_WIDE_INT offset;
  /* The value in the table.  */
  rtx value;
  /* The mode of value.  */
  enum machine_mode mode;
  int fix_size;
};

struct minipool_fixup
{
  Mfix * next;
  rtx insn;
  HOST_WIDE_INT address;
  rtx * loc;
  enum machine_mode mode;
  int fix_size;
  rtx value;
  Mnode * minipool;
  HOST_WIDE_INT forwards;
  HOST_WIDE_INT backwards;
};

/* Fixes less than a word need padding out to a word boundary.  */
#define MINIPOOL_FIX_SIZE(mode) \
  (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)

static Mnode * minipool_vector_head;
static Mnode * minipool_vector_tail;
static rtx minipool_vector_label;

/* The linked list of all minipool fixes required for this function.  */
Mfix * minipool_fix_head;
Mfix * minipool_fix_tail;
/* The fix entry for the current minipool, once it has been placed.  */
Mfix * minipool_barrier;

/* Determines if INSN is the start of a jump table.  Returns the end
   of the TABLE or NULL_RTX.  */
static rtx
is_jump_table (insn)
     rtx insn;
{
  rtx table;

  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn) != NULL
      && ((table = next_real_insn (JUMP_LABEL (insn)))
	  == next_real_insn (insn))
      && table != NULL
      && GET_CODE (table) == JUMP_INSN
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    return table;

  return NULL_RTX;
}

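/* Return the size in bytes of the jump table whose dispatch pattern
   (an ADDR_VEC or ADDR_DIFF_VEC) forms the body of INSN.  */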
static HOST_WIDE_INT
get_jump_table_size (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;

  return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
}

/* Move a minipool fix MP from its current location to before MAX_MP.
   If MAX_MP is NULL, then MP doesn't need moving, but the addressing
   constraints may need updating.  */
static Mnode *
move_minipool_fix_forward_ref (mp, max_mp, max_address)
     Mnode * mp;
     Mnode * max_mp;
     HOST_WIDE_INT max_address;
{
  /* This should never be true and the code below assumes these are
     different.  */
  if (mp == max_mp)
    abort ();

  if (max_mp == NULL)
    {
      if (max_address < mp->max_address)
	mp->max_address = max_address;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      /* Unlink MP from its current position.  Since max_mp is non-null,
	 mp->prev must be non-null.  */
      mp->prev->next = mp->next;
      if (mp->next != NULL)
	mp->next->prev = mp->prev;
      else
	minipool_vector_tail = mp->prev;

      /* Re-insert it before MAX_MP.  */
      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;

      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}

/* Add a constant to the minipool for a forward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.  */
static Mnode *
add_minipool_forward_ref (fix)
     Mfix * fix;
{
  /* If set, max_mp is the first pool_entry that has a lower
     constraint than the one we are trying to add.  */
  Mnode * max_mp = NULL;
  HOST_WIDE_INT max_address = fix->address + fix->forwards;
  Mnode * mp;

  /* If this fix's address is greater than the address of the first
     entry, then we can't put the fix in this pool.  We subtract the
     size of the current fix to ensure that if the table is fully
     packed we still have enough room to insert this value by shuffling
     the other fixes forwards.  */
  if (minipool_vector_head
      && fix->address >= minipool_vector_head->max_address - fix->fix_size)
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value))
	{
	  /* More than one fix references this entry.  */
	  mp->refcount++;
	  return move_minipool_fix_forward_ref (mp, max_mp, max_address);
	}

      /* Note the insertion point if necessary.  */
      if (max_mp == NULL
	  && mp->max_address > max_address)
	max_mp = mp;
    }

  /* The value is not currently in the minipool, so we need to create
     a new entry for it.  If MAX_MP is NULL, the entry will be put on
     the end of the list since the placement is less constrained than
     any existing entry.  Otherwise, we insert the new fix before
     MAX_MP and, if necessary, adjust the constraints on the other
     entries.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a backwards ref.  */
  mp->min_address = -65536;

  if (max_mp == NULL)
    {
      mp->max_address = max_address;
      mp->next = NULL;
      mp->prev = minipool_vector_tail;

      if (mp->prev == NULL)
	{
	  minipool_vector_head = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->prev->next = mp;

      minipool_vector_tail = mp;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}

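/* Move a minipool fix MP so that it comes after MIN_MP, subject to the
   lower-bound address MIN_ADDRESS; the mirror image of
   move_minipool_fix_forward_ref above.  The offsets of all entries are
   recalculated before returning.  */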
static Mnode *
move_minipool_fix_backward_ref (mp, min_mp, min_address)
     Mnode * mp;
     Mnode * min_mp;
     HOST_WIDE_INT min_address;
{
  HOST_WIDE_INT offset;

  /* This should never be true, and the code below assumes these are
     different.  */
  if (mp == min_mp)
    abort ();

  if (min_mp == NULL)
    {
      if (min_address > mp->min_address)
	mp->min_address = min_address;
    }
  else
    {
      /* We will adjust this below if it is too loose.  */
      mp->min_address = min_address;

      /* Unlink MP from its current position.  Since min_mp is non-null,
	 mp->next must be non-null.  */
      mp->next->prev = mp->prev;
      if (mp->prev != NULL)
	mp->prev->next = mp->next;
      else
	minipool_vector_head = mp->next;

      /* Reinsert it after MIN_MP.  */
      mp->prev = min_mp;
      mp->next = min_mp->next;
      min_mp->next = mp;
      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  min_mp = mp;

  offset = 0;
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;
      if (mp->refcount > 0)
	offset += mp->fix_size;

      if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;
    }

  return min_mp;
}

/* Add a constant to the minipool for a backward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.

   Note that the code for insertion for a backwards reference can be
   somewhat confusing because the calculated offsets for each fix do
   not take into account the size of the pool (which is still under
   construction).  */
static Mnode *
add_minipool_backward_ref (fix)
     Mfix * fix;
{
  /* If set, min_mp is the last pool_entry that has a lower constraint
     than the one we are trying to add.  */
  Mnode * min_mp = NULL;
  /* This can be negative, since it is only a constraint.  */
  HOST_WIDE_INT min_address = fix->address - fix->backwards;
  Mnode * mp;

  /* If we can't reach the current pool from this insn, or if we can't
     insert this entry at the end of the pool without pushing other
     fixes out of range, then we don't try.  This ensures that we
     can't fail later on.  */
  if (min_address >= minipool_barrier->address
      || (minipool_vector_tail->min_address + fix->fix_size
	  >= minipool_barrier->address))
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value)
	  /* Check that there is enough slack to move this entry to the
	     end of the table (this is conservative).  */
	  && (mp->max_address
	      > (minipool_barrier->address
		 + minipool_vector_tail->offset
		 + minipool_vector_tail->fix_size)))
	{
	  mp->refcount++;
	  return move_minipool_fix_backward_ref (mp, min_mp, min_address);
	}

      if (min_mp != NULL)
	mp->min_address += fix->fix_size;
      else
	{
	  /* Note the insertion point if necessary.  */
	  if (mp->min_address < min_address)
	    min_mp = mp;
	  else if (mp->max_address
		   < minipool_barrier->address + mp->offset + fix->fix_size)
	    {
	      /* Inserting before this entry would push the fix beyond
		 its maximum address (which can happen if we have
		 re-located a forwards fix); force the new fix to come
		 after it.  */
	      min_mp = mp;
	      min_address = mp->min_address + fix->fix_size;
	    }
	}
    }

  /* We need to create a new entry.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  mp->max_address = minipool_barrier->address + 65536;

  mp->min_address = min_address;

  if (min_mp == NULL)
    {
      mp->prev = NULL;
      mp->next = minipool_vector_head;

      if (mp->next == NULL)
	{
	  minipool_vector_tail = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->next->prev = mp;

      minipool_vector_head = mp;
    }
  else
    {
      mp->next = min_mp->next;
      mp->prev = min_mp;
      min_mp->next = mp;

      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  /* Save the new entry.  */
  min_mp = mp;

  if (mp->prev)
    mp = mp->prev;
  else
    mp->offset = 0;

  /* Scan over the following entries and adjust their offsets.  */
  while (mp->next != NULL)
    {
      if (mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;

      if (mp->refcount)
	mp->next->offset = mp->offset + mp->fix_size;
      else
	mp->next->offset = mp->offset;

      mp = mp->next;
    }

  return min_mp;
}

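/* Set the offset of every live entry in the minipool, given that the
   pool will be emitted at the barrier fix BARRIER.  */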
static void
assign_minipool_offsets (barrier)
     Mfix * barrier;
{
  HOST_WIDE_INT offset = 0;
  Mnode * mp;

  minipool_barrier = barrier;

  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;

      if (mp->refcount > 0)
	offset += mp->fix_size;
    }
}

/* Output the literal table.  */
static void
dump_minipool (scan)
     rtx scan;
{
  Mnode * mp;
  Mnode * nmp;

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     ";; Emitting minipool after insn %u; address %ld\n",
	     INSN_UID (scan), (unsigned long) minipool_barrier->address);

  scan = emit_label_after (gen_label_rtx (), scan);
  scan = emit_insn_after (gen_align_4 (), scan);
  scan = emit_label_after (minipool_vector_label, scan);

  for (mp = minipool_vector_head; mp != NULL; mp = nmp)
    {
      if (mp->refcount > 0)
	{
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       ";; Offset %u, min %ld, max %ld ",
		       (unsigned) mp->offset, (unsigned long) mp->min_address,
		       (unsigned long) mp->max_address);
	      arm_print_value (rtl_dump_file, mp->value);
	      fputc ('\n', rtl_dump_file);
	    }

	  switch (mp->fix_size)
	    {
#ifdef HAVE_consttable_1
	    case 1:
	      scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
	      break;

#endif
#ifdef HAVE_consttable_2
	    case 2:
	      scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
	      break;

#endif
#ifdef HAVE_consttable_4
	    case 4:
	      scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
	      break;

#endif
#ifdef HAVE_consttable_8
	    case 8:
	      scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
	      break;

#endif
	    default:
	      abort ();
	      break;
	    }
	}

      nmp = mp->next;
      free (mp);
    }

  minipool_vector_head = minipool_vector_tail = NULL;
  scan = emit_insn_after (gen_consttable_end (), scan);
  scan = emit_barrier_after (scan);
}

/* Return the cost of forcibly inserting a barrier after INSN.  */
static int
arm_barrier_cost (insn)
     rtx insn;
{
  /* Basing the location of the pool on the loop depth is preferable,
     but at the moment, the basic block information seems to be
     corrupted by this stage of the compilation.  */
  int base_cost = 50;
  rtx next = next_nonnote_insn (insn);

  if (next != NULL && GET_CODE (next) == CODE_LABEL)
    base_cost -= 20;

  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* It will always be better to place the table before the label, rather
	 than after it.  */
      return 50;

    case INSN:
    case CALL_INSN:
      return base_cost;

    case JUMP_INSN:
      return base_cost - 10;

    default:
      return base_cost + 10;
    }
}

/* Find the best place in the insn stream in the range
   (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
   Create the barrier by inserting a jump and add a new fix entry for
   it.  */
static Mfix *
create_fix_barrier (fix, max_address)
     Mfix * fix;
     HOST_WIDE_INT max_address;
{
  HOST_WIDE_INT count = 0;
  rtx barrier;
  rtx from = fix->insn;
  rtx selected = from;
  int selected_cost;
  HOST_WIDE_INT selected_address;
  Mfix * new_fix;
  HOST_WIDE_INT max_count = max_address - fix->address;
  rtx label = gen_label_rtx ();

  selected_cost = arm_barrier_cost (from);
  selected_address = fix->address;

  while (from && count < max_count)
    {
      rtx tmp;
      int new_cost;

      /* This code shouldn't have been called if there was a natural barrier
	 within range.  */
      if (GET_CODE (from) == BARRIER)
	abort ();

      /* Count the length of this insn.  */
      count += get_attr_length (from);

      /* If there is a jump table, add its length.  */
      tmp = is_jump_table (from);
      if (tmp != NULL)
	{
	  count += get_jump_table_size (tmp);

	  /* Jump tables aren't in a basic block, so base the cost on
	     the dispatch insn.  If we select this location, we will
	     still put the pool after the table.  */
	  new_cost = arm_barrier_cost (from);

	  if (count < max_count && new_cost <= selected_cost)
	    {
	      selected = tmp;
	      selected_cost = new_cost;
	      selected_address = fix->address + count;
	    }

	  /* Continue after the dispatch table.  */
	  from = NEXT_INSN (tmp);
	  continue;
	}

      new_cost = arm_barrier_cost (from);

      if (count < max_count && new_cost <= selected_cost)
	{
	  selected = from;
	  selected_cost = new_cost;
	  selected_address = fix->address + count;
	}

      from = NEXT_INSN (from);
    }

  /* Create a new JUMP_INSN that branches around a barrier.  */
  from = emit_jump_insn_after (gen_jump (label), selected);
  JUMP_LABEL (from) = label;
  barrier = emit_barrier_after (from);
  emit_label_after (label, barrier);

  /* Create a minipool barrier entry for the new barrier.  */
  new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
  new_fix->insn = barrier;
  new_fix->address = selected_address;
  new_fix->next = fix->next;
  fix->next = new_fix;

  return new_fix;
}

5397/* Record that there is a natural barrier in the insn stream at
5398 ADDRESS. */
949d79eb
RE
5399static void
5400push_minipool_barrier (insn, address)
2b835d68 5401 rtx insn;
d5b7b3ae 5402 HOST_WIDE_INT address;
2b835d68 5403{
c7319d87 5404 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
ad076f4e 5405
949d79eb
RE
5406 fix->insn = insn;
5407 fix->address = address;
2b835d68 5408
949d79eb
RE
5409 fix->next = NULL;
5410 if (minipool_fix_head != NULL)
5411 minipool_fix_tail->next = fix;
5412 else
5413 minipool_fix_head = fix;
5414
5415 minipool_fix_tail = fix;
5416}
2b835d68 5417
d5b7b3ae
RE
5418/* Record INSN, which will need fixing up to load a value from the
5419 minipool. ADDRESS is the offset of the insn since the start of the
5420 function; LOC is a pointer to the part of the insn which requires
5421 fixing; VALUE is the constant that must be loaded, which is of type
5422 MODE. */
949d79eb
RE
5423static void
5424push_minipool_fix (insn, address, loc, mode, value)
5425 rtx insn;
d5b7b3ae
RE
5426 HOST_WIDE_INT address;
5427 rtx * loc;
949d79eb
RE
5428 enum machine_mode mode;
5429 rtx value;
5430{
c7319d87 5431 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
949d79eb
RE
5432
5433#ifdef AOF_ASSEMBLER
5434 /* PIC symbol refereneces need to be converted into offsets into the
5435 based area. */
d5b7b3ae
RE
5436 /* XXX This shouldn't be done here. */
5437 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
949d79eb
RE
5438 value = aof_pic_entry (value);
5439#endif /* AOF_ASSEMBLER */
5440
5441 fix->insn = insn;
5442 fix->address = address;
5443 fix->loc = loc;
5444 fix->mode = mode;
d5b7b3ae 5445 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 5446 fix->value = value;
d5b7b3ae
RE
5447 fix->forwards = get_attr_pool_range (insn);
5448 fix->backwards = get_attr_neg_pool_range (insn);
5449 fix->minipool = NULL;
949d79eb
RE
5450
5451 /* If an insn doesn't have a range defined for it, then it isn't
5452 expecting to be reworked by this code. Better to abort now than
5453 to generate duff assembly code. */
d5b7b3ae 5454 if (fix->forwards == 0 && fix->backwards == 0)
949d79eb
RE
5455 abort ();
5456
d5b7b3ae
RE
5457 if (rtl_dump_file)
5458 {
5459 fprintf (rtl_dump_file,
5460 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5461 GET_MODE_NAME (mode),
5462 INSN_UID (insn), (unsigned long) address,
5463 -1 * (long)fix->backwards, (long)fix->forwards);
5464 arm_print_value (rtl_dump_file, fix->value);
5465 fprintf (rtl_dump_file, "\n");
5466 }
5467
6354dc9b 5468 /* Add it to the chain of fixes. */
949d79eb 5469 fix->next = NULL;
d5b7b3ae 5470
949d79eb
RE
5471 if (minipool_fix_head != NULL)
5472 minipool_fix_tail->next = fix;
5473 else
5474 minipool_fix_head = fix;
5475
5476 minipool_fix_tail = fix;
5477}
5478
d5b7b3ae 5479/* Scan INSN and note any of its operands that need fixing. */
949d79eb
RE
5480static void
5481note_invalid_constants (insn, address)
5482 rtx insn;
d5b7b3ae 5483 HOST_WIDE_INT address;
949d79eb
RE
5484{
5485 int opno;
5486
d5b7b3ae 5487 extract_insn (insn);
949d79eb 5488
5895f793 5489 if (!constrain_operands (1))
949d79eb
RE
5490 fatal_insn_not_found (insn);
5491
d5b7b3ae
RE
5492 /* Fill in recog_op_alt with information about the constraints of this
5493 insn. */
949d79eb
RE
5494 preprocess_constraints ();
5495
1ccbefce 5496 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 5497 {
6354dc9b 5498 /* Things we need to fix can only occur in inputs. */
36ab44c7 5499 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
5500 continue;
5501
5502 /* If this alternative is a memory reference, then any mention
5503 of constants in this alternative is really to fool reload
5504 into allowing us to accept one there. We need to fix them up
5505 now so that we output the right code. */
5506 if (recog_op_alt[opno][which_alternative].memory_ok)
5507 {
1ccbefce 5508 rtx op = recog_data.operand[opno];
949d79eb
RE
5509
5510 if (CONSTANT_P (op))
1ccbefce
RH
5511 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5512 recog_data.operand_mode[opno], op);
d5b7b3ae
RE
5513#if 0
5514 /* RWE: Now we look correctly at the operands for the insn,
5515 this shouldn't be needed any more. */
949d79eb 5516#ifndef AOF_ASSEMBLER
d5b7b3ae 5517 /* XXX Is this still needed? */
949d79eb 5518 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
1ccbefce
RH
5519 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5520 recog_data.operand_mode[opno],
5521 XVECEXP (op, 0, 0));
949d79eb 5522#endif
d5b7b3ae
RE
5523#endif
5524 else if (GET_CODE (op) == MEM
949d79eb
RE
5525 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5526 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
1ccbefce
RH
5527 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5528 recog_data.operand_mode[opno],
949d79eb
RE
5529 get_pool_constant (XEXP (op, 0)));
5530 }
2b835d68 5531 }
2b835d68
RE
5532}
5533
5534void
5535arm_reorg (first)
5536 rtx first;
5537{
5538 rtx insn;
d5b7b3ae
RE
5539 HOST_WIDE_INT address = 0;
5540 Mfix * fix;
ad076f4e 5541
949d79eb 5542 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 5543
949d79eb
RE
5544 /* The first insn must always be a note, or the code below won't
5545 scan it properly. */
5546 if (GET_CODE (first) != NOTE)
5547 abort ();
5548
5549 /* Scan all the insns and record the operands that will need fixing. */
5550 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 5551 {
2b835d68 5552
949d79eb 5553 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 5554 push_minipool_barrier (insn, address);
949d79eb
RE
5555 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5556 || GET_CODE (insn) == JUMP_INSN)
5557 {
5558 rtx table;
5559
5560 note_invalid_constants (insn, address);
5561 address += get_attr_length (insn);
d5b7b3ae 5562
949d79eb
RE
5563 /* If the insn is a vector jump, add the size of the table
5564 and skip the table. */
d5b7b3ae 5565 if ((table = is_jump_table (insn)) != NULL)
2b835d68 5566 {
d5b7b3ae 5567 address += get_jump_table_size (table);
949d79eb
RE
5568 insn = table;
5569 }
5570 }
5571 }
332072db 5572
d5b7b3ae
RE
5573 fix = minipool_fix_head;
5574
949d79eb 5575 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 5576 while (fix)
949d79eb 5577 {
d5b7b3ae
RE
5578 Mfix * ftmp;
5579 Mfix * fdel;
5580 Mfix * last_added_fix;
5581 Mfix * last_barrier = NULL;
5582 Mfix * this_fix;
949d79eb
RE
5583
5584 /* Skip any further barriers before the next fix. */
5585 while (fix && GET_CODE (fix->insn) == BARRIER)
5586 fix = fix->next;
5587
d5b7b3ae 5588 /* No more fixes. */
949d79eb
RE
5589 if (fix == NULL)
5590 break;
332072db 5591
d5b7b3ae 5592 last_added_fix = NULL;
2b835d68 5593
d5b7b3ae 5594 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 5595 {
949d79eb 5596 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 5597 {
d5b7b3ae
RE
5598 if (ftmp->address >= minipool_vector_head->max_address)
5599 break;
2b835d68 5600
d5b7b3ae 5601 last_barrier = ftmp;
2b835d68 5602 }
d5b7b3ae
RE
5603 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5604 break;
5605
5606 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 5607 }
949d79eb 5608
d5b7b3ae
RE
5609 /* If we found a barrier, drop back to that; any fixes that we
5610 could have reached but come after the barrier will now go in
5611 the next mini-pool. */
949d79eb
RE
5612 if (last_barrier != NULL)
5613 {
d5b7b3ae
RE
5614 /* Reduce the refcount for those fixes that won't go into this
5615 pool after all. */
5616 for (fdel = last_barrier->next;
5617 fdel && fdel != ftmp;
5618 fdel = fdel->next)
5619 {
5620 fdel->minipool->refcount--;
5621 fdel->minipool = NULL;
5622 }
5623
949d79eb
RE
5624 ftmp = last_barrier;
5625 }
5626 else
2bfa88dc 5627 {
d5b7b3ae
RE
5628 /* ftmp is first fix that we can't fit into this pool and
5629 there no natural barriers that we could use. Insert a
5630 new barrier in the code somewhere between the previous
5631 fix and this one, and arrange to jump around it. */
5632 HOST_WIDE_INT max_address;
5633
5634 /* The last item on the list of fixes must be a barrier, so
5635 we can never run off the end of the list of fixes without
5636 last_barrier being set. */
5637 if (ftmp == NULL)
5638 abort ();
5639
5640 max_address = minipool_vector_head->max_address;
2bfa88dc
RE
5641 /* Check that there isn't another fix that is in range that
5642 we couldn't fit into this pool because the pool was
5643 already too large: we need to put the pool before such an
5644 instruction. */
d5b7b3ae
RE
5645 if (ftmp->address < max_address)
5646 max_address = ftmp->address;
5647
5648 last_barrier = create_fix_barrier (last_added_fix, max_address);
5649 }
5650
5651 assign_minipool_offsets (last_barrier);
5652
5653 while (ftmp)
5654 {
5655 if (GET_CODE (ftmp->insn) != BARRIER
5656 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
5657 == NULL))
5658 break;
2bfa88dc 5659
d5b7b3ae 5660 ftmp = ftmp->next;
2bfa88dc 5661 }
949d79eb
RE
5662
5663 /* Scan over the fixes we have identified for this pool, fixing them
5664 up and adding the constants to the pool itself. */
d5b7b3ae 5665 for (this_fix = fix; this_fix && ftmp != this_fix;
949d79eb
RE
5666 this_fix = this_fix->next)
5667 if (GET_CODE (this_fix->insn) != BARRIER)
5668 {
949d79eb
RE
5669 rtx addr
5670 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
5671 minipool_vector_label),
d5b7b3ae 5672 this_fix->minipool->offset);
949d79eb
RE
5673 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
5674 }
5675
d5b7b3ae 5676 dump_minipool (last_barrier->insn);
949d79eb 5677 fix = ftmp;
2b835d68 5678 }
4b632bf1 5679
949d79eb
RE
5680 /* From now on we must synthesize any constants that we can't handle
5681 directly. This can happen if the RTL gets split during final
5682 instruction generation. */
4b632bf1 5683 after_arm_reorg = 1;
c7319d87
RE
5684
5685 /* Free the minipool memory. */
5686 obstack_free (&minipool_obstack, minipool_startobj);
2b835d68 5687}
cce8749e
CH
5688\f
5689/* Routines to output assembly language. */
5690
f3bb6135 5691/* If the rtx is the correct value then return the string of the number.
ff9940b0 5692 In this way we can ensure that valid double constants are generated even
6354dc9b 5693 when cross compiling. */
cd2b33d0 5694const char *
ff9940b0 5695fp_immediate_constant (x)
b5cc037f 5696 rtx x;
ff9940b0
RE
5697{
5698 REAL_VALUE_TYPE r;
5699 int i;
5700
5701 if (!fpa_consts_inited)
5702 init_fpa_table ();
5703
5704 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5705 for (i = 0; i < 8; i++)
5706 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
5707 return strings_fpa[i];
f3bb6135 5708
ff9940b0
RE
5709 abort ();
5710}
5711
9997d19d 5712/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 5713static const char *
9997d19d 5714fp_const_from_val (r)
62b10bbc 5715 REAL_VALUE_TYPE * r;
9997d19d
RE
5716{
5717 int i;
5718
5895f793 5719 if (!fpa_consts_inited)
9997d19d
RE
5720 init_fpa_table ();
5721
5722 for (i = 0; i < 8; i++)
5723 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
5724 return strings_fpa[i];
5725
5726 abort ();
5727}
ff9940b0 5728
cce8749e
CH
5729/* Output the operands of a LDM/STM instruction to STREAM.
5730 MASK is the ARM register set mask of which only bits 0-15 are important.
5731 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
5732 must follow the register list. */
5733
d5b7b3ae 5734static void
dd18ae56 5735print_multi_reg (stream, instr, reg, mask, hat)
62b10bbc 5736 FILE * stream;
cd2b33d0 5737 const char * instr;
dd18ae56
NC
5738 int reg;
5739 int mask;
5740 int hat;
cce8749e
CH
5741{
5742 int i;
5743 int not_first = FALSE;
5744
1d5473cb 5745 fputc ('\t', stream);
dd18ae56 5746 asm_fprintf (stream, instr, reg);
1d5473cb 5747 fputs (", {", stream);
62b10bbc 5748
d5b7b3ae 5749 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
5750 if (mask & (1 << i))
5751 {
5752 if (not_first)
5753 fprintf (stream, ", ");
62b10bbc 5754
dd18ae56 5755 asm_fprintf (stream, "%r", i);
cce8749e
CH
5756 not_first = TRUE;
5757 }
f3bb6135 5758
cce8749e 5759 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 5760}
cce8749e 5761
6354dc9b 5762/* Output a 'call' insn. */
cce8749e 5763
cd2b33d0 5764const char *
cce8749e 5765output_call (operands)
62b10bbc 5766 rtx * operands;
cce8749e 5767{
6354dc9b 5768 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 5769
62b10bbc 5770 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 5771 {
62b10bbc 5772 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 5773 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 5774 }
62b10bbc 5775
1d5473cb 5776 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 5777
6cfc7210 5778 if (TARGET_INTERWORK)
da6558fd
NC
5779 output_asm_insn ("bx%?\t%0", operands);
5780 else
5781 output_asm_insn ("mov%?\t%|pc, %0", operands);
5782
f3bb6135
RE
5783 return "";
5784}
cce8749e 5785
ff9940b0
RE
5786static int
5787eliminate_lr2ip (x)
62b10bbc 5788 rtx * x;
ff9940b0
RE
5789{
5790 int something_changed = 0;
62b10bbc 5791 rtx x0 = * x;
ff9940b0
RE
5792 int code = GET_CODE (x0);
5793 register int i, j;
6f7d635c 5794 register const char * fmt;
ff9940b0
RE
5795
5796 switch (code)
5797 {
5798 case REG:
62b10bbc 5799 if (REGNO (x0) == LR_REGNUM)
ff9940b0 5800 {
62b10bbc 5801 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
5802 return 1;
5803 }
5804 return 0;
5805 default:
6354dc9b 5806 /* Scan through the sub-elements and change any references there. */
ff9940b0 5807 fmt = GET_RTX_FORMAT (code);
62b10bbc 5808
ff9940b0
RE
5809 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5810 if (fmt[i] == 'e')
5811 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
5812 else if (fmt[i] == 'E')
5813 for (j = 0; j < XVECLEN (x0, i); j++)
5814 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 5815
ff9940b0
RE
5816 return something_changed;
5817 }
5818}
5819
6354dc9b 5820/* Output a 'call' insn that is a reference in memory. */
ff9940b0 5821
cd2b33d0 5822const char *
ff9940b0 5823output_call_mem (operands)
62b10bbc 5824 rtx * operands;
ff9940b0 5825{
6354dc9b
NC
5826 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
5827 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
ff9940b0 5828 if (eliminate_lr2ip (&operands[0]))
1d5473cb 5829 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 5830
6cfc7210 5831 if (TARGET_INTERWORK)
da6558fd
NC
5832 {
5833 output_asm_insn ("ldr%?\t%|ip, %0", operands);
5834 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5835 output_asm_insn ("bx%?\t%|ip", operands);
5836 }
5837 else
5838 {
5839 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5840 output_asm_insn ("ldr%?\t%|pc, %0", operands);
5841 }
5842
f3bb6135
RE
5843 return "";
5844}
ff9940b0
RE
5845
5846
5847/* Output a move from arm registers to an fpu registers.
5848 OPERANDS[0] is an fpu register.
5849 OPERANDS[1] is the first registers of an arm register pair. */
5850
cd2b33d0 5851const char *
ff9940b0 5852output_mov_long_double_fpu_from_arm (operands)
62b10bbc 5853 rtx * operands;
ff9940b0
RE
5854{
5855 int arm_reg0 = REGNO (operands[1]);
5856 rtx ops[3];
5857
62b10bbc
NC
5858 if (arm_reg0 == IP_REGNUM)
5859 abort ();
f3bb6135 5860
43cffd11
RE
5861 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5862 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5863 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 5864
1d5473cb
RE
5865 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
5866 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 5867
f3bb6135
RE
5868 return "";
5869}
ff9940b0
RE
5870
5871/* Output a move from an fpu register to arm registers.
5872 OPERANDS[0] is the first registers of an arm register pair.
5873 OPERANDS[1] is an fpu register. */
5874
cd2b33d0 5875const char *
ff9940b0 5876output_mov_long_double_arm_from_fpu (operands)
62b10bbc 5877 rtx * operands;
ff9940b0
RE
5878{
5879 int arm_reg0 = REGNO (operands[0]);
5880 rtx ops[3];
5881
62b10bbc
NC
5882 if (arm_reg0 == IP_REGNUM)
5883 abort ();
f3bb6135 5884
43cffd11
RE
5885 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5886 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5887 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 5888
1d5473cb
RE
5889 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
5890 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
5891 return "";
5892}
ff9940b0
RE
5893
5894/* Output a move from arm registers to arm registers of a long double
5895 OPERANDS[0] is the destination.
5896 OPERANDS[1] is the source. */
cd2b33d0 5897const char *
ff9940b0 5898output_mov_long_double_arm_from_arm (operands)
62b10bbc 5899 rtx * operands;
ff9940b0 5900{
6354dc9b 5901 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
5902 int dest_start = REGNO (operands[0]);
5903 int src_start = REGNO (operands[1]);
5904 rtx ops[2];
5905 int i;
5906
5907 if (dest_start < src_start)
5908 {
5909 for (i = 0; i < 3; i++)
5910 {
43cffd11
RE
5911 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5912 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5913 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5914 }
5915 }
5916 else
5917 {
5918 for (i = 2; i >= 0; i--)
5919 {
43cffd11
RE
5920 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5921 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5922 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5923 }
5924 }
f3bb6135 5925
ff9940b0
RE
5926 return "";
5927}
5928
5929
cce8749e
CH
5930/* Output a move from arm registers to an fpu registers.
5931 OPERANDS[0] is an fpu register.
5932 OPERANDS[1] is the first registers of an arm register pair. */
5933
cd2b33d0 5934const char *
cce8749e 5935output_mov_double_fpu_from_arm (operands)
62b10bbc 5936 rtx * operands;
cce8749e
CH
5937{
5938 int arm_reg0 = REGNO (operands[1]);
5939 rtx ops[2];
5940
62b10bbc
NC
5941 if (arm_reg0 == IP_REGNUM)
5942 abort ();
5943
43cffd11
RE
5944 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5945 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5946 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
5947 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
5948 return "";
5949}
cce8749e
CH
5950
5951/* Output a move from an fpu register to arm registers.
5952 OPERANDS[0] is the first registers of an arm register pair.
5953 OPERANDS[1] is an fpu register. */
5954
cd2b33d0 5955const char *
cce8749e 5956output_mov_double_arm_from_fpu (operands)
62b10bbc 5957 rtx * operands;
cce8749e
CH
5958{
5959 int arm_reg0 = REGNO (operands[0]);
5960 rtx ops[2];
5961
62b10bbc
NC
5962 if (arm_reg0 == IP_REGNUM)
5963 abort ();
f3bb6135 5964
43cffd11
RE
5965 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5966 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5967 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
5968 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
5969 return "";
5970}
cce8749e
CH
5971
5972/* Output a move between double words.
5973 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
5974 or MEM<-REG and all MEMs must be offsettable addresses. */
5975
cd2b33d0 5976const char *
cce8749e 5977output_move_double (operands)
aec3cfba 5978 rtx * operands;
cce8749e
CH
5979{
5980 enum rtx_code code0 = GET_CODE (operands[0]);
5981 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 5982 rtx otherops[3];
cce8749e
CH
5983
5984 if (code0 == REG)
5985 {
5986 int reg0 = REGNO (operands[0]);
5987
43cffd11 5988 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 5989
cce8749e
CH
5990 if (code1 == REG)
5991 {
5992 int reg1 = REGNO (operands[1]);
62b10bbc
NC
5993 if (reg1 == IP_REGNUM)
5994 abort ();
f3bb6135 5995
6354dc9b 5996 /* Ensure the second source is not overwritten. */
c1c2bc04 5997 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 5998 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 5999 else
6cfc7210 6000 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
6001 }
6002 else if (code1 == CONST_DOUBLE)
6003 {
226a5051
RE
6004 if (GET_MODE (operands[1]) == DFmode)
6005 {
6006 long l[2];
6007 union real_extract u;
6008
4e135bdd 6009 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
226a5051 6010 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
d5b7b3ae
RE
6011 otherops[1] = GEN_INT (l[1]);
6012 operands[1] = GEN_INT (l[0]);
226a5051 6013 }
c1c2bc04
RE
6014 else if (GET_MODE (operands[1]) != VOIDmode)
6015 abort ();
6016 else if (WORDS_BIG_ENDIAN)
6017 {
6018
6019 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6020 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6021 }
226a5051
RE
6022 else
6023 {
c1c2bc04 6024
226a5051
RE
6025 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6026 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6027 }
6cfc7210 6028
c1c2bc04
RE
6029 output_mov_immediate (operands);
6030 output_mov_immediate (otherops);
cce8749e
CH
6031 }
6032 else if (code1 == CONST_INT)
6033 {
56636818
JL
6034#if HOST_BITS_PER_WIDE_INT > 32
6035 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6036 what the upper word is. */
6037 if (WORDS_BIG_ENDIAN)
6038 {
6039 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6040 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6041 }
6042 else
6043 {
6044 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6045 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6046 }
6047#else
6354dc9b 6048 /* Sign extend the intval into the high-order word. */
c1c2bc04
RE
6049 if (WORDS_BIG_ENDIAN)
6050 {
6051 otherops[1] = operands[1];
6052 operands[1] = (INTVAL (operands[1]) < 0
6053 ? constm1_rtx : const0_rtx);
6054 }
ff9940b0 6055 else
c1c2bc04 6056 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 6057#endif
c1c2bc04
RE
6058 output_mov_immediate (otherops);
6059 output_mov_immediate (operands);
cce8749e
CH
6060 }
6061 else if (code1 == MEM)
6062 {
ff9940b0 6063 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 6064 {
ff9940b0 6065 case REG:
9997d19d 6066 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 6067 break;
2b835d68 6068
ff9940b0 6069 case PRE_INC:
6354dc9b 6070 abort (); /* Should never happen now. */
ff9940b0 6071 break;
2b835d68 6072
ff9940b0 6073 case PRE_DEC:
2b835d68 6074 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 6075 break;
2b835d68 6076
ff9940b0 6077 case POST_INC:
9997d19d 6078 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 6079 break;
2b835d68 6080
ff9940b0 6081 case POST_DEC:
6354dc9b 6082 abort (); /* Should never happen now. */
ff9940b0 6083 break;
2b835d68
RE
6084
6085 case LABEL_REF:
6086 case CONST:
6087 output_asm_insn ("adr%?\t%0, %1", operands);
6088 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6089 break;
6090
ff9940b0 6091 default:
aec3cfba
NC
6092 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6093 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 6094 {
2b835d68
RE
6095 otherops[0] = operands[0];
6096 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6097 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6098 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6099 {
6100 if (GET_CODE (otherops[2]) == CONST_INT)
6101 {
6102 switch (INTVAL (otherops[2]))
6103 {
6104 case -8:
6105 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6106 return "";
6107 case -4:
6108 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6109 return "";
6110 case 4:
6111 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6112 return "";
6113 }
6114 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6115 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6116 else
6117 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6118 }
6119 else
6120 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6121 }
6122 else
6123 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 6124
2b835d68
RE
6125 return "ldm%?ia\t%0, %M0";
6126 }
6127 else
6128 {
6129 otherops[1] = adj_offsettable_operand (operands[1], 4);
6130 /* Take care of overlapping base/data reg. */
6131 if (reg_mentioned_p (operands[0], operands[1]))
6132 {
6133 output_asm_insn ("ldr%?\t%0, %1", otherops);
6134 output_asm_insn ("ldr%?\t%0, %1", operands);
6135 }
6136 else
6137 {
6138 output_asm_insn ("ldr%?\t%0, %1", operands);
6139 output_asm_insn ("ldr%?\t%0, %1", otherops);
6140 }
cce8749e
CH
6141 }
6142 }
6143 }
2b835d68 6144 else
6354dc9b 6145 abort (); /* Constraints should prevent this. */
cce8749e
CH
6146 }
6147 else if (code0 == MEM && code1 == REG)
6148 {
62b10bbc
NC
6149 if (REGNO (operands[1]) == IP_REGNUM)
6150 abort ();
2b835d68 6151
ff9940b0
RE
6152 switch (GET_CODE (XEXP (operands[0], 0)))
6153 {
6154 case REG:
9997d19d 6155 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 6156 break;
2b835d68 6157
ff9940b0 6158 case PRE_INC:
6354dc9b 6159 abort (); /* Should never happen now. */
ff9940b0 6160 break;
2b835d68 6161
ff9940b0 6162 case PRE_DEC:
2b835d68 6163 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 6164 break;
2b835d68 6165
ff9940b0 6166 case POST_INC:
9997d19d 6167 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 6168 break;
2b835d68 6169
ff9940b0 6170 case POST_DEC:
6354dc9b 6171 abort (); /* Should never happen now. */
ff9940b0 6172 break;
2b835d68
RE
6173
6174 case PLUS:
6175 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6176 {
6177 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6178 {
6179 case -8:
6180 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6181 return "";
6182
6183 case -4:
6184 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6185 return "";
6186
6187 case 4:
6188 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6189 return "";
6190 }
6191 }
6192 /* Fall through */
6193
ff9940b0 6194 default:
cce8749e 6195 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 6196 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
6197 output_asm_insn ("str%?\t%1, %0", operands);
6198 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
6199 }
6200 }
2b835d68 6201 else
62b10bbc 6202 abort (); /* Constraints should prevent this */
cce8749e 6203
9997d19d
RE
6204 return "";
6205}
cce8749e
CH
6206
6207
6208/* Output an arbitrary MOV reg, #n.
6209 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6210
cd2b33d0 6211const char *
cce8749e 6212output_mov_immediate (operands)
62b10bbc 6213 rtx * operands;
cce8749e 6214{
f3bb6135 6215 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
6216 int n_ones = 0;
6217 int i;
6218
6219 /* Try to use one MOV */
cce8749e 6220 if (const_ok_for_arm (n))
f3bb6135 6221 {
9997d19d 6222 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
6223 return "";
6224 }
cce8749e
CH
6225
6226 /* Try to use one MVN */
f3bb6135 6227 if (const_ok_for_arm (~n))
cce8749e 6228 {
f3bb6135 6229 operands[1] = GEN_INT (~n);
9997d19d 6230 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 6231 return "";
cce8749e
CH
6232 }
6233
6354dc9b 6234 /* If all else fails, make it out of ORRs or BICs as appropriate. */
cce8749e
CH
6235
6236 for (i=0; i < 32; i++)
6237 if (n & 1 << i)
6238 n_ones++;
6239
6354dc9b 6240 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
e5951263 6241 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
cce8749e 6242 else
d5b7b3ae 6243 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
f3bb6135
RE
6244
6245 return "";
6246}
cce8749e
CH
6247
6248
6249/* Output an ADD r, s, #n where n may be too big for one instruction. If
6250 adding zero to one register, output nothing. */
6251
cd2b33d0 6252const char *
cce8749e 6253output_add_immediate (operands)
62b10bbc 6254 rtx * operands;
cce8749e 6255{
f3bb6135 6256 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
6257
6258 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6259 {
6260 if (n < 0)
6261 output_multi_immediate (operands,
9997d19d
RE
6262 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6263 -n);
cce8749e
CH
6264 else
6265 output_multi_immediate (operands,
9997d19d
RE
6266 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6267 n);
cce8749e 6268 }
f3bb6135
RE
6269
6270 return "";
6271}
cce8749e 6272
cce8749e
CH
6273/* Output a multiple immediate operation.
6274 OPERANDS is the vector of operands referred to in the output patterns.
6275 INSTR1 is the output pattern to use for the first constant.
6276 INSTR2 is the output pattern to use for subsequent constants.
6277 IMMED_OP is the index of the constant slot in OPERANDS.
6278 N is the constant value. */
6279
cd2b33d0 6280static const char *
cce8749e 6281output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc 6282 rtx * operands;
cd2b33d0
NC
6283 const char * instr1;
6284 const char * instr2;
f3bb6135
RE
6285 int immed_op;
6286 HOST_WIDE_INT n;
cce8749e 6287{
f3bb6135 6288#if HOST_BITS_PER_WIDE_INT > 32
e5951263 6289 n &= HOST_UINT (0xffffffff);
f3bb6135
RE
6290#endif
6291
cce8749e
CH
6292 if (n == 0)
6293 {
6294 operands[immed_op] = const0_rtx;
6354dc9b 6295 output_asm_insn (instr1, operands); /* Quick and easy output. */
cce8749e
CH
6296 }
6297 else
6298 {
6299 int i;
cd2b33d0 6300 const char * instr = instr1;
cce8749e 6301
6354dc9b 6302 /* Note that n is never zero here (which would give no output). */
cce8749e
CH
6303 for (i = 0; i < 32; i += 2)
6304 {
6305 if (n & (3 << i))
6306 {
f3bb6135
RE
6307 operands[immed_op] = GEN_INT (n & (255 << i));
6308 output_asm_insn (instr, operands);
cce8749e
CH
6309 instr = instr2;
6310 i += 6;
6311 }
6312 }
6313 }
cd2b33d0 6314
f3bb6135 6315 return "";
9997d19d 6316}
cce8749e
CH
6317
6318
6319/* Return the appropriate ARM instruction for the operation code.
6320 The returned result should not be overwritten. OP is the rtx of the
6321 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6322 was shifted. */
6323
cd2b33d0 6324const char *
cce8749e
CH
6325arithmetic_instr (op, shift_first_arg)
6326 rtx op;
f3bb6135 6327 int shift_first_arg;
cce8749e 6328{
9997d19d 6329 switch (GET_CODE (op))
cce8749e
CH
6330 {
6331 case PLUS:
f3bb6135
RE
6332 return "add";
6333
cce8749e 6334 case MINUS:
f3bb6135
RE
6335 return shift_first_arg ? "rsb" : "sub";
6336
cce8749e 6337 case IOR:
f3bb6135
RE
6338 return "orr";
6339
cce8749e 6340 case XOR:
f3bb6135
RE
6341 return "eor";
6342
cce8749e 6343 case AND:
f3bb6135
RE
6344 return "and";
6345
cce8749e 6346 default:
f3bb6135 6347 abort ();
cce8749e 6348 }
f3bb6135 6349}
cce8749e
CH
6350
6351
6352/* Ensure valid constant shifts and return the appropriate shift mnemonic
6353 for the operation code. The returned result should not be overwritten.
6354 OP is the rtx code of the shift.
9997d19d 6355 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
6354dc9b 6356 shift. */
cce8749e 6357
cd2b33d0 6358static const char *
9997d19d
RE
6359shift_op (op, amountp)
6360 rtx op;
6361 HOST_WIDE_INT *amountp;
cce8749e 6362{
cd2b33d0 6363 const char * mnem;
e2c671ba 6364 enum rtx_code code = GET_CODE (op);
cce8749e 6365
9997d19d
RE
6366 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6367 *amountp = -1;
6368 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6369 *amountp = INTVAL (XEXP (op, 1));
6370 else
6371 abort ();
6372
e2c671ba 6373 switch (code)
cce8749e
CH
6374 {
6375 case ASHIFT:
6376 mnem = "asl";
6377 break;
f3bb6135 6378
cce8749e
CH
6379 case ASHIFTRT:
6380 mnem = "asr";
cce8749e 6381 break;
f3bb6135 6382
cce8749e
CH
6383 case LSHIFTRT:
6384 mnem = "lsr";
cce8749e 6385 break;
f3bb6135 6386
9997d19d
RE
6387 case ROTATERT:
6388 mnem = "ror";
9997d19d
RE
6389 break;
6390
ff9940b0 6391 case MULT:
e2c671ba
RE
6392 /* We never have to worry about the amount being other than a
6393 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
6394 if (*amountp != -1)
6395 *amountp = int_log2 (*amountp);
6396 else
6397 abort ();
f3bb6135
RE
6398 return "asl";
6399
cce8749e 6400 default:
f3bb6135 6401 abort ();
cce8749e
CH
6402 }
6403
e2c671ba
RE
6404 if (*amountp != -1)
6405 {
6406 /* This is not 100% correct, but follows from the desire to merge
6407 multiplication by a power of 2 with the recognizer for a
6408 shift. >=32 is not a valid shift for "asl", so we must try and
6409 output a shift that produces the correct arithmetical result.
ddd5a7c1 6410 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
6411 is not set correctly if we set the flags; but we never use the
6412 carry bit from such an operation, so we can ignore that. */
6413 if (code == ROTATERT)
6414 *amountp &= 31; /* Rotate is just modulo 32 */
6415 else if (*amountp != (*amountp & 31))
6416 {
6417 if (code == ASHIFT)
6418 mnem = "lsr";
6419 *amountp = 32;
6420 }
6421
6422 /* Shifts of 0 are no-ops. */
6423 if (*amountp == 0)
6424 return NULL;
6425 }
6426
9997d19d
RE
6427 return mnem;
6428}
cce8749e
CH
6429
6430
6354dc9b 6431/* Obtain the shift from the POWER of two. */
18af7313 6432static HOST_WIDE_INT
cce8749e 6433int_log2 (power)
f3bb6135 6434 HOST_WIDE_INT power;
cce8749e 6435{
f3bb6135 6436 HOST_WIDE_INT shift = 0;
cce8749e 6437
e5951263 6438 while ((((HOST_INT (1)) << shift) & power) == 0)
cce8749e
CH
6439 {
6440 if (shift > 31)
f3bb6135 6441 abort ();
cce8749e
CH
6442 shift++;
6443 }
f3bb6135
RE
6444
6445 return shift;
6446}
cce8749e 6447
cce8749e
CH
6448/* Output a .ascii pseudo-op, keeping track of lengths. This is because
6449 /bin/as is horribly restrictive. */
6cfc7210 6450#define MAX_ASCII_LEN 51
cce8749e
CH
6451
6452void
6453output_ascii_pseudo_op (stream, p, len)
62b10bbc 6454 FILE * stream;
3cce094d 6455 const unsigned char * p;
cce8749e
CH
6456 int len;
6457{
6458 int i;
6cfc7210 6459 int len_so_far = 0;
cce8749e 6460
6cfc7210
NC
6461 fputs ("\t.ascii\t\"", stream);
6462
cce8749e
CH
6463 for (i = 0; i < len; i++)
6464 {
6465 register int c = p[i];
6466
6cfc7210 6467 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 6468 {
6cfc7210 6469 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 6470 len_so_far = 0;
cce8749e
CH
6471 }
6472
6cfc7210 6473 switch (c)
cce8749e 6474 {
6cfc7210
NC
6475 case TARGET_TAB:
6476 fputs ("\\t", stream);
6477 len_so_far += 2;
6478 break;
6479
6480 case TARGET_FF:
6481 fputs ("\\f", stream);
6482 len_so_far += 2;
6483 break;
6484
6485 case TARGET_BS:
6486 fputs ("\\b", stream);
6487 len_so_far += 2;
6488 break;
6489
6490 case TARGET_CR:
6491 fputs ("\\r", stream);
6492 len_so_far += 2;
6493 break;
6494
6495 case TARGET_NEWLINE:
6496 fputs ("\\n", stream);
6497 c = p [i + 1];
6498 if ((c >= ' ' && c <= '~')
6499 || c == TARGET_TAB)
6500 /* This is a good place for a line break. */
6501 len_so_far = MAX_ASCII_LEN;
6502 else
6503 len_so_far += 2;
6504 break;
6505
6506 case '\"':
6507 case '\\':
6508 putc ('\\', stream);
5895f793 6509 len_so_far++;
6cfc7210 6510 /* drop through. */
f3bb6135 6511
6cfc7210
NC
6512 default:
6513 if (c >= ' ' && c <= '~')
6514 {
6515 putc (c, stream);
5895f793 6516 len_so_far++;
6cfc7210
NC
6517 }
6518 else
6519 {
6520 fprintf (stream, "\\%03o", c);
6521 len_so_far += 4;
6522 }
6523 break;
cce8749e 6524 }
cce8749e 6525 }
f3bb6135 6526
cce8749e 6527 fputs ("\"\n", stream);
f3bb6135 6528}
cce8749e 6529\f
ff9940b0 6530
cd2b33d0 6531const char *
84ed5e79 6532output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
6533 rtx operand;
6534 int really_return;
84ed5e79 6535 int reverse;
ff9940b0
RE
6536{
6537 char instr[100];
6538 int reg, live_regs = 0;
46406379 6539 int volatile_func = arm_volatile_func ();
e2c671ba 6540
d5b7b3ae
RE
6541 /* If a function is naked, don't use the "return" insn. */
6542 if (arm_naked_function_p (current_function_decl))
6543 return "";
6544
e2c671ba 6545 return_used_this_function = 1;
d5b7b3ae 6546
62b10bbc 6547 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6548 {
e2c671ba 6549 /* If this function was declared non-returning, and we have found a tail
3a5a4282
PB
6550 call, then we have to trust that the called function won't return. */
6551 if (really_return)
6552 {
6553 rtx ops[2];
6554
6555 /* Otherwise, trap an attempted return by aborting. */
6556 ops[0] = operand;
6557 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
6558 : "abort");
6559 assemble_external_libcall (ops[1]);
6560 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
6561 }
6562
e2c671ba
RE
6563 return "";
6564 }
6565
5895f793 6566 if (current_function_calls_alloca && !really_return)
62b10bbc 6567 abort ();
d5b7b3ae 6568
f3bb6135 6569 for (reg = 0; reg <= 10; reg++)
5895f793 6570 if (regs_ever_live[reg] && !call_used_regs[reg])
ff9940b0
RE
6571 live_regs++;
6572
5895f793
RE
6573 if (!TARGET_APCS_FRAME
6574 && !frame_pointer_needed
d5b7b3ae 6575 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6576 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6577 live_regs++;
6578
5895f793 6579 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6580 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6581 live_regs++;
6582
0616531f 6583 if (live_regs || regs_ever_live[LR_REGNUM])
ff9940b0
RE
6584 live_regs++;
6585
6586 if (frame_pointer_needed)
6587 live_regs += 4;
6588
3a5a4282
PB
6589 /* On some ARM architectures it is faster to use LDR rather than LDM to
6590 load a single register. On other architectures, the cost is the same. */
6591 if (live_regs == 1
6592 && regs_ever_live[LR_REGNUM]
5895f793 6593 && !really_return)
d5b7b3ae
RE
6594 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
6595 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
6596 else if (live_regs == 1
6597 && regs_ever_live[LR_REGNUM]
d5b7b3ae
RE
6598 && TARGET_APCS_32)
6599 output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
6600 : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
3a5a4282 6601 else if (live_regs)
ff9940b0 6602 {
5895f793 6603 if (!regs_ever_live[LR_REGNUM])
ff9940b0 6604 live_regs++;
f3bb6135 6605
ff9940b0 6606 if (frame_pointer_needed)
84ed5e79
RE
6607 strcpy (instr,
6608 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
ff9940b0 6609 else
84ed5e79
RE
6610 strcpy (instr,
6611 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
6612
6613 for (reg = 0; reg <= 10; reg++)
62b10bbc 6614 if (regs_ever_live[reg]
5895f793
RE
6615 && (!call_used_regs[reg]
6616 || (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6617 && reg == PIC_OFFSET_TABLE_REGNUM)))
ff9940b0 6618 {
1d5473cb 6619 strcat (instr, "%|");
ff9940b0
RE
6620 strcat (instr, reg_names[reg]);
6621 if (--live_regs)
6622 strcat (instr, ", ");
6623 }
f3bb6135 6624
ff9940b0
RE
6625 if (frame_pointer_needed)
6626 {
1d5473cb 6627 strcat (instr, "%|");
ff9940b0
RE
6628 strcat (instr, reg_names[11]);
6629 strcat (instr, ", ");
1d5473cb 6630 strcat (instr, "%|");
ff9940b0
RE
6631 strcat (instr, reg_names[13]);
6632 strcat (instr, ", ");
1d5473cb 6633 strcat (instr, "%|");
5895f793 6634 strcat (instr, TARGET_INTERWORK || (!really_return)
62b10bbc 6635 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
ff9940b0
RE
6636 }
6637 else
1d5473cb 6638 {
5895f793 6639 if (!TARGET_APCS_FRAME
d5b7b3ae 6640 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6641 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6642 {
6643 strcat (instr, "%|");
6644 strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
6645 strcat (instr, ", ");
6646 }
6647
1d5473cb 6648 strcat (instr, "%|");
d5b7b3ae 6649
6cfc7210 6650 if (TARGET_INTERWORK && really_return)
62b10bbc 6651 strcat (instr, reg_names[IP_REGNUM]);
da6558fd 6652 else
62b10bbc 6653 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
1d5473cb 6654 }
d5b7b3ae 6655
2b835d68 6656 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 6657 output_asm_insn (instr, &operand);
da6558fd 6658
6cfc7210 6659 if (TARGET_INTERWORK && really_return)
da6558fd
NC
6660 {
6661 strcpy (instr, "bx%?");
6662 strcat (instr, reverse ? "%D0" : "%d0");
6663 strcat (instr, "\t%|");
6664 strcat (instr, frame_pointer_needed ? "lr" : "ip");
6665
5895f793 6666 output_asm_insn (instr, &operand);
da6558fd 6667 }
ff9940b0
RE
6668 }
6669 else if (really_return)
6670 {
6cfc7210 6671 if (TARGET_INTERWORK)
25b1c156 6672 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
b111229a
RE
6673 else
6674 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
6675 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
da6558fd 6676
5895f793 6677 output_asm_insn (instr, &operand);
ff9940b0 6678 }
f3bb6135 6679
ff9940b0
RE
6680 return "";
6681}
6682
e82ea128
DE
6683/* Return nonzero if optimizing and the current function is volatile.
6684 Such functions never return, and many memory cycles can be saved
6685 by not storing register values that will never be needed again.
6686 This optimization was added to speed up context switching in a
6354dc9b 6687 kernel application. */
e2c671ba
RE
6688int
6689arm_volatile_func ()
6690{
6354dc9b
NC
6691 return (optimize > 0
6692 && current_function_nothrow
46406379 6693 && TREE_THIS_VOLATILE (current_function_decl));
e2c671ba
RE
6694}
6695
ef179a26
NC
6696/* Write the function name into the code section, directly preceding
6697 the function prologue.
6698
6699 Code will be output similar to this:
6700 t0
6701 .ascii "arm_poke_function_name", 0
6702 .align
6703 t1
6704 .word 0xff000000 + (t1 - t0)
6705 arm_poke_function_name
6706 mov ip, sp
6707 stmfd sp!, {fp, ip, lr, pc}
6708 sub fp, ip, #4
6709
6710 When performing a stack backtrace, code can inspect the value
6711 of 'pc' stored at 'fp' + 0. If the trace function then looks
6712 at location pc - 12 and the top 8 bits are set, then we know
6713 that there is a function name embedded immediately preceding this
6714 location and has length ((pc[-3]) & 0xff000000).
6715
6716 We assume that pc is declared as a pointer to an unsigned long.
6717
6718 It is of no benefit to output the function name if we are assembling
6719 a leaf function. These function types will not contain a stack
6720 backtrace structure, therefore it is not possible to determine the
6721 function name. */
6722
6723void
6724arm_poke_function_name (stream, name)
6725 FILE * stream;
6726 char * name;
6727{
6728 unsigned long alignlength;
6729 unsigned long length;
6730 rtx x;
6731
d5b7b3ae
RE
6732 length = strlen (name) + 1;
6733 alignlength = ROUND_UP (length);
ef179a26 6734
949d79eb 6735 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 6736 ASM_OUTPUT_ALIGN (stream, 2);
e5951263 6737 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
ef179a26
NC
6738 ASM_OUTPUT_INT (stream, x);
6739}
6740
ff9940b0
RE
6741/* The amount of stack adjustment that happens here, in output_return and in
6742 output_epilogue must be exactly the same as was calculated during reload,
6743 or things will point to the wrong place. The only time we can safely
6744 ignore this constraint is when a function has no arguments on the stack,
6745 no stack frame requirement and no live registers execpt for `lr'. If we
6746 can guarantee that by making all function calls into tail calls and that
6747 lr is not clobbered in any other way, then there is no need to push lr
6354dc9b 6748 onto the stack. */
cce8749e 6749void
d5b7b3ae 6750output_arm_prologue (f, frame_size)
6cfc7210 6751 FILE * f;
cce8749e
CH
6752 int frame_size;
6753{
f3bb6135 6754 int reg, live_regs_mask = 0;
46406379 6755 int volatile_func = arm_volatile_func ();
cce8749e 6756
cce8749e
CH
6757 /* Nonzero if we must stuff some register arguments onto the stack as if
6758 they were passed there. */
6759 int store_arg_regs = 0;
6760
abaa26e5 6761 if (arm_ccfsm_state || arm_target_insn)
6354dc9b 6762 abort (); /* Sanity check. */
31fdb4d5
DE
6763
6764 if (arm_naked_function_p (current_function_decl))
6765 return;
6766
ff9940b0 6767 return_used_this_function = 0;
ff9940b0 6768
dd18ae56
NC
6769 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
6770 current_function_args_size,
6771 current_function_pretend_args_size, frame_size);
6772 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
6773 frame_pointer_needed,
6774 current_function_anonymous_args);
cce8749e 6775
e2c671ba 6776 if (volatile_func)
dd18ae56 6777 asm_fprintf (f, "\t%@ Volatile function.\n");
e2c671ba 6778
cce8749e
CH
6779 if (current_function_anonymous_args && current_function_pretend_args_size)
6780 store_arg_regs = 1;
6781
f3bb6135 6782 for (reg = 0; reg <= 10; reg++)
5895f793 6783 if (regs_ever_live[reg] && !call_used_regs[reg])
cce8749e
CH
6784 live_regs_mask |= (1 << reg);
6785
5895f793
RE
6786 if (!TARGET_APCS_FRAME
6787 && !frame_pointer_needed
d5b7b3ae 6788 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6789 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6790 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6791
5895f793 6792 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6793 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6794 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6795
ff9940b0 6796 if (frame_pointer_needed)
e2c671ba 6797 live_regs_mask |= 0xD800;
62b10bbc 6798 else if (regs_ever_live[LR_REGNUM])
ff9940b0 6799 {
62b10bbc 6800 live_regs_mask |= 1 << LR_REGNUM;
cce8749e
CH
6801 }
6802
0616531f
RE
6803 if (live_regs_mask)
6804 /* If a di mode load/store multiple is used, and the base register
6805 is r3, then r4 can become an ever live register without lr
6806 doing so, in this case we need to push lr as well, or we
6807 will fail to get a proper return. */
6808 live_regs_mask |= 1 << LR_REGNUM;
32de079a
RE
6809
6810#ifdef AOF_ASSEMBLER
6811 if (flag_pic)
dd18ae56 6812 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 6813#endif
f3bb6135 6814}
cce8749e 6815
cd2b33d0 6816const char *
0616531f
RE
6817arm_output_epilogue (really_return)
6818 int really_return;
cce8749e 6819{
949d79eb
RE
6820 int reg;
6821 int live_regs_mask = 0;
6354dc9b 6822 /* If we need this, then it will always be at least this much. */
b111229a 6823 int floats_offset = 12;
cce8749e 6824 rtx operands[3];
949d79eb 6825 int frame_size = get_frame_size ();
d5b7b3ae
RE
6826 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
6827 FILE * f = asm_out_file;
e5951263 6828 int volatile_func = arm_volatile_func ();
d5b7b3ae 6829 int return_regnum;
cce8749e 6830
b36ba79f 6831 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 6832 return "";
cce8749e 6833
31fdb4d5
DE
6834 /* Naked functions don't have epilogues. */
6835 if (arm_naked_function_p (current_function_decl))
949d79eb 6836 return "";
31fdb4d5 6837
d5b7b3ae
RE
6838 /* If we are throwing an exception, the address we want to jump to is in
6839 R1; otherwise, it's in LR. */
6840 return_regnum = eh_ofs ? 2 : LR_REGNUM;
6841
0616531f
RE
6842 /* If we are throwing an exception, then we really must be doing a return,
6843 so we can't tail-call. */
5895f793 6844 if (eh_ofs && !really_return)
0616531f
RE
6845 abort();
6846
e2c671ba 6847 /* A volatile function should never return. Call abort. */
c11145f6 6848 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6849 {
86efdc8e 6850 rtx op;
ed0e6530 6851 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 6852 assemble_external_libcall (op);
e2c671ba 6853 output_asm_insn ("bl\t%a0", &op);
949d79eb 6854 return "";
e2c671ba
RE
6855 }
6856
f3bb6135 6857 for (reg = 0; reg <= 10; reg++)
5895f793 6858 if (regs_ever_live[reg] && !call_used_regs[reg])
cce8749e 6859 {
ff9940b0
RE
6860 live_regs_mask |= (1 << reg);
6861 floats_offset += 4;
cce8749e
CH
6862 }
6863
d5b7b3ae 6864 /* Handle the frame pointer as a special case. */
5895f793
RE
6865 if (!TARGET_APCS_FRAME
6866 && !frame_pointer_needed
d5b7b3ae 6867 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 6868 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
6869 {
6870 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6871 floats_offset += 4;
6872 }
6873
ed0e6530
PB
6874 /* If we aren't loading the PIC register, don't stack it even though it may
6875 be live. */
5895f793 6876 if (flag_pic && !TARGET_SINGLE_PIC_BASE
ed0e6530 6877 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6878 {
6879 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6880 floats_offset += 4;
6881 }
6882
ff9940b0 6883 if (frame_pointer_needed)
cce8749e 6884 {
b111229a
RE
6885 if (arm_fpu_arch == FP_SOFT2)
6886 {
d5b7b3ae 6887 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
5895f793 6888 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6889 {
6890 floats_offset += 12;
dd18ae56
NC
6891 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6892 reg, FP_REGNUM, floats_offset);
b111229a
RE
6893 }
6894 }
6895 else
6896 {
d5b7b3ae 6897 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 6898
d5b7b3ae 6899 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a 6900 {
5895f793 6901 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6902 {
6903 floats_offset += 12;
6cfc7210 6904
6354dc9b 6905 /* We can't unstack more than four registers at once. */
b111229a
RE
6906 if (start_reg - reg == 3)
6907 {
dd18ae56
NC
6908 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6909 reg, FP_REGNUM, floats_offset);
b111229a
RE
6910 start_reg = reg - 1;
6911 }
6912 }
6913 else
6914 {
6915 if (reg != start_reg)
dd18ae56
NC
6916 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6917 reg + 1, start_reg - reg,
6918 FP_REGNUM, floats_offset);
b111229a
RE
6919 start_reg = reg - 1;
6920 }
6921 }
6922
6923 /* Just in case the last register checked also needs unstacking. */
6924 if (reg != start_reg)
dd18ae56
NC
6925 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6926 reg + 1, start_reg - reg,
6927 FP_REGNUM, floats_offset);
b111229a 6928 }
da6558fd 6929
6cfc7210 6930 if (TARGET_INTERWORK)
b111229a
RE
6931 {
6932 live_regs_mask |= 0x6800;
dd18ae56 6933 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
d5b7b3ae
RE
6934 if (eh_ofs)
6935 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6936 REGNO (eh_ofs));
0616531f
RE
6937 if (really_return)
6938 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
d5b7b3ae 6939 }
5895f793 6940 else if (eh_ofs || !really_return)
d5b7b3ae
RE
6941 {
6942 live_regs_mask |= 0x6800;
6943 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
0616531f
RE
6944 if (eh_ofs)
6945 {
6946 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6947 REGNO (eh_ofs));
6948 /* Even in 26-bit mode we do a mov (rather than a movs)
6949 because we don't have the PSR bits set in the
6950 address. */
6951 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6952 }
b111229a
RE
6953 }
6954 else
6955 {
6956 live_regs_mask |= 0xA800;
dd18ae56 6957 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
6958 TARGET_APCS_32 ? FALSE : TRUE);
6959 }
cce8749e
CH
6960 }
6961 else
6962 {
d2288d8d 6963 /* Restore stack pointer if necessary. */
56636818 6964 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
6965 {
6966 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
6967 operands[2] = GEN_INT (frame_size
6968 + current_function_outgoing_args_size);
d2288d8d
TG
6969 output_add_immediate (operands);
6970 }
6971
b111229a
RE
6972 if (arm_fpu_arch == FP_SOFT2)
6973 {
d5b7b3ae 6974 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
5895f793 6975 if (regs_ever_live[reg] && !call_used_regs[reg])
dd18ae56
NC
6976 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6977 reg, SP_REGNUM);
b111229a
RE
6978 }
6979 else
6980 {
d5b7b3ae 6981 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 6982
d5b7b3ae 6983 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 6984 {
5895f793 6985 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
6986 {
6987 if (reg - start_reg == 3)
6988 {
dd18ae56
NC
6989 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6990 start_reg, SP_REGNUM);
b111229a
RE
6991 start_reg = reg + 1;
6992 }
6993 }
6994 else
6995 {
6996 if (reg != start_reg)
dd18ae56
NC
6997 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6998 start_reg, reg - start_reg,
6999 SP_REGNUM);
6cfc7210 7000
b111229a
RE
7001 start_reg = reg + 1;
7002 }
7003 }
7004
7005 /* Just in case the last register checked also needs unstacking. */
7006 if (reg != start_reg)
dd18ae56
NC
7007 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7008 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
7009 }
7010
62b10bbc 7011 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 7012 {
6cfc7210 7013 if (TARGET_INTERWORK)
b111229a 7014 {
0616531f 7015 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2 7016
d5b7b3ae
RE
7017 /* Handle LR on its own. */
7018 if (live_regs_mask == (1 << LR_REGNUM))
7019 {
7020 if (eh_ofs)
7021 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7022 SP_REGNUM);
7023 else
7024 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7025 SP_REGNUM);
7026 }
7027 else if (live_regs_mask != 0)
7028 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7029 FALSE);
7030
7031 if (eh_ofs)
7032 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7033 REGNO (eh_ofs));
7034
0616531f
RE
7035 if (really_return)
7036 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
b111229a 7037 }
d5b7b3ae
RE
7038 else if (eh_ofs)
7039 {
7040 if (live_regs_mask == 0)
7041 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7042 else
7043 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
7044 live_regs_mask | (1 << LR_REGNUM), FALSE);
7045
7046 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7047 REGNO (eh_ofs));
7048 /* Jump to the target; even in 26-bit mode. */
7049 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7050 }
5895f793 7051 else if (TARGET_APCS_32 && live_regs_mask == 0 && !really_return)
0616531f
RE
7052 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7053 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
d5b7b3ae 7054 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
5895f793 7055 else if (!really_return)
0616531f
RE
7056 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7057 live_regs_mask | (1 << LR_REGNUM), FALSE);
32de079a 7058 else
d5b7b3ae
RE
7059 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7060 live_regs_mask | (1 << PC_REGNUM),
32de079a 7061 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
7062 }
7063 else
7064 {
62b10bbc 7065 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 7066 {
6354dc9b 7067 /* Restore the integer regs, and the return address into lr. */
0616531f 7068 live_regs_mask |= 1 << LR_REGNUM;
32de079a 7069
d5b7b3ae
RE
7070 if (live_regs_mask == (1 << LR_REGNUM))
7071 {
7072 if (eh_ofs)
7073 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7074 SP_REGNUM);
7075 else
7076 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7077 SP_REGNUM);
7078 }
7079 else if (live_regs_mask != 0)
7080 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7081 FALSE);
cce8749e 7082 }
b111229a 7083
cce8749e
CH
7084 if (current_function_pretend_args_size)
7085 {
6354dc9b 7086 /* Unwind the pre-pushed regs. */
cce8749e 7087 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 7088 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
7089 output_add_immediate (operands);
7090 }
d5b7b3ae
RE
7091
7092 if (eh_ofs)
7093 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7094 REGNO (eh_ofs));
0616531f
RE
7095
7096 if (really_return)
7097 {
7098 /* And finally, go home. */
7099 if (TARGET_INTERWORK)
7100 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7101 else if (TARGET_APCS_32 || eh_ofs)
7102 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7103 else
7104 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7105 }
cce8749e
CH
7106 }
7107 }
f3bb6135 7108
949d79eb
RE
7109 return "";
7110}
7111
7112void
eb3921e8 7113output_func_epilogue (frame_size)
949d79eb
RE
7114 int frame_size;
7115{
d5b7b3ae
RE
7116 if (TARGET_THUMB)
7117 {
7118 /* ??? Probably not safe to set this here, since it assumes that a
7119 function will be emitted as assembly immediately after we generate
7120 RTL for it. This does not happen for inline functions. */
7121 return_used_this_function = 0;
7122 }
7123 else
7124 {
7125 if (use_return_insn (FALSE)
7126 && return_used_this_function
7127 && (frame_size + current_function_outgoing_args_size) != 0
5895f793 7128 && !frame_pointer_needed)
d5b7b3ae 7129 abort ();
f3bb6135 7130
d5b7b3ae
RE
7131 /* Reset the ARM-specific per-function variables. */
7132 current_function_anonymous_args = 0;
7133 after_arm_reorg = 0;
7134 }
f3bb6135 7135}
e2c671ba 7136
2c849145
JM
7137/* Generate and emit an insn that we will recognize as a push_multi.
7138 Unfortunately, since this insn does not reflect very well the actual
7139 semantics of the operation, we need to annotate the insn for the benefit
7140 of DWARF2 frame unwind information. */
2c849145 7141static rtx
e2c671ba
RE
7142emit_multi_reg_push (mask)
7143 int mask;
7144{
7145 int num_regs = 0;
7146 int i, j;
7147 rtx par;
2c849145 7148 rtx dwarf;
87e27392 7149 int dwarf_par_index;
2c849145 7150 rtx tmp, reg;
e2c671ba 7151
d5b7b3ae 7152 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 7153 if (mask & (1 << i))
5895f793 7154 num_regs++;
e2c671ba
RE
7155
7156 if (num_regs == 0 || num_regs > 16)
7157 abort ();
7158
87e27392
NC
7159 /* For the body of the insn we are going to generate an UNSPEC in
7160 parallel with several USEs. This allows the insn to be recognised
7161 by the push_multi pattern in the arm.md file. The insn looks
7162 something like this:
7163
7164 (parallel [
7165 (set (mem:BLK (pre_dec:BLK (reg:SI sp))) (unspec:BLK [(reg:SI r4)] 2))
7166 (use (reg:SI 11 fp))
7167 (use (reg:SI 12 ip))
7168 (use (reg:SI 14 lr))
7169 (use (reg:SI 15 pc))
7170 ])
7171
7172 For the frame note however, we try to be more explicit and actually
7173 show each register being stored into the stack frame, plus a (single)
7174 decrement of the stack pointer. We do it this way in order to be
7175 friendly to the stack unwinding code, which only wants to see a single
7176 stack decrement per instruction. The RTL we generate for the note looks
7177 something like this:
7178
7179 (sequence [
7180 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7181 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7182 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7183 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7184 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7185 (set (mem:SI (plus:SI (reg:SI sp) (const_int 16))) (reg:SI pc))
7186 ])
7187
7188 This sequence is used both by the code to support stack unwinding for
7189	   exception handlers and the code to generate dwarf2 frame debugging.  */
7190
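  /* As an illustrative sketch (register choice is an example only): for a
     MASK selecting r4, fp, ip, lr and pc -- the typical APCS frame save --
     the parallel built below would typically be output as a single
     store-multiple such as

	stmfd	sp!, {r4, fp, ip, lr, pc}

     while the attached note describes the same effect as five individual
     word stores plus one 20-byte decrement of the stack pointer.  */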
43cffd11 7191 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
87e27392 7192 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_regs + 1));
2c849145 7193 RTX_FRAME_RELATED_P (dwarf) = 1;
87e27392 7194 dwarf_par_index = 1;
e2c671ba 7195
d5b7b3ae 7196 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba
RE
7197 {
7198 if (mask & (1 << i))
7199 {
2c849145
JM
7200 reg = gen_rtx_REG (SImode, i);
7201
e2c671ba 7202 XVECEXP (par, 0, 0)
43cffd11
RE
7203 = gen_rtx_SET (VOIDmode,
7204 gen_rtx_MEM (BLKmode,
7205 gen_rtx_PRE_DEC (BLKmode,
7206 stack_pointer_rtx)),
7207 gen_rtx_UNSPEC (BLKmode,
2c849145 7208 gen_rtvec (1, reg),
43cffd11 7209 2));
2c849145
JM
7210
7211 tmp = gen_rtx_SET (VOIDmode,
87e27392 7212 gen_rtx_MEM (SImode, stack_pointer_rtx),
2c849145
JM
7213 reg);
7214 RTX_FRAME_RELATED_P (tmp) = 1;
87e27392
NC
7215 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7216 dwarf_par_index ++;
2c849145 7217
e2c671ba
RE
7218 break;
7219 }
7220 }
7221
7222 for (j = 1, i++; j < num_regs; i++)
7223 {
7224 if (mask & (1 << i))
7225 {
2c849145
JM
7226 reg = gen_rtx_REG (SImode, i);
7227
7228 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7229
7230 tmp = gen_rtx_SET (VOIDmode,
7231 gen_rtx_MEM (SImode,
87e27392
NC
7232 gen_rtx_PLUS (SImode,
7233 stack_pointer_rtx,
7234 GEN_INT (4 * j))),
2c849145
JM
7235 reg);
7236 RTX_FRAME_RELATED_P (tmp) = 1;
87e27392 7237 XVECEXP (dwarf, 0, dwarf_par_index ++) = tmp;
2c849145 7238
e2c671ba
RE
7239 j++;
7240 }
7241 }
b111229a 7242
2c849145 7243 par = emit_insn (par);
87e27392
NC
7244
7245 tmp = gen_rtx_SET (SImode,
7246 stack_pointer_rtx,
7247 gen_rtx_PLUS (SImode,
7248 stack_pointer_rtx,
7249 GEN_INT (-4 * num_regs)));
7250 RTX_FRAME_RELATED_P (tmp) = 1;
7251 XVECEXP (dwarf, 0, 0) = tmp;
7252
2c849145
JM
7253 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7254 REG_NOTES (par));
7255 return par;
b111229a
RE
7256}
7257
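/* Generate and emit an insn that will be recognized as a store-multiple
   of COUNT floating point (FPA) registers starting at BASE_REG.  As in
   emit_multi_reg_push above, the insn is annotated with a DWARF2 frame
   note that lists each register store separately for the unwinder.  */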
2c849145 7258static rtx
b111229a
RE
7259emit_sfm (base_reg, count)
7260 int base_reg;
7261 int count;
7262{
7263 rtx par;
2c849145
JM
7264 rtx dwarf;
7265 rtx tmp, reg;
b111229a
RE
7266 int i;
7267
43cffd11 7268 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
7269 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7270 RTX_FRAME_RELATED_P (dwarf) = 1;
7271
7272 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
7273
7274 XVECEXP (par, 0, 0)
7275 = gen_rtx_SET (VOIDmode,
7276 gen_rtx_MEM (BLKmode,
7277 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7278 gen_rtx_UNSPEC (BLKmode,
2c849145 7279 gen_rtvec (1, reg),
43cffd11 7280 2));
2c849145
JM
7281 tmp
7282 = gen_rtx_SET (VOIDmode,
7283 gen_rtx_MEM (XFmode,
7284 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7285 reg);
7286 RTX_FRAME_RELATED_P (tmp) = 1;
7287 XVECEXP (dwarf, 0, count - 1) = tmp;
7288
b111229a 7289 for (i = 1; i < count; i++)
2c849145
JM
7290 {
7291 reg = gen_rtx_REG (XFmode, base_reg++);
7292 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7293
7294 tmp = gen_rtx_SET (VOIDmode,
7295 gen_rtx_MEM (XFmode,
7296 gen_rtx_PRE_DEC (BLKmode,
7297 stack_pointer_rtx)),
7298 reg);
7299 RTX_FRAME_RELATED_P (tmp) = 1;
7300 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7301 }
b111229a 7302
2c849145
JM
7303 par = emit_insn (par);
7304 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7305 REG_NOTES (par));
7306 return par;
e2c671ba
RE
7307}
7308
7309void
7310arm_expand_prologue ()
7311{
7312 int reg;
56636818
JL
7313 rtx amount = GEN_INT (-(get_frame_size ()
7314 + current_function_outgoing_args_size));
e2c671ba
RE
7315 int live_regs_mask = 0;
7316 int store_arg_regs = 0;
949d79eb
RE
7317 /* If this function doesn't return, then there is no need to push
7318 the call-saved regs. */
46406379 7319 int volatile_func = arm_volatile_func ();
2c849145 7320 rtx insn;
e2c671ba 7321
31fdb4d5
DE
7322 /* Naked functions don't have prologues. */
7323 if (arm_naked_function_p (current_function_decl))
7324 return;
7325
e2c671ba
RE
7326 if (current_function_anonymous_args && current_function_pretend_args_size)
7327 store_arg_regs = 1;
7328
5895f793 7329 if (!volatile_func)
6ed30148
RE
7330 {
7331 for (reg = 0; reg <= 10; reg++)
5895f793 7332 if (regs_ever_live[reg] && !call_used_regs[reg])
6ed30148
RE
7333 live_regs_mask |= 1 << reg;
7334
5895f793
RE
7335 if (!TARGET_APCS_FRAME
7336 && !frame_pointer_needed
d5b7b3ae 7337 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
5895f793 7338 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
d5b7b3ae
RE
7339 live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7340
6ed30148
RE
7341 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7342 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
e2c671ba 7343
62b10bbc
NC
7344 if (regs_ever_live[LR_REGNUM])
7345 live_regs_mask |= 1 << LR_REGNUM;
6ed30148 7346 }
e2c671ba
RE
7347
7348 if (frame_pointer_needed)
7349 {
7350 live_regs_mask |= 0xD800;
2c849145
JM
7351 insn = emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
7352 stack_pointer_rtx));
7353 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7354 }
7355
7356 if (current_function_pretend_args_size)
7357 {
7358 if (store_arg_regs)
2c849145
JM
7359 insn = emit_multi_reg_push
7360 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
e2c671ba 7361 else
2c849145
JM
7362 insn = emit_insn
7363 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7364 GEN_INT (-current_function_pretend_args_size)));
7365 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7366 }
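  /* A worked example of the mask computation above: with a pretend args
     size of 8 bytes, (0xf0 >> (8 / 4)) & 0xf == 0xc, which selects r2 and
     r3 -- exactly the two argument registers that need to be pushed.  */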
7367
7368 if (live_regs_mask)
7369 {
7370 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 7371 we won't get a proper return. */
62b10bbc 7372 live_regs_mask |= 1 << LR_REGNUM;
2c849145
JM
7373 insn = emit_multi_reg_push (live_regs_mask);
7374 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7375 }
7376
d5b7b3ae
RE
7377	  /* For now the integer regs are still popped in output_arm_epilogue ().  */
7378
5895f793 7379 if (!volatile_func)
b111229a
RE
7380 {
7381 if (arm_fpu_arch == FP_SOFT2)
7382 {
d5b7b3ae 7383 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
5895f793 7384 if (regs_ever_live[reg] && !call_used_regs[reg])
2c849145
JM
7385 {
7386 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7387 insn = gen_rtx_MEM (XFmode, insn);
7388 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7389 gen_rtx_REG (XFmode, reg)));
7390 RTX_FRAME_RELATED_P (insn) = 1;
7391 }
b111229a
RE
7392 }
7393 else
7394 {
d5b7b3ae 7395 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7396
d5b7b3ae 7397 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a 7398 {
5895f793 7399 if (regs_ever_live[reg] && !call_used_regs[reg])
b111229a
RE
7400 {
7401 if (start_reg - reg == 3)
7402 {
2c849145
JM
7403 insn = emit_sfm (reg, 4);
7404 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
7405 start_reg = reg - 1;
7406 }
7407 }
7408 else
7409 {
7410 if (start_reg != reg)
2c849145
JM
7411 {
7412 insn = emit_sfm (reg + 1, start_reg - reg);
7413 RTX_FRAME_RELATED_P (insn) = 1;
7414 }
b111229a
RE
7415 start_reg = reg - 1;
7416 }
7417 }
7418
7419 if (start_reg != reg)
2c849145
JM
7420 {
7421 insn = emit_sfm (reg + 1, start_reg - reg);
7422 RTX_FRAME_RELATED_P (insn) = 1;
7423 }
b111229a
RE
7424 }
7425 }
e2c671ba
RE
7426
7427 if (frame_pointer_needed)
2c849145
JM
7428 {
7429 insn = GEN_INT (-(4 + current_function_pretend_args_size));
7430 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx,
7431 gen_rtx_REG (SImode, IP_REGNUM),
7432 insn));
7433 RTX_FRAME_RELATED_P (insn) = 1;
7434 }
e2c671ba
RE
7435
7436 if (amount != const0_rtx)
7437 {
2c849145
JM
7438 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7439 amount));
7440 RTX_FRAME_RELATED_P (insn) = 1;
e04c2d6c
RE
7441
7442 /* If the frame pointer is needed, emit a special barrier that
7443 will prevent the scheduler from moving stores to the frame
7444 before the stack adjustment. */
7445 if (frame_pointer_needed)
7446 {
7447 rtx unspec = gen_rtx_UNSPEC (SImode,
7448 gen_rtvec (2, stack_pointer_rtx,
7449 hard_frame_pointer_rtx), 4);
7450
7451 emit_insn (gen_rtx_CLOBBER (VOIDmode,
7452 gen_rtx_MEM (BLKmode, unspec)));
7453 }
e2c671ba
RE
7454 }
7455
7456 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
7457 the call to mcount. Similarly if the user has requested no
7458 scheduling in the prolog. */
7459 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba
RE
7460 emit_insn (gen_blockage ());
7461}
cce8749e 7462\f
9997d19d
RE
7463/* If CODE is 'd', then the X is a condition operand and the instruction
7464 should only be executed if the condition is true.
ddd5a7c1 7465   If CODE is 'D', then the X is a condition operand and the instruction
9997d19d
RE
7466 should only be executed if the condition is false: however, if the mode
7467 of the comparison is CCFPEmode, then always execute the instruction -- we
7468 do this because in these circumstances !GE does not necessarily imply LT;
7469 in these cases the instruction pattern will take care to make sure that
7470 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 7471 doing this instruction unconditionally.
9997d19d
RE
7472 If CODE is 'N' then X is a floating point operand that must be negated
7473 before output.
7474 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7475 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
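/* For example (illustrative only): for a condition rtx such as
   (ge (reg:CC xx) (const_int 0)), where xx is the condition code
   register, '%d' prints "ge" while '%D' prints the inverse condition,
   "lt".  */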
7476
7477void
7478arm_print_operand (stream, x, code)
62b10bbc 7479 FILE * stream;
9997d19d
RE
7480 rtx x;
7481 int code;
7482{
7483 switch (code)
7484 {
7485 case '@':
f3139301 7486 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
7487 return;
7488
d5b7b3ae
RE
7489 case '_':
7490 fputs (user_label_prefix, stream);
7491 return;
7492
9997d19d 7493 case '|':
f3139301 7494 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
7495 return;
7496
7497 case '?':
7498 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
cca0a211
RE
7499 {
7500 if (TARGET_THUMB || current_insn_predicate != NULL)
7501 abort ();
7502
7503 fputs (arm_condition_codes[arm_current_cc], stream);
7504 }
7505 else if (current_insn_predicate)
7506 {
7507 enum arm_cond_code code;
7508
7509 if (TARGET_THUMB)
7510 abort ();
7511
7512 code = get_arm_condition_code (current_insn_predicate);
7513 fputs (arm_condition_codes[code], stream);
7514 }
9997d19d
RE
7515 return;
7516
7517 case 'N':
7518 {
7519 REAL_VALUE_TYPE r;
7520 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7521 r = REAL_VALUE_NEGATE (r);
7522 fprintf (stream, "%s", fp_const_from_val (&r));
7523 }
7524 return;
7525
7526 case 'B':
7527 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
7528 {
7529 HOST_WIDE_INT val;
5895f793 7530 val = ARM_SIGN_EXTEND (~INTVAL (x));
36ba9cb8 7531 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7532 }
9997d19d
RE
7533 else
7534 {
7535 putc ('~', stream);
7536 output_addr_const (stream, x);
7537 }
7538 return;
7539
7540 case 'i':
7541 fprintf (stream, "%s", arithmetic_instr (x, 1));
7542 return;
7543
7544 case 'I':
7545 fprintf (stream, "%s", arithmetic_instr (x, 0));
7546 return;
7547
7548 case 'S':
7549 {
7550 HOST_WIDE_INT val;
5895f793 7551 const char * shift = shift_op (x, &val);
9997d19d 7552
e2c671ba
RE
7553 if (shift)
7554 {
5895f793 7555 fprintf (stream, ", %s ", shift_op (x, &val));
e2c671ba
RE
7556 if (val == -1)
7557 arm_print_operand (stream, XEXP (x, 1), 0);
7558 else
4bc74ece
NC
7559 {
7560 fputc ('#', stream);
36ba9cb8 7561 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7562 }
e2c671ba 7563 }
9997d19d
RE
7564 }
7565 return;
7566
d5b7b3ae
RE
7567 /* An explanation of the 'Q', 'R' and 'H' register operands:
7568
7569 In a pair of registers containing a DI or DF value the 'Q'
7570 operand returns the register number of the register containing
7571	   the least significant part of the value.  The 'R' operand returns
7572 the register number of the register containing the most
7573 significant part of the value.
7574
7575 The 'H' operand returns the higher of the two register numbers.
7576 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
7577	   same as the 'Q' operand, since the most significant part of the
7578	   value is held in the lower-numbered register.  The reverse is true
7579 on systems where WORDS_BIG_ENDIAN is false.
7580
7581 The purpose of these operands is to distinguish between cases
7582 where the endian-ness of the values is important (for example
7583 when they are added together), and cases where the endian-ness
7584 is irrelevant, but the order of register operations is important.
7585 For example when loading a value from memory into a register
7586 pair, the endian-ness does not matter. Provided that the value
7587 from the lower memory address is put into the lower numbered
7588 register, and the value from the higher address is put into the
7589 higher numbered register, the load will work regardless of whether
7590 the value being loaded is big-wordian or little-wordian. The
7591 order of the two register loads can matter however, if the address
7592 of the memory location is actually held in one of the registers
7593 being overwritten by the load. */
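    /* A concrete example, assuming WORDS_BIG_ENDIAN is false: for a DImode
       value held in the pair r0/r1 the least significant word lives in r0,
       so '%Q' prints r0, '%R' prints r1, and '%H' (the higher register
       number) also prints r1.  */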
c1c2bc04 7594 case 'Q':
d5b7b3ae 7595 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 7596 abort ();
d5b7b3ae 7597 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
7598 return;
7599
9997d19d 7600 case 'R':
d5b7b3ae 7601 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 7602 abort ();
d5b7b3ae
RE
7603 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
7604 return;
7605
7606 case 'H':
7607 if (REGNO (x) > LAST_ARM_REGNUM)
7608 abort ();
7609 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
7610 return;
7611
7612 case 'm':
d5b7b3ae
RE
7613 asm_fprintf (stream, "%r",
7614 GET_CODE (XEXP (x, 0)) == REG
7615 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
7616 return;
7617
7618 case 'M':
dd18ae56 7619 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae
RE
7620 REGNO (x),
7621 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
7622 return;
7623
7624 case 'd':
5895f793 7625 if (!x)
d5b7b3ae
RE
7626 return;
7627
7628 if (TARGET_ARM)
9997d19d
RE
7629 fputs (arm_condition_codes[get_arm_condition_code (x)],
7630 stream);
d5b7b3ae
RE
7631 else
7632 fputs (thumb_condition_code (x, 0), stream);
9997d19d
RE
7633 return;
7634
7635 case 'D':
5895f793 7636 if (!x)
d5b7b3ae
RE
7637 return;
7638
7639 if (TARGET_ARM)
7640 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
7641 (get_arm_condition_code (x))],
9997d19d 7642 stream);
d5b7b3ae
RE
7643 else
7644 fputs (thumb_condition_code (x, 1), stream);
9997d19d
RE
7645 return;
7646
7647 default:
7648 if (x == 0)
7649 abort ();
7650
7651 if (GET_CODE (x) == REG)
d5b7b3ae 7652 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
7653 else if (GET_CODE (x) == MEM)
7654 {
7655 output_memory_reference_mode = GET_MODE (x);
7656 output_address (XEXP (x, 0));
7657 }
7658 else if (GET_CODE (x) == CONST_DOUBLE)
7659 fprintf (stream, "#%s", fp_immediate_constant (x));
7660 else if (GET_CODE (x) == NEG)
6354dc9b 7661 abort (); /* This should never happen now. */
9997d19d
RE
7662 else
7663 {
7664 fputc ('#', stream);
7665 output_addr_const (stream, x);
7666 }
7667 }
7668}
cce8749e
CH
7669\f
7670/* A finite state machine takes care of noticing whether or not instructions
7671 can be conditionally executed, and thus decrease execution time and code
7672 size by deleting branch instructions. The fsm is controlled by
7673 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
7674
7675/* The states of the fsm controlling condition codes are:
7676 0: normal, do nothing special
7677 1: make ASM_OUTPUT_OPCODE not output this instruction
7678 2: make ASM_OUTPUT_OPCODE not output this instruction
7679 3: make instructions conditional
7680 4: make instructions conditional
7681
7682 State transitions (state->state by whom under condition):
7683 0 -> 1 final_prescan_insn if the `target' is a label
7684 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
7685 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
7686 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
7687 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
7688 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
7689 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
7690 (the target insn is arm_target_insn).
7691
ff9940b0
RE
7692 If the jump clobbers the conditions then we use states 2 and 4.
7693
7694 A similar thing can be done with conditional return insns.
7695
cce8749e
CH
7696 XXX In case the `target' is an unconditional branch, this conditionalising
7697 of the instructions always reduces code size, but not always execution
7698 time. But then, I want to reduce the code size to somewhere near what
7699 /bin/cc produces. */
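/* A sketch of the transformation this fsm enables: a short forward
   branch such as

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   can instead be emitted as

	cmp	r0, #0
	addne	r1, r1, #1

   eliminating the branch entirely.  */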
7700
cce8749e
CH
7701/* Returns the index of the ARM condition code string in
7702 `arm_condition_codes'. COMPARISON should be an rtx like
7703 `(eq (...) (...))'. */
7704
84ed5e79 7705static enum arm_cond_code
cce8749e
CH
7706get_arm_condition_code (comparison)
7707 rtx comparison;
7708{
5165176d 7709 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
7710 register int code;
7711 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
7712
7713 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 7714 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
7715 XEXP (comparison, 1));
7716
7717 switch (mode)
cce8749e 7718 {
84ed5e79
RE
7719 case CC_DNEmode: code = ARM_NE; goto dominance;
7720 case CC_DEQmode: code = ARM_EQ; goto dominance;
7721 case CC_DGEmode: code = ARM_GE; goto dominance;
7722 case CC_DGTmode: code = ARM_GT; goto dominance;
7723 case CC_DLEmode: code = ARM_LE; goto dominance;
7724 case CC_DLTmode: code = ARM_LT; goto dominance;
7725 case CC_DGEUmode: code = ARM_CS; goto dominance;
7726 case CC_DGTUmode: code = ARM_HI; goto dominance;
7727 case CC_DLEUmode: code = ARM_LS; goto dominance;
7728 case CC_DLTUmode: code = ARM_CC;
7729
7730 dominance:
7731 if (comp_code != EQ && comp_code != NE)
7732 abort ();
7733
7734 if (comp_code == EQ)
7735 return ARM_INVERSE_CONDITION_CODE (code);
7736 return code;
7737
5165176d 7738 case CC_NOOVmode:
84ed5e79 7739 switch (comp_code)
5165176d 7740 {
84ed5e79
RE
7741 case NE: return ARM_NE;
7742 case EQ: return ARM_EQ;
7743 case GE: return ARM_PL;
7744 case LT: return ARM_MI;
5165176d
RE
7745 default: abort ();
7746 }
7747
7748 case CC_Zmode:
84ed5e79 7749 switch (comp_code)
5165176d 7750 {
84ed5e79
RE
7751 case NE: return ARM_NE;
7752 case EQ: return ARM_EQ;
5165176d
RE
7753 default: abort ();
7754 }
7755
7756 case CCFPEmode:
e45b72c4
RE
7757 case CCFPmode:
7758 /* These encodings assume that AC=1 in the FPA system control
7759 byte. This allows us to handle all cases except UNEQ and
7760 LTGT. */
84ed5e79
RE
7761 switch (comp_code)
7762 {
7763 case GE: return ARM_GE;
7764 case GT: return ARM_GT;
7765 case LE: return ARM_LS;
7766 case LT: return ARM_MI;
e45b72c4
RE
7767 case NE: return ARM_NE;
7768 case EQ: return ARM_EQ;
7769 case ORDERED: return ARM_VC;
7770 case UNORDERED: return ARM_VS;
7771 case UNLT: return ARM_LT;
7772 case UNLE: return ARM_LE;
7773 case UNGT: return ARM_HI;
7774 case UNGE: return ARM_PL;
7775 /* UNEQ and LTGT do not have a representation. */
7776 case UNEQ: /* Fall through. */
7777 case LTGT: /* Fall through. */
84ed5e79
RE
7778 default: abort ();
7779 }
7780
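    /* For CC_SWPmode the comparison operands were swapped when the
       condition flags were set, so each comparison below maps to its
       operand-swapped counterpart: GT becomes LT, GEU becomes LS
       (unsigned lower or same), and so on.  */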
7781 case CC_SWPmode:
7782 switch (comp_code)
7783 {
7784 case NE: return ARM_NE;
7785 case EQ: return ARM_EQ;
7786 case GE: return ARM_LE;
7787 case GT: return ARM_LT;
7788 case LE: return ARM_GE;
7789 case LT: return ARM_GT;
7790 case GEU: return ARM_LS;
7791 case GTU: return ARM_CC;
7792 case LEU: return ARM_CS;
7793 case LTU: return ARM_HI;
7794 default: abort ();
7795 }
7796
bd9c7e23
RE
7797 case CC_Cmode:
7798 switch (comp_code)
7799 {
7800 case LTU: return ARM_CS;
7801 case GEU: return ARM_CC;
7802 default: abort ();
7803 }
7804
5165176d 7805 case CCmode:
84ed5e79 7806 switch (comp_code)
5165176d 7807 {
84ed5e79
RE
7808 case NE: return ARM_NE;
7809 case EQ: return ARM_EQ;
7810 case GE: return ARM_GE;
7811 case GT: return ARM_GT;
7812 case LE: return ARM_LE;
7813 case LT: return ARM_LT;
7814 case GEU: return ARM_CS;
7815 case GTU: return ARM_HI;
7816 case LEU: return ARM_LS;
7817 case LTU: return ARM_CC;
5165176d
RE
7818 default: abort ();
7819 }
7820
cce8749e
CH
7821 default: abort ();
7822 }
84ed5e79
RE
7823
7824 abort ();
f3bb6135 7825}
cce8749e
CH
7826
7827
7828void
74bbc178 7829arm_final_prescan_insn (insn)
cce8749e 7830 rtx insn;
cce8749e
CH
7831{
7832 /* BODY will hold the body of INSN. */
7833 register rtx body = PATTERN (insn);
7834
7835 /* This will be 1 if trying to repeat the trick, and things need to be
7836 reversed if it appears to fail. */
7837 int reverse = 0;
7838
ff9940b0
RE
7839	  /* A nonzero JUMP_CLOBBERS implies that the conditions are clobbered if a
7840	     branch is taken, even if the rtl suggests otherwise.  It also
7841 means that we have to grub around within the jump expression to find
7842 out what the conditions are when the jump isn't taken. */
7843 int jump_clobbers = 0;
7844
6354dc9b 7845 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
7846 int seeking_return = 0;
7847
cce8749e
CH
7848 /* START_INSN will hold the insn from where we start looking. This is the
7849 first insn after the following code_label if REVERSE is true. */
7850 rtx start_insn = insn;
7851
7852 /* If in state 4, check if the target branch is reached, in order to
7853 change back to state 0. */
7854 if (arm_ccfsm_state == 4)
7855 {
7856 if (insn == arm_target_insn)
f5a1b0d2
NC
7857 {
7858 arm_target_insn = NULL;
7859 arm_ccfsm_state = 0;
7860 }
cce8749e
CH
7861 return;
7862 }
7863
7864 /* If in state 3, it is possible to repeat the trick, if this insn is an
7865 unconditional branch to a label, and immediately following this branch
7866 is the previous target label which is only used once, and the label this
7867 branch jumps to is not too far off. */
7868 if (arm_ccfsm_state == 3)
7869 {
7870 if (simplejump_p (insn))
7871 {
7872 start_insn = next_nonnote_insn (start_insn);
7873 if (GET_CODE (start_insn) == BARRIER)
7874 {
7875 /* XXX Isn't this always a barrier? */
7876 start_insn = next_nonnote_insn (start_insn);
7877 }
7878 if (GET_CODE (start_insn) == CODE_LABEL
7879 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7880 && LABEL_NUSES (start_insn) == 1)
7881 reverse = TRUE;
7882 else
7883 return;
7884 }
ff9940b0
RE
7885 else if (GET_CODE (body) == RETURN)
7886 {
7887 start_insn = next_nonnote_insn (start_insn);
7888 if (GET_CODE (start_insn) == BARRIER)
7889 start_insn = next_nonnote_insn (start_insn);
7890 if (GET_CODE (start_insn) == CODE_LABEL
7891 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7892 && LABEL_NUSES (start_insn) == 1)
7893 {
7894 reverse = TRUE;
7895 seeking_return = 1;
7896 }
7897 else
7898 return;
7899 }
cce8749e
CH
7900 else
7901 return;
7902 }
7903
7904 if (arm_ccfsm_state != 0 && !reverse)
7905 abort ();
7906 if (GET_CODE (insn) != JUMP_INSN)
7907 return;
7908
ddd5a7c1 7909   /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
7910	     the jump should always come first.  */
7911 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
7912 body = XVECEXP (body, 0, 0);
7913
7914#if 0
7915	  /* If this is a conditional return then we don't want to know.  */
7916 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7917 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
7918 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
7919 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
7920 return;
7921#endif
7922
cce8749e
CH
7923 if (reverse
7924 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7925 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
7926 {
bd9c7e23
RE
7927 int insns_skipped;
7928 int fail = FALSE, succeed = FALSE;
cce8749e
CH
7929 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
7930 int then_not_else = TRUE;
ff9940b0 7931 rtx this_insn = start_insn, label = 0;
cce8749e 7932
e45b72c4
RE
7933 /* If the jump cannot be done with one instruction, we cannot
7934 conditionally execute the instruction in the inverse case. */
ff9940b0 7935 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40 7936 {
5bbe2d40
RE
7937 jump_clobbers = 1;
7938 return;
7939 }
ff9940b0 7940
cce8749e
CH
7941 /* Register the insn jumped to. */
7942 if (reverse)
ff9940b0
RE
7943 {
7944 if (!seeking_return)
7945 label = XEXP (SET_SRC (body), 0);
7946 }
cce8749e
CH
7947 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
7948 label = XEXP (XEXP (SET_SRC (body), 1), 0);
7949 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
7950 {
7951 label = XEXP (XEXP (SET_SRC (body), 2), 0);
7952 then_not_else = FALSE;
7953 }
ff9940b0
RE
7954 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
7955 seeking_return = 1;
7956 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
7957 {
7958 seeking_return = 1;
7959 then_not_else = FALSE;
7960 }
cce8749e
CH
7961 else
7962 abort ();
7963
7964 /* See how many insns this branch skips, and what kind of insns. If all
7965 insns are okay, and the label or unconditional branch to the same
7966 label is not too far away, succeed. */
7967 for (insns_skipped = 0;
b36ba79f 7968 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
7969 {
7970 rtx scanbody;
7971
7972 this_insn = next_nonnote_insn (this_insn);
7973 if (!this_insn)
7974 break;
7975
cce8749e
CH
7976 switch (GET_CODE (this_insn))
7977 {
7978 case CODE_LABEL:
7979 /* Succeed if it is the target label, otherwise fail since
7980 control falls in from somewhere else. */
7981 if (this_insn == label)
7982 {
ff9940b0
RE
7983 if (jump_clobbers)
7984 {
7985 arm_ccfsm_state = 2;
7986 this_insn = next_nonnote_insn (this_insn);
7987 }
7988 else
7989 arm_ccfsm_state = 1;
cce8749e
CH
7990 succeed = TRUE;
7991 }
7992 else
7993 fail = TRUE;
7994 break;
7995
ff9940b0 7996 case BARRIER:
cce8749e 7997 /* Succeed if the following insn is the target label.
ff9940b0
RE
7998 Otherwise fail.
7999 If return insns are used then the last insn in a function
6354dc9b 8000 will be a barrier. */
cce8749e 8001 this_insn = next_nonnote_insn (this_insn);
ff9940b0 8002 if (this_insn && this_insn == label)
cce8749e 8003 {
ff9940b0
RE
8004 if (jump_clobbers)
8005 {
8006 arm_ccfsm_state = 2;
8007 this_insn = next_nonnote_insn (this_insn);
8008 }
8009 else
8010 arm_ccfsm_state = 1;
cce8749e
CH
8011 succeed = TRUE;
8012 }
8013 else
8014 fail = TRUE;
8015 break;
8016
ff9940b0 8017 case CALL_INSN:
2b835d68 8018 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 8019 calls. */
2b835d68 8020 if (TARGET_APCS_32)
bd9c7e23
RE
8021 {
8022 /* Succeed if the following insn is the target label,
8023 or if the following two insns are a barrier and
8024 the target label. */
8025 this_insn = next_nonnote_insn (this_insn);
8026 if (this_insn && GET_CODE (this_insn) == BARRIER)
8027 this_insn = next_nonnote_insn (this_insn);
8028
8029 if (this_insn && this_insn == label
b36ba79f 8030 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
8031 {
8032 if (jump_clobbers)
8033 {
8034 arm_ccfsm_state = 2;
8035 this_insn = next_nonnote_insn (this_insn);
8036 }
8037 else
8038 arm_ccfsm_state = 1;
8039 succeed = TRUE;
8040 }
8041 else
8042 fail = TRUE;
8043 }
ff9940b0 8044 break;
2b835d68 8045
cce8749e
CH
8046 case JUMP_INSN:
8047 /* If this is an unconditional branch to the same label, succeed.
8048 If it is to another label, do nothing. If it is conditional,
8049 fail. */
914a3b8c 8050 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 8051
ed4c4348 8052 scanbody = PATTERN (this_insn);
ff9940b0
RE
8053 if (GET_CODE (scanbody) == SET
8054 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
8055 {
8056 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
8057 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
8058 {
8059 arm_ccfsm_state = 2;
8060 succeed = TRUE;
8061 }
8062 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
8063 fail = TRUE;
8064 }
b36ba79f
RE
8065	  /* Fail if a conditional return is undesirable (e.g. on a
8066 StrongARM), but still allow this if optimizing for size. */
8067 else if (GET_CODE (scanbody) == RETURN
5895f793
RE
8068 && !use_return_insn (TRUE)
8069 && !optimize_size)
b36ba79f 8070 fail = TRUE;
ff9940b0
RE
8071 else if (GET_CODE (scanbody) == RETURN
8072 && seeking_return)
8073 {
8074 arm_ccfsm_state = 2;
8075 succeed = TRUE;
8076 }
8077 else if (GET_CODE (scanbody) == PARALLEL)
8078 {
8079 switch (get_attr_conds (this_insn))
8080 {
8081 case CONDS_NOCOND:
8082 break;
8083 default:
8084 fail = TRUE;
8085 break;
8086 }
8087 }
4e67550b
RE
8088 else
8089	    fail = TRUE;	/* Unrecognized jump (e.g. epilogue).  */
8090
cce8749e
CH
8091 break;
8092
8093 case INSN:
ff9940b0
RE
8094 /* Instructions using or affecting the condition codes make it
8095 fail. */
ed4c4348 8096 scanbody = PATTERN (this_insn);
5895f793
RE
8097 if (!(GET_CODE (scanbody) == SET
8098 || GET_CODE (scanbody) == PARALLEL)
74641843 8099 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
8100 fail = TRUE;
8101 break;
8102
8103 default:
8104 break;
8105 }
8106 }
8107 if (succeed)
8108 {
ff9940b0 8109 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 8110 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
8111 else if (seeking_return || arm_ccfsm_state == 2)
8112 {
8113 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
8114 {
8115 this_insn = next_nonnote_insn (this_insn);
8116 if (this_insn && (GET_CODE (this_insn) == BARRIER
8117 || GET_CODE (this_insn) == CODE_LABEL))
8118 abort ();
8119 }
8120 if (!this_insn)
8121 {
8122	      /* Oh, dear! We ran off the end; give up.  */
8123 recog (PATTERN (insn), insn, NULL_PTR);
8124 arm_ccfsm_state = 0;
abaa26e5 8125 arm_target_insn = NULL;
ff9940b0
RE
8126 return;
8127 }
8128 arm_target_insn = this_insn;
8129 }
cce8749e
CH
8130 else
8131 abort ();
ff9940b0
RE
8132 if (jump_clobbers)
8133 {
8134 if (reverse)
8135 abort ();
8136 arm_current_cc =
8137 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
8138 0), 0), 1));
8139 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
8140 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8141 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
8142 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8143 }
8144 else
8145 {
8146 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
8147 what it was. */
8148 if (!reverse)
8149 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
8150 0));
8151 }
cce8749e 8152
cce8749e
CH
8153 if (reverse || then_not_else)
8154 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8155 }
d5b7b3ae 8156
1ccbefce 8157 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 8158 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 8159 across this call; since the insn has been recognized already we
b020fd92 8160      call recog directly).  */
ff9940b0 8161 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 8162 }
f3bb6135 8163}
cce8749e 8164
d5b7b3ae
RE
8165int
8166arm_regno_class (regno)
8167 int regno;
8168{
8169 if (TARGET_THUMB)
8170 {
8171 if (regno == STACK_POINTER_REGNUM)
8172 return STACK_REG;
8173 if (regno == CC_REGNUM)
8174 return CC_REG;
8175 if (regno < 8)
8176 return LO_REGS;
8177 return HI_REGS;
8178 }
8179
8180 if ( regno <= LAST_ARM_REGNUM
8181 || regno == FRAME_POINTER_REGNUM
8182 || regno == ARG_POINTER_REGNUM)
8183 return GENERAL_REGS;
8184
8185 if (regno == CC_REGNUM)
8186 return NO_REGS;
8187
8188 return FPU_REGS;
8189}
8190
8191/* Handle a special case when computing the offset
8192 of an argument from the frame pointer. */
8193int
8194arm_debugger_arg_offset (value, addr)
8195 int value;
8196 rtx addr;
8197{
8198 rtx insn;
8199
8200 /* We are only interested if dbxout_parms() failed to compute the offset. */
8201 if (value != 0)
8202 return 0;
8203
8204 /* We can only cope with the case where the address is held in a register. */
8205 if (GET_CODE (addr) != REG)
8206 return 0;
8207
8208 /* If we are using the frame pointer to point at the argument, then
8209 an offset of 0 is correct. */
cd2b33d0 8210 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
8211 return 0;
8212
8213 /* If we are using the stack pointer to point at the
8214 argument, then an offset of 0 is correct. */
5895f793 8215 if ((TARGET_THUMB || !frame_pointer_needed)
d5b7b3ae
RE
8216 && REGNO (addr) == SP_REGNUM)
8217 return 0;
8218
8219 /* Oh dear. The argument is pointed to by a register rather
8220 than being held in a register, or being stored at a known
8221 offset from the frame pointer. Since GDB only understands
8222 those two kinds of argument we must translate the address
8223 held in the register into an offset from the frame pointer.
8224 We do this by searching through the insns for the function
8225 looking to see where this register gets its value. If the
8226 register is initialised from the frame pointer plus an offset
8227 then we are in luck and we can continue, otherwise we give up.
8228
8229 This code is exercised by producing debugging information
8230 for a function with arguments like this:
8231
8232 double func (double a, double b, int c, double d) {return d;}
8233
8234 Without this code the stab for parameter 'd' will be set to
8235 an offset of 0 from the frame pointer, rather than 8. */
8236
8237 /* The if() statement says:
8238
8239 If the insn is a normal instruction
8240 and if the insn is setting the value in a register
8241 and if the register being set is the register holding the address of the argument
8242	     and if the address is computed by an addition
8243 that involves adding to a register
8244 which is the frame pointer
8245 a constant integer
8246
8247 then... */
8248
8249 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8250 {
8251 if ( GET_CODE (insn) == INSN
8252 && GET_CODE (PATTERN (insn)) == SET
8253 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8254 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8255 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 8256 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
8257 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8258 )
8259 {
8260 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8261
8262 break;
8263 }
8264 }
8265
8266 if (value == 0)
8267 {
8268 debug_rtx (addr);
8269 warning ("Unable to compute real location of stacked parameter");
8270 value = 8; /* XXX magic hack */
8271 }
8272
8273 return value;
8274}
8275
8276\f
8277/* Recursively search through all of the blocks in a function
8278 checking to see if any of the variables created in that
8279 function match the RTX called 'orig'. If they do then
8280 replace them with the RTX called 'new'. */
8281
8282static void
8283replace_symbols_in_block (block, orig, new)
8284 tree block;
8285 rtx orig;
8286 rtx new;
8287{
8288 for (; block; block = BLOCK_CHAIN (block))
8289 {
8290 tree sym;
8291
5895f793 8292 if (!TREE_USED (block))
d5b7b3ae
RE
8293 continue;
8294
8295 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8296 {
8297 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8298 || DECL_IGNORED_P (sym)
8299 || TREE_CODE (sym) != VAR_DECL
8300 || DECL_EXTERNAL (sym)
5895f793 8301 || !rtx_equal_p (DECL_RTL (sym), orig)
d5b7b3ae
RE
8302 )
8303 continue;
8304
8305 DECL_RTL (sym) = new;
8306 }
8307
8308 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8309 }
8310}
8311
8312/* Return the number (counting from 0) of the least significant set
8313 bit in MASK. */
8314#ifdef __GNUC__
8315inline
8316#endif
8317static int
8318number_of_first_bit_set (mask)
8319 int mask;
8320{
8321 int bit;
8322
8323 for (bit = 0;
8324 (mask & (1 << bit)) == 0;
5895f793 8325 ++bit)
d5b7b3ae
RE
8326 continue;
8327
8328 return bit;
8329}
8330
8331/* Generate code to return from a thumb function.
8332 If 'reg_containing_return_addr' is -1, then the return address is
8333 actually on the stack, at the stack pointer. */
8334static void
8335thumb_exit (f, reg_containing_return_addr, eh_ofs)
8336 FILE * f;
8337 int reg_containing_return_addr;
8338 rtx eh_ofs;
8339{
8340 unsigned regs_available_for_popping;
8341 unsigned regs_to_pop;
8342 int pops_needed;
8343 unsigned available;
8344 unsigned required;
8345 int mode;
8346 int size;
8347 int restore_a4 = FALSE;
8348
8349 /* Compute the registers we need to pop. */
8350 regs_to_pop = 0;
8351 pops_needed = 0;
8352
8353 /* There is an assumption here, that if eh_ofs is not NULL, the
8354 normal return address will have been pushed. */
8355 if (reg_containing_return_addr == -1 || eh_ofs)
8356 {
8357 /* When we are generating a return for __builtin_eh_return,
8358 reg_containing_return_addr must specify the return regno. */
8359 if (eh_ofs && reg_containing_return_addr == -1)
8360 abort ();
8361
8362 regs_to_pop |= 1 << LR_REGNUM;
5895f793 8363 ++pops_needed;
d5b7b3ae
RE
8364 }
8365
8366 if (TARGET_BACKTRACE)
8367 {
8368 /* Restore the (ARM) frame pointer and stack pointer. */
8369 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8370 pops_needed += 2;
8371 }
8372
8373 /* If there is nothing to pop then just emit the BX instruction and
8374 return. */
8375 if (pops_needed == 0)
8376 {
8377 if (eh_ofs)
8378 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8379
8380 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8381 return;
8382 }
8383 /* Otherwise if we are not supporting interworking and we have not created
8384 a backtrace structure and the function was not entered in ARM mode then
8385 just pop the return address straight into the PC. */
5895f793
RE
8386 else if (!TARGET_INTERWORK
8387 && !TARGET_BACKTRACE
8388 && !is_called_in_ARM_mode (current_function_decl))
d5b7b3ae
RE
8389 {
8390 if (eh_ofs)
8391 {
8392 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
8393 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8394 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8395 }
8396 else
8397 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
8398
8399 return;
8400 }
8401
8402 /* Find out how many of the (return) argument registers we can corrupt. */
8403 regs_available_for_popping = 0;
8404
8405 /* If returning via __builtin_eh_return, the bottom three registers
8406 all contain information needed for the return. */
8407 if (eh_ofs)
8408 size = 12;
8409 else
8410 {
8411#ifdef RTX_CODE
8412	      /* If we can, deduce the registers used from the function's
8413	         return value.  This is more reliable than examining
8414 regs_ever_live[] because that will be set if the register is
8415 ever used in the function, not just if the register is used
8416 to hold a return value. */
8417
8418 if (current_function_return_rtx != 0)
8419 mode = GET_MODE (current_function_return_rtx);
8420 else
8421#endif
8422 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8423
8424 size = GET_MODE_SIZE (mode);
8425
8426 if (size == 0)
8427 {
8428 /* In a void function we can use any argument register.
8429 In a function that returns a structure on the stack
8430 we can use the second and third argument registers. */
8431 if (mode == VOIDmode)
8432 regs_available_for_popping =
8433 (1 << ARG_REGISTER (1))
8434 | (1 << ARG_REGISTER (2))
8435 | (1 << ARG_REGISTER (3));
8436 else
8437 regs_available_for_popping =
8438 (1 << ARG_REGISTER (2))
8439 | (1 << ARG_REGISTER (3));
8440 }
8441 else if (size <= 4)
8442 regs_available_for_popping =
8443 (1 << ARG_REGISTER (2))
8444 | (1 << ARG_REGISTER (3));
8445 else if (size <= 8)
8446 regs_available_for_popping =
8447 (1 << ARG_REGISTER (3));
8448 }
8449
8450 /* Match registers to be popped with registers into which we pop them. */
8451 for (available = regs_available_for_popping,
8452 required = regs_to_pop;
8453 required != 0 && available != 0;
8454 available &= ~(available & - available),
8455 required &= ~(required & - required))
8456 -- pops_needed;
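  /* Note on the loop above: in two's complement, (x & -x) isolates the
     lowest set bit of x (for example 0x0c & -0x0c == 0x04), so each
     iteration retires one popped register paired with one register to
     pop into.  */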
8457
8458 /* If we have any popping registers left over, remove them. */
8459 if (available > 0)
5895f793 8460 regs_available_for_popping &= ~available;
d5b7b3ae
RE
8461
8462 /* Otherwise if we need another popping register we can use
8463 the fourth argument register. */
8464 else if (pops_needed)
8465 {
8466 /* If we have not found any free argument registers and
8467 reg a4 contains the return address, we must move it. */
8468 if (regs_available_for_popping == 0
8469 && reg_containing_return_addr == LAST_ARG_REGNUM)
8470 {
8471 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8472 reg_containing_return_addr = LR_REGNUM;
8473 }
8474 else if (size > 12)
8475 {
8476 /* Register a4 is being used to hold part of the return value,
8477 but we have dire need of a free, low register. */
8478 restore_a4 = TRUE;
8479
8480 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
8481 }
8482
8483 if (reg_containing_return_addr != LAST_ARG_REGNUM)
8484 {
8485 /* The fourth argument register is available. */
8486 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
8487
5895f793 8488 --pops_needed;
d5b7b3ae
RE
8489 }
8490 }
8491
8492 /* Pop as many registers as we can. */
8493 thumb_pushpop (f, regs_available_for_popping, FALSE);
8494
8495 /* Process the registers we popped. */
8496 if (reg_containing_return_addr == -1)
8497 {
8498 /* The return address was popped into the lowest numbered register. */
5895f793 8499 regs_to_pop &= ~(1 << LR_REGNUM);
d5b7b3ae
RE
8500
8501 reg_containing_return_addr =
8502 number_of_first_bit_set (regs_available_for_popping);
8503
8504	      /* Remove this register from the mask of available registers, so that
8505	         the return address will not be corrupted by further pops.  */
5895f793 8506 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
d5b7b3ae
RE
8507 }
8508
8509 /* If we popped other registers then handle them here. */
8510 if (regs_available_for_popping)
8511 {
8512 int frame_pointer;
8513
8514 /* Work out which register currently contains the frame pointer. */
8515 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
8516
8517 /* Move it into the correct place. */
8518 asm_fprintf (f, "\tmov\t%r, %r\n",
8519 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
8520
8521 /* (Temporarily) remove it from the mask of popped registers. */
5895f793
RE
8522 regs_available_for_popping &= ~(1 << frame_pointer);
8523 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
d5b7b3ae
RE
8524
8525 if (regs_available_for_popping)
8526 {
8527 int stack_pointer;
8528
8529 /* We popped the stack pointer as well,
8530 find the register that contains it. */
8531 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
8532
8533 /* Move it into the stack register. */
8534 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
8535
8536 /* At this point we have popped all necessary registers, so
8537 do not worry about restoring regs_available_for_popping
8538 to its correct value:
8539
8540 assert (pops_needed == 0)
8541 assert (regs_available_for_popping == (1 << frame_pointer))
8542 assert (regs_to_pop == (1 << STACK_POINTER)) */
8543 }
8544 else
8545 {
8546	  /* Since we have just moved the popped value into the frame
8547 pointer, the popping register is available for reuse, and
8548 we know that we still have the stack pointer left to pop. */
8549 regs_available_for_popping |= (1 << frame_pointer);
8550 }
8551 }
8552
8553 /* If we still have registers left on the stack, but we no longer have
8554 any registers into which we can pop them, then we must move the return
8555 address into the link register and make available the register that
8556 contained it. */
8557 if (regs_available_for_popping == 0 && pops_needed > 0)
8558 {
8559 regs_available_for_popping |= 1 << reg_containing_return_addr;
8560
8561 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
8562 reg_containing_return_addr);
8563
8564 reg_containing_return_addr = LR_REGNUM;
8565 }
8566
8567 /* If we have registers left on the stack then pop some more.
8568 We know that at most we will want to pop FP and SP. */
8569 if (pops_needed > 0)
8570 {
8571 int popped_into;
8572 int move_to;
8573
8574 thumb_pushpop (f, regs_available_for_popping, FALSE);
8575
8576 /* We have popped either FP or SP.
8577 Move whichever one it is into the correct register. */
8578 popped_into = number_of_first_bit_set (regs_available_for_popping);
8579 move_to = number_of_first_bit_set (regs_to_pop);
8580
8581 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
8582
5895f793 8583 regs_to_pop &= ~(1 << move_to);
d5b7b3ae 8584
5895f793 8585 --pops_needed;
d5b7b3ae
RE
8586 }
8587
8588 /* If we still have not popped everything then we must have only
8589 had one register available to us and we are now popping the SP. */
8590 if (pops_needed > 0)
8591 {
8592 int popped_into;
8593
8594 thumb_pushpop (f, regs_available_for_popping, FALSE);
8595
8596 popped_into = number_of_first_bit_set (regs_available_for_popping);
8597
8598 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
8599 /*
8600 assert (regs_to_pop == (1 << STACK_POINTER))
8601 assert (pops_needed == 1)
8602 */
8603 }
8604
8605 /* If necessary restore the a4 register. */
8606 if (restore_a4)
8607 {
8608 if (reg_containing_return_addr != LR_REGNUM)
8609 {
8610 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8611 reg_containing_return_addr = LR_REGNUM;
8612 }
8613
8614 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
8615 }
8616
8617 if (eh_ofs)
8618 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8619
8620 /* Return to caller. */
8621 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8622}
8623
8624/* Emit code to push or pop registers to or from the stack. */
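/* For example (illustrative values): a MASK of 0x4090 -- bits 4, 7 and
   14 -- with PUSH nonzero is written out as "push {r4, r7, lr}".  */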
8625static void
8626thumb_pushpop (f, mask, push)
8627 FILE * f;
8628 int mask;
8629 int push;
8630{
8631 int regno;
8632 int lo_mask = mask & 0xFF;
8633
5895f793 8634 if (lo_mask == 0 && !push && (mask & (1 << 15)))
d5b7b3ae
RE
8635 {
8636	      /* Special case: do not generate a POP PC statement here; do it in
8637	         thumb_exit ().  */
8638 thumb_exit (f, -1, NULL_RTX);
8639 return;
8640 }
8641
8642 fprintf (f, "\t%s\t{", push ? "push" : "pop");
8643
8644 /* Look at the low registers first. */
5895f793 8645 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
d5b7b3ae
RE
8646 {
8647 if (lo_mask & 1)
8648 {
8649 asm_fprintf (f, "%r", regno);
8650
8651 if ((lo_mask & ~1) != 0)
8652 fprintf (f, ", ");
8653 }
8654 }
8655
8656 if (push && (mask & (1 << LR_REGNUM)))
8657 {
8658 /* Catch pushing the LR. */
8659 if (mask & 0xFF)
8660 fprintf (f, ", ");
8661
8662 asm_fprintf (f, "%r", LR_REGNUM);
8663 }
8664 else if (!push && (mask & (1 << PC_REGNUM)))
8665 {
8666 /* Catch popping the PC. */
8667 if (TARGET_INTERWORK || TARGET_BACKTRACE)
8668 {
8669	      /* The PC is never popped directly; instead
8670 it is popped into r3 and then BX is used. */
8671 fprintf (f, "}\n");
8672
8673 thumb_exit (f, -1, NULL_RTX);
8674
8675 return;
8676 }
8677 else
8678 {
8679 if (mask & 0xFF)
8680 fprintf (f, ", ");
8681
8682 asm_fprintf (f, "%r", PC_REGNUM);
8683 }
8684 }
8685
8686 fprintf (f, "}\n");
8687}
8688\f
8689void
8690thumb_final_prescan_insn (insn)
8691 rtx insn;
8692{
d5b7b3ae 8693 if (flag_print_asm_name)
9d98a694
AO
8694 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
8695 INSN_ADDRESSES (INSN_UID (insn)));
d5b7b3ae
RE
8696}
8697
8698int
8699thumb_shiftable_const (val)
8700 unsigned HOST_WIDE_INT val;
8701{
8702 unsigned HOST_WIDE_INT mask = 0xff;
8703 int i;
8704
8705 if (val == 0) /* XXX */
8706 return 0;
8707
8708 for (i = 0; i < 25; i++)
8709 if ((val & (mask << i)) == val)
8710 return 1;
8711
8712 return 0;
8713}
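/* For example, thumb_shiftable_const (0xff000) returns 1, since 0xff000
   is 0xff shifted left by 12; thumb_shiftable_const (0x101) returns 0,
   because no 8-bit window covers both set bits.  */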
8714
8715/* Returns non-zero if the current function contains,
8716 or might contain a far jump. */
8717int
8718thumb_far_jump_used_p (int in_prologue)
8719{
8720 rtx insn;
8721
8722 /* This test is only important for leaf functions. */
5895f793 8723 /* assert (!leaf_function_p ()); */
d5b7b3ae
RE
8724
8725 /* If we have already decided that far jumps may be used,
8726 do not bother checking again, and always return true even if
8727 it turns out that they are not being used. Once we have made
8728 the decision that far jumps are present (and that hence the link
8729 register will be pushed onto the stack) we cannot go back on it. */
8730 if (cfun->machine->far_jump_used)
8731 return 1;
8732
8733 /* If this function is not being called from the prologue/epilogue
8734 generation code then it must be being called from the
8735 INITIAL_ELIMINATION_OFFSET macro. */
5895f793 8736 if (!in_prologue)
d5b7b3ae
RE
8737 {
8738 /* In this case we know that we are being asked about the elimination
8739 of the arg pointer register. If that register is not being used,
8740 then there are no arguments on the stack, and we do not have to
8741 worry that a far jump might force the prologue to push the link
8742 register, changing the stack offsets. In this case we can just
8743 return false, since the presence of far jumps in the function will
8744 not affect stack offsets.
8745
8746 If the arg pointer is live (or if it was live, but has now been
8747 eliminated and so set to dead) then we do have to test to see if
8748 the function might contain a far jump. This test can lead to some
8749	         false negatives, since before reload is completed the length of
8750 branch instructions is not known, so gcc defaults to returning their
8751 longest length, which in turn sets the far jump attribute to true.
8752
8753 A false negative will not result in bad code being generated, but it
8754 will result in a needless push and pop of the link register. We
8755 hope that this does not occur too often. */
8756 if (regs_ever_live [ARG_POINTER_REGNUM])
8757 cfun->machine->arg_pointer_live = 1;
5895f793 8758 else if (!cfun->machine->arg_pointer_live)
d5b7b3ae
RE
8759 return 0;
8760 }
8761
8762 /* Check to see if the function contains a branch
8763 insn with the far jump attribute set. */
8764 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8765 {
8766 if (GET_CODE (insn) == JUMP_INSN
8767 /* Ignore tablejump patterns. */
8768 && GET_CODE (PATTERN (insn)) != ADDR_VEC
8769 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
8770 && get_attr_far_jump (insn) == FAR_JUMP_YES
8771 )
8772 {
8773	  /* Record the fact that we have decided that
8774 the function does use far jumps. */
8775 cfun->machine->far_jump_used = 1;
8776 return 1;
8777 }
8778 }
8779
8780 return 0;
8781}
8782
8783/* Return non-zero if FUNC must be entered in ARM mode. */
8784int
8785is_called_in_ARM_mode (func)
8786 tree func;
8787{
8788 if (TREE_CODE (func) != FUNCTION_DECL)
8789 abort ();
8790
8791	  /* Ignore the problem of functions whose address is taken.  */
8792 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
8793 return TRUE;
8794
8795#ifdef ARM_PE
8796 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
8797#else
8798 return FALSE;
8799#endif
8800}
8801
8802/* The bits which aren't usefully expanded as rtl. */
cd2b33d0 8803const char *
d5b7b3ae
RE
8804thumb_unexpanded_epilogue ()
8805{
8806 int regno;
8807 int live_regs_mask = 0;
8808 int high_regs_pushed = 0;
8809 int leaf_function = leaf_function_p ();
8810 int had_to_push_lr;
8811 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8812
8813 if (return_used_this_function)
8814 return "";
8815
8816 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
5895f793
RE
8817 if (regs_ever_live[regno] && !call_used_regs[regno]
8818 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
d5b7b3ae
RE
8819 live_regs_mask |= 1 << regno;
8820
8821 for (regno = 8; regno < 13; regno++)
8822 {
5895f793
RE
8823 if (regs_ever_live[regno] && !call_used_regs[regno]
8824 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8825 high_regs_pushed++;
d5b7b3ae
RE
8826 }
8827
8828 /* The prolog may have pushed some high registers to use as
8829	     work registers, e.g. the testsuite file:
8830 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
8831 compiles to produce:
8832 push {r4, r5, r6, r7, lr}
8833 mov r7, r9
8834 mov r6, r8
8835 push {r6, r7}
8836 as part of the prolog. We have to undo that pushing here. */
8837
8838 if (high_regs_pushed)
8839 {
8840 int mask = live_regs_mask;
8841 int next_hi_reg;
8842 int size;
8843 int mode;
8844
8845#ifdef RTX_CODE
8846	      /* If we can, deduce the registers used from the function's return value.
8847	         This is more reliable than examining regs_ever_live[] because that
8848 will be set if the register is ever used in the function, not just if
8849 the register is used to hold a return value. */
8850
8851 if (current_function_return_rtx != 0)
8852 mode = GET_MODE (current_function_return_rtx);
8853 else
8854#endif
8855 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8856
8857 size = GET_MODE_SIZE (mode);
8858
8859	      /* Unless we are returning a type of size > 12, register r3 is
8860 available. */
8861 if (size < 13)
8862 mask |= 1 << 3;
8863
8864 if (mask == 0)
8865 /* Oh dear! We have no low registers into which we can pop
8866 high registers! */
8867 fatal ("No low registers available for popping high registers");
8868
8869 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
8870 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
8871 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
8872 break;
8873
8874 while (high_regs_pushed)
8875 {
8876 /* Find lo register(s) into which the high register(s) can
8877 be popped. */
8878 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8879 {
8880 if (mask & (1 << regno))
8881 high_regs_pushed--;
8882 if (high_regs_pushed == 0)
8883 break;
8884 }
8885
8886	      mask &= (2 << regno) - 1;	/* A no-op if regno == 8.  */
8887
8888 /* Pop the values into the low register(s). */
8889 thumb_pushpop (asm_out_file, mask, 0);
8890
8891 /* Move the value(s) into the high registers. */
8892 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8893 {
8894 if (mask & (1 << regno))
8895 {
8896 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
8897 regno);
8898
8899 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
5895f793
RE
8900 if (regs_ever_live[next_hi_reg]
8901 && !call_used_regs[next_hi_reg]
8902 && !(TARGET_SINGLE_PIC_BASE
8903 && (next_hi_reg == arm_pic_register)))
d5b7b3ae
RE
8904 break;
8905 }
8906 }
8907 }
8908 }
8909
5895f793 8910 had_to_push_lr = (live_regs_mask || !leaf_function
d5b7b3ae
RE
8911 || thumb_far_jump_used_p (1));
8912
8913 if (TARGET_BACKTRACE
8914 && ((live_regs_mask & 0xFF) == 0)
8915 && regs_ever_live [LAST_ARG_REGNUM] != 0)
8916 {
8917 /* The stack backtrace structure creation code had to
8918 push R7 in order to get a work register, so we pop
8919 it now. */
8920 live_regs_mask |= (1 << LAST_LO_REGNUM);
8921 }
8922
8923 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
8924 {
8925 if (had_to_push_lr
5895f793
RE
8926 && !is_called_in_ARM_mode (current_function_decl)
8927 && !eh_ofs)
d5b7b3ae
RE
8928 live_regs_mask |= 1 << PC_REGNUM;
8929
8930 /* Either no argument registers were pushed or a backtrace
8931 structure was created which includes an adjusted stack
8932 pointer, so just pop everything. */
8933 if (live_regs_mask)
8934 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8935
8936 if (eh_ofs)
8937 thumb_exit (asm_out_file, 2, eh_ofs);
8938 /* We have either just popped the return address into the
 8939	 PC, or it was kept in LR for the entire function, or
8940 it is still on the stack because we do not want to
8941 return by doing a pop {pc}. */
8942 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
8943 thumb_exit (asm_out_file,
8944 (had_to_push_lr
8945 && is_called_in_ARM_mode (current_function_decl)) ?
8946 -1 : LR_REGNUM, NULL_RTX);
8947 }
8948 else
8949 {
8950 /* Pop everything but the return address. */
5895f793 8951 live_regs_mask &= ~(1 << PC_REGNUM);
8952
8953 if (live_regs_mask)
8954 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8955
8956 if (had_to_push_lr)
8957 /* Get the return address into a temporary register. */
8958 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
8959
8960 /* Remove the argument registers that were pushed onto the stack. */
8961 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
8962 SP_REGNUM, SP_REGNUM,
8963 current_function_pretend_args_size);
8964
8965 if (eh_ofs)
8966 thumb_exit (asm_out_file, 2, eh_ofs);
8967 else
8968 thumb_exit (asm_out_file,
8969 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
8970 }
8971
8972 return "";
8973}
8974
8975/* Functions to save and restore machine-specific function data. */
8976
8977static void
8978arm_mark_machine_status (p)
8979 struct function * p;
8980{
8981 struct machine_function *machine = p->machine;
8982
8983 ggc_mark_rtx (machine->ra_rtx);
8984 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
8985}
8986
8987static void
8988arm_init_machine_status (p)
8989 struct function * p;
8990{
8991 p->machine =
8992 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
8993}
8994
8995/* Return an RTX indicating where the return address to the
8996 calling function can be found. */
8997rtx
8998arm_return_addr (count, frame)
8999 int count;
9000 rtx frame ATTRIBUTE_UNUSED;
9001{
9002 rtx reg;
9003
9004 if (count != 0)
9005 return NULL_RTX;
9006
9007 reg = cfun->machine->ra_rtx;
9008
9009 if (reg == NULL)
9010 {
9011 rtx init;
9012
9013 /* No rtx yet. Invent one, and initialize it for r14 (lr) in
9014 the prologue. */
9015 reg = gen_reg_rtx (Pmode);
9016 cfun->machine->ra_rtx = reg;
9017
5895f793 9018 if (!TARGET_APCS_32)
9019 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
9020 GEN_INT (RETURN_ADDR_MASK26));
9021 else
9022 init = gen_rtx_REG (Pmode, LR_REGNUM);
9023
9024 init = gen_rtx_SET (VOIDmode, reg, init);
9025
9026 /* Emit the insn to the prologue with the other argument copies. */
9027 push_topmost_sequence ();
9028 emit_insn_after (init, get_insns ());
9029 pop_topmost_sequence ();
9030 }
9031
9032 return reg;
9033}
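/* Usage sketch (illustrative only, not part of the compiler): a
   source-level call such as

     void * ra = __builtin_return_address (0);

   reaches this function through the RETURN_ADDR_RTX macro and returns
   the pseudo initialized from LR above; a request with count != 0 (an
   outer frame) yields NULL_RTX.  */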
9034
9035/* Do anything needed before RTL is emitted for each function. */
9036void
9037arm_init_expanders ()
9038{
9039 /* Arrange to initialize and mark the machine per-function status. */
9040 init_machine_status = arm_init_machine_status;
9041 mark_machine_status = arm_mark_machine_status;
9042}
9043
9044/* Generate the rest of a function's prologue. */
9045void
9046thumb_expand_prologue ()
9047{
9048 HOST_WIDE_INT amount = (get_frame_size ()
9049 + current_function_outgoing_args_size);
9050
9051 /* Naked functions don't have prologues. */
9052 if (arm_naked_function_p (current_function_decl))
9053 return;
9054
9055 if (frame_pointer_needed)
9056 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
9057
9058 if (amount)
9059 {
9060 amount = ROUND_UP (amount);
9061
9062 if (amount < 512)
9063 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5895f793 9064 GEN_INT (-amount)));
9065 else
9066 {
9067 int regno;
9068 rtx reg;
9069
9070 /* The stack decrement is too big for an immediate value in a single
9071 insn. In theory we could issue multiple subtracts, but after
9072 three of them it becomes more space efficient to place the full
 9073	     value in the constant pool and load it into a register.  (Also the
 9074	     ARM debugger really likes to see only one stack decrement per
 9075	     function.)  So instead we look for a scratch register into which
 9076	     we can load the decrement, and then we subtract this from the
 9077	     stack pointer.  Unfortunately, on the Thumb the only available
 9078	     scratch registers are the argument registers, and we cannot use
 9079	     these as they may hold arguments to the function.  Instead we
 9080	     attempt to locate a call-preserved register which is used by this
9081 function. If we can find one, then we know that it will have
9082 been pushed at the start of the prologue and so we can corrupt
9083 it now. */
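	  /* For example (a sketch; the chosen register depends on the
	     function), a 1024 byte frame where r4 is call-saved and
	     already pushed would emit:

		ldr	r4, .Ln		@ .Ln holds -1024 in the pool
		add	sp, sp, r4

	     instead of a chain of "sub sp, #N" instructions.  */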
9084 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
9085 if (regs_ever_live[regno]
9086 && !call_used_regs[regno] /* Paranoia */
9087 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
9088 && !(frame_pointer_needed
9089 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
9090 break;
9091
9092 if (regno > LAST_LO_REGNUM) /* Very unlikely */
9093 {
9094 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
9095
 9097	      /* Choose an arbitrary, non-argument low register. */
9097 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
9098
9099 /* Save it by copying it into a high, scratch register. */
9100 emit_insn (gen_movsi (spare, reg));
9101
9102 /* Decrement the stack. */
5895f793 9103 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9104 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9105 reg));
9106
9107 /* Restore the low register's original value. */
9108 emit_insn (gen_movsi (reg, spare));
9109
9110 /* Emit a USE of the restored scratch register, so that flow
9111 analysis will not consider the restore redundant. The
9112 register won't be used again in this function and isn't
9113 restored by the epilogue. */
9114 emit_insn (gen_rtx_USE (VOIDmode, reg));
9115 }
9116 else
9117 {
9118 reg = gen_rtx (REG, SImode, regno);
9119
5895f793 9120 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9121 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9122 reg));
9123 }
9124 }
9125 }
9126
9127 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9128 emit_insn (gen_blockage ());
9129}
9130
9131void
9132thumb_expand_epilogue ()
9133{
9134 HOST_WIDE_INT amount = (get_frame_size ()
9135 + current_function_outgoing_args_size);
9136
9137 /* Naked functions don't have epilogues. */
9138 if (arm_naked_function_p (current_function_decl))
9139 return;
9140
9141 if (frame_pointer_needed)
9142 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
9143 else if (amount)
9144 {
9145 amount = ROUND_UP (amount);
9146
9147 if (amount < 512)
9148 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9149 GEN_INT (amount)));
9150 else
9151 {
9152 /* r3 is always free in the epilogue. */
9153 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
9154
9155 emit_insn (gen_movsi (reg, GEN_INT (amount)));
9156 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
9157 }
9158 }
9159
9160 /* Emit a USE (stack_pointer_rtx), so that
9161 the stack adjustment will not be deleted. */
9162 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9163
9164 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9165 emit_insn (gen_blockage ());
9166}
9167
9168void
9169output_thumb_prologue (f)
9170 FILE * f;
9171{
9172 int live_regs_mask = 0;
9173 int high_regs_pushed = 0;
9174 int store_arg_regs = 0;
9175 int regno;
9176
9177 if (arm_naked_function_p (current_function_decl))
9178 return;
9179
9180 if (is_called_in_ARM_mode (current_function_decl))
9181 {
9182 const char * name;
9183
9184 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9185 abort ();
9186 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9187 abort ();
9188 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9189
9190 /* Generate code sequence to switch us into Thumb mode. */
9191 /* The .code 32 directive has already been emitted by
6d77b53e 9192 ASM_DECLARE_FUNCTION_NAME. */
9193 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9194 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9195
9196 /* Generate a label, so that the debugger will notice the
9197 change in instruction sets. This label is also used by
9198 the assembler to bypass the ARM code when this function
9199 is called from a Thumb encoded function elsewhere in the
9200 same file. Hence the definition of STUB_NAME here must
9201 agree with the definition in gas/config/tc-arm.c */
9202
9203#define STUB_NAME ".real_start_of"
9204
9205 asm_fprintf (f, "\t.code\t16\n");
9206#ifdef ARM_PE
9207 if (arm_dllexport_name_p (name))
e5951263 9208 name = arm_strip_name_encoding (name);
9209#endif
9210 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9211 asm_fprintf (f, "\t.thumb_func\n");
9212 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9213 }
9214
9215 if (current_function_anonymous_args && current_function_pretend_args_size)
9216 store_arg_regs = 1;
9217
9218 if (current_function_pretend_args_size)
9219 {
9220 if (store_arg_regs)
9221 {
9222 int num_pushes;
9223
9224 asm_fprintf (f, "\tpush\t{");
9225
9226 num_pushes = NUM_INTS (current_function_pretend_args_size);
9227
9228 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9229 regno <= LAST_ARG_REGNUM;
5895f793 9230 regno++)
9231 asm_fprintf (f, "%r%s", regno,
9232 regno == LAST_ARG_REGNUM ? "" : ", ");
9233
9234 asm_fprintf (f, "}\n");
9235 }
9236 else
9237 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9238 SP_REGNUM, SP_REGNUM,
9239 current_function_pretend_args_size);
9240 }
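  /* Example of the push above (a sketch): a varargs function with
     8 bytes of pretend arguments has num_pushes == 2, so the loop emits

	push	{r2, r3}

     placing the last two argument registers next to any stack-passed
     arguments.  */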
9241
9242 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9243 if (regs_ever_live[regno] && !call_used_regs[regno]
9244 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9245 live_regs_mask |= 1 << regno;
9246
5895f793 9247 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
9248 live_regs_mask |= 1 << LR_REGNUM;
9249
9250 if (TARGET_BACKTRACE)
9251 {
9252 int offset;
9253 int work_register = 0;
9254 int wr;
9255
9256 /* We have been asked to create a stack backtrace structure.
9257 The code looks like this:
9258
9259 0 .align 2
9260 0 func:
9261 0 sub SP, #16 Reserve space for 4 registers.
9262 2 push {R7} Get a work register.
9263 4 add R7, SP, #20 Get the stack pointer before the push.
9264 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9265 8 mov R7, PC Get hold of the start of this code plus 12.
9266 10 str R7, [SP, #16] Store it.
9267 12 mov R7, FP Get hold of the current frame pointer.
9268 14 str R7, [SP, #4] Store it.
9269 16 mov R7, LR Get hold of the current return address.
9270 18 str R7, [SP, #12] Store it.
9271 20 add R7, SP, #16 Point at the start of the backtrace structure.
9272 22 mov FP, R7 Put this value into the frame pointer. */
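      /* Reading the sequence back, the resulting 16 byte structure,
	 relative to the new frame pointer, is (a sketch derived from the
	 offsets above):

	    [FP, #0]	this function's code pointer (PC + 12 at the mov)
	    [FP, #-4]	saved LR
	    [FP, #-8]	caller's SP before anything was pushed
	    [FP, #-12]	caller's FP  */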
9273
9274 if ((live_regs_mask & 0xFF) == 0)
9275 {
9276 /* See if the a4 register is free. */
9277
9278 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9279 work_register = LAST_ARG_REGNUM;
9280 else /* We must push a register of our own */
9281 live_regs_mask |= (1 << LAST_LO_REGNUM);
9282 }
9283
9284 if (work_register == 0)
9285 {
9286 /* Select a register from the list that will be pushed to
9287 use as our work register. */
9288 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9289 if ((1 << work_register) & live_regs_mask)
9290 break;
9291 }
9292
9293 asm_fprintf
9294 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9295 SP_REGNUM, SP_REGNUM);
9296
9297 if (live_regs_mask)
9298 thumb_pushpop (f, live_regs_mask, 1);
9299
9300 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9301 if (wr & live_regs_mask)
9302 offset += 4;
9303
9304 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9305 offset + 16 + current_function_pretend_args_size);
9306
9307 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9308 offset + 4);
9309
9310 /* Make sure that the instruction fetching the PC is in the right place
9311 to calculate "start of backtrace creation code + 12". */
9312 if (live_regs_mask)
9313 {
9314 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9315 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9316 offset + 12);
9317 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9318 ARM_HARD_FRAME_POINTER_REGNUM);
9319 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9320 offset);
9321 }
9322 else
9323 {
9324 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9325 ARM_HARD_FRAME_POINTER_REGNUM);
9326 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9327 offset);
9328 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9329 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9330 offset + 12);
9331 }
9332
9333 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9334 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9335 offset + 8);
9336 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9337 offset + 12);
9338 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9339 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9340 }
9341 else if (live_regs_mask)
9342 thumb_pushpop (f, live_regs_mask, 1);
9343
9344 for (regno = 8; regno < 13; regno++)
9345 {
9346 if (regs_ever_live[regno] && !call_used_regs[regno]
9347 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9348 high_regs_pushed++;
9349 }
9350
9351 if (high_regs_pushed)
9352 {
9353 int pushable_regs = 0;
9354 int mask = live_regs_mask & 0xff;
9355 int next_hi_reg;
9356
9357 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9358 {
9359 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9360 && !(TARGET_SINGLE_PIC_BASE
9361 && (next_hi_reg == arm_pic_register)))
9362 break;
9363 }
9364
9365 pushable_regs = mask;
9366
9367 if (pushable_regs == 0)
9368 {
9369 /* Desperation time -- this probably will never happen. */
9370 if (regs_ever_live[LAST_ARG_REGNUM]
5895f793 9371 || !call_used_regs[LAST_ARG_REGNUM])
9372 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9373 mask = 1 << LAST_ARG_REGNUM;
9374 }
9375
9376 while (high_regs_pushed > 0)
9377 {
9378 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
9379 {
9380 if (mask & (1 << regno))
9381 {
9382 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
9383
5895f793 9384 high_regs_pushed--;
9385
9386 if (high_regs_pushed)
9387 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
9388 next_hi_reg--)
9389 {
9390 if (regs_ever_live[next_hi_reg]
9391 && !call_used_regs[next_hi_reg]
9392 && !(TARGET_SINGLE_PIC_BASE
9393 && (next_hi_reg == arm_pic_register)))
9394 break;
9395 }
9396 else
9397 {
5895f793 9398 mask &= ~((1 << regno) - 1);
9399 break;
9400 }
9401 }
9402 }
9403
9404 thumb_pushpop (f, mask, 1);
9405 }
9406
9407 if (pushable_regs == 0
9408 && (regs_ever_live[LAST_ARG_REGNUM]
5895f793 9409 || !call_used_regs[LAST_ARG_REGNUM]))
9410 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9411 }
9412}
9413
9414/* Handle the case of a double word load into a low register from
9415 a computed memory address. The computed address may involve a
9416 register which is overwritten by the load. */
9417
cd2b33d0 9418const char *
9419thumb_load_double_from_address (operands)
9420 rtx * operands;
9421{
9422 rtx addr;
9423 rtx base;
9424 rtx offset;
9425 rtx arg1;
9426 rtx arg2;
9427
9428 if (GET_CODE (operands[0]) != REG)
9429 fatal ("thumb_load_double_from_address: destination is not a register");
9430
9431 if (GET_CODE (operands[1]) != MEM)
9432 {
9433 debug_rtx (operands[1]);
9434 fatal ("thumb_load_double_from_address: source is not a computed memory address");
9435 }
9436
9437 /* Get the memory address. */
9438 addr = XEXP (operands[1], 0);
9439
9440 /* Work out how the memory address is computed. */
9441 switch (GET_CODE (addr))
9442 {
9443 case REG:
9444 operands[2] = gen_rtx (MEM, SImode,
9445 plus_constant (XEXP (operands[1], 0), 4));
9446
9447 if (REGNO (operands[0]) == REGNO (addr))
9448 {
9449 output_asm_insn ("ldr\t%H0, %2", operands);
9450 output_asm_insn ("ldr\t%0, %1", operands);
9451 }
9452 else
9453 {
9454 output_asm_insn ("ldr\t%0, %1", operands);
9455 output_asm_insn ("ldr\t%H0, %2", operands);
9456 }
9457 break;
9458
9459 case CONST:
9460 /* Compute <address> + 4 for the high order load. */
9461 operands[2] = gen_rtx (MEM, SImode,
9462 plus_constant (XEXP (operands[1], 0), 4));
9463
9464 output_asm_insn ("ldr\t%0, %1", operands);
9465 output_asm_insn ("ldr\t%H0, %2", operands);
9466 break;
9467
9468 case PLUS:
9469 arg1 = XEXP (addr, 0);
9470 arg2 = XEXP (addr, 1);
9471
9472 if (CONSTANT_P (arg1))
9473 base = arg2, offset = arg1;
9474 else
9475 base = arg1, offset = arg2;
9476
9477 if (GET_CODE (base) != REG)
9478 fatal ("thumb_load_double_from_address: base is not a register");
9479
9480 /* Catch the case of <address> = <reg> + <reg> */
9481 if (GET_CODE (offset) == REG)
9482 {
9483 int reg_offset = REGNO (offset);
9484 int reg_base = REGNO (base);
9485 int reg_dest = REGNO (operands[0]);
9486
9487 /* Add the base and offset registers together into the
9488 higher destination register. */
9489 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
9490 reg_dest + 1, reg_base, reg_offset);
9491
9492 /* Load the lower destination register from the address in
9493 the higher destination register. */
9494 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
9495 reg_dest, reg_dest + 1);
9496
9497 /* Load the higher destination register from its own address
9498 plus 4. */
9499 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
9500 reg_dest + 1, reg_dest + 1);
9501 }
9502 else
9503 {
9504 /* Compute <address> + 4 for the high order load. */
9505 operands[2] = gen_rtx (MEM, SImode,
9506 plus_constant (XEXP (operands[1], 0), 4));
9507
9508 /* If the computed address is held in the low order register
9509 then load the high order register first, otherwise always
9510 load the low order register first. */
9511 if (REGNO (operands[0]) == REGNO (base))
9512 {
9513 output_asm_insn ("ldr\t%H0, %2", operands);
9514 output_asm_insn ("ldr\t%0, %1", operands);
9515 }
9516 else
9517 {
9518 output_asm_insn ("ldr\t%0, %1", operands);
9519 output_asm_insn ("ldr\t%H0, %2", operands);
9520 }
9521 }
9522 break;
9523
9524 case LABEL_REF:
9525 /* With no registers to worry about we can just load the value
9526 directly. */
9527 operands[2] = gen_rtx (MEM, SImode,
9528 plus_constant (XEXP (operands[1], 0), 4));
9529
9530 output_asm_insn ("ldr\t%H0, %2", operands);
9531 output_asm_insn ("ldr\t%0, %1", operands);
9532 break;
9533
9534 default:
9535 debug_rtx (operands[1]);
9536 fatal ("thumb_load_double_from_address: Unhandled address calculation");
9537 break;
9538 }
9539
9540 return "";
9541}
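/* Example of the PLUS/REG case above (a sketch): loading the 64-bit
   value at r2 + r3 into r0:r1 emits

	add	r1, r2, r3
	ldr	r0, [r1, #0]
	ldr	r1, [r1, #4]

   forming the address in the high half of the destination so that the
   base cannot be clobbered before both words have been loaded.  */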
9542
9543
cd2b33d0 9544const char *
9545thumb_output_move_mem_multiple (n, operands)
9546 int n;
9547 rtx * operands;
9548{
9549 rtx tmp;
9550
9551 switch (n)
9552 {
9553 case 2:
ca356f3a 9554 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9555 {
9556 tmp = operands[4];
9557 operands[4] = operands[5];
9558 operands[5] = tmp;
d5b7b3ae 9559 }
9560 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
9561 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
9562 break;
9563
9564 case 3:
ca356f3a 9565 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9566 {
9567 tmp = operands[4];
9568 operands[4] = operands[5];
9569 operands[5] = tmp;
d5b7b3ae 9570 }
ca356f3a 9571 if (REGNO (operands[5]) > REGNO (operands[6]))
d5b7b3ae 9572 {
9573 tmp = operands[5];
9574 operands[5] = operands[6];
9575 operands[6] = tmp;
d5b7b3ae 9576 }
ca356f3a 9577 if (REGNO (operands[4]) > REGNO (operands[5]))
d5b7b3ae 9578 {
9579 tmp = operands[4];
9580 operands[4] = operands[5];
9581 operands[5] = tmp;
9582 }
9583
9584 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
9585 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
9586 break;
9587
9588 default:
9589 abort ();
9590 }
9591
9592 return "";
9593}
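/* The swap sequences above form small sorting networks: ldmia/stmia
   register lists must be in ascending order, so, for example, operands
   (r5, r4, r2) in the three-word case are reordered to (r2, r4, r5)
   before the transfer instructions are printed.  */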
9594
 9595/* Routines for generating RTL.  */
9596
9597void
9598thumb_expand_movstrqi (operands)
9599 rtx * operands;
9600{
9601 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
9602 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
9603 HOST_WIDE_INT len = INTVAL (operands[2]);
9604 HOST_WIDE_INT offset = 0;
9605
9606 while (len >= 12)
9607 {
ca356f3a 9608 emit_insn (gen_movmem12b (out, in, out, in));
9609 len -= 12;
9610 }
9611
9612 if (len >= 8)
9613 {
ca356f3a 9614 emit_insn (gen_movmem8b (out, in, out, in));
9615 len -= 8;
9616 }
9617
9618 if (len >= 4)
9619 {
9620 rtx reg = gen_reg_rtx (SImode);
9621 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
9622 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
9623 len -= 4;
9624 offset += 4;
9625 }
9626
9627 if (len >= 2)
9628 {
9629 rtx reg = gen_reg_rtx (HImode);
9630 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
9631 plus_constant (in, offset))));
9632 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
9633 reg));
9634 len -= 2;
9635 offset += 2;
9636 }
9637
9638 if (len)
9639 {
9640 rtx reg = gen_reg_rtx (QImode);
9641 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
9642 plus_constant (in, offset))));
9643 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
9644 reg));
9645 }
9646}
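/* Example (a sketch): a 23 byte copy expands to one 12 byte ldmia/stmia
   group, one 8 byte group, then a halfword and a byte move
   (12 + 8 + 2 + 1 = 23), with the multi-word patterns advancing the
   source and destination pointers as they go.  */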
9647
9648int
9649thumb_cmp_operand (op, mode)
9650 rtx op;
9651 enum machine_mode mode;
9652{
9653 return ((GET_CODE (op) == CONST_INT
9654 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
9655 || register_operand (op, mode));
9656}
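/* Reading the predicate above: "cmp rN, #255" matches directly, while
   comparing against 256 or more first requires moving the constant into
   a register, reflecting the 8-bit immediate field of the Thumb CMP
   instruction.  */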
9657
cd2b33d0 9658static const char *
9659thumb_condition_code (x, invert)
9660 rtx x;
9661 int invert;
9662{
cd2b33d0 9663 static const char * conds[] =
9664 {
9665 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
9666 "hi", "ls", "ge", "lt", "gt", "le"
9667 };
9668 int val;
9669
9670 switch (GET_CODE (x))
9671 {
9672 case EQ: val = 0; break;
9673 case NE: val = 1; break;
9674 case GEU: val = 2; break;
9675 case LTU: val = 3; break;
9676 case GTU: val = 8; break;
9677 case LEU: val = 9; break;
9678 case GE: val = 10; break;
9679 case LT: val = 11; break;
9680 case GT: val = 12; break;
9681 case LE: val = 13; break;
9682 default:
9683 abort ();
9684 }
9685
9686 return conds[val ^ invert];
9687}
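/* Example (a sketch): GEU selects index 2 ("cs"); with INVERT set the
   index is XORed to 3, giving "cc".  The table is ordered so that each
   even/odd pair of entries are logical complements.  */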
9688
9689/* Handle storing a half-word to memory during reload. */
9690void
9691thumb_reload_out_hi (operands)
9692 rtx * operands;
9693{
9694 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
9695}
9696
 9697/* Handle reading a half-word from memory during reload.  */
9698void
9699thumb_reload_in_hi (operands)
9700 rtx * operands ATTRIBUTE_UNUSED;
9701{
9702 abort ();
9703}
9704
9705/* Return the length of a function name prefix
9706 that starts with the character 'c'. */
9707static int
9708arm_get_strip_length (char c)
9709{
9710 switch (c)
9711 {
9712 ARM_NAME_ENCODING_LENGTHS
9713 default: return 0;
9714 }
9715}
9716
9717/* Return a pointer to a function's name with any
9718 and all prefix encodings stripped from it. */
9719const char *
9720arm_strip_name_encoding (const char * name)
9721{
9722 int skip;
9723
9724 while ((skip = arm_get_strip_length (* name)))
9725 name += skip;
9726
9727 return name;
9728}
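/* Usage sketch (the '*' prefix is only an illustration -- the real
   prefix characters and their lengths come from
   ARM_NAME_ENCODING_LENGTHS):

     arm_strip_name_encoding ("*foo")  =>  "foo"

   Stacked prefixes are all removed, because the loop keeps advancing
   until arm_get_strip_length returns 0.  */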
9729
2b835d68 9730#ifdef AOF_ASSEMBLER
6354dc9b 9731/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 9732
9733rtx aof_pic_label = NULL_RTX;
9734struct pic_chain
9735{
9736 struct pic_chain * next;
9737 char * symname;
9738};
9739
62b10bbc 9740static struct pic_chain * aof_pic_chain = NULL;
9741
9742rtx
9743aof_pic_entry (x)
9744 rtx x;
9745{
62b10bbc 9746 struct pic_chain ** chainp;
9747 int offset;
9748
9749 if (aof_pic_label == NULL_RTX)
9750 {
9751 /* We mark this here and not in arm_add_gc_roots() to avoid
9752 polluting even more code with ifdefs, and because it never
9753 contains anything useful until we assign to it here. */
5895f793 9754 ggc_add_rtx_root (&aof_pic_label, 1);
43cffd11 9755 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
9756 }
9757
9758 for (offset = 0, chainp = &aof_pic_chain; *chainp;
9759 offset += 4, chainp = &(*chainp)->next)
9760 if ((*chainp)->symname == XSTR (x, 0))
9761 return plus_constant (aof_pic_label, offset);
9762
9763 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
9764 (*chainp)->next = NULL;
9765 (*chainp)->symname = XSTR (x, 0);
9766 return plus_constant (aof_pic_label, offset);
9767}
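/* Example (a sketch): the first three distinct symbols entered here are
   assigned offsets 0, 4 and 8 from the x$adcons label, so the second
   symbol's address is returned as (plus aof_pic_label 4), and
   aof_dump_pic_table below emits one DCD per chain entry in the same
   order.  */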
9768
9769void
9770aof_dump_pic_table (f)
62b10bbc 9771 FILE * f;
32de079a 9772{
62b10bbc 9773 struct pic_chain * chain;
9774
9775 if (aof_pic_chain == NULL)
9776 return;
9777
9778 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
9779 PIC_OFFSET_TABLE_REGNUM,
9780 PIC_OFFSET_TABLE_REGNUM);
9781 fputs ("|x$adcons|\n", f);
9782
9783 for (chain = aof_pic_chain; chain; chain = chain->next)
9784 {
9785 fputs ("\tDCD\t", f);
9786 assemble_name (f, chain->symname);
9787 fputs ("\n", f);
9788 }
9789}
9790
9791int arm_text_section_count = 1;
9792
9793char *
84ed5e79 9794aof_text_section ()
9795{
9796 static char buf[100];
9797 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
9798 arm_text_section_count++);
9799 if (flag_pic)
9800 strcat (buf, ", PIC, REENTRANT");
9801 return buf;
9802}
9803
9804static int arm_data_section_count = 1;
9805
9806char *
9807aof_data_section ()
9808{
9809 static char buf[100];
9810 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
9811 return buf;
9812}
9813
9814/* The AOF assembler is religiously strict about declarations of
9815 imported and exported symbols, so that it is impossible to declare
956d6950 9816 a function as imported near the beginning of the file, and then to
9817 export it later on. It is, however, possible to delay the decision
9818 until all the functions in the file have been compiled. To get
9819 around this, we maintain a list of the imports and exports, and
9820 delete from it any that are subsequently defined. At the end of
9821 compilation we spit the remainder of the list out before the END
9822 directive. */
9823
9824struct import
9825{
9826 struct import * next;
9827 char * name;
9828};
9829
62b10bbc 9830static struct import * imports_list = NULL;
9831
9832void
9833aof_add_import (name)
62b10bbc 9834 char * name;
2b835d68 9835{
62b10bbc 9836 struct import * new;
9837
9838 for (new = imports_list; new; new = new->next)
9839 if (new->name == name)
9840 return;
9841
9842 new = (struct import *) xmalloc (sizeof (struct import));
9843 new->next = imports_list;
9844 imports_list = new;
9845 new->name = name;
9846}
9847
9848void
9849aof_delete_import (name)
62b10bbc 9850 char * name;
2b835d68 9851{
62b10bbc 9852 struct import ** old;
9853
9854 for (old = &imports_list; *old; old = & (*old)->next)
9855 {
9856 if ((*old)->name == name)
9857 {
9858 *old = (*old)->next;
9859 return;
9860 }
9861 }
9862}
9863
9864int arm_main_function = 0;
9865
9866void
9867aof_dump_imports (f)
62b10bbc 9868 FILE * f;
9869{
9870 /* The AOF assembler needs this to cause the startup code to be extracted
 9871     from the library.  Bringing in __main causes the whole thing to work
9872 automagically. */
9873 if (arm_main_function)
9874 {
9875 text_section ();
9876 fputs ("\tIMPORT __main\n", f);
9877 fputs ("\tDCD __main\n", f);
9878 }
9879
9880 /* Now dump the remaining imports. */
9881 while (imports_list)
9882 {
9883 fprintf (f, "\tIMPORT\t");
9884 assemble_name (f, imports_list->name);
9885 fputc ('\n', f);
9886 imports_list = imports_list->next;
9887 }
9888}
9889#endif /* AOF_ASSEMBLER */