/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;
/* Some function declarations.  */

static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static char * output_multi_immediate PROTO ((rtx *, char *, char *, int,
					     HOST_WIDE_INT));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
				    HOST_WIDE_INT, rtx, rtx, int, int));
static int arm_naked_function_p PROTO ((tree));
static void init_fpa_table PROTO ((void));
static enum machine_mode select_dominance_cc_mode PROTO ((rtx, rtx,
							  HOST_WIDE_INT));
static HOST_WIDE_INT add_minipool_constant PROTO ((rtx, enum machine_mode));
static void dump_minipool PROTO ((rtx));
static rtx find_barrier PROTO ((rtx, int));
static void push_minipool_fix PROTO ((rtx, int, rtx *, enum machine_mode,
				      rtx));
static void push_minipool_barrier PROTO ((rtx, int));
static void note_invalid_constants PROTO ((rtx, int));
static char * fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
static int eliminate_lr2ip PROTO ((rtx *));
static char * shift_op PROTO ((rtx, HOST_WIDE_INT *));
static int pattern_really_clobbers_lr PROTO ((rtx));
static int function_really_clobbers_lr PROTO ((rtx));
static void emit_multi_reg_push PROTO ((int));
static void emit_sfm PROTO ((int, int));
static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
static int const_ok_for_op RTX_CODE_PROTO ((HOST_WIDE_INT, Rcode));
static void arm_add_gc_roots PROTO ((void));

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in? 26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = 32; /* Used to be 8 */

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */

/* The bits in this mask specify which instructions we are allowed to
   generate.  */
static int insn_flags = 0;
/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
static int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

static enum arm_cond_code get_arm_condition_code ();

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",	FL_CO_PROC | FL_MODE26 },
  {"arm250",	FL_CO_PROC | FL_MODE26 },
  {"arm3",	FL_CO_PROC | FL_MODE26 },
  {"arm6",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",	FL_MODE26 | FL_MODE32 },
  {"arm620",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, only in combination with D, (and I),
     but those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",	FL_MODE26 | FL_MODE32 },
  {"arm710c",	FL_MODE26 | FL_MODE32 },
  {"arm7100",	FL_MODE26 | FL_MODE32 },
  {"arm7500",	FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",	FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",	FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",	FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",	FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",	FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",	FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",	FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",	    FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",	FL_CO_PROC | FL_MODE26 },
  { "armv2a",	FL_CO_PROC | FL_MODE26 },
  { "armv3",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",	FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",	FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string	name		processors  */
  { NULL,	"-mcpu=",	all_cores  },
  { NULL,	"-march=",	all_architectures },
  { NULL,	"-mtune=",	all_cores }
};

/* Return the number of bits set in `value'.  */
static unsigned int
bit_count (value)
     signed int value;
{
  unsigned int count = 0;

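  /* Note (added): VALUE & -VALUE isolates the lowest set bit, so each
     iteration of the loop below clears exactly one set bit -- Kernighan's
     bit-counting method.  */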
  while (value)
    {
      value &= ~(value & - value);
      ++ count;
    }

  return count;
}

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
	{
	  const struct processors * sel;

	  for (sel = ptr->processors; sel->name != NULL; sel ++)
	    if (streq (ptr->string, sel->name))
	      {
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
	int    cpu;
	char * name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2,      "arm2" },
	{ TARGET_CPU_arm6,      "arm6" },
	{ TARGET_CPU_arm610,    "arm610" },
	{ TARGET_CPU_arm710,    "arm710" },
	{ TARGET_CPU_arm7m,     "arm7m" },
	{ TARGET_CPU_arm7500fe, "arm7500fe" },
	{ TARGET_CPU_arm7tdmi,  "arm7tdmi" },
	{ TARGET_CPU_arm8,      "arm8" },
	{ TARGET_CPU_arm810,    "arm810" },
	{ TARGET_CPU_arm9,      "arm9" },
	{ TARGET_CPU_strongarm, "strongarm" },
	{ TARGET_CPU_generic,   "arm" },
	{ 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def ++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel ++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
	 switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There is no ARM processor that supports both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~ FL_MODE26;
	}

      if (! TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.  */
	  for (sel = all_cores; sel->name != NULL; sel ++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned int        current_bit_count = 0;
	      struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel ++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned int count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32");
      target_flags &= ~ ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (! TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = insn_flags & FL_FAST_MULT;
  arm_arch4         = insn_flags & FL_ARCH4;
  arm_arch5         = insn_flags & FL_ARCH5;

  arm_ld_sched  = tune_flags & FL_LDSCHED;
  arm_is_strong = tune_flags & FL_STRONG;
  arm_is_6_or_7 = ((tune_flags & (FL_MODE26 | FL_MODE32))
		   && !(tune_flags & FL_ARCH4));

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FP_SOFT3;
      else
	fatal ("Invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (! flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
	  || pic_register == HARD_FRAME_POINTER_REGNUM
	  || pic_register == STACK_POINTER_REGNUM
	  || pic_register >= PC_REGNUM)
	error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root */
  /* XXX: What about the minipool tables?  */
}

\f
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  if (!reload_completed
      || current_function_pretend_args_size
      || current_function_anonymous_args
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
	  && !(TARGET_APCS && frame_pointer_needed)))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;
  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno < 16; regno++)
	if (regs_ever_live[regno] && ! call_used_regs[regno])
	  return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 16; regno < 24; regno++)
    if (regs_ever_live[regno] && ! call_used_regs[regno])
      return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}

/* Return TRUE if int I is a valid immediate ARM constant.  */

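/* Added illustration: an ARM data-processing immediate is an 8-bit value
   rotated right by an even amount, so 0xff, 0xff0 and 0xff000000 are all
   valid here, while 0x101 is not and must be built in two insns.  */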
int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
	  != ((~(unsigned HOST_WIDE_INT) 0)
	      & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
	return TRUE;
      mask =
	  (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
			 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
    } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}

/* Return true if I is a valid constant for the operation CODE.  */
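/* For example (added, illustrative): PLUS with I = -1 fails
   const_ok_for_arm, but -I = 1 passes, so the add can be emitted as a
   sub; likewise AND with I = 0xfffffffc can become a bic of ~I = 3.  */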
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesise
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (! after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are dyadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }


  /* Calculate a few attributes that may be useful for specific
     optimizations.  */

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
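      /* e.g. (added, illustrative) 0x01010101: build 0x0101 first, then a
	 single "orr target, source, source, lsl #16" copies it into the
	 top half of the word.  */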
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && ! const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && ! const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0.  */
	  || (reload_completed && ! reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

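      /* If the constant is a run of ones at one end of the word (e.g.,
	 illustratively, 0xfffffe00 or 0x000001ff), it can be IORed in
	 with the two-insn MVN/shift sequences below.  */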
      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do 2 or more insns' worth of work.  */
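      /* e.g. (added, illustrative) AND with 0x0000ffff: a left shift by 16
	 followed by a logical right shift by 16 clears the top half in
	 two insns, with no constant to load.  */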
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
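  /* e.g. (added, illustrative) a SET of 0x12345678 becomes
	mov	target, #0x12000000
	orr	target, target, #0x00340000
	orr	target, target, #0x00005600
	orr	target, target, #0x00000078
     -- four insns, each with a valid rotated 8-bit immediate.  */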
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (! (remainder & (3 << i)))
	  {
	    while ((i < 32) && ! (remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src;

		if (code == SET)
		  emit_insn (gen_rtx_SET (VOIDmode,
					  new_src = (subtargets
						     ? gen_reg_rtx (mode)
						     : target),
					  GEN_INT (can_invert
						   ? ~temp1 : temp1)));
		else if (code == MINUS)
		  emit_insn (gen_rtx_SET (VOIDmode,
					  new_src = (subtargets
						     ? gen_reg_rtx (mode)
						     : target),
					  gen_rtx (code, mode, GEN_INT (temp1),
						   source)));
		else
		  emit_insn (gen_rtx_SET (VOIDmode,
					  new_src = (remainder
						     ? (subtargets
							? gen_reg_rtx (mode)
							: target)
						     : target),
					  gen_rtx (code, mode, source,
						   GEN_INT (can_invert ? ~temp1
							    : (can_negate
							       ? -temp1
							       : temp1)))));
		source = new_src;
	      }

	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      } while (remainder);
  }
  return insns;
}

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
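/* For example (added, illustrative): "x > 4095" -- 4095 is not a valid
   immediate, but 4096 is, so the comparison is rewritten as "x >= 4096".  */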
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
		- 1)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i+1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
	{
	  *op1 = GEN_INT (i-1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (! AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }
  else if (int_size_in_bytes (type) > 4)
    {
      /* All structures/unions bigger than one word are returned in memory.  */
      return 1;
    }
  else if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non bit-field element
	 and that this element must be the first one in the structure.  */

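      /* e.g. (added, illustrative): struct { int i; } comes back in a
	 register, but struct { char c; char d; } goes in memory, since
	 `d' is an addressable element at a non-zero offset.  */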
      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register...  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (! DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}

int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x) && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
	  || (GET_CODE (x) == CONST
	      && GET_CODE (XEXP (x, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
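      /* Roughly (added comment): load the symbol's GOT offset into
	 ADDRESS, then fetch the word at [PIC register + ADDRESS] to
	 obtain the symbol's real address.  */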
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
			     gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					   address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  */
	  GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}

static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}

void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition.  */
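  /* The code built below amounts to (added, illustrative):
	     ldr	rPIC, =_GLOBAL_OFFSET_TABLE_ - (L1 + 8)
	L1:  add	rPIC, pc, rPIC
     leaving the GOT base in rPIC, since pc reads as L1 + 8 there.  */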
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), 8);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
			      gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}

#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)					\
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx	\
   || (X) == arg_pointer_rtx)

1659int
74bbc178 1660arm_rtx_costs (x, code)
e2c671ba 1661 rtx x;
74bbc178 1662 enum rtx_code code;
e2c671ba
RE
1663{
1664 enum machine_mode mode = GET_MODE (x);
1665 enum rtx_code subcode;
1666 int extra_cost;
1667
1668 switch (code)
1669 {
1670 case MEM:
1671 /* Memory costs quite a lot for the first word, but subsequent words
1672 load at the equivalent of a single insn each. */
1673 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
1674 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
1675
1676 case DIV:
1677 case MOD:
1678 return 100;
1679
1680 case ROTATE:
1681 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
1682 return 4;
1683 /* Fall through */
1684 case ROTATERT:
1685 if (mode != SImode)
1686 return 8;
1687 /* Fall through */
1688 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
1689 if (mode == DImode)
1690 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
1691 + ((GET_CODE (XEXP (x, 0)) == REG
1692 || (GET_CODE (XEXP (x, 0)) == SUBREG
1693 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1694 ? 0 : 8));
1695 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
1696 || (GET_CODE (XEXP (x, 0)) == SUBREG
1697 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1698 ? 0 : 4)
1699 + ((GET_CODE (XEXP (x, 1)) == REG
1700 || (GET_CODE (XEXP (x, 1)) == SUBREG
1701 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
1702 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
1703 ? 0 : 4));
1704
1705 case MINUS:
1706 if (mode == DImode)
1707 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
1708 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1709 || (GET_CODE (XEXP (x, 0)) == CONST_INT
1710 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
1711 ? 0 : 8));
1712
1713 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1714 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1715 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1716 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1717 ? 0 : 8)
1718 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1719 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
1720 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
1721 ? 0 : 8));
1722
1723 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
1724 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
1725 && REG_OR_SUBREG_REG (XEXP (x, 1))))
1726 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
1727 || subcode == ASHIFTRT || subcode == LSHIFTRT
1728 || subcode == ROTATE || subcode == ROTATERT
1729 || (subcode == MULT
1730 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
1731 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
1732 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
1733 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
1734 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
1735 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
1736 && REG_OR_SUBREG_REG (XEXP (x, 0))))
1737 return 1;
1738 /* Fall through */
1739
1740 case PLUS:
1741 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1742 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1743 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1744 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1745 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1746 ? 0 : 8));
1747
1748 /* Fall through */
1749 case AND: case XOR: case IOR:
1750 extra_cost = 0;
1751
1752 /* Normally the frame registers will be spilt into reg+const during
1753 reload, so it is a bad idea to combine them with other instructions,
1754 since then they might not be moved outside of loops. As a compromise
1755 we allow integration with ops that have a constant as their second
1756 operand. */
1757 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
1758 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
1759 && GET_CODE (XEXP (x, 1)) != CONST_INT)
1760	  || (REG_OR_SUBREG_REG (XEXP (x, 1))
1761	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
1762 extra_cost = 4;
1763
1764 if (mode == DImode)
1765 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1766 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1767 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 1768 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
1769 ? 0 : 8));
1770
1771 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
1772 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
1773 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1774 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 1775 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
1776 ? 0 : 4));
1777
1778 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
1779 return (1 + extra_cost
1780 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
1781 || subcode == LSHIFTRT || subcode == ASHIFTRT
1782 || subcode == ROTATE || subcode == ROTATERT
1783 || (subcode == MULT
1784 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1785 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 1786 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
1787 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
1788 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 1789 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
1790 ? 0 : 4));
1791
1792 return 8;
1793
1794 case MULT:
b111229a
RE
1795    /* There is no point basing this on the tuning, since it is always the
1796       fast variant if it exists at all.  */
2b835d68
RE
1797 if (arm_fast_multiply && mode == DImode
1798 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
1799 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
1800 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
1801 return 8;
1802
e2c671ba
RE
1803 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1804 || mode == DImode)
1805 return 30;
1806
1807 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1808 {
2b835d68
RE
1809 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
1810 & (unsigned HOST_WIDE_INT) 0xffffffff);
e2c671ba
RE
1811 int add_cost = const_ok_for_arm (i) ? 4 : 8;
1812 int j;
b111229a 1813 /* Tune as appropriate */
aec3cfba 1814 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 1815
2b835d68 1816 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 1817 {
2b835d68 1818 i >>= booth_unit_size;
e2c671ba
RE
1819 add_cost += 2;
1820 }
1821
1822 return add_cost;
1823 }
1824
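      /* A worked example of the constant cost above: multiplying by 0x51
	 (const_ok_for_arm, so add_cost starts at 4), a slow multiplier
	 with booth_unit_size == 2 retires two bits per step:

	    0x51 -> 0x14 -> 0x05 -> 0x01 -> 0	(four steps, +2 each)

	 for a cost of 12, while a fast multiplier (booth_unit_size == 8)
	 clears the value in a single step, for a cost of 6.  */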
aec3cfba 1825 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 1826 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
1827 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
1828
56636818
JL
1829 case TRUNCATE:
1830 if (arm_fast_multiply && mode == SImode
1831 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
1832 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
1833 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1834 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
1835 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
1836 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
1837 return 8;
1838 return 99;
1839
e2c671ba
RE
1840 case NEG:
1841 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1842 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
1843 /* Fall through */
1844 case NOT:
1845 if (mode == DImode)
1846 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1847
1848 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1849
1850 case IF_THEN_ELSE:
1851 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
1852 return 14;
1853 return 2;
1854
1855 case COMPARE:
1856 return 1;
1857
1858 case ABS:
1859 return 4 + (mode == DImode ? 4 : 0);
1860
1861 case SIGN_EXTEND:
1862 if (GET_MODE (XEXP (x, 0)) == QImode)
1863 return (4 + (mode == DImode ? 4 : 0)
1864 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1865 /* Fall through */
1866 case ZERO_EXTEND:
1867 switch (GET_MODE (XEXP (x, 0)))
1868 {
1869 case QImode:
1870 return (1 + (mode == DImode ? 4 : 0)
1871 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1872
1873 case HImode:
1874 return (4 + (mode == DImode ? 4 : 0)
1875 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1876
1877 case SImode:
1878 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
1879
1880 default:
1881 break;
e2c671ba
RE
1882 }
1883 abort ();
1884
1885 default:
1886 return 99;
1887 }
1888}
32de079a
RE
1889
1890int
1891arm_adjust_cost (insn, link, dep, cost)
1892 rtx insn;
1893 rtx link;
1894 rtx dep;
1895 int cost;
1896{
1897 rtx i_pat, d_pat;
1898
b36ba79f
RE
1899 /* XXX This is not strictly true for the FPA. */
1900 if (REG_NOTE_KIND(link) == REG_DEP_ANTI
1901 || REG_NOTE_KIND(link) == REG_DEP_OUTPUT)
1902 return 0;
1903
32de079a
RE
1904 if ((i_pat = single_set (insn)) != NULL
1905 && GET_CODE (SET_SRC (i_pat)) == MEM
1906 && (d_pat = single_set (dep)) != NULL
1907 && GET_CODE (SET_DEST (d_pat)) == MEM)
1908 {
1909      /* This is a load after a store; there is no conflict if the load reads
1910	 from a cached area.  Assume that loads from the stack and from the
1911 constant pool are cached, and that others will miss. This is a
1912 hack. */
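      /* For instance, given the dependent pair

	    str	r1, [r4, #8]
	    ldr	r0, [sp, #12]

	 the load reads through the stack pointer, so the cost of the
	 dependence is collapsed to 1 rather than the full store latency.  */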
1913
32de079a
RE
1914 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
1915 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1916 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1917 || reg_mentioned_p (hard_frame_pointer_rtx,
1918 XEXP (SET_SRC (i_pat), 0)))
949d79eb 1919 return 1;
32de079a
RE
1920 }
1921
1922 return cost;
1923}
1924
ff9940b0
RE
1925/* This code has been fixed for cross compilation. */
1926
1927static int fpa_consts_inited = 0;
1928
62b10bbc
NC
1929char * strings_fpa[8] =
1930{
2b835d68
RE
1931 "0", "1", "2", "3",
1932 "4", "5", "0.5", "10"
1933};
ff9940b0
RE
1934
1935static REAL_VALUE_TYPE values_fpa[8];
1936
1937static void
1938init_fpa_table ()
1939{
1940 int i;
1941 REAL_VALUE_TYPE r;
1942
1943 for (i = 0; i < 8; i++)
1944 {
1945 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
1946 values_fpa[i] = r;
1947 }
f3bb6135 1948
ff9940b0
RE
1949 fpa_consts_inited = 1;
1950}
1951
cce8749e
CH
1952/* Return TRUE if rtx X is a valid immediate FPU constant. */
1953
1954int
1955const_double_rtx_ok_for_fpu (x)
1956 rtx x;
1957{
ff9940b0
RE
1958 REAL_VALUE_TYPE r;
1959 int i;
1960
1961 if (!fpa_consts_inited)
1962 init_fpa_table ();
1963
1964 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1965 if (REAL_VALUE_MINUS_ZERO (r))
1966 return 0;
f3bb6135 1967
ff9940b0
RE
1968 for (i = 0; i < 8; i++)
1969 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1970 return 1;
f3bb6135 1971
ff9940b0 1972 return 0;
f3bb6135 1973}
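
/* Thus 4.0 and 0.5 are acceptable FPA immediates, while 0.3 and -0.5 are
   not and must be loaded from memory.  Note that -0.0 is explicitly
   rejected before the table search above.  */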
ff9940b0
RE
1974
1975 /* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */
1976
1977int
1978neg_const_double_rtx_ok_for_fpu (x)
1979 rtx x;
1980{
1981 REAL_VALUE_TYPE r;
1982 int i;
1983
1984 if (!fpa_consts_inited)
1985 init_fpa_table ();
1986
1987 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1988 r = REAL_VALUE_NEGATE (r);
1989 if (REAL_VALUE_MINUS_ZERO (r))
1990 return 0;
f3bb6135 1991
ff9940b0
RE
1992 for (i = 0; i < 8; i++)
1993 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1994 return 1;
f3bb6135 1995
ff9940b0 1996 return 0;
f3bb6135 1997}
cce8749e
CH
1998\f
1999/* Predicates for `match_operand' and `match_operator'. */
2000
ff9940b0 2001/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
2002 (SUBREG (MEM)...).
2003
2004 This function exists because at the time it was put in it led to better
2005 code. SUBREG(MEM) always needs a reload in the places where
2006 s_register_operand is used, and this seemed to lead to excessive
2007 reloading. */
ff9940b0
RE
2008
2009int
2010s_register_operand (op, mode)
2011 register rtx op;
2012 enum machine_mode mode;
2013{
2014 if (GET_MODE (op) != mode && mode != VOIDmode)
2015 return 0;
2016
2017 if (GET_CODE (op) == SUBREG)
f3bb6135 2018 op = SUBREG_REG (op);
ff9940b0
RE
2019
2020 /* We don't consider registers whose class is NO_REGS
2021 to be a register operand. */
2022 return (GET_CODE (op) == REG
2023 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2024 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2025}
2026
e2c671ba
RE
2027/* Only accept reg, subreg(reg), const_int. */
2028
2029int
2030reg_or_int_operand (op, mode)
2031 register rtx op;
2032 enum machine_mode mode;
2033{
2034 if (GET_CODE (op) == CONST_INT)
2035 return 1;
2036
2037 if (GET_MODE (op) != mode && mode != VOIDmode)
2038 return 0;
2039
2040 if (GET_CODE (op) == SUBREG)
2041 op = SUBREG_REG (op);
2042
2043 /* We don't consider registers whose class is NO_REGS
2044 to be a register operand. */
2045 return (GET_CODE (op) == REG
2046 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2047 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2048}
2049
ff9940b0
RE
2050/* Return 1 if OP is an item in memory, given that we are in reload. */
2051
2052int
2053reload_memory_operand (op, mode)
2054 rtx op;
74bbc178 2055 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2056{
2057 int regno = true_regnum (op);
2058
2059 return (! CONSTANT_P (op)
2060 && (regno == -1
2061 || (GET_CODE (op) == REG
2062 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2063}
2064
4d818c85
RE
2065/* Return 1 if OP is a valid memory address, but not valid for a signed byte
2066    memory access (architecture V4).  */
2067int
2068bad_signed_byte_operand (op, mode)
2069 rtx op;
2070 enum machine_mode mode;
2071{
2072 if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
2073 return 0;
2074
2075 op = XEXP (op, 0);
2076
2077 /* A sum of anything more complex than reg + reg or reg + const is bad */
2078 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
9c8cc54f
RE
2079 && (! s_register_operand (XEXP (op, 0), VOIDmode)
2080 || (! s_register_operand (XEXP (op, 1), VOIDmode)
2081 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
2082 return 1;
2083
2084 /* Big constants are also bad */
2085 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2086 && (INTVAL (XEXP (op, 1)) > 0xff
2087 || -INTVAL (XEXP (op, 1)) > 0xff))
2088 return 1;
2089
2090   /* Everything else is good, or will automatically be made so.  */
2091 return 0;
2092}
2093
cce8749e
CH
2094/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2095
2096int
2097arm_rhs_operand (op, mode)
2098 rtx op;
2099 enum machine_mode mode;
2100{
ff9940b0 2101 return (s_register_operand (op, mode)
cce8749e 2102 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2103}
cce8749e 2104
ff9940b0
RE
2105 /* Return TRUE for valid operands for the rhs of an ARM instruction,
2106    or a load.  */
2107
2108int
2109arm_rhsm_operand (op, mode)
2110 rtx op;
2111 enum machine_mode mode;
2112{
2113 return (s_register_operand (op, mode)
2114 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2115 || memory_operand (op, mode));
f3bb6135 2116}
ff9940b0
RE
2117
2118 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
2119    constant that is valid when negated.  */
2120
2121int
2122arm_add_operand (op, mode)
2123 rtx op;
2124 enum machine_mode mode;
2125{
2126 return (s_register_operand (op, mode)
2127 || (GET_CODE (op) == CONST_INT
2128 && (const_ok_for_arm (INTVAL (op))
2129 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2130}
ff9940b0
RE
2131
2132int
2133arm_not_operand (op, mode)
2134 rtx op;
2135 enum machine_mode mode;
2136{
2137 return (s_register_operand (op, mode)
2138 || (GET_CODE (op) == CONST_INT
2139 && (const_ok_for_arm (INTVAL (op))
2140 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 2141}
ff9940b0 2142
5165176d
RE
2143/* Return TRUE if the operand is a memory reference which contains an
2144 offsettable address. */
2145int
2146offsettable_memory_operand (op, mode)
2147 register rtx op;
2148 enum machine_mode mode;
2149{
2150 if (mode == VOIDmode)
2151 mode = GET_MODE (op);
2152
2153 return (mode == GET_MODE (op)
2154 && GET_CODE (op) == MEM
2155 && offsettable_address_p (reload_completed | reload_in_progress,
2156 mode, XEXP (op, 0)));
2157}
2158
2159/* Return TRUE if the operand is a memory reference which is, or can be
2160 made word aligned by adjusting the offset. */
2161int
2162alignable_memory_operand (op, mode)
2163 register rtx op;
2164 enum machine_mode mode;
2165{
2166 rtx reg;
2167
2168 if (mode == VOIDmode)
2169 mode = GET_MODE (op);
2170
2171 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2172 return 0;
2173
2174 op = XEXP (op, 0);
2175
2176 return ((GET_CODE (reg = op) == REG
2177 || (GET_CODE (op) == SUBREG
2178 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2179 || (GET_CODE (op) == PLUS
2180 && GET_CODE (XEXP (op, 1)) == CONST_INT
2181 && (GET_CODE (reg = XEXP (op, 0)) == REG
2182 || (GET_CODE (XEXP (op, 0)) == SUBREG
2183 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
2184 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
2185}
2186
b111229a
RE
2187/* Similar to s_register_operand, but does not allow hard integer
2188 registers. */
2189int
2190f_register_operand (op, mode)
2191 register rtx op;
2192 enum machine_mode mode;
2193{
2194 if (GET_MODE (op) != mode && mode != VOIDmode)
2195 return 0;
2196
2197 if (GET_CODE (op) == SUBREG)
2198 op = SUBREG_REG (op);
2199
2200 /* We don't consider registers whose class is NO_REGS
2201 to be a register operand. */
2202 return (GET_CODE (op) == REG
2203 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2204 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2205}
2206
cce8749e
CH
2207/* Return TRUE for valid operands for the rhs of an FPU instruction. */
2208
2209int
2210fpu_rhs_operand (op, mode)
2211 rtx op;
2212 enum machine_mode mode;
2213{
ff9940b0 2214 if (s_register_operand (op, mode))
f3bb6135 2215 return TRUE;
cce8749e
CH
2216 else if (GET_CODE (op) == CONST_DOUBLE)
2217 return (const_double_rtx_ok_for_fpu (op));
f3bb6135
RE
2218
2219 return FALSE;
2220}
cce8749e 2221
ff9940b0
RE
2222int
2223fpu_add_operand (op, mode)
2224 rtx op;
2225 enum machine_mode mode;
2226{
2227 if (s_register_operand (op, mode))
f3bb6135 2228 return TRUE;
ff9940b0 2229 else if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
2230 return (const_double_rtx_ok_for_fpu (op)
2231 || neg_const_double_rtx_ok_for_fpu (op));
2232
2233 return FALSE;
ff9940b0
RE
2234}
2235
cce8749e
CH
2236/* Return nonzero if OP is a constant power of two. */
2237
2238int
2239power_of_two_operand (op, mode)
2240 rtx op;
74bbc178 2241 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
2242{
2243 if (GET_CODE (op) == CONST_INT)
2244 {
f3bb6135
RE
2245 HOST_WIDE_INT value = INTVAL(op);
2246 return value != 0 && (value & (value - 1)) == 0;
cce8749e 2247 }
f3bb6135
RE
2248 return FALSE;
2249}
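
/* The test above uses the usual bit trick: clearing the lowest set bit
   of a power of two leaves zero, e.g. 8 & 7 == 0, whereas
   12 & 11 == 8 != 0.  */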
cce8749e
CH
2250
2251/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 2252 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
2253 Note that this disallows MEM(REG+REG), but allows
2254 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
2255
2256int
2257di_operand (op, mode)
2258 rtx op;
2259 enum machine_mode mode;
2260{
ff9940b0 2261 if (s_register_operand (op, mode))
f3bb6135 2262 return TRUE;
cce8749e 2263
e9c6b69b
NC
2264 if (GET_CODE (op) == SUBREG)
2265 op = SUBREG_REG (op);
2266
cce8749e
CH
2267 switch (GET_CODE (op))
2268 {
2269 case CONST_DOUBLE:
2270 case CONST_INT:
f3bb6135
RE
2271 return TRUE;
2272
cce8749e 2273 case MEM:
f3bb6135
RE
2274 return memory_address_p (DImode, XEXP (op, 0));
2275
cce8749e 2276 default:
f3bb6135 2277 return FALSE;
cce8749e 2278 }
f3bb6135 2279}
cce8749e 2280
f3139301 2281/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 2282    Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
f3139301
DE
2283 Note that this disallows MEM(REG+REG), but allows
2284 MEM(PRE/POST_INC/DEC(REG)). */
2285
2286int
2287soft_df_operand (op, mode)
2288 rtx op;
2289 enum machine_mode mode;
2290{
2291 if (s_register_operand (op, mode))
2292 return TRUE;
2293
e9c6b69b
NC
2294 if (GET_CODE (op) == SUBREG)
2295 op = SUBREG_REG (op);
2296
f3139301
DE
2297 switch (GET_CODE (op))
2298 {
2299 case CONST_DOUBLE:
2300 return TRUE;
2301
2302 case MEM:
2303 return memory_address_p (DFmode, XEXP (op, 0));
2304
2305 default:
2306 return FALSE;
2307 }
2308}
2309
cce8749e
CH
2310/* Return TRUE for valid index operands. */
2311
2312int
2313index_operand (op, mode)
2314 rtx op;
2315 enum machine_mode mode;
2316{
ff9940b0
RE
2317 return (s_register_operand(op, mode)
2318 || (immediate_operand (op, mode)
2319 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
f3bb6135 2320}
cce8749e 2321
ff9940b0
RE
2322/* Return TRUE for valid shifts by a constant. This also accepts any
2323 power of two on the (somewhat overly relaxed) assumption that the
2324 shift operator in this case was a mult. */
2325
2326int
2327const_shift_operand (op, mode)
2328 rtx op;
2329 enum machine_mode mode;
2330{
2331 return (power_of_two_operand (op, mode)
2332 || (immediate_operand (op, mode)
2333 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
f3bb6135 2334}
ff9940b0 2335
cce8749e
CH
2336/* Return TRUE for arithmetic operators which can be combined with a multiply
2337 (shift). */
2338
2339int
2340shiftable_operator (x, mode)
2341 rtx x;
2342 enum machine_mode mode;
2343{
2344 if (GET_MODE (x) != mode)
2345 return FALSE;
2346 else
2347 {
2348 enum rtx_code code = GET_CODE (x);
2349
2350 return (code == PLUS || code == MINUS
2351 || code == IOR || code == XOR || code == AND);
2352 }
f3bb6135 2353}
cce8749e
CH
2354
2355/* Return TRUE for shift operators. */
2356
2357int
2358shift_operator (x, mode)
2359 rtx x;
2360 enum machine_mode mode;
2361{
2362 if (GET_MODE (x) != mode)
2363 return FALSE;
2364 else
2365 {
2366 enum rtx_code code = GET_CODE (x);
2367
ff9940b0 2368 if (code == MULT)
aec3cfba 2369 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 2370
e2c671ba
RE
2371 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2372 || code == ROTATERT);
cce8749e 2373 }
f3bb6135 2374}
ff9940b0
RE
2375
2376 /* Return TRUE if x is EQ or NE.  */

int
equality_operator (x, mode)
f3bb6135 2377 rtx x;
74bbc178 2378 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2379{
f3bb6135 2380 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
2381}
2382
2383/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
2384
2385int
2386minmax_operator (x, mode)
2387 rtx x;
2388 enum machine_mode mode;
2389{
2390 enum rtx_code code = GET_CODE (x);
2391
2392 if (GET_MODE (x) != mode)
2393 return FALSE;
f3bb6135 2394
ff9940b0 2395 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 2396}
ff9940b0
RE
2397
2399
2400 /* Return TRUE if this is the condition code register; if we aren't given
2401    a mode, accept any class CCmode register.  */
2402
2403int
2404cc_register (x, mode)
f3bb6135
RE
2405 rtx x;
2406 enum machine_mode mode;
ff9940b0
RE
2407{
2408 if (mode == VOIDmode)
2409 {
2410 mode = GET_MODE (x);
2411 if (GET_MODE_CLASS (mode) != MODE_CC)
2412 return FALSE;
2413 }
f3bb6135 2414
ff9940b0
RE
2415 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2416 return TRUE;
f3bb6135 2417
ff9940b0
RE
2418 return FALSE;
2419}
5bbe2d40
RE
2420
2421 /* Return TRUE if this is the condition code register; if we aren't given
84ed5e79
RE
2422 a mode, accept any class CCmode register which indicates a dominance
2423 expression. */
5bbe2d40
RE
2424
2425int
84ed5e79 2426dominant_cc_register (x, mode)
5bbe2d40
RE
2427 rtx x;
2428 enum machine_mode mode;
2429{
2430 if (mode == VOIDmode)
2431 {
2432 mode = GET_MODE (x);
84ed5e79 2433 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
2434 return FALSE;
2435 }
2436
84ed5e79
RE
2437 if (mode != CC_DNEmode && mode != CC_DEQmode
2438 && mode != CC_DLEmode && mode != CC_DLTmode
2439 && mode != CC_DGEmode && mode != CC_DGTmode
2440 && mode != CC_DLEUmode && mode != CC_DLTUmode
2441 && mode != CC_DGEUmode && mode != CC_DGTUmode)
2442 return FALSE;
2443
5bbe2d40
RE
2444 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2445 return TRUE;
2446
2447 return FALSE;
2448}
2449
2b835d68
RE
2450/* Return TRUE if X references a SYMBOL_REF. */
2451int
2452symbol_mentioned_p (x)
2453 rtx x;
2454{
6f7d635c 2455 register const char * fmt;
2b835d68
RE
2456 register int i;
2457
2458 if (GET_CODE (x) == SYMBOL_REF)
2459 return 1;
2460
2461 fmt = GET_RTX_FORMAT (GET_CODE (x));
2462 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2463 {
2464 if (fmt[i] == 'E')
2465 {
2466 register int j;
2467
2468 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2469 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2470 return 1;
2471 }
2472 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2473 return 1;
2474 }
2475
2476 return 0;
2477}
2478
2479/* Return TRUE if X references a LABEL_REF. */
2480int
2481label_mentioned_p (x)
2482 rtx x;
2483{
6f7d635c 2484 register const char * fmt;
2b835d68
RE
2485 register int i;
2486
2487 if (GET_CODE (x) == LABEL_REF)
2488 return 1;
2489
2490 fmt = GET_RTX_FORMAT (GET_CODE (x));
2491 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2492 {
2493 if (fmt[i] == 'E')
2494 {
2495 register int j;
2496
2497 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2498 if (label_mentioned_p (XVECEXP (x, i, j)))
2499 return 1;
2500 }
2501 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2502 return 1;
2503 }
2504
2505 return 0;
2506}
2507
ff9940b0
RE
2508enum rtx_code
2509minmax_code (x)
f3bb6135 2510 rtx x;
ff9940b0
RE
2511{
2512 enum rtx_code code = GET_CODE (x);
2513
2514 if (code == SMAX)
2515 return GE;
f3bb6135 2516 else if (code == SMIN)
ff9940b0 2517 return LE;
f3bb6135 2518 else if (code == UMIN)
ff9940b0 2519 return LEU;
f3bb6135 2520 else if (code == UMAX)
ff9940b0 2521 return GEU;
f3bb6135 2522
ff9940b0
RE
2523 abort ();
2524}
2525
2526/* Return 1 if memory locations are adjacent */
2527
f3bb6135 2528int
ff9940b0
RE
2529adjacent_mem_locations (a, b)
2530 rtx a, b;
2531{
2532 int val0 = 0, val1 = 0;
2533 int reg0, reg1;
2534
2535 if ((GET_CODE (XEXP (a, 0)) == REG
2536 || (GET_CODE (XEXP (a, 0)) == PLUS
2537 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2538 && (GET_CODE (XEXP (b, 0)) == REG
2539 || (GET_CODE (XEXP (b, 0)) == PLUS
2540 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
2541 {
2542 if (GET_CODE (XEXP (a, 0)) == PLUS)
2543 {
2544 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2545 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2546 }
2547 else
2548 reg0 = REGNO (XEXP (a, 0));
2549 if (GET_CODE (XEXP (b, 0)) == PLUS)
2550 {
2551 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2552 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2553 }
2554 else
2555 reg1 = REGNO (XEXP (b, 0));
2556 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2557 }
2558 return 0;
2559}
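
/* So [r4] and [r4, #4] are adjacent (in either order), while [r4] and
   [r4, #8], or [r4] and [r5, #4], are not.  */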
2560
2561/* Return 1 if OP is a load multiple operation. It is known to be
2562 parallel and the first section will be tested. */
2563
f3bb6135 2564int
ff9940b0
RE
2565load_multiple_operation (op, mode)
2566 rtx op;
74bbc178 2567 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2568{
f3bb6135 2569 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
2570 int dest_regno;
2571 rtx src_addr;
f3bb6135 2572 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
2573 rtx elt;
2574
2575 if (count <= 1
2576 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2577 return 0;
2578
2579 /* Check to see if this might be a write-back */
2580 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2581 {
2582 i++;
2583 base = 1;
2584
2585 /* Now check it more carefully */
2586 if (GET_CODE (SET_DEST (elt)) != REG
2587 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2588 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2589 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2590 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2591 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2592 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2593 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2594 != REGNO (SET_DEST (elt)))
2595 return 0;
f3bb6135 2596
ff9940b0
RE
2597 count--;
2598 }
2599
2600 /* Perform a quick check so we don't blow up below. */
2601 if (count <= i
2602 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2603 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2604 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
2605 return 0;
2606
2607 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2608 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2609
2610 for (; i < count; i++)
2611 {
ed4c4348 2612 elt = XVECEXP (op, 0, i);
ff9940b0
RE
2613
2614 if (GET_CODE (elt) != SET
2615 || GET_CODE (SET_DEST (elt)) != REG
2616 || GET_MODE (SET_DEST (elt)) != SImode
2617 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2618 || GET_CODE (SET_SRC (elt)) != MEM
2619 || GET_MODE (SET_SRC (elt)) != SImode
2620 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2621 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2622 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2623 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2624 return 0;
2625 }
2626
2627 return 1;
2628}
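
/* A two-register write-back load, for example, presents as (modes and
   memory attributes omitted):

     (parallel [(set (reg r1) (plus (reg r1) (const_int 8)))
		(set (reg r2) (mem (reg r1)))
		(set (reg r3) (mem (plus (reg r1) (const_int 4))))
		(clobber (reg r1))])

   which satisfies every check above.  */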
2629
2630/* Return 1 if OP is a store multiple operation. It is known to be
2631 parallel and the first section will be tested. */
2632
f3bb6135 2633int
ff9940b0
RE
2634store_multiple_operation (op, mode)
2635 rtx op;
74bbc178 2636 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2637{
f3bb6135 2638 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
2639 int src_regno;
2640 rtx dest_addr;
f3bb6135 2641 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
2642 rtx elt;
2643
2644 if (count <= 1
2645 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2646 return 0;
2647
2648 /* Check to see if this might be a write-back */
2649 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2650 {
2651 i++;
2652 base = 1;
2653
2654 /* Now check it more carefully */
2655 if (GET_CODE (SET_DEST (elt)) != REG
2656 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2657 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2658 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2659 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2660 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2661 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2662 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2663 != REGNO (SET_DEST (elt)))
2664 return 0;
f3bb6135 2665
ff9940b0
RE
2666 count--;
2667 }
2668
2669 /* Perform a quick check so we don't blow up below. */
2670 if (count <= i
2671 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2672 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
2673 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
2674 return 0;
2675
2676 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
2677 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
2678
2679 for (; i < count; i++)
2680 {
2681 elt = XVECEXP (op, 0, i);
2682
2683 if (GET_CODE (elt) != SET
2684 || GET_CODE (SET_SRC (elt)) != REG
2685 || GET_MODE (SET_SRC (elt)) != SImode
2686 || REGNO (SET_SRC (elt)) != src_regno + i - base
2687 || GET_CODE (SET_DEST (elt)) != MEM
2688 || GET_MODE (SET_DEST (elt)) != SImode
2689 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2690 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2691 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2692 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
2693 return 0;
2694 }
2695
2696 return 1;
2697}
e2c671ba 2698
84ed5e79
RE
2699int
2700load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 2701 rtx * operands;
84ed5e79 2702 int nops;
62b10bbc
NC
2703 int * regs;
2704 int * base;
2705 HOST_WIDE_INT * load_offset;
84ed5e79
RE
2706{
2707 int unsorted_regs[4];
2708 HOST_WIDE_INT unsorted_offsets[4];
2709 int order[4];
ad076f4e 2710 int base_reg = -1;
84ed5e79
RE
2711 int i;
2712
2713 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2714 extended if required. */
2715 if (nops < 2 || nops > 4)
2716 abort ();
2717
2718 /* Loop over the operands and check that the memory references are
2719      suitable (i.e. immediate offsets from the same base register).  At
2720 the same time, extract the target register, and the memory
2721 offsets. */
2722 for (i = 0; i < nops; i++)
2723 {
2724 rtx reg;
2725 rtx offset;
2726
56636818
JL
2727 /* Convert a subreg of a mem into the mem itself. */
2728 if (GET_CODE (operands[nops + i]) == SUBREG)
2729 operands[nops + i] = alter_subreg(operands[nops + i]);
2730
84ed5e79
RE
2731 if (GET_CODE (operands[nops + i]) != MEM)
2732 abort ();
2733
2734 /* Don't reorder volatile memory references; it doesn't seem worth
2735 looking for the case where the order is ok anyway. */
2736 if (MEM_VOLATILE_P (operands[nops + i]))
2737 return 0;
2738
2739 offset = const0_rtx;
2740
2741 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2742 || (GET_CODE (reg) == SUBREG
2743 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2744 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2745 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2746 == REG)
2747 || (GET_CODE (reg) == SUBREG
2748 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2749 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2750 == CONST_INT)))
2751 {
2752 if (i == 0)
2753 {
2754 base_reg = REGNO(reg);
2755 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2756 ? REGNO (operands[i])
2757 : REGNO (SUBREG_REG (operands[i])));
2758 order[0] = 0;
2759 }
2760 else
2761 {
2762 if (base_reg != REGNO (reg))
2763 /* Not addressed from the same base register. */
2764 return 0;
2765
2766 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2767 ? REGNO (operands[i])
2768 : REGNO (SUBREG_REG (operands[i])));
2769 if (unsorted_regs[i] < unsorted_regs[order[0]])
2770 order[0] = i;
2771 }
2772
2773 /* If it isn't an integer register, or if it overwrites the
2774 base register but isn't the last insn in the list, then
2775 we can't do this. */
2776 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
2777 || (i != nops - 1 && unsorted_regs[i] == base_reg))
2778 return 0;
2779
2780 unsorted_offsets[i] = INTVAL (offset);
2781 }
2782 else
2783 /* Not a suitable memory address. */
2784 return 0;
2785 }
2786
2787 /* All the useful information has now been extracted from the
2788 operands into unsorted_regs and unsorted_offsets; additionally,
2789 order[0] has been set to the lowest numbered register in the
2790 list. Sort the registers into order, and check that the memory
2791 offsets are ascending and adjacent. */
2792
2793 for (i = 1; i < nops; i++)
2794 {
2795 int j;
2796
2797 order[i] = order[i - 1];
2798 for (j = 0; j < nops; j++)
2799 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2800 && (order[i] == order[i - 1]
2801 || unsorted_regs[j] < unsorted_regs[order[i]]))
2802 order[i] = j;
2803
2804      /* Have we found a suitable register?  If not, one must be used more
2805	 than once.  */
2806 if (order[i] == order[i - 1])
2807 return 0;
2808
2809 /* Is the memory address adjacent and ascending? */
2810 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2811 return 0;
2812 }
2813
2814 if (base)
2815 {
2816 *base = base_reg;
2817
2818 for (i = 0; i < nops; i++)
2819 regs[i] = unsorted_regs[order[i]];
2820
2821 *load_offset = unsorted_offsets[order[0]];
2822 }
2823
2824 if (unsorted_offsets[order[0]] == 0)
2825 return 1; /* ldmia */
2826
2827 if (unsorted_offsets[order[0]] == 4)
2828 return 2; /* ldmib */
2829
2830 if (unsorted_offsets[order[nops - 1]] == 0)
2831 return 3; /* ldmda */
2832
2833 if (unsorted_offsets[order[nops - 1]] == -4)
2834 return 4; /* ldmdb */
2835
949d79eb
RE
2836 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
2837 if the offset isn't small enough. The reason 2 ldrs are faster
2838 is because these ARMs are able to do more than one cache access
2839 in a single cycle. The ARM9 and StrongARM have Harvard caches,
2840 whilst the ARM8 has a double bandwidth cache. This means that
2841 these cores can do both an instruction fetch and a data fetch in
2842 a single cycle, so the trick of calculating the address into a
2843 scratch register (one of the result regs) and then doing a load
2844 multiple actually becomes slower (and no smaller in code size).
2845 That is the transformation
6cc8c0b3
NC
2846
2847 ldr rd1, [rbase + offset]
2848 ldr rd2, [rbase + offset + 4]
2849
2850 to
2851
2852 add rd1, rbase, offset
2853 ldmia rd1, {rd1, rd2}
2854
949d79eb
RE
2855 produces worse code -- '3 cycles + any stalls on rd2' instead of
2856 '2 cycles + any stalls on rd2'. On ARMs with only one cache
2857 access per cycle, the first sequence could never complete in less
2858 than 6 cycles, whereas the ldm sequence would only take 5 and
2859 would make better use of sequential accesses if not hitting the
2860 cache.
2861
2862 We cheat here and test 'arm_ld_sched' which we currently know to
2863 only be true for the ARM8, ARM9 and StrongARM. If this ever
2864 changes, then the test below needs to be reworked. */
f5a1b0d2 2865 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
2866 return 0;
2867
84ed5e79
RE
2868 /* Can't do it without setting up the offset, only do this if it takes
2869 no more than one insn. */
2870 return (const_ok_for_arm (unsorted_offsets[order[0]])
2871 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
2872}
2873
2874char *
2875emit_ldm_seq (operands, nops)
62b10bbc 2876 rtx * operands;
84ed5e79
RE
2877 int nops;
2878{
2879 int regs[4];
2880 int base_reg;
2881 HOST_WIDE_INT offset;
2882 char buf[100];
2883 int i;
2884
2885 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2886 {
2887 case 1:
2888 strcpy (buf, "ldm%?ia\t");
2889 break;
2890
2891 case 2:
2892 strcpy (buf, "ldm%?ib\t");
2893 break;
2894
2895 case 3:
2896 strcpy (buf, "ldm%?da\t");
2897 break;
2898
2899 case 4:
2900 strcpy (buf, "ldm%?db\t");
2901 break;
2902
2903 case 5:
2904 if (offset >= 0)
2905 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2906 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2907 (long) offset);
2908 else
2909 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2910 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2911 (long) -offset);
2912 output_asm_insn (buf, operands);
2913 base_reg = regs[0];
2914 strcpy (buf, "ldm%?ia\t");
2915 break;
2916
2917 default:
2918 abort ();
2919 }
2920
2921 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2922 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2923
2924 for (i = 1; i < nops; i++)
2925 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2926 reg_names[regs[i]]);
2927
2928 strcat (buf, "}\t%@ phole ldm");
2929
2930 output_asm_insn (buf, operands);
2931 return "";
2932}
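
/* For the ldmia case the output, after %-substitution, looks something
   like

     ldmia	r4, {r0, r1, r2}	@ phole ldm

   while case 5 first materialises base + offset into the lowest
   destination register with an add or sub.  */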
2933
2934int
2935store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 2936 rtx * operands;
84ed5e79 2937 int nops;
62b10bbc
NC
2938 int * regs;
2939 int * base;
2940 HOST_WIDE_INT * load_offset;
84ed5e79
RE
2941{
2942 int unsorted_regs[4];
2943 HOST_WIDE_INT unsorted_offsets[4];
2944 int order[4];
ad076f4e 2945 int base_reg = -1;
84ed5e79
RE
2946 int i;
2947
2948 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2949 extended if required. */
2950 if (nops < 2 || nops > 4)
2951 abort ();
2952
2953 /* Loop over the operands and check that the memory references are
2954      suitable (i.e. immediate offsets from the same base register).  At
2955 the same time, extract the target register, and the memory
2956 offsets. */
2957 for (i = 0; i < nops; i++)
2958 {
2959 rtx reg;
2960 rtx offset;
2961
56636818
JL
2962 /* Convert a subreg of a mem into the mem itself. */
2963 if (GET_CODE (operands[nops + i]) == SUBREG)
2964 operands[nops + i] = alter_subreg(operands[nops + i]);
2965
84ed5e79
RE
2966 if (GET_CODE (operands[nops + i]) != MEM)
2967 abort ();
2968
2969 /* Don't reorder volatile memory references; it doesn't seem worth
2970 looking for the case where the order is ok anyway. */
2971 if (MEM_VOLATILE_P (operands[nops + i]))
2972 return 0;
2973
2974 offset = const0_rtx;
2975
2976 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2977 || (GET_CODE (reg) == SUBREG
2978 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2979 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2980 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2981 == REG)
2982 || (GET_CODE (reg) == SUBREG
2983 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2984 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2985 == CONST_INT)))
2986 {
2987 if (i == 0)
2988 {
62b10bbc 2989 base_reg = REGNO (reg);
84ed5e79
RE
2990 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2991 ? REGNO (operands[i])
2992 : REGNO (SUBREG_REG (operands[i])));
2993 order[0] = 0;
2994 }
2995 else
2996 {
2997 if (base_reg != REGNO (reg))
2998 /* Not addressed from the same base register. */
2999 return 0;
3000
3001 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3002 ? REGNO (operands[i])
3003 : REGNO (SUBREG_REG (operands[i])));
3004 if (unsorted_regs[i] < unsorted_regs[order[0]])
3005 order[0] = i;
3006 }
3007
3008 /* If it isn't an integer register, then we can't do this. */
3009 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3010 return 0;
3011
3012 unsorted_offsets[i] = INTVAL (offset);
3013 }
3014 else
3015 /* Not a suitable memory address. */
3016 return 0;
3017 }
3018
3019 /* All the useful information has now been extracted from the
3020 operands into unsorted_regs and unsorted_offsets; additionally,
3021 order[0] has been set to the lowest numbered register in the
3022 list. Sort the registers into order, and check that the memory
3023 offsets are ascending and adjacent. */
3024
3025 for (i = 1; i < nops; i++)
3026 {
3027 int j;
3028
3029 order[i] = order[i - 1];
3030 for (j = 0; j < nops; j++)
3031 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3032 && (order[i] == order[i - 1]
3033 || unsorted_regs[j] < unsorted_regs[order[i]]))
3034 order[i] = j;
3035
3036      /* Have we found a suitable register?  If not, one must be used more
3037	 than once.  */
3038 if (order[i] == order[i - 1])
3039 return 0;
3040
3041 /* Is the memory address adjacent and ascending? */
3042 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3043 return 0;
3044 }
3045
3046 if (base)
3047 {
3048 *base = base_reg;
3049
3050 for (i = 0; i < nops; i++)
3051 regs[i] = unsorted_regs[order[i]];
3052
3053 *load_offset = unsorted_offsets[order[0]];
3054 }
3055
3056 if (unsorted_offsets[order[0]] == 0)
3057 return 1; /* stmia */
3058
3059 if (unsorted_offsets[order[0]] == 4)
3060 return 2; /* stmib */
3061
3062 if (unsorted_offsets[order[nops - 1]] == 0)
3063 return 3; /* stmda */
3064
3065 if (unsorted_offsets[order[nops - 1]] == -4)
3066 return 4; /* stmdb */
3067
3068 return 0;
3069}
3070
3071char *
3072emit_stm_seq (operands, nops)
62b10bbc 3073 rtx * operands;
84ed5e79
RE
3074 int nops;
3075{
3076 int regs[4];
3077 int base_reg;
3078 HOST_WIDE_INT offset;
3079 char buf[100];
3080 int i;
3081
3082 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3083 {
3084 case 1:
3085 strcpy (buf, "stm%?ia\t");
3086 break;
3087
3088 case 2:
3089 strcpy (buf, "stm%?ib\t");
3090 break;
3091
3092 case 3:
3093 strcpy (buf, "stm%?da\t");
3094 break;
3095
3096 case 4:
3097 strcpy (buf, "stm%?db\t");
3098 break;
3099
3100 default:
3101 abort ();
3102 }
3103
3104 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3105 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3106
3107 for (i = 1; i < nops; i++)
3108 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3109 reg_names[regs[i]]);
3110
3111 strcat (buf, "}\t%@ phole stm");
3112
3113 output_asm_insn (buf, operands);
3114 return "";
3115}
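
/* As with emit_ldm_seq, the substituted output looks something like

     stmia	r4, {r0, r1, r2}	@ phole stm

   with no case-5 offset fix-up, since store_multiple_sequence never
   returns 5.  */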
3116
e2c671ba
RE
3117int
3118multi_register_push (op, mode)
0a81f500 3119 rtx op;
74bbc178 3120 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3121{
3122 if (GET_CODE (op) != PARALLEL
3123 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3124 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3125 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3126 return 0;
3127
3128 return 1;
3129}
3130
ff9940b0 3131\f
f3bb6135
RE
3132/* Routines for use with attributes */
3133
31fdb4d5
DE
3134/* Return nonzero if ATTR is a valid attribute for DECL.
3135 ATTRIBUTES are any existing attributes and ARGS are the arguments
3136 supplied with ATTR.
3137
3138 Supported attributes:
3139
3140    naked: don't output any prologue or epilogue code; the user is assumed
3141 to do the right thing. */
3142
3143int
74bbc178 3144arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3145 tree decl;
31fdb4d5
DE
3146 tree attr;
3147 tree args;
3148{
3149 if (args != NULL_TREE)
3150 return 0;
3151
3152 if (is_attribute_p ("naked", attr))
3153 return TREE_CODE (decl) == FUNCTION_DECL;
3154 return 0;
3155}
3156
3157/* Return non-zero if FUNC is a naked function. */
3158
3159static int
3160arm_naked_function_p (func)
3161 tree func;
3162{
3163 tree a;
3164
3165 if (TREE_CODE (func) != FUNCTION_DECL)
3166 abort ();
2e943e99 3167
31fdb4d5
DE
3168 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3169 return a != NULL_TREE;
3170}
f3bb6135 3171\f
ff9940b0
RE
3172/* Routines for use in generating RTL */
3173
f3bb6135 3174rtx
56636818 3175arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3176 in_struct_p, scalar_p)
ff9940b0
RE
3177 int base_regno;
3178 int count;
3179 rtx from;
3180 int up;
3181 int write_back;
56636818
JL
3182 int unchanging_p;
3183 int in_struct_p;
c6df88cb 3184 int scalar_p;
ff9940b0
RE
3185{
3186 int i = 0, j;
3187 rtx result;
3188 int sign = up ? 1 : -1;
56636818 3189 rtx mem;
ff9940b0 3190
43cffd11
RE
3191 result = gen_rtx_PARALLEL (VOIDmode,
3192 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3193 if (write_back)
f3bb6135 3194 {
ff9940b0 3195 XVECEXP (result, 0, 0)
43cffd11
RE
3196 = gen_rtx_SET (GET_MODE (from), from,
3197 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3198 i = 1;
3199 count++;
f3bb6135
RE
3200 }
3201
ff9940b0 3202 for (j = 0; i < count; i++, j++)
f3bb6135 3203 {
43cffd11 3204 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3205 RTX_UNCHANGING_P (mem) = unchanging_p;
3206 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3207 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3208 XVECEXP (result, 0, i)
3209 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3210 }
3211
ff9940b0 3212 if (write_back)
43cffd11 3213 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, from);
ff9940b0
RE
3214
3215 return result;
3216}
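
/* As an illustration, arm_gen_load_multiple (0, 2, from, TRUE, TRUE, ...)
   builds (modes and memory flags omitted):

     (parallel [(set from (plus from (const_int 8)))
		(set (reg 0) (mem from))
		(set (reg 1) (mem (plus from (const_int 4))))
		(clobber from)])

   which is exactly the shape load_multiple_operation accepts.  */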
3217
f3bb6135 3218rtx
56636818 3219arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3220 in_struct_p, scalar_p)
ff9940b0
RE
3221 int base_regno;
3222 int count;
3223 rtx to;
3224 int up;
3225 int write_back;
56636818
JL
3226 int unchanging_p;
3227 int in_struct_p;
c6df88cb 3228 int scalar_p;
ff9940b0
RE
3229{
3230 int i = 0, j;
3231 rtx result;
3232 int sign = up ? 1 : -1;
56636818 3233 rtx mem;
ff9940b0 3234
43cffd11
RE
3235 result = gen_rtx_PARALLEL (VOIDmode,
3236 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3237 if (write_back)
f3bb6135 3238 {
ff9940b0 3239 XVECEXP (result, 0, 0)
43cffd11
RE
3240 = gen_rtx_SET (GET_MODE (to), to,
3241 plus_constant (to, count * 4 * sign));
ff9940b0
RE
3242 i = 1;
3243 count++;
f3bb6135
RE
3244 }
3245
ff9940b0 3246 for (j = 0; i < count; i++, j++)
f3bb6135 3247 {
43cffd11 3248 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
3249 RTX_UNCHANGING_P (mem) = unchanging_p;
3250 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3251 MEM_SCALAR_P (mem) = scalar_p;
56636818 3252
43cffd11
RE
3253 XVECEXP (result, 0, i)
3254 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
3255 }
3256
ff9940b0 3257 if (write_back)
43cffd11 3258 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, to);
ff9940b0
RE
3259
3260 return result;
3261}
3262
880e2516
RE
3263int
3264arm_gen_movstrqi (operands)
62b10bbc 3265 rtx * operands;
880e2516
RE
3266{
3267 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 3268 int i;
880e2516 3269 rtx src, dst;
ad076f4e 3270 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 3271 rtx part_bytes_reg = NULL;
56636818
JL
3272 rtx mem;
3273 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 3274 int dst_scalar_p, src_scalar_p;
880e2516
RE
3275
3276 if (GET_CODE (operands[2]) != CONST_INT
3277 || GET_CODE (operands[3]) != CONST_INT
3278 || INTVAL (operands[2]) > 64
3279 || INTVAL (operands[3]) & 3)
3280 return 0;
3281
3282 st_dst = XEXP (operands[0], 0);
3283 st_src = XEXP (operands[1], 0);
56636818
JL
3284
3285 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3286 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 3287 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
3288 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3289 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 3290 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 3291
880e2516
RE
3292 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3293 fin_src = src = copy_to_mode_reg (SImode, st_src);
3294
3295 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
3296 out_words_to_go = INTVAL (operands[2]) / 4;
3297 last_bytes = INTVAL (operands[2]) & 3;
3298
3299 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 3300 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
3301
3302 for (i = 0; in_words_to_go >= 2; i+=4)
3303 {
bd9c7e23 3304 if (in_words_to_go > 4)
56636818 3305 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
3306 src_unchanging_p,
3307 src_in_struct_p,
3308 src_scalar_p));
bd9c7e23
RE
3309 else
3310 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 3311 FALSE, src_unchanging_p,
c6df88cb 3312 src_in_struct_p, src_scalar_p));
bd9c7e23 3313
880e2516
RE
3314 if (out_words_to_go)
3315 {
bd9c7e23 3316 if (out_words_to_go > 4)
56636818
JL
3317 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3318 dst_unchanging_p,
c6df88cb
MM
3319 dst_in_struct_p,
3320 dst_scalar_p));
bd9c7e23
RE
3321 else if (out_words_to_go != 1)
3322 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3323 dst, TRUE,
3324 (last_bytes == 0
56636818
JL
3325 ? FALSE : TRUE),
3326 dst_unchanging_p,
c6df88cb
MM
3327 dst_in_struct_p,
3328 dst_scalar_p));
880e2516
RE
3329 else
3330 {
43cffd11 3331 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
3332 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3333 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3334 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3335 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
3336 if (last_bytes != 0)
3337 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
3338 }
3339 }
3340
3341 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3342 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
3343 }
3344
3345 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
3346 if (out_words_to_go)
62b10bbc
NC
3347 {
3348 rtx sreg;
3349
3350 mem = gen_rtx_MEM (SImode, src);
3351 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3352 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
3353 MEM_SCALAR_P (mem) = src_scalar_p;
3354 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
3355 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
3356
3357 mem = gen_rtx_MEM (SImode, dst);
3358 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3359 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3360 MEM_SCALAR_P (mem) = dst_scalar_p;
3361 emit_move_insn (mem, sreg);
3362 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
3363 in_words_to_go--;
3364
3365 if (in_words_to_go) /* Sanity check */
3366 abort ();
3367 }
880e2516
RE
3368
3369 if (in_words_to_go)
3370 {
3371 if (in_words_to_go < 0)
3372 abort ();
3373
43cffd11 3374 mem = gen_rtx_MEM (SImode, src);
56636818
JL
3375 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3376 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 3377 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 3378 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
3379 }
3380
3381 if (BYTES_BIG_ENDIAN && last_bytes)
3382 {
3383 rtx tmp = gen_reg_rtx (SImode);
3384
3385 if (part_bytes_reg == NULL)
3386 abort ();
3387
3388 /* The bytes we want are in the top end of the word */
bee06f3d
RE
3389 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
3390 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
3391 part_bytes_reg = tmp;
3392
3393 while (last_bytes)
3394 {
43cffd11 3395 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
3396 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3397 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3398 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3399 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 3400
880e2516
RE
3401 if (--last_bytes)
3402 {
3403 tmp = gen_reg_rtx (SImode);
3404 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3405 part_bytes_reg = tmp;
3406 }
3407 }
3408
3409 }
3410 else
3411 {
3412 while (last_bytes)
3413 {
3414 if (part_bytes_reg == NULL)
3415 abort ();
3416
43cffd11 3417 mem = gen_rtx_MEM (QImode, dst);
56636818
JL
3418 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3419 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3420 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3421 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 3422
880e2516
RE
3423 if (--last_bytes)
3424 {
3425 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23
RE
3426
3427 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
880e2516
RE
3428 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3429 part_bytes_reg = tmp;
3430 }
3431 }
3432 }
3433
3434 return 1;
3435}
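
/* A worked example: copying 10 bytes gives in_words_to_go = 3,
   out_words_to_go = 2 and last_bytes = 2, so two whole words are moved
   with a load/store multiple, the third word is loaded into
   part_bytes_reg, and its bottom two bytes are then stored one at a
   time.  */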
3436
5165176d
RE
3437/* Generate a memory reference for a half word, such that it will be loaded
3438 into the top 16 bits of the word. We can assume that the address is
3439 known to be alignable and of the form reg, or plus (reg, const). */
3440rtx
3441gen_rotated_half_load (memref)
3442 rtx memref;
3443{
3444 HOST_WIDE_INT offset = 0;
3445 rtx base = XEXP (memref, 0);
3446
3447 if (GET_CODE (base) == PLUS)
3448 {
3449 offset = INTVAL (XEXP (base, 1));
3450 base = XEXP (base, 0);
3451 }
3452
956d6950 3453 /* If we aren't allowed to generate unaligned addresses, then fail. */
5165176d
RE
3454 if (TARGET_SHORT_BY_BYTES
3455 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
3456 return NULL;
3457
43cffd11 3458 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
3459
3460 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
3461 return base;
3462
43cffd11 3463 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
3464}
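
/* Concretely, on a little-endian target a halfword at offset 0 comes
   back as (rotate (mem ...) 16), moving the low half into the top 16
   bits, while one at offset 2 already occupies the top half of its
   containing word and so is returned unrotated.  */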
3465
84ed5e79 3466static enum machine_mode
74bbc178 3467select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
3468 rtx x;
3469 rtx y;
3470 HOST_WIDE_INT cond_or;
3471{
3472 enum rtx_code cond1, cond2;
3473 int swapped = 0;
3474
3475 /* Currently we will probably get the wrong result if the individual
3476 comparisons are not simple. This also ensures that it is safe to
956d6950 3477 reverse a comparison if necessary. */
84ed5e79
RE
3478 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
3479 != CCmode)
3480 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
3481 != CCmode))
3482 return CCmode;
3483
3484 if (cond_or)
3485 cond1 = reverse_condition (cond1);
3486
3487 /* If the comparisons are not equal, and one doesn't dominate the other,
3488 then we can't do this. */
3489 if (cond1 != cond2
3490 && ! comparison_dominates_p (cond1, cond2)
3491 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
3492 return CCmode;
3493
3494 if (swapped)
3495 {
3496 enum rtx_code temp = cond1;
3497 cond1 = cond2;
3498 cond2 = temp;
3499 }
3500
3501 switch (cond1)
3502 {
3503 case EQ:
3504 if (cond2 == EQ || ! cond_or)
3505 return CC_DEQmode;
3506
3507 switch (cond2)
3508 {
3509 case LE: return CC_DLEmode;
3510 case LEU: return CC_DLEUmode;
3511 case GE: return CC_DGEmode;
3512 case GEU: return CC_DGEUmode;
ad076f4e 3513 default: break;
84ed5e79
RE
3514 }
3515
3516 break;
3517
3518 case LT:
3519 if (cond2 == LT || ! cond_or)
3520 return CC_DLTmode;
3521 if (cond2 == LE)
3522 return CC_DLEmode;
3523 if (cond2 == NE)
3524 return CC_DNEmode;
3525 break;
3526
3527 case GT:
3528 if (cond2 == GT || ! cond_or)
3529 return CC_DGTmode;
3530 if (cond2 == GE)
3531 return CC_DGEmode;
3532 if (cond2 == NE)
3533 return CC_DNEmode;
3534 break;
3535
3536 case LTU:
3537 if (cond2 == LTU || ! cond_or)
3538 return CC_DLTUmode;
3539 if (cond2 == LEU)
3540 return CC_DLEUmode;
3541 if (cond2 == NE)
3542 return CC_DNEmode;
3543 break;
3544
3545 case GTU:
3546 if (cond2 == GTU || ! cond_or)
3547 return CC_DGTUmode;
3548 if (cond2 == GEU)
3549 return CC_DGEUmode;
3550 if (cond2 == NE)
3551 return CC_DNEmode;
3552 break;
3553
3554 /* The remaining cases only occur when both comparisons are the
3555 same. */
3556 case NE:
3557 return CC_DNEmode;
3558
3559 case LE:
3560 return CC_DLEmode;
3561
3562 case GE:
3563 return CC_DGEmode;
3564
3565 case LEU:
3566 return CC_DLEUmode;
3567
3568 case GEU:
3569 return CC_DGEUmode;
ad076f4e
RE
3570
3571 default:
3572 break;
84ed5e79
RE
3573 }
3574
3575 abort ();
3576}
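
/* As an example, for (and (lt x y) (le a b)) with COND_OR zero, LT
   dominates LE, so the switch above picks the stricter CC_DLTmode; for
   two unrelated conditions such as LT and GEU neither dominates, and
   CCmode is returned to abandon the combination.  */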
3577
3578enum machine_mode
3579arm_select_cc_mode (op, x, y)
3580 enum rtx_code op;
3581 rtx x;
3582 rtx y;
3583{
3584 /* All floating point compares return CCFP if it is an equality
3585 comparison, and CCFPE otherwise. */
3586 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3587 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
3588
3589 /* A compare with a shifted operand. Because of canonicalization, the
3590 comparison will have to be swapped when we emit the assembler. */
3591 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
3592 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3593 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
3594 || GET_CODE (x) == ROTATERT))
3595 return CC_SWPmode;
3596
956d6950
JL
3597 /* This is a special case that is used by combine to allow a
3598 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 3599 followed by a comparison of the shifted integer (only valid for
956d6950 3600 equalities and unsigned inequalities). */
84ed5e79
RE
3601 if (GET_MODE (x) == SImode
3602 && GET_CODE (x) == ASHIFT
3603 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
3604 && GET_CODE (XEXP (x, 0)) == SUBREG
3605 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
3606 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
3607 && (op == EQ || op == NE
3608 || op == GEU || op == GTU || op == LTU || op == LEU)
3609 && GET_CODE (y) == CONST_INT)
3610 return CC_Zmode;
3611
3612  /* For an operation that sets the condition codes as a side-effect, the
3613     V flag is not set correctly, so we can only use comparisons where
3614     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
3615     instead.)  */
3616 if (GET_MODE (x) == SImode
3617 && y == const0_rtx
3618 && (op == EQ || op == NE || op == LT || op == GE)
3619 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
3620 || GET_CODE (x) == AND || GET_CODE (x) == IOR
3621 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
3622 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
3623 || GET_CODE (x) == LSHIFTRT
3624 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3625 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
3626 return CC_NOOVmode;
3627
3628 /* A construct for a conditional compare, if the false arm contains
3629 0, then both conditions must be true, otherwise either condition
3630 must be true. Not all conditions are possible, so CCmode is
3631 returned if it can't be done. */
3632 if (GET_CODE (x) == IF_THEN_ELSE
3633 && (XEXP (x, 2) == const0_rtx
3634 || XEXP (x, 2) == const1_rtx)
3635 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3636 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
74bbc178 3637 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
84ed5e79
RE
3638 INTVAL (XEXP (x, 2)));
3639
3640 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
3641 return CC_Zmode;
3642
bd9c7e23
RE
3643 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
3644 && GET_CODE (x) == PLUS
3645 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
3646 return CC_Cmode;
3647
84ed5e79
RE
3648 return CCmode;
3649}
3650
ff9940b0
RE
3651 /* X and Y are two things to compare using CODE.  Emit the compare insn
3652    and return the rtx for register 0 in the proper mode.  */
3654
3655rtx
74bbc178 3656gen_compare_reg (code, x, y)
ff9940b0
RE
3657 enum rtx_code code;
3658 rtx x, y;
3659{
3660 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
43cffd11 3661 rtx cc_reg = gen_rtx_REG (mode, 24);
ff9940b0 3662
43cffd11
RE
3663 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
3664 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
3665
3666 return cc_reg;
3667}
3668
0a81f500
RE
3669void
3670arm_reload_in_hi (operands)
62b10bbc 3671 rtx * operands;
0a81f500 3672{
f9cc092a
RE
3673 rtx ref = operands[1];
3674 rtx base, scratch;
3675 HOST_WIDE_INT offset = 0;
3676
3677 if (GET_CODE (ref) == SUBREG)
3678 {
3679 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
3680 if (BYTES_BIG_ENDIAN)
3681 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
3682 - MIN (UNITS_PER_WORD,
3683 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
3684 ref = SUBREG_REG (ref);
3685 }
3686
3687 if (GET_CODE (ref) == REG)
3688 {
3689 /* We have a pseudo which has been spilt onto the stack; there
3690 are two cases here: the first where there is a simple
3691 stack-slot replacement and a second where the stack-slot is
3692 out of range, or is used as a subreg. */
3693 if (reg_equiv_mem[REGNO (ref)])
3694 {
3695 ref = reg_equiv_mem[REGNO (ref)];
3696 base = find_replacement (&XEXP (ref, 0));
3697 }
3698 else
3699 /* The slot is out of range, or was dressed up in a SUBREG */
3700 base = reg_equiv_address[REGNO (ref)];
3701 }
3702 else
3703 base = find_replacement (&XEXP (ref, 0));
0a81f500 3704
e5e809f4
JL
3705 /* Handle the case where the address is too complex to be offset by 1. */
3706 if (GET_CODE (base) == MINUS
3707 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
3708 {
f9cc092a 3709 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 3710
43cffd11 3711 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
3712 base = base_plus;
3713 }
f9cc092a
RE
3714 else if (GET_CODE (base) == PLUS)
3715 {
3716 /* The addend must be CONST_INT, or we would have dealt with it above */
3717 HOST_WIDE_INT hi, lo;
3718
3719 offset += INTVAL (XEXP (base, 1));
3720 base = XEXP (base, 0);
3721
3722 /* Rework the address into a legal sequence of insns */
3723 /* Valid range for lo is -4095 -> 4095 */
3724 lo = (offset >= 0
3725 ? (offset & 0xfff)
3726 : -((-offset) & 0xfff));
3727
3728 /* Corner case: if lo is the max offset, then we would be out of range
3729 once we have added the additional 1 below, so bump the msb into the
3730 pre-loading insn(s). */
3731 if (lo == 4095)
3732 lo &= 0x7ff;
3733
3734 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFF)
3735 ^ (HOST_WIDE_INT) 0x80000000)
3736 - (HOST_WIDE_INT) 0x80000000);
3737
3738 if (hi + lo != offset)
3739 abort ();
3740
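 /* Worked example (assumed offsets): offset = 0x1234 splits into
    lo = 0x234 and hi = 0x1000, while offset = -0x1234 splits into
    lo = -0x234 and hi = -0x1000.  The XOR/subtract pair above
    sign-extends bit 31 of (offset - lo) so that HI stays correct
    when HOST_WIDE_INT is wider than 32 bits.  */
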
3741 if (hi != 0)
3742 {
3743 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
3744
3745 /* Get the base address; addsi3 knows how to handle constants
3746 that require more than one insn */
3747 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
3748 base = base_plus;
3749 offset = lo;
3750 }
3751 }
e5e809f4 3752
f9cc092a
RE
3753 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
3754 emit_insn (gen_zero_extendqisi2 (scratch,
3755 gen_rtx_MEM (QImode,
3756 plus_constant (base,
3757 offset))));
43cffd11
RE
3758 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
3759 gen_rtx_MEM (QImode,
f9cc092a
RE
3760 plus_constant (base,
3761 offset + 1))));
b3b15f14 3762 if (! BYTES_BIG_ENDIAN)
43cffd11
RE
3763 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
3764 gen_rtx_IOR (SImode,
3765 gen_rtx_ASHIFT
3766 (SImode,
3767 gen_rtx_SUBREG (SImode, operands[0], 0),
3768 GEN_INT (8)),
f9cc092a 3769 scratch)));
0a81f500 3770 else
43cffd11
RE
3771 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
3772 gen_rtx_IOR (SImode,
f9cc092a 3773 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
3774 GEN_INT (8)),
3775 gen_rtx_SUBREG (SImode, operands[0],
3776 0))));
0a81f500
RE
3777}
3778
f9cc092a
RE
3779/* Handle storing a half-word to memory during reload by synthesising as two
3780 byte stores. Take care not to clobber the input values until after we
3781 have moved them somewhere safe. This code assumes that if the DImode
3782 scratch in operands[2] overlaps either the input value or output address
3783 in some way, then that value must die in this insn (we absolutely need
3784 two scratch registers for some corner cases). */
f3bb6135 3785void
af48348a 3786arm_reload_out_hi (operands)
62b10bbc 3787 rtx * operands;
af48348a 3788{
f9cc092a
RE
3789 rtx ref = operands[0];
3790 rtx outval = operands[1];
3791 rtx base, scratch;
3792 HOST_WIDE_INT offset = 0;
3793
3794 if (GET_CODE (ref) == SUBREG)
3795 {
3796 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
3797 if (BYTES_BIG_ENDIAN)
3798 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
3799 - MIN (UNITS_PER_WORD,
3800 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
3801 ref = SUBREG_REG (ref);
3802 }
3803
3804
3805 if (GET_CODE (ref) == REG)
3806 {
3807 /* We have a pseudo which has been spilt onto the stack; there
3808 are two cases here: the first where there is a simple
3809 stack-slot replacement and a second where the stack-slot is
3810 out of range, or is used as a subreg. */
3811 if (reg_equiv_mem[REGNO (ref)])
3812 {
3813 ref = reg_equiv_mem[REGNO (ref)];
3814 base = find_replacement (&XEXP (ref, 0));
3815 }
3816 else
3817 /* The slot is out of range, or was dressed up in a SUBREG */
3818 base = reg_equiv_address[REGNO (ref)];
3819 }
3820 else
3821 base = find_replacement (&XEXP (ref, 0));
3822
3823 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
3824
3825 /* Handle the case where the address is too complex to be offset by 1. */
3826 if (GET_CODE (base) == MINUS
3827 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
3828 {
3829 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
3830
3831 /* Be careful not to destroy OUTVAL. */
3832 if (reg_overlap_mentioned_p (base_plus, outval))
3833 {
3834 /* Updating base_plus might destroy outval, see if we can
3835 swap the scratch and base_plus. */
3836 if (! reg_overlap_mentioned_p (scratch, outval))
3837 {
3838 rtx tmp = scratch;
3839 scratch = base_plus;
3840 base_plus = tmp;
3841 }
3842 else
3843 {
3844 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
3845
3846 /* Be conservative and copy OUTVAL into the scratch now,
3847 this should only be necessary if outval is a subreg
3848 of something larger than a word. */
3849 /* XXX Might this clobber base? I can't see how it can,
3850 since scratch is known to overlap with OUTVAL, and
3851 must be wider than a word. */
3852 emit_insn (gen_movhi (scratch_hi, outval));
3853 outval = scratch_hi;
3854 }
3855 }
3856
3857 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
3858 base = base_plus;
3859 }
3860 else if (GET_CODE (base) == PLUS)
3861 {
3862 /* The addend must be CONST_INT, or we would have dealt with it above */
3863 HOST_WIDE_INT hi, lo;
3864
3865 offset += INTVAL (XEXP (base, 1));
3866 base = XEXP (base, 0);
3867
3868 /* Rework the address into a legal sequence of insns */
3869 /* Valid range for lo is -4095 -> 4095 */
3870 lo = (offset >= 0
3871 ? (offset & 0xfff)
3872 : -((-offset) & 0xfff));
3873
3874 /* Corner case: if lo is the max offset, then we would be out of range
3875 once we have added the additional 1 below, so bump the msb into the
3876 pre-loading insn(s). */
3877 if (lo == 4095)
3878 lo &= 0x7ff;
3879
3880 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFF)
3881 ^ (HOST_WIDE_INT) 0x80000000)
3882 - (HOST_WIDE_INT) 0x80000000);
3883
3884 if (hi + lo != offset)
3885 abort ();
3886
3887 if (hi != 0)
3888 {
3889 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
3890
3891 /* Be careful not to destroy OUTVAL. */
3892 if (reg_overlap_mentioned_p (base_plus, outval))
3893 {
3894 /* Updating base_plus might destroy outval, see if we
3895 can swap the scratch and base_plus. */
3896 if (! reg_overlap_mentioned_p (scratch, outval))
3897 {
3898 rtx tmp = scratch;
3899 scratch = base_plus;
3900 base_plus = tmp;
3901 }
3902 else
3903 {
3904 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
3905
3906 /* Be conservative and copy outval into scratch now,
3907 this should only be necessary if outval is a
3908 subreg of something larger than a word. */
3909 /* XXX Might this clobber base? I can't see how it
3910 can, since scratch is known to overlap with
3911 outval. */
3912 emit_insn (gen_movhi (scratch_hi, outval));
3913 outval = scratch_hi;
3914 }
3915 }
3916
3917 /* Get the base address; addsi3 knows how to handle constants
3918 that require more than one insn */
3919 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
3920 base = base_plus;
3921 offset = lo;
3922 }
3923 }
af48348a 3924
b5cc037f
RE
3925 if (BYTES_BIG_ENDIAN)
3926 {
f9cc092a
RE
3927 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
3928 plus_constant (base, offset + 1)),
3929 gen_rtx_SUBREG (QImode, outval, 0)));
3930 emit_insn (gen_lshrsi3 (scratch,
3931 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 3932 GEN_INT (8)));
f9cc092a
RE
3933 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
3934 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f
RE
3935 }
3936 else
3937 {
f9cc092a
RE
3938 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
3939 gen_rtx_SUBREG (QImode, outval, 0)));
3940 emit_insn (gen_lshrsi3 (scratch,
3941 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 3942 GEN_INT (8)));
f9cc092a
RE
3943 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
3944 plus_constant (base, offset + 1)),
3945 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f 3946 }
af48348a 3947}
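
/* For the simple little-endian case, the sequence emitted above amounts
   to the following (assumed register assignments, for illustration only):
       strb    outval, [base, #offset]
       mov     scratch, outval, lsr #8
       strb    scratch, [base, #offset + 1]  */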
2b835d68
RE
3948\f
3949/* Routines for manipulation of the constant pool. */
2b835d68 3950
949d79eb
RE
3951/* Arm instructions cannot load a large constant directly into a
3952 register; such constants have to come from a pc relative load. The constant
3953 must therefore be placed in the addressable range of the pc
3954 relative load. Depending on the precise pc relative load
3955 instruction the range is somewhere between 256 bytes and 4k. This
3956 means that we often have to dump a constant inside a function, and
2b835d68
RE
3957 generate code to branch around it.
3958
949d79eb
RE
3959 It is important to minimize this, since the branches will slow
3960 things down and make the code larger.
2b835d68 3961
949d79eb
RE
3962 Normally we can hide the table after an existing unconditional
3963 branch so that there is no interruption of the flow, but in the
3964 worst case the code looks like this:
2b835d68
RE
3965
3966 ldr rn, L1
949d79eb 3967 ...
2b835d68
RE
3968 b L2
3969 align
3970 L1: .long value
3971 L2:
949d79eb 3972 ...
2b835d68 3973
2b835d68 3974 ldr rn, L3
949d79eb 3975 ...
2b835d68
RE
3976 b L4
3977 align
2b835d68
RE
3978 L3: .long value
3979 L4:
949d79eb
RE
3980 ...
3981
3982 We fix this by performing a scan after scheduling, which notices
3983 which instructions need to have their operands fetched from the
3984 constant table and builds the table.
3985
3986 The algorithm starts by building a table of all the constants that
3987 need fixing up and all the natural barriers in the function (places
3988 where a constant table can be dropped without breaking the flow).
3989 For each fixup we note how far the pc-relative replacement will be
3990 able to reach and the offset of the instruction into the function.
3991
3992 Having built the table we then group the fixes together to form
3993 tables that are as large as possible (subject to addressing
3994 constraints) and emit each table of constants after the last
3995 barrier that is within range of all the instructions in the group.
3996 If a group does not contain a barrier, then we forcibly create one
3997 by inserting a jump instruction into the flow. Once the table has
3998 been inserted, the insns are then modified to reference the
3999 relevant entry in the pool.
4000
4001 Possible enhancements to the algorithm (not implemented) are:
4002
4003 1) ARM instructions (but not thumb) can use negative offsets, so we
4004 could reference back to a previous pool rather than forwards to a
4005 new one. For large functions this may reduce the number of pools
4006 required.
4007
4008 2) For some processors and object formats, there may be benefit in
4009 aligning the pools to the start of cache lines; this alignment
4010 would need to be taken into account when calculating addressability
4011 of a pool.
2b835d68
RE
4012
4013 */
4014
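/* Worked example of the grouping (assumed addresses): with fixes at
   addresses 0x100 and 0x180, each able to reach 4k forwards, and a
   barrier at 0x400, both fixes go into a single pool dumped after that
   barrier.  A fix whose range ends before the barrier would instead
   close the group early, and if no barrier fell inside the group's
   range we would create one by emitting a jump around the pool.  */
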
4015typedef struct
4016{
4017 rtx value; /* Value in table */
4018 HOST_WIDE_INT next_offset;
4019 enum machine_mode mode; /* Mode of value */
949d79eb 4020} minipool_node;
2b835d68
RE
4021
4022/* The maximum number of constants that can fit into one pool, since
949d79eb
RE
4023 the pc relative range is 0...4092 bytes and constants are at least 4
4024 bytes long. */
2b835d68 4025
949d79eb
RE
4026#define MAX_MINIPOOL_SIZE (4092/4)
4027static minipool_node minipool_vector[MAX_MINIPOOL_SIZE];
4028static int minipool_size;
4029static rtx minipool_vector_label;
2b835d68 4030
332072db
RE
4031/* Add a constant to the pool and return its offset within the current
4032 pool.
4033
4034 X is the rtx we want to replace. MODE is its mode. If the constant
4035 is already in the pool, the existing entry is reused and its offset
4036 is returned. */
2b835d68 4037static HOST_WIDE_INT
949d79eb 4038add_minipool_constant (x, mode)
2b835d68
RE
4039 rtx x;
4040 enum machine_mode mode;
4041{
4042 int i;
2b835d68 4043 HOST_WIDE_INT offset;
da6558fd 4044
949d79eb
RE
4045 /* First, see if we've already got it. */
4046 for (i = 0; i < minipool_size; i++)
2b835d68 4047 {
949d79eb
RE
4048 if (GET_CODE (x) == minipool_vector[i].value->code
4049 && mode == minipool_vector[i].mode)
2b835d68
RE
4050 {
4051 if (GET_CODE (x) == CODE_LABEL)
4052 {
949d79eb 4053 if (XINT (x, 3) != XINT (minipool_vector[i].value, 3))
2b835d68
RE
4054 continue;
4055 }
949d79eb
RE
4056 if (rtx_equal_p (x, minipool_vector[i].value))
4057 return minipool_vector[i].next_offset - GET_MODE_SIZE (mode);
2b835d68
RE
4058 }
4059 }
4060
4061 /* Need a new one */
949d79eb 4062 minipool_vector[minipool_size].next_offset = GET_MODE_SIZE (mode);
2b835d68 4063 offset = 0;
949d79eb
RE
4064 if (minipool_size == 0)
4065 minipool_vector_label = gen_label_rtx ();
2b835d68 4066 else
949d79eb
RE
4067 minipool_vector[minipool_size].next_offset
4068 += (offset = minipool_vector[minipool_size - 1].next_offset);
2b835d68 4069
949d79eb
RE
4070 minipool_vector[minipool_size].value = x;
4071 minipool_vector[minipool_size].mode = mode;
4072 minipool_size++;
2b835d68
RE
4073 return offset;
4074}
4075
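/* Example of the offset accounting (assumed insertion order): adding an
   SImode constant to an empty pool returns offset 0 and leaves
   next_offset = 4; adding a DFmode constant next returns offset 4 and
   leaves next_offset = 12.  */
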
4076/* Output the literal table */
4077static void
949d79eb 4078dump_minipool (scan)
2b835d68
RE
4079 rtx scan;
4080{
4081 int i;
4082
4083 scan = emit_label_after (gen_label_rtx (), scan);
4084 scan = emit_insn_after (gen_align_4 (), scan);
949d79eb 4085 scan = emit_label_after (minipool_vector_label, scan);
2b835d68 4086
949d79eb 4087 for (i = 0; i < minipool_size; i++)
2b835d68 4088 {
949d79eb 4089 minipool_node *p = minipool_vector + i;
2b835d68
RE
4090
4091 switch (GET_MODE_SIZE (p->mode))
4092 {
4093 case 4:
4094 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
4095 break;
4096
4097 case 8:
4098 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
4099 break;
4100
4101 default:
4102 abort ();
4103 break;
4104 }
4105 }
4106
4107 scan = emit_insn_after (gen_consttable_end (), scan);
4108 scan = emit_barrier_after (scan);
949d79eb 4109 minipool_size = 0;
2b835d68
RE
4110}
4111
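/* The dumped pool has this rough shape (assumed assembler output; the
   exact directives come from the consttable patterns in arm.md):
           .align  2
       Lpool:
           .word   <4-byte entry>
           .word   <8-byte entry, first word>
           .word   <8-byte entry, second word>
   with a barrier emitted after the table so that it is never reached
   by falling through.  */
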
949d79eb
RE
4112/* Find the last barrier less than MAX_COUNT bytes from FROM, or
4113 create one. */
2b835d68
RE
4114static rtx
4115find_barrier (from, max_count)
4116 rtx from;
4117 int max_count;
4118{
4119 int count = 0;
4120 rtx found_barrier = 0;
e5e809f4 4121 rtx last = from;
2b835d68
RE
4122
4123 while (from && count < max_count)
4124 {
7551cbc7 4125 rtx tmp;
da6558fd 4126
2b835d68 4127 if (GET_CODE (from) == BARRIER)
7551cbc7 4128 found_barrier = from;
2b835d68
RE
4129
4130 /* Count the length of this insn */
949d79eb
RE
4131 if (GET_CODE (from) == JUMP_INSN
4132 && JUMP_LABEL (from) != 0
4133 && ((tmp = next_real_insn (JUMP_LABEL (from)))
4134 == next_real_insn (from))
4135 && tmp != NULL
4136 && GET_CODE (tmp) == JUMP_INSN
4137 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
4138 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
7551cbc7
RE
4139 {
4140 int elt = GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC ? 1 : 0;
4141 count += (get_attr_length (from)
4142 + GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (tmp), elt));
4143 /* Continue after the dispatch table. */
4144 last = from;
4145 from = NEXT_INSN (tmp);
4146 continue;
4147 }
2b835d68
RE
4148 else
4149 count += get_attr_length (from);
4150
e5e809f4 4151 last = from;
2b835d68
RE
4152 from = NEXT_INSN (from);
4153 }
4154
da6558fd 4155 if (! found_barrier)
2b835d68
RE
4156 {
4157 /* We didn't find a barrier in time to
da6558fd 4158 dump our stuff, so we'll make one. */
2b835d68 4159 rtx label = gen_label_rtx ();
da6558fd 4160
2b835d68 4161 if (from)
e5e809f4 4162 from = PREV_INSN (last);
2b835d68
RE
4163 else
4164 from = get_last_insn ();
da6558fd
NC
4165
4166 /* Walk back to be just before any jump. */
2b835d68 4167 while (GET_CODE (from) == JUMP_INSN
25b1c156 4168 || GET_CODE (from) == NOTE
2b835d68
RE
4169 || GET_CODE (from) == CODE_LABEL)
4170 from = PREV_INSN (from);
da6558fd 4171
2b835d68
RE
4172 from = emit_jump_insn_after (gen_jump (label), from);
4173 JUMP_LABEL (from) = label;
4174 found_barrier = emit_barrier_after (from);
4175 emit_label_after (label, found_barrier);
2b835d68
RE
4176 }
4177
4178 return found_barrier;
4179}
4180
949d79eb
RE
4181struct minipool_fixup
4182{
4183 struct minipool_fixup *next;
4184 rtx insn;
4185 int address;
4186 rtx *loc;
4187 enum machine_mode mode;
4188 rtx value;
4189 int range;
4190};
4191
4192struct minipool_fixup *minipool_fix_head;
4193struct minipool_fixup *minipool_fix_tail;
4194
4195static void
4196push_minipool_barrier (insn, address)
2b835d68 4197 rtx insn;
949d79eb 4198 int address;
2b835d68 4199{
949d79eb
RE
4200 struct minipool_fixup *fix
4201 = (struct minipool_fixup *) oballoc (sizeof (struct minipool_fixup));
ad076f4e 4202
949d79eb
RE
4203 fix->insn = insn;
4204 fix->address = address;
2b835d68 4205
949d79eb
RE
4206 fix->next = NULL;
4207 if (minipool_fix_head != NULL)
4208 minipool_fix_tail->next = fix;
4209 else
4210 minipool_fix_head = fix;
4211
4212 minipool_fix_tail = fix;
4213}
2b835d68 4214
949d79eb
RE
4215static void
4216push_minipool_fix (insn, address, loc, mode, value)
4217 rtx insn;
4218 int address;
4219 rtx *loc;
4220 enum machine_mode mode;
4221 rtx value;
4222{
4223 struct minipool_fixup *fix
4224 = (struct minipool_fixup *) oballoc (sizeof (struct minipool_fixup));
4225
4226#ifdef AOF_ASSEMBLER
4227 /* PIC symbol references need to be converted into offsets into the
4228 based area. */
4229 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
4230 value = aof_pic_entry (value);
4231#endif /* AOF_ASSEMBLER */
4232
4233 fix->insn = insn;
4234 fix->address = address;
4235 fix->loc = loc;
4236 fix->mode = mode;
4237 fix->value = value;
4238 fix->range = get_attr_pool_range (insn);
4239
4240 /* If an insn doesn't have a range defined for it, then it isn't
4241 expecting to be reworked by this code. Better to abort now than
4242 to generate duff assembly code. */
4243 if (fix->range == 0)
4244 abort ();
4245
4246 /* Add it to the chain of fixes */
4247 fix->next = NULL;
4248 if (minipool_fix_head != NULL)
4249 minipool_fix_tail->next = fix;
4250 else
4251 minipool_fix_head = fix;
4252
4253 minipool_fix_tail = fix;
4254}
4255
4256static void
4257note_invalid_constants (insn, address)
4258 rtx insn;
4259 int address;
4260{
4261 int opno;
4262
4263 /* Extract the operands of the insn */
4264 extract_insn (insn);
4265
949d79eb
RE
4266 /* Find the alternative selected */
4267 if (! constrain_operands (1))
4268 fatal_insn_not_found (insn);
4269
4270 /* Preprocess the constraints, to extract some useful information. */
4271 preprocess_constraints ();
4272
1ccbefce 4273 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb
RE
4274 {
4275 /* Things we need to fix can only occur in inputs */
36ab44c7 4276 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
4277 continue;
4278
4279 /* If this alternative is a memory reference, then any mention
4280 of constants in this alternative is really to fool reload
4281 into allowing us to accept one there. We need to fix them up
4282 now so that we output the right code. */
4283 if (recog_op_alt[opno][which_alternative].memory_ok)
4284 {
1ccbefce 4285 rtx op = recog_data.operand[opno];
949d79eb
RE
4286
4287 if (CONSTANT_P (op))
1ccbefce
RH
4288 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
4289 recog_data.operand_mode[opno], op);
949d79eb
RE
4290#ifndef AOF_ASSEMBLER
4291 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
1ccbefce
RH
4292 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
4293 recog_data.operand_mode[opno],
4294 XVECEXP (op, 0, 0));
949d79eb 4295#endif
1ccbefce 4296 else if (recog_data.operand_mode[opno] == SImode
949d79eb
RE
4297 && GET_CODE (op) == MEM
4298 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
4299 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
1ccbefce
RH
4300 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
4301 recog_data.operand_mode[opno],
949d79eb
RE
4302 get_pool_constant (XEXP (op, 0)));
4303 }
2b835d68 4304 }
2b835d68
RE
4305}
4306
4307void
4308arm_reorg (first)
4309 rtx first;
4310{
4311 rtx insn;
949d79eb
RE
4312 int address = 0;
4313 struct minipool_fixup *fix;
ad076f4e 4314
949d79eb 4315 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 4316
949d79eb
RE
4317 /* The first insn must always be a note, or the code below won't
4318 scan it properly. */
4319 if (GET_CODE (first) != NOTE)
4320 abort ();
4321
4322 /* Scan all the insns and record the operands that will need fixing. */
4323 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 4324 {
2b835d68 4325
949d79eb
RE
4326 if (GET_CODE (insn) == BARRIER)
4327 push_minipool_barrier (insn, address);
4328 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
4329 || GET_CODE (insn) == JUMP_INSN)
4330 {
4331 rtx table;
4332
4333 note_invalid_constants (insn, address);
4334 address += get_attr_length (insn);
4335 /* If the insn is a vector jump, add the size of the table
4336 and skip the table. */
4337 if (GET_CODE (insn) == JUMP_INSN
4338 && JUMP_LABEL (insn) != NULL
4339 && ((table = next_real_insn (JUMP_LABEL (insn)))
4340 == next_real_insn (insn))
4341 && table != NULL
4342 && GET_CODE (table) == JUMP_INSN
4343 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4344 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2b835d68 4345 {
949d79eb 4346 int elt = GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 4347
949d79eb
RE
4348 address += GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (table),
4349 elt);
4350 insn = table;
4351 }
4352 }
4353 }
332072db 4354
949d79eb
RE
4355 /* Now scan the fixups and perform the required changes. */
4356 for (fix = minipool_fix_head; fix; fix = fix->next)
4357 {
4358 struct minipool_fixup *ftmp;
4359 struct minipool_fixup *last_barrier = NULL;
4360 int max_range;
4361 rtx barrier;
4362 struct minipool_fixup *this_fix;
4363 int new_minipool_size = 0;
4364
4365 /* Skip any further barriers before the next fix. */
4366 while (fix && GET_CODE (fix->insn) == BARRIER)
4367 fix = fix->next;
4368
4369 if (fix == NULL)
4370 break;
332072db 4371
949d79eb
RE
4372 ftmp = fix;
4373 max_range = fix->address + fix->range;
2b835d68 4374
949d79eb
RE
4375 /* Find all the other fixes that can live in the same pool. */
4376 while (ftmp->next && ftmp->next->address < max_range
4377 && (GET_CODE (ftmp->next->insn) == BARRIER
4378 /* Ensure we can reach the constant inside the pool. */
4379 || ftmp->next->range > new_minipool_size))
4380 {
4381 ftmp = ftmp->next;
4382 if (GET_CODE (ftmp->insn) == BARRIER)
4383 last_barrier = ftmp;
4384 else
4385 {
4386 /* Does this fix constrain the range we can search? */
4387 if (ftmp->address + ftmp->range - new_minipool_size < max_range)
4388 max_range = ftmp->address + ftmp->range - new_minipool_size;
2b835d68 4389
949d79eb 4390 new_minipool_size += GET_MODE_SIZE (ftmp->mode);
2b835d68 4391 }
2b835d68 4392 }
949d79eb
RE
4393
4394 /* If we found a barrier, drop back to that; any fixes that we could
4395 have reached but come after the barrier will now go in the next
4396 mini-pool. */
4397 if (last_barrier != NULL)
4398 {
4399 barrier = last_barrier->insn;
4400 ftmp = last_barrier;
4401 }
2bfa88dc
RE
4402 /* ftmp is the last fix that we can fit into this pool and we
4403 failed to find a barrier that we could use. Insert a new
4404 barrier in the code and arrange to jump around it. */
949d79eb 4405 else
2bfa88dc
RE
4406 {
4407 /* Check that there isn't another fix that is in range that
4408 we couldn't fit into this pool because the pool was
4409 already too large: we need to put the pool before such an
4410 instruction. */
4411 if (ftmp->next && ftmp->next->address < max_range)
4412 max_range = ftmp->address;
4413
4414 barrier = find_barrier (ftmp->insn, max_range - ftmp->address);
4415 }
949d79eb
RE
4416
4417 /* Scan over the fixes we have identified for this pool, fixing them
4418 up and adding the constants to the pool itself. */
4419 for (this_fix = fix; this_fix && ftmp->next != this_fix;
4420 this_fix = this_fix->next)
4421 if (GET_CODE (this_fix->insn) != BARRIER)
4422 {
4423 int offset = add_minipool_constant (this_fix->value,
4424 this_fix->mode);
4425 rtx addr
4426 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
4427 minipool_vector_label),
4428 offset);
4429 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
4430 }
4431
4432 dump_minipool (barrier);
4433 fix = ftmp;
2b835d68 4434 }
4b632bf1 4435
949d79eb
RE
4436 /* From now on we must synthesize any constants that we can't handle
4437 directly. This can happen if the RTL gets split during final
4438 instruction generation. */
4b632bf1 4439 after_arm_reorg = 1;
2b835d68
RE
4440}
4441
cce8749e
CH
4442\f
4443/* Routines to output assembly language. */
4444
f3bb6135 4445/* If the rtx is a valid FPA immediate constant, return its string form.
ff9940b0
RE
4446 In this way we can ensure that valid double constants are generated even
4447 when cross compiling. */
4448char *
4449fp_immediate_constant (x)
b5cc037f 4450 rtx x;
ff9940b0
RE
4451{
4452 REAL_VALUE_TYPE r;
4453 int i;
4454
4455 if (!fpa_consts_inited)
4456 init_fpa_table ();
4457
4458 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
4459 for (i = 0; i < 8; i++)
4460 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
4461 return strings_fpa[i];
f3bb6135 4462
ff9940b0
RE
4463 abort ();
4464}
4465
9997d19d
RE
4466/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
4467static char *
4468fp_const_from_val (r)
62b10bbc 4469 REAL_VALUE_TYPE * r;
9997d19d
RE
4470{
4471 int i;
4472
4473 if (! fpa_consts_inited)
4474 init_fpa_table ();
4475
4476 for (i = 0; i < 8; i++)
4477 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
4478 return strings_fpa[i];
4479
4480 abort ();
4481}
ff9940b0 4482
cce8749e
CH
4483/* Output the operands of a LDM/STM instruction to STREAM.
4484 MASK is the ARM register set mask of which only bits 0-15 are important.
4485 INSTR is the possibly suffixed load or store instruction; REG is the
4486 base register. HAT is non-zero if a hat must follow the register list. */
4487
4488void
dd18ae56 4489print_multi_reg (stream, instr, reg, mask, hat)
62b10bbc
NC
4490 FILE * stream;
4491 char * instr;
dd18ae56
NC
4492 int reg;
4493 int mask;
4494 int hat;
cce8749e
CH
4495{
4496 int i;
4497 int not_first = FALSE;
4498
1d5473cb 4499 fputc ('\t', stream);
dd18ae56 4500 asm_fprintf (stream, instr, reg);
1d5473cb 4501 fputs (", {", stream);
62b10bbc 4502
cce8749e
CH
4503 for (i = 0; i < 16; i++)
4504 if (mask & (1 << i))
4505 {
4506 if (not_first)
4507 fprintf (stream, ", ");
62b10bbc 4508
dd18ae56 4509 asm_fprintf (stream, "%r", i);
cce8749e
CH
4510 not_first = TRUE;
4511 }
f3bb6135 4512
cce8749e 4513 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 4514}
cce8749e
CH
4515
4516/* Output a 'call' insn. */
4517
4518char *
4519output_call (operands)
62b10bbc 4520 rtx * operands;
cce8749e 4521{
cce8749e
CH
4522 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
4523
62b10bbc 4524 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 4525 {
62b10bbc 4526 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 4527 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 4528 }
62b10bbc 4529
1d5473cb 4530 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 4531
6cfc7210 4532 if (TARGET_INTERWORK)
da6558fd
NC
4533 output_asm_insn ("bx%?\t%0", operands);
4534 else
4535 output_asm_insn ("mov%?\t%|pc, %0", operands);
4536
f3bb6135
RE
4537 return "";
4538}
cce8749e 4539
ff9940b0
RE
4540static int
4541eliminate_lr2ip (x)
62b10bbc 4542 rtx * x;
ff9940b0
RE
4543{
4544 int something_changed = 0;
62b10bbc 4545 rtx x0 = * x;
ff9940b0
RE
4546 int code = GET_CODE (x0);
4547 register int i, j;
6f7d635c 4548 register const char * fmt;
ff9940b0
RE
4549
4550 switch (code)
4551 {
4552 case REG:
62b10bbc 4553 if (REGNO (x0) == LR_REGNUM)
ff9940b0 4554 {
62b10bbc 4555 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
4556 return 1;
4557 }
4558 return 0;
4559 default:
4560 /* Scan through the sub-elements and change any references there */
4561 fmt = GET_RTX_FORMAT (code);
62b10bbc 4562
ff9940b0
RE
4563 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4564 if (fmt[i] == 'e')
4565 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
4566 else if (fmt[i] == 'E')
4567 for (j = 0; j < XVECLEN (x0, i); j++)
4568 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 4569
ff9940b0
RE
4570 return something_changed;
4571 }
4572}
4573
4574/* Output a 'call' insn that is a reference in memory. */
4575
4576char *
4577output_call_mem (operands)
62b10bbc 4578 rtx * operands;
ff9940b0
RE
4579{
4580 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
4581 /* Handle calls using lr by using ip (which may be clobbered in the
4582 subroutine anyway). */
4583 if (eliminate_lr2ip (&operands[0]))
1d5473cb 4584 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 4585
6cfc7210 4586 if (TARGET_INTERWORK)
da6558fd
NC
4587 {
4588 output_asm_insn ("ldr%?\t%|ip, %0", operands);
4589 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
4590 output_asm_insn ("bx%?\t%|ip", operands);
4591 }
4592 else
4593 {
4594 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
4595 output_asm_insn ("ldr%?\t%|pc, %0", operands);
4596 }
4597
f3bb6135
RE
4598 return "";
4599}
ff9940b0
RE
4600
4601
4602/* Output a move from arm registers to an fpu register.
4603 OPERANDS[0] is an fpu register.
4604 OPERANDS[1] is the first register of an arm register pair. */
4605
4606char *
4607output_mov_long_double_fpu_from_arm (operands)
62b10bbc 4608 rtx * operands;
ff9940b0
RE
4609{
4610 int arm_reg0 = REGNO (operands[1]);
4611 rtx ops[3];
4612
62b10bbc
NC
4613 if (arm_reg0 == IP_REGNUM)
4614 abort ();
f3bb6135 4615
43cffd11
RE
4616 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4617 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
4618 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 4619
1d5473cb
RE
4620 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
4621 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 4622
f3bb6135
RE
4623 return "";
4624}
ff9940b0
RE
4625
4626/* Output a move from an fpu register to arm registers.
4627 OPERANDS[0] is the first register of an arm register pair.
4628 OPERANDS[1] is an fpu register. */
4629
4630char *
4631output_mov_long_double_arm_from_fpu (operands)
62b10bbc 4632 rtx * operands;
ff9940b0
RE
4633{
4634 int arm_reg0 = REGNO (operands[0]);
4635 rtx ops[3];
4636
62b10bbc
NC
4637 if (arm_reg0 == IP_REGNUM)
4638 abort ();
f3bb6135 4639
43cffd11
RE
4640 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4641 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
4642 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 4643
1d5473cb
RE
4644 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
4645 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
4646 return "";
4647}
ff9940b0
RE
4648
4649/* Output a move from arm registers to arm registers of a long double.
4650 OPERANDS[0] is the destination.
4651 OPERANDS[1] is the source. */
4652char *
4653output_mov_long_double_arm_from_arm (operands)
62b10bbc 4654 rtx * operands;
ff9940b0
RE
4655{
4656 /* We have to be careful here because the two might overlap */
4657 int dest_start = REGNO (operands[0]);
4658 int src_start = REGNO (operands[1]);
4659 rtx ops[2];
4660 int i;
4661
4662 if (dest_start < src_start)
4663 {
4664 for (i = 0; i < 3; i++)
4665 {
43cffd11
RE
4666 ops[0] = gen_rtx_REG (SImode, dest_start + i);
4667 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 4668 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4669 }
4670 }
4671 else
4672 {
4673 for (i = 2; i >= 0; i--)
4674 {
43cffd11
RE
4675 ops[0] = gen_rtx_REG (SImode, dest_start + i);
4676 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 4677 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4678 }
4679 }
f3bb6135 4680
ff9940b0
RE
4681 return "";
4682}
4683
4684
cce8749e
CH
4685/* Output a move from arm registers to an fpu register.
4686 OPERANDS[0] is an fpu register.
4687 OPERANDS[1] is the first register of an arm register pair. */
4688
4689char *
4690output_mov_double_fpu_from_arm (operands)
62b10bbc 4691 rtx * operands;
cce8749e
CH
4692{
4693 int arm_reg0 = REGNO (operands[1]);
4694 rtx ops[2];
4695
62b10bbc
NC
4696 if (arm_reg0 == IP_REGNUM)
4697 abort ();
4698
43cffd11
RE
4699 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4700 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
4701 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
4702 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
4703 return "";
4704}
cce8749e
CH
4705
4706/* Output a move from an fpu register to arm registers.
4707 OPERANDS[0] is the first registers of an arm register pair.
4708 OPERANDS[1] is an fpu register. */
4709
4710char *
4711output_mov_double_arm_from_fpu (operands)
62b10bbc 4712 rtx * operands;
cce8749e
CH
4713{
4714 int arm_reg0 = REGNO (operands[0]);
4715 rtx ops[2];
4716
62b10bbc
NC
4717 if (arm_reg0 == IP_REGNUM)
4718 abort ();
f3bb6135 4719
43cffd11
RE
4720 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4721 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
4722 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
4723 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
4724 return "";
4725}
cce8749e
CH
4726
4727/* Output a move between double words.
4728 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
4729 or MEM<-REG and all MEMs must be offsettable addresses. */
4730
4731char *
4732output_move_double (operands)
aec3cfba 4733 rtx * operands;
cce8749e
CH
4734{
4735 enum rtx_code code0 = GET_CODE (operands[0]);
4736 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 4737 rtx otherops[3];
cce8749e
CH
4738
4739 if (code0 == REG)
4740 {
4741 int reg0 = REGNO (operands[0]);
4742
43cffd11 4743 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 4744
cce8749e
CH
4745 if (code1 == REG)
4746 {
4747 int reg1 = REGNO (operands[1]);
62b10bbc
NC
4748 if (reg1 == IP_REGNUM)
4749 abort ();
f3bb6135 4750
cce8749e 4751 /* Ensure the second source is not overwritten */
c1c2bc04 4752 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 4753 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 4754 else
6cfc7210 4755 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
4756 }
4757 else if (code1 == CONST_DOUBLE)
4758 {
226a5051
RE
4759 if (GET_MODE (operands[1]) == DFmode)
4760 {
4761 long l[2];
4762 union real_extract u;
4763
4764 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
4765 sizeof (u));
4766 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
4767 otherops[1] = GEN_INT(l[1]);
4768 operands[1] = GEN_INT(l[0]);
4769 }
c1c2bc04
RE
4770 else if (GET_MODE (operands[1]) != VOIDmode)
4771 abort ();
4772 else if (WORDS_BIG_ENDIAN)
4773 {
4774
4775 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4776 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4777 }
226a5051
RE
4778 else
4779 {
c1c2bc04 4780
226a5051
RE
4781 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4782 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4783 }
6cfc7210 4784
c1c2bc04
RE
4785 output_mov_immediate (operands);
4786 output_mov_immediate (otherops);
cce8749e
CH
4787 }
4788 else if (code1 == CONST_INT)
4789 {
56636818
JL
4790#if HOST_BITS_PER_WIDE_INT > 32
4791 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
4792 what the upper word is. */
4793 if (WORDS_BIG_ENDIAN)
4794 {
4795 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4796 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4797 }
4798 else
4799 {
4800 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4801 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4802 }
4803#else
4804 /* Sign extend the intval into the high-order word */
c1c2bc04
RE
4805 if (WORDS_BIG_ENDIAN)
4806 {
4807 otherops[1] = operands[1];
4808 operands[1] = (INTVAL (operands[1]) < 0
4809 ? constm1_rtx : const0_rtx);
4810 }
ff9940b0 4811 else
c1c2bc04 4812 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 4813#endif
c1c2bc04
RE
4814 output_mov_immediate (otherops);
4815 output_mov_immediate (operands);
cce8749e
CH
4816 }
4817 else if (code1 == MEM)
4818 {
ff9940b0 4819 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 4820 {
ff9940b0 4821 case REG:
9997d19d 4822 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 4823 break;
2b835d68 4824
ff9940b0 4825 case PRE_INC:
2b835d68 4826 abort (); /* Should never happen now */
ff9940b0 4827 break;
2b835d68 4828
ff9940b0 4829 case PRE_DEC:
2b835d68 4830 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 4831 break;
2b835d68 4832
ff9940b0 4833 case POST_INC:
9997d19d 4834 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 4835 break;
2b835d68 4836
ff9940b0 4837 case POST_DEC:
2b835d68 4838 abort (); /* Should never happen now */
ff9940b0 4839 break;
2b835d68
RE
4840
4841 case LABEL_REF:
4842 case CONST:
4843 output_asm_insn ("adr%?\t%0, %1", operands);
4844 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
4845 break;
4846
ff9940b0 4847 default:
aec3cfba
NC
4848 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
4849 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 4850 {
2b835d68
RE
4851 otherops[0] = operands[0];
4852 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
4853 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
4854 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
4855 {
4856 if (GET_CODE (otherops[2]) == CONST_INT)
4857 {
4858 switch (INTVAL (otherops[2]))
4859 {
4860 case -8:
4861 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
4862 return "";
4863 case -4:
4864 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
4865 return "";
4866 case 4:
4867 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
4868 return "";
4869 }
4870 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
4871 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
4872 else
4873 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4874 }
4875 else
4876 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4877 }
4878 else
4879 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 4880
2b835d68
RE
4881 return "ldm%?ia\t%0, %M0";
4882 }
4883 else
4884 {
4885 otherops[1] = adj_offsettable_operand (operands[1], 4);
4886 /* Take care of overlapping base/data reg. */
4887 if (reg_mentioned_p (operands[0], operands[1]))
4888 {
4889 output_asm_insn ("ldr%?\t%0, %1", otherops);
4890 output_asm_insn ("ldr%?\t%0, %1", operands);
4891 }
4892 else
4893 {
4894 output_asm_insn ("ldr%?\t%0, %1", operands);
4895 output_asm_insn ("ldr%?\t%0, %1", otherops);
4896 }
cce8749e
CH
4897 }
4898 }
4899 }
2b835d68 4900 else
62b10bbc 4901 abort (); /* Constraints should prevent this */
cce8749e
CH
4902 }
4903 else if (code0 == MEM && code1 == REG)
4904 {
62b10bbc
NC
4905 if (REGNO (operands[1]) == IP_REGNUM)
4906 abort ();
2b835d68 4907
ff9940b0
RE
4908 switch (GET_CODE (XEXP (operands[0], 0)))
4909 {
4910 case REG:
9997d19d 4911 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 4912 break;
2b835d68 4913
ff9940b0 4914 case PRE_INC:
2b835d68 4915 abort (); /* Should never happen now */
ff9940b0 4916 break;
2b835d68 4917
ff9940b0 4918 case PRE_DEC:
2b835d68 4919 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 4920 break;
2b835d68 4921
ff9940b0 4922 case POST_INC:
9997d19d 4923 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 4924 break;
2b835d68 4925
ff9940b0 4926 case POST_DEC:
2b835d68 4927 abort (); /* Should never happen now */
ff9940b0 4928 break;
2b835d68
RE
4929
4930 case PLUS:
4931 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
4932 {
4933 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
4934 {
4935 case -8:
4936 output_asm_insn ("stm%?db\t%m0, %M1", operands);
4937 return "";
4938
4939 case -4:
4940 output_asm_insn ("stm%?da\t%m0, %M1", operands);
4941 return "";
4942
4943 case 4:
4944 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
4945 return "";
4946 }
4947 }
4948 /* Fall through */
4949
ff9940b0 4950 default:
cce8749e 4951 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 4952 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
4953 output_asm_insn ("str%?\t%1, %0", operands);
4954 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
4955 }
4956 }
2b835d68 4957 else
62b10bbc 4958 abort (); /* Constraints should prevent this */
cce8749e 4959
9997d19d
RE
4960 return "";
4961}
cce8749e
CH
4962
4963
4964/* Output an arbitrary MOV reg, #n.
4965 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
4966
4967char *
4968output_mov_immediate (operands)
62b10bbc 4969 rtx * operands;
cce8749e 4970{
f3bb6135 4971 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
4972 int n_ones = 0;
4973 int i;
4974
4975 /* Try to use one MOV */
cce8749e 4976 if (const_ok_for_arm (n))
f3bb6135 4977 {
9997d19d 4978 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
4979 return "";
4980 }
cce8749e
CH
4981
4982 /* Try to use one MVN */
f3bb6135 4983 if (const_ok_for_arm (~n))
cce8749e 4984 {
f3bb6135 4985 operands[1] = GEN_INT (~n);
9997d19d 4986 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 4987 return "";
cce8749e
CH
4988 }
4989
4990 /* If all else fails, make it out of ORRs or BICs as appropriate. */
4991
4992 for (i=0; i < 32; i++)
4993 if (n & 1 << i)
4994 n_ones++;
4995
4996 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
9997d19d
RE
4997 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
4998 ~n);
cce8749e 4999 else
9997d19d
RE
5000 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
5001 n);
f3bb6135
RE
5002
5003 return "";
5004}
cce8749e
CH
5005
5006
5007/* Output an ADD r, s, #n where n may be too big for one instruction. If
5008 adding zero to one register, output nothing. */
5009
5010char *
5011output_add_immediate (operands)
62b10bbc 5012 rtx * operands;
cce8749e 5013{
f3bb6135 5014 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
5015
5016 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
5017 {
5018 if (n < 0)
5019 output_multi_immediate (operands,
9997d19d
RE
5020 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
5021 -n);
cce8749e
CH
5022 else
5023 output_multi_immediate (operands,
9997d19d
RE
5024 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
5025 n);
cce8749e 5026 }
f3bb6135
RE
5027
5028 return "";
5029}
cce8749e 5030
cce8749e
CH
5031/* Output a multiple immediate operation.
5032 OPERANDS is the vector of operands referred to in the output patterns.
5033 INSTR1 is the output pattern to use for the first constant.
5034 INSTR2 is the output pattern to use for subsequent constants.
5035 IMMED_OP is the index of the constant slot in OPERANDS.
5036 N is the constant value. */
5037
18af7313 5038static char *
cce8749e 5039output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc
NC
5040 rtx * operands;
5041 char * instr1, * instr2;
f3bb6135
RE
5042 int immed_op;
5043 HOST_WIDE_INT n;
cce8749e 5044{
f3bb6135
RE
5045#if HOST_BITS_PER_WIDE_INT > 32
5046 n &= 0xffffffff;
5047#endif
5048
cce8749e
CH
5049 if (n == 0)
5050 {
5051 operands[immed_op] = const0_rtx;
f3bb6135 5052 output_asm_insn (instr1, operands); /* Quick and easy output */
cce8749e
CH
5053 }
5054 else
5055 {
5056 int i;
5057 char *instr = instr1;
5058
5059 /* Note that n is never zero here (which would give no output) */
cce8749e
CH
5060 for (i = 0; i < 32; i += 2)
5061 {
5062 if (n & (3 << i))
5063 {
f3bb6135
RE
5064 operands[immed_op] = GEN_INT (n & (255 << i));
5065 output_asm_insn (instr, operands);
cce8749e
CH
5066 instr = instr2;
5067 i += 6;
5068 }
5069 }
5070 }
f3bb6135 5071 return "";
9997d19d 5072}
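
/* Example (assumed value): for n = 0x00ff00ff the MOV/ORR path of
   output_mov_immediate ends up emitting
       mov     r0, #255
       orr     r0, r0, #16711680
   since each 8-bit chunk is itself a valid ARM immediate.  */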
cce8749e
CH
5073
5074
5075/* Return the appropriate ARM instruction for the operation code.
5076 The returned result should not be overwritten. OP is the rtx of the
5077 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
5078 was shifted. */
5079
5080char *
5081arithmetic_instr (op, shift_first_arg)
5082 rtx op;
f3bb6135 5083 int shift_first_arg;
cce8749e 5084{
9997d19d 5085 switch (GET_CODE (op))
cce8749e
CH
5086 {
5087 case PLUS:
f3bb6135
RE
5088 return "add";
5089
cce8749e 5090 case MINUS:
f3bb6135
RE
5091 return shift_first_arg ? "rsb" : "sub";
5092
cce8749e 5093 case IOR:
f3bb6135
RE
5094 return "orr";
5095
cce8749e 5096 case XOR:
f3bb6135
RE
5097 return "eor";
5098
cce8749e 5099 case AND:
f3bb6135
RE
5100 return "and";
5101
cce8749e 5102 default:
f3bb6135 5103 abort ();
cce8749e 5104 }
f3bb6135 5105}
cce8749e
CH
5106
5107
5108/* Ensure valid constant shifts and return the appropriate shift mnemonic
5109 for the operation code. The returned result should not be overwritten.
5110 OP is the rtx code of the shift.
9997d19d
RE
5111 On exit, *AMOUNTP will be -1 if the shift is by a register, otherwise
5112 it will be the constant shift amount. */
cce8749e 5113
9997d19d
RE
5114static char *
5115shift_op (op, amountp)
5116 rtx op;
5117 HOST_WIDE_INT *amountp;
cce8749e 5118{
62b10bbc 5119 char * mnem;
e2c671ba 5120 enum rtx_code code = GET_CODE (op);
cce8749e 5121
9997d19d
RE
5122 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
5123 *amountp = -1;
5124 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
5125 *amountp = INTVAL (XEXP (op, 1));
5126 else
5127 abort ();
5128
e2c671ba 5129 switch (code)
cce8749e
CH
5130 {
5131 case ASHIFT:
5132 mnem = "asl";
5133 break;
f3bb6135 5134
cce8749e
CH
5135 case ASHIFTRT:
5136 mnem = "asr";
cce8749e 5137 break;
f3bb6135 5138
cce8749e
CH
5139 case LSHIFTRT:
5140 mnem = "lsr";
cce8749e 5141 break;
f3bb6135 5142
9997d19d
RE
5143 case ROTATERT:
5144 mnem = "ror";
9997d19d
RE
5145 break;
5146
ff9940b0 5147 case MULT:
e2c671ba
RE
5148 /* We never have to worry about the amount being other than a
5149 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
5150 if (*amountp != -1)
5151 *amountp = int_log2 (*amountp);
5152 else
5153 abort ();
f3bb6135
RE
5154 return "asl";
5155
cce8749e 5156 default:
f3bb6135 5157 abort ();
cce8749e
CH
5158 }
5159
e2c671ba
RE
5160 if (*amountp != -1)
5161 {
5162 /* This is not 100% correct, but follows from the desire to merge
5163 multiplication by a power of 2 with the recognizer for a
5164 shift. >=32 is not a valid shift for "asl", so we must try and
5165 output a shift that produces the correct arithmetical result.
ddd5a7c1 5166 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
5167 is not set correctly if we set the flags; but we never use the
5168 carry bit from such an operation, so we can ignore that. */
5169 if (code == ROTATERT)
5170 *amountp &= 31; /* Rotate is just modulo 32 */
5171 else if (*amountp != (*amountp & 31))
5172 {
5173 if (code == ASHIFT)
5174 mnem = "lsr";
5175 *amountp = 32;
5176 }
5177
5178 /* Shifts of 0 are no-ops. */
5179 if (*amountp == 0)
5180 return NULL;
5181 }
5182
9997d19d
RE
5183 return mnem;
5184}
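
/* Examples (assumed operands): (mult x 8) comes back as "asl" with
   *amountp = 3; an ASHIFT by 33 comes back as "lsr" with *amountp = 32,
   which gives the same arithmetic result (zero) without relying on the
   carry flag.  */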
cce8749e
CH
5185
5186
5187/* Obtain the shift from the POWER of two. */
5188
18af7313 5189static HOST_WIDE_INT
cce8749e 5190int_log2 (power)
f3bb6135 5191 HOST_WIDE_INT power;
cce8749e 5192{
f3bb6135 5193 HOST_WIDE_INT shift = 0;
cce8749e 5194
2b835d68 5195 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
5196 {
5197 if (shift > 31)
f3bb6135 5198 abort ();
cce8749e
CH
5199 shift++;
5200 }
f3bb6135
RE
5201
5202 return shift;
5203}
cce8749e 5204
cce8749e
CH
5205/* Output a .ascii pseudo-op, keeping track of lengths. This is because
5206 /bin/as is horribly restrictive. */
6cfc7210 5207#define MAX_ASCII_LEN 51
cce8749e
CH
5208
5209void
5210output_ascii_pseudo_op (stream, p, len)
62b10bbc
NC
5211 FILE * stream;
5212 unsigned char * p;
cce8749e
CH
5213 int len;
5214{
5215 int i;
6cfc7210 5216 int len_so_far = 0;
cce8749e 5217
6cfc7210
NC
5218 fputs ("\t.ascii\t\"", stream);
5219
cce8749e
CH
5220 for (i = 0; i < len; i++)
5221 {
5222 register int c = p[i];
5223
6cfc7210 5224 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 5225 {
6cfc7210 5226 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 5227 len_so_far = 0;
cce8749e
CH
5228 }
5229
6cfc7210 5230 switch (c)
cce8749e 5231 {
6cfc7210
NC
5232 case TARGET_TAB:
5233 fputs ("\\t", stream);
5234 len_so_far += 2;
5235 break;
5236
5237 case TARGET_FF:
5238 fputs ("\\f", stream);
5239 len_so_far += 2;
5240 break;
5241
5242 case TARGET_BS:
5243 fputs ("\\b", stream);
5244 len_so_far += 2;
5245 break;
5246
5247 case TARGET_CR:
5248 fputs ("\\r", stream);
5249 len_so_far += 2;
5250 break;
5251
5252 case TARGET_NEWLINE:
5253 fputs ("\\n", stream);
5254 c = p [i + 1];
5255 if ((c >= ' ' && c <= '~')
5256 || c == TARGET_TAB)
5257 /* This is a good place for a line break. */
5258 len_so_far = MAX_ASCII_LEN;
5259 else
5260 len_so_far += 2;
5261 break;
5262
5263 case '\"':
5264 case '\\':
5265 putc ('\\', stream);
5266 len_so_far ++;
5267 /* drop through. */
f3bb6135 5268
6cfc7210
NC
5269 default:
5270 if (c >= ' ' && c <= '~')
5271 {
5272 putc (c, stream);
5273 len_so_far ++;
5274 }
5275 else
5276 {
5277 fprintf (stream, "\\%03o", c);
5278 len_so_far += 4;
5279 }
5280 break;
cce8749e 5281 }
cce8749e 5282 }
f3bb6135 5283
cce8749e 5284 fputs ("\"\n", stream);
f3bb6135 5285}
cce8749e 5286\f
ff9940b0
RE
5287
5288/* Try to determine whether a pattern really clobbers the link register.
5289 This information is useful when peepholing, so that lr need not be pushed
0e84b556
RK
5290 if we combine a call followed by a return.
5291 NOTE: This code does not check for side-effect expressions in a SET_SRC:
5292 such a check should not be needed because these only update an existing
5293 value within a register; the register must still be set elsewhere within
5294 the function. */
ff9940b0
RE
5295
5296static int
5297pattern_really_clobbers_lr (x)
f3bb6135 5298 rtx x;
ff9940b0
RE
5299{
5300 int i;
5301
5302 switch (GET_CODE (x))
5303 {
5304 case SET:
5305 switch (GET_CODE (SET_DEST (x)))
5306 {
5307 case REG:
62b10bbc 5308 return REGNO (SET_DEST (x)) == LR_REGNUM;
f3bb6135 5309
ff9940b0
RE
5310 case SUBREG:
5311 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
62b10bbc 5312 return REGNO (XEXP (SET_DEST (x), 0)) == LR_REGNUM;
f3bb6135 5313
0e84b556
RK
5314 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
5315 return 0;
ff9940b0 5316 abort ();
f3bb6135 5317
ff9940b0
RE
5318 default:
5319 return 0;
5320 }
f3bb6135 5321
ff9940b0
RE
5322 case PARALLEL:
5323 for (i = 0; i < XVECLEN (x, 0); i++)
5324 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
5325 return 1;
5326 return 0;
f3bb6135 5327
ff9940b0
RE
5328 case CLOBBER:
5329 switch (GET_CODE (XEXP (x, 0)))
5330 {
5331 case REG:
62b10bbc 5332 return REGNO (XEXP (x, 0)) == LR_REGNUM;
f3bb6135 5333
ff9940b0
RE
5334 case SUBREG:
5335 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
62b10bbc 5336 return REGNO (XEXP (XEXP (x, 0), 0)) == LR_REGNUM;
ff9940b0 5337 abort ();
f3bb6135 5338
ff9940b0
RE
5339 default:
5340 return 0;
5341 }
f3bb6135 5342
ff9940b0
RE
5343 case UNSPEC:
5344 return 1;
f3bb6135 5345
ff9940b0
RE
5346 default:
5347 return 0;
5348 }
5349}
5350
5351static int
5352function_really_clobbers_lr (first)
f3bb6135 5353 rtx first;
ff9940b0
RE
5354{
5355 rtx insn, next;
5356
5357 for (insn = first; insn; insn = next_nonnote_insn (insn))
5358 {
5359 switch (GET_CODE (insn))
5360 {
5361 case BARRIER:
5362 case NOTE:
5363 case CODE_LABEL:
5364 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
ff9940b0 5365 break;
f3bb6135 5366
ff9940b0
RE
5367 case INSN:
5368 if (pattern_really_clobbers_lr (PATTERN (insn)))
5369 return 1;
5370 break;
f3bb6135 5371
ff9940b0
RE
5372 case CALL_INSN:
5373 /* Don't yet know how to handle those calls that are not to a
5374 SYMBOL_REF */
5375 if (GET_CODE (PATTERN (insn)) != PARALLEL)
5376 abort ();
f3bb6135 5377
ff9940b0
RE
5378 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
5379 {
5380 case CALL:
5381 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
5382 != SYMBOL_REF)
5383 return 1;
5384 break;
f3bb6135 5385
ff9940b0
RE
5386 case SET:
5387 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
5388 0, 0)), 0), 0))
5389 != SYMBOL_REF)
5390 return 1;
5391 break;
f3bb6135 5392
ff9940b0
RE
5393 default: /* Don't recognize it, be safe */
5394 return 1;
5395 }
f3bb6135 5396
ff9940b0
RE
5397 /* A call can be made (by peepholing) not to clobber lr iff it is
5398 followed by a return. There may, however, be a use insn iff
5399 we are returning the result of the call.
5400 If we run off the end of the insn chain, then that means the
5401 call was at the end of the function. Unfortunately we don't
5402 have a return insn for the peephole to recognize, so we
5403 must reject this. (Can this be fixed by adding our own insn?) */
5404 if ((next = next_nonnote_insn (insn)) == NULL)
5405 return 1;
f3bb6135 5406
32de079a
RE
5407 /* No need to worry about lr if the call never returns */
5408 if (GET_CODE (next) == BARRIER)
5409 break;
5410
ff9940b0
RE
5411 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
5412 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
5413 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
5414 == REGNO (XEXP (PATTERN (next), 0))))
5415 if ((next = next_nonnote_insn (next)) == NULL)
5416 return 1;
f3bb6135 5417
ff9940b0
RE
5418 if (GET_CODE (next) == JUMP_INSN
5419 && GET_CODE (PATTERN (next)) == RETURN)
5420 break;
5421 return 1;
f3bb6135 5422
ff9940b0
RE
5423 default:
5424 abort ();
5425 }
5426 }
f3bb6135 5427
ff9940b0
RE
5428 /* We have reached the end of the chain so lr was _not_ clobbered */
5429 return 0;
5430}
5431
5432char *
84ed5e79 5433output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
5434 rtx operand;
5435 int really_return;
84ed5e79 5436 int reverse;
ff9940b0
RE
5437{
5438 char instr[100];
5439 int reg, live_regs = 0;
e2c671ba
RE
5440 int volatile_func = (optimize > 0
5441 && TREE_THIS_VOLATILE (current_function_decl));
5442
5443 return_used_this_function = 1;
ff9940b0 5444
62b10bbc 5445 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba
RE
5446 {
5447 rtx ops[2];
5448 /* If this function was declared non-returning, and we have found a tail
5449 call, then we have to trust that the called function won't return. */
5450 if (! really_return)
5451 return "";
5452
5453 /* Otherwise, trap an attempted return by aborting. */
5454 ops[0] = operand;
ed0e6530 5455 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
86efdc8e 5456 : "abort");
2b835d68 5457 assemble_external_libcall (ops[1]);
84ed5e79 5458 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
e2c671ba
RE
5459 return "";
5460 }
5461
f3bb6135 5462 if (current_function_calls_alloca && ! really_return)
62b10bbc 5463 abort ();
ff9940b0 5464
f3bb6135
RE
5465 for (reg = 0; reg <= 10; reg++)
5466 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0
RE
5467 live_regs++;
5468
ed0e6530
PB
5469 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
5470 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
5471 live_regs++;
5472
62b10bbc 5473 if (live_regs || (regs_ever_live[LR_REGNUM] && ! lr_save_eliminated))
ff9940b0
RE
5474 live_regs++;
5475
5476 if (frame_pointer_needed)
5477 live_regs += 4;
5478
5479 if (live_regs)
5480 {
62b10bbc 5481 if (lr_save_eliminated || ! regs_ever_live[LR_REGNUM])
ff9940b0 5482 live_regs++;
f3bb6135 5483
ff9940b0 5484 if (frame_pointer_needed)
84ed5e79
RE
5485 strcpy (instr,
5486 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
ff9940b0 5487 else
84ed5e79
RE
5488 strcpy (instr,
5489 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
5490
5491 for (reg = 0; reg <= 10; reg++)
62b10bbc 5492 if (regs_ever_live[reg]
6ed30148 5493 && (! call_used_regs[reg]
ed0e6530
PB
5494 || (flag_pic && ! TARGET_SINGLE_PIC_BASE
5495 && reg == PIC_OFFSET_TABLE_REGNUM)))
ff9940b0 5496 {
1d5473cb 5497 strcat (instr, "%|");
ff9940b0
RE
5498 strcat (instr, reg_names[reg]);
5499 if (--live_regs)
5500 strcat (instr, ", ");
5501 }
f3bb6135 5502
ff9940b0
RE
5503 if (frame_pointer_needed)
5504 {
1d5473cb 5505 strcat (instr, "%|");
ff9940b0
RE
5506 strcat (instr, reg_names[11]);
5507 strcat (instr, ", ");
1d5473cb 5508 strcat (instr, "%|");
ff9940b0
RE
5509 strcat (instr, reg_names[13]);
5510 strcat (instr, ", ");
1d5473cb 5511 strcat (instr, "%|");
6cfc7210 5512 strcat (instr, TARGET_INTERWORK || (! really_return)
62b10bbc 5513 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
ff9940b0
RE
5514 }
5515 else
1d5473cb
RE
5516 {
5517 strcat (instr, "%|");
6cfc7210 5518 if (TARGET_INTERWORK && really_return)
62b10bbc 5519 strcat (instr, reg_names[IP_REGNUM]);
da6558fd 5520 else
62b10bbc 5521 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
1d5473cb 5522 }
2b835d68 5523 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 5524 output_asm_insn (instr, &operand);
da6558fd 5525
6cfc7210 5526 if (TARGET_INTERWORK && really_return)
da6558fd
NC
5527 {
5528 strcpy (instr, "bx%?");
5529 strcat (instr, reverse ? "%D0" : "%d0");
5530 strcat (instr, "\t%|");
5531 strcat (instr, frame_pointer_needed ? "lr" : "ip");
5532
5533 output_asm_insn (instr, & operand);
5534 }
ff9940b0
RE
5535 }
5536 else if (really_return)
5537 {
6cfc7210 5538 if (TARGET_INTERWORK)
25b1c156 5539 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
b111229a
RE
5540 else
5541 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
5542 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
da6558fd
NC
5543
5544 output_asm_insn (instr, & operand);
ff9940b0 5545 }
f3bb6135 5546
ff9940b0
RE
5547 return "";
5548}
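
/* For example (illustrative only, not generated output): with a frame
   pointer and r4 live, the string built above comes out as
	ldmea	fp, {r4, fp, sp, pc}
   while a simple leaf return under APCS-32 is just
	mov	pc, lr
   (or "bx\tlr" when interworking is enabled).  */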
5549
e82ea128
DE
5550/* Return nonzero if optimizing and the current function is volatile.
5551 Such functions never return, and many memory cycles can be saved
5552 by not storing register values that will never be needed again.
5553 This optimization was added to speed up context switching in a
5554 kernel application. */
a0b2ce4c 5555
e2c671ba
RE
5556int
5557arm_volatile_func ()
5558{
5559 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
5560}
5561
ef179a26
NC
5562/* Write the function name into the code section, directly preceding
5563 the function prologue.
5564
5565 Code will be output similar to this:
5566 t0
5567 .ascii "arm_poke_function_name", 0
5568 .align
5569 t1
5570 .word 0xff000000 + (t1 - t0)
5571 arm_poke_function_name
5572 mov ip, sp
5573 stmfd sp!, {fp, ip, lr, pc}
5574 sub fp, ip, #4
5575
5576 When performing a stack backtrace, code can inspect the value
5577 of 'pc' stored at 'fp' + 0. If the trace function then looks
5578 at location pc - 12 and the top 8 bits are set, then we know
 5579 that there is a function name embedded immediately preceding this
 5580 location, whose length is given by (pc[-3] & ~0xff000000).
5581
5582 We assume that pc is declared as a pointer to an unsigned long.
5583
5584 It is of no benefit to output the function name if we are assembling
5585 a leaf function. These function types will not contain a stack
 5586 backtrace structure, so it is not possible to determine the
5587 function name. */
5588
5589void
5590arm_poke_function_name (stream, name)
5591 FILE * stream;
5592 char * name;
5593{
5594 unsigned long alignlength;
5595 unsigned long length;
5596 rtx x;
5597
949d79eb
RE
5598 length = strlen (name) + 1;
5599 alignlength = (length + 3) & ~3;
ef179a26 5600
949d79eb 5601 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26
NC
5602 ASM_OUTPUT_ALIGN (stream, 2);
5603 x = GEN_INT (0xff000000UL + alignlength);
5604 ASM_OUTPUT_INT (stream, x);
5605}
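
/* For illustration only, not part of the compiler: a stack backtrace
   routine could recover the embedded name roughly as follows, given a
   saved PC value fetched from the backtrace structure.  The function
   name arm_recover_function_name is hypothetical.  */
#if 0
static const char *
arm_recover_function_name (pc)
     unsigned long * pc;
{
  unsigned long marker = pc[-3];

  /* The top eight bits being set mark an embedded function name.  */
  if ((marker & 0xff000000) != 0xff000000)
    return 0;

  /* The low 24 bits hold the padded length of the name string,
     which sits immediately before the marker word.  */
  return (const char *) pc - 12 - (marker & ~0xff000000);
}
#endif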
5606
ff9940b0
RE
5607/* The amount of stack adjustment that happens here, in output_return and in
5608 output_epilogue must be exactly the same as was calculated during reload,
5609 or things will point to the wrong place. The only time we can safely
5610 ignore this constraint is when a function has no arguments on the stack,
 5611 no stack frame requirement and no live registers except for `lr'. If we
5612 can guarantee that by making all function calls into tail calls and that
5613 lr is not clobbered in any other way, then there is no need to push lr
5614 onto the stack. */
5615
cce8749e 5616void
f3bb6135 5617output_func_prologue (f, frame_size)
6cfc7210 5618 FILE * f;
cce8749e
CH
5619 int frame_size;
5620{
f3bb6135 5621 int reg, live_regs_mask = 0;
e2c671ba
RE
5622 int volatile_func = (optimize > 0
5623 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 5624
cce8749e
CH
5625 /* Nonzero if we must stuff some register arguments onto the stack as if
5626 they were passed there. */
5627 int store_arg_regs = 0;
5628
abaa26e5
RE
5629 if (arm_ccfsm_state || arm_target_insn)
5630 abort (); /* Sanity check */
31fdb4d5
DE
5631
5632 if (arm_naked_function_p (current_function_decl))
5633 return;
5634
ff9940b0
RE
5635 return_used_this_function = 0;
5636 lr_save_eliminated = 0;
5637
dd18ae56
NC
5638 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
5639 current_function_args_size,
5640 current_function_pretend_args_size, frame_size);
5641 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
5642 frame_pointer_needed,
5643 current_function_anonymous_args);
cce8749e 5644
e2c671ba 5645 if (volatile_func)
dd18ae56 5646 asm_fprintf (f, "\t%@ Volatile function.\n");
e2c671ba 5647
cce8749e
CH
5648 if (current_function_anonymous_args && current_function_pretend_args_size)
5649 store_arg_regs = 1;
5650
f3bb6135
RE
5651 for (reg = 0; reg <= 10; reg++)
5652 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e
CH
5653 live_regs_mask |= (1 << reg);
5654
dd18ae56 5655 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
ed0e6530 5656 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
5657 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
5658
ff9940b0 5659 if (frame_pointer_needed)
e2c671ba 5660 live_regs_mask |= 0xD800;
62b10bbc 5661 else if (regs_ever_live[LR_REGNUM])
ff9940b0
RE
5662 {
5663 if (! current_function_args_size
f3bb6135 5664 && ! function_really_clobbers_lr (get_insns ()))
e2c671ba 5665 lr_save_eliminated = 1;
ff9940b0 5666 else
62b10bbc 5667 live_regs_mask |= 1 << LR_REGNUM;
ff9940b0 5668 }
cce8749e 5669
cce8749e
CH
5670 if (live_regs_mask)
5671 {
ff9940b0
RE
 5672 /* If a DImode load/store multiple is used, and the base register
 5673 is r3, then r4 can become an ever-live register without lr
 5674 doing so; in this case we need to push lr as well, or we
 5675 will fail to get a proper return. */
5676
62b10bbc 5677 live_regs_mask |= 1 << LR_REGNUM;
ff9940b0 5678 lr_save_eliminated = 0;
f3bb6135 5679
cce8749e
CH
5680 }
5681
e2c671ba 5682 if (lr_save_eliminated)
dd18ae56 5683 asm_fprintf (f,"\t%@ I don't think this function clobbers lr\n");
32de079a
RE
5684
5685#ifdef AOF_ASSEMBLER
5686 if (flag_pic)
dd18ae56 5687 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 5688#endif
f3bb6135 5689}
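
/* A sketch of the annotation the above emits at the head of each
   function (the figures are illustrative):

	@ args = 0, pretend = 0, frame = 8
	@ frame_needed = 1, current_function_anonymous_args = 0
*/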
cce8749e 5690
949d79eb
RE
5691char *
5692arm_output_epilogue ()
cce8749e 5693{
949d79eb
RE
5694 int reg;
5695 int live_regs_mask = 0;
5696 /* If we need this, then it will always be at least this much */
b111229a 5697 int floats_offset = 12;
cce8749e 5698 rtx operands[3];
949d79eb
RE
5699 int frame_size = get_frame_size ();
5700 FILE *f = asm_out_file;
e2c671ba
RE
5701 int volatile_func = (optimize > 0
5702 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 5703
b36ba79f 5704 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 5705 return "";
cce8749e 5706
31fdb4d5
DE
5707 /* Naked functions don't have epilogues. */
5708 if (arm_naked_function_p (current_function_decl))
949d79eb 5709 return "";
31fdb4d5 5710
e2c671ba 5711 /* A volatile function should never return. Call abort. */
c11145f6 5712 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 5713 {
86efdc8e 5714 rtx op;
ed0e6530 5715 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 5716 assemble_external_libcall (op);
e2c671ba 5717 output_asm_insn ("bl\t%a0", &op);
949d79eb 5718 return "";
e2c671ba
RE
5719 }
5720
f3bb6135
RE
5721 for (reg = 0; reg <= 10; reg++)
5722 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 5723 {
ff9940b0
RE
5724 live_regs_mask |= (1 << reg);
5725 floats_offset += 4;
cce8749e
CH
5726 }
5727
ed0e6530
PB
5728 /* If we aren't loading the PIC register, don't stack it even though it may
5729 be live. */
5730 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
5731 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
5732 {
5733 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
5734 floats_offset += 4;
5735 }
5736
ff9940b0 5737 if (frame_pointer_needed)
cce8749e 5738 {
b111229a
RE
5739 if (arm_fpu_arch == FP_SOFT2)
5740 {
5741 for (reg = 23; reg > 15; reg--)
5742 if (regs_ever_live[reg] && ! call_used_regs[reg])
5743 {
5744 floats_offset += 12;
dd18ae56
NC
5745 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
5746 reg, FP_REGNUM, floats_offset);
b111229a
RE
5747 }
5748 }
5749 else
5750 {
5751 int start_reg = 23;
5752
5753 for (reg = 23; reg > 15; reg--)
5754 {
5755 if (regs_ever_live[reg] && ! call_used_regs[reg])
5756 {
5757 floats_offset += 12;
6cfc7210 5758
b111229a
RE
5759 /* We can't unstack more than four registers at once */
5760 if (start_reg - reg == 3)
5761 {
dd18ae56
NC
5762 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
5763 reg, FP_REGNUM, floats_offset);
b111229a
RE
5764 start_reg = reg - 1;
5765 }
5766 }
5767 else
5768 {
5769 if (reg != start_reg)
dd18ae56
NC
5770 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
5771 reg + 1, start_reg - reg,
5772 FP_REGNUM, floats_offset);
b111229a
RE
5773 start_reg = reg - 1;
5774 }
5775 }
5776
5777 /* Just in case the last register checked also needs unstacking. */
5778 if (reg != start_reg)
dd18ae56
NC
5779 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
5780 reg + 1, start_reg - reg,
5781 FP_REGNUM, floats_offset);
b111229a 5782 }
da6558fd 5783
6cfc7210 5784 if (TARGET_INTERWORK)
b111229a
RE
5785 {
5786 live_regs_mask |= 0x6800;
dd18ae56
NC
5787 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
5788 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
b111229a
RE
5789 }
5790 else
5791 {
5792 live_regs_mask |= 0xA800;
dd18ae56 5793 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
5794 TARGET_APCS_32 ? FALSE : TRUE);
5795 }
cce8749e
CH
5796 }
5797 else
5798 {
d2288d8d 5799 /* Restore stack pointer if necessary. */
56636818 5800 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
5801 {
5802 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
5803 operands[2] = GEN_INT (frame_size
5804 + current_function_outgoing_args_size);
d2288d8d
TG
5805 output_add_immediate (operands);
5806 }
5807
b111229a
RE
5808 if (arm_fpu_arch == FP_SOFT2)
5809 {
5810 for (reg = 16; reg < 24; reg++)
5811 if (regs_ever_live[reg] && ! call_used_regs[reg])
dd18ae56
NC
5812 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
5813 reg, SP_REGNUM);
b111229a
RE
5814 }
5815 else
5816 {
5817 int start_reg = 16;
5818
5819 for (reg = 16; reg < 24; reg++)
5820 {
5821 if (regs_ever_live[reg] && ! call_used_regs[reg])
5822 {
5823 if (reg - start_reg == 3)
5824 {
dd18ae56
NC
5825 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
5826 start_reg, SP_REGNUM);
b111229a
RE
5827 start_reg = reg + 1;
5828 }
5829 }
5830 else
5831 {
5832 if (reg != start_reg)
dd18ae56
NC
5833 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
5834 start_reg, reg - start_reg,
5835 SP_REGNUM);
6cfc7210 5836
b111229a
RE
5837 start_reg = reg + 1;
5838 }
5839 }
5840
5841 /* Just in case the last register checked also needs unstacking. */
5842 if (reg != start_reg)
dd18ae56
NC
5843 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
5844 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
5845 }
5846
62b10bbc 5847 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 5848 {
6cfc7210 5849 if (TARGET_INTERWORK)
b111229a
RE
5850 {
5851 if (! lr_save_eliminated)
62b10bbc 5852 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2
NC
5853
5854 if (live_regs_mask != 0)
dd18ae56 5855 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask, FALSE);
6cfc7210 5856
dd18ae56 5857 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
b111229a
RE
5858 }
5859 else if (lr_save_eliminated)
6cfc7210
NC
5860 asm_fprintf (f, "\tmov%c\t%r, %r\n",
5861 TARGET_APCS_32 ? ' ' : 's',
dd18ae56 5862 PC_REGNUM, LR_REGNUM);
32de079a 5863 else
dd18ae56 5864 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask | 0x8000,
32de079a 5865 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
5866 }
5867 else
5868 {
62b10bbc 5869 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 5870 {
32de079a
RE
5871 /* Restore the integer regs, and the return address into lr */
5872 if (! lr_save_eliminated)
62b10bbc 5873 live_regs_mask |= 1 << LR_REGNUM;
32de079a
RE
5874
5875 if (live_regs_mask != 0)
dd18ae56 5876 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask, FALSE);
cce8749e 5877 }
b111229a 5878
cce8749e
CH
5879 if (current_function_pretend_args_size)
5880 {
32de079a 5881 /* Unwind the pre-pushed regs */
cce8749e 5882 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 5883 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
5884 output_add_immediate (operands);
5885 }
32de079a 5886 /* And finally, go home */
6cfc7210 5887 if (TARGET_INTERWORK)
dd18ae56 5888 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
25b1c156 5889 else if (TARGET_APCS_32)
dd18ae56 5890 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
b111229a 5891 else
dd18ae56 5892 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
cce8749e
CH
5893 }
5894 }
f3bb6135 5895
949d79eb
RE
5896 return "";
5897}
5898
5899void
5900output_func_epilogue (f, frame_size)
5901 FILE *f ATTRIBUTE_UNUSED;
5902 int frame_size;
5903{
5904 if (use_return_insn (FALSE) && return_used_this_function
5905 && (frame_size + current_function_outgoing_args_size) != 0
5906 && ! (frame_pointer_needed && TARGET_APCS))
5907 abort ();
f3bb6135 5908
4b632bf1 5909 /* Reset the ARM-specific per-function variables. */
cce8749e 5910 current_function_anonymous_args = 0;
4b632bf1 5911 after_arm_reorg = 0;
f3bb6135 5912}
e2c671ba
RE
5913
5914static void
5915emit_multi_reg_push (mask)
5916 int mask;
5917{
5918 int num_regs = 0;
5919 int i, j;
5920 rtx par;
5921
5922 for (i = 0; i < 16; i++)
5923 if (mask & (1 << i))
5924 num_regs++;
5925
5926 if (num_regs == 0 || num_regs > 16)
5927 abort ();
5928
43cffd11 5929 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
e2c671ba
RE
5930
5931 for (i = 0; i < 16; i++)
5932 {
5933 if (mask & (1 << i))
5934 {
5935 XVECEXP (par, 0, 0)
43cffd11
RE
5936 = gen_rtx_SET (VOIDmode,
5937 gen_rtx_MEM (BLKmode,
5938 gen_rtx_PRE_DEC (BLKmode,
5939 stack_pointer_rtx)),
5940 gen_rtx_UNSPEC (BLKmode,
5941 gen_rtvec (1,
5942 gen_rtx_REG (SImode, i)),
5943 2));
e2c671ba
RE
5944 break;
5945 }
5946 }
5947
5948 for (j = 1, i++; j < num_regs; i++)
5949 {
5950 if (mask & (1 << i))
5951 {
5952 XVECEXP (par, 0, j)
43cffd11 5953 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, i));
e2c671ba
RE
5954 j++;
5955 }
5956 }
b111229a
RE
5957
5958 emit_insn (par);
5959}
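
/* For illustration (a sketch, not exact RTL): a mask covering r4, r5
   and lr yields a PARALLEL along the lines of

	(parallel [(set (mem:BLK (pre_dec:BLK sp))
			(unspec:BLK [(reg:SI 4)] 2))
		   (use (reg:SI 5))
		   (use (reg:SI 14))])

   which is matched by the store-multiple (push) pattern and assembles
   to an stmfd of all three registers.  */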
5960
5961static void
5962emit_sfm (base_reg, count)
5963 int base_reg;
5964 int count;
5965{
5966 rtx par;
5967 int i;
5968
43cffd11
RE
5969 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
5970
5971 XVECEXP (par, 0, 0)
5972 = gen_rtx_SET (VOIDmode,
5973 gen_rtx_MEM (BLKmode,
5974 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
5975 gen_rtx_UNSPEC (BLKmode,
5976 gen_rtvec (1, gen_rtx_REG (XFmode,
5977 base_reg++)),
5978 2));
b111229a 5979 for (i = 1; i < count; i++)
43cffd11
RE
5980 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode,
5981 gen_rtx_REG (XFmode, base_reg++));
b111229a 5982
e2c671ba
RE
5983 emit_insn (par);
5984}
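
/* Illustrative: emit_sfm (16, 2) describes a push of f0 and f1 (the
   FP registers are numbers 16-23), and the matching pattern assembles
   to something like
	sfmfd	f0, 2, [sp]!
*/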
5985
5986void
5987arm_expand_prologue ()
5988{
5989 int reg;
56636818
JL
5990 rtx amount = GEN_INT (-(get_frame_size ()
5991 + current_function_outgoing_args_size));
e2c671ba
RE
5992 int live_regs_mask = 0;
5993 int store_arg_regs = 0;
949d79eb
RE
5994 /* If this function doesn't return, then there is no need to push
5995 the call-saved regs. */
e2c671ba
RE
5996 int volatile_func = (optimize > 0
5997 && TREE_THIS_VOLATILE (current_function_decl));
5998
31fdb4d5
DE
5999 /* Naked functions don't have prologues. */
6000 if (arm_naked_function_p (current_function_decl))
6001 return;
6002
e2c671ba
RE
6003 if (current_function_anonymous_args && current_function_pretend_args_size)
6004 store_arg_regs = 1;
6005
6006 if (! volatile_func)
6ed30148
RE
6007 {
6008 for (reg = 0; reg <= 10; reg++)
6009 if (regs_ever_live[reg] && ! call_used_regs[reg])
6010 live_regs_mask |= 1 << reg;
6011
6012 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6013 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
e2c671ba 6014
62b10bbc
NC
6015 if (regs_ever_live[LR_REGNUM])
6016 live_regs_mask |= 1 << LR_REGNUM;
6ed30148 6017 }
e2c671ba
RE
6018
6019 if (frame_pointer_needed)
6020 {
6021 live_regs_mask |= 0xD800;
62b10bbc 6022 emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
e2c671ba
RE
6023 stack_pointer_rtx));
6024 }
6025
6026 if (current_function_pretend_args_size)
6027 {
6028 if (store_arg_regs)
6029 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
6030 & 0xf);
6031 else
6032 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
6033 GEN_INT (-current_function_pretend_args_size)));
6034 }
6035
6036 if (live_regs_mask)
6037 {
6038 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 6039 we won't get a proper return. */
62b10bbc 6040 live_regs_mask |= 1 << LR_REGNUM;
e2c671ba
RE
6041 emit_multi_reg_push (live_regs_mask);
6042 }
6043
6044 /* For now the integer regs are still pushed in output_func_epilogue (). */
6045
6046 if (! volatile_func)
b111229a
RE
6047 {
6048 if (arm_fpu_arch == FP_SOFT2)
6049 {
6050 for (reg = 23; reg > 15; reg--)
6051 if (regs_ever_live[reg] && ! call_used_regs[reg])
43cffd11
RE
6052 emit_insn (gen_rtx_SET
6053 (VOIDmode,
6054 gen_rtx_MEM (XFmode,
6055 gen_rtx_PRE_DEC (XFmode,
6056 stack_pointer_rtx)),
6057 gen_rtx_REG (XFmode, reg)));
b111229a
RE
6058 }
6059 else
6060 {
6061 int start_reg = 23;
6062
6063 for (reg = 23; reg > 15; reg--)
6064 {
6065 if (regs_ever_live[reg] && ! call_used_regs[reg])
6066 {
6067 if (start_reg - reg == 3)
6068 {
6069 emit_sfm (reg, 4);
6070 start_reg = reg - 1;
6071 }
6072 }
6073 else
6074 {
6075 if (start_reg != reg)
6076 emit_sfm (reg + 1, start_reg - reg);
6077 start_reg = reg - 1;
6078 }
6079 }
6080
6081 if (start_reg != reg)
6082 emit_sfm (reg + 1, start_reg - reg);
6083 }
6084 }
e2c671ba
RE
6085
6086 if (frame_pointer_needed)
62b10bbc 6087 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx_REG (SImode, IP_REGNUM),
e2c671ba
RE
6088 (GEN_INT
6089 (-(4 + current_function_pretend_args_size)))));
6090
6091 if (amount != const0_rtx)
6092 {
6093 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
43cffd11
RE
6094 emit_insn (gen_rtx_CLOBBER (VOIDmode,
6095 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
e2c671ba
RE
6096 }
6097
6098 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
6099 the call to mcount. Similarly if the user has requested no
 6100 scheduling in the prologue. */
6101 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba
RE
6102 emit_insn (gen_blockage ());
6103}
6104
cce8749e 6105\f
9997d19d
RE
 6106 /* If CODE is 'd', then X is a condition operand and the instruction
 6107 should only be executed if the condition is true.
ddd5a7c1 6108 If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
6109 should only be executed if the condition is false: however, if the mode
6110 of the comparison is CCFPEmode, then always execute the instruction -- we
6111 do this because in these circumstances !GE does not necessarily imply LT;
6112 in these cases the instruction pattern will take care to make sure that
6113 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 6114 doing this instruction unconditionally.
9997d19d
RE
6115 If CODE is 'N' then X is a floating point operand that must be negated
6116 before output.
6117 If CODE is 'B' then output a bitwise inverted value of X (a const int).
6118 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
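
   For example (a sketch): with CODE 'M' and X a DImode value held in
   r0, the output is "{r0-r1}"; with CODE 'B' and X the constant 5,
   the bitwise inverse -6 is printed; and '%|' simply emits the
   register prefix, if the target uses one.  */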
6119
6120void
6121arm_print_operand (stream, x, code)
62b10bbc 6122 FILE * stream;
9997d19d
RE
6123 rtx x;
6124 int code;
6125{
6126 switch (code)
6127 {
6128 case '@':
f3139301 6129 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
6130 return;
6131
6132 case '|':
f3139301 6133 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
6134 return;
6135
6136 case '?':
6137 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
6138 fputs (arm_condition_codes[arm_current_cc], stream);
6139 return;
6140
6141 case 'N':
6142 {
6143 REAL_VALUE_TYPE r;
6144 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6145 r = REAL_VALUE_NEGATE (r);
6146 fprintf (stream, "%s", fp_const_from_val (&r));
6147 }
6148 return;
6149
6150 case 'B':
6151 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
6152 {
6153 HOST_WIDE_INT val;
6154 val = ARM_SIGN_EXTEND (~ INTVAL (x));
36ba9cb8 6155 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6156 }
9997d19d
RE
6157 else
6158 {
6159 putc ('~', stream);
6160 output_addr_const (stream, x);
6161 }
6162 return;
6163
6164 case 'i':
6165 fprintf (stream, "%s", arithmetic_instr (x, 1));
6166 return;
6167
6168 case 'I':
6169 fprintf (stream, "%s", arithmetic_instr (x, 0));
6170 return;
6171
6172 case 'S':
6173 {
6174 HOST_WIDE_INT val;
4bc74ece 6175 char * shift = shift_op (x, & val);
9997d19d 6176
e2c671ba
RE
6177 if (shift)
6178 {
4bc74ece 6179 fprintf (stream, ", %s ", shift_op (x, & val));
e2c671ba
RE
6180 if (val == -1)
6181 arm_print_operand (stream, XEXP (x, 1), 0);
6182 else
4bc74ece
NC
6183 {
6184 fputc ('#', stream);
36ba9cb8 6185 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6186 }
e2c671ba 6187 }
9997d19d
RE
6188 }
6189 return;
6190
c1c2bc04
RE
6191 case 'Q':
6192 if (REGNO (x) > 15)
6193 abort ();
6194 fputs (REGISTER_PREFIX, stream);
6195 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
6196 return;
6197
9997d19d
RE
6198 case 'R':
6199 if (REGNO (x) > 15)
6200 abort ();
f3139301 6201 fputs (REGISTER_PREFIX, stream);
c1c2bc04 6202 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
9997d19d
RE
6203 return;
6204
6205 case 'm':
f3139301 6206 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
6207 if (GET_CODE (XEXP (x, 0)) == REG)
6208 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
6209 else
6210 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
6211 return;
6212
6213 case 'M':
dd18ae56
NC
6214 asm_fprintf (stream, "{%r-%r}",
6215 REGNO (x), REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
6216 return;
6217
6218 case 'd':
6219 if (x)
6220 fputs (arm_condition_codes[get_arm_condition_code (x)],
6221 stream);
6222 return;
6223
6224 case 'D':
84ed5e79 6225 if (x)
9997d19d
RE
6226 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
6227 (get_arm_condition_code (x))],
6228 stream);
6229 return;
6230
6231 default:
6232 if (x == 0)
6233 abort ();
6234
6235 if (GET_CODE (x) == REG)
1d5473cb 6236 {
f3139301 6237 fputs (REGISTER_PREFIX, stream);
1d5473cb
RE
6238 fputs (reg_names[REGNO (x)], stream);
6239 }
9997d19d
RE
6240 else if (GET_CODE (x) == MEM)
6241 {
6242 output_memory_reference_mode = GET_MODE (x);
6243 output_address (XEXP (x, 0));
6244 }
6245 else if (GET_CODE (x) == CONST_DOUBLE)
6246 fprintf (stream, "#%s", fp_immediate_constant (x));
6247 else if (GET_CODE (x) == NEG)
6248 abort (); /* This should never happen now. */
6249 else
6250 {
6251 fputc ('#', stream);
6252 output_addr_const (stream, x);
6253 }
6254 }
6255}
cce8749e
CH
6256\f
6257/* A finite state machine takes care of noticing whether or not instructions
6258 can be conditionally executed, and thus decrease execution time and code
6259 size by deleting branch instructions. The fsm is controlled by
6260 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
6261
6262/* The state of the fsm controlling condition codes are:
6263 0: normal, do nothing special
6264 1: make ASM_OUTPUT_OPCODE not output this instruction
6265 2: make ASM_OUTPUT_OPCODE not output this instruction
6266 3: make instructions conditional
6267 4: make instructions conditional
6268
6269 State transitions (state->state by whom under condition):
6270 0 -> 1 final_prescan_insn if the `target' is a label
6271 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
6272 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
6273 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
6274 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
6275 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
6276 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
6277 (the target insn is arm_target_insn).
6278
ff9940b0
RE
6279 If the jump clobbers the conditions then we use states 2 and 4.
6280
6281 A similar thing can be done with conditional return insns.
6282
cce8749e
CH
6283 XXX In case the `target' is an unconditional branch, this conditionalising
6284 of the instructions always reduces code size, but not always execution
6285 time. But then, I want to reduce the code size to somewhere near what
6286 /bin/cc produces. */
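
/* For example (a sketch, not actual compiler output): the fsm turns

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   into

	cmp	r0, #0
	addne	r1, r1, #1

   by suppressing the branch and conditionalising the skipped insn.  */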
6287
cce8749e
CH
6288/* Returns the index of the ARM condition code string in
6289 `arm_condition_codes'. COMPARISON should be an rtx like
6290 `(eq (...) (...))'. */
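
/* For example (a sketch): CC_SWPmode records a comparison whose
   operands were swapped when the condition was generated, so a GT
   comparison in that mode maps to ARM_LT below.  */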
6291
84ed5e79 6292static enum arm_cond_code
cce8749e
CH
6293get_arm_condition_code (comparison)
6294 rtx comparison;
6295{
5165176d 6296 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
6297 register int code;
6298 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
6299
6300 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 6301 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
6302 XEXP (comparison, 1));
6303
6304 switch (mode)
cce8749e 6305 {
84ed5e79
RE
6306 case CC_DNEmode: code = ARM_NE; goto dominance;
6307 case CC_DEQmode: code = ARM_EQ; goto dominance;
6308 case CC_DGEmode: code = ARM_GE; goto dominance;
6309 case CC_DGTmode: code = ARM_GT; goto dominance;
6310 case CC_DLEmode: code = ARM_LE; goto dominance;
6311 case CC_DLTmode: code = ARM_LT; goto dominance;
6312 case CC_DGEUmode: code = ARM_CS; goto dominance;
6313 case CC_DGTUmode: code = ARM_HI; goto dominance;
6314 case CC_DLEUmode: code = ARM_LS; goto dominance;
6315 case CC_DLTUmode: code = ARM_CC;
6316
6317 dominance:
6318 if (comp_code != EQ && comp_code != NE)
6319 abort ();
6320
6321 if (comp_code == EQ)
6322 return ARM_INVERSE_CONDITION_CODE (code);
6323 return code;
6324
5165176d 6325 case CC_NOOVmode:
84ed5e79 6326 switch (comp_code)
5165176d 6327 {
84ed5e79
RE
6328 case NE: return ARM_NE;
6329 case EQ: return ARM_EQ;
6330 case GE: return ARM_PL;
6331 case LT: return ARM_MI;
5165176d
RE
6332 default: abort ();
6333 }
6334
6335 case CC_Zmode:
6336 case CCFPmode:
84ed5e79 6337 switch (comp_code)
5165176d 6338 {
84ed5e79
RE
6339 case NE: return ARM_NE;
6340 case EQ: return ARM_EQ;
5165176d
RE
6341 default: abort ();
6342 }
6343
6344 case CCFPEmode:
84ed5e79
RE
6345 switch (comp_code)
6346 {
6347 case GE: return ARM_GE;
6348 case GT: return ARM_GT;
6349 case LE: return ARM_LS;
6350 case LT: return ARM_MI;
6351 default: abort ();
6352 }
6353
6354 case CC_SWPmode:
6355 switch (comp_code)
6356 {
6357 case NE: return ARM_NE;
6358 case EQ: return ARM_EQ;
6359 case GE: return ARM_LE;
6360 case GT: return ARM_LT;
6361 case LE: return ARM_GE;
6362 case LT: return ARM_GT;
6363 case GEU: return ARM_LS;
6364 case GTU: return ARM_CC;
6365 case LEU: return ARM_CS;
6366 case LTU: return ARM_HI;
6367 default: abort ();
6368 }
6369
bd9c7e23
RE
6370 case CC_Cmode:
6371 switch (comp_code)
6372 {
6373 case LTU: return ARM_CS;
6374 case GEU: return ARM_CC;
6375 default: abort ();
6376 }
6377
5165176d 6378 case CCmode:
84ed5e79 6379 switch (comp_code)
5165176d 6380 {
84ed5e79
RE
6381 case NE: return ARM_NE;
6382 case EQ: return ARM_EQ;
6383 case GE: return ARM_GE;
6384 case GT: return ARM_GT;
6385 case LE: return ARM_LE;
6386 case LT: return ARM_LT;
6387 case GEU: return ARM_CS;
6388 case GTU: return ARM_HI;
6389 case LEU: return ARM_LS;
6390 case LTU: return ARM_CC;
5165176d
RE
6391 default: abort ();
6392 }
6393
cce8749e
CH
6394 default: abort ();
6395 }
84ed5e79
RE
6396
6397 abort ();
f3bb6135 6398}
cce8749e
CH
6399
6400
6401void
74bbc178 6402arm_final_prescan_insn (insn)
cce8749e 6403 rtx insn;
cce8749e
CH
6404{
6405 /* BODY will hold the body of INSN. */
6406 register rtx body = PATTERN (insn);
6407
6408 /* This will be 1 if trying to repeat the trick, and things need to be
6409 reversed if it appears to fail. */
6410 int reverse = 0;
6411
ff9940b0
RE
 6412 /* JUMP_CLOBBERS being one means that the condition codes are clobbered
 6413 when the branch is taken, even if the rtl suggests otherwise. It also
6414 means that we have to grub around within the jump expression to find
6415 out what the conditions are when the jump isn't taken. */
6416 int jump_clobbers = 0;
6417
6418 /* If we start with a return insn, we only succeed if we find another one. */
6419 int seeking_return = 0;
6420
cce8749e
CH
6421 /* START_INSN will hold the insn from where we start looking. This is the
6422 first insn after the following code_label if REVERSE is true. */
6423 rtx start_insn = insn;
6424
6425 /* If in state 4, check if the target branch is reached, in order to
6426 change back to state 0. */
6427 if (arm_ccfsm_state == 4)
6428 {
6429 if (insn == arm_target_insn)
f5a1b0d2
NC
6430 {
6431 arm_target_insn = NULL;
6432 arm_ccfsm_state = 0;
6433 }
cce8749e
CH
6434 return;
6435 }
6436
6437 /* If in state 3, it is possible to repeat the trick, if this insn is an
6438 unconditional branch to a label, and immediately following this branch
6439 is the previous target label which is only used once, and the label this
6440 branch jumps to is not too far off. */
6441 if (arm_ccfsm_state == 3)
6442 {
6443 if (simplejump_p (insn))
6444 {
6445 start_insn = next_nonnote_insn (start_insn);
6446 if (GET_CODE (start_insn) == BARRIER)
6447 {
6448 /* XXX Isn't this always a barrier? */
6449 start_insn = next_nonnote_insn (start_insn);
6450 }
6451 if (GET_CODE (start_insn) == CODE_LABEL
6452 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
6453 && LABEL_NUSES (start_insn) == 1)
6454 reverse = TRUE;
6455 else
6456 return;
6457 }
ff9940b0
RE
6458 else if (GET_CODE (body) == RETURN)
6459 {
6460 start_insn = next_nonnote_insn (start_insn);
6461 if (GET_CODE (start_insn) == BARRIER)
6462 start_insn = next_nonnote_insn (start_insn);
6463 if (GET_CODE (start_insn) == CODE_LABEL
6464 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
6465 && LABEL_NUSES (start_insn) == 1)
6466 {
6467 reverse = TRUE;
6468 seeking_return = 1;
6469 }
6470 else
6471 return;
6472 }
cce8749e
CH
6473 else
6474 return;
6475 }
6476
6477 if (arm_ccfsm_state != 0 && !reverse)
6478 abort ();
6479 if (GET_CODE (insn) != JUMP_INSN)
6480 return;
6481
ddd5a7c1 6482 /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
 6483 the jump should always come first. */
6484 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
6485 body = XVECEXP (body, 0, 0);
6486
6487#if 0
6488 /* If this is a conditional return then we don't want to know */
6489 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
6490 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
6491 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
6492 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
6493 return;
6494#endif
6495
cce8749e
CH
6496 if (reverse
6497 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
6498 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
6499 {
bd9c7e23
RE
6500 int insns_skipped;
6501 int fail = FALSE, succeed = FALSE;
cce8749e
CH
6502 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
6503 int then_not_else = TRUE;
ff9940b0 6504 rtx this_insn = start_insn, label = 0;
cce8749e 6505
ff9940b0 6506 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40
RE
6507 {
6508 /* The code below is wrong for these, and I haven't time to
6509 fix it now. So we just do the safe thing and return. This
6510 whole function needs re-writing anyway. */
6511 jump_clobbers = 1;
6512 return;
6513 }
ff9940b0 6514
cce8749e
CH
6515 /* Register the insn jumped to. */
6516 if (reverse)
ff9940b0
RE
6517 {
6518 if (!seeking_return)
6519 label = XEXP (SET_SRC (body), 0);
6520 }
cce8749e
CH
6521 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
6522 label = XEXP (XEXP (SET_SRC (body), 1), 0);
6523 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
6524 {
6525 label = XEXP (XEXP (SET_SRC (body), 2), 0);
6526 then_not_else = FALSE;
6527 }
ff9940b0
RE
6528 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
6529 seeking_return = 1;
6530 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
6531 {
6532 seeking_return = 1;
6533 then_not_else = FALSE;
6534 }
cce8749e
CH
6535 else
6536 abort ();
6537
6538 /* See how many insns this branch skips, and what kind of insns. If all
6539 insns are okay, and the label or unconditional branch to the same
6540 label is not too far away, succeed. */
6541 for (insns_skipped = 0;
b36ba79f 6542 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
6543 {
6544 rtx scanbody;
6545
6546 this_insn = next_nonnote_insn (this_insn);
6547 if (!this_insn)
6548 break;
6549
cce8749e
CH
6550 switch (GET_CODE (this_insn))
6551 {
6552 case CODE_LABEL:
6553 /* Succeed if it is the target label, otherwise fail since
6554 control falls in from somewhere else. */
6555 if (this_insn == label)
6556 {
ff9940b0
RE
6557 if (jump_clobbers)
6558 {
6559 arm_ccfsm_state = 2;
6560 this_insn = next_nonnote_insn (this_insn);
6561 }
6562 else
6563 arm_ccfsm_state = 1;
cce8749e
CH
6564 succeed = TRUE;
6565 }
6566 else
6567 fail = TRUE;
6568 break;
6569
ff9940b0 6570 case BARRIER:
cce8749e 6571 /* Succeed if the following insn is the target label.
ff9940b0
RE
6572 Otherwise fail.
6573 If return insns are used then the last insn in a function
6574 will be a barrier. */
cce8749e 6575 this_insn = next_nonnote_insn (this_insn);
ff9940b0 6576 if (this_insn && this_insn == label)
cce8749e 6577 {
ff9940b0
RE
6578 if (jump_clobbers)
6579 {
6580 arm_ccfsm_state = 2;
6581 this_insn = next_nonnote_insn (this_insn);
6582 }
6583 else
6584 arm_ccfsm_state = 1;
cce8749e
CH
6585 succeed = TRUE;
6586 }
6587 else
6588 fail = TRUE;
6589 break;
6590
ff9940b0 6591 case CALL_INSN:
2b835d68
RE
6592 /* If using 32-bit addresses the cc is not preserved over
6593 calls */
6594 if (TARGET_APCS_32)
bd9c7e23
RE
6595 {
6596 /* Succeed if the following insn is the target label,
6597 or if the following two insns are a barrier and
6598 the target label. */
6599 this_insn = next_nonnote_insn (this_insn);
6600 if (this_insn && GET_CODE (this_insn) == BARRIER)
6601 this_insn = next_nonnote_insn (this_insn);
6602
6603 if (this_insn && this_insn == label
b36ba79f 6604 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
6605 {
6606 if (jump_clobbers)
6607 {
6608 arm_ccfsm_state = 2;
6609 this_insn = next_nonnote_insn (this_insn);
6610 }
6611 else
6612 arm_ccfsm_state = 1;
6613 succeed = TRUE;
6614 }
6615 else
6616 fail = TRUE;
6617 }
ff9940b0 6618 break;
2b835d68 6619
cce8749e
CH
6620 case JUMP_INSN:
6621 /* If this is an unconditional branch to the same label, succeed.
6622 If it is to another label, do nothing. If it is conditional,
6623 fail. */
ed4c4348 6624 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 6625
ed4c4348 6626 scanbody = PATTERN (this_insn);
ff9940b0
RE
6627 if (GET_CODE (scanbody) == SET
6628 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
6629 {
6630 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
6631 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
6632 {
6633 arm_ccfsm_state = 2;
6634 succeed = TRUE;
6635 }
6636 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
6637 fail = TRUE;
6638 }
b36ba79f
RE
6639 /* Fail if a conditional return is undesirable (eg on a
6640 StrongARM), but still allow this if optimizing for size. */
6641 else if (GET_CODE (scanbody) == RETURN
6642 && ! use_return_insn (TRUE)
6643 && ! optimize_size)
6644 fail = TRUE;
ff9940b0
RE
6645 else if (GET_CODE (scanbody) == RETURN
6646 && seeking_return)
6647 {
6648 arm_ccfsm_state = 2;
6649 succeed = TRUE;
6650 }
6651 else if (GET_CODE (scanbody) == PARALLEL)
6652 {
6653 switch (get_attr_conds (this_insn))
6654 {
6655 case CONDS_NOCOND:
6656 break;
6657 default:
6658 fail = TRUE;
6659 break;
6660 }
6661 }
cce8749e
CH
6662 break;
6663
6664 case INSN:
ff9940b0
RE
6665 /* Instructions using or affecting the condition codes make it
6666 fail. */
ed4c4348 6667 scanbody = PATTERN (this_insn);
74641843
RE
6668 if (! (GET_CODE (scanbody) == SET
6669 || GET_CODE (scanbody) == PARALLEL)
6670 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
6671 fail = TRUE;
6672 break;
6673
6674 default:
6675 break;
6676 }
6677 }
6678 if (succeed)
6679 {
ff9940b0 6680 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 6681 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
6682 else if (seeking_return || arm_ccfsm_state == 2)
6683 {
6684 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
6685 {
6686 this_insn = next_nonnote_insn (this_insn);
6687 if (this_insn && (GET_CODE (this_insn) == BARRIER
6688 || GET_CODE (this_insn) == CODE_LABEL))
6689 abort ();
6690 }
6691 if (!this_insn)
6692 {
 6693 /* Oh, dear! We ran off the end; give up. */
6694 recog (PATTERN (insn), insn, NULL_PTR);
6695 arm_ccfsm_state = 0;
abaa26e5 6696 arm_target_insn = NULL;
ff9940b0
RE
6697 return;
6698 }
6699 arm_target_insn = this_insn;
6700 }
cce8749e
CH
6701 else
6702 abort ();
ff9940b0
RE
6703 if (jump_clobbers)
6704 {
6705 if (reverse)
6706 abort ();
6707 arm_current_cc =
6708 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
6709 0), 0), 1));
6710 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
6711 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
6712 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
6713 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
6714 }
6715 else
6716 {
6717 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
6718 what it was. */
6719 if (!reverse)
6720 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
6721 0));
6722 }
cce8749e 6723
cce8749e
CH
6724 if (reverse || then_not_else)
6725 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
6726 }
1ccbefce
RH
6727
6728 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 6729 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 6730 across this call; since the insn has been recognized already we
ff9940b0
RE
 6731 call recog directly). */
6732 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 6733 }
f3bb6135 6734}
cce8749e 6735
2b835d68
RE
6736#ifdef AOF_ASSEMBLER
6737/* Special functions only needed when producing AOF syntax assembler. */
6738
32de079a
RE
6739rtx aof_pic_label = NULL_RTX;
6740struct pic_chain
6741{
62b10bbc
NC
6742 struct pic_chain * next;
6743 char * symname;
32de079a
RE
6744};
6745
62b10bbc 6746static struct pic_chain * aof_pic_chain = NULL;
32de079a
RE
6747
6748rtx
6749aof_pic_entry (x)
6750 rtx x;
6751{
62b10bbc 6752 struct pic_chain ** chainp;
32de079a
RE
6753 int offset;
6754
6755 if (aof_pic_label == NULL_RTX)
6756 {
92a432f4
RE
6757 /* We mark this here and not in arm_add_gc_roots() to avoid
6758 polluting even more code with ifdefs, and because it never
6759 contains anything useful until we assign to it here. */
6760 ggc_add_rtx_root (&aof_pic_label, 1);
32de079a
RE
6761 /* This needs to persist throughout the compilation. */
6762 end_temporary_allocation ();
43cffd11 6763 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
6764 resume_temporary_allocation ();
6765 }
6766
6767 for (offset = 0, chainp = &aof_pic_chain; *chainp;
6768 offset += 4, chainp = &(*chainp)->next)
6769 if ((*chainp)->symname == XSTR (x, 0))
6770 return plus_constant (aof_pic_label, offset);
6771
6772 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
6773 (*chainp)->next = NULL;
6774 (*chainp)->symname = XSTR (x, 0);
6775 return plus_constant (aof_pic_label, offset);
6776}
6777
6778void
6779aof_dump_pic_table (f)
62b10bbc 6780 FILE * f;
32de079a 6781{
62b10bbc 6782 struct pic_chain * chain;
32de079a
RE
6783
6784 if (aof_pic_chain == NULL)
6785 return;
6786
dd18ae56
NC
6787 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
6788 PIC_OFFSET_TABLE_REGNUM,
6789 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
6790 fputs ("|x$adcons|\n", f);
6791
6792 for (chain = aof_pic_chain; chain; chain = chain->next)
6793 {
6794 fputs ("\tDCD\t", f);
6795 assemble_name (f, chain->symname);
6796 fputs ("\n", f);
6797 }
6798}
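
/* Illustrative output only (assuming the PIC base register prints as
   sl; the symbol names are examples):

	AREA |sl$$adcons|, BASED sl
   |x$adcons|
	DCD foo
	DCD bar
*/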
6799
2b835d68
RE
6800int arm_text_section_count = 1;
6801
6802char *
84ed5e79 6803aof_text_section ()
2b835d68
RE
6804{
6805 static char buf[100];
2b835d68
RE
6806 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
6807 arm_text_section_count++);
6808 if (flag_pic)
6809 strcat (buf, ", PIC, REENTRANT");
6810 return buf;
6811}
6812
6813static int arm_data_section_count = 1;
6814
6815char *
6816aof_data_section ()
6817{
6818 static char buf[100];
6819 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
6820 return buf;
6821}
6822
6823/* The AOF assembler is religiously strict about declarations of
6824 imported and exported symbols, so that it is impossible to declare
956d6950 6825 a function as imported near the beginning of the file, and then to
2b835d68
RE
6826 export it later on. It is, however, possible to delay the decision
6827 until all the functions in the file have been compiled. To get
6828 around this, we maintain a list of the imports and exports, and
6829 delete from it any that are subsequently defined. At the end of
6830 compilation we spit the remainder of the list out before the END
6831 directive. */
6832
6833struct import
6834{
62b10bbc
NC
6835 struct import * next;
6836 char * name;
2b835d68
RE
6837};
6838
62b10bbc 6839static struct import * imports_list = NULL;
2b835d68
RE
6840
6841void
6842aof_add_import (name)
62b10bbc 6843 char * name;
2b835d68 6844{
62b10bbc 6845 struct import * new;
2b835d68
RE
6846
6847 for (new = imports_list; new; new = new->next)
6848 if (new->name == name)
6849 return;
6850
6851 new = (struct import *) xmalloc (sizeof (struct import));
6852 new->next = imports_list;
6853 imports_list = new;
6854 new->name = name;
6855}
6856
6857void
6858aof_delete_import (name)
62b10bbc 6859 char * name;
2b835d68 6860{
62b10bbc 6861 struct import ** old;
2b835d68
RE
6862
6863 for (old = &imports_list; *old; old = & (*old)->next)
6864 {
6865 if ((*old)->name == name)
6866 {
6867 *old = (*old)->next;
6868 return;
6869 }
6870 }
6871}
6872
6873int arm_main_function = 0;
6874
6875void
6876aof_dump_imports (f)
62b10bbc 6877 FILE * f;
2b835d68
RE
6878{
6879 /* The AOF assembler needs this to cause the startup code to be extracted
 6880 from the library. Bringing in __main causes the whole thing to work
6881 automagically. */
6882 if (arm_main_function)
6883 {
6884 text_section ();
6885 fputs ("\tIMPORT __main\n", f);
6886 fputs ("\tDCD __main\n", f);
6887 }
6888
6889 /* Now dump the remaining imports. */
6890 while (imports_list)
6891 {
6892 fprintf (f, "\tIMPORT\t");
6893 assemble_name (f, imports_list->name);
6894 fputc ('\n', f);
6895 imports_list = imports_list->next;
6896 }
6897}
6898#endif /* AOF_ASSEMBLER */