]> gcc.gnu.org Git - gcc.git/blame - gcc/config/arm/arm.c
arm.h (CONDITIONAL_REGISTER_USAGE): If flag_pic, never use PIC_OFFSET_TABLE_REGNUM...
[gcc.git] / gcc / config / arm / arm.c
CommitLineData
b36ba79f 1/* Output routines for GCC for ARM.
63114329 2 Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 1999 Free Software Foundation, Inc.
cce8749e 3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 4 and Martin Simmons (@harleqn.co.uk).
b36ba79f 5 More major hacks by Richard Earnshaw (rearnsha@arm.com).
cce8749e
CH
6
7This file is part of GNU CC.
8
9GNU CC is free software; you can redistribute it and/or modify
10it under the terms of the GNU General Public License as published by
11the Free Software Foundation; either version 2, or (at your option)
12any later version.
13
14GNU CC is distributed in the hope that it will be useful,
15but WITHOUT ANY WARRANTY; without even the implied warranty of
16MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17GNU General Public License for more details.
18
19You should have received a copy of the GNU General Public License
20along with GNU CC; see the file COPYING. If not, write to
8fb289e7
RK
21the Free Software Foundation, 59 Temple Place - Suite 330,
22Boston, MA 02111-1307, USA. */
ff9940b0 23
56636818 24#include "config.h"
43cffd11 25#include "system.h"
cce8749e
CH
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "output.h"
34#include "insn-attr.h"
35#include "flags.h"
af48348a 36#include "reload.h"
e2c671ba 37#include "tree.h"
bee06f3d 38#include "expr.h"
ad076f4e 39#include "toplev.h"
aec3cfba 40#include "recog.h"
cce8749e
CH
41
42/* The maximum number of insns skipped which will be conditionalised if
43 possible. */
b36ba79f 44static int max_insns_skipped = 5;
cce8749e 45
f5a1b0d2 46extern FILE * asm_out_file;
cce8749e 47/* Some function declarations. */
cce8749e 48
18af7313 49static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
2e943e99 50static char * output_multi_immediate PROTO ((rtx *, char *, char *, int,
18af7313 51 HOST_WIDE_INT));
2b835d68
RE
52static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
53 HOST_WIDE_INT, rtx, rtx, int, int));
18af7313
RE
54static int arm_naked_function_p PROTO ((tree));
55static void init_fpa_table PROTO ((void));
74bbc178
NC
56static enum machine_mode select_dominance_cc_mode PROTO ((rtx, rtx,
57 HOST_WIDE_INT));
332072db 58static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode, int *));
18af7313
RE
59static void dump_table PROTO ((rtx));
60static int fixit PROTO ((rtx, enum machine_mode, int));
61static rtx find_barrier PROTO ((rtx, int));
62static int broken_move PROTO ((rtx));
2e943e99 63static char * fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
18af7313 64static int eliminate_lr2ip PROTO ((rtx *));
2e943e99 65static char * shift_op PROTO ((rtx, HOST_WIDE_INT *));
18af7313
RE
66static int pattern_really_clobbers_lr PROTO ((rtx));
67static int function_really_clobbers_lr PROTO ((rtx));
68static void emit_multi_reg_push PROTO ((int));
b111229a 69static void emit_sfm PROTO ((int, int));
18af7313 70static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
74bbc178 71static int const_ok_for_op RTX_CODE_PROTO ((Hint, Rcode));
f3bb6135 72
13bd191d
PB
73/* True if we are currently building a constant table. */
74int making_const_table;
75
ff9940b0
RE
76/* Define the information needed to generate branch insns. This is
77 stored from the compare operation. */
ff9940b0 78rtx arm_compare_op0, arm_compare_op1;
ff9940b0 79
b111229a 80/* What type of floating point are we tuning for? */
bee06f3d
RE
81enum floating_point_type arm_fpu;
82
b111229a
RE
83/* What type of floating point instructions are available? */
84enum floating_point_type arm_fpu_arch;
85
2b835d68
RE
86/* What program mode is the cpu running in? 26-bit mode or 32-bit mode */
87enum prog_mode_type arm_prgmode;
88
b111229a 89/* Set by the -mfp=... option */
f9cc092a 90const char * target_fp_name = NULL;
2b835d68 91
b355a481 92/* Used to parse -mstructure_size_boundary command line option. */
f9cc092a 93const char * structure_size_string = NULL;
b355a481
NC
94int arm_structure_size_boundary = 32; /* Used to be 8 */
95
aec3cfba
NC
96/* Bit values used to identify processor capabilities. */
97#define FL_CO_PROC 0x01 /* Has external co-processor bus */
98#define FL_FAST_MULT 0x02 /* Fast multiply */
99#define FL_MODE26 0x04 /* 26-bit mode support */
100#define FL_MODE32 0x08 /* 32-bit mode support */
101#define FL_ARCH4 0x10 /* Architecture rel 4 */
102#define FL_THUMB 0x20 /* Thumb aware */
103#define FL_LDSCHED 0x40 /* Load scheduling necessary */
104#define FL_STRONG 0x80 /* StrongARM */
105
106/* The bits in this mask specify which instructions we are allowed to generate. */
107static int insn_flags = 0;
108/* The bits in this mask specify which instruction scheduling options should
109 be used. Note - there is an overlap with the FL_FAST_MULT. For some
110 hardware we want to be able to generate the multiply instructions, but to
111 tune as if they were not present in the architecture. */
112static int tune_flags = 0;
113
114/* The following are used in the arm.md file as equivalents to bits
115 in the above two flag variables. */
116
2b835d68
RE
117/* Nonzero if this is an "M" variant of the processor. */
118int arm_fast_multiply = 0;
119
32de079a 120/* Nonzero if this chip supports the ARM Architecture 4 extensions */
2b835d68
RE
121int arm_arch4 = 0;
122
aec3cfba 123/* Nonzero if this chip can benefit from load scheduling. */
f5a1b0d2
NC
124int arm_ld_sched = 0;
125
126/* Nonzero if this chip is a StrongARM. */
127int arm_is_strong = 0;
128
129/* Nonzero if this chip is a an ARM6 or an ARM7. */
130int arm_is_6_or_7 = 0;
b111229a 131
cce8749e
CH
132/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
133 must report the mode of the memory reference from PRINT_OPERAND to
134 PRINT_OPERAND_ADDRESS. */
f3bb6135 135enum machine_mode output_memory_reference_mode;
cce8749e
CH
136
137/* Nonzero if the prologue must setup `fp'. */
138int current_function_anonymous_args;
139
32de079a
RE
140/* The register number to be used for the PIC offset register. */
141int arm_pic_register = 9;
142
cce8749e
CH
143/* Location counter of .text segment. */
144int arm_text_location = 0;
145
ff9940b0
RE
146/* Set to one if we think that lr is only saved because of subroutine calls,
147 but all of these can be `put after' return insns */
148int lr_save_eliminated;
149
ff9940b0
RE
150/* Set to 1 when a return insn is output, this means that the epilogue
151 is not needed. */
ff9940b0
RE
152static int return_used_this_function;
153
aec3cfba
NC
154/* Set to 1 after arm_reorg has started. Reset to start at the start of
155 the next function. */
4b632bf1
RE
156static int after_arm_reorg = 0;
157
aec3cfba 158/* The maximum number of insns to be used when loading a constant. */
2b835d68
RE
159static int arm_constant_limit = 3;
160
cce8749e
CH
161/* For an explanation of these variables, see final_prescan_insn below. */
162int arm_ccfsm_state;
84ed5e79 163enum arm_cond_code arm_current_cc;
cce8749e
CH
164rtx arm_target_insn;
165int arm_target_label;
9997d19d
RE
166
167/* The condition codes of the ARM, and the inverse function. */
f5a1b0d2 168char * arm_condition_codes[] =
9997d19d
RE
169{
170 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
171 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
172};
173
84ed5e79 174static enum arm_cond_code get_arm_condition_code ();
2b835d68 175
f5a1b0d2 176#define streq(string1, string2) (strcmp (string1, string2) == 0)
2b835d68
RE
177\f
178/* Initialization code */
179
2b835d68
RE
180struct processors
181{
f5a1b0d2 182 char * name;
2b835d68
RE
183 unsigned int flags;
184};
185
186/* Not all of these give usefully different compilation alternatives,
187 but there is no simple way of generalizing them. */
f5a1b0d2
NC
188static struct processors all_cores[] =
189{
190 /* ARM Cores */
191
192 {"arm2", FL_CO_PROC | FL_MODE26 },
193 {"arm250", FL_CO_PROC | FL_MODE26 },
194 {"arm3", FL_CO_PROC | FL_MODE26 },
195 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
196 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
197 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
198 {"arm610", FL_MODE26 | FL_MODE32 },
199 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
200 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
201 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT }, /* arm7m doesn't exist on its own, */
202 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 }, /* but only with D, (and I), */
203 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT }, /* but those don't alter the code, */
204 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 }, /* so arm7m is sometimes used. */
205 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
206 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
207 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
208 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
209 {"arm710", FL_MODE26 | FL_MODE32 },
210 {"arm710c", FL_MODE26 | FL_MODE32 },
211 {"arm7100", FL_MODE26 | FL_MODE32 },
212 {"arm7500", FL_MODE26 | FL_MODE32 },
213 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 }, /* Doesn't really have an external co-proc, but does have embedded fpu. */
214 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
215 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
216 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
217 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
6cf32035
NC
218 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
219 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
f5a1b0d2
NC
220 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
221 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
222 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
223 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
224
225 {NULL, 0}
226};
227
228static struct processors all_architectures[] =
2b835d68 229{
f5a1b0d2
NC
230 /* ARM Architectures */
231
232 {"armv2", FL_CO_PROC | FL_MODE26 },
233 {"armv2a", FL_CO_PROC | FL_MODE26 },
234 {"armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
235 {"armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
236 {"armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
b111229a
RE
237 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
238 implementations that support it, so we will leave it out for now. */
f5a1b0d2
NC
239 {"armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
240 {NULL, 0}
241};
242
243/* This is a magic stucture. The 'string' field is magically filled in
244 with a pointer to the value specified by the user on the command line
245 assuming that the user has specified such a value. */
246
247struct arm_cpu_select arm_select[] =
248{
249 /* string name processors */
250 { NULL, "-mcpu=", all_cores },
251 { NULL, "-march=", all_architectures },
252 { NULL, "-mtune=", all_cores }
2b835d68
RE
253};
254
aec3cfba
NC
/* Return the number of bits set in VALUE.  */
static unsigned int
bit_count (value)
     signed int value;
{
  /* Work on an unsigned copy so clearing the low bit is well defined
     even for negative inputs.  */
  unsigned int remaining = (unsigned int) value;
  unsigned int set_bits = 0;

  /* Kernighan's method: each iteration clears the lowest set bit.  */
  while (remaining != 0)
    {
      remaining &= remaining - 1;
      set_bits++;
    }

  return set_bits;
}
270
2b835d68
RE
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.

   Called once after all command-line options have been parsed.  It
   reconciles -mcpu=/-march=/-mtune= selections (recorded in arm_select
   by the option-parsing machinery), picks defaults where the user gave
   none, diagnoses contradictory option combinations, and finally
   initialises the file-scope flag variables (arm_fast_multiply,
   arm_arch4, arm_ld_sched, ...) used by arm.md.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.
     Note: iterates backwards, so -mcpu= / -march= (indices 0 and 1) are
     processed after -mtune= (index 2).  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel ++)
            if (streq (ptr->string, sel->name))
              {
                /* Index 2 is the -mtune= entry; it only affects
                   scheduling, not which insns may be generated.  */
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      /* Maps the configure-time TARGET_CPU_DEFAULT code onto a core name
         that can be looked up in all_cores.  */
      static struct cpu_default
      {
        int cpu;
        char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2, "arm2" },
        { TARGET_CPU_arm6, "arm6" },
        { TARGET_CPU_arm610, "arm610" },
        { TARGET_CPU_arm710, "arm710" },
        { TARGET_CPU_arm7m, "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi, "arm7tdmi" },
        { TARGET_CPU_arm8, "arm8" },
        { TARGET_CPU_arm810, "arm810" },
        { TARGET_CPU_arm9, "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic, "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def ++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel ++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switch that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_THUMB_INTERWORK)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processor that supports both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~ FL_MODE26;
        }

      if (! TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel ++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel ++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we loose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32" );
      target_flags &= ~ ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_THUMB_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_THUMB;
    }

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_THUMB_INTERWORK)
    {
      if (! TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  /* -mpoke-function-name needs the frame laid out as for APCS.  */
  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  /* Well, I'm about to have a go, but pic is NOT going to be compatible
     with APCS reentrancy, since that requires too much support in the
     assembler and linker, and the ARMASM assembler seems to lack some
     required directives.  */
  if (flag_pic)
    warning ("Position independent code not supported");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.
     Generation abilities come from insn_flags, scheduling preferences
     from tune_flags.  */
  arm_fast_multiply = insn_flags & FL_FAST_MULT;
  arm_arch4 = insn_flags & FL_ARCH4;

  arm_ld_sched = tune_flags & FL_LDSCHED;
  arm_is_strong = tune_flags & FL_STRONG;
  arm_is_6_or_7 = ((tune_flags & (FL_MODE26 | FL_MODE32))
                   && !(tune_flags & FL_ARCH4));

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prog_mode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      /* -mstructure-size-boundary=N: only 8 and 32 are legal values.  */
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;
}
cce8749e 565\f
ff9940b0
RE
/* Return 1 if it is possible to return using a single instruction.
   ISCOND is nonzero if the return would be conditional.

   A single-insn return is ruled out whenever the epilogue must do real
   work: restoring pretend/anonymous args, deallocating a frame without
   an APCS frame pointer, popping callee-saved or FPU registers in the
   Thumb-interworking or conditional-StrongARM cases, or when the
   function is declared naked.  */
int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  /* Not valid until after reload; also impossible if there are stacked
     pretend/anonymous arguments, or a frame that is not addressed
     through an APCS frame pointer.  */
  if (!reload_completed
      || current_function_pretend_args_size
      || current_function_anonymous_args
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !(TARGET_APCS && frame_pointer_needed)))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;
  if ((iscond && arm_is_strong)
      || TARGET_THUMB_INTERWORK)
    {
      /* Any live callee-saved core register (r0-r15) forces a full
         epilogue.  */
      for (regno = 0; regno < 16; regno++)
        if (regs_ever_live[regno] && ! call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  (Regs 16-23 are the FPA registers here.)  */
  for (regno = 16; regno < 24; regno++)
    if (regs_ever_live[regno] && ! call_used_regs[regno])
      return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
609
cce8749e
CH
610/* Return TRUE if int I is a valid immediate ARM constant. */
611
612int
613const_ok_for_arm (i)
ff9940b0 614 HOST_WIDE_INT i;
cce8749e 615{
ed4c4348 616 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 617
56636818
JL
618 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
619 be all zero, or all one. */
620 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
621 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
ed4c4348
RE
622 != ((~(unsigned HOST_WIDE_INT) 0)
623 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
56636818
JL
624 return FALSE;
625
e2c671ba
RE
626 /* Fast return for 0 and powers of 2 */
627 if ((i & (i - 1)) == 0)
628 return TRUE;
629
cce8749e
CH
630 do
631 {
abaa26e5 632 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
f3bb6135 633 return TRUE;
abaa26e5
RE
634 mask =
635 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
636 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
ed4c4348 637 } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 638
f3bb6135
RE
639 return FALSE;
640}
cce8749e 641
/* Return true if I is a valid constant for the operation CODE.

   Either I itself is a valid ARM immediate, or an equivalent insn
   exists: PLUS of -I can become a SUB, AND of ~I can become a BIC.
   MINUS/XOR/IOR have no such dual form.  Any other CODE aborts.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      /* ADD rd, rn, #-I can be done as SUB rd, rn, #I.  */
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      /* AND rd, rn, #~I can be done as BIC rd, rn, #I.  */
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
668
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  /* Only consider loading VAL from the constant pool when a scratch is
     usable: either we are allowed subtargets, this is a plain SET, or
     TARGET and SOURCE are distinct registers (so TARGET can hold the
     intermediate constant).  */
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesise
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
	 */
      /* The generate=0 call below only costs the sequence; nothing is
	 emitted unless in-line synthesis is actually cheaper than
	 arm_constant_limit insns (plus one for the operation itself
	 when CODE is not SET).  */
      if (! after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are diadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  /* Otherwise synthesise the constant in-line and emit the insns.  */
  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
732
733/* As above, but extra parameter GENERATE which, if clear, suppresses
734 RTL generation. */
735int
736arm_gen_constant (code, mode, val, target, source, subtargets, generate)
737 enum rtx_code code;
738 enum machine_mode mode;
739 HOST_WIDE_INT val;
740 rtx target;
741 rtx source;
742 int subtargets;
743 int generate;
e2c671ba 744{
e2c671ba
RE
745 int can_invert = 0;
746 int can_negate = 0;
747 int can_negate_initial = 0;
748 int can_shift = 0;
749 int i;
750 int num_bits_set = 0;
751 int set_sign_bit_copies = 0;
752 int clear_sign_bit_copies = 0;
753 int clear_zero_bit_copies = 0;
754 int set_zero_bit_copies = 0;
755 int insns = 0;
e2c671ba
RE
756 unsigned HOST_WIDE_INT temp1, temp2;
757 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
758
759 /* find out which operations are safe for a given CODE. Also do a quick
760 check for degenerate cases; these can occur when DImode operations
761 are split. */
762 switch (code)
763 {
764 case SET:
765 can_invert = 1;
766 can_shift = 1;
767 can_negate = 1;
768 break;
769
770 case PLUS:
771 can_negate = 1;
772 can_negate_initial = 1;
773 break;
774
775 case IOR:
776 if (remainder == 0xffffffff)
777 {
2b835d68 778 if (generate)
43cffd11
RE
779 emit_insn (gen_rtx_SET (VOIDmode, target,
780 GEN_INT (ARM_SIGN_EXTEND (val))));
e2c671ba
RE
781 return 1;
782 }
783 if (remainder == 0)
784 {
785 if (reload_completed && rtx_equal_p (target, source))
786 return 0;
2b835d68 787 if (generate)
43cffd11 788 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
789 return 1;
790 }
791 break;
792
793 case AND:
794 if (remainder == 0)
795 {
2b835d68 796 if (generate)
43cffd11 797 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
e2c671ba
RE
798 return 1;
799 }
800 if (remainder == 0xffffffff)
801 {
802 if (reload_completed && rtx_equal_p (target, source))
803 return 0;
2b835d68 804 if (generate)
43cffd11 805 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
806 return 1;
807 }
808 can_invert = 1;
809 break;
810
811 case XOR:
812 if (remainder == 0)
813 {
814 if (reload_completed && rtx_equal_p (target, source))
815 return 0;
2b835d68 816 if (generate)
43cffd11 817 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
818 return 1;
819 }
820 if (remainder == 0xffffffff)
821 {
2b835d68 822 if (generate)
43cffd11
RE
823 emit_insn (gen_rtx_SET (VOIDmode, target,
824 gen_rtx_NOT (mode, source)));
e2c671ba
RE
825 return 1;
826 }
827
828 /* We don't know how to handle this yet below. */
829 abort ();
830
831 case MINUS:
832 /* We treat MINUS as (val - source), since (source - val) is always
833 passed as (source + (-val)). */
834 if (remainder == 0)
835 {
2b835d68 836 if (generate)
43cffd11
RE
837 emit_insn (gen_rtx_SET (VOIDmode, target,
838 gen_rtx_NEG (mode, source)));
e2c671ba
RE
839 return 1;
840 }
841 if (const_ok_for_arm (val))
842 {
2b835d68 843 if (generate)
43cffd11
RE
844 emit_insn (gen_rtx_SET (VOIDmode, target,
845 gen_rtx_MINUS (mode, GEN_INT (val),
846 source)));
e2c671ba
RE
847 return 1;
848 }
849 can_negate = 1;
850
851 break;
852
853 default:
854 abort ();
855 }
856
857 /* If we can do it in one insn get out quickly */
858 if (const_ok_for_arm (val)
859 || (can_negate_initial && const_ok_for_arm (-val))
860 || (can_invert && const_ok_for_arm (~val)))
861 {
2b835d68 862 if (generate)
43cffd11
RE
863 emit_insn (gen_rtx_SET (VOIDmode, target,
864 (source ? gen_rtx (code, mode, source,
865 GEN_INT (val))
866 : GEN_INT (val))));
e2c671ba
RE
867 return 1;
868 }
869
870
871 /* Calculate a few attributes that may be useful for specific
872 optimizations. */
873
874 for (i = 31; i >= 0; i--)
875 {
876 if ((remainder & (1 << i)) == 0)
877 clear_sign_bit_copies++;
878 else
879 break;
880 }
881
882 for (i = 31; i >= 0; i--)
883 {
884 if ((remainder & (1 << i)) != 0)
885 set_sign_bit_copies++;
886 else
887 break;
888 }
889
890 for (i = 0; i <= 31; i++)
891 {
892 if ((remainder & (1 << i)) == 0)
893 clear_zero_bit_copies++;
894 else
895 break;
896 }
897
898 for (i = 0; i <= 31; i++)
899 {
900 if ((remainder & (1 << i)) != 0)
901 set_zero_bit_copies++;
902 else
903 break;
904 }
905
906 switch (code)
907 {
908 case SET:
909 /* See if we can do this by sign_extending a constant that is known
910 to be negative. This is a good, way of doing it, since the shift
911 may well merge into a subsequent insn. */
912 if (set_sign_bit_copies > 1)
913 {
914 if (const_ok_for_arm
915 (temp1 = ARM_SIGN_EXTEND (remainder
916 << (set_sign_bit_copies - 1))))
917 {
2b835d68
RE
918 if (generate)
919 {
d499463f 920 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
921 emit_insn (gen_rtx_SET (VOIDmode, new_src,
922 GEN_INT (temp1)));
2b835d68
RE
923 emit_insn (gen_ashrsi3 (target, new_src,
924 GEN_INT (set_sign_bit_copies - 1)));
925 }
e2c671ba
RE
926 return 2;
927 }
928 /* For an inverted constant, we will need to set the low bits,
929 these will be shifted out of harm's way. */
930 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
931 if (const_ok_for_arm (~temp1))
932 {
2b835d68
RE
933 if (generate)
934 {
d499463f 935 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
936 emit_insn (gen_rtx_SET (VOIDmode, new_src,
937 GEN_INT (temp1)));
2b835d68
RE
938 emit_insn (gen_ashrsi3 (target, new_src,
939 GEN_INT (set_sign_bit_copies - 1)));
940 }
e2c671ba
RE
941 return 2;
942 }
943 }
944
945 /* See if we can generate this by setting the bottom (or the top)
946 16 bits, and then shifting these into the other half of the
947 word. We only look for the simplest cases, to do more would cost
948 too much. Be careful, however, not to generate this when the
949 alternative would take fewer insns. */
950 if (val & 0xffff0000)
951 {
952 temp1 = remainder & 0xffff0000;
953 temp2 = remainder & 0x0000ffff;
954
955 /* Overlaps outside this range are best done using other methods. */
956 for (i = 9; i < 24; i++)
957 {
958 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
959 && ! const_ok_for_arm (temp2))
960 {
d499463f
RE
961 rtx new_src = (subtargets
962 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
963 : target);
964 insns = arm_gen_constant (code, mode, temp2, new_src,
2b835d68 965 source, subtargets, generate);
e2c671ba 966 source = new_src;
2b835d68 967 if (generate)
43cffd11
RE
968 emit_insn (gen_rtx_SET
969 (VOIDmode, target,
970 gen_rtx_IOR (mode,
971 gen_rtx_ASHIFT (mode, source,
972 GEN_INT (i)),
973 source)));
e2c671ba
RE
974 return insns + 1;
975 }
976 }
977
978 /* Don't duplicate cases already considered. */
979 for (i = 17; i < 24; i++)
980 {
981 if (((temp1 | (temp1 >> i)) == remainder)
982 && ! const_ok_for_arm (temp1))
983 {
d499463f
RE
984 rtx new_src = (subtargets
985 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
986 : target);
987 insns = arm_gen_constant (code, mode, temp1, new_src,
2b835d68 988 source, subtargets, generate);
e2c671ba 989 source = new_src;
2b835d68 990 if (generate)
43cffd11
RE
991 emit_insn
992 (gen_rtx_SET (VOIDmode, target,
993 gen_rtx_IOR
994 (mode,
995 gen_rtx_LSHIFTRT (mode, source,
996 GEN_INT (i)),
997 source)));
e2c671ba
RE
998 return insns + 1;
999 }
1000 }
1001 }
1002 break;
1003
1004 case IOR:
1005 case XOR:
7b64da89
RE
1006 /* If we have IOR or XOR, and the constant can be loaded in a
1007 single instruction, and we can find a temporary to put it in,
e2c671ba
RE
1008 then this can be done in two instructions instead of 3-4. */
1009 if (subtargets
d499463f 1010 /* TARGET can't be NULL if SUBTARGETS is 0 */
e2c671ba
RE
1011 || (reload_completed && ! reg_mentioned_p (target, source)))
1012 {
1013 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
1014 {
2b835d68
RE
1015 if (generate)
1016 {
1017 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
e2c671ba 1018
43cffd11
RE
1019 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1020 emit_insn (gen_rtx_SET (VOIDmode, target,
1021 gen_rtx (code, mode, source, sub)));
2b835d68 1022 }
e2c671ba
RE
1023 return 2;
1024 }
1025 }
1026
1027 if (code == XOR)
1028 break;
1029
1030 if (set_sign_bit_copies > 8
1031 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1032 {
2b835d68
RE
1033 if (generate)
1034 {
1035 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1036 rtx shift = GEN_INT (set_sign_bit_copies);
1037
43cffd11
RE
1038 emit_insn (gen_rtx_SET (VOIDmode, sub,
1039 gen_rtx_NOT (mode,
1040 gen_rtx_ASHIFT (mode,
1041 source,
f5a1b0d2 1042 shift))));
43cffd11
RE
1043 emit_insn (gen_rtx_SET (VOIDmode, target,
1044 gen_rtx_NOT (mode,
1045 gen_rtx_LSHIFTRT (mode, sub,
1046 shift))));
2b835d68 1047 }
e2c671ba
RE
1048 return 2;
1049 }
1050
1051 if (set_zero_bit_copies > 8
1052 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1053 {
2b835d68
RE
1054 if (generate)
1055 {
1056 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1057 rtx shift = GEN_INT (set_zero_bit_copies);
1058
43cffd11
RE
1059 emit_insn (gen_rtx_SET (VOIDmode, sub,
1060 gen_rtx_NOT (mode,
1061 gen_rtx_LSHIFTRT (mode,
1062 source,
f5a1b0d2 1063 shift))));
43cffd11
RE
1064 emit_insn (gen_rtx_SET (VOIDmode, target,
1065 gen_rtx_NOT (mode,
1066 gen_rtx_ASHIFT (mode, sub,
f5a1b0d2 1067 shift))));
2b835d68 1068 }
e2c671ba
RE
1069 return 2;
1070 }
1071
1072 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
1073 {
2b835d68
RE
1074 if (generate)
1075 {
1076 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
1077 emit_insn (gen_rtx_SET (VOIDmode, sub,
1078 gen_rtx_NOT (mode, source)));
2b835d68
RE
1079 source = sub;
1080 if (subtargets)
1081 sub = gen_reg_rtx (mode);
43cffd11
RE
1082 emit_insn (gen_rtx_SET (VOIDmode, sub,
1083 gen_rtx_AND (mode, source,
1084 GEN_INT (temp1))));
1085 emit_insn (gen_rtx_SET (VOIDmode, target,
1086 gen_rtx_NOT (mode, sub)));
2b835d68 1087 }
e2c671ba
RE
1088 return 3;
1089 }
1090 break;
1091
1092 case AND:
1093 /* See if two shifts will do 2 or more insn's worth of work. */
1094 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1095 {
1096 HOST_WIDE_INT shift_mask = ((0xffffffff
1097 << (32 - clear_sign_bit_copies))
1098 & 0xffffffff);
e2c671ba
RE
1099
1100 if ((remainder | shift_mask) != 0xffffffff)
1101 {
2b835d68
RE
1102 if (generate)
1103 {
d499463f 1104 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68 1105 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1106 new_src, source, subtargets, 1);
1107 source = new_src;
2b835d68
RE
1108 }
1109 else
d499463f
RE
1110 {
1111 rtx targ = subtargets ? NULL_RTX : target;
1112 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1113 targ, source, subtargets, 0);
1114 }
2b835d68
RE
1115 }
1116
1117 if (generate)
1118 {
d499463f
RE
1119 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1120 rtx shift = GEN_INT (clear_sign_bit_copies);
1121
1122 emit_insn (gen_ashlsi3 (new_src, source, shift));
1123 emit_insn (gen_lshrsi3 (target, new_src, shift));
e2c671ba
RE
1124 }
1125
e2c671ba
RE
1126 return insns + 2;
1127 }
1128
1129 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1130 {
1131 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
e2c671ba
RE
1132
1133 if ((remainder | shift_mask) != 0xffffffff)
1134 {
2b835d68
RE
1135 if (generate)
1136 {
d499463f
RE
1137 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1138
2b835d68 1139 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1140 new_src, source, subtargets, 1);
1141 source = new_src;
2b835d68
RE
1142 }
1143 else
d499463f
RE
1144 {
1145 rtx targ = subtargets ? NULL_RTX : target;
1146
1147 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1148 targ, source, subtargets, 0);
1149 }
2b835d68
RE
1150 }
1151
1152 if (generate)
1153 {
d499463f
RE
1154 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1155 rtx shift = GEN_INT (clear_zero_bit_copies);
1156
1157 emit_insn (gen_lshrsi3 (new_src, source, shift));
1158 emit_insn (gen_ashlsi3 (target, new_src, shift));
e2c671ba
RE
1159 }
1160
e2c671ba
RE
1161 return insns + 2;
1162 }
1163
1164 break;
1165
1166 default:
1167 break;
1168 }
1169
1170 for (i = 0; i < 32; i++)
1171 if (remainder & (1 << i))
1172 num_bits_set++;
1173
1174 if (code == AND || (can_invert && num_bits_set > 16))
1175 remainder = (~remainder) & 0xffffffff;
1176 else if (code == PLUS && num_bits_set > 16)
1177 remainder = (-remainder) & 0xffffffff;
1178 else
1179 {
1180 can_invert = 0;
1181 can_negate = 0;
1182 }
1183
1184 /* Now try and find a way of doing the job in either two or three
1185 instructions.
1186 We start by looking for the largest block of zeros that are aligned on
1187 a 2-bit boundary, we then fill up the temps, wrapping around to the
1188 top of the word when we drop off the bottom.
1189 In the worst case this code should produce no more than four insns. */
1190 {
1191 int best_start = 0;
1192 int best_consecutive_zeros = 0;
1193
1194 for (i = 0; i < 32; i += 2)
1195 {
1196 int consecutive_zeros = 0;
1197
1198 if (! (remainder & (3 << i)))
1199 {
1200 while ((i < 32) && ! (remainder & (3 << i)))
1201 {
1202 consecutive_zeros += 2;
1203 i += 2;
1204 }
1205 if (consecutive_zeros > best_consecutive_zeros)
1206 {
1207 best_consecutive_zeros = consecutive_zeros;
1208 best_start = i - consecutive_zeros;
1209 }
1210 i -= 2;
1211 }
1212 }
1213
1214 /* Now start emitting the insns, starting with the one with the highest
1215 bit set: we do this so that the smallest number will be emitted last;
1216 this is more likely to be combinable with addressing insns. */
1217 i = best_start;
1218 do
1219 {
1220 int end;
1221
1222 if (i <= 0)
1223 i += 32;
1224 if (remainder & (3 << (i - 2)))
1225 {
1226 end = i - 8;
1227 if (end < 0)
1228 end += 32;
1229 temp1 = remainder & ((0x0ff << end)
1230 | ((i < end) ? (0xff >> (32 - end)) : 0));
1231 remainder &= ~temp1;
1232
d499463f 1233 if (generate)
e2c671ba 1234 {
d499463f
RE
1235 rtx new_src;
1236
1237 if (code == SET)
43cffd11
RE
1238 emit_insn (gen_rtx_SET (VOIDmode,
1239 new_src = (subtargets
1240 ? gen_reg_rtx (mode)
1241 : target),
1242 GEN_INT (can_invert
1243 ? ~temp1 : temp1)));
d499463f 1244 else if (code == MINUS)
43cffd11
RE
1245 emit_insn (gen_rtx_SET (VOIDmode,
1246 new_src = (subtargets
1247 ? gen_reg_rtx (mode)
1248 : target),
1249 gen_rtx (code, mode, GEN_INT (temp1),
1250 source)));
d499463f 1251 else
43cffd11
RE
1252 emit_insn (gen_rtx_SET (VOIDmode,
1253 new_src = (remainder
1254 ? (subtargets
1255 ? gen_reg_rtx (mode)
1256 : target)
1257 : target),
1258 gen_rtx (code, mode, source,
1259 GEN_INT (can_invert ? ~temp1
1260 : (can_negate
1261 ? -temp1
1262 : temp1)))));
d499463f 1263 source = new_src;
e2c671ba
RE
1264 }
1265
d499463f
RE
1266 if (code == SET)
1267 {
1268 can_invert = 0;
1269 code = PLUS;
1270 }
1271 else if (code == MINUS)
1272 code = PLUS;
1273
e2c671ba 1274 insns++;
e2c671ba
RE
1275 i -= 6;
1276 }
1277 i -= 2;
1278 } while (remainder);
1279 }
1280 return insns;
1281}
1282
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.

   CODE is the comparison code and *OP1 the constant operand (a
   CONST_INT).  When the comparison can be rewritten with an adjacent
   constant that is a valid ARM immediate (or whose negation is), *OP1
   is replaced and the adjusted code returned, e.g. (GT x 4) becomes
   (GE x 5).  Otherwise CODE is returned unchanged.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      /* Equality tests gain nothing from adjusting the constant.  */
      return code;

    case GT:
    case LE:
      /* (GT x C) <=> (GE x C+1); refuse when C is the most positive
	 signed value, where C+1 would wrap.  */
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
		- 1)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i+1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      /* (GE x C) <=> (GT x C-1); refuse when C is the most negative
	 signed value, where C-1 would wrap.  */
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
	{
	  *op1 = GEN_INT (i-1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      /* Unsigned: (GTU x C) <=> (GEU x C+1); invalid when C is all ones.  */
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      /* Unsigned: (GEU x C) <=> (GTU x C-1); invalid when C is zero.  */
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
bd9c7e23 1346
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (! AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers. */
      return 0;
    }
  else if (int_size_in_bytes (type) > 4)
    {
      /* All structures/unions bigger than one word are returned in memory. */
      return 1;
    }
  else if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we must return in a register if
	 every addressable element has an offset of zero.  For practical
	 purposes this means that the structure can have at most one non
	 bit-field element and that this element must be the first one in
	 the structure. */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++. */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C. */

      /* Now check the remaining fields, if any.  Only bit-fields are
	 permitted past the first field, otherwise we must return the
	 struct in memory. */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (! DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register. */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  /* A floating point member forces a memory return.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  /* Recursively apply the same rule to the member's type.  */
	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory. */
  return 1;
}
1425
32de079a
RE
1426int
1427legitimate_pic_operand_p (x)
1428 rtx x;
1429{
1430 if (CONSTANT_P (x) && flag_pic
1431 && (GET_CODE (x) == SYMBOL_REF
1432 || (GET_CODE (x) == CONST
1433 && GET_CODE (XEXP (x, 0)) == PLUS
1434 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1435 return 0;
1436
1437 return 1;
1438}
1439
/* Convert ORIG, an address of mode MODE, into a form that is legitimate
   when compiling PIC code.  REG is a register to load intermediate
   values into; when it is zero a fresh pseudo is allocated, which is
   only valid before reload (we abort otherwise).  Symbol references are
   loaded through the PIC offset table; CONST expressions are
   decomposed and legitimized recursively.  Returns the legitimized
   address.  */
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  /* We cannot create new pseudos during reload.  */
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.
	 */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      /* Load the GOT offset, then load the symbol's address from
	 (PIC register + offset).  */
      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
			     gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					   address));
      /* GOT entries do not change during execution.  */
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop. */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already legitimized: (plus pic_offset_table_rtx ...).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  Note that
	     GO_IF_LEGITIMATE_INDEX jumps to the `win' label below when
	     the offset is usable as-is.  */
	  GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  /* Multi-word accesses need the full address in a register.  */
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}
1544
1545static rtx pic_rtx;
1546
1547int
1548is_pic(x)
1549 rtx x;
1550{
1551 if (x == pic_rtx)
1552 return 1;
1553 return 0;
1554}
1555
/* Emit, at the start of the current function, the code that loads the
   PIC offset table register with the address of the global offset
   table.  Does nothing when the function makes no use of the PIC
   register, and aborts if called when not compiling PIC.  (Not needed
   for the AOF assembler, which handles PIC relocations itself.)  */
void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition. */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), 8);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
			    gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  /* pic_rtx = GOT - (label + 8); adding the PC at the label then yields
     the GOT address.  Remembered so that is_pic can recognize it.  */
  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));

  /* Place the whole sequence before the function's first insn.  */
  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up. */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
1596
/* Nonzero if X is a REG or a SUBREG of a REG; such operands need no
   extra instructions to load, which the cost code below exploits.  */
#define REG_OR_SUBREG_REG(X) \
  (GET_CODE (X) == REG \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

/* Strip a SUBREG wrapper: X itself when it is a REG, otherwise the
   register inside the subreg.  */
#define REG_OR_SUBREG_RTX(X) \
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

/* Nonzero if X is one of the frame-related pointer registers.  */
#define ARM_FRAME_RTX(X) \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
   || (X) == arg_pointer_rtx)
1607
/* Return an approximate cost for computing the rtx X, whose top-level
   code is CODE.  Larger numbers mean more expensive.  The values are
   in arbitrary internal units; a simple data-processing insn costs
   about 1.  NOTE(review): presumably reached via the RTX_COSTS macro
   in arm.h -- confirm.  */
int
arm_rtx_costs (x, code)
     rtx x;
     enum rtx_code code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      /* No hardware divide; these become library calls.  */
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      /* SImode shifts are nearly free: they can be folded into the
	 shifter operand of another insn.  */
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
		       && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      /* RSB with an immediate, or a subtract whose second operand is a
	 shift (or power-of-two multiply, which becomes a shift), is a
	 single insn.  */
      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  NOTE(review): the first disjunct below is subsumed by the
	 second (both test REG_OR_SUBREG_REG and ARM_FRAME_RTX on operand 0),
	 so the CONST_INT test has no effect -- looks like a latent bug;
	 confirm intent.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			     (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  /* Estimate the number of Booth steps needed for this
	     multiplier: each step of booth_unit_size bits costs 2.  */
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;
	  /* Tune as appropriate */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      /* (truncate (lshiftrt (mult (extend ..) (extend ..)) ..)) is the
	 high-part multiply pattern -- cheap when a fast multiplier
	 exists.  */
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      /* Conditional branches are expensive; conditional execution is
	 cheap.  */
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      /* Sign-extending a byte in a register takes extra shifts.  */
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    default:
      return 99;
    }
}
/* Adjust the scheduling cost COST of the dependence LINK between insn
   DEP and INSN.  Anti- and output dependences are free (XXX this is
   not strictly true for the FPA); a load issued straight after a store
   is given cost 1 when the load is likely to hit the cache.  Returns
   the adjusted cost.  */
int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  /* XXX This is not strictly true for the FPA. */
  if (REG_NOTE_KIND(link) == REG_DEP_ANTI
      || REG_NOTE_KIND(link) == REG_DEP_OUTPUT)
    return 0;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store, there is no conflict if the load reads
	 from a cached area.  Assume that loads from the stack, and from the
	 constant pool are cached, and that others will miss.  This is a
	 hack. */

      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (hard_frame_pointer_rtx,
			      XEXP (SET_SRC (i_pat), 0)))
	{
	  return 1;
	}
    }

  return cost;
}
1881
/* Tables of the eight floating point constants that the FPA accepts as
   immediates (see const_double_rtx_ok_for_fpu below).
   This code has been fixed for cross compilation. */

/* Nonzero once values_fpa has been filled in.  */
static int fpa_consts_inited = 0;

/* Decimal spellings of the eight FPA immediate values.  */
char *strings_fpa[8] = {
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

/* The same values converted to REAL_VALUE_TYPE, filled in lazily.  */
static REAL_VALUE_TYPE values_fpa[8];

/* Convert each string in strings_fpa to a REAL_VALUE_TYPE using the
   target's floating point representation, so the table is correct even
   when cross compiling.  */
static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}
1907
cce8749e
CH
1908/* Return TRUE if rtx X is a valid immediate FPU constant. */
1909
1910int
1911const_double_rtx_ok_for_fpu (x)
1912 rtx x;
1913{
ff9940b0
RE
1914 REAL_VALUE_TYPE r;
1915 int i;
1916
1917 if (!fpa_consts_inited)
1918 init_fpa_table ();
1919
1920 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1921 if (REAL_VALUE_MINUS_ZERO (r))
1922 return 0;
f3bb6135 1923
ff9940b0
RE
1924 for (i = 0; i < 8; i++)
1925 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1926 return 1;
f3bb6135 1927
ff9940b0 1928 return 0;
f3bb6135 1929}
/* Return TRUE if the negation of rtx X is a valid immediate FPU
   constant, i.e. -X appears in the FPA table (and is not minus zero).
   (The previous comment here was a copy of the one above and omitted
   the negation.)  */
int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  /* Check the negated value against the table.  */
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
cce8749e
CH
1954\f
1955/* Predicates for `match_operand' and `match_operator'. */
1956
ff9940b0 1957/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
1958 (SUBREG (MEM)...).
1959
1960 This function exists because at the time it was put in it led to better
1961 code. SUBREG(MEM) always needs a reload in the places where
1962 s_register_operand is used, and this seemed to lead to excessive
1963 reloading. */
ff9940b0
RE
1964
1965int
1966s_register_operand (op, mode)
1967 register rtx op;
1968 enum machine_mode mode;
1969{
1970 if (GET_MODE (op) != mode && mode != VOIDmode)
1971 return 0;
1972
1973 if (GET_CODE (op) == SUBREG)
f3bb6135 1974 op = SUBREG_REG (op);
ff9940b0
RE
1975
1976 /* We don't consider registers whose class is NO_REGS
1977 to be a register operand. */
1978 return (GET_CODE (op) == REG
1979 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1980 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1981}
1982
e2c671ba
RE
1983/* Only accept reg, subreg(reg), const_int. */
1984
1985int
1986reg_or_int_operand (op, mode)
1987 register rtx op;
1988 enum machine_mode mode;
1989{
1990 if (GET_CODE (op) == CONST_INT)
1991 return 1;
1992
1993 if (GET_MODE (op) != mode && mode != VOIDmode)
1994 return 0;
1995
1996 if (GET_CODE (op) == SUBREG)
1997 op = SUBREG_REG (op);
1998
1999 /* We don't consider registers whose class is NO_REGS
2000 to be a register operand. */
2001 return (GET_CODE (op) == REG
2002 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2003 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2004}
2005
ff9940b0
RE
2006/* Return 1 if OP is an item in memory, given that we are in reload. */
2007
2008int
2009reload_memory_operand (op, mode)
2010 rtx op;
74bbc178 2011 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2012{
2013 int regno = true_regnum (op);
2014
2015 return (! CONSTANT_P (op)
2016 && (regno == -1
2017 || (GET_CODE (op) == REG
2018 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2019}
2020
4d818c85
RE
2021/* Return 1 if OP is a valid memory address, but not valid for a signed byte
2022 memory access (architecture V4) */
2023int
2024bad_signed_byte_operand (op, mode)
2025 rtx op;
2026 enum machine_mode mode;
2027{
2028 if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
2029 return 0;
2030
2031 op = XEXP (op, 0);
2032
2033 /* A sum of anything more complex than reg + reg or reg + const is bad */
2034 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
9c8cc54f
RE
2035 && (! s_register_operand (XEXP (op, 0), VOIDmode)
2036 || (! s_register_operand (XEXP (op, 1), VOIDmode)
2037 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
2038 return 1;
2039
2040 /* Big constants are also bad */
2041 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2042 && (INTVAL (XEXP (op, 1)) > 0xff
2043 || -INTVAL (XEXP (op, 1)) > 0xff))
2044 return 1;
2045
2046 /* Everything else is good, or can will automatically be made so. */
2047 return 0;
2048}
2049
cce8749e
CH
2050/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2051
2052int
2053arm_rhs_operand (op, mode)
2054 rtx op;
2055 enum machine_mode mode;
2056{
ff9940b0 2057 return (s_register_operand (op, mode)
cce8749e 2058 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2059}
cce8749e 2060
ff9940b0
RE
2061/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
2062 */
2063
2064int
2065arm_rhsm_operand (op, mode)
2066 rtx op;
2067 enum machine_mode mode;
2068{
2069 return (s_register_operand (op, mode)
2070 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2071 || memory_operand (op, mode));
f3bb6135 2072}
ff9940b0
RE
2073
2074/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
2075 constant that is valid when negated. */
2076
2077int
2078arm_add_operand (op, mode)
2079 rtx op;
2080 enum machine_mode mode;
2081{
2082 return (s_register_operand (op, mode)
2083 || (GET_CODE (op) == CONST_INT
2084 && (const_ok_for_arm (INTVAL (op))
2085 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2086}
ff9940b0
RE
2087
2088int
2089arm_not_operand (op, mode)
2090 rtx op;
2091 enum machine_mode mode;
2092{
2093 return (s_register_operand (op, mode)
2094 || (GET_CODE (op) == CONST_INT
2095 && (const_ok_for_arm (INTVAL (op))
2096 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 2097}
ff9940b0 2098
5165176d
RE
2099/* Return TRUE if the operand is a memory reference which contains an
2100 offsettable address. */
2101int
2102offsettable_memory_operand (op, mode)
2103 register rtx op;
2104 enum machine_mode mode;
2105{
2106 if (mode == VOIDmode)
2107 mode = GET_MODE (op);
2108
2109 return (mode == GET_MODE (op)
2110 && GET_CODE (op) == MEM
2111 && offsettable_address_p (reload_completed | reload_in_progress,
2112 mode, XEXP (op, 0)));
2113}
2114
2115/* Return TRUE if the operand is a memory reference which is, or can be
2116 made word aligned by adjusting the offset. */
2117int
2118alignable_memory_operand (op, mode)
2119 register rtx op;
2120 enum machine_mode mode;
2121{
2122 rtx reg;
2123
2124 if (mode == VOIDmode)
2125 mode = GET_MODE (op);
2126
2127 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2128 return 0;
2129
2130 op = XEXP (op, 0);
2131
2132 return ((GET_CODE (reg = op) == REG
2133 || (GET_CODE (op) == SUBREG
2134 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2135 || (GET_CODE (op) == PLUS
2136 && GET_CODE (XEXP (op, 1)) == CONST_INT
2137 && (GET_CODE (reg = XEXP (op, 0)) == REG
2138 || (GET_CODE (XEXP (op, 0)) == SUBREG
2139 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
2140 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
2141}
2142
b111229a
RE
2143/* Similar to s_register_operand, but does not allow hard integer
2144 registers. */
2145int
2146f_register_operand (op, mode)
2147 register rtx op;
2148 enum machine_mode mode;
2149{
2150 if (GET_MODE (op) != mode && mode != VOIDmode)
2151 return 0;
2152
2153 if (GET_CODE (op) == SUBREG)
2154 op = SUBREG_REG (op);
2155
2156 /* We don't consider registers whose class is NO_REGS
2157 to be a register operand. */
2158 return (GET_CODE (op) == REG
2159 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2160 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2161}
2162
cce8749e
CH
2163/* Return TRUE for valid operands for the rhs of an FPU instruction. */
2164
2165int
2166fpu_rhs_operand (op, mode)
2167 rtx op;
2168 enum machine_mode mode;
2169{
ff9940b0 2170 if (s_register_operand (op, mode))
f3bb6135 2171 return TRUE;
cce8749e
CH
2172 else if (GET_CODE (op) == CONST_DOUBLE)
2173 return (const_double_rtx_ok_for_fpu (op));
f3bb6135
RE
2174
2175 return FALSE;
2176}
cce8749e 2177
ff9940b0
RE
2178int
2179fpu_add_operand (op, mode)
2180 rtx op;
2181 enum machine_mode mode;
2182{
2183 if (s_register_operand (op, mode))
f3bb6135 2184 return TRUE;
ff9940b0 2185 else if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
2186 return (const_double_rtx_ok_for_fpu (op)
2187 || neg_const_double_rtx_ok_for_fpu (op));
2188
2189 return FALSE;
ff9940b0
RE
2190}
2191
cce8749e
CH
2192/* Return nonzero if OP is a constant power of two. */
2193
2194int
2195power_of_two_operand (op, mode)
2196 rtx op;
74bbc178 2197 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
2198{
2199 if (GET_CODE (op) == CONST_INT)
2200 {
f3bb6135
RE
2201 HOST_WIDE_INT value = INTVAL(op);
2202 return value != 0 && (value & (value - 1)) == 0;
cce8749e 2203 }
f3bb6135
RE
2204 return FALSE;
2205}
cce8749e
CH
2206
2207/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 2208 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
2209 Note that this disallows MEM(REG+REG), but allows
2210 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
2211
2212int
2213di_operand (op, mode)
2214 rtx op;
2215 enum machine_mode mode;
2216{
ff9940b0 2217 if (s_register_operand (op, mode))
f3bb6135 2218 return TRUE;
cce8749e 2219
e9c6b69b
NC
2220 if (GET_CODE (op) == SUBREG)
2221 op = SUBREG_REG (op);
2222
cce8749e
CH
2223 switch (GET_CODE (op))
2224 {
2225 case CONST_DOUBLE:
2226 case CONST_INT:
f3bb6135
RE
2227 return TRUE;
2228
cce8749e 2229 case MEM:
f3bb6135
RE
2230 return memory_address_p (DImode, XEXP (op, 0));
2231
cce8749e 2232 default:
f3bb6135 2233 return FALSE;
cce8749e 2234 }
f3bb6135 2235}
cce8749e 2236
f3139301 2237/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 2238 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
2239 Note that this disallows MEM(REG+REG), but allows
2240 MEM(PRE/POST_INC/DEC(REG)). */
2241
2242int
2243soft_df_operand (op, mode)
2244 rtx op;
2245 enum machine_mode mode;
2246{
2247 if (s_register_operand (op, mode))
2248 return TRUE;
2249
e9c6b69b
NC
2250 if (GET_CODE (op) == SUBREG)
2251 op = SUBREG_REG (op);
2252
f3139301
DE
2253 switch (GET_CODE (op))
2254 {
2255 case CONST_DOUBLE:
2256 return TRUE;
2257
2258 case MEM:
2259 return memory_address_p (DFmode, XEXP (op, 0));
2260
2261 default:
2262 return FALSE;
2263 }
2264}
2265
cce8749e
CH
2266/* Return TRUE for valid index operands. */
2267
2268int
2269index_operand (op, mode)
2270 rtx op;
2271 enum machine_mode mode;
2272{
ff9940b0
RE
2273 return (s_register_operand(op, mode)
2274 || (immediate_operand (op, mode)
2275 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
f3bb6135 2276}
cce8749e 2277
ff9940b0
RE
2278/* Return TRUE for valid shifts by a constant. This also accepts any
2279 power of two on the (somewhat overly relaxed) assumption that the
2280 shift operator in this case was a mult. */
2281
2282int
2283const_shift_operand (op, mode)
2284 rtx op;
2285 enum machine_mode mode;
2286{
2287 return (power_of_two_operand (op, mode)
2288 || (immediate_operand (op, mode)
2289 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
f3bb6135 2290}
ff9940b0 2291
cce8749e
CH
2292/* Return TRUE for arithmetic operators which can be combined with a multiply
2293 (shift). */
2294
2295int
2296shiftable_operator (x, mode)
2297 rtx x;
2298 enum machine_mode mode;
2299{
2300 if (GET_MODE (x) != mode)
2301 return FALSE;
2302 else
2303 {
2304 enum rtx_code code = GET_CODE (x);
2305
2306 return (code == PLUS || code == MINUS
2307 || code == IOR || code == XOR || code == AND);
2308 }
f3bb6135 2309}
cce8749e
CH
2310
2311/* Return TRUE for shift operators. */
2312
2313int
2314shift_operator (x, mode)
2315 rtx x;
2316 enum machine_mode mode;
2317{
2318 if (GET_MODE (x) != mode)
2319 return FALSE;
2320 else
2321 {
2322 enum rtx_code code = GET_CODE (x);
2323
ff9940b0 2324 if (code == MULT)
aec3cfba 2325 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 2326
e2c671ba
RE
2327 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2328 || code == ROTATERT);
cce8749e 2329 }
f3bb6135 2330}
ff9940b0
RE
2331
2332int equality_operator (x, mode)
f3bb6135 2333 rtx x;
74bbc178 2334 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2335{
f3bb6135 2336 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
2337}
2338
2339/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
2340
2341int
2342minmax_operator (x, mode)
2343 rtx x;
2344 enum machine_mode mode;
2345{
2346 enum rtx_code code = GET_CODE (x);
2347
2348 if (GET_MODE (x) != mode)
2349 return FALSE;
f3bb6135 2350
ff9940b0 2351 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 2352}
ff9940b0
RE
2353
2354/* return TRUE if x is EQ or NE */
2355
2356/* Return TRUE if this is the condition code register, if we aren't given
2357 a mode, accept any class CCmode register */
2358
2359int
2360cc_register (x, mode)
f3bb6135
RE
2361 rtx x;
2362 enum machine_mode mode;
ff9940b0
RE
2363{
2364 if (mode == VOIDmode)
2365 {
2366 mode = GET_MODE (x);
2367 if (GET_MODE_CLASS (mode) != MODE_CC)
2368 return FALSE;
2369 }
f3bb6135 2370
ff9940b0
RE
2371 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2372 return TRUE;
f3bb6135 2373
ff9940b0
RE
2374 return FALSE;
2375}
5bbe2d40
RE
2376
2377/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
2378 a mode, accept any class CCmode register which indicates a dominance
2379 expression. */
5bbe2d40
RE
2380
2381int
84ed5e79 2382dominant_cc_register (x, mode)
5bbe2d40
RE
2383 rtx x;
2384 enum machine_mode mode;
2385{
2386 if (mode == VOIDmode)
2387 {
2388 mode = GET_MODE (x);
84ed5e79 2389 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
2390 return FALSE;
2391 }
2392
84ed5e79
RE
2393 if (mode != CC_DNEmode && mode != CC_DEQmode
2394 && mode != CC_DLEmode && mode != CC_DLTmode
2395 && mode != CC_DGEmode && mode != CC_DGTmode
2396 && mode != CC_DLEUmode && mode != CC_DLTUmode
2397 && mode != CC_DGEUmode && mode != CC_DGTUmode)
2398 return FALSE;
2399
5bbe2d40
RE
2400 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2401 return TRUE;
2402
2403 return FALSE;
2404}
2405
2b835d68
RE
2406/* Return TRUE if X references a SYMBOL_REF. */
2407int
2408symbol_mentioned_p (x)
2409 rtx x;
2410{
2411 register char *fmt;
2412 register int i;
2413
2414 if (GET_CODE (x) == SYMBOL_REF)
2415 return 1;
2416
2417 fmt = GET_RTX_FORMAT (GET_CODE (x));
2418 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2419 {
2420 if (fmt[i] == 'E')
2421 {
2422 register int j;
2423
2424 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2425 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2426 return 1;
2427 }
2428 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2429 return 1;
2430 }
2431
2432 return 0;
2433}
2434
2435/* Return TRUE if X references a LABEL_REF. */
2436int
2437label_mentioned_p (x)
2438 rtx x;
2439{
2440 register char *fmt;
2441 register int i;
2442
2443 if (GET_CODE (x) == LABEL_REF)
2444 return 1;
2445
2446 fmt = GET_RTX_FORMAT (GET_CODE (x));
2447 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2448 {
2449 if (fmt[i] == 'E')
2450 {
2451 register int j;
2452
2453 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2454 if (label_mentioned_p (XVECEXP (x, i, j)))
2455 return 1;
2456 }
2457 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2458 return 1;
2459 }
2460
2461 return 0;
2462}
2463
ff9940b0
RE
2464enum rtx_code
2465minmax_code (x)
f3bb6135 2466 rtx x;
ff9940b0
RE
2467{
2468 enum rtx_code code = GET_CODE (x);
2469
2470 if (code == SMAX)
2471 return GE;
f3bb6135 2472 else if (code == SMIN)
ff9940b0 2473 return LE;
f3bb6135 2474 else if (code == UMIN)
ff9940b0 2475 return LEU;
f3bb6135 2476 else if (code == UMAX)
ff9940b0 2477 return GEU;
f3bb6135 2478
ff9940b0
RE
2479 abort ();
2480}
2481
2482/* Return 1 if memory locations are adjacent */
2483
f3bb6135 2484int
ff9940b0
RE
2485adjacent_mem_locations (a, b)
2486 rtx a, b;
2487{
2488 int val0 = 0, val1 = 0;
2489 int reg0, reg1;
2490
2491 if ((GET_CODE (XEXP (a, 0)) == REG
2492 || (GET_CODE (XEXP (a, 0)) == PLUS
2493 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2494 && (GET_CODE (XEXP (b, 0)) == REG
2495 || (GET_CODE (XEXP (b, 0)) == PLUS
2496 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
2497 {
2498 if (GET_CODE (XEXP (a, 0)) == PLUS)
2499 {
2500 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2501 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2502 }
2503 else
2504 reg0 = REGNO (XEXP (a, 0));
2505 if (GET_CODE (XEXP (b, 0)) == PLUS)
2506 {
2507 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2508 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2509 }
2510 else
2511 reg1 = REGNO (XEXP (b, 0));
2512 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2513 }
2514 return 0;
2515}
2516
2517/* Return 1 if OP is a load multiple operation. It is known to be
2518 parallel and the first section will be tested. */
2519
f3bb6135 2520int
ff9940b0
RE
2521load_multiple_operation (op, mode)
2522 rtx op;
74bbc178 2523 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2524{
f3bb6135 2525 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
2526 int dest_regno;
2527 rtx src_addr;
f3bb6135 2528 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
2529 rtx elt;
2530
2531 if (count <= 1
2532 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2533 return 0;
2534
2535 /* Check to see if this might be a write-back */
2536 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2537 {
2538 i++;
2539 base = 1;
2540
2541 /* Now check it more carefully */
2542 if (GET_CODE (SET_DEST (elt)) != REG
2543 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2544 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2545 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2546 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2547 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2548 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2549 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2550 != REGNO (SET_DEST (elt)))
2551 return 0;
f3bb6135 2552
ff9940b0
RE
2553 count--;
2554 }
2555
2556 /* Perform a quick check so we don't blow up below. */
2557 if (count <= i
2558 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2559 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2560 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
2561 return 0;
2562
2563 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2564 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2565
2566 for (; i < count; i++)
2567 {
ed4c4348 2568 elt = XVECEXP (op, 0, i);
ff9940b0
RE
2569
2570 if (GET_CODE (elt) != SET
2571 || GET_CODE (SET_DEST (elt)) != REG
2572 || GET_MODE (SET_DEST (elt)) != SImode
2573 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2574 || GET_CODE (SET_SRC (elt)) != MEM
2575 || GET_MODE (SET_SRC (elt)) != SImode
2576 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2577 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2578 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2579 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2580 return 0;
2581 }
2582
2583 return 1;
2584}
2585
2586/* Return 1 if OP is a store multiple operation. It is known to be
2587 parallel and the first section will be tested. */
2588
f3bb6135 2589int
ff9940b0
RE
2590store_multiple_operation (op, mode)
2591 rtx op;
74bbc178 2592 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2593{
f3bb6135 2594 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
2595 int src_regno;
2596 rtx dest_addr;
f3bb6135 2597 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
2598 rtx elt;
2599
2600 if (count <= 1
2601 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2602 return 0;
2603
2604 /* Check to see if this might be a write-back */
2605 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2606 {
2607 i++;
2608 base = 1;
2609
2610 /* Now check it more carefully */
2611 if (GET_CODE (SET_DEST (elt)) != REG
2612 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2613 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2614 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2615 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2616 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2617 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2618 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2619 != REGNO (SET_DEST (elt)))
2620 return 0;
f3bb6135 2621
ff9940b0
RE
2622 count--;
2623 }
2624
2625 /* Perform a quick check so we don't blow up below. */
2626 if (count <= i
2627 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2628 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
2629 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
2630 return 0;
2631
2632 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
2633 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
2634
2635 for (; i < count; i++)
2636 {
2637 elt = XVECEXP (op, 0, i);
2638
2639 if (GET_CODE (elt) != SET
2640 || GET_CODE (SET_SRC (elt)) != REG
2641 || GET_MODE (SET_SRC (elt)) != SImode
2642 || REGNO (SET_SRC (elt)) != src_regno + i - base
2643 || GET_CODE (SET_DEST (elt)) != MEM
2644 || GET_MODE (SET_DEST (elt)) != SImode
2645 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2646 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2647 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2648 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
2649 return 0;
2650 }
2651
2652 return 1;
2653}
e2c671ba 2654
84ed5e79
RE
2655int
2656load_multiple_sequence (operands, nops, regs, base, load_offset)
2657 rtx *operands;
2658 int nops;
2659 int *regs;
2660 int *base;
2661 HOST_WIDE_INT *load_offset;
2662{
2663 int unsorted_regs[4];
2664 HOST_WIDE_INT unsorted_offsets[4];
2665 int order[4];
ad076f4e 2666 int base_reg = -1;
84ed5e79
RE
2667 int i;
2668
2669 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2670 extended if required. */
2671 if (nops < 2 || nops > 4)
2672 abort ();
2673
2674 /* Loop over the operands and check that the memory references are
2675 suitable (ie immediate offsets from the same base register). At
2676 the same time, extract the target register, and the memory
2677 offsets. */
2678 for (i = 0; i < nops; i++)
2679 {
2680 rtx reg;
2681 rtx offset;
2682
56636818
JL
2683 /* Convert a subreg of a mem into the mem itself. */
2684 if (GET_CODE (operands[nops + i]) == SUBREG)
2685 operands[nops + i] = alter_subreg(operands[nops + i]);
2686
84ed5e79
RE
2687 if (GET_CODE (operands[nops + i]) != MEM)
2688 abort ();
2689
2690 /* Don't reorder volatile memory references; it doesn't seem worth
2691 looking for the case where the order is ok anyway. */
2692 if (MEM_VOLATILE_P (operands[nops + i]))
2693 return 0;
2694
2695 offset = const0_rtx;
2696
2697 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2698 || (GET_CODE (reg) == SUBREG
2699 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2700 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2701 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2702 == REG)
2703 || (GET_CODE (reg) == SUBREG
2704 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2705 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2706 == CONST_INT)))
2707 {
2708 if (i == 0)
2709 {
2710 base_reg = REGNO(reg);
2711 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2712 ? REGNO (operands[i])
2713 : REGNO (SUBREG_REG (operands[i])));
2714 order[0] = 0;
2715 }
2716 else
2717 {
2718 if (base_reg != REGNO (reg))
2719 /* Not addressed from the same base register. */
2720 return 0;
2721
2722 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2723 ? REGNO (operands[i])
2724 : REGNO (SUBREG_REG (operands[i])));
2725 if (unsorted_regs[i] < unsorted_regs[order[0]])
2726 order[0] = i;
2727 }
2728
2729 /* If it isn't an integer register, or if it overwrites the
2730 base register but isn't the last insn in the list, then
2731 we can't do this. */
2732 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
2733 || (i != nops - 1 && unsorted_regs[i] == base_reg))
2734 return 0;
2735
2736 unsorted_offsets[i] = INTVAL (offset);
2737 }
2738 else
2739 /* Not a suitable memory address. */
2740 return 0;
2741 }
2742
2743 /* All the useful information has now been extracted from the
2744 operands into unsorted_regs and unsorted_offsets; additionally,
2745 order[0] has been set to the lowest numbered register in the
2746 list. Sort the registers into order, and check that the memory
2747 offsets are ascending and adjacent. */
2748
2749 for (i = 1; i < nops; i++)
2750 {
2751 int j;
2752
2753 order[i] = order[i - 1];
2754 for (j = 0; j < nops; j++)
2755 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2756 && (order[i] == order[i - 1]
2757 || unsorted_regs[j] < unsorted_regs[order[i]]))
2758 order[i] = j;
2759
2760 /* Have we found a suitable register? if not, one must be used more
2761 than once. */
2762 if (order[i] == order[i - 1])
2763 return 0;
2764
2765 /* Is the memory address adjacent and ascending? */
2766 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2767 return 0;
2768 }
2769
2770 if (base)
2771 {
2772 *base = base_reg;
2773
2774 for (i = 0; i < nops; i++)
2775 regs[i] = unsorted_regs[order[i]];
2776
2777 *load_offset = unsorted_offsets[order[0]];
2778 }
2779
2780 if (unsorted_offsets[order[0]] == 0)
2781 return 1; /* ldmia */
2782
2783 if (unsorted_offsets[order[0]] == 4)
2784 return 2; /* ldmib */
2785
2786 if (unsorted_offsets[order[nops - 1]] == 0)
2787 return 3; /* ldmda */
2788
2789 if (unsorted_offsets[order[nops - 1]] == -4)
2790 return 4; /* ldmdb */
2791
b36ba79f 2792 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm if
6cc8c0b3
NC
2793 the offset isn't small enough. The reason 2 ldrs are faster is because
2794 these ARMs are able to do more than one cache access in a single cycle.
2795 The ARM9 and StrongARM have Harvard caches, whilst the ARM8 has a double
2796 bandwidth cache. This means that these cores can do both an instruction
2797 fetch and a data fetch in a single cycle, so the trick of calculating the
2798 address into a scratch register (one of the result regs) and then doing a
2799 load multiple actually becomes slower (and no smaller in code size). That
2800 is the transformation
2801
2802 ldr rd1, [rbase + offset]
2803 ldr rd2, [rbase + offset + 4]
2804
2805 to
2806
2807 add rd1, rbase, offset
2808 ldmia rd1, {rd1, rd2}
2809
2810 produces worse code -- '3 cycles + any stalls on rd2' instead of '2 cycles
2811 + any stalls on rd2'. On ARMs with only one cache access per cycle, the
2812 first sequence could never complete in less than 6 cycles, whereas the ldm
2813 sequence would only take 5 and would make better use of sequential accesses
2814 if not hitting the cache.
2815
2816 We cheat here and test 'arm_ld_sched' which we currently know to only be
2817 true for the ARM8, ARM9 and StrongARM. If this ever changes, then the test
2818 below needs to be reworked. */
f5a1b0d2 2819 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
2820 return 0;
2821
84ed5e79
RE
2822 /* Can't do it without setting up the offset, only do this if it takes
2823 no more than one insn. */
2824 return (const_ok_for_arm (unsorted_offsets[order[0]])
2825 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
2826}
2827
2828char *
2829emit_ldm_seq (operands, nops)
2830 rtx *operands;
2831 int nops;
2832{
2833 int regs[4];
2834 int base_reg;
2835 HOST_WIDE_INT offset;
2836 char buf[100];
2837 int i;
2838
2839 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2840 {
2841 case 1:
2842 strcpy (buf, "ldm%?ia\t");
2843 break;
2844
2845 case 2:
2846 strcpy (buf, "ldm%?ib\t");
2847 break;
2848
2849 case 3:
2850 strcpy (buf, "ldm%?da\t");
2851 break;
2852
2853 case 4:
2854 strcpy (buf, "ldm%?db\t");
2855 break;
2856
2857 case 5:
2858 if (offset >= 0)
2859 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2860 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2861 (long) offset);
2862 else
2863 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2864 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2865 (long) -offset);
2866 output_asm_insn (buf, operands);
2867 base_reg = regs[0];
2868 strcpy (buf, "ldm%?ia\t");
2869 break;
2870
2871 default:
2872 abort ();
2873 }
2874
2875 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2876 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2877
2878 for (i = 1; i < nops; i++)
2879 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2880 reg_names[regs[i]]);
2881
2882 strcat (buf, "}\t%@ phole ldm");
2883
2884 output_asm_insn (buf, operands);
2885 return "";
2886}
2887
2888int
2889store_multiple_sequence (operands, nops, regs, base, load_offset)
2890 rtx *operands;
2891 int nops;
2892 int *regs;
2893 int *base;
2894 HOST_WIDE_INT *load_offset;
2895{
2896 int unsorted_regs[4];
2897 HOST_WIDE_INT unsorted_offsets[4];
2898 int order[4];
ad076f4e 2899 int base_reg = -1;
84ed5e79
RE
2900 int i;
2901
2902 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2903 extended if required. */
2904 if (nops < 2 || nops > 4)
2905 abort ();
2906
2907 /* Loop over the operands and check that the memory references are
2908 suitable (ie immediate offsets from the same base register). At
2909 the same time, extract the target register, and the memory
2910 offsets. */
2911 for (i = 0; i < nops; i++)
2912 {
2913 rtx reg;
2914 rtx offset;
2915
56636818
JL
2916 /* Convert a subreg of a mem into the mem itself. */
2917 if (GET_CODE (operands[nops + i]) == SUBREG)
2918 operands[nops + i] = alter_subreg(operands[nops + i]);
2919
84ed5e79
RE
2920 if (GET_CODE (operands[nops + i]) != MEM)
2921 abort ();
2922
2923 /* Don't reorder volatile memory references; it doesn't seem worth
2924 looking for the case where the order is ok anyway. */
2925 if (MEM_VOLATILE_P (operands[nops + i]))
2926 return 0;
2927
2928 offset = const0_rtx;
2929
2930 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2931 || (GET_CODE (reg) == SUBREG
2932 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2933 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2934 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2935 == REG)
2936 || (GET_CODE (reg) == SUBREG
2937 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2938 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2939 == CONST_INT)))
2940 {
2941 if (i == 0)
2942 {
2943 base_reg = REGNO(reg);
2944 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2945 ? REGNO (operands[i])
2946 : REGNO (SUBREG_REG (operands[i])));
2947 order[0] = 0;
2948 }
2949 else
2950 {
2951 if (base_reg != REGNO (reg))
2952 /* Not addressed from the same base register. */
2953 return 0;
2954
2955 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2956 ? REGNO (operands[i])
2957 : REGNO (SUBREG_REG (operands[i])));
2958 if (unsorted_regs[i] < unsorted_regs[order[0]])
2959 order[0] = i;
2960 }
2961
2962 /* If it isn't an integer register, then we can't do this. */
2963 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
2964 return 0;
2965
2966 unsorted_offsets[i] = INTVAL (offset);
2967 }
2968 else
2969 /* Not a suitable memory address. */
2970 return 0;
2971 }
2972
2973 /* All the useful information has now been extracted from the
2974 operands into unsorted_regs and unsorted_offsets; additionally,
2975 order[0] has been set to the lowest numbered register in the
2976 list. Sort the registers into order, and check that the memory
2977 offsets are ascending and adjacent. */
2978
2979 for (i = 1; i < nops; i++)
2980 {
2981 int j;
2982
2983 order[i] = order[i - 1];
2984 for (j = 0; j < nops; j++)
2985 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2986 && (order[i] == order[i - 1]
2987 || unsorted_regs[j] < unsorted_regs[order[i]]))
2988 order[i] = j;
2989
2990 /* Have we found a suitable register? if not, one must be used more
2991 than once. */
2992 if (order[i] == order[i - 1])
2993 return 0;
2994
2995 /* Is the memory address adjacent and ascending? */
2996 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2997 return 0;
2998 }
2999
3000 if (base)
3001 {
3002 *base = base_reg;
3003
3004 for (i = 0; i < nops; i++)
3005 regs[i] = unsorted_regs[order[i]];
3006
3007 *load_offset = unsorted_offsets[order[0]];
3008 }
3009
3010 if (unsorted_offsets[order[0]] == 0)
3011 return 1; /* stmia */
3012
3013 if (unsorted_offsets[order[0]] == 4)
3014 return 2; /* stmib */
3015
3016 if (unsorted_offsets[order[nops - 1]] == 0)
3017 return 3; /* stmda */
3018
3019 if (unsorted_offsets[order[nops - 1]] == -4)
3020 return 4; /* stmdb */
3021
3022 return 0;
3023}
3024
3025char *
3026emit_stm_seq (operands, nops)
3027 rtx *operands;
3028 int nops;
3029{
3030 int regs[4];
3031 int base_reg;
3032 HOST_WIDE_INT offset;
3033 char buf[100];
3034 int i;
3035
3036 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3037 {
3038 case 1:
3039 strcpy (buf, "stm%?ia\t");
3040 break;
3041
3042 case 2:
3043 strcpy (buf, "stm%?ib\t");
3044 break;
3045
3046 case 3:
3047 strcpy (buf, "stm%?da\t");
3048 break;
3049
3050 case 4:
3051 strcpy (buf, "stm%?db\t");
3052 break;
3053
3054 default:
3055 abort ();
3056 }
3057
3058 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3059 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3060
3061 for (i = 1; i < nops; i++)
3062 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3063 reg_names[regs[i]]);
3064
3065 strcat (buf, "}\t%@ phole stm");
3066
3067 output_asm_insn (buf, operands);
3068 return "";
3069}
3070
e2c671ba
RE
3071int
3072multi_register_push (op, mode)
0a81f500 3073 rtx op;
74bbc178 3074 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3075{
3076 if (GET_CODE (op) != PARALLEL
3077 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3078 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3079 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3080 return 0;
3081
3082 return 1;
3083}
3084
ff9940b0 3085\f
f3bb6135
RE
3086/* Routines for use with attributes */
3087
31fdb4d5
DE
3088/* Return nonzero if ATTR is a valid attribute for DECL.
3089 ATTRIBUTES are any existing attributes and ARGS are the arguments
3090 supplied with ATTR.
3091
3092 Supported attributes:
3093
3094 naked: don't output any prologue or epilogue code, the user is assumed
3095 to do the right thing. */
3096
3097int
74bbc178 3098arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3099 tree decl;
31fdb4d5
DE
3100 tree attr;
3101 tree args;
3102{
3103 if (args != NULL_TREE)
3104 return 0;
3105
3106 if (is_attribute_p ("naked", attr))
3107 return TREE_CODE (decl) == FUNCTION_DECL;
3108 return 0;
3109}
3110
3111/* Return non-zero if FUNC is a naked function. */
3112
3113static int
3114arm_naked_function_p (func)
3115 tree func;
3116{
3117 tree a;
3118
3119 if (TREE_CODE (func) != FUNCTION_DECL)
3120 abort ();
2e943e99 3121
31fdb4d5
DE
3122 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3123 return a != NULL_TREE;
3124}
f3bb6135 3125\f
ff9940b0
RE
3126/* Routines for use in generating RTL */
3127
f3bb6135 3128rtx
56636818 3129arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3130 in_struct_p, scalar_p)
ff9940b0
RE
3131 int base_regno;
3132 int count;
3133 rtx from;
3134 int up;
3135 int write_back;
56636818
JL
3136 int unchanging_p;
3137 int in_struct_p;
c6df88cb 3138 int scalar_p;
ff9940b0
RE
3139{
3140 int i = 0, j;
3141 rtx result;
3142 int sign = up ? 1 : -1;
56636818 3143 rtx mem;
ff9940b0 3144
43cffd11
RE
3145 result = gen_rtx_PARALLEL (VOIDmode,
3146 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3147 if (write_back)
f3bb6135 3148 {
ff9940b0 3149 XVECEXP (result, 0, 0)
43cffd11
RE
3150 = gen_rtx_SET (GET_MODE (from), from,
3151 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3152 i = 1;
3153 count++;
f3bb6135
RE
3154 }
3155
ff9940b0 3156 for (j = 0; i < count; i++, j++)
f3bb6135 3157 {
43cffd11 3158 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3159 RTX_UNCHANGING_P (mem) = unchanging_p;
3160 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3161 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3162 XVECEXP (result, 0, i)
3163 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3164 }
3165
ff9940b0 3166 if (write_back)
43cffd11 3167 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, from);
ff9940b0
RE
3168
3169 return result;
3170}
3171
f3bb6135 3172rtx
56636818 3173arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3174 in_struct_p, scalar_p)
ff9940b0
RE
3175 int base_regno;
3176 int count;
3177 rtx to;
3178 int up;
3179 int write_back;
56636818
JL
3180 int unchanging_p;
3181 int in_struct_p;
c6df88cb 3182 int scalar_p;
ff9940b0
RE
3183{
3184 int i = 0, j;
3185 rtx result;
3186 int sign = up ? 1 : -1;
56636818 3187 rtx mem;
ff9940b0 3188
43cffd11
RE
3189 result = gen_rtx_PARALLEL (VOIDmode,
3190 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3191 if (write_back)
f3bb6135 3192 {
ff9940b0 3193 XVECEXP (result, 0, 0)
43cffd11
RE
3194 = gen_rtx_SET (GET_MODE (to), to,
3195 plus_constant (to, count * 4 * sign));
ff9940b0
RE
3196 i = 1;
3197 count++;
f3bb6135
RE
3198 }
3199
ff9940b0 3200 for (j = 0; i < count; i++, j++)
f3bb6135 3201 {
43cffd11 3202 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
3203 RTX_UNCHANGING_P (mem) = unchanging_p;
3204 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3205 MEM_SCALAR_P (mem) = scalar_p;
56636818 3206
43cffd11
RE
3207 XVECEXP (result, 0, i)
3208 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
3209 }
3210
ff9940b0 3211 if (write_back)
43cffd11 3212 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, to);
ff9940b0
RE
3213
3214 return result;
3215}
3216
880e2516
RE
/* Expand a block copy (movstrqi).  OPERANDS[0] is the destination MEM,
   OPERANDS[1] the source MEM, OPERANDS[2] the byte count and
   OPERANDS[3] presumably the alignment (must be a multiple of 4 —
   TODO confirm against the movstrqi pattern).  Returns 1 if the copy
   was expanded, 0 if it should be left to the generic code (count not
   a CONST_INT, count > 64, or alignment check fails).  Uses hard
   registers r0-r3 as the copy buffer via arm_gen_load_multiple /
   arm_gen_store_multiple, then handles up to 3 trailing bytes with
   shifts and byte stores.  */
int
arm_gen_movstrqi (operands)
     rtx *operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
  int dst_scalar_p, src_scalar_p;

  /* Only expand small, constant-length, word-aligned copies.  */
  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  /* Capture the memory attributes so they can be replicated on every
     MEM we generate below.  */
  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  dst_scalar_p = MEM_SCALAR_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
  src_scalar_p = MEM_SCALAR_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  /* Words to load (rounded up), words to store (rounded down), and the
     leftover 0-3 bytes.  */
  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  /* The register that will hold the trailing partial word, if any.  */
  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);

  /* Copy up to four words per iteration through r0-r3, with base
     write-back except on the final partial transfers.  */
  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  src_unchanging_p,
					  src_in_struct_p,
					  src_scalar_p));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
					  FALSE, src_unchanging_p,
					  src_in_struct_p, src_scalar_p));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE,
					       (last_bytes == 0
						? FALSE : TRUE),
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else
	    {
	      /* A single word: store r0 directly, and bump DST by hand
		 if byte stores still follow.  */
	      mem = gen_rtx_MEM (SImode, dst);
	      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	      MEM_SCALAR_P (mem) = dst_scalar_p;
	      emit_move_insn (mem, gen_rtx_REG (SImode, 0));
	      if (last_bytes != 0)
		emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      /* One last full word: copy it via a fresh pseudo and advance both
	 final address registers.  */
      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx_MEM (SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      MEM_SCALAR_P (mem) = dst_scalar_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check */
	abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      /* Load the word containing the trailing bytes.  */
      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
	abort ();

      /* The bytes we want are in the top end of the word */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      /* Store the bytes highest-address first, shifting the next byte
	 down after each store.  */
      while (last_bytes)
	{
	  mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}

    }
  else
    {
      /* Little-endian: store the low byte and shift down, advancing DST
	 between stores.  */
      while (last_bytes)
	{
	  if (part_bytes_reg == NULL)
	    abort ();

	  mem = gen_rtx_MEM (QImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (dst, dst, const1_rtx));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }

  return 1;
}
3388
5165176d
RE
3389/* Generate a memory reference for a half word, such that it will be loaded
3390 into the top 16 bits of the word. We can assume that the address is
3391 known to be alignable and of the form reg, or plus (reg, const). */
3392rtx
3393gen_rotated_half_load (memref)
3394 rtx memref;
3395{
3396 HOST_WIDE_INT offset = 0;
3397 rtx base = XEXP (memref, 0);
3398
3399 if (GET_CODE (base) == PLUS)
3400 {
3401 offset = INTVAL (XEXP (base, 1));
3402 base = XEXP (base, 0);
3403 }
3404
956d6950 3405 /* If we aren't allowed to generate unaligned addresses, then fail. */
5165176d
RE
3406 if (TARGET_SHORT_BY_BYTES
3407 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
3408 return NULL;
3409
43cffd11 3410 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
3411
3412 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
3413 return base;
3414
43cffd11 3415 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
3416}
3417
84ed5e79 3418static enum machine_mode
74bbc178 3419select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
3420 rtx x;
3421 rtx y;
3422 HOST_WIDE_INT cond_or;
3423{
3424 enum rtx_code cond1, cond2;
3425 int swapped = 0;
3426
3427 /* Currently we will probably get the wrong result if the individual
3428 comparisons are not simple. This also ensures that it is safe to
956d6950 3429 reverse a comparison if necessary. */
84ed5e79
RE
3430 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
3431 != CCmode)
3432 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
3433 != CCmode))
3434 return CCmode;
3435
3436 if (cond_or)
3437 cond1 = reverse_condition (cond1);
3438
3439 /* If the comparisons are not equal, and one doesn't dominate the other,
3440 then we can't do this. */
3441 if (cond1 != cond2
3442 && ! comparison_dominates_p (cond1, cond2)
3443 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
3444 return CCmode;
3445
3446 if (swapped)
3447 {
3448 enum rtx_code temp = cond1;
3449 cond1 = cond2;
3450 cond2 = temp;
3451 }
3452
3453 switch (cond1)
3454 {
3455 case EQ:
3456 if (cond2 == EQ || ! cond_or)
3457 return CC_DEQmode;
3458
3459 switch (cond2)
3460 {
3461 case LE: return CC_DLEmode;
3462 case LEU: return CC_DLEUmode;
3463 case GE: return CC_DGEmode;
3464 case GEU: return CC_DGEUmode;
ad076f4e 3465 default: break;
84ed5e79
RE
3466 }
3467
3468 break;
3469
3470 case LT:
3471 if (cond2 == LT || ! cond_or)
3472 return CC_DLTmode;
3473 if (cond2 == LE)
3474 return CC_DLEmode;
3475 if (cond2 == NE)
3476 return CC_DNEmode;
3477 break;
3478
3479 case GT:
3480 if (cond2 == GT || ! cond_or)
3481 return CC_DGTmode;
3482 if (cond2 == GE)
3483 return CC_DGEmode;
3484 if (cond2 == NE)
3485 return CC_DNEmode;
3486 break;
3487
3488 case LTU:
3489 if (cond2 == LTU || ! cond_or)
3490 return CC_DLTUmode;
3491 if (cond2 == LEU)
3492 return CC_DLEUmode;
3493 if (cond2 == NE)
3494 return CC_DNEmode;
3495 break;
3496
3497 case GTU:
3498 if (cond2 == GTU || ! cond_or)
3499 return CC_DGTUmode;
3500 if (cond2 == GEU)
3501 return CC_DGEUmode;
3502 if (cond2 == NE)
3503 return CC_DNEmode;
3504 break;
3505
3506 /* The remaining cases only occur when both comparisons are the
3507 same. */
3508 case NE:
3509 return CC_DNEmode;
3510
3511 case LE:
3512 return CC_DLEmode;
3513
3514 case GE:
3515 return CC_DGEmode;
3516
3517 case LEU:
3518 return CC_DLEUmode;
3519
3520 case GEU:
3521 return CC_DGEUmode;
ad076f4e
RE
3522
3523 default:
3524 break;
84ed5e79
RE
3525 }
3526
3527 abort ();
3528}
3529
/* Return the CC mode that should be used when comparing X and Y with
   operator OP (implements SELECT_CC_MODE for this target).  Falls back
   to plain CCmode when no special mode applies.  */
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* An operation that sets the condition codes as a side-effect, the
     V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
				     INTVAL (XEXP (x, 2)));

  /* QImode equality: only the Z flag is meaningful.  */
  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  /* Unsigned compare of a sum against one of its operands: only the
     carry flag is needed.  */
  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
3602
ff9940b0
RE
3603/* X and Y are two things to compare using CODE. Emit the compare insn and
3604 return the rtx for register 0 in the proper mode. FP means this is a
3605 floating point compare: I don't think that it is needed on the arm. */
3606
3607rtx
74bbc178 3608gen_compare_reg (code, x, y)
ff9940b0
RE
3609 enum rtx_code code;
3610 rtx x, y;
3611{
3612 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
43cffd11 3613 rtx cc_reg = gen_rtx_REG (mode, 24);
ff9940b0 3614
43cffd11
RE
3615 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
3616 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
3617
3618 return cc_reg;
3619}
3620
0a81f500
RE
/* Handle reloading an HImode value from memory by synthesising it as
   two zero-extended QImode loads merged with a shift and IOR.
   OPERANDS[0] is the destination, OPERANDS[1] the memory (or spilt
   pseudo) source; OPERANDS[2] provides the scratch register(s) — the
   register after it is also used when the address must be rebuilt.  */
void
arm_reload_in_hi (operands)
     rtx *operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Peel a SUBREG, accumulating the byte offset it implies.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
		   - MIN (UNITS_PER_WORD,
			  GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      /* Move the whole address into the second scratch register.  */
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend the remaining high part to HOST_WIDE_INT.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFF)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Load the two bytes, zero-extended, then merge them in the order
     dictated by the target endianness.  */
  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  if (BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT
					 (SImode,
					  gen_rtx_SUBREG (SImode, operands[0], 0),
					  GEN_INT (8)),
					 scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT (SImode, scratch,
							 GEN_INT (8)),
					 gen_rtx_SUBREG (SImode, operands[0],
							 0))));
}
3730
f9cc092a
RE
/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */
void
arm_reload_out_hi (operands)
     rtx *operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  /* Peel a SUBREG, accumulating the byte offset it implies.  */
  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
		   - MIN (UNITS_PER_WORD,
			  GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }


  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Updating base_plus might destroy outval, see if we can
	     swap the scratch and base_plus.  */
	  if (! reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      /* Be conservative and copy OUTVAL into the scratch now,
		 this should only be necessary if outval is a subreg
		 of something larger than a word.  */
	      /* XXX Might this clobber base?  I can't see how it can,
		 since scratch is known to overlap with OUTVAL, and
		 must be wider than a word.  */
	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend the remaining high part to HOST_WIDE_INT.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFF)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Be careful not to destroy OUTVAL.  */
	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Updating base_plus might destroy outval, see if we
		 can swap the scratch and base_plus.  */
	      if (! reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  /* Be conservative and copy outval into scratch now,
		     this should only be necessary if outval is a
		     subreg of something larger than a word.  */
		  /* XXX Might this clobber base?  I can't see how it
		     can, since scratch is known to overlap with
		     outval.  */
		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Store the two bytes in the order dictated by the target endianness,
     using the scratch to hold the value shifted down by 8.  */
  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_rtx_SUBREG (QImode, scratch, 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_rtx_SUBREG (QImode, scratch, 0)));
    }
}
2b835d68
RE
3900\f
3901/* Routines for manipulation of the constant pool. */
3902/* This is unashamedly hacked from the version in sh.c, since the problem is
3903 extremely similar. */
3904
3905/* Arm instructions cannot load a large constant into a register,
3906 constants have to come from a pc relative load. The reference of a pc
3907 relative load instruction must be less than 1k infront of the instruction.
3908 This means that we often have to dump a constant inside a function, and
3909 generate code to branch around it.
3910
3911 It is important to minimize this, since the branches will slow things
3912 down and make things bigger.
3913
3914 Worst case code looks like:
3915
3916 ldr rn, L1
3917 b L2
3918 align
3919 L1: .long value
3920 L2:
3921 ..
3922
3923 ldr rn, L3
3924 b L4
3925 align
3926 L3: .long value
3927 L4:
3928 ..
3929
3930 We fix this by performing a scan before scheduling, which notices which
3931 instructions need to have their operands fetched from the constant table
3932 and builds the table.
3933
3934
3935 The algorithm is:
3936
3937 scan, find an instruction which needs a pcrel move. Look forward, find th
3938 last barrier which is within MAX_COUNT bytes of the requirement.
3939 If there isn't one, make one. Process all the instructions between
3940 the find and the barrier.
3941
3942 In the above example, we can tell that L3 is within 1k of L1, so
3943 the first move can be shrunk from the 2 insn+constant sequence into
3944 just 1 insn, and the constant moved to L3 to make:
3945
3946 ldr rn, L1
3947 ..
3948 ldr rn, L3
3949 b L4
3950 align
3951 L1: .long value
3952 L3: .long value
3953 L4:
3954
3955 Then the second move becomes the target for the shortening process.
3956
3957 */
3958
/* One entry in the constant (literal) pool.  NEXT_OFFSET is the offset
   of the byte just past this entry from the start of the pool — see
   add_constant, which returns next_offset - GET_MODE_SIZE (mode) as
   the entry's own offset.  */
typedef struct
{
  rtx value;			/* Value in table */
  HOST_WIDE_INT next_offset;	/* Offset of the byte just past this entry */
  enum machine_mode mode;	/* Mode of value */
} pool_node;

/* The maximum number of constants that can fit into one pool, since
   the pc relative range is 0...1020 bytes and constants are at least 4
   bytes long */

#define MAX_POOL_SIZE (1020/4)
static pool_node pool_vector[MAX_POOL_SIZE]; /* Entries in the current pool */
static int pool_size;		/* Number of entries currently in the pool */
static rtx pool_vector_label;	/* Label emitted at the head of the pool */
3974
332072db
RE
/* Add a constant to the pool and return its offset within the current
   pool.

   X is the rtx we want to replace.  MODE is its mode.  On return,
   ADDRESS_ONLY will be non-zero if we really want the address of such
   a constant, not the constant itself.  */
static HOST_WIDE_INT
add_constant (x, mode, address_only)
     rtx x;
     enum machine_mode mode;
     int * address_only;
{
  int i;
  HOST_WIDE_INT offset;

  * address_only = 0;

  /* Strip a reference to an existing pool entry down to the constant
     it contains.  */
  if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    x = get_pool_constant (XEXP (x, 0));
  else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P(x))
    {
      /* The caller wants the address of the pooled constant.  */
      *address_only = 1;
      mode = get_pool_mode (x);
      x = get_pool_constant (x);
    }
#ifndef AOF_ASSEMBLER
  else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
    x = XVECEXP (x, 0, 0);
#endif

#ifdef AOF_ASSEMBLER
  /* PIC Symbol references need to be converted into offsets into the
     based area.  */
  if (flag_pic && GET_CODE (x) == SYMBOL_REF)
    x = aof_pic_entry (x);
#endif /* AOF_ASSEMBLER */

  /* First see if we've already got it */
  for (i = 0; i < pool_size; i++)
    {
      if (GET_CODE (x) == pool_vector[i].value->code
	  && mode == pool_vector[i].mode)
	{
	  /* Labels must match on their label number as well.  */
	  if (GET_CODE (x) == CODE_LABEL)
	    {
	      if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
		continue;
	    }
	  if (rtx_equal_p (x, pool_vector[i].value))
	    return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
	}
    }

  /* Need a new one */
  pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
  offset = 0;
  if (pool_size == 0)
    pool_vector_label = gen_label_rtx ();
  else
    /* Entry's end offset = previous end offset + this entry's size.  */
    pool_vector[pool_size].next_offset
      += (offset = pool_vector[pool_size - 1].next_offset);

  pool_vector[pool_size].value = x;
  pool_vector[pool_size].mode = mode;
  pool_size++;
  return offset;
}
4043
4044/* Output the literal table */
4045static void
4046dump_table (scan)
4047 rtx scan;
4048{
4049 int i;
4050
4051 scan = emit_label_after (gen_label_rtx (), scan);
4052 scan = emit_insn_after (gen_align_4 (), scan);
4053 scan = emit_label_after (pool_vector_label, scan);
4054
4055 for (i = 0; i < pool_size; i++)
4056 {
4057 pool_node *p = pool_vector + i;
4058
4059 switch (GET_MODE_SIZE (p->mode))
4060 {
4061 case 4:
4062 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
4063 break;
4064
4065 case 8:
4066 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
4067 break;
4068
4069 default:
4070 abort ();
4071 break;
4072 }
4073 }
4074
4075 scan = emit_insn_after (gen_consttable_end (), scan);
4076 scan = emit_barrier_after (scan);
4077 pool_size = 0;
4078}
4079
4080/* Non zero if the src operand needs to be fixed up */
4081static int
4082fixit (src, mode, destreg)
4083 rtx src;
4084 enum machine_mode mode;
4085 int destreg;
4086{
4087 if (CONSTANT_P (src))
4088 {
4089 if (GET_CODE (src) == CONST_INT)
4090 return (! const_ok_for_arm (INTVAL (src))
4091 && ! const_ok_for_arm (~INTVAL (src)));
4092 if (GET_CODE (src) == CONST_DOUBLE)
4093 return (GET_MODE (src) == VOIDmode
4094 || destreg < 16
4095 || (! const_double_rtx_ok_for_fpu (src)
4096 && ! neg_const_double_rtx_ok_for_fpu (src)));
4097 return symbol_mentioned_p (src);
4098 }
4099#ifndef AOF_ASSEMBLER
4100 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
4101 return 1;
4102#endif
4103 else
4104 return (mode == SImode && GET_CODE (src) == MEM
4105 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
4106 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
4107}
4108
/* Find the last barrier less than MAX_COUNT bytes from FROM, or create one.  */
static rtx
find_barrier (from, max_count)
     rtx from;
     int max_count;
{
  int count = 0;		/* Bytes scanned so far */
  rtx found_barrier = 0;	/* Last barrier seen within range */
  rtx last = from;		/* Last insn fully scanned */

  while (from && count < max_count)
    {
      rtx tmp;

      if (GET_CODE (from) == BARRIER)
	found_barrier = from;

      /* Count the length of this insn */
      if (GET_CODE (from) == INSN
	  && GET_CODE (PATTERN (from)) == SET
	  && CONSTANT_P (SET_SRC (PATTERN (from)))
	  && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
	/* A pool load may later expand; allow 8 bytes for it.  */
	count += 8;
      /* Handle table jumps as a single entity.  */
      else if (GET_CODE (from) == JUMP_INSN
	       && JUMP_LABEL (from) != 0
	       && ((tmp = next_real_insn (JUMP_LABEL (from)))
		   == next_real_insn (from))
	       && tmp != NULL
	       && GET_CODE (tmp) == JUMP_INSN
	       && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		   || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  /* Element vector 1 for ADDR_DIFF_VEC, 0 for ADDR_VEC.  */
	  int elt = GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC ? 1 : 0;
	  count += (get_attr_length (from)
		    + GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (tmp), elt));
	  /* Continue after the dispatch table.  */
	  last = from;
	  from = NEXT_INSN (tmp);
	  continue;
	}
      else
	count += get_attr_length (from);

      last = from;
      from = NEXT_INSN (from);
    }

  if (! found_barrier)
    {
      /* We didn't find a barrier in time to
	 dump our stuff, so we'll make one.  */
      rtx label = gen_label_rtx ();

      if (from)
	from = PREV_INSN (last);
      else
	from = get_last_insn ();

      /* Walk back to be just before any jump.  */
      while (GET_CODE (from) == JUMP_INSN
	     || GET_CODE (from) == NOTE
	     || GET_CODE (from) == CODE_LABEL)
	from = PREV_INSN (from);

      /* Emit a jump around the spot where the pool will go, and place
	 the barrier (and the landing label) after that jump.  */
      from = emit_jump_insn_after (gen_jump (label), from);
      JUMP_LABEL (from) = label;
      found_barrier = emit_barrier_after (from);
      emit_label_after (label, found_barrier);
    }

  return found_barrier;
}
4182
/* Non zero if the insn is a move instruction which needs to be fixed
   (i.e. its source must be materialized via the constant pool --
   see fixit).  Deleted insns and non-SET insns never qualify.  */
static int
broken_move (insn)
     rtx insn;
{
  if (!INSN_DELETED_P (insn)
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      rtx src = SET_SRC (pat);
      rtx dst = SET_DEST (pat);
      int destreg;
      enum machine_mode mode = GET_MODE (dst);

      /* Jumps written as moves to the pc are not our concern here.  */
      if (dst == pc_rtx)
	return 0;

      /* Only plain register (or register-subreg) destinations can be
	 fixed up; anything else is left alone.  */
      if (GET_CODE (dst) == REG)
	destreg = REGNO (dst);
      else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
	destreg = REGNO (SUBREG_REG (dst));
      else
	return 0;

      return fixit (src, mode, destreg);
    }
  return 0;
}
4212
/* Machine-dependent reorg pass: scan the function for moves whose source
   must come from the constant pool (see broken_move), collect those
   constants into pool tables, and rewrite the moves as pc-relative loads.
   FIRST is the first insn of the function.  */
void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  int count_size;		/* Max distance (bytes) allowed between a
				   fixed-up load and its pool entry.  */

#if 0
  /* The ldr instruction can work with up to a 4k offset, and most constants
     will be loaded with one of these instructions; however, the adr
     instruction and the ldf instructions only work with a 1k offset.  This
     code needs to be rewritten to use the 4k offset when possible, and to
     adjust when a 1k offset is needed.  For now we just use a 1k offset
     from the start.  */
  count_size = 4000;

  /* Floating point operands can't work further than 1024 bytes from the
     PC, so to make things simple we restrict all loads for such functions.
     */
  if (TARGET_HARD_FLOAT)
    {
      int regno;

      for (regno = 16; regno < 24; regno++)
	if (regs_ever_live[regno])
	  {
	    count_size = 1000;
	    break;
	  }
    }
#else
  count_size = 1000;
#endif /* 0 */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (broken_move (insn))
	{
	  /* This is a broken move instruction, scan ahead looking for
	     a barrier to stick the constant table behind.  */
	  rtx scan;
	  rtx barrier = find_barrier (insn, count_size);

	  /* Now find all the moves between the points and modify them.  */
	  for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
	    {
	      if (broken_move (scan))
		{
		  /* This is a broken move instruction, add it to the pool.  */
		  rtx pat = PATTERN (scan);
		  rtx src = SET_SRC (pat);
		  rtx dst = SET_DEST (pat);
		  enum machine_mode mode = GET_MODE (dst);
		  HOST_WIDE_INT offset;
		  rtx newinsn = scan;
		  rtx newsrc;
		  rtx addr;
		  int scratch;
		  int address_only;

		  /* If this is an HImode constant load, convert it into
		     an SImode constant load.  Since the register is always
		     32 bits this is safe.  We have to do this, since the
		     load pc-relative instruction only does a 32-bit load. */
		  if (mode == HImode)
		    {
		      mode = SImode;
		      if (GET_CODE (dst) != REG)
			abort ();
		      PUT_MODE (dst, SImode);
		    }

		  /* Record the constant in the pool; OFFSET is its byte
		     offset from the pool label.  */
		  offset = add_constant (src, mode, &address_only);
		  addr = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
							   pool_vector_label),
					offset);

		  /* If we only want the address of the pool entry, or
		     for wide moves to integer regs we need to split
		     the address calculation off into a separate insn.
		     If necessary, the load can then be done with a
		     load-multiple.  This is safe, since we have
		     already noted the length of such insns to be 8,
		     and we are immediately over-writing the scratch
		     we have grabbed with the final result.  */
		  if ((address_only || GET_MODE_SIZE (mode) > 4)
		      && (scratch = REGNO (dst)) < 16)
		    {
		      rtx reg;

		      if (mode == SImode)
			reg = dst;
		      else
			reg = gen_rtx_REG (SImode, scratch);

		      newinsn = emit_insn_after (gen_movaddr (reg, addr),
						 newinsn);
		      addr = reg;
		    }

		  if (! address_only)
		    {
		      newsrc = gen_rtx_MEM (mode, addr);

		      /* XXX Fixme -- I think the following is bogus.  */
		      /* Build a jump insn wrapper around the move instead
			 of an ordinary insn, because we want to have room for
			 the target label rtx in fld[7], which an ordinary
			 insn doesn't have. */
		      newinsn
			= emit_jump_insn_after (gen_rtx_SET (VOIDmode, dst,
							     newsrc),
						newinsn);
		      JUMP_LABEL (newinsn) = pool_vector_label;

		      /* But it's still an ordinary insn.  */
		      PUT_CODE (newinsn, INSN);
		    }

		  /* Kill old insn.  */
		  delete_insn (scan);
		  scan = newinsn;
		}
	    }
	  /* Emit the pool contents right after the barrier.  */
	  dump_table (barrier);
	  insn = scan;
	}
    }

  after_arm_reorg = 1;
}
4344
cce8749e
CH
4345\f
4346/* Routines to output assembly language. */
4347
/* If the rtx is the correct value then return the string of the number.
   In this way we can ensure that valid double constants are generated even
   when cross compiling.  X must be a CONST_DOUBLE matching one of the eight
   FPA immediate values; otherwise we abort.  */
char *
fp_immediate_constant (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  /* Lazily build the table of the eight representable FPA constants.  */
  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return strings_fpa[i];

  /* Caller guaranteed X was a valid FPA immediate; getting here is a bug.  */
  abort ();
}
4368
9997d19d
RE
/* As for fp_immediate_constant, but value is passed directly, not in rtx.
   R must equal one of the eight FPA immediate values; aborts otherwise.  */
static char *
fp_const_from_val (r)
     REAL_VALUE_TYPE *r;
{
  int i;

  /* Lazily build the table of the eight representable FPA constants.  */
  if (! fpa_consts_inited)
    init_fpa_table ();

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}
ff9940b0 4385
cce8749e
CH
4386/* Output the operands of a LDM/STM instruction to STREAM.
4387 MASK is the ARM register set mask of which only bits 0-15 are important.
4388 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
4389 must follow the register list. */
4390
4391void
4392print_multi_reg (stream, instr, mask, hat)
4393 FILE *stream;
4394 char *instr;
4395 int mask, hat;
4396{
4397 int i;
4398 int not_first = FALSE;
4399
1d5473cb 4400 fputc ('\t', stream);
f3139301 4401 fprintf (stream, instr, REGISTER_PREFIX);
1d5473cb 4402 fputs (", {", stream);
cce8749e
CH
4403 for (i = 0; i < 16; i++)
4404 if (mask & (1 << i))
4405 {
4406 if (not_first)
4407 fprintf (stream, ", ");
f3139301 4408 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
cce8749e
CH
4409 not_first = TRUE;
4410 }
f3bb6135 4411
cce8749e 4412 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 4413}
cce8749e
CH
4414
/* Output a 'call' insn.  OPERANDS[0] is the register holding the call
   target.  Returns "" since all assembly is emitted directly.  */
char *
output_call (operands)
     rtx *operands;
{
  /* Handle calls to lr using ip (which may be clobbered in subr anyway).
     The target must be copied out of r14 (lr) before lr is overwritten
     with the return address below; r12 (ip) is the scratch used.  */

  if (REGNO (operands[0]) == 14)
    {
      operands[0] = gen_rtx_REG (SImode, 12);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }
  /* Set the return address, then transfer control.  */
  output_asm_insn ("mov%?\t%|lr, %|pc", operands);

  /* Use BX when ARM/Thumb interworking is enabled so the callee's state
     bit is honoured; otherwise a plain move to pc suffices.  */
  if (TARGET_THUMB_INTERWORK)
    output_asm_insn ("bx%?\t%0", operands);
  else
    output_asm_insn ("mov%?\t%|pc, %0", operands);

  return "";
}
cce8749e 4437
ff9940b0
RE
/* Rewrite every reference to r14 (lr) inside *X into a reference to
   r12 (ip), recursing through all sub-rtxes.  Returns non-zero if any
   replacement was made.  Used by output_call_mem so the call target can
   survive lr being loaded with the return address.  */
static int
eliminate_lr2ip (x)
     rtx *x;
{
  int something_changed = 0;
  rtx x0 = *x;
  int code = GET_CODE (x0);
  register int i, j;
  register char *fmt;

  switch (code)
    {
    case REG:
      if (REGNO (x0) == 14)
	{
	  *x = gen_rtx_REG (SImode, 12);
	  return 1;
	}
      return 0;
    default:
      /* Scan through the sub-elements and change any references there.
	 'e' format fields are single rtxes, 'E' fields are vectors.  */
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (fmt[i] == 'e')
	  something_changed |= eliminate_lr2ip (&XEXP (x0, i));
	else if (fmt[i] == 'E')
	  for (j = 0; j < XVECLEN (x0, i); j++)
	    something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
      return something_changed;
    }
}
4469
/* Output a 'call' insn that is a reference in memory.  OPERANDS[0] is the
   memory location holding the call target.  Returns "".  */
char *
output_call_mem (operands)
     rtx *operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
  /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
     Any lr reference inside the address must be moved to ip first, since
     lr is about to receive the return address.  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  if (TARGET_THUMB_INTERWORK)
    {
      /* Load the target into ip, set the return address, then BX so the
	 callee's ARM/Thumb state bit is honoured.  */
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else
    {
      /* Set the return address, then load the target straight into pc.  */
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}
ff9940b0
RE
4496
4497
4498/* Output a move from arm registers to an fpu registers.
4499 OPERANDS[0] is an fpu register.
4500 OPERANDS[1] is the first registers of an arm register pair. */
4501
4502char *
4503output_mov_long_double_fpu_from_arm (operands)
f3bb6135 4504 rtx *operands;
ff9940b0
RE
4505{
4506 int arm_reg0 = REGNO (operands[1]);
4507 rtx ops[3];
4508
4509 if (arm_reg0 == 12)
4510 abort();
f3bb6135 4511
43cffd11
RE
4512 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4513 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
4514 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 4515
1d5473cb
RE
4516 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
4517 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
f3bb6135
RE
4518 return "";
4519}
ff9940b0
RE
4520
4521/* Output a move from an fpu register to arm registers.
4522 OPERANDS[0] is the first registers of an arm register pair.
4523 OPERANDS[1] is an fpu register. */
4524
4525char *
4526output_mov_long_double_arm_from_fpu (operands)
f3bb6135 4527 rtx *operands;
ff9940b0
RE
4528{
4529 int arm_reg0 = REGNO (operands[0]);
4530 rtx ops[3];
4531
4532 if (arm_reg0 == 12)
4533 abort();
f3bb6135 4534
43cffd11
RE
4535 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4536 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
4537 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 4538
1d5473cb
RE
4539 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
4540 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
4541 return "";
4542}
ff9940b0
RE
4543
4544/* Output a move from arm registers to arm registers of a long double
4545 OPERANDS[0] is the destination.
4546 OPERANDS[1] is the source. */
4547char *
4548output_mov_long_double_arm_from_arm (operands)
f3bb6135 4549 rtx *operands;
ff9940b0
RE
4550{
4551 /* We have to be careful here because the two might overlap */
4552 int dest_start = REGNO (operands[0]);
4553 int src_start = REGNO (operands[1]);
4554 rtx ops[2];
4555 int i;
4556
4557 if (dest_start < src_start)
4558 {
4559 for (i = 0; i < 3; i++)
4560 {
43cffd11
RE
4561 ops[0] = gen_rtx_REG (SImode, dest_start + i);
4562 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 4563 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4564 }
4565 }
4566 else
4567 {
4568 for (i = 2; i >= 0; i--)
4569 {
43cffd11
RE
4570 ops[0] = gen_rtx_REG (SImode, dest_start + i);
4571 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 4572 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4573 }
4574 }
f3bb6135 4575
ff9940b0
RE
4576 return "";
4577}
4578
4579
cce8749e
CH
4580/* Output a move from arm registers to an fpu registers.
4581 OPERANDS[0] is an fpu register.
4582 OPERANDS[1] is the first registers of an arm register pair. */
4583
4584char *
4585output_mov_double_fpu_from_arm (operands)
f3bb6135 4586 rtx *operands;
cce8749e
CH
4587{
4588 int arm_reg0 = REGNO (operands[1]);
4589 rtx ops[2];
4590
4591 if (arm_reg0 == 12)
4592 abort();
43cffd11
RE
4593 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4594 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
4595 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
4596 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
4597 return "";
4598}
cce8749e
CH
4599
4600/* Output a move from an fpu register to arm registers.
4601 OPERANDS[0] is the first registers of an arm register pair.
4602 OPERANDS[1] is an fpu register. */
4603
4604char *
4605output_mov_double_arm_from_fpu (operands)
f3bb6135 4606 rtx *operands;
cce8749e
CH
4607{
4608 int arm_reg0 = REGNO (operands[0]);
4609 rtx ops[2];
4610
4611 if (arm_reg0 == 12)
4612 abort();
f3bb6135 4613
43cffd11
RE
4614 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4615 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
4616 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
4617 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
4618 return "";
4619}
cce8749e
CH
4620
/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.
   Returns either "" (assembly already emitted) or a final template for
   the caller to output.  */
char *
output_move_double (operands)
     rtx * operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      /* The second word of the destination pair.  */
      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  if (reg1 == 12)
	    abort();

	  /* Ensure the second source is not overwritten: pick the move
	     order so the overlapping register is read before written.  */
	  if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
	    output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
	  else
	    output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
	}
      else if (code1 == CONST_DOUBLE)
	{
	  /* Split the constant into two 32-bit halves, in target word
	     order, and load each half as an immediate.  */
	  if (GET_MODE (operands[1]) == DFmode)
	    {
	      long l[2];
	      union real_extract u;

	      bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
		     sizeof (u));
	      REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
	      otherops[1] = GEN_INT(l[1]);
	      operands[1] = GEN_INT(l[0]);
	    }
	  else if (GET_MODE (operands[1]) != VOIDmode)
	    abort ();
	  else if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	    }
	  else
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	    }
	  output_mov_immediate (operands);
	  output_mov_immediate (otherops);
	}
      else if (code1 == CONST_INT)
	{
#if HOST_BITS_PER_WIDE_INT > 32
	  /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
	     what the upper word is.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	      operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	    }
	  else
	    {
	      otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	      operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	    }
#else
	  /* Sign extend the intval into the high-order word.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = operands[1];
	      operands[1] = (INTVAL (operands[1]) < 0
			     ? constm1_rtx : const0_rtx);
	    }
	  else
	    otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
	  output_mov_immediate (otherops);
	  output_mov_immediate (operands);
	}
      else if (code1 == MEM)
	{
	  /* Dispatch on the addressing mode of the source.  */
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;

	    case PRE_INC:
	      abort (); /* Should never happen now */
	      break;

	    case PRE_DEC:
	      output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	      break;

	    case POST_INC:
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;

	    case POST_DEC:
	      abort (); /* Should never happen now */
	      break;

	    case LABEL_REF:
	    case CONST:
	      /* Form the address with ADR, then load both words.  */
	      output_asm_insn ("adr%?\t%0, %1", operands);
	      output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	      break;

	    default:
	      if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
				   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
		{
		  /* base +/- offset: either fold small offsets into the
		     LDM addressing mode, or compute the address into the
		     destination register first.  */
		  otherops[0] = operands[0];
		  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
		  otherops[2] = XEXP (XEXP (operands[1], 0), 1);
		  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		    {
		      if (GET_CODE (otherops[2]) == CONST_INT)
			{
			  switch (INTVAL (otherops[2]))
			    {
			    case -8:
			      output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			      return "";
			    case -4:
			      output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			      return "";
			    case 4:
			      output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			      return "";
			    }
			  if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			    output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
			  else
			    output_asm_insn ("add%?\t%0, %1, %2", otherops);
			}
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("sub%?\t%0, %1, %2", otherops);
		  return "ldm%?ia\t%0, %M0";
		}
	      else
		{
		  /* Offsettable address: two single-word loads.  */
		  otherops[1] = adj_offsettable_operand (operands[1], 4);
		  /* Take care of overlapping base/data reg: load the word
		     that clobbers the base register last.  */
		  if (reg_mentioned_p (operands[0], operands[1]))
		    {
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		    }
		  else
		    {
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		    }
		}
	    }
	}
      else
	abort();  /* Constraints should prevent this */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == 12)
	abort();

      /* Dispatch on the addressing mode of the destination.  */
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  abort (); /* Should never happen now */
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  abort (); /* Should never happen now */
	  break;

	case PLUS:
	  /* Small constant offsets can be folded into the STM mode.  */
	  if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
	    {
	      switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  /* Fall through */

	default:
	  /* Offsettable address: two single-word stores.  */
	  otherops[0] = adj_offsettable_operand (operands[0], 4);
	  otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else
    abort();  /* Constraints should prevent this */

  return "";
}
cce8749e
CH
4854
4855
4856/* Output an arbitrary MOV reg, #n.
4857 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
4858
4859char *
4860output_mov_immediate (operands)
f3bb6135 4861 rtx *operands;
cce8749e 4862{
f3bb6135 4863 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
4864 int n_ones = 0;
4865 int i;
4866
4867 /* Try to use one MOV */
cce8749e 4868 if (const_ok_for_arm (n))
f3bb6135 4869 {
9997d19d 4870 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
4871 return "";
4872 }
cce8749e
CH
4873
4874 /* Try to use one MVN */
f3bb6135 4875 if (const_ok_for_arm (~n))
cce8749e 4876 {
f3bb6135 4877 operands[1] = GEN_INT (~n);
9997d19d 4878 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 4879 return "";
cce8749e
CH
4880 }
4881
4882 /* If all else fails, make it out of ORRs or BICs as appropriate. */
4883
4884 for (i=0; i < 32; i++)
4885 if (n & 1 << i)
4886 n_ones++;
4887
4888 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
9997d19d
RE
4889 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
4890 ~n);
cce8749e 4891 else
9997d19d
RE
4892 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
4893 n);
f3bb6135
RE
4894
4895 return "";
4896}
cce8749e
CH
4897
4898
4899/* Output an ADD r, s, #n where n may be too big for one instruction. If
4900 adding zero to one register, output nothing. */
4901
4902char *
4903output_add_immediate (operands)
f3bb6135 4904 rtx *operands;
cce8749e 4905{
f3bb6135 4906 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
4907
4908 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
4909 {
4910 if (n < 0)
4911 output_multi_immediate (operands,
9997d19d
RE
4912 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
4913 -n);
cce8749e
CH
4914 else
4915 output_multi_immediate (operands,
9997d19d
RE
4916 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
4917 n);
cce8749e 4918 }
f3bb6135
RE
4919
4920 return "";
4921}
cce8749e 4922
cce8749e
CH
4923/* Output a multiple immediate operation.
4924 OPERANDS is the vector of operands referred to in the output patterns.
4925 INSTR1 is the output pattern to use for the first constant.
4926 INSTR2 is the output pattern to use for subsequent constants.
4927 IMMED_OP is the index of the constant slot in OPERANDS.
4928 N is the constant value. */
4929
18af7313 4930static char *
cce8749e 4931output_multi_immediate (operands, instr1, instr2, immed_op, n)
f3bb6135 4932 rtx *operands;
cce8749e 4933 char *instr1, *instr2;
f3bb6135
RE
4934 int immed_op;
4935 HOST_WIDE_INT n;
cce8749e 4936{
f3bb6135
RE
4937#if HOST_BITS_PER_WIDE_INT > 32
4938 n &= 0xffffffff;
4939#endif
4940
cce8749e
CH
4941 if (n == 0)
4942 {
4943 operands[immed_op] = const0_rtx;
f3bb6135 4944 output_asm_insn (instr1, operands); /* Quick and easy output */
cce8749e
CH
4945 }
4946 else
4947 {
4948 int i;
4949 char *instr = instr1;
4950
4951 /* Note that n is never zero here (which would give no output) */
cce8749e
CH
4952 for (i = 0; i < 32; i += 2)
4953 {
4954 if (n & (3 << i))
4955 {
f3bb6135
RE
4956 operands[immed_op] = GEN_INT (n & (255 << i));
4957 output_asm_insn (instr, operands);
cce8749e
CH
4958 instr = instr2;
4959 i += 6;
4960 }
4961 }
4962 }
f3bb6135 4963 return "";
9997d19d 4964}
cce8749e
CH
4965
4966
4967/* Return the appropriate ARM instruction for the operation code.
4968 The returned result should not be overwritten. OP is the rtx of the
4969 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
4970 was shifted. */
4971
4972char *
4973arithmetic_instr (op, shift_first_arg)
4974 rtx op;
f3bb6135 4975 int shift_first_arg;
cce8749e 4976{
9997d19d 4977 switch (GET_CODE (op))
cce8749e
CH
4978 {
4979 case PLUS:
f3bb6135
RE
4980 return "add";
4981
cce8749e 4982 case MINUS:
f3bb6135
RE
4983 return shift_first_arg ? "rsb" : "sub";
4984
cce8749e 4985 case IOR:
f3bb6135
RE
4986 return "orr";
4987
cce8749e 4988 case XOR:
f3bb6135
RE
4989 return "eor";
4990
cce8749e 4991 case AND:
f3bb6135
RE
4992 return "and";
4993
cce8749e 4994 default:
f3bb6135 4995 abort ();
cce8749e 4996 }
f3bb6135 4997}
cce8749e
CH
4998
4999
/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
   shift.  Returns NULL for a shift of zero (a no-op).  */
static char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  char *mnem;
  enum rtx_code code = GET_CODE (op);

  /* Extract the shift amount: -1 flags a register-specified shift.  */
  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	*amountp &= 31;		/* Rotate is just modulo 32 */
      else if (*amountp != (*amountp & 31))
	{
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
cce8749e
CH
5077
5078
5079/* Obtain the shift from the POWER of two. */
5080
18af7313 5081static HOST_WIDE_INT
cce8749e 5082int_log2 (power)
f3bb6135 5083 HOST_WIDE_INT power;
cce8749e 5084{
f3bb6135 5085 HOST_WIDE_INT shift = 0;
cce8749e 5086
2b835d68 5087 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
5088 {
5089 if (shift > 31)
f3bb6135 5090 abort ();
cce8749e
CH
5091 shift++;
5092 }
f3bb6135
RE
5093
5094 return shift;
5095}
cce8749e 5096
cce8749e
CH
/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  The string is broken into multiple
   .ascii directives of roughly 50 output characters each; quote and
   backslash are escaped and non-printing bytes are emitted in octal.  */
void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     unsigned char *p;
     int len;
{
  int pos;
  int line_len = 1000;		/* Force a directive before the first char. */
  int line_chars = 0;

  for (pos = 0; pos < len; pos++)
    {
      int ch = p[pos];

      /* Start a fresh ".ascii" directive when the current one gets long,
	 closing the previous one if it had any contents.  */
      if (line_len > 50)
	{
	  if (line_chars)
	    fputs ("\"\n", stream);
	  fputs ("\t.ascii\t\"", stream);
	  line_len = 0;
	  line_chars = 0;
	}

      /* Quote and backslash must be escaped for the assembler.  */
      if (ch == '\"' || ch == '\\')
	{
	  putc ('\\', stream);
	  line_len++;
	}

      if (ch < ' ' || ch >= 0177)
	{
	  /* Non-printing byte: three-digit octal escape.  */
	  fprintf (stream, "\\%03o", ch);
	  line_len += 4;
	}
      else
	{
	  putc (ch, stream);
	  line_len++;
	}

      line_chars++;
    }

  fputs ("\"\n", stream);
}
cce8749e 5145\f
ff9940b0
RE
5146
5147/* Try to determine whether a pattern really clobbers the link register.
5148 This information is useful when peepholing, so that lr need not be pushed
0e84b556
RK
5149 if we combine a call followed by a return.
5150 NOTE: This code does not check for side-effect expressions in a SET_SRC:
5151 such a check should not be needed because these only update an existing
5152 value within a register; the register must still be set elsewhere within
5153 the function. */
ff9940b0
RE
5154
5155static int
5156pattern_really_clobbers_lr (x)
f3bb6135 5157 rtx x;
ff9940b0
RE
5158{
5159 int i;
5160
5161 switch (GET_CODE (x))
5162 {
5163 case SET:
5164 switch (GET_CODE (SET_DEST (x)))
5165 {
5166 case REG:
5167 return REGNO (SET_DEST (x)) == 14;
f3bb6135 5168
ff9940b0
RE
5169 case SUBREG:
5170 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
5171 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
f3bb6135 5172
0e84b556
RK
5173 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
5174 return 0;
ff9940b0 5175 abort ();
f3bb6135 5176
ff9940b0
RE
5177 default:
5178 return 0;
5179 }
f3bb6135 5180
ff9940b0
RE
5181 case PARALLEL:
5182 for (i = 0; i < XVECLEN (x, 0); i++)
5183 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
5184 return 1;
5185 return 0;
f3bb6135 5186
ff9940b0
RE
5187 case CLOBBER:
5188 switch (GET_CODE (XEXP (x, 0)))
5189 {
5190 case REG:
5191 return REGNO (XEXP (x, 0)) == 14;
f3bb6135 5192
ff9940b0
RE
5193 case SUBREG:
5194 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
5195 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
5196 abort ();
f3bb6135 5197
ff9940b0
RE
5198 default:
5199 return 0;
5200 }
f3bb6135 5201
ff9940b0
RE
5202 case UNSPEC:
5203 return 1;
f3bb6135 5204
ff9940b0
RE
5205 default:
5206 return 0;
5207 }
5208}
5209
/* Return non-zero if any insn from FIRST onwards really clobbers the link
   register, i.e. if lr must be saved in the prologue.  Calls are treated
   specially: a call immediately followed by a return (possibly with an
   intervening USE of the call's result) can be peepholed so that it does
   not clobber lr.  */
static int
function_really_clobbers_lr (first)
     rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	case NOTE:
	case CODE_LABEL:
	case JUMP_INSN:		/* Jump insns only change the PC (and conds) */
	case INLINE_HEADER:
	  break;

	case INSN:
	  if (pattern_really_clobbers_lr (PATTERN (insn)))
	    return 1;
	  break;

	case CALL_INSN:
	  /* Don't yet know how to handle those calls that are not to a
	     SYMBOL_REF */
	  if (GET_CODE (PATTERN (insn)) != PARALLEL)
	    abort ();

	  /* Indirect calls (target not a SYMBOL_REF) always clobber lr.  */
	  switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
	    {
	    case CALL:
	      if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    case SET:
	      if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
							  0, 0)), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    default:	/* Don't recognize it, be safe */
	      return 1;
	    }

	  /* A call can be made (by peepholing) not to clobber lr iff it is
	     followed by a return.  There may, however, be a use insn iff
	     we are returning the result of the call.
	     If we run off the end of the insn chain, then that means the
	     call was at the end of the function.  Unfortunately we don't
	     have a return insn for the peephole to recognize, so we
	     must reject this.  (Can this be fixed by adding our own insn?) */
	  if ((next = next_nonnote_insn (insn)) == NULL)
	    return 1;

	  /* No need to worry about lr if the call never returns */
	  if (GET_CODE (next) == BARRIER)
	    break;

	  /* Skip a USE of the call's result register, if present.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
	      && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	      && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
		  == REGNO (XEXP (PATTERN (next), 0))))
	    if ((next = next_nonnote_insn (next)) == NULL)
	      return 1;

	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == RETURN)
	    break;
	  return 1;

	default:
	  abort ();
	}
    }

  /* We have reached the end of the chain so lr was _not_ clobbered */
  return 0;
}
5291
/* Emit the assembly for a function return and return the empty string
   (the caller treats the result as an insn template).

   OPERAND is the condition operand of the return insn (used for the %d0/%D0
   condition substitutions).  REALLY_RETURN is zero when we only want to
   restore registers without transferring control (e.g. before a sibling
   call).  REVERSE nonzero means test the inverse of the condition (%D0
   instead of %d0).

   Handles: volatile (non-returning) functions by calling abort; restoring
   saved registers with a conditional ldm; Thumb interworking returns via
   bx; and the simple `mov pc, lr' (or `movs' for 26-bit APCS) case.  */
char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char instr[100];
  int reg, live_regs = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  return_used_this_function = 1;

  if (volatile_func)
    {
      rtx ops[2];
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return. */
      if (! really_return)
	return "";

      /* Otherwise, trap an attempted return by aborting. */
      ops[0] = operand;
      ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_GOT ? "abort(PLT)"
				   : "abort");
      assemble_external_libcall (ops[1]);
      output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
      return "";
    }

  if (current_function_calls_alloca && ! really_return)
    abort();

  /* Count the call-saved low registers that must be restored.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs++;

  if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    live_regs++;

  if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
    live_regs++;

  /* A frame pointer implies fp, sp, lr/pc were saved as a block.  */
  if (frame_pointer_needed)
    live_regs += 4;

  if (live_regs)
    {
      if (lr_save_eliminated || ! regs_ever_live[14])
	live_regs++;

      /* Start the conditional load-multiple: frame-pointer frames unwind
	 from fp (ea), frame-less ones pop from sp (fd, writeback).  */
      if (frame_pointer_needed)
	strcpy (instr,
		reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
      else
	strcpy (instr,
		reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");

      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg]
	    && (! call_used_regs[reg]
		|| (flag_pic && reg == PIC_OFFSET_TABLE_REGNUM)))
	  {
	    strcat (instr, "%|");
	    strcat (instr, reg_names[reg]);
	    if (--live_regs)
	      strcat (instr, ", ");
	  }

      if (frame_pointer_needed)
	{
	  /* Restore fp, sp and then either lr (when interworking or not
	     really returning) or pc directly.  */
	  strcat (instr, "%|");
	  strcat (instr, reg_names[11]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, reg_names[13]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, TARGET_THUMB_INTERWORK || (! really_return)
		  ? reg_names[14] : reg_names[15] );
	}
      else
	{
	  /* Pop the return address into ip (for a later bx), pc (direct
	     return) or lr (no return wanted).  */
	  strcat (instr, "%|");
	  if (TARGET_THUMB_INTERWORK && really_return)
	    strcat (instr, reg_names[12]);
	  else
	    strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      /* `}^' also restores PSR bits on a 26-bit return to pc.  */
      strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);

      if (TARGET_THUMB_INTERWORK && really_return)
	{
	  strcpy (instr, "bx%?");
	  strcat (instr, reverse ? "%D0" : "%d0");
	  strcat (instr, "\t%|");
	  strcat (instr, frame_pointer_needed ? "lr" : "ip");

	  output_asm_insn (instr, & operand);
	}
    }
  else if (really_return)
    {
      /* Nothing to restore: a bare conditional return.  */
      if (TARGET_THUMB_INTERWORK)
	sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
      else
	sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
		 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");

      output_asm_insn (instr, & operand);
    }

  return "";
}
5407
e82ea128
DE
5408/* Return nonzero if optimizing and the current function is volatile.
5409 Such functions never return, and many memory cycles can be saved
5410 by not storing register values that will never be needed again.
5411 This optimization was added to speed up context switching in a
5412 kernel application. */
a0b2ce4c 5413
e2c671ba
RE
5414int
5415arm_volatile_func ()
5416{
5417 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
5418}
5419
ef179a26
NC
5420/* Write the function name into the code section, directly preceding
5421 the function prologue.
5422
5423 Code will be output similar to this:
5424 t0
5425 .ascii "arm_poke_function_name", 0
5426 .align
5427 t1
5428 .word 0xff000000 + (t1 - t0)
5429 arm_poke_function_name
5430 mov ip, sp
5431 stmfd sp!, {fp, ip, lr, pc}
5432 sub fp, ip, #4
5433
5434 When performing a stack backtrace, code can inspect the value
5435 of 'pc' stored at 'fp' + 0. If the trace function then looks
5436 at location pc - 12 and the top 8 bits are set, then we know
5437 that there is a function name embedded immediately preceding this
5438 location and has length ((pc[-3]) & 0xff000000).
5439
5440 We assume that pc is declared as a pointer to an unsigned long.
5441
5442 It is of no benefit to output the function name if we are assembling
5443 a leaf function. These function types will not contain a stack
5444 backtrace structure, therefore it is not possible to determine the
5445 function name. */
5446
5447void
5448arm_poke_function_name (stream, name)
5449 FILE * stream;
5450 char * name;
5451{
5452 unsigned long alignlength;
5453 unsigned long length;
5454 rtx x;
5455
5456 length = strlen (name);
5457 alignlength = (length + 1) + 3 & ~3;
5458
5459 ASM_OUTPUT_ASCII (stream, name, length + 1);
5460 ASM_OUTPUT_ALIGN (stream, 2);
5461 x = GEN_INT (0xff000000UL + alignlength);
5462 ASM_OUTPUT_INT (stream, x);
5463}
5464
ff9940b0
RE
5465/* The amount of stack adjustment that happens here, in output_return and in
5466 output_epilogue must be exactly the same as was calculated during reload,
5467 or things will point to the wrong place. The only time we can safely
5468 ignore this constraint is when a function has no arguments on the stack,
5469 no stack frame requirement and no live registers execpt for `lr'. If we
5470 can guarantee that by making all function calls into tail calls and that
5471 lr is not clobbered in any other way, then there is no need to push lr
5472 onto the stack. */
5473
/* Emit the textual part of the function prologue on F: assembler comments
   describing the frame layout, and decide (as a side effect) whether the
   lr save can be eliminated.  FRAME_SIZE is the local frame size in bytes.
   Sets the file-level flags return_used_this_function, lr_save_eliminated
   and (for AOF/PIC) emits the PIC register copy.  The actual register
   pushes are emitted elsewhere (see arm_expand_prologue).  */
void
output_func_prologue (f, frame_size)
     FILE *f;
     int frame_size;
{
  int reg, live_regs_mask = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  /* Nonzero if we must stuff some register arguments onto the stack as if
     they were passed there.  */
  int store_arg_regs = 0;

  if (arm_ccfsm_state || arm_target_insn)
    abort ();					/* Sanity check */

  /* Naked functions get no compiler-generated prologue at all.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  return_used_this_function = 0;
  lr_save_eliminated = 0;

  fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
	   ASM_COMMENT_START, current_function_args_size,
	   current_function_pretend_args_size, frame_size);
  fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
	   ASM_COMMENT_START, frame_pointer_needed,
	   current_function_anonymous_args);

  if (volatile_func)
    fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Build the mask of call-saved low registers that are live.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs_mask |= (1 << reg);

  if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);

  if (frame_pointer_needed)
    live_regs_mask |= 0xD800;		/* fp, ip, lr, pc */
  else if (regs_ever_live[14])
    {
      /* lr only needs saving if the function takes stack arguments or
	 some insn really clobbers it.  */
      if (! current_function_args_size
	  && ! function_really_clobbers_lr (get_insns ()))
	lr_save_eliminated = 1;
      else
	live_regs_mask |= 0x4000;
    }

  if (live_regs_mask)
    {
      /* if a di mode load/store multiple is used, and the base register
	 is r3, then r4 can become an ever live register without lr
	 doing so,  in this case we need to push lr as well, or we
	 will fail to get a proper return. */

      live_regs_mask |= 0x4000;
      lr_save_eliminated = 0;

    }

  if (lr_save_eliminated)
    fprintf (f,"\t%s I don't think this function clobbers lr\n",
	     ASM_COMMENT_START);

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
	     reg_names[PIC_OFFSET_TABLE_REGNUM]);
#endif
}
cce8749e
CH
5549
5550
5551void
f3bb6135 5552output_func_epilogue (f, frame_size)
cce8749e
CH
5553 FILE *f;
5554 int frame_size;
5555{
b111229a
RE
5556 int reg, live_regs_mask = 0;
5557 /* If we need this then it will always be at least this much */
5558 int floats_offset = 12;
cce8749e 5559 rtx operands[3];
e2c671ba
RE
5560 int volatile_func = (optimize > 0
5561 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 5562
b36ba79f 5563 if (use_return_insn (FALSE) && return_used_this_function)
cce8749e 5564 {
56636818 5565 if ((frame_size + current_function_outgoing_args_size) != 0
f5a1b0d2 5566 && !(frame_pointer_needed && TARGET_APCS))
56636818 5567 abort ();
f3bb6135 5568 goto epilogue_done;
cce8749e 5569 }
cce8749e 5570
31fdb4d5
DE
5571 /* Naked functions don't have epilogues. */
5572 if (arm_naked_function_p (current_function_decl))
5573 goto epilogue_done;
5574
e2c671ba 5575 /* A volatile function should never return. Call abort. */
c11145f6 5576 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 5577 {
86efdc8e
PB
5578 rtx op;
5579 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_GOT ? "abort(PLT)" : "abort");
2b835d68 5580 assemble_external_libcall (op);
e2c671ba 5581 output_asm_insn ("bl\t%a0", &op);
e2c671ba
RE
5582 goto epilogue_done;
5583 }
5584
f3bb6135
RE
5585 for (reg = 0; reg <= 10; reg++)
5586 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 5587 {
ff9940b0
RE
5588 live_regs_mask |= (1 << reg);
5589 floats_offset += 4;
cce8749e
CH
5590 }
5591
6ed30148
RE
5592 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5593 {
5594 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
5595 floats_offset += 4;
5596 }
5597
ff9940b0 5598 if (frame_pointer_needed)
cce8749e 5599 {
b111229a
RE
5600 if (arm_fpu_arch == FP_SOFT2)
5601 {
5602 for (reg = 23; reg > 15; reg--)
5603 if (regs_ever_live[reg] && ! call_used_regs[reg])
5604 {
5605 floats_offset += 12;
5606 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
5607 reg_names[reg], REGISTER_PREFIX, floats_offset);
5608 }
5609 }
5610 else
5611 {
5612 int start_reg = 23;
5613
5614 for (reg = 23; reg > 15; reg--)
5615 {
5616 if (regs_ever_live[reg] && ! call_used_regs[reg])
5617 {
5618 floats_offset += 12;
5619 /* We can't unstack more than four registers at once */
5620 if (start_reg - reg == 3)
5621 {
5622 fprintf (f, "\tlfm\t%s%s, 4, [%sfp, #-%d]\n",
5623 REGISTER_PREFIX, reg_names[reg],
5624 REGISTER_PREFIX, floats_offset);
5625 start_reg = reg - 1;
5626 }
5627 }
5628 else
5629 {
5630 if (reg != start_reg)
5631 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
5632 REGISTER_PREFIX, reg_names[reg + 1],
5633 start_reg - reg, REGISTER_PREFIX, floats_offset);
ff9940b0 5634
b111229a
RE
5635 start_reg = reg - 1;
5636 }
5637 }
5638
5639 /* Just in case the last register checked also needs unstacking. */
5640 if (reg != start_reg)
5641 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
5642 REGISTER_PREFIX, reg_names[reg + 1],
5643 start_reg - reg, REGISTER_PREFIX, floats_offset);
5644 }
da6558fd 5645
b111229a
RE
5646 if (TARGET_THUMB_INTERWORK)
5647 {
5648 live_regs_mask |= 0x6800;
5649 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask, FALSE);
5650 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5651 }
5652 else
5653 {
5654 live_regs_mask |= 0xA800;
5655 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
5656 TARGET_APCS_32 ? FALSE : TRUE);
5657 }
cce8749e
CH
5658 }
5659 else
5660 {
d2288d8d 5661 /* Restore stack pointer if necessary. */
56636818 5662 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
5663 {
5664 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
5665 operands[2] = GEN_INT (frame_size
5666 + current_function_outgoing_args_size);
d2288d8d
TG
5667 output_add_immediate (operands);
5668 }
5669
b111229a
RE
5670 if (arm_fpu_arch == FP_SOFT2)
5671 {
5672 for (reg = 16; reg < 24; reg++)
5673 if (regs_ever_live[reg] && ! call_used_regs[reg])
5674 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
5675 reg_names[reg], REGISTER_PREFIX);
5676 }
5677 else
5678 {
5679 int start_reg = 16;
5680
5681 for (reg = 16; reg < 24; reg++)
5682 {
5683 if (regs_ever_live[reg] && ! call_used_regs[reg])
5684 {
5685 if (reg - start_reg == 3)
5686 {
5687 fprintf (f, "\tlfmfd\t%s%s, 4, [%ssp]!\n",
5688 REGISTER_PREFIX, reg_names[start_reg],
5689 REGISTER_PREFIX);
5690 start_reg = reg + 1;
5691 }
5692 }
5693 else
5694 {
5695 if (reg != start_reg)
5696 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5697 REGISTER_PREFIX, reg_names[start_reg],
5698 reg - start_reg, REGISTER_PREFIX);
5699
5700 start_reg = reg + 1;
5701 }
5702 }
5703
5704 /* Just in case the last register checked also needs unstacking. */
5705 if (reg != start_reg)
5706 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5707 REGISTER_PREFIX, reg_names[start_reg],
5708 reg - start_reg, REGISTER_PREFIX);
5709 }
5710
cce8749e
CH
5711 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
5712 {
b111229a
RE
5713 if (TARGET_THUMB_INTERWORK)
5714 {
5715 if (! lr_save_eliminated)
f5a1b0d2
NC
5716 live_regs_mask |= 0x4000;
5717
5718 if (live_regs_mask != 0)
5719 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
b111229a
RE
5720
5721 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5722 }
5723 else if (lr_save_eliminated)
32de079a
RE
5724 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5725 : "\tmovs\t%spc, %slr\n"),
5726 REGISTER_PREFIX, REGISTER_PREFIX, f);
5727 else
5728 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
5729 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
5730 }
5731 else
5732 {
ff9940b0 5733 if (live_regs_mask || regs_ever_live[14])
cce8749e 5734 {
32de079a
RE
5735 /* Restore the integer regs, and the return address into lr */
5736 if (! lr_save_eliminated)
5737 live_regs_mask |= 0x4000;
5738
5739 if (live_regs_mask != 0)
32de079a 5740 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
cce8749e 5741 }
b111229a 5742
cce8749e
CH
5743 if (current_function_pretend_args_size)
5744 {
32de079a 5745 /* Unwind the pre-pushed regs */
cce8749e 5746 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 5747 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
5748 output_add_immediate (operands);
5749 }
32de079a 5750 /* And finally, go home */
b111229a
RE
5751 if (TARGET_THUMB_INTERWORK)
5752 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
25b1c156
NC
5753 else if (TARGET_APCS_32)
5754 fprintf (f, "\tmov\t%spc, %slr\n", REGISTER_PREFIX, REGISTER_PREFIX );
b111229a 5755 else
25b1c156 5756 fprintf (f, "\tmovs\t%spc, %slr\n", REGISTER_PREFIX, REGISTER_PREFIX );
cce8749e
CH
5757 }
5758 }
f3bb6135 5759
32de079a 5760epilogue_done:
f3bb6135 5761
4b632bf1 5762 /* Reset the ARM-specific per-function variables. */
cce8749e 5763 current_function_anonymous_args = 0;
4b632bf1 5764 after_arm_reorg = 0;
f3bb6135 5765}
e2c671ba
RE
5766
/* Emit an insn pushing the registers in MASK (bit i set => push register i)
   onto the stack as a single store-multiple.  The insn is a PARALLEL whose
   first element is a SET of a pre-decremented stack memory from an UNSPEC
   wrapping the first pushed register; the remaining registers appear as
   USEs.  Aborts if MASK selects no registers.  */
static void
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int i, j;
  rtx par;

  /* Count the registers to be pushed.  */
  for (i = 0; i < 16; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));

  /* Element 0: the store of the lowest-numbered register, expressed as a
     SET of (mem (pre_dec sp)) from an UNSPEC (code 2).  */
  for (i = 0; i < 16; i++)
    {
      if (mask & (1 << i))
	{
	  XVECEXP (par, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   gen_rtx_MEM (BLKmode,
					gen_rtx_PRE_DEC (BLKmode,
							 stack_pointer_rtx)),
			   gen_rtx_UNSPEC (BLKmode,
					   gen_rtvec (1,
						      gen_rtx_REG (SImode, i)),
					   2));
	  break;
	}
    }

  /* Remaining elements: one USE per additional register, continuing the
     scan from where the first loop stopped.  */
  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  XVECEXP (par, 0, j)
	    = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, i));
	  j++;
	}
    }

  emit_insn (par);
}
5813
/* Emit an insn storing COUNT consecutive floating-point registers,
   starting at BASE_REG, to pre-decremented stack memory (an sfm-style
   store-multiple).  Same PARALLEL/UNSPEC structure as
   emit_multi_reg_push, but with XFmode registers.  */
static void
emit_sfm (base_reg, count)
     int base_reg;
     int count;
{
  rtx par;
  int i;

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  /* Element 0 carries the store of the first register.  */
  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (BLKmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   gen_rtx_UNSPEC (BLKmode,
				   gen_rtvec (1, gen_rtx_REG (XFmode,
							      base_reg++)),
				   2));
  /* The remaining registers are listed as USEs.  */
  for (i = 1; i < count; i++)
    XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode,
				       gen_rtx_REG (XFmode, base_reg++));

  emit_insn (par);
}
5838
/* Generate the RTL for the function prologue: save the frame pointer
   block, push pretend (anonymous) arguments, push the live call-saved
   integer registers plus lr, save FP registers, establish the hard frame
   pointer and allocate the local frame.  Volatile (non-returning)
   functions skip all register saves.  */
void
arm_expand_prologue ()
{
  int reg;
  rtx amount = GEN_INT (-(get_frame_size ()
			  + current_function_outgoing_args_size));
  int live_regs_mask = 0;
  int store_arg_regs = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  /* Naked functions don't have prologues.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* A volatile function never returns, so nothing needs saving.  */
  if (! volatile_func)
    {
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  live_regs_mask |= 1 << reg;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;

      if (regs_ever_live[14])
	live_regs_mask |= 0x4000;
    }

  if (frame_pointer_needed)
    {
      /* Save sp in ip (r12); the frame pointer is set from it below,
	 after the pushes.  0xD800 = fp, ip, lr, pc.  */
      live_regs_mask |= 0xD800;
      emit_insn (gen_movsi (gen_rtx_REG (SImode, 12),
			    stack_pointer_rtx));
    }

  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
	/* Push the tail of r0-r3 that holds the anonymous args.  */
	emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
			     & 0xf);
      else
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (-current_function_pretend_args_size)));
    }

  if (live_regs_mask)
    {
      /* If we have to push any regs, then we must push lr as well, or
	 we won't get a proper return.  */
      live_regs_mask |= 0x4000;
      emit_multi_reg_push (live_regs_mask);
    }

  /* For now the integer regs are still pushed in output_func_epilogue ().  */

  if (! volatile_func)
    {
      /* Save the FP registers, either one at a time (FP_SOFT2) or in
	 runs of up to four with sfm.  */
      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = 23; reg > 15; reg--)
	    if (regs_ever_live[reg] && ! call_used_regs[reg])
	      emit_insn (gen_rtx_SET
			 (VOIDmode,
			  gen_rtx_MEM (XFmode,
				       gen_rtx_PRE_DEC (XFmode,
							stack_pointer_rtx)),
			  gen_rtx_REG (XFmode, reg)));
	}
      else
	{
	  int start_reg = 23;

	  for (reg = 23; reg > 15; reg--)
	    {
	      if (regs_ever_live[reg] && ! call_used_regs[reg])
		{
		  /* At most four registers per sfm.  */
		  if (start_reg - reg == 3)
		    {
		      emit_sfm (reg, 4);
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (start_reg != reg)
		    emit_sfm (reg + 1, start_reg - reg);
		  start_reg = reg - 1;
		}
	    }

	  /* Flush any pending run.  */
	  if (start_reg != reg)
	    emit_sfm (reg + 1, start_reg - reg);
	}
    }

  if (frame_pointer_needed)
    /* fp = ip - (4 + pretend size): ip still holds the pre-push sp.  */
    emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx_REG (SImode, 12),
			   (GEN_INT
			    (-(4 + current_function_pretend_args_size)))));

  if (amount != const0_rtx)
    {
      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
      /* Clobber the stack memory so the allocation is not moved past
	 stores into the frame.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  Similarly if the user has requested no
     scheduling in the prolog.  */
  if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
5955
cce8749e 5956\f
9997d19d
RE
5957/* If CODE is 'd', then the X is a condition operand and the instruction
5958 should only be executed if the condition is true.
ddd5a7c1 5959 if CODE is 'D', then the X is a condition operand and the instruction
9997d19d
RE
5960 should only be executed if the condition is false: however, if the mode
5961 of the comparison is CCFPEmode, then always execute the instruction -- we
5962 do this because in these circumstances !GE does not necessarily imply LT;
5963 in these cases the instruction pattern will take care to make sure that
5964 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 5965 doing this instruction unconditionally.
9997d19d
RE
5966 If CODE is 'N' then X is a floating point operand that must be negated
5967 before output.
5968 If CODE is 'B' then output a bitwise inverted value of X (a const int).
5969 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
5970
5971void
5972arm_print_operand (stream, x, code)
5973 FILE *stream;
5974 rtx x;
5975 int code;
5976{
5977 switch (code)
5978 {
5979 case '@':
f3139301 5980 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
5981 return;
5982
5983 case '|':
f3139301 5984 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
5985 return;
5986
5987 case '?':
5988 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
5989 fputs (arm_condition_codes[arm_current_cc], stream);
5990 return;
5991
5992 case 'N':
5993 {
5994 REAL_VALUE_TYPE r;
5995 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5996 r = REAL_VALUE_NEGATE (r);
5997 fprintf (stream, "%s", fp_const_from_val (&r));
5998 }
5999 return;
6000
6001 case 'B':
6002 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
6003 {
6004 HOST_WIDE_INT val;
6005 val = ARM_SIGN_EXTEND (~ INTVAL (x));
36ba9cb8 6006 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6007 }
9997d19d
RE
6008 else
6009 {
6010 putc ('~', stream);
6011 output_addr_const (stream, x);
6012 }
6013 return;
6014
6015 case 'i':
6016 fprintf (stream, "%s", arithmetic_instr (x, 1));
6017 return;
6018
6019 case 'I':
6020 fprintf (stream, "%s", arithmetic_instr (x, 0));
6021 return;
6022
6023 case 'S':
6024 {
6025 HOST_WIDE_INT val;
4bc74ece 6026 char * shift = shift_op (x, & val);
9997d19d 6027
e2c671ba
RE
6028 if (shift)
6029 {
4bc74ece 6030 fprintf (stream, ", %s ", shift_op (x, & val));
e2c671ba
RE
6031 if (val == -1)
6032 arm_print_operand (stream, XEXP (x, 1), 0);
6033 else
4bc74ece
NC
6034 {
6035 fputc ('#', stream);
36ba9cb8 6036 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6037 }
e2c671ba 6038 }
9997d19d
RE
6039 }
6040 return;
6041
c1c2bc04
RE
6042 case 'Q':
6043 if (REGNO (x) > 15)
6044 abort ();
6045 fputs (REGISTER_PREFIX, stream);
6046 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
6047 return;
6048
9997d19d
RE
6049 case 'R':
6050 if (REGNO (x) > 15)
6051 abort ();
f3139301 6052 fputs (REGISTER_PREFIX, stream);
c1c2bc04 6053 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
9997d19d
RE
6054 return;
6055
6056 case 'm':
f3139301 6057 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
6058 if (GET_CODE (XEXP (x, 0)) == REG)
6059 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
6060 else
6061 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
6062 return;
6063
6064 case 'M':
f3139301
DE
6065 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
6066 REGISTER_PREFIX, reg_names[REGNO (x) - 1
1d5473cb
RE
6067 + ((GET_MODE_SIZE (GET_MODE (x))
6068 + GET_MODE_SIZE (SImode) - 1)
6069 / GET_MODE_SIZE (SImode))]);
9997d19d
RE
6070 return;
6071
6072 case 'd':
6073 if (x)
6074 fputs (arm_condition_codes[get_arm_condition_code (x)],
6075 stream);
6076 return;
6077
6078 case 'D':
84ed5e79 6079 if (x)
9997d19d
RE
6080 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
6081 (get_arm_condition_code (x))],
6082 stream);
6083 return;
6084
6085 default:
6086 if (x == 0)
6087 abort ();
6088
6089 if (GET_CODE (x) == REG)
1d5473cb 6090 {
f3139301 6091 fputs (REGISTER_PREFIX, stream);
1d5473cb
RE
6092 fputs (reg_names[REGNO (x)], stream);
6093 }
9997d19d
RE
6094 else if (GET_CODE (x) == MEM)
6095 {
6096 output_memory_reference_mode = GET_MODE (x);
6097 output_address (XEXP (x, 0));
6098 }
6099 else if (GET_CODE (x) == CONST_DOUBLE)
6100 fprintf (stream, "#%s", fp_immediate_constant (x));
6101 else if (GET_CODE (x) == NEG)
6102 abort (); /* This should never happen now. */
6103 else
6104 {
6105 fputc ('#', stream);
6106 output_addr_const (stream, x);
6107 }
6108 }
6109}
6110
cce8749e
CH
6111\f
6112/* A finite state machine takes care of noticing whether or not instructions
6113 can be conditionally executed, and thus decrease execution time and code
6114 size by deleting branch instructions. The fsm is controlled by
6115 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
6116
6117/* The state of the fsm controlling condition codes are:
6118 0: normal, do nothing special
6119 1: make ASM_OUTPUT_OPCODE not output this instruction
6120 2: make ASM_OUTPUT_OPCODE not output this instruction
6121 3: make instructions conditional
6122 4: make instructions conditional
6123
6124 State transitions (state->state by whom under condition):
6125 0 -> 1 final_prescan_insn if the `target' is a label
6126 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
6127 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
6128 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
6129 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
6130 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
6131 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
6132 (the target insn is arm_target_insn).
6133
ff9940b0
RE
6134 If the jump clobbers the conditions then we use states 2 and 4.
6135
6136 A similar thing can be done with conditional return insns.
6137
cce8749e
CH
6138 XXX In case the `target' is an unconditional branch, this conditionalising
6139 of the instructions always reduces code size, but not always execution
6140 time. But then, I want to reduce the code size to somewhere near what
6141 /bin/cc produces. */
6142
cce8749e
CH
6143/* Returns the index of the ARM condition code string in
6144 `arm_condition_codes'. COMPARISON should be an rtx like
6145 `(eq (...) (...))'. */
6146
/* Map COMPARISON (an rtx like `(eq (...) (...))') to the index of the ARM
   condition-code string in `arm_condition_codes', dispatching on the
   machine mode of the comparison's CC register.  Aborts on comparison
   codes that are invalid for the given mode.  */
static enum arm_cond_code
get_arm_condition_code (comparison)
     rtx comparison;
{
  enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
  register int code;
  register enum rtx_code comp_code = GET_CODE (comparison);

  /* If the operand is not itself a CC register, derive the CC mode the
     comparison would use.  */
  if (GET_MODE_CLASS (mode) != MODE_CC)
    mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
			   XEXP (comparison, 1));

  switch (mode)
    {
      /* Dominance modes: the condition encoded in the mode is returned
	 for NE, its inverse for EQ.  Note CC_DLTUmode deliberately falls
	 through into the `dominance' label.  */
    case CC_DNEmode: code = ARM_NE; goto dominance;
    case CC_DEQmode: code = ARM_EQ; goto dominance;
    case CC_DGEmode: code = ARM_GE; goto dominance;
    case CC_DGTmode: code = ARM_GT; goto dominance;
    case CC_DLEmode: code = ARM_LE; goto dominance;
    case CC_DLTmode: code = ARM_LT; goto dominance;
    case CC_DGEUmode: code = ARM_CS; goto dominance;
    case CC_DGTUmode: code = ARM_HI; goto dominance;
    case CC_DLEUmode: code = ARM_LS; goto dominance;
    case CC_DLTUmode: code = ARM_CC;

    dominance:
      if (comp_code != EQ && comp_code != NE)
	abort ();

      if (comp_code == EQ)
	return ARM_INVERSE_CONDITION_CODE (code);
      return code;

    case CC_NOOVmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_PL;
	case LT: return ARM_MI;
	default: abort ();
	}

    case CC_Zmode:
    case CCFPmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	default: abort ();
	}

    case CCFPEmode:
      switch (comp_code)
	{
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LS;
	case LT: return ARM_MI;
	default: abort ();
	}

      /* CC_SWPmode: the comparison operands were swapped, so each
	 condition maps to its operand-swapped counterpart.  */
    case CC_SWPmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_LE;
	case GT: return ARM_LT;
	case LE: return ARM_GE;
	case LT: return ARM_GT;
	case GEU: return ARM_LS;
	case GTU: return ARM_CC;
	case LEU: return ARM_CS;
	case LTU: return ARM_HI;
	default: abort ();
	}

    case CC_Cmode:
      switch (comp_code)
	{
	case LTU: return ARM_CS;
	case GEU: return ARM_CC;
	default: abort ();
	}

    case CCmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LE;
	case LT: return ARM_LT;
	case GEU: return ARM_CS;
	case GTU: return ARM_HI;
	case LEU: return ARM_LS;
	case LTU: return ARM_CC;
	default: abort ();
	}

    default: abort ();
    }

  abort ();
}
cce8749e
CH
6254
6255
/* Called by final just before INSN is output, to drive the conditional
   execution state machine (ARM_CCFSM_STATE) that replaces a short
   forward branch by conditionally executing the insns it would skip.

   As used below, the states are:
     0 - normal; scan this jump to see whether it skips few enough
	 suitable insns to be converted.
     1 - conversion found via the target CODE_LABEL; ARM_TARGET_LABEL
	 records it and the skipped insns will be output with the
	 inverted branch condition.
     2 - conversion found via a barrier/return or matching jump;
	 ARM_TARGET_INSN records where conditionalisation ends.
     3 - a previous conversion has just finished; check whether the
	 trick can be repeated on this insn.
     4 - conditional output in progress; reset to state 0 when
	 ARM_TARGET_INSN is reached.
   NOTE(review): states 1/2 are consumed by the output routines outside
   this window -- the exact division of labour is inferred from the
   assignments below; confirm against the rest of arm.c.  */
void
arm_final_prescan_insn (insn)
     rtx insn;
{
  /* BODY will hold the body of INSN. */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick (state 3), and things
     need to be reversed if it appears to fail. */
  int reverse = 0;

  /* Nonzero means that the conditions if a branch is taken are
     clobbered, even if the rtl suggests otherwise.  It also means that
     we have to grub around within the jump expression to find out what
     the conditions are when the jump isn't taken. */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one. */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true. */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0. */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off. */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier? */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first. */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
	  || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  /* Only conditional jumps (pc = if_then_else ...) -- or a state-3
     repeat attempt -- are candidates for conversion. */
  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  /* The code below is wrong for these, and I haven't time to
	     fix it now.  So we just do the safe thing and return.  This
	     whole function needs re-writing anyway.  */
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to. */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped++ < max_insns_skipped;)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else. */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier. */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* If using 32-bit addresses the cc is not preserved over
		 calls */
	      if (TARGET_APCS_32)
		{
		  /* Succeed if the following insn is the target label,
		     or if the following two insns are a barrier and
		     the target label.  */
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && GET_CODE (this_insn) == BARRIER)
		    this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && this_insn == label
		      && insns_skipped < max_insns_skipped)
		    {
		      if (jump_clobbers)
			{
			  arm_ccfsm_state = 2;
			  this_insn = next_nonnote_insn (this_insn);
			}
		      else
			arm_ccfsm_state = 1;
		      succeed = TRUE;
		    }
		  else
		    fail = TRUE;
		}
	      break;

	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* XXX Probably, the tests for SET and the PC are unnecessary. */

	      scanbody = PATTERN (this_insn);
	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      /* Fail if a conditional return is undesirable (eg on a
		 StrongARM), but still allow this if optimizing for size.  */
	      else if (GET_CODE (scanbody) == RETURN
		       && ! use_return_insn (TRUE)
		       && ! optimize_size)
		fail = TRUE;
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail. */
	      scanbody = PATTERN (this_insn);
	      if (! (GET_CODE (scanbody) == SET
		     || GET_CODE (scanbody) == PARALLEL)
		  || get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}
      if (succeed)
	{
	  /* Record where conditionalisation ends: either the target
	     label (state 1 / repeat) or a specific insn (state 2 /
	     return-seeking), skipping over any trailing USEs.  */
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh, dear! we ran off the end.. give up */
		  recog (PATTERN (insn), insn, NULL_PTR);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
		}
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();
	  /* Work out the condition to conditionalise the skipped insns
	     with.  NOTE(review): the XEXP chain below digs beneath the
	     IF_THEN_ELSE condition for the CONDS_JUMP_CLOB case --
	     structure assumed from the jump patterns; verify against
	     the machine description.  */
	  if (jump_clobbers)
	    {
	      if (reverse)
		abort ();
	      arm_current_cc =
		  get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							    0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was.  */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));
	    }

	  if (reverse || then_not_else)
	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	}
      /* Restore recog_operand (getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call; since the insn has been recognized already we
	 call recog direct). */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}
cce8749e 6589
2b835d68
RE
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler. */

/* SYMBOL_REF for the "x$adcons" label that anchors the pool of PIC
   address constants; created lazily by aof_pic_entry and kept for the
   whole compilation.  */
rtx aof_pic_label = NULL_RTX;

/* One entry in the list of symbols needing a slot in the PIC
   address-constant pool; each entry occupies 4 bytes in the pool.  */
struct pic_chain
{
  struct pic_chain *next;	/* Next entry, or NULL at the end.  */
  char *symname;		/* Symbol name; compared by pointer below,
				   so names are assumed interned.  */
};

/* Head of the (initially empty) chain of PIC pool entries.  */
static struct pic_chain *aof_pic_chain = NULL;
6601
6602rtx
6603aof_pic_entry (x)
6604 rtx x;
6605{
6606 struct pic_chain **chainp;
6607 int offset;
6608
6609 if (aof_pic_label == NULL_RTX)
6610 {
6611 /* This needs to persist throughout the compilation. */
6612 end_temporary_allocation ();
43cffd11 6613 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
6614 resume_temporary_allocation ();
6615 }
6616
6617 for (offset = 0, chainp = &aof_pic_chain; *chainp;
6618 offset += 4, chainp = &(*chainp)->next)
6619 if ((*chainp)->symname == XSTR (x, 0))
6620 return plus_constant (aof_pic_label, offset);
6621
6622 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
6623 (*chainp)->next = NULL;
6624 (*chainp)->symname = XSTR (x, 0);
6625 return plus_constant (aof_pic_label, offset);
6626}
6627
6628void
6629aof_dump_pic_table (f)
6630 FILE *f;
6631{
6632 struct pic_chain *chain;
6633
6634 if (aof_pic_chain == NULL)
6635 return;
6636
6637 fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
6638 reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
6639 reg_names[PIC_OFFSET_TABLE_REGNUM]);
6640 fputs ("|x$adcons|\n", f);
6641
6642 for (chain = aof_pic_chain; chain; chain = chain->next)
6643 {
6644 fputs ("\tDCD\t", f);
6645 assemble_name (f, chain->symname);
6646 fputs ("\n", f);
6647 }
6648}
6649
2b835d68
RE
6650int arm_text_section_count = 1;
6651
6652char *
84ed5e79 6653aof_text_section ()
2b835d68
RE
6654{
6655 static char buf[100];
2b835d68
RE
6656 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
6657 arm_text_section_count++);
6658 if (flag_pic)
6659 strcat (buf, ", PIC, REENTRANT");
6660 return buf;
6661}
6662
/* Counter giving each data AREA a unique number.  */
static int arm_data_section_count = 1;

/* Return the AREA directive that opens a new, uniquely numbered data
   section.  The string lives in a static buffer overwritten on each
   call.  */
char *
aof_data_section ()
{
  static char buf[100];

  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count);
  arm_data_section_count++;
  return buf;
}
6672
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

/* One pending IMPORT declaration.  */
struct import
{
  struct import *next;	/* Next pending import, or NULL.  */
  char *name;		/* Symbol name; compared by pointer, so names
			   are assumed interned.  */
};

/* Head of the list of imports not yet known to be defined locally.  */
static struct import *imports_list = NULL;
6690
6691void
6692aof_add_import (name)
6693 char *name;
6694{
6695 struct import *new;
6696
6697 for (new = imports_list; new; new = new->next)
6698 if (new->name == name)
6699 return;
6700
6701 new = (struct import *) xmalloc (sizeof (struct import));
6702 new->next = imports_list;
6703 imports_list = new;
6704 new->name = name;
6705}
6706
6707void
6708aof_delete_import (name)
6709 char *name;
6710{
6711 struct import **old;
6712
6713 for (old = &imports_list; *old; old = & (*old)->next)
6714 {
6715 if ((*old)->name == name)
6716 {
6717 *old = (*old)->next;
6718 return;
6719 }
6720 }
6721}
6722
6723int arm_main_function = 0;
6724
6725void
6726aof_dump_imports (f)
6727 FILE *f;
6728{
6729 /* The AOF assembler needs this to cause the startup code to be extracted
6730 from the library. Brining in __main causes the whole thing to work
6731 automagically. */
6732 if (arm_main_function)
6733 {
6734 text_section ();
6735 fputs ("\tIMPORT __main\n", f);
6736 fputs ("\tDCD __main\n", f);
6737 }
6738
6739 /* Now dump the remaining imports. */
6740 while (imports_list)
6741 {
6742 fprintf (f, "\tIMPORT\t");
6743 assemble_name (f, imports_list->name);
6744 fputc ('\n', f);
6745 imports_list = imports_list->next;
6746 }
6747}
6748#endif /* AOF_ASSEMBLER */
This page took 1.329496 seconds and 5 git commands to generate.