/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 1999 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;
/* Some function declarations.  */

static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static char * output_multi_immediate PROTO ((rtx *, char *, char *, int,
                                             HOST_WIDE_INT));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
                                    HOST_WIDE_INT, rtx, rtx, int, int));
static int arm_naked_function_p PROTO ((tree));
static void init_fpa_table PROTO ((void));
static enum machine_mode select_dominance_cc_mode PROTO ((rtx, rtx,
                                                          HOST_WIDE_INT));
static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode, int *));
static void dump_table PROTO ((rtx));
static int fixit PROTO ((rtx, enum machine_mode, int));
static rtx find_barrier PROTO ((rtx, int));
static int broken_move PROTO ((rtx));
static char * fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
static int eliminate_lr2ip PROTO ((rtx *));
static char * shift_op PROTO ((rtx, HOST_WIDE_INT *));
static int pattern_really_clobbers_lr PROTO ((rtx));
static int function_really_clobbers_lr PROTO ((rtx));
static void emit_multi_reg_push PROTO ((int));
static void emit_sfm PROTO ((int, int));
static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
static int const_ok_for_op RTX_CODE_PROTO ((Hint, Rcode));

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = 32; /* Used to be 8 */

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    0x01            /* Has external co-processor bus */
#define FL_FAST_MULT  0x02            /* Fast multiply */
#define FL_MODE26     0x04            /* 26-bit mode support */
#define FL_MODE32     0x08            /* 32-bit mode support */
#define FL_ARCH4      0x10            /* Architecture rel 4 */
#define FL_THUMB      0x20            /* Thumb aware */
#define FL_LDSCHED    0x40            /* Load scheduling necessary */
#define FL_STRONG     0x80            /* StrongARM */

/* The bits in this mask specify which instructions we are allowed to
   generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options
   should be used.  Note - there is an overlap with FL_FAST_MULT.  For
   some hardware we want to be able to generate the multiply instructions,
   but to tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

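/* For example, with -mcpu=arm7m the table below yields
   insn_flags = FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT;
   adding -mtune=arm2 would leave tune_flags = FL_CO_PROC | FL_MODE26,
   so the multiply instructions are still generated, but the scheduler
   costs them as if no fast multiplier were present.  */
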
/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
int arm_pic_register = 9;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
static int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

static enum arm_cond_code get_arm_condition_code ();

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D (and I), but those
     don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't really have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                       FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",                  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  {"armv2",     FL_CO_PROC | FL_MODE26 },
  {"armv2a",    FL_CO_PROC | FL_MODE26 },
  {"armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  {"armv4t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {NULL, 0}
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};

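/* As an illustration, a command line of "-mcpu=arm710 -mtune=strongarm"
   would leave arm_select[0].string pointing at "arm710" and
   arm_select[2].string at "strongarm"; arm_override_options below then
   looks each name up in the processor table attached to that entry.  */
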
/* Return the number of bits set in `value'.  */
static unsigned int
bit_count (value)
     signed int value;
{
  unsigned int count = 0;

  while (value)
    {
      value &= ~(value & - value);
      ++ count;
    }

  return count;
}

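/* Example: (value & -value) isolates the lowest set bit in two's
   complement arithmetic, so each pass of the loop above clears exactly
   one bit.  For value = 0x29 (binary 101001) the loop sees
   101001 -> 101000 -> 100000 -> 0 and returns 3.  */
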
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel ++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int cpu;
        char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def ++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel ++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_THUMB_INTERWORK)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~ FL_MODE26;
        }

      if (! TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel ++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel ++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_THUMB_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_THUMB_INTERWORK)
    {
      if (! TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  /* Well, I'm about to have a go, but pic is NOT going to be compatible
     with APCS reentrancy, since that requires too much support in the
     assembler and linker, and the ARMASM assembler seems to lack some
     required directives.  */
  if (flag_pic)
    warning ("Position independent code not supported");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = insn_flags & FL_FAST_MULT;
  arm_arch4         = insn_flags & FL_ARCH4;

  arm_ld_sched   = tune_flags & FL_LDSCHED;
  arm_is_strong  = tune_flags & FL_STRONG;
  arm_is_6_or_7  = ((tune_flags & (FL_MODE26 | FL_MODE32))
                    && !(tune_flags & FL_ARCH4));

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;
}
\f
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  if (!reload_completed
      || current_function_pretend_args_size
      || current_function_anonymous_args
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !(TARGET_APCS && frame_pointer_needed)))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;
  if ((iscond && arm_is_strong)
      || TARGET_THUMB_INTERWORK)
    for (regno = 0; regno < 16; regno++)
      if (regs_ever_live[regno] && ! call_used_regs[regno])
        return 0;

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 16; regno < 24; regno++)
    if (regs_ever_live[regno] && ! call_used_regs[regno])
      return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}

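/* Note: in this backend's fixed register numbering, hard registers
   16-23 are the FPA registers f0-f7, which is why the second loop
   above scans exactly that range.  */
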
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                       >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
    } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}

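/* The loop above encodes the ARM rule that a data-processing immediate
   is an 8-bit value rotated right by an even amount.  For example,
   0x000003FC is valid (0xFF rotated right by 30) and 0xFF000000 is
   valid (0xFF rotated right by 8), but 0x000001FE is not, because the
   only rotation that could produce it is odd.  */
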
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

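/* A constant that fails the plain immediate test may still be cheap
   once the operation is flipped to its complement: an addition of
   0xFFFFFF00 (not encodable) can be done as a subtraction of 0x100,
   and an AND with 0xFFFFFF00 can be done as a BIC with 0xFF, which is
   what the PLUS and AND cases above test for.  */
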
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (! after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }
  return insns;
}

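/* Worked example: a SET of 0x12340000 cannot be done in one insn
   (0x1234 has more than 8 significant bits), so the block scan above
   splits it into two valid immediates and emits, roughly:

        mov     rD, #0x12000000
        add     rD, rD, #0x00340000

   This two-insn count is what arm_split_constant compares against
   arm_constant_limit when deciding whether to use a literal pool
   instead.  */
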
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
                - 1)
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i+1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
        {
          *op1 = GEN_INT (i-1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}

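/* For instance, a comparison (x > 0xFFF) cannot encode 0xFFF directly
   (it needs 12 significant bits), but 0x1000 is a valid immediate, so
   the comparison is rewritten as (x >= 0x1000) and needs only a single
   cmp instruction.  */
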
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (! AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }
  else if (int_size_in_bytes (type) > 4)
    {
      /* All structures/unions bigger than one word are returned in
         memory.  */
      return 1;
    }
  else if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we must return in a register if
         every addressable element has an offset of zero.  For practical
         purposes this means that the structure can have at most one non
         bit-field element and that this element must be the first one in
         the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Now check the remaining fields, if any.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (! DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}

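/* Examples: "struct { int i; }" and "struct { int a : 8; int b : 8; }"
   both fit in one word and satisfy the rules above, so they are
   returned in a register; "struct { short a; short b; }" also fits in
   a word but has a second non bit-field member at a non-zero offset,
   so it is returned in memory as the APCS requires.  */
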
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x) && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);

          subregs = 1;
        }

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the
         offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
        address = gen_reg_rtx (Pmode);
      else
        address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
                             gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                                           address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
                                            REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
        }

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

          if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            abort ();

        win:
          if (GET_CODE (offset) == CONST_INT)
            return plus_constant_for_output (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}

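/* In the non-AOF case above, a reference to a global symbol therefore
   becomes (mem (plus PIC_reg GOT_offset)): the symbol's GOT offset is
   loaded into a scratch register and the real address is then fetched
   from the GOT slot via the PIC base register.  */
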
static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}

void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), 8);
  pic_tmp2 = gen_rtx_CONST (VOIDmode,
                            gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}

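/* The sequence built here computes the PIC base in roughly this shape
   (register name illustrative; the actual PIC register is chosen in
   arm_override_options above):

        ldr     r10, =_GLOBAL_OFFSET_TABLE_ - (L1 + 8)
   L1:  add     r10, pc, r10

   where the '+ 8' accounts for the pc reading as 'dot + 8', as noted
   above.  */
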
#define REG_OR_SUBREG_REG(X)                                            \
  (GET_CODE (X) == REG                                                  \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)                    \
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)                                        \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx         \
   || (X) == arg_pointer_rtx)

int
arm_rtx_costs (x, code)
     rtx x;
     enum rtx_code code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be split into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
                    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                        || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
                   ? 0 : 4));

      return 8;

1733 case MULT:
b111229a
RE
1734 /* There is no point basing this on the tuning, since it is always the
1735 fast variant if it exists at all */
2b835d68
RE
1736 if (arm_fast_multiply && mode == DImode
1737 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
1738 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
1739 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
1740 return 8;
1741
e2c671ba
RE
1742 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1743 || mode == DImode)
1744 return 30;
1745
1746 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1747 {
2b835d68
RE
1748 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
1749 & (unsigned HOST_WIDE_INT) 0xffffffff);
e2c671ba
RE
1750 int add_cost = const_ok_for_arm (i) ? 4 : 8;
1751 int j;
b111229a 1752 /* Tune as appropriate */
aec3cfba 1753 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 1754
2b835d68 1755 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 1756 {
2b835d68 1757 i >>= booth_unit_size;
e2c671ba
RE
1758 add_cost += 2;
1759 }
1760
1761 return add_cost;
1762 }
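/* A worked example of the loop above: with booth_unit_size == 2,
   multiplying by 0x65 starts from add_cost = 4 (const_ok_for_arm
   accepts 0x65) and shifts i through 0x65 -> 0x19 -> 0x6 -> 0x1 -> 0,
   adding 2 per step, for a total of 12.  With booth_unit_size == 8
   a single step clears i, giving 6.  */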
1763
aec3cfba 1764 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 1765 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
1766 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
1767
1768 case TRUNCATE:
1769 if (arm_fast_multiply && mode == SImode
1770 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
1771 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
1772 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1773 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
1774 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
1775 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
1776 return 8;
1777 return 99;
1778
1779 case NEG:
1780 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1781 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
1782 /* Fall through */
1783 case NOT:
1784 if (mode == DImode)
1785 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1786
1787 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1788
1789 case IF_THEN_ELSE:
1790 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
1791 return 14;
1792 return 2;
1793
1794 case COMPARE:
1795 return 1;
1796
1797 case ABS:
1798 return 4 + (mode == DImode ? 4 : 0);
1799
1800 case SIGN_EXTEND:
1801 if (GET_MODE (XEXP (x, 0)) == QImode)
1802 return (4 + (mode == DImode ? 4 : 0)
1803 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1804 /* Fall through */
1805 case ZERO_EXTEND:
1806 switch (GET_MODE (XEXP (x, 0)))
1807 {
1808 case QImode:
1809 return (1 + (mode == DImode ? 4 : 0)
1810 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1811
1812 case HImode:
1813 return (4 + (mode == DImode ? 4 : 0)
1814 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1815
1816 case SImode:
1817 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1818
1819 default:
1820 break;
1821 }
1822 abort ();
1823
1824 default:
1825 return 99;
1826 }
1827}
1828
1829int
1830arm_adjust_cost (insn, link, dep, cost)
1831 rtx insn;
1832 rtx link;
1833 rtx dep;
1834 int cost;
1835{
1836 rtx i_pat, d_pat;
1837
1838 /* XXX This is not strictly true for the FPA. */
1839 if (REG_NOTE_KIND(link) == REG_DEP_ANTI
1840 || REG_NOTE_KIND(link) == REG_DEP_OUTPUT)
1841 return 0;
1842
1843 if ((i_pat = single_set (insn)) != NULL
1844 && GET_CODE (SET_SRC (i_pat)) == MEM
1845 && (d_pat = single_set (dep)) != NULL
1846 && GET_CODE (SET_DEST (d_pat)) == MEM)
1847 {
 1848 /* This is a load after a store; there is no conflict if the load reads
 1849 from a cached area.  Assume that loads from the stack and from the
 1850 constant pool are cached, and that others will miss.  This is a
 1851 hack. */
1852
1853/* debug_rtx (insn);
1854 debug_rtx (dep);
1855 debug_rtx (link);
1856 fprintf (stderr, "costs %d\n", cost); */
1857
1858 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
1859 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1860 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1861 || reg_mentioned_p (hard_frame_pointer_rtx,
1862 XEXP (SET_SRC (i_pat), 0)))
1863 {
1864/* fprintf (stderr, "***** Now 1\n"); */
1865 return 1;
1866 }
1867 }
1868
1869 return cost;
1870}
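/* For example, a store to a stack slot followed by a dependent load
   from the same area is assumed to hit the cache, so the cost drops
   to 1; a load whose address involves none of the stack pointer,
   frame pointer or constant pool keeps the scheduler's original
   COST.  */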
1871
1872/* This code has been fixed for cross compilation. */
1873
1874static int fpa_consts_inited = 0;
1875
1876char *strings_fpa[8] = {
1877 "0", "1", "2", "3",
1878 "4", "5", "0.5", "10"
1879};
1880
1881static REAL_VALUE_TYPE values_fpa[8];
1882
1883static void
1884init_fpa_table ()
1885{
1886 int i;
1887 REAL_VALUE_TYPE r;
1888
1889 for (i = 0; i < 8; i++)
1890 {
1891 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
1892 values_fpa[i] = r;
1893 }
f3bb6135 1894
1895 fpa_consts_inited = 1;
1896}
1897
1898/* Return TRUE if rtx X is a valid immediate FPU constant. */
1899
1900int
1901const_double_rtx_ok_for_fpu (x)
1902 rtx x;
1903{
1904 REAL_VALUE_TYPE r;
1905 int i;
1906
1907 if (!fpa_consts_inited)
1908 init_fpa_table ();
1909
1910 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1911 if (REAL_VALUE_MINUS_ZERO (r))
1912 return 0;
f3bb6135 1913
1914 for (i = 0; i < 8; i++)
1915 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1916 return 1;
f3bb6135 1917
ff9940b0 1918 return 0;
f3bb6135 1919}
1920
 1921/* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
1922
1923int
1924neg_const_double_rtx_ok_for_fpu (x)
1925 rtx x;
1926{
1927 REAL_VALUE_TYPE r;
1928 int i;
1929
1930 if (!fpa_consts_inited)
1931 init_fpa_table ();
1932
1933 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1934 r = REAL_VALUE_NEGATE (r);
1935 if (REAL_VALUE_MINUS_ZERO (r))
1936 return 0;
f3bb6135 1937
1938 for (i = 0; i < 8; i++)
1939 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1940 return 1;
f3bb6135 1941
ff9940b0 1942 return 0;
f3bb6135 1943}
1944\f
1945/* Predicates for `match_operand' and `match_operator'. */
1946
ff9940b0 1947/* s_register_operand is the same as register_operand, but it doesn't accept
1948 (SUBREG (MEM)...).
1949
1950 This function exists because at the time it was put in it led to better
1951 code. SUBREG(MEM) always needs a reload in the places where
1952 s_register_operand is used, and this seemed to lead to excessive
1953 reloading. */
1954
1955int
1956s_register_operand (op, mode)
1957 register rtx op;
1958 enum machine_mode mode;
1959{
1960 if (GET_MODE (op) != mode && mode != VOIDmode)
1961 return 0;
1962
1963 if (GET_CODE (op) == SUBREG)
f3bb6135 1964 op = SUBREG_REG (op);
1965
1966 /* We don't consider registers whose class is NO_REGS
1967 to be a register operand. */
1968 return (GET_CODE (op) == REG
1969 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1970 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1971}
1972
1973/* Only accept reg, subreg(reg), const_int. */
1974
1975int
1976reg_or_int_operand (op, mode)
1977 register rtx op;
1978 enum machine_mode mode;
1979{
1980 if (GET_CODE (op) == CONST_INT)
1981 return 1;
1982
1983 if (GET_MODE (op) != mode && mode != VOIDmode)
1984 return 0;
1985
1986 if (GET_CODE (op) == SUBREG)
1987 op = SUBREG_REG (op);
1988
1989 /* We don't consider registers whose class is NO_REGS
1990 to be a register operand. */
1991 return (GET_CODE (op) == REG
1992 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1993 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1994}
1995
1996/* Return 1 if OP is an item in memory, given that we are in reload. */
1997
1998int
1999reload_memory_operand (op, mode)
2000 rtx op;
74bbc178 2001 enum machine_mode mode ATTRIBUTE_UNUSED;
2002{
2003 int regno = true_regnum (op);
2004
2005 return (! CONSTANT_P (op)
2006 && (regno == -1
2007 || (GET_CODE (op) == REG
2008 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2009}
2010
2011/* Return 1 if OP is a valid memory address, but not valid for a signed byte
2012 memory access (architecture V4) */
2013int
2014bad_signed_byte_operand (op, mode)
2015 rtx op;
2016 enum machine_mode mode;
2017{
2018 if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
2019 return 0;
2020
2021 op = XEXP (op, 0);
2022
2023 /* A sum of anything more complex than reg + reg or reg + const is bad */
2024 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
2025 && (! s_register_operand (XEXP (op, 0), VOIDmode)
2026 || (! s_register_operand (XEXP (op, 1), VOIDmode)
2027 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
2028 return 1;
2029
2030 /* Big constants are also bad */
2031 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2032 && (INTVAL (XEXP (op, 1)) > 0xff
2033 || -INTVAL (XEXP (op, 1)) > 0xff))
2034 return 1;
2035
 2036 /* Everything else is good, or will automatically be made so. */
2037 return 0;
2038}
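/* Examples (from the tests above): (mem (plus (reg) (const_int 256)))
   is flagged as bad (the offset is wider than 8 bits), as is
   (mem (plus (reg) (mult (reg) (const_int 4)))); but reg + reg and
   reg + small constant addresses are fine for the v4 signed-byte
   loads.  */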
2039
2040/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2041
2042int
2043arm_rhs_operand (op, mode)
2044 rtx op;
2045 enum machine_mode mode;
2046{
ff9940b0 2047 return (s_register_operand (op, mode)
cce8749e 2048 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2049}
cce8749e 2050
2051/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
2052 */
2053
2054int
2055arm_rhsm_operand (op, mode)
2056 rtx op;
2057 enum machine_mode mode;
2058{
2059 return (s_register_operand (op, mode)
2060 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2061 || memory_operand (op, mode));
f3bb6135 2062}
2063
 2064/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
 2065 constant that is valid when negated. */
2066
2067int
2068arm_add_operand (op, mode)
2069 rtx op;
2070 enum machine_mode mode;
2071{
2072 return (s_register_operand (op, mode)
2073 || (GET_CODE (op) == CONST_INT
2074 && (const_ok_for_arm (INTVAL (op))
2075 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2076}
2077
2078int
2079arm_not_operand (op, mode)
2080 rtx op;
2081 enum machine_mode mode;
2082{
2083 return (s_register_operand (op, mode)
2084 || (GET_CODE (op) == CONST_INT
2085 && (const_ok_for_arm (INTVAL (op))
2086 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 2087}
ff9940b0 2088
2089/* Return TRUE if the operand is a memory reference which contains an
2090 offsettable address. */
2091int
2092offsettable_memory_operand (op, mode)
2093 register rtx op;
2094 enum machine_mode mode;
2095{
2096 if (mode == VOIDmode)
2097 mode = GET_MODE (op);
2098
2099 return (mode == GET_MODE (op)
2100 && GET_CODE (op) == MEM
2101 && offsettable_address_p (reload_completed | reload_in_progress,
2102 mode, XEXP (op, 0)));
2103}
2104
2105/* Return TRUE if the operand is a memory reference which is, or can be
2106 made word aligned by adjusting the offset. */
2107int
2108alignable_memory_operand (op, mode)
2109 register rtx op;
2110 enum machine_mode mode;
2111{
2112 rtx reg;
2113
2114 if (mode == VOIDmode)
2115 mode = GET_MODE (op);
2116
2117 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2118 return 0;
2119
2120 op = XEXP (op, 0);
2121
2122 return ((GET_CODE (reg = op) == REG
2123 || (GET_CODE (op) == SUBREG
2124 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2125 || (GET_CODE (op) == PLUS
2126 && GET_CODE (XEXP (op, 1)) == CONST_INT
2127 && (GET_CODE (reg = XEXP (op, 0)) == REG
2128 || (GET_CODE (XEXP (op, 0)) == SUBREG
2129 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
2130 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
2131}
2132
2133/* Similar to s_register_operand, but does not allow hard integer
2134 registers. */
2135int
2136f_register_operand (op, mode)
2137 register rtx op;
2138 enum machine_mode mode;
2139{
2140 if (GET_MODE (op) != mode && mode != VOIDmode)
2141 return 0;
2142
2143 if (GET_CODE (op) == SUBREG)
2144 op = SUBREG_REG (op);
2145
2146 /* We don't consider registers whose class is NO_REGS
2147 to be a register operand. */
2148 return (GET_CODE (op) == REG
2149 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2150 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2151}
2152
2153/* Return TRUE for valid operands for the rhs of an FPU instruction. */
2154
2155int
2156fpu_rhs_operand (op, mode)
2157 rtx op;
2158 enum machine_mode mode;
2159{
ff9940b0 2160 if (s_register_operand (op, mode))
f3bb6135 2161 return TRUE;
2162 else if (GET_CODE (op) == CONST_DOUBLE)
2163 return (const_double_rtx_ok_for_fpu (op));
2164
2165 return FALSE;
2166}
cce8749e 2167
2168int
2169fpu_add_operand (op, mode)
2170 rtx op;
2171 enum machine_mode mode;
2172{
2173 if (s_register_operand (op, mode))
f3bb6135 2174 return TRUE;
ff9940b0 2175 else if (GET_CODE (op) == CONST_DOUBLE)
2176 return (const_double_rtx_ok_for_fpu (op)
2177 || neg_const_double_rtx_ok_for_fpu (op));
2178
2179 return FALSE;
2180}
2181
2182/* Return nonzero if OP is a constant power of two. */
2183
2184int
2185power_of_two_operand (op, mode)
2186 rtx op;
74bbc178 2187 enum machine_mode mode ATTRIBUTE_UNUSED;
2188{
2189 if (GET_CODE (op) == CONST_INT)
2190 {
2191 HOST_WIDE_INT value = INTVAL(op);
2192 return value != 0 && (value & (value - 1)) == 0;
cce8749e 2193 }
2194 return FALSE;
2195}
2196
2197/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 2198 Either: REG, SUBREG, CONST_INT, CONST_DOUBLE or MEM(DImode_address).
2199 Note that this disallows MEM(REG+REG), but allows
2200 MEM(PRE/POST_INC/DEC(REG)). */
2201
2202int
2203di_operand (op, mode)
2204 rtx op;
2205 enum machine_mode mode;
2206{
ff9940b0 2207 if (s_register_operand (op, mode))
f3bb6135 2208 return TRUE;
cce8749e 2209
2210 if (GET_CODE (op) == SUBREG)
2211 op = SUBREG_REG (op);
2212
2213 switch (GET_CODE (op))
2214 {
2215 case CONST_DOUBLE:
2216 case CONST_INT:
2217 return TRUE;
2218
cce8749e 2219 case MEM:
2220 return memory_address_p (DImode, XEXP (op, 0));
2221
cce8749e 2222 default:
f3bb6135 2223 return FALSE;
cce8749e 2224 }
f3bb6135 2225}
cce8749e 2226
f3139301 2227/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 2228 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
2229 Note that this disallows MEM(REG+REG), but allows
2230 MEM(PRE/POST_INC/DEC(REG)). */
2231
2232int
2233soft_df_operand (op, mode)
2234 rtx op;
2235 enum machine_mode mode;
2236{
2237 if (s_register_operand (op, mode))
2238 return TRUE;
2239
2240 if (GET_CODE (op) == SUBREG)
2241 op = SUBREG_REG (op);
2242
2243 switch (GET_CODE (op))
2244 {
2245 case CONST_DOUBLE:
2246 return TRUE;
2247
2248 case MEM:
2249 return memory_address_p (DFmode, XEXP (op, 0));
2250
2251 default:
2252 return FALSE;
2253 }
2254}
2255
2256/* Return TRUE for valid index operands. */
2257
2258int
2259index_operand (op, mode)
2260 rtx op;
2261 enum machine_mode mode;
2262{
2263 return (s_register_operand(op, mode)
2264 || (immediate_operand (op, mode)
2265 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
f3bb6135 2266}
cce8749e 2267
2268/* Return TRUE for valid shifts by a constant. This also accepts any
2269 power of two on the (somewhat overly relaxed) assumption that the
2270 shift operator in this case was a mult. */
2271
2272int
2273const_shift_operand (op, mode)
2274 rtx op;
2275 enum machine_mode mode;
2276{
2277 return (power_of_two_operand (op, mode)
2278 || (immediate_operand (op, mode)
2279 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
f3bb6135 2280}
ff9940b0 2281
2282/* Return TRUE for arithmetic operators which can be combined with a multiply
2283 (shift). */
2284
2285int
2286shiftable_operator (x, mode)
2287 rtx x;
2288 enum machine_mode mode;
2289{
2290 if (GET_MODE (x) != mode)
2291 return FALSE;
2292 else
2293 {
2294 enum rtx_code code = GET_CODE (x);
2295
2296 return (code == PLUS || code == MINUS
2297 || code == IOR || code == XOR || code == AND);
2298 }
f3bb6135 2299}
2300
2301/* Return TRUE for shift operators. */
2302
2303int
2304shift_operator (x, mode)
2305 rtx x;
2306 enum machine_mode mode;
2307{
2308 if (GET_MODE (x) != mode)
2309 return FALSE;
2310 else
2311 {
2312 enum rtx_code code = GET_CODE (x);
2313
ff9940b0 2314 if (code == MULT)
aec3cfba 2315 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 2316
2317 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2318 || code == ROTATERT);
cce8749e 2319 }
f3bb6135 2320}
2321
2322int equality_operator (x, mode)
f3bb6135 2323 rtx x;
74bbc178 2324 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2325{
f3bb6135 2326 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
2327}
2328
2329/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
2330
2331int
2332minmax_operator (x, mode)
2333 rtx x;
2334 enum machine_mode mode;
2335{
2336 enum rtx_code code = GET_CODE (x);
2337
2338 if (GET_MODE (x) != mode)
2339 return FALSE;
f3bb6135 2340
ff9940b0 2341 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 2342}
2343
 2344
 2345
 2346 /* Return TRUE if this is the condition code register; if we aren't given
 2347 a mode, accept any class CCmode register */
2348
2349int
2350cc_register (x, mode)
2351 rtx x;
2352 enum machine_mode mode;
2353{
2354 if (mode == VOIDmode)
2355 {
2356 mode = GET_MODE (x);
2357 if (GET_MODE_CLASS (mode) != MODE_CC)
2358 return FALSE;
2359 }
f3bb6135 2360
2361 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2362 return TRUE;
f3bb6135 2363
2364 return FALSE;
2365}
2366
 2367/* Return TRUE if this is the condition code register; if we aren't given
2368 a mode, accept any class CCmode register which indicates a dominance
2369 expression. */
2370
2371int
84ed5e79 2372dominant_cc_register (x, mode)
2373 rtx x;
2374 enum machine_mode mode;
2375{
2376 if (mode == VOIDmode)
2377 {
2378 mode = GET_MODE (x);
84ed5e79 2379 if (GET_MODE_CLASS (mode) != MODE_CC)
2380 return FALSE;
2381 }
2382
2383 if (mode != CC_DNEmode && mode != CC_DEQmode
2384 && mode != CC_DLEmode && mode != CC_DLTmode
2385 && mode != CC_DGEmode && mode != CC_DGTmode
2386 && mode != CC_DLEUmode && mode != CC_DLTUmode
2387 && mode != CC_DGEUmode && mode != CC_DGTUmode)
2388 return FALSE;
2389
2390 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2391 return TRUE;
2392
2393 return FALSE;
2394}
2395
2396/* Return TRUE if X references a SYMBOL_REF. */
2397int
2398symbol_mentioned_p (x)
2399 rtx x;
2400{
2401 register char *fmt;
2402 register int i;
2403
2404 if (GET_CODE (x) == SYMBOL_REF)
2405 return 1;
2406
2407 fmt = GET_RTX_FORMAT (GET_CODE (x));
2408 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2409 {
2410 if (fmt[i] == 'E')
2411 {
2412 register int j;
2413
2414 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2415 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2416 return 1;
2417 }
2418 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2419 return 1;
2420 }
2421
2422 return 0;
2423}
2424
2425/* Return TRUE if X references a LABEL_REF. */
2426int
2427label_mentioned_p (x)
2428 rtx x;
2429{
2430 register char *fmt;
2431 register int i;
2432
2433 if (GET_CODE (x) == LABEL_REF)
2434 return 1;
2435
2436 fmt = GET_RTX_FORMAT (GET_CODE (x));
2437 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2438 {
2439 if (fmt[i] == 'E')
2440 {
2441 register int j;
2442
2443 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2444 if (label_mentioned_p (XVECEXP (x, i, j)))
2445 return 1;
2446 }
2447 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2448 return 1;
2449 }
2450
2451 return 0;
2452}
2453
2454enum rtx_code
2455minmax_code (x)
f3bb6135 2456 rtx x;
2457{
2458 enum rtx_code code = GET_CODE (x);
2459
2460 if (code == SMAX)
2461 return GE;
f3bb6135 2462 else if (code == SMIN)
ff9940b0 2463 return LE;
f3bb6135 2464 else if (code == UMIN)
ff9940b0 2465 return LEU;
f3bb6135 2466 else if (code == UMAX)
ff9940b0 2467 return GEU;
f3bb6135 2468
2469 abort ();
2470}
2471
2472/* Return 1 if memory locations are adjacent */
2473
f3bb6135 2474int
2475adjacent_mem_locations (a, b)
2476 rtx a, b;
2477{
2478 int val0 = 0, val1 = 0;
2479 int reg0, reg1;
2480
2481 if ((GET_CODE (XEXP (a, 0)) == REG
2482 || (GET_CODE (XEXP (a, 0)) == PLUS
2483 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2484 && (GET_CODE (XEXP (b, 0)) == REG
2485 || (GET_CODE (XEXP (b, 0)) == PLUS
2486 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
2487 {
2488 if (GET_CODE (XEXP (a, 0)) == PLUS)
2489 {
2490 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2491 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2492 }
2493 else
2494 reg0 = REGNO (XEXP (a, 0));
2495 if (GET_CODE (XEXP (b, 0)) == PLUS)
2496 {
2497 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2498 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2499 }
2500 else
2501 reg1 = REGNO (XEXP (b, 0));
2502 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2503 }
2504 return 0;
2505}
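/* e.g. (mem (reg 4)) and (mem (plus (reg 4) (const_int 4))) are
   adjacent (the test accepts either ordering of the two offsets),
   whereas references based on different registers never are; the
   register numbers are illustrative.  */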
2506
2507/* Return 1 if OP is a load multiple operation. It is known to be
2508 parallel and the first section will be tested. */
2509
f3bb6135 2510int
2511load_multiple_operation (op, mode)
2512 rtx op;
74bbc178 2513 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2514{
f3bb6135 2515 HOST_WIDE_INT count = XVECLEN (op, 0);
2516 int dest_regno;
2517 rtx src_addr;
f3bb6135 2518 HOST_WIDE_INT i = 1, base = 0;
2519 rtx elt;
2520
2521 if (count <= 1
2522 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2523 return 0;
2524
2525 /* Check to see if this might be a write-back */
2526 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2527 {
2528 i++;
2529 base = 1;
2530
2531 /* Now check it more carefully */
2532 if (GET_CODE (SET_DEST (elt)) != REG
2533 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2534 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2535 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2536 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2537 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2538 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2539 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2540 != REGNO (SET_DEST (elt)))
2541 return 0;
f3bb6135 2542
2543 count--;
2544 }
2545
2546 /* Perform a quick check so we don't blow up below. */
2547 if (count <= i
2548 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2549 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2550 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
2551 return 0;
2552
2553 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2554 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2555
2556 for (; i < count; i++)
2557 {
ed4c4348 2558 elt = XVECEXP (op, 0, i);
2559
2560 if (GET_CODE (elt) != SET
2561 || GET_CODE (SET_DEST (elt)) != REG
2562 || GET_MODE (SET_DEST (elt)) != SImode
2563 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2564 || GET_CODE (SET_SRC (elt)) != MEM
2565 || GET_MODE (SET_SRC (elt)) != SImode
2566 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2567 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2568 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2569 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2570 return 0;
2571 }
2572
2573 return 1;
2574}
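/* As an illustration, a two-register ldmia with write-back is
   matched in the form:
	(parallel [(set (reg rb) (plus (reg rb) (const_int 8)))
		   (set (reg r0) (mem (reg rb)))
		   (set (reg r1) (mem (plus (reg rb) (const_int 4))))
		   (clobber (reg rb))])
   where rb, r0 and r1 stand for the base and destination
   registers.  */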
2575
2576/* Return 1 if OP is a store multiple operation. It is known to be
2577 parallel and the first section will be tested. */
2578
f3bb6135 2579int
2580store_multiple_operation (op, mode)
2581 rtx op;
74bbc178 2582 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2583{
f3bb6135 2584 HOST_WIDE_INT count = XVECLEN (op, 0);
2585 int src_regno;
2586 rtx dest_addr;
f3bb6135 2587 HOST_WIDE_INT i = 1, base = 0;
2588 rtx elt;
2589
2590 if (count <= 1
2591 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2592 return 0;
2593
2594 /* Check to see if this might be a write-back */
2595 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2596 {
2597 i++;
2598 base = 1;
2599
2600 /* Now check it more carefully */
2601 if (GET_CODE (SET_DEST (elt)) != REG
2602 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2603 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2604 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2605 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2606 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2607 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2608 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2609 != REGNO (SET_DEST (elt)))
2610 return 0;
f3bb6135 2611
2612 count--;
2613 }
2614
2615 /* Perform a quick check so we don't blow up below. */
2616 if (count <= i
2617 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2618 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
2619 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
2620 return 0;
2621
2622 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
2623 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
2624
2625 for (; i < count; i++)
2626 {
2627 elt = XVECEXP (op, 0, i);
2628
2629 if (GET_CODE (elt) != SET
2630 || GET_CODE (SET_SRC (elt)) != REG
2631 || GET_MODE (SET_SRC (elt)) != SImode
2632 || REGNO (SET_SRC (elt)) != src_regno + i - base
2633 || GET_CODE (SET_DEST (elt)) != MEM
2634 || GET_MODE (SET_DEST (elt)) != SImode
2635 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2636 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2637 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2638 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
2639 return 0;
2640 }
2641
2642 return 1;
2643}
e2c671ba 2644
2645int
2646load_multiple_sequence (operands, nops, regs, base, load_offset)
2647 rtx *operands;
2648 int nops;
2649 int *regs;
2650 int *base;
2651 HOST_WIDE_INT *load_offset;
2652{
2653 int unsorted_regs[4];
2654 HOST_WIDE_INT unsorted_offsets[4];
2655 int order[4];
ad076f4e 2656 int base_reg = -1;
2657 int i;
2658
2659 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2660 extended if required. */
2661 if (nops < 2 || nops > 4)
2662 abort ();
2663
2664 /* Loop over the operands and check that the memory references are
2665 suitable (ie immediate offsets from the same base register). At
2666 the same time, extract the target register, and the memory
2667 offsets. */
2668 for (i = 0; i < nops; i++)
2669 {
2670 rtx reg;
2671 rtx offset;
2672
2673 /* Convert a subreg of a mem into the mem itself. */
2674 if (GET_CODE (operands[nops + i]) == SUBREG)
2675 operands[nops + i] = alter_subreg(operands[nops + i]);
2676
2677 if (GET_CODE (operands[nops + i]) != MEM)
2678 abort ();
2679
2680 /* Don't reorder volatile memory references; it doesn't seem worth
2681 looking for the case where the order is ok anyway. */
2682 if (MEM_VOLATILE_P (operands[nops + i]))
2683 return 0;
2684
2685 offset = const0_rtx;
2686
2687 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2688 || (GET_CODE (reg) == SUBREG
2689 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2690 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2691 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2692 == REG)
2693 || (GET_CODE (reg) == SUBREG
2694 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2695 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2696 == CONST_INT)))
2697 {
2698 if (i == 0)
2699 {
2700 base_reg = REGNO(reg);
2701 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2702 ? REGNO (operands[i])
2703 : REGNO (SUBREG_REG (operands[i])));
2704 order[0] = 0;
2705 }
2706 else
2707 {
2708 if (base_reg != REGNO (reg))
2709 /* Not addressed from the same base register. */
2710 return 0;
2711
2712 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2713 ? REGNO (operands[i])
2714 : REGNO (SUBREG_REG (operands[i])));
2715 if (unsorted_regs[i] < unsorted_regs[order[0]])
2716 order[0] = i;
2717 }
2718
2719 /* If it isn't an integer register, or if it overwrites the
2720 base register but isn't the last insn in the list, then
2721 we can't do this. */
2722 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
2723 || (i != nops - 1 && unsorted_regs[i] == base_reg))
2724 return 0;
2725
2726 unsorted_offsets[i] = INTVAL (offset);
2727 }
2728 else
2729 /* Not a suitable memory address. */
2730 return 0;
2731 }
2732
2733 /* All the useful information has now been extracted from the
2734 operands into unsorted_regs and unsorted_offsets; additionally,
2735 order[0] has been set to the lowest numbered register in the
2736 list. Sort the registers into order, and check that the memory
2737 offsets are ascending and adjacent. */
2738
2739 for (i = 1; i < nops; i++)
2740 {
2741 int j;
2742
2743 order[i] = order[i - 1];
2744 for (j = 0; j < nops; j++)
2745 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2746 && (order[i] == order[i - 1]
2747 || unsorted_regs[j] < unsorted_regs[order[i]]))
2748 order[i] = j;
2749
 2750 /* Have we found a suitable register?  If not, one must be used more
2751 than once. */
2752 if (order[i] == order[i - 1])
2753 return 0;
2754
2755 /* Is the memory address adjacent and ascending? */
2756 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2757 return 0;
2758 }
2759
2760 if (base)
2761 {
2762 *base = base_reg;
2763
2764 for (i = 0; i < nops; i++)
2765 regs[i] = unsorted_regs[order[i]];
2766
2767 *load_offset = unsorted_offsets[order[0]];
2768 }
2769
2770 if (unsorted_offsets[order[0]] == 0)
2771 return 1; /* ldmia */
2772
2773 if (unsorted_offsets[order[0]] == 4)
2774 return 2; /* ldmib */
2775
2776 if (unsorted_offsets[order[nops - 1]] == 0)
2777 return 3; /* ldmda */
2778
2779 if (unsorted_offsets[order[nops - 1]] == -4)
2780 return 4; /* ldmdb */
2781
b36ba79f 2782 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm if
 2783 the offset isn't small enough.  The reason 2 ldrs are faster is that
 2784 these ARMs are able to do more than one cache access in a single cycle.
2785 The ARM9 and StrongARM have Harvard caches, whilst the ARM8 has a double
2786 bandwidth cache. This means that these cores can do both an instruction
2787 fetch and a data fetch in a single cycle, so the trick of calculating the
2788 address into a scratch register (one of the result regs) and then doing a
2789 load multiple actually becomes slower (and no smaller in code size). That
2790 is the transformation
2791
2792 ldr rd1, [rbase + offset]
2793 ldr rd2, [rbase + offset + 4]
2794
2795 to
2796
2797 add rd1, rbase, offset
2798 ldmia rd1, {rd1, rd2}
2799
2800 produces worse code -- '3 cycles + any stalls on rd2' instead of '2 cycles
2801 + any stalls on rd2'. On ARMs with only one cache access per cycle, the
2802 first sequence could never complete in less than 6 cycles, whereas the ldm
2803 sequence would only take 5 and would make better use of sequential accesses
2804 if not hitting the cache.
2805
2806 We cheat here and test 'arm_ld_sched' which we currently know to only be
2807 true for the ARM8, ARM9 and StrongARM. If this ever changes, then the test
2808 below needs to be reworked. */
f5a1b0d2 2809 if (nops == 2 && arm_ld_sched)
2810 return 0;
2811
2812 /* Can't do it without setting up the offset, only do this if it takes
2813 no more than one insn. */
2814 return (const_ok_for_arm (unsorted_offsets[order[0]])
2815 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
2816}
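/* For example, loads of r0 from [rb, #0] and r1 from [rb, #4] yield 1
   (ldmia); offsets 4 and 8 yield 2 (ldmib); offsets -4 and 0 yield 3
   (ldmda); offsets -8 and -4 yield 4 (ldmdb).  Any other pair of
   adjacent offsets falls through to the add-then-ldmia case (5),
   which, per the comment above, is refused for two loads on the
   ARM8, ARM9 and StrongARM.  */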
2817
2818char *
2819emit_ldm_seq (operands, nops)
2820 rtx *operands;
2821 int nops;
2822{
2823 int regs[4];
2824 int base_reg;
2825 HOST_WIDE_INT offset;
2826 char buf[100];
2827 int i;
2828
2829 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2830 {
2831 case 1:
2832 strcpy (buf, "ldm%?ia\t");
2833 break;
2834
2835 case 2:
2836 strcpy (buf, "ldm%?ib\t");
2837 break;
2838
2839 case 3:
2840 strcpy (buf, "ldm%?da\t");
2841 break;
2842
2843 case 4:
2844 strcpy (buf, "ldm%?db\t");
2845 break;
2846
2847 case 5:
2848 if (offset >= 0)
2849 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2850 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2851 (long) offset);
2852 else
2853 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2854 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2855 (long) -offset);
2856 output_asm_insn (buf, operands);
2857 base_reg = regs[0];
2858 strcpy (buf, "ldm%?ia\t");
2859 break;
2860
2861 default:
2862 abort ();
2863 }
2864
2865 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2866 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2867
2868 for (i = 1; i < nops; i++)
2869 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2870 reg_names[regs[i]]);
2871
2872 strcat (buf, "}\t%@ phole ldm");
2873
2874 output_asm_insn (buf, operands);
2875 return "";
2876}
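/* For instance, with loads of r0 and r1 from [r4] and [r4, #4] (an
   illustrative register choice), the routine above ends up printing,
   once %? and %@ have been expanded and with an empty REGISTER_PREFIX:
	ldmia	r4, {r0, r1}	@ phole ldm
   */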
2877
2878int
2879store_multiple_sequence (operands, nops, regs, base, load_offset)
2880 rtx *operands;
2881 int nops;
2882 int *regs;
2883 int *base;
2884 HOST_WIDE_INT *load_offset;
2885{
2886 int unsorted_regs[4];
2887 HOST_WIDE_INT unsorted_offsets[4];
2888 int order[4];
ad076f4e 2889 int base_reg = -1;
2890 int i;
2891
2892 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2893 extended if required. */
2894 if (nops < 2 || nops > 4)
2895 abort ();
2896
2897 /* Loop over the operands and check that the memory references are
2898 suitable (ie immediate offsets from the same base register). At
2899 the same time, extract the target register, and the memory
2900 offsets. */
2901 for (i = 0; i < nops; i++)
2902 {
2903 rtx reg;
2904 rtx offset;
2905
2906 /* Convert a subreg of a mem into the mem itself. */
2907 if (GET_CODE (operands[nops + i]) == SUBREG)
2908 operands[nops + i] = alter_subreg(operands[nops + i]);
2909
2910 if (GET_CODE (operands[nops + i]) != MEM)
2911 abort ();
2912
2913 /* Don't reorder volatile memory references; it doesn't seem worth
2914 looking for the case where the order is ok anyway. */
2915 if (MEM_VOLATILE_P (operands[nops + i]))
2916 return 0;
2917
2918 offset = const0_rtx;
2919
2920 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2921 || (GET_CODE (reg) == SUBREG
2922 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2923 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2924 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2925 == REG)
2926 || (GET_CODE (reg) == SUBREG
2927 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2928 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2929 == CONST_INT)))
2930 {
2931 if (i == 0)
2932 {
2933 base_reg = REGNO(reg);
2934 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2935 ? REGNO (operands[i])
2936 : REGNO (SUBREG_REG (operands[i])));
2937 order[0] = 0;
2938 }
2939 else
2940 {
2941 if (base_reg != REGNO (reg))
2942 /* Not addressed from the same base register. */
2943 return 0;
2944
2945 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2946 ? REGNO (operands[i])
2947 : REGNO (SUBREG_REG (operands[i])));
2948 if (unsorted_regs[i] < unsorted_regs[order[0]])
2949 order[0] = i;
2950 }
2951
2952 /* If it isn't an integer register, then we can't do this. */
2953 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
2954 return 0;
2955
2956 unsorted_offsets[i] = INTVAL (offset);
2957 }
2958 else
2959 /* Not a suitable memory address. */
2960 return 0;
2961 }
2962
2963 /* All the useful information has now been extracted from the
2964 operands into unsorted_regs and unsorted_offsets; additionally,
2965 order[0] has been set to the lowest numbered register in the
2966 list. Sort the registers into order, and check that the memory
2967 offsets are ascending and adjacent. */
2968
2969 for (i = 1; i < nops; i++)
2970 {
2971 int j;
2972
2973 order[i] = order[i - 1];
2974 for (j = 0; j < nops; j++)
2975 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2976 && (order[i] == order[i - 1]
2977 || unsorted_regs[j] < unsorted_regs[order[i]]))
2978 order[i] = j;
2979
 2980 /* Have we found a suitable register?  If not, one must be used more
2981 than once. */
2982 if (order[i] == order[i - 1])
2983 return 0;
2984
2985 /* Is the memory address adjacent and ascending? */
2986 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2987 return 0;
2988 }
2989
2990 if (base)
2991 {
2992 *base = base_reg;
2993
2994 for (i = 0; i < nops; i++)
2995 regs[i] = unsorted_regs[order[i]];
2996
2997 *load_offset = unsorted_offsets[order[0]];
2998 }
2999
3000 if (unsorted_offsets[order[0]] == 0)
3001 return 1; /* stmia */
3002
3003 if (unsorted_offsets[order[0]] == 4)
3004 return 2; /* stmib */
3005
3006 if (unsorted_offsets[order[nops - 1]] == 0)
3007 return 3; /* stmda */
3008
3009 if (unsorted_offsets[order[nops - 1]] == -4)
3010 return 4; /* stmdb */
3011
3012 return 0;
3013}
3014
3015char *
3016emit_stm_seq (operands, nops)
3017 rtx *operands;
3018 int nops;
3019{
3020 int regs[4];
3021 int base_reg;
3022 HOST_WIDE_INT offset;
3023 char buf[100];
3024 int i;
3025
3026 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3027 {
3028 case 1:
3029 strcpy (buf, "stm%?ia\t");
3030 break;
3031
3032 case 2:
3033 strcpy (buf, "stm%?ib\t");
3034 break;
3035
3036 case 3:
3037 strcpy (buf, "stm%?da\t");
3038 break;
3039
3040 case 4:
3041 strcpy (buf, "stm%?db\t");
3042 break;
3043
3044 default:
3045 abort ();
3046 }
3047
3048 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3049 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3050
3051 for (i = 1; i < nops; i++)
3052 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3053 reg_names[regs[i]]);
3054
3055 strcat (buf, "}\t%@ phole stm");
3056
3057 output_asm_insn (buf, operands);
3058 return "";
3059}
3060
3061int
3062multi_register_push (op, mode)
0a81f500 3063 rtx op;
74bbc178 3064 enum machine_mode mode ATTRIBUTE_UNUSED;
3065{
3066 if (GET_CODE (op) != PARALLEL
3067 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3068 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3069 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3070 return 0;
3071
3072 return 1;
3073}
3074
ff9940b0 3075\f
3076/* Routines for use with attributes */
3077
3078/* Return nonzero if ATTR is a valid attribute for DECL.
3079 ATTRIBUTES are any existing attributes and ARGS are the arguments
3080 supplied with ATTR.
3081
3082 Supported attributes:
3083
3084 naked: don't output any prologue or epilogue code, the user is assumed
3085 to do the right thing. */
3086
3087int
74bbc178 3088arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3089 tree decl;
3090 tree attr;
3091 tree args;
3092{
3093 if (args != NULL_TREE)
3094 return 0;
3095
3096 if (is_attribute_p ("naked", attr))
3097 return TREE_CODE (decl) == FUNCTION_DECL;
3098 return 0;
3099}
3100
3101/* Return non-zero if FUNC is a naked function. */
3102
3103static int
3104arm_naked_function_p (func)
3105 tree func;
3106{
3107 tree a;
3108
3109 if (TREE_CODE (func) != FUNCTION_DECL)
3110 abort ();
2e943e99 3111
3112 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3113 return a != NULL_TREE;
3114}
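/* In user code the attribute appears as, for example,
	void handler (void) __attribute__ ((naked));
   (the function name is illustrative); for such a declaration the
   prologue and epilogue are suppressed.  */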
f3bb6135 3115\f
3116/* Routines for use in generating RTL */
3117
f3bb6135 3118rtx
56636818 3119arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3120 in_struct_p, scalar_p)
3121 int base_regno;
3122 int count;
3123 rtx from;
3124 int up;
3125 int write_back;
3126 int unchanging_p;
3127 int in_struct_p;
c6df88cb 3128 int scalar_p;
3129{
3130 int i = 0, j;
3131 rtx result;
3132 int sign = up ? 1 : -1;
56636818 3133 rtx mem;
ff9940b0 3134
3135 result = gen_rtx_PARALLEL (VOIDmode,
3136 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3137 if (write_back)
f3bb6135 3138 {
ff9940b0 3139 XVECEXP (result, 0, 0)
3140 = gen_rtx_SET (GET_MODE (from), from,
3141 plus_constant (from, count * 4 * sign));
3142 i = 1;
3143 count++;
3144 }
3145
ff9940b0 3146 for (j = 0; i < count; i++, j++)
f3bb6135 3147 {
43cffd11 3148 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
3149 RTX_UNCHANGING_P (mem) = unchanging_p;
3150 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3151 MEM_SCALAR_P (mem) = scalar_p;
3152 XVECEXP (result, 0, i)
3153 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
3154 }
3155
ff9940b0 3156 if (write_back)
43cffd11 3157 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, from);
3158
3159 return result;
3160}
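/* A sketch of the PARALLEL built above for base_regno == 4,
   count == 2, UP set and no write-back:
	(parallel [(set (reg:SI 4) (mem:SI from))
		   (set (reg:SI 5) (mem:SI (plus from (const_int 4))))])
   with the address-update SET first and a final CLOBBER of FROM
   added when WRITE_BACK is set.  */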
3161
f3bb6135 3162rtx
56636818 3163arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3164 in_struct_p, scalar_p)
3165 int base_regno;
3166 int count;
3167 rtx to;
3168 int up;
3169 int write_back;
3170 int unchanging_p;
3171 int in_struct_p;
c6df88cb 3172 int scalar_p;
3173{
3174 int i = 0, j;
3175 rtx result;
3176 int sign = up ? 1 : -1;
56636818 3177 rtx mem;
ff9940b0 3178
3179 result = gen_rtx_PARALLEL (VOIDmode,
3180 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3181 if (write_back)
f3bb6135 3182 {
ff9940b0 3183 XVECEXP (result, 0, 0)
3184 = gen_rtx_SET (GET_MODE (to), to,
3185 plus_constant (to, count * 4 * sign));
3186 i = 1;
3187 count++;
3188 }
3189
ff9940b0 3190 for (j = 0; i < count; i++, j++)
f3bb6135 3191 {
43cffd11 3192 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
3193 RTX_UNCHANGING_P (mem) = unchanging_p;
3194 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3195 MEM_SCALAR_P (mem) = scalar_p;
56636818 3196
3197 XVECEXP (result, 0, i)
3198 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
3199 }
3200
ff9940b0 3201 if (write_back)
43cffd11 3202 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, to);
3203
3204 return result;
3205}
3206
3207int
3208arm_gen_movstrqi (operands)
3209 rtx *operands;
3210{
3211 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 3212 int i;
880e2516 3213 rtx src, dst;
ad076f4e 3214 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 3215 rtx part_bytes_reg = NULL;
3216 rtx mem;
3217 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 3218 int dst_scalar_p, src_scalar_p;
3219
3220 if (GET_CODE (operands[2]) != CONST_INT
3221 || GET_CODE (operands[3]) != CONST_INT
3222 || INTVAL (operands[2]) > 64
3223 || INTVAL (operands[3]) & 3)
3224 return 0;
3225
3226 st_dst = XEXP (operands[0], 0);
3227 st_src = XEXP (operands[1], 0);
3228
3229 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3230 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 3231 dst_scalar_p = MEM_SCALAR_P (operands[0]);
3232 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3233 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 3234 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 3235
3236 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3237 fin_src = src = copy_to_mode_reg (SImode, st_src);
3238
3239 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
3240 out_words_to_go = INTVAL (operands[2]) / 4;
3241 last_bytes = INTVAL (operands[2]) & 3;
3242
3243 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 3244 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
3245
3246 for (i = 0; in_words_to_go >= 2; i+=4)
3247 {
bd9c7e23 3248 if (in_words_to_go > 4)
56636818 3249 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
3250 src_unchanging_p,
3251 src_in_struct_p,
3252 src_scalar_p));
3253 else
3254 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 3255 FALSE, src_unchanging_p,
c6df88cb 3256 src_in_struct_p, src_scalar_p));
bd9c7e23 3257
3258 if (out_words_to_go)
3259 {
bd9c7e23 3260 if (out_words_to_go > 4)
3261 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3262 dst_unchanging_p,
3263 dst_in_struct_p,
3264 dst_scalar_p));
3265 else if (out_words_to_go != 1)
3266 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3267 dst, TRUE,
3268 (last_bytes == 0
3269 ? FALSE : TRUE),
3270 dst_unchanging_p,
3271 dst_in_struct_p,
3272 dst_scalar_p));
3273 else
3274 {
43cffd11 3275 mem = gen_rtx_MEM (SImode, dst);
3276 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3277 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3278 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3279 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
3280 if (last_bytes != 0)
3281 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
3282 }
3283 }
3284
3285 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3286 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
3287 }
3288
3289 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
3290 if (out_words_to_go)
3291 {
3292 rtx sreg;
3293
43cffd11 3294 mem = gen_rtx_MEM (SImode, src);
3295 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3296 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 3297 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 3298 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
880e2516 3299 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
56636818 3300
43cffd11 3301 mem = gen_rtx_MEM (SImode, dst);
3302 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3303 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3304 MEM_SCALAR_P (mem) = dst_scalar_p;
56636818 3305 emit_move_insn (mem, sreg);
3306 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
3307 in_words_to_go--;
3308
3309 if (in_words_to_go) /* Sanity check */
3310 abort ();
3311 }
3312
3313 if (in_words_to_go)
3314 {
3315 if (in_words_to_go < 0)
3316 abort ();
3317
43cffd11 3318 mem = gen_rtx_MEM (SImode, src);
3319 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3320 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 3321 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 3322 part_bytes_reg = copy_to_mode_reg (SImode, mem);
3323 }
3324
3325 if (BYTES_BIG_ENDIAN && last_bytes)
3326 {
3327 rtx tmp = gen_reg_rtx (SImode);
3328
3329 if (part_bytes_reg == NULL)
3330 abort ();
3331
3332 /* The bytes we want are in the top end of the word */
3333 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
3334 GEN_INT (8 * (4 - last_bytes))));
3335 part_bytes_reg = tmp;
3336
3337 while (last_bytes)
3338 {
43cffd11 3339 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
3340 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3341 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3342 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3343 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
3344 if (--last_bytes)
3345 {
3346 tmp = gen_reg_rtx (SImode);
3347 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3348 part_bytes_reg = tmp;
3349 }
3350 }
3351
3352 }
3353 else
3354 {
3355 while (last_bytes)
3356 {
3357 if (part_bytes_reg == NULL)
3358 abort ();
3359
43cffd11 3360 mem = gen_rtx_MEM (QImode, dst);
3361 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3362 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3363 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3364 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
3365 if (--last_bytes)
3366 {
3367 rtx tmp = gen_reg_rtx (SImode);
3368
3369 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
3370 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3371 part_bytes_reg = tmp;
3372 }
3373 }
3374 }
3375
3376 return 1;
3377}
3378
3379/* Generate a memory reference for a half word, such that it will be loaded
3380 into the top 16 bits of the word. We can assume that the address is
3381 known to be alignable and of the form reg, or plus (reg, const). */
3382rtx
3383gen_rotated_half_load (memref)
3384 rtx memref;
3385{
3386 HOST_WIDE_INT offset = 0;
3387 rtx base = XEXP (memref, 0);
3388
3389 if (GET_CODE (base) == PLUS)
3390 {
3391 offset = INTVAL (XEXP (base, 1));
3392 base = XEXP (base, 0);
3393 }
3394
956d6950 3395 /* If we aren't allowed to generate unaligned addresses, then fail. */
3396 if (TARGET_SHORT_BY_BYTES
3397 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
3398 return NULL;
3399
43cffd11 3400 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
3401
3402 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
3403 return base;
3404
43cffd11 3405 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
3406}
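/* As an illustration, on a little-endian target a halfword at
   [base, #0] comes back as (rotate (mem:SI base) 16), moving the
   loaded halfword into the top 16 bits, while a halfword at
   [base, #2] is already the top half of the word at base and the
   SImode MEM is returned unrotated.  */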
3407
84ed5e79 3408static enum machine_mode
74bbc178 3409select_dominance_cc_mode (x, y, cond_or)
3410 rtx x;
3411 rtx y;
3412 HOST_WIDE_INT cond_or;
3413{
3414 enum rtx_code cond1, cond2;
3415 int swapped = 0;
3416
3417 /* Currently we will probably get the wrong result if the individual
3418 comparisons are not simple. This also ensures that it is safe to
956d6950 3419 reverse a comparison if necessary. */
3420 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
3421 != CCmode)
3422 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
3423 != CCmode))
3424 return CCmode;
3425
3426 if (cond_or)
3427 cond1 = reverse_condition (cond1);
3428
3429 /* If the comparisons are not equal, and one doesn't dominate the other,
3430 then we can't do this. */
3431 if (cond1 != cond2
3432 && ! comparison_dominates_p (cond1, cond2)
3433 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
3434 return CCmode;
3435
3436 if (swapped)
3437 {
3438 enum rtx_code temp = cond1;
3439 cond1 = cond2;
3440 cond2 = temp;
3441 }
3442
3443 switch (cond1)
3444 {
3445 case EQ:
3446 if (cond2 == EQ || ! cond_or)
3447 return CC_DEQmode;
3448
3449 switch (cond2)
3450 {
3451 case LE: return CC_DLEmode;
3452 case LEU: return CC_DLEUmode;
3453 case GE: return CC_DGEmode;
3454 case GEU: return CC_DGEUmode;
ad076f4e 3455 default: break;
3456 }
3457
3458 break;
3459
3460 case LT:
3461 if (cond2 == LT || ! cond_or)
3462 return CC_DLTmode;
3463 if (cond2 == LE)
3464 return CC_DLEmode;
3465 if (cond2 == NE)
3466 return CC_DNEmode;
3467 break;
3468
3469 case GT:
3470 if (cond2 == GT || ! cond_or)
3471 return CC_DGTmode;
3472 if (cond2 == GE)
3473 return CC_DGEmode;
3474 if (cond2 == NE)
3475 return CC_DNEmode;
3476 break;
3477
3478 case LTU:
3479 if (cond2 == LTU || ! cond_or)
3480 return CC_DLTUmode;
3481 if (cond2 == LEU)
3482 return CC_DLEUmode;
3483 if (cond2 == NE)
3484 return CC_DNEmode;
3485 break;
3486
3487 case GTU:
3488 if (cond2 == GTU || ! cond_or)
3489 return CC_DGTUmode;
3490 if (cond2 == GEU)
3491 return CC_DGEUmode;
3492 if (cond2 == NE)
3493 return CC_DNEmode;
3494 break;
3495
3496 /* The remaining cases only occur when both comparisons are the
3497 same. */
3498 case NE:
3499 return CC_DNEmode;
3500
3501 case LE:
3502 return CC_DLEmode;
3503
3504 case GE:
3505 return CC_DGEmode;
3506
3507 case LEU:
3508 return CC_DLEUmode;
3509
3510 case GEU:
3511 return CC_DGEUmode;
3512
3513 default:
3514 break;
3515 }
3516
3517 abort ();
3518}
3519
3520enum machine_mode
3521arm_select_cc_mode (op, x, y)
3522 enum rtx_code op;
3523 rtx x;
3524 rtx y;
3525{
3526 /* All floating point compares return CCFP if it is an equality
3527 comparison, and CCFPE otherwise. */
3528 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3529 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
3530
3531 /* A compare with a shifted operand. Because of canonicalization, the
3532 comparison will have to be swapped when we emit the assembler. */
3533 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
3534 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3535 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
3536 || GET_CODE (x) == ROTATERT))
3537 return CC_SWPmode;
3538
3539 /* This is a special case that is used by combine to allow a
3540 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 3541 followed by a comparison of the shifted integer (only valid for
956d6950 3542 equalities and unsigned inequalities). */
3543 if (GET_MODE (x) == SImode
3544 && GET_CODE (x) == ASHIFT
3545 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
3546 && GET_CODE (XEXP (x, 0)) == SUBREG
3547 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
3548 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
3549 && (op == EQ || op == NE
3550 || op == GEU || op == GTU || op == LTU || op == LEU)
3551 && GET_CODE (y) == CONST_INT)
3552 return CC_Zmode;
3553
 3554 /* For an operation that sets the condition codes as a side-effect, the
 3555 V flag is not set correctly, so we can only use comparisons where
 3556 this doesn't matter.  (For LT and GE we can use "mi" and "pl"
 3557 instead.) */
3558 if (GET_MODE (x) == SImode
3559 && y == const0_rtx
3560 && (op == EQ || op == NE || op == LT || op == GE)
3561 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
3562 || GET_CODE (x) == AND || GET_CODE (x) == IOR
3563 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
3564 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
3565 || GET_CODE (x) == LSHIFTRT
3566 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3567 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
3568 return CC_NOOVmode;
3569
 3570 /* A construct for a conditional compare: if the false arm contains
 3571 0, then both conditions must be true; otherwise either condition
 3572 must be true.  Not all conditions are possible, so CCmode is
 3573 returned if it can't be done. */
3574 if (GET_CODE (x) == IF_THEN_ELSE
3575 && (XEXP (x, 2) == const0_rtx
3576 || XEXP (x, 2) == const1_rtx)
3577 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3578 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
74bbc178 3579 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
3580 INTVAL (XEXP (x, 2)));
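/* e.g. (if_then_else (eq (reg) (const_int 0))
		      (eq (reg) (const_int 0))
		      (const_int 0)),
   where both sub-comparisons must hold, is given CC_DEQmode.  */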
3581
3582 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
3583 return CC_Zmode;
3584
3585 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
3586 && GET_CODE (x) == PLUS
3587 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
3588 return CC_Cmode;
3589
3590 return CCmode;
3591}
3592
3593/* X and Y are two things to compare using CODE. Emit the compare insn and
3594 return the rtx for register 0 in the proper mode. FP means this is a
3595 floating point compare: I don't think that it is needed on the arm. */
3596
3597rtx
74bbc178 3598gen_compare_reg (code, x, y)
3599 enum rtx_code code;
3600 rtx x, y;
3601{
3602 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
43cffd11 3603 rtx cc_reg = gen_rtx_REG (mode, 24);
ff9940b0 3604
3605 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
3606 gen_rtx_COMPARE (mode, x, y)));
3607
3608 return cc_reg;
3609}
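/* A sketch of a typical use: gen_compare_reg (EQ, x, y) emits
	(set (reg 24) (compare x y))
   in the mode picked by SELECT_CC_MODE and returns the register,
   ready for the caller to branch on.  */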
3610
3611void
3612arm_reload_in_hi (operands)
3613 rtx *operands;
3614{
3615 rtx ref = operands[1];
3616 rtx base, scratch;
3617 HOST_WIDE_INT offset = 0;
3618
3619 if (GET_CODE (ref) == SUBREG)
3620 {
3621 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
3622 if (BYTES_BIG_ENDIAN)
3623 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
3624 - MIN (UNITS_PER_WORD,
3625 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
3626 ref = SUBREG_REG (ref);
3627 }
3628
3629 if (GET_CODE (ref) == REG)
3630 {
3631 /* We have a pseudo which has been spilt onto the stack; there
3632 are two cases here: the first where there is a simple
3633 stack-slot replacement and a second where the stack-slot is
3634 out of range, or is used as a subreg. */
3635 if (reg_equiv_mem[REGNO (ref)])
3636 {
3637 ref = reg_equiv_mem[REGNO (ref)];
3638 base = find_replacement (&XEXP (ref, 0));
3639 }
3640 else
3641 /* The slot is out of range, or was dressed up in a SUBREG */
3642 base = reg_equiv_address[REGNO (ref)];
3643 }
3644 else
3645 base = find_replacement (&XEXP (ref, 0));
0a81f500 3646
3647 /* Handle the case where the address is too complex to be offset by 1. */
3648 if (GET_CODE (base) == MINUS
3649 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
3650 {
f9cc092a 3651 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 3652
43cffd11 3653 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
3654 base = base_plus;
3655 }
3656 else if (GET_CODE (base) == PLUS)
3657 {
3658 /* The addend must be CONST_INT, or we would have dealt with it above */
3659 HOST_WIDE_INT hi, lo;
3660
3661 offset += INTVAL (XEXP (base, 1));
3662 base = XEXP (base, 0);
3663
3664 /* Rework the address into a legal sequence of insns */
3665 /* Valid range for lo is -4095 -> 4095 */
3666 lo = (offset >= 0
3667 ? (offset & 0xfff)
3668 : -((-offset) & 0xfff));
3669
3670 /* Corner case, if lo is the max offset then we would be out of range
3671 once we have added the additional 1 below, so bump the msb into the
3672 pre-loading insn(s). */
3673 if (lo == 4095)
3674 lo &= 0x7ff;
3675
3676 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFF)
3677 ^ (HOST_WIDE_INT) 0x80000000)
3678 - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
        abort ();

      if (hi != 0)
        {
          rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

          /* Get the base address; addsi3 knows how to handle constants
             that require more than one insn.  */
          emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
          base = base_plus;
          offset = lo;
        }
    }

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
                                   gen_rtx_MEM (QImode,
                                                plus_constant (base,
                                                               offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
                                   gen_rtx_MEM (QImode,
                                                plus_constant (base,
                                                               offset + 1))));
  if (BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
                            gen_rtx_IOR (SImode,
                                         gen_rtx_ASHIFT
                                         (SImode,
                                          gen_rtx_SUBREG (SImode, operands[0], 0),
                                          GEN_INT (8)),
                                         scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
                            gen_rtx_IOR (SImode,
                                         gen_rtx_ASHIFT (SImode, scratch,
                                                         GEN_INT (8)),
                                         gen_rtx_SUBREG (SImode, operands[0],
                                                         0))));
}

/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */
void
arm_reload_out_hi (operands)
     rtx *operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
                   - MIN (UNITS_PER_WORD,
                          GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
         are two cases here: the first where there is a simple
         stack-slot replacement and a second where the stack-slot is
         out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
        {
          ref = reg_equiv_mem[REGNO (ref)];
          base = find_replacement (&XEXP (ref, 0));
        }
      else
        /* The slot is out of range, or was dressed up in a SUBREG.  */
        base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
        {
          /* Updating base_plus might destroy outval, see if we can
             swap the scratch and base_plus.  */
          if (! reg_overlap_mentioned_p (scratch, outval))
            {
              rtx tmp = scratch;
              scratch = base_plus;
              base_plus = tmp;
            }
          else
            {
              rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

              /* Be conservative and copy OUTVAL into the scratch now,
                 this should only be necessary if outval is a subreg
                 of something larger than a word.  */
              /* XXX Might this clobber base?  I can't see how it can,
                 since scratch is known to overlap with OUTVAL, and
                 must be wider than a word.  */
              emit_insn (gen_movhi (scratch_hi, outval));
              outval = scratch_hi;
            }
        }

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it
         above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095.  */
      lo = (offset >= 0
            ? (offset & 0xfff)
            : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
         once we have added the additional 1 below, so bump the msb into the
         pre-loading insn(s).  */
      if (lo == 4095)
        lo &= 0x7ff;

      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFF)
             ^ (HOST_WIDE_INT) 0x80000000)
            - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
        abort ();

      if (hi != 0)
        {
          rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

          /* Be careful not to destroy OUTVAL.  */
          if (reg_overlap_mentioned_p (base_plus, outval))
            {
              /* Updating base_plus might destroy outval, see if we
                 can swap the scratch and base_plus.  */
              if (! reg_overlap_mentioned_p (scratch, outval))
                {
                  rtx tmp = scratch;
                  scratch = base_plus;
                  base_plus = tmp;
                }
              else
                {
                  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

                  /* Be conservative and copy outval into scratch now,
                     this should only be necessary if outval is a
                     subreg of something larger than a word.  */
                  /* XXX Might this clobber base?  I can't see how it
                     can, since scratch is known to overlap with
                     outval.  */
                  emit_insn (gen_movhi (scratch_hi, outval));
                  outval = scratch_hi;
                }
            }

          /* Get the base address; addsi3 knows how to handle constants
             that require more than one insn.  */
          emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
          base = base_plus;
          offset = lo;
        }
    }

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
                                         plus_constant (base, offset + 1)),
                            gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
                              gen_rtx_SUBREG (SImode, outval, 0),
                              GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
                            gen_rtx_SUBREG (QImode, scratch, 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
                            gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
                              gen_rtx_SUBREG (SImode, outval, 0),
                              GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
                                         plus_constant (base, offset + 1)),
                            gen_rtx_SUBREG (QImode, scratch, 0)));
    }
}
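
/* For the little-endian case above, storing the HImode value 0xABCD at
   address A thus becomes: a byte store of the low byte (0xCD) at [A], a
   logical shift right by 8 into the scratch, then a byte store of 0xAB
   at [A + 1]; the big-endian case simply swaps the two addresses.  */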
\f
/* Routines for manipulation of the constant pool.  */
/* This is unashamedly hacked from the version in sh.c, since the problem is
   extremely similar.  */

/* Arm instructions cannot load a large constant into a register,
   constants have to come from a pc relative load.  The reference of a pc
   relative load instruction must be less than 1k in front of the
   instruction.  This means that we often have to dump a constant inside a
   function, and generate code to branch around it.

   It is important to minimize this, since the branches will slow things
   down and make things bigger.

   Worst case code looks like:

	ldr	rn, L1
	b	L2
	align
	L1:	.long value
	L2:
	..

	ldr	rn, L3
	b	L4
	align
	L3:	.long value
	L4:
	..

   We fix this by performing a scan before scheduling, which notices which
   instructions need to have their operands fetched from the constant table
   and builds the table.

   The algorithm is:

   scan, find an instruction which needs a pcrel move.  Look forward, find
   the last barrier which is within MAX_COUNT bytes of the requirement.
   If there isn't one, make one.  Process all the instructions between
   the find and the barrier.

   In the above example, we can tell that L3 is within 1k of L1, so
   the first move can be shrunk from the 2 insn+constant sequence into
   just 1 insn, and the constant moved to L3 to make:

	ldr	rn, L1
	..
	ldr	rn, L3
	b	L4
	align
	L1:	.long value
	L3:	.long value
	L4:

   Then the second move becomes the target for the shortening process.  */

typedef struct
{
  rtx value;                    /* Value in table */
  HOST_WIDE_INT next_offset;
  enum machine_mode mode;       /* Mode of value */
} pool_node;

/* The maximum number of constants that can fit into one pool, since
   the pc relative range is 0...1020 bytes and constants are at least 4
   bytes long.  */

#define MAX_POOL_SIZE (1020/4)
static pool_node pool_vector[MAX_POOL_SIZE];
static int pool_size;
static rtx pool_vector_label;

/* Add a constant to the pool and return its offset within the current
   pool.

   X is the rtx we want to replace.  MODE is its mode.  On return,
   ADDRESS_ONLY will be non-zero if we really want the address of such
   a constant, not the constant itself.  */
static HOST_WIDE_INT
add_constant (x, mode, address_only)
     rtx x;
     enum machine_mode mode;
     int * address_only;
{
  int i;
  HOST_WIDE_INT offset;

  * address_only = 0;

  if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    x = get_pool_constant (XEXP (x, 0));
  else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
    {
      *address_only = 1;
      mode = get_pool_mode (x);
      x = get_pool_constant (x);
    }
#ifndef AOF_ASSEMBLER
  else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
    x = XVECEXP (x, 0, 0);
#endif

#ifdef AOF_ASSEMBLER
  /* PIC Symbol references need to be converted into offsets into the
     based area.  */
  if (flag_pic && GET_CODE (x) == SYMBOL_REF)
    x = aof_pic_entry (x);
#endif /* AOF_ASSEMBLER */

  /* First see if we've already got it.  */
  for (i = 0; i < pool_size; i++)
    {
      if (GET_CODE (x) == pool_vector[i].value->code
          && mode == pool_vector[i].mode)
        {
          if (GET_CODE (x) == CODE_LABEL)
            {
              if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
                continue;
            }
          if (rtx_equal_p (x, pool_vector[i].value))
            return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
        }
    }

  /* Need a new one.  */
  pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
  offset = 0;
  if (pool_size == 0)
    pool_vector_label = gen_label_rtx ();
  else
    pool_vector[pool_size].next_offset
      += (offset = pool_vector[pool_size - 1].next_offset);

  pool_vector[pool_size].value = x;
  pool_vector[pool_size].mode = mode;
  pool_size++;
  return offset;
}
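
/* For example, with an empty pool, adding an SImode constant returns
   offset 0 and records next_offset 4; adding a DFmode constant after it
   returns offset 4 and records next_offset 12.  Each entry's next_offset
   is therefore the running size of the pool up to and including that
   entry, which is why a hit in the scan above returns next_offset minus
   the mode size.  */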

/* Output the literal table.  */
static void
dump_table (scan)
     rtx scan;
{
  int i;

  scan = emit_label_after (gen_label_rtx (), scan);
  scan = emit_insn_after (gen_align_4 (), scan);
  scan = emit_label_after (pool_vector_label, scan);

  for (i = 0; i < pool_size; i++)
    {
      pool_node *p = pool_vector + i;

      switch (GET_MODE_SIZE (p->mode))
        {
        case 4:
          scan = emit_insn_after (gen_consttable_4 (p->value), scan);
          break;

        case 8:
          scan = emit_insn_after (gen_consttable_8 (p->value), scan);
          break;

        default:
          abort ();
          break;
        }
    }

  scan = emit_insn_after (gen_consttable_end (), scan);
  scan = emit_barrier_after (scan);
  pool_size = 0;
}

/* Non-zero if the src operand needs to be fixed up.  */
static int
fixit (src, mode, destreg)
     rtx src;
     enum machine_mode mode;
     int destreg;
{
  if (CONSTANT_P (src))
    {
      if (GET_CODE (src) == CONST_INT)
        return (! const_ok_for_arm (INTVAL (src))
                && ! const_ok_for_arm (~INTVAL (src)));
      if (GET_CODE (src) == CONST_DOUBLE)
        return (GET_MODE (src) == VOIDmode
                || destreg < 16
                || (! const_double_rtx_ok_for_fpu (src)
                    && ! neg_const_double_rtx_ok_for_fpu (src)));
      return symbol_mentioned_p (src);
    }
#ifndef AOF_ASSEMBLER
  else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
    return 1;
#endif
  else
    return (mode == SImode && GET_CODE (src) == MEM
            && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
            && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
}

/* Find the last barrier less than MAX_COUNT bytes from FROM, or create
   one.  */
static rtx
find_barrier (from, max_count)
     rtx from;
     int max_count;
{
  int count = 0;
  rtx found_barrier = 0;
  rtx last = from;

  while (from && count < max_count)
    {
      rtx tmp;

      if (GET_CODE (from) == BARRIER)
        found_barrier = from;

      /* Count the length of this insn.  */
      if (GET_CODE (from) == INSN
          && GET_CODE (PATTERN (from)) == SET
          && CONSTANT_P (SET_SRC (PATTERN (from)))
          && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
        count += 8;
      /* Handle table jumps as a single entity.  */
      else if (GET_CODE (from) == JUMP_INSN
               && JUMP_LABEL (from) != 0
               && ((tmp = next_real_insn (JUMP_LABEL (from)))
                   == next_real_insn (from))
               && tmp != NULL
               && GET_CODE (tmp) == JUMP_INSN
               && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
                   || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
        {
          int elt = GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC ? 1 : 0;
          count += (get_attr_length (from)
                    + GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (tmp), elt));
          /* Continue after the dispatch table.  */
          last = from;
          from = NEXT_INSN (tmp);
          continue;
        }
      else
        count += get_attr_length (from);

      last = from;
      from = NEXT_INSN (from);
    }

  if (! found_barrier)
    {
      /* We didn't find a barrier in time to
         dump our stuff, so we'll make one.  */
      rtx label = gen_label_rtx ();

      if (from)
        from = PREV_INSN (last);
      else
        from = get_last_insn ();

      /* Walk back to be just before any jump.  */
      while (GET_CODE (from) == JUMP_INSN
             || GET_CODE (from) == NOTE
             || GET_CODE (from) == CODE_LABEL)
        from = PREV_INSN (from);

      from = emit_jump_insn_after (gen_jump (label), from);
      JUMP_LABEL (from) = label;
      found_barrier = emit_barrier_after (from);
      emit_label_after (label, found_barrier);
    }

  return found_barrier;
}

/* Non-zero if the insn is a move instruction which needs to be fixed.  */
static int
broken_move (insn)
     rtx insn;
{
  if (!INSN_DELETED_P (insn)
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      rtx src = SET_SRC (pat);
      rtx dst = SET_DEST (pat);
      int destreg;
      enum machine_mode mode = GET_MODE (dst);

      if (dst == pc_rtx)
        return 0;

      if (GET_CODE (dst) == REG)
        destreg = REGNO (dst);
      else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
        destreg = REGNO (SUBREG_REG (dst));
      else
        return 0;

      return fixit (src, mode, destreg);
    }
  return 0;
}

void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  int count_size;

#if 0
  /* The ldr instruction can work with up to a 4k offset, and most constants
     will be loaded with one of these instructions; however, the adr
     instruction and the ldf instructions only work with a 1k offset.  This
     code needs to be rewritten to use the 4k offset when possible, and to
     adjust when a 1k offset is needed.  For now we just use a 1k offset
     from the start.  */
  count_size = 4000;

  /* Floating point operands can't work further than 1024 bytes from the
     PC, so to make things simple we restrict all loads for such
     functions.  */
  if (TARGET_HARD_FLOAT)
    {
      int regno;

      for (regno = 16; regno < 24; regno++)
        if (regs_ever_live[regno])
          {
            count_size = 1000;
            break;
          }
    }
#else
  count_size = 1000;
#endif /* 0 */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (broken_move (insn))
        {
          /* This is a broken move instruction, scan ahead looking for
             a barrier to stick the constant table behind.  */
          rtx scan;
          rtx barrier = find_barrier (insn, count_size);

          /* Now find all the moves between the points and modify them.  */
          for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
            {
              if (broken_move (scan))
                {
                  /* This is a broken move instruction, add it to the
                     pool.  */
                  rtx pat = PATTERN (scan);
                  rtx src = SET_SRC (pat);
                  rtx dst = SET_DEST (pat);
                  enum machine_mode mode = GET_MODE (dst);
                  HOST_WIDE_INT offset;
                  rtx newinsn = scan;
                  rtx newsrc;
                  rtx addr;
                  int scratch;
                  int address_only;

                  /* If this is an HImode constant load, convert it into
                     an SImode constant load.  Since the register is always
                     32 bits this is safe.  We have to do this, since the
                     load pc-relative instruction only does a 32-bit load.  */
                  if (mode == HImode)
                    {
                      mode = SImode;
                      if (GET_CODE (dst) != REG)
                        abort ();
                      PUT_MODE (dst, SImode);
                    }

                  offset = add_constant (src, mode, &address_only);
                  addr = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
                                                           pool_vector_label),
                                        offset);

                  /* If we only want the address of the pool entry, or
                     for wide moves to integer regs we need to split
                     the address calculation off into a separate insn.
                     If necessary, the load can then be done with a
                     load-multiple.  This is safe, since we have
                     already noted the length of such insns to be 8,
                     and we are immediately over-writing the scratch
                     we have grabbed with the final result.  */
                  if ((address_only || GET_MODE_SIZE (mode) > 4)
                      && (scratch = REGNO (dst)) < 16)
                    {
                      rtx reg;

                      if (mode == SImode)
                        reg = dst;
                      else
                        reg = gen_rtx_REG (SImode, scratch);

                      newinsn = emit_insn_after (gen_movaddr (reg, addr),
                                                 newinsn);
                      addr = reg;
                    }

                  if (! address_only)
                    {
                      newsrc = gen_rtx_MEM (mode, addr);

                      /* XXX Fixme -- I think the following is bogus.  */
                      /* Build a jump insn wrapper around the move instead
                         of an ordinary insn, because we want to have room for
                         the target label rtx in fld[7], which an ordinary
                         insn doesn't have.  */
                      newinsn
                        = emit_jump_insn_after (gen_rtx_SET (VOIDmode, dst,
                                                             newsrc),
                                                newinsn);
                      JUMP_LABEL (newinsn) = pool_vector_label;

                      /* But it's still an ordinary insn.  */
                      PUT_CODE (newinsn, INSN);
                    }

                  /* Kill old insn.  */
                  delete_insn (scan);
                  scan = newinsn;
                }
            }
          dump_table (barrier);
          insn = scan;
        }
    }

  after_arm_reorg = 1;
}
\f
/* Routines to output assembly language.  */

/* If the rtx is the correct value then return the string of the number.
   In this way we can ensure that valid double constants are generated even
   when cross compiling.  */
char *
fp_immediate_constant (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}

/* As for fp_immediate_constant, but value is passed directly, not in rtx.  */
static char *
fp_const_from_val (r)
     REAL_VALUE_TYPE *r;
{
  int i;

  if (! fpa_consts_inited)
    init_fpa_table ();

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}

/* Output the operands of a LDM/STM instruction to STREAM.
   MASK is the ARM register set mask of which only bits 0-15 are important.
   INSTR is the possibly suffixed base register.  HAT is non-zero if a hat
   must follow the register list.  */

void
print_multi_reg (stream, instr, mask, hat)
     FILE *stream;
     char *instr;
     int mask, hat;
{
  int i;
  int not_first = FALSE;

  fputc ('\t', stream);
  fprintf (stream, instr, REGISTER_PREFIX);
  fputs (", {", stream);
  for (i = 0; i < 16; i++)
    if (mask & (1 << i))
      {
        if (not_first)
          fprintf (stream, ", ");
        fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
        not_first = TRUE;
      }

  fprintf (stream, "}%s\n", hat ? "^" : "");
}
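
/* As a usage sketch, a call such as
     print_multi_reg (stream, "ldmfd\t%ssp!", 0x8010, FALSE);
   would emit "ldmfd sp!, {r4, pc}": bits 4 and 15 of MASK select r4 and
   pc, and HAT == FALSE omits the trailing "^".  */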

/* Output a 'call' insn.  */

char *
output_call (operands)
     rtx *operands;
{
  /* Handle calls to lr using ip (which may be clobbered in subr anyway).  */

  if (REGNO (operands[0]) == 14)
    {
      operands[0] = gen_rtx_REG (SImode, 12);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }
  output_asm_insn ("mov%?\t%|lr, %|pc", operands);

  if (TARGET_THUMB_INTERWORK)
    output_asm_insn ("bx%?\t%0", operands);
  else
    output_asm_insn ("mov%?\t%|pc, %0", operands);

  return "";
}

static int
eliminate_lr2ip (x)
     rtx *x;
{
  int something_changed = 0;
  rtx x0 = *x;
  int code = GET_CODE (x0);
  register int i, j;
  register char *fmt;

  switch (code)
    {
    case REG:
      if (REGNO (x0) == 14)
        {
          *x = gen_rtx_REG (SImode, 12);
          return 1;
        }
      return 0;
    default:
      /* Scan through the sub-elements and change any references there.  */
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (fmt[i] == 'e')
          something_changed |= eliminate_lr2ip (&XEXP (x0, i));
        else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x0, i); j++)
            something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
      return something_changed;
    }
}

/* Output a 'call' insn that is a reference in memory.  */

char *
output_call_mem (operands)
     rtx *operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful.  */
  /* Handle calls using lr by using ip (which may be clobbered in subr
     anyway).  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  if (TARGET_THUMB_INTERWORK)
    {
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else
    {
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}


/* Output a move from arm registers to an fpu register.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first register of an arm register pair.  */

char *
output_mov_long_double_fpu_from_arm (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[3];

  if (arm_reg0 == 12)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
  output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
  return "";
}

/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first register of an arm register pair.
   OPERANDS[1] is an fpu register.  */

char *
output_mov_long_double_arm_from_fpu (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[3];

  if (arm_reg0 == 12)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
  return "";
}

/* Output a move from arm registers to arm registers of a long double.
   OPERANDS[0] is the destination.
   OPERANDS[1] is the source.  */
char *
output_mov_long_double_arm_from_arm (operands)
     rtx *operands;
{
  /* We have to be careful here because the two might overlap.  */
  int dest_start = REGNO (operands[0]);
  int src_start = REGNO (operands[1]);
  rtx ops[2];
  int i;

  if (dest_start < src_start)
    {
      for (i = 0; i < 3; i++)
        {
          ops[0] = gen_rtx_REG (SImode, dest_start + i);
          ops[1] = gen_rtx_REG (SImode, src_start + i);
          output_asm_insn ("mov%?\t%0, %1", ops);
        }
    }
  else
    {
      for (i = 2; i >= 0; i--)
        {
          ops[0] = gen_rtx_REG (SImode, dest_start + i);
          ops[1] = gen_rtx_REG (SImode, src_start + i);
          output_asm_insn ("mov%?\t%0, %1", ops);
        }
    }

  return "";
}


/* Output a move from arm registers to an fpu register.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first register of an arm register pair.  */

char *
output_mov_double_fpu_from_arm (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[2];

  if (arm_reg0 == 12)
    abort ();
  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
  output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
  return "";
}

/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first register of an arm register pair.
   OPERANDS[1] is an fpu register.  */

char *
output_mov_double_arm_from_fpu (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[2];

  if (arm_reg0 == 12)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
  return "";
}

/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

char *
output_move_double (operands)
     rtx * operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
        {
          int reg1 = REGNO (operands[1]);
          if (reg1 == 12)
            abort ();

          /* Ensure the second source is not overwritten.  */
          if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
            output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
          else
            output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
        }
      else if (code1 == CONST_DOUBLE)
        {
          if (GET_MODE (operands[1]) == DFmode)
            {
              long l[2];
              union real_extract u;

              bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
                     sizeof (u));
              REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
              otherops[1] = GEN_INT (l[1]);
              operands[1] = GEN_INT (l[0]);
            }
          else if (GET_MODE (operands[1]) != VOIDmode)
            abort ();
          else if (WORDS_BIG_ENDIAN)
            {
              otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
              operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
            }
          else
            {
              otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
              operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
            }
          output_mov_immediate (operands);
          output_mov_immediate (otherops);
        }
      else if (code1 == CONST_INT)
        {
#if HOST_BITS_PER_WIDE_INT > 32
          /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
             what the upper word is.  */
          if (WORDS_BIG_ENDIAN)
            {
              otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
              operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
            }
          else
            {
              otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
              operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
            }
#else
          /* Sign extend the intval into the high-order word.  */
          if (WORDS_BIG_ENDIAN)
            {
              otherops[1] = operands[1];
              operands[1] = (INTVAL (operands[1]) < 0
                             ? constm1_rtx : const0_rtx);
            }
          else
            otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
          output_mov_immediate (otherops);
          output_mov_immediate (operands);
        }
      else if (code1 == MEM)
        {
          switch (GET_CODE (XEXP (operands[1], 0)))
            {
            case REG:
              output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
              break;

            case PRE_INC:
              abort (); /* Should never happen now.  */
              break;

            case PRE_DEC:
              output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
              break;

            case POST_INC:
              output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
              break;

            case POST_DEC:
              abort (); /* Should never happen now.  */
              break;

            case LABEL_REF:
            case CONST:
              output_asm_insn ("adr%?\t%0, %1", operands);
              output_asm_insn ("ldm%?ia\t%0, %M0", operands);
              break;

            default:
              if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
                                   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
                {
                  otherops[0] = operands[0];
                  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
                  otherops[2] = XEXP (XEXP (operands[1], 0), 1);
                  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
                    {
                      if (GET_CODE (otherops[2]) == CONST_INT)
                        {
                          switch (INTVAL (otherops[2]))
                            {
                            case -8:
                              output_asm_insn ("ldm%?db\t%1, %M0", otherops);
                              return "";
                            case -4:
                              output_asm_insn ("ldm%?da\t%1, %M0", otherops);
                              return "";
                            case 4:
                              output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
                              return "";
                            }
                          if (!(const_ok_for_arm (INTVAL (otherops[2]))))
                            output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
                          else
                            output_asm_insn ("add%?\t%0, %1, %2", otherops);
                        }
                      else
                        output_asm_insn ("add%?\t%0, %1, %2", otherops);
                    }
                  else
                    output_asm_insn ("sub%?\t%0, %1, %2", otherops);
                  return "ldm%?ia\t%0, %M0";
                }
              else
                {
                  otherops[1] = adj_offsettable_operand (operands[1], 4);
                  /* Take care of overlapping base/data reg.  */
                  if (reg_mentioned_p (operands[0], operands[1]))
                    {
                      output_asm_insn ("ldr%?\t%0, %1", otherops);
                      output_asm_insn ("ldr%?\t%0, %1", operands);
                    }
                  else
                    {
                      output_asm_insn ("ldr%?\t%0, %1", operands);
                      output_asm_insn ("ldr%?\t%0, %1", otherops);
                    }
                }
            }
        }
      else
        abort ();	/* Constraints should prevent this.  */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == 12)
        abort ();

      switch (GET_CODE (XEXP (operands[0], 0)))
        {
        case REG:
          output_asm_insn ("stm%?ia\t%m0, %M1", operands);
          break;

        case PRE_INC:
          abort (); /* Should never happen now.  */
          break;

        case PRE_DEC:
          output_asm_insn ("stm%?db\t%m0!, %M1", operands);
          break;

        case POST_INC:
          output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
          break;

        case POST_DEC:
          abort (); /* Should never happen now.  */
          break;

        case PLUS:
          if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
            {
              switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
                {
                case -8:
                  output_asm_insn ("stm%?db\t%m0, %M1", operands);
                  return "";

                case -4:
                  output_asm_insn ("stm%?da\t%m0, %M1", operands);
                  return "";

                case 4:
                  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
                  return "";
                }
            }
          /* Fall through.  */

        default:
          otherops[0] = adj_offsettable_operand (operands[0], 4);
          otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
          output_asm_insn ("str%?\t%1, %0", operands);
          output_asm_insn ("str%?\t%1, %0", otherops);
        }
    }
  else
    abort ();	/* Constraints should prevent this.  */

  return "";
}
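
/* To illustrate the constant-offset cases above: a double-word load from
   [rb, #4] becomes "ldmib rb, {rd0, rd1}", from [rb, #-4] "ldmda", and
   from [rb, #-8] "ldmdb"; any other offset is first folded into the
   destination register with an add or sub, after which an "ldmia" from
   that register finishes the move.  */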

/* Output an arbitrary MOV reg, #n.
   OPERANDS[0] is a register.  OPERANDS[1] is a const_int.  */

char *
output_mov_immediate (operands)
     rtx *operands;
{
  HOST_WIDE_INT n = INTVAL (operands[1]);
  int n_ones = 0;
  int i;

  /* Try to use one MOV.  */
  if (const_ok_for_arm (n))
    {
      output_asm_insn ("mov%?\t%0, %1", operands);
      return "";
    }

  /* Try to use one MVN.  */
  if (const_ok_for_arm (~n))
    {
      operands[1] = GEN_INT (~n);
      output_asm_insn ("mvn%?\t%0, %1", operands);
      return "";
    }

  /* If all else fails, make it out of ORRs or BICs as appropriate.  */

  for (i = 0; i < 32; i++)
    if (n & 1 << i)
      n_ones++;

  if (n_ones > 16)  /* Shorter to use MVN with BIC in this case.  */
    output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
                            ~n);
  else
    output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
                            n);

  return "";
}
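
/* For instance, n = 0xFFE0FFE0 has 22 set bits, so it is cheaper to build
   the complement: ~n = 0x001F001F yields "mvn rd, #31" followed by
   "bic rd, rd, #0x1F0000", two insns instead of the four (one MOV plus
   three ORRs) that building n directly would take.  */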

/* Output an ADD r, s, #n where n may be too big for one instruction.  If
   adding zero to one register, output nothing.  */

char *
output_add_immediate (operands)
     rtx *operands;
{
  HOST_WIDE_INT n = INTVAL (operands[2]);

  if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
    {
      if (n < 0)
        output_multi_immediate (operands,
                                "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
                                -n);
      else
        output_multi_immediate (operands,
                                "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
                                n);
    }

  return "";
}
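
/* For example, operands[2] = 0x1004 is not a valid single immediate, so
   this emits "add rd, rs, #4" followed by "add rd, rd, #4096", splitting
   the constant into 8-bit fields at even bit positions.  */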

/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.  */

static char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx *operands;
     char *instr1, *instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands); /* Quick and easy output.  */
    }
  else
    {
      int i;
      char *instr = instr1;

      /* Note that n is never zero here (which would give no output).  */
      for (i = 0; i < 32; i += 2)
        {
          if (n & (3 << i))
            {
              operands[immed_op] = GEN_INT (n & (255 << i));
              output_asm_insn (instr, operands);
              instr = instr2;
              i += 6;
            }
        }
    }
  return "";
}
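
/* The scan above walks bit pairs so that each emitted constant is an
   8-bit field starting at an even bit position, which is exactly what an
   ARM data-processing immediate can encode.  For n = 0x1F00001F it finds
   the field at bit 0 ("mov rd, #31") and the field at bit 24
   ("orr rd, rd, #0x1F000000").  */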

/* Return the appropriate ARM instruction for the operation code.
   The returned result should not be overwritten.  OP is the rtx of the
   operation.  SHIFT_FIRST_ARG is TRUE if the first argument of the operator
   was shifted.  */

char *
arithmetic_instr (op, shift_first_arg)
     rtx op;
     int shift_first_arg;
{
  switch (GET_CODE (op))
    {
    case PLUS:
      return "add";

    case MINUS:
      return shift_first_arg ? "rsb" : "sub";

    case IOR:
      return "orr";

    case XOR:
      return "eor";

    case AND:
      return "and";

    default:
      abort ();
    }
}


/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or the
   amount of a constant shift.  */

static char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  char *mnem;
  enum rtx_code code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
         power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
        *amountp = int_log2 (*amountp);
      else
        abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
         multiplication by a power of 2 with the recognizer for a
         shift.  >=32 is not a valid shift for "asl", so we must try and
         output a shift that produces the correct arithmetical result.
         Using lsr #32 is identical except for the fact that the carry bit
         is not set correctly if we set the flags; but we never use the
         carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
        *amountp &= 31;		/* Rotate is just modulo 32.  */
      else if (*amountp != (*amountp & 31))
        {
          if (code == ASHIFT)
            mnem = "lsr";
          *amountp = 32;
        }

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
        return NULL;
    }

  return mnem;
}
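
/* Thus a multiply by 8 that reaches here comes out as "asl" with
   *amountp = 3, while an ASHIFT by a constant of 32 or more is emitted
   as "lsr #32" to force a zero result without upsetting any reader of
   the (unused) carry bit.  */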

/* Obtain the shift from the POWER of two.  */

static HOST_WIDE_INT
int_log2 (power)
     HOST_WIDE_INT power;
{
  HOST_WIDE_INT shift = 0;

  while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
    {
      if (shift > 31)
        abort ();
      shift++;
    }

  return shift;
}

/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  */

void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     unsigned char *p;
     int len;
{
  int i;
  int len_so_far = 1000;
  int chars_so_far = 0;

  for (i = 0; i < len; i++)
    {
      register int c = p[i];

      if (len_so_far > 50)
        {
          if (chars_so_far)
            fputs ("\"\n", stream);
          fputs ("\t.ascii\t\"", stream);
          len_so_far = 0;
          chars_so_far = 0;
        }

      if (c == '\"' || c == '\\')
        {
          putc ('\\', stream);
          len_so_far++;
        }

      if (c >= ' ' && c < 0177)
        {
          putc (c, stream);
          len_so_far++;
        }
      else
        {
          fprintf (stream, "\\%03o", c);
          len_so_far += 4;
        }

      chars_so_far++;
    }

  fputs ("\"\n", stream);
}
\f

/* Try to determine whether a pattern really clobbers the link register.
   This information is useful when peepholing, so that lr need not be pushed
   if we combine a call followed by a return.
   NOTE: This code does not check for side-effect expressions in a SET_SRC:
   such a check should not be needed because these only update an existing
   value within a register; the register must still be set elsewhere within
   the function.  */

static int
pattern_really_clobbers_lr (x)
     rtx x;
{
  int i;

  switch (GET_CODE (x))
    {
    case SET:
      switch (GET_CODE (SET_DEST (x)))
        {
        case REG:
          return REGNO (SET_DEST (x)) == 14;

        case SUBREG:
          if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
            return REGNO (XEXP (SET_DEST (x), 0)) == 14;

          if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
            return 0;
          abort ();

        default:
          return 0;
        }

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
        if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      switch (GET_CODE (XEXP (x, 0)))
        {
        case REG:
          return REGNO (XEXP (x, 0)) == 14;

        case SUBREG:
          if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
            return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
          abort ();

        default:
          return 0;
        }

    case UNSPEC:
      return 1;

    default:
      return 0;
    }
}

static int
function_really_clobbers_lr (first)
     rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
        {
        case BARRIER:
        case NOTE:
        case CODE_LABEL:
        case JUMP_INSN:	/* Jump insns only change the PC (and conds).  */
        case INLINE_HEADER:
          break;

        case INSN:
          if (pattern_really_clobbers_lr (PATTERN (insn)))
            return 1;
          break;

        case CALL_INSN:
          /* Don't yet know how to handle those calls that are not to a
             SYMBOL_REF.  */
          if (GET_CODE (PATTERN (insn)) != PARALLEL)
            abort ();

          switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
            {
            case CALL:
              if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
                  != SYMBOL_REF)
                return 1;
              break;

            case SET:
              if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
                                                          0, 0)), 0), 0))
                  != SYMBOL_REF)
                return 1;
              break;

            default:	/* Don't recognize it, be safe.  */
              return 1;
            }

          /* A call can be made (by peepholing) not to clobber lr iff it is
             followed by a return.  There may, however, be a use insn iff
             we are returning the result of the call.
             If we run off the end of the insn chain, then that means the
             call was at the end of the function.  Unfortunately we don't
             have a return insn for the peephole to recognize, so we
             must reject this.  (Can this be fixed by adding our own insn?)  */
          if ((next = next_nonnote_insn (insn)) == NULL)
            return 1;

          /* No need to worry about lr if the call never returns.  */
          if (GET_CODE (next) == BARRIER)
            break;

          if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
              && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
              && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
                  == REGNO (XEXP (PATTERN (next), 0))))
            if ((next = next_nonnote_insn (next)) == NULL)
              return 1;

          if (GET_CODE (next) == JUMP_INSN
              && GET_CODE (PATTERN (next)) == RETURN)
            break;
          return 1;

        default:
          abort ();
        }
    }

  /* We have reached the end of the chain so lr was _not_ clobbered.  */
  return 0;
}

char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char instr[100];
  int reg, live_regs = 0;
  int volatile_func = (optimize > 0
                       && TREE_THIS_VOLATILE (current_function_decl));

  return_used_this_function = 1;

  if (volatile_func)
    {
      rtx ops[2];
      /* If this function was declared non-returning, and we have found a tail
         call, then we have to trust that the called function won't return.  */
      if (! really_return)
        return "";

      /* Otherwise, trap an attempted return by aborting.  */
      ops[0] = operand;
      ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_GOT ? "abort(PLT)"
                                   : "abort");
      assemble_external_libcall (ops[1]);
      output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
      return "";
    }

  if (current_function_calls_alloca && ! really_return)
    abort ();

  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs++;

  if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
    live_regs++;

  if (frame_pointer_needed)
    live_regs += 4;

  if (live_regs)
    {
      if (lr_save_eliminated || ! regs_ever_live[14])
        live_regs++;

      if (frame_pointer_needed)
        strcpy (instr,
                reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
      else
        strcpy (instr,
                reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");

      for (reg = 0; reg <= 10; reg++)
        if (regs_ever_live[reg] && ! call_used_regs[reg])
          {
            strcat (instr, "%|");
            strcat (instr, reg_names[reg]);
            if (--live_regs)
              strcat (instr, ", ");
          }

      if (frame_pointer_needed)
        {
          strcat (instr, "%|");
          strcat (instr, reg_names[11]);
          strcat (instr, ", ");
          strcat (instr, "%|");
          strcat (instr, reg_names[13]);
          strcat (instr, ", ");
          strcat (instr, "%|");
          strcat (instr, TARGET_THUMB_INTERWORK || (! really_return)
                  ? reg_names[14] : reg_names[15]);
        }
      else
        {
          strcat (instr, "%|");
          if (TARGET_THUMB_INTERWORK && really_return)
            strcat (instr, reg_names[12]);
          else
            strcat (instr, really_return ? reg_names[15] : reg_names[14]);
        }
      strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);

      if (TARGET_THUMB_INTERWORK && really_return)
        {
          strcpy (instr, "bx%?");
          strcat (instr, reverse ? "%D0" : "%d0");
          strcat (instr, "\t%|");
          strcat (instr, frame_pointer_needed ? "lr" : "ip");

          output_asm_insn (instr, & operand);
        }
    }
  else if (really_return)
    {
      if (TARGET_THUMB_INTERWORK)
        sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
      else
        sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
                 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");

      output_asm_insn (instr, & operand);
    }

  return "";
}
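
/* As a rough illustration: for a leaf function that saved only r4 and lr,
   with no frame pointer and interworking disabled, an unconditional return
   comes out as "ldmfd sp!, {r4, pc}" (with a trailing "^" when not
   compiling for APCS-32); with interworking enabled the pc is not popped
   directly and the sequence instead ends in a "bx".  */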

/* Return nonzero if optimizing and the current function is volatile.
   Such functions never return, and many memory cycles can be saved
   by not storing register values that will never be needed again.
   This optimization was added to speed up context switching in a
   kernel application.  */

int
arm_volatile_func ()
{
  return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
}

/* Write the function name into the code section, directly preceding
   the function prologue.

   Code will be output similar to this:
     t0
	 .ascii "arm_poke_function_name", 0
	 .align
     t1
	 .word 0xff000000 + (t1 - t0)
     arm_poke_function_name
	 mov     ip, sp
	 stmfd   sp!, {fp, ip, lr, pc}
	 sub     fp, ip, #4

   When performing a stack backtrace, code can inspect the value
   of 'pc' stored at 'fp' + 0.  If the trace function then looks
   at location pc - 12 and the top 8 bits are set, then we know
   that there is a function name embedded immediately preceding this
   location, of length ((pc[-3]) & ~0xff000000).

   We assume that pc is declared as a pointer to an unsigned long.

   It is of no benefit to output the function name if we are assembling
   a leaf function.  These function types will not contain a stack
   backtrace structure, therefore it is not possible to determine the
   function name.  */

void
arm_poke_function_name (stream, name)
     FILE * stream;
     char * name;
{
  unsigned long alignlength;
  unsigned long length;
  rtx x;

  length = strlen (name);
  alignlength = ((length + 1) + 3) & ~3;

  ASM_OUTPUT_ASCII (stream, name, length + 1);
  ASM_OUTPUT_ALIGN (stream, 2);
  x = GEN_INT (0xff000000UL + alignlength);
  ASM_OUTPUT_INT (stream, x);
}
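
/* For example, name = "arm_poke_function_name" has length 22, so
   alignlength = (23 + 3) & ~3 = 24 and the word emitted after the padded
   string is 0xff000018, matching the marker layout described above.  */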
5449
ff9940b0
RE
5450/* The amount of stack adjustment that happens here, in output_return and in
5451 output_epilogue must be exactly the same as was calculated during reload,
5452 or things will point to the wrong place. The only time we can safely
5453 ignore this constraint is when a function has no arguments on the stack,
5454 no stack frame requirement and no live registers execpt for `lr'. If we
5455 can guarantee that by making all function calls into tail calls and that
5456 lr is not clobbered in any other way, then there is no need to push lr
5457 onto the stack. */
5458
cce8749e 5459void
f3bb6135 5460output_func_prologue (f, frame_size)
cce8749e
CH
5461 FILE *f;
5462 int frame_size;
5463{
f3bb6135 5464 int reg, live_regs_mask = 0;
e2c671ba
RE
5465 int volatile_func = (optimize > 0
5466 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 5467
cce8749e
CH
5468 /* Nonzero if we must stuff some register arguments onto the stack as if
5469 they were passed there. */
5470 int store_arg_regs = 0;
5471
abaa26e5
RE
5472 if (arm_ccfsm_state || arm_target_insn)
5473 abort (); /* Sanity check */
31fdb4d5
DE
5474
5475 if (arm_naked_function_p (current_function_decl))
5476 return;
5477
ff9940b0
RE
5478 return_used_this_function = 0;
5479 lr_save_eliminated = 0;
5480
f3139301
DE
5481 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
5482 ASM_COMMENT_START, current_function_args_size,
1d5473cb 5483 current_function_pretend_args_size, frame_size);
f3139301
DE
5484 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
5485 ASM_COMMENT_START, frame_pointer_needed,
1d5473cb 5486 current_function_anonymous_args);
cce8749e 5487
e2c671ba 5488 if (volatile_func)
f3139301 5489 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
e2c671ba 5490
cce8749e
CH
5491 if (current_function_anonymous_args && current_function_pretend_args_size)
5492 store_arg_regs = 1;
5493
f3bb6135
RE
5494 for (reg = 0; reg <= 10; reg++)
5495 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e
CH
5496 live_regs_mask |= (1 << reg);
5497
ff9940b0 5498 if (frame_pointer_needed)
e2c671ba 5499 live_regs_mask |= 0xD800;
cce8749e 5500 else if (regs_ever_live[14])
ff9940b0
RE
5501 {
5502 if (! current_function_args_size
f3bb6135 5503 && ! function_really_clobbers_lr (get_insns ()))
e2c671ba 5504 lr_save_eliminated = 1;
ff9940b0
RE
5505 else
5506 live_regs_mask |= 0x4000;
5507 }
cce8749e 5508
cce8749e
CH
5509 if (live_regs_mask)
5510 {
ff9940b0
RE
5511 /* if a di mode load/store multiple is used, and the base register
5512 is r3, then r4 can become an ever live register without lr
5513 doing so, in this case we need to push lr as well, or we
5514 will fail to get a proper return. */
5515
5516 live_regs_mask |= 0x4000;
5517 lr_save_eliminated = 0;
f3bb6135 5518
cce8749e
CH
5519 }
5520
e2c671ba 5521 if (lr_save_eliminated)
f3139301
DE
5522 fprintf (f,"\t%s I don't think this function clobbers lr\n",
5523 ASM_COMMENT_START);
32de079a
RE
5524
5525#ifdef AOF_ASSEMBLER
5526 if (flag_pic)
5527 fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
5528 reg_names[PIC_OFFSET_TABLE_REGNUM]);
5529#endif
f3bb6135 5530}
cce8749e
CH
5531
5532
5533void
f3bb6135 5534output_func_epilogue (f, frame_size)
cce8749e
CH
5535 FILE *f;
5536 int frame_size;
5537{
b111229a
RE
5538 int reg, live_regs_mask = 0;
5539 /* If we need this then it will always be at least this much */
5540 int floats_offset = 12;
cce8749e 5541 rtx operands[3];
e2c671ba
RE
5542 int volatile_func = (optimize > 0
5543 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 5544
b36ba79f 5545 if (use_return_insn (FALSE) && return_used_this_function)
cce8749e 5546 {
56636818 5547 if ((frame_size + current_function_outgoing_args_size) != 0
f5a1b0d2 5548 && !(frame_pointer_needed && TARGET_APCS))
56636818 5549 abort ();
f3bb6135 5550 goto epilogue_done;
cce8749e 5551 }

  /* Naked functions don't have epilogues.  */
  if (arm_naked_function_p (current_function_decl))
    goto epilogue_done;

  /* A volatile function should never return.  Call abort.  */
  if (TARGET_ABORT_NORETURN && volatile_func)
    {
      rtx op;
      op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_GOT ? "abort(PLT)" : "abort");
      assemble_external_libcall (op);
      output_asm_insn ("bl\t%a0", &op);
      goto epilogue_done;
    }

  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      {
        live_regs_mask |= (1 << reg);
        floats_offset += 4;
      }

  if (frame_pointer_needed)
    {
      if (arm_fpu_arch == FP_SOFT2)
        {
          for (reg = 23; reg > 15; reg--)
            if (regs_ever_live[reg] && ! call_used_regs[reg])
              {
                floats_offset += 12;
                fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
                         reg_names[reg], REGISTER_PREFIX, floats_offset);
              }
        }
      else
        {
          int start_reg = 23;

          for (reg = 23; reg > 15; reg--)
            {
              if (regs_ever_live[reg] && ! call_used_regs[reg])
                {
                  floats_offset += 12;
                  /* We can't unstack more than four registers at once.  */
                  if (start_reg - reg == 3)
                    {
                      fprintf (f, "\tlfm\t%s%s, 4, [%sfp, #-%d]\n",
                               REGISTER_PREFIX, reg_names[reg],
                               REGISTER_PREFIX, floats_offset);
                      start_reg = reg - 1;
                    }
                }
              else
                {
                  if (reg != start_reg)
                    fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
                             REGISTER_PREFIX, reg_names[reg + 1],
                             start_reg - reg, REGISTER_PREFIX, floats_offset);

                  start_reg = reg - 1;
                }
            }

          /* Just in case the last register checked also needs unstacking.  */
          if (reg != start_reg)
            fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
                     REGISTER_PREFIX, reg_names[reg + 1],
                     start_reg - reg, REGISTER_PREFIX, floats_offset);
        }

      if (TARGET_THUMB_INTERWORK)
        {
          live_regs_mask |= 0x6800;     /* fp, sp and lr */
          print_multi_reg (f, "ldmea\t%sfp", live_regs_mask, FALSE);
          fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
        }
      else
        {
          live_regs_mask |= 0xA800;     /* fp, sp and pc */
          print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
                           TARGET_APCS_32 ? FALSE : TRUE);
        }
    }
  else
    {
      /* Restore stack pointer if necessary.  */
      if (frame_size + current_function_outgoing_args_size != 0)
        {
          operands[0] = operands[1] = stack_pointer_rtx;
          operands[2] = GEN_INT (frame_size
                                 + current_function_outgoing_args_size);
          output_add_immediate (operands);
        }

      if (arm_fpu_arch == FP_SOFT2)
        {
          for (reg = 16; reg < 24; reg++)
            if (regs_ever_live[reg] && ! call_used_regs[reg])
              fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
                       reg_names[reg], REGISTER_PREFIX);
        }
      else
        {
          int start_reg = 16;

          for (reg = 16; reg < 24; reg++)
            {
              if (regs_ever_live[reg] && ! call_used_regs[reg])
                {
                  if (reg - start_reg == 3)
                    {
                      fprintf (f, "\tlfmfd\t%s%s, 4, [%ssp]!\n",
                               REGISTER_PREFIX, reg_names[start_reg],
                               REGISTER_PREFIX);
                      start_reg = reg + 1;
                    }
                }
              else
                {
                  if (reg != start_reg)
                    fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
                             REGISTER_PREFIX, reg_names[start_reg],
                             reg - start_reg, REGISTER_PREFIX);

                  start_reg = reg + 1;
                }
            }

          /* Just in case the last register checked also needs unstacking.  */
          if (reg != start_reg)
            fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
                     REGISTER_PREFIX, reg_names[start_reg],
                     reg - start_reg, REGISTER_PREFIX);
        }

      if (current_function_pretend_args_size == 0 && regs_ever_live[14])
        {
          if (TARGET_THUMB_INTERWORK)
            {
              if (! lr_save_eliminated)
                live_regs_mask |= 0x4000;

              if (live_regs_mask != 0)
                print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);

              fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
            }
          else if (lr_save_eliminated)
            fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
                         : "\tmovs\t%spc, %slr\n"),
                     REGISTER_PREFIX, REGISTER_PREFIX);
          else
            print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
                             TARGET_APCS_32 ? FALSE : TRUE);
        }
      else
        {
          if (live_regs_mask || regs_ever_live[14])
            {
              /* Restore the integer regs, and the return address into lr.  */
              if (! lr_save_eliminated)
                live_regs_mask |= 0x4000;

              if (live_regs_mask != 0)
                print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
            }

          if (current_function_pretend_args_size)
            {
              /* Unwind the pre-pushed regs.  */
              operands[0] = operands[1] = stack_pointer_rtx;
              operands[2] = GEN_INT (current_function_pretend_args_size);
              output_add_immediate (operands);
            }

          /* And finally, go home.  */
          if (TARGET_THUMB_INTERWORK)
            fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
          else if (TARGET_APCS_32)
            fprintf (f, "\tmov\t%spc, %slr\n", REGISTER_PREFIX, REGISTER_PREFIX);
          else
            fprintf (f, "\tmovs\t%spc, %slr\n", REGISTER_PREFIX, REGISTER_PREFIX);
        }
    }

epilogue_done:

  /* Reset the ARM-specific per-function variables.  */
  current_function_anonymous_args = 0;
  after_arm_reorg = 0;
}
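
/* A sketch of the frame-pointer epilogue shape produced above, assuming
   r4-r6 were saved, APCS-32 and no Thumb interworking (illustrative only):

        ldmea   fp, {r4, r5, r6, fp, sp, pc}

   i.e. the saved registers, the old frame and stack pointers, and the
   return address are all restored by a single load-multiple.  */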

static void
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int i, j;
  rtx par;

  for (i = 0; i < 16; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));

  for (i = 0; i < 16; i++)
    {
      if (mask & (1 << i))
        {
          XVECEXP (par, 0, 0)
            = gen_rtx_SET (VOIDmode,
                           gen_rtx_MEM (BLKmode,
                                        gen_rtx_PRE_DEC (BLKmode,
                                                         stack_pointer_rtx)),
                           gen_rtx_UNSPEC (BLKmode,
                                           gen_rtvec (1,
                                                      gen_rtx_REG (SImode, i)),
                                           2));
          break;
        }
    }

  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
        {
          XVECEXP (par, 0, j)
            = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, i));
          j++;
        }
    }

  emit_insn (par);
}
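
/* For example, emit_multi_reg_push (0x4010) -- r4 and lr -- builds
   roughly this RTL (a sketch, not verbatim):

        (parallel [(set (mem:BLK (pre_dec:BLK (reg:SI sp)))
                        (unspec:BLK [(reg:SI 4)] 2))
                   (use (reg:SI 14))])

   which the matching store-multiple pattern in arm.md outputs as a
   single "stmfd sp!, {r4, lr}".  */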

static void
emit_sfm (base_reg, count)
     int base_reg;
     int count;
{
  rtx par;
  int i;

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
                   gen_rtx_MEM (BLKmode,
                                gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
                   gen_rtx_UNSPEC (BLKmode,
                                   gen_rtvec (1, gen_rtx_REG (XFmode,
                                                              base_reg++)),
                                   2));
  for (i = 1; i < count; i++)
    XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode,
                                       gen_rtx_REG (XFmode, base_reg++));

  emit_insn (par);
}
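
/* Similarly, emit_sfm (21, 3), say, describes a store-multiple of f5-f7
   (hard regs 21-23): one SET through an UNSPEC plus two USEs, which the
   matching pattern outputs as "sfmfd f5, 3, [sp]!" (illustrative).  */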

void
arm_expand_prologue ()
{
  int reg;
  rtx amount = GEN_INT (-(get_frame_size ()
                          + current_function_outgoing_args_size));
  int live_regs_mask = 0;
  int store_arg_regs = 0;
  int volatile_func = (optimize > 0
                       && TREE_THIS_VOLATILE (current_function_decl));

  /* Naked functions don't have prologues.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  if (! volatile_func)
    for (reg = 0; reg <= 10; reg++)
      if (regs_ever_live[reg] && ! call_used_regs[reg])
        live_regs_mask |= 1 << reg;

  if (! volatile_func && regs_ever_live[14])
    live_regs_mask |= 0x4000;           /* lr */

  if (frame_pointer_needed)
    {
      live_regs_mask |= 0xD800;         /* fp, ip, lr and pc */
      emit_insn (gen_movsi (gen_rtx_REG (SImode, 12),
                            stack_pointer_rtx));
    }

  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
        /* (0xf0 >> (size / 4)) & 0xf picks out the highest-numbered
           argument registers: e.g. a pretend size of 8 gives 0xc,
           pushing r2 and r3.  */
        emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
                             & 0xf);
      else
        emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                               GEN_INT (-current_function_pretend_args_size)));
    }

  if (live_regs_mask)
    {
      /* If we have to push any regs, then we must push lr as well, or
         we won't get a proper return.  */
      live_regs_mask |= 0x4000;
      emit_multi_reg_push (live_regs_mask);
    }

  /* For now the integer regs are still pushed in output_func_epilogue ().  */

  if (! volatile_func)
    {
      if (arm_fpu_arch == FP_SOFT2)
        {
          for (reg = 23; reg > 15; reg--)
            if (regs_ever_live[reg] && ! call_used_regs[reg])
              emit_insn (gen_rtx_SET
                         (VOIDmode,
                          gen_rtx_MEM (XFmode,
                                       gen_rtx_PRE_DEC (XFmode,
                                                        stack_pointer_rtx)),
                          gen_rtx_REG (XFmode, reg)));
        }
      else
        {
          int start_reg = 23;

          for (reg = 23; reg > 15; reg--)
            {
              if (regs_ever_live[reg] && ! call_used_regs[reg])
                {
                  if (start_reg - reg == 3)
                    {
                      emit_sfm (reg, 4);
                      start_reg = reg - 1;
                    }
                }
              else
                {
                  if (start_reg != reg)
                    emit_sfm (reg + 1, start_reg - reg);
                  start_reg = reg - 1;
                }
            }

          if (start_reg != reg)
            emit_sfm (reg + 1, start_reg - reg);
        }
    }

  if (frame_pointer_needed)
    emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx_REG (SImode, 12),
                           (GEN_INT
                            (-(4 + current_function_pretend_args_size)))));

  if (amount != const0_rtx)
    {
      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  Similarly if the user has requested no
     scheduling in the prologue.  */
  if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
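
/* Putting the pieces together, a function that needs a frame pointer and
   saves r4 might get a prologue along these lines (illustrative only;
   SIZE stands for the local frame plus outgoing argument space):

        mov     ip, sp
        stmfd   sp!, {r4, fp, ip, lr, pc}
        sub     fp, ip, #4
        sub     sp, sp, #SIZE
 */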

\f
/* If CODE is 'd', then X is a condition operand and the instruction
   should only be executed if the condition is true.
   If CODE is 'D', then X is a condition operand and the instruction
   should only be executed if the condition is false: however, if the mode
   of the comparison is CCFPEmode, then always execute the instruction -- we
   do this because in these circumstances !GE does not necessarily imply LT;
   in these cases the instruction pattern will take care to make sure that
   an instruction containing %d will follow, thereby undoing the effects of
   doing this instruction unconditionally.
   If CODE is 'N' then X is a floating point operand that must be negated
   before output.
   If CODE is 'B' then output a bitwise inverted value of X (a const int).
   If X is a REG and CODE is `M', output a ldm/stm style multi-reg.  */

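/* For instance, in a pattern template such as "mov%d3\t%0, %1"
   (hypothetical), %d3 prints the condition of operand 3 -- "eq" for an
   EQ comparison, giving "moveq" -- while "mov%D3" would print the
   inverse condition, "movne".  */
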
void
arm_print_operand (stream, x, code)
     FILE *stream;
     rtx x;
     int code;
{
  switch (code)
    {
    case '@':
      fputs (ASM_COMMENT_START, stream);
      return;

    case '|':
      fputs (REGISTER_PREFIX, stream);
      return;

    case '?':
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
        fputs (arm_condition_codes[arm_current_cc], stream);
      return;

    case 'N':
      {
        REAL_VALUE_TYPE r;
        REAL_VALUE_FROM_CONST_DOUBLE (r, x);
        r = REAL_VALUE_NEGATE (r);
        fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':
      if (GET_CODE (x) == CONST_INT)
        {
          HOST_WIDE_INT val;
          val = ARM_SIGN_EXTEND (~ INTVAL (x));
          fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
        }
      else
        {
          putc ('~', stream);
          output_addr_const (stream, x);
        }
      return;

    case 'i':
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':
      {
        HOST_WIDE_INT val;
        char * shift = shift_op (x, & val);

        if (shift)
          {
            fprintf (stream, ", %s ", shift);
            if (val == -1)
              arm_print_operand (stream, XEXP (x, 1), 0);
            else
              {
                fputc ('#', stream);
                fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
              }
          }
      }
      return;

    case 'Q':
      if (REGNO (x) > 15)
        abort ();
      fputs (REGISTER_PREFIX, stream);
      fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
      return;

    case 'R':
      if (REGNO (x) > 15)
        abort ();
      fputs (REGISTER_PREFIX, stream);
      fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
      return;

    case 'm':
      fputs (REGISTER_PREFIX, stream);
      if (GET_CODE (XEXP (x, 0)) == REG)
        fputs (reg_names[REGNO (XEXP (x, 0))], stream);
      else
        fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
      return;

    case 'M':
      fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
               REGISTER_PREFIX, reg_names[REGNO (x) - 1
                                          + ((GET_MODE_SIZE (GET_MODE (x))
                                              + GET_MODE_SIZE (SImode) - 1)
                                             / GET_MODE_SIZE (SImode))]);
      return;

    case 'd':
      if (x)
        fputs (arm_condition_codes[get_arm_condition_code (x)],
               stream);
      return;

    case 'D':
      if (x)
        fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
                                   (get_arm_condition_code (x))],
               stream);
      return;

    default:
      if (x == 0)
        abort ();

      if (GET_CODE (x) == REG)
        {
          fputs (REGISTER_PREFIX, stream);
          fputs (reg_names[REGNO (x)], stream);
        }
      else if (GET_CODE (x) == MEM)
        {
          output_memory_reference_mode = GET_MODE (x);
          output_address (XEXP (x, 0));
        }
      else if (GET_CODE (x) == CONST_DOUBLE)
        fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
        abort ();               /* This should never happen now.  */
      else
        {
          fputc ('#', stream);
          output_addr_const (stream, x);
        }
    }
}

\f
/* A finite state machine takes care of noticing whether or not
   instructions can be conditionally executed, and thus decreases
   execution time and code size by deleting branch instructions.  The
   fsm is controlled by final_prescan_insn, and controls the actions of
   ASM_OUTPUT_OPCODE.  */

/* The states of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: make ASM_OUTPUT_OPCODE not output this instruction
   2: make ASM_OUTPUT_OPCODE not output this instruction
   3: make instructions conditional
   4: make instructions conditional

   State transitions (state->state by whom under condition):
   0 -> 1 final_prescan_insn if the `target' is a label
   0 -> 2 final_prescan_insn if the `target' is an unconditional branch
   1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
   2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
   3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
          (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
   4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
          (the target insn is arm_target_insn).

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   XXX In case the `target' is an unconditional branch, this conditionalising
   of the instructions always reduces code size, but not always execution
   time.  But then, I want to reduce the code size to somewhere near what
   /bin/cc produces.  */
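
/* As an illustration (hypothetical code), the fsm transforms

        cmp     r0, #0
        beq     .L2
        add     r1, r1, #1
   .L2:

   into

        cmp     r0, #0
        addne   r1, r1, #1

   eliminating the branch, provided that no more than max_insns_skipped
   insns are skipped.  */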

/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static enum arm_cond_code
get_arm_condition_code (comparison)
     rtx comparison;
{
  enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
  register int code;
  register enum rtx_code comp_code = GET_CODE (comparison);

  if (GET_MODE_CLASS (mode) != MODE_CC)
    mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
                           XEXP (comparison, 1));

  switch (mode)
    {
    case CC_DNEmode: code = ARM_NE; goto dominance;
    case CC_DEQmode: code = ARM_EQ; goto dominance;
    case CC_DGEmode: code = ARM_GE; goto dominance;
    case CC_DGTmode: code = ARM_GT; goto dominance;
    case CC_DLEmode: code = ARM_LE; goto dominance;
    case CC_DLTmode: code = ARM_LT; goto dominance;
    case CC_DGEUmode: code = ARM_CS; goto dominance;
    case CC_DGTUmode: code = ARM_HI; goto dominance;
    case CC_DLEUmode: code = ARM_LS; goto dominance;
    case CC_DLTUmode: code = ARM_CC;

    dominance:
      if (comp_code != EQ && comp_code != NE)
        abort ();

      if (comp_code == EQ)
        return ARM_INVERSE_CONDITION_CODE (code);
      return code;

    case CC_NOOVmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        case GE: return ARM_PL;
        case LT: return ARM_MI;
        default: abort ();
        }

    case CC_Zmode:
    case CCFPmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        default: abort ();
        }

    case CCFPEmode:
      switch (comp_code)
        {
        case GE: return ARM_GE;
        case GT: return ARM_GT;
        case LE: return ARM_LS;
        case LT: return ARM_MI;
        default: abort ();
        }

    case CC_SWPmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        case GE: return ARM_LE;
        case GT: return ARM_LT;
        case LE: return ARM_GE;
        case LT: return ARM_GT;
        case GEU: return ARM_LS;
        case GTU: return ARM_CC;
        case LEU: return ARM_CS;
        case LTU: return ARM_HI;
        default: abort ();
        }

    case CC_Cmode:
      switch (comp_code)
        {
        case LTU: return ARM_CS;
        case GEU: return ARM_CC;
        default: abort ();
        }

    case CCmode:
      switch (comp_code)
        {
        case NE: return ARM_NE;
        case EQ: return ARM_EQ;
        case GE: return ARM_GE;
        case GT: return ARM_GT;
        case LE: return ARM_LE;
        case LT: return ARM_LT;
        case GEU: return ARM_CS;
        case GTU: return ARM_HI;
        case LEU: return ARM_LS;
        case LTU: return ARM_CC;
        default: abort ();
        }

    default: abort ();
    }

  abort ();
}
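
/* Note the swapped entries for CC_SWPmode above: if "a > b" was compiled
   as "cmp b, a", then GT must be tested with the LT condition, and
   likewise for the other inequalities.  */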

void
arm_final_prescan_insn (insn)
     rtx insn;
{
  /* BODY will hold the body of INSN.  */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick, and things need to be
     reversed if it appears to fail.  */
  int reverse = 0;

  /* Nonzero JUMP_CLOBBERS means that the conditions are clobbered if a
     branch is taken, even if the rtl suggests otherwise.  It also
     means that we have to grub around within the jump expression to find
     out what the conditions are when the jump isn't taken.  */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
        {
          arm_target_insn = NULL;
          arm_ccfsm_state = 0;
        }
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
        {
          start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == BARRIER)
            {
              /* XXX Isn't this always a barrier?  */
              start_insn = next_nonnote_insn (start_insn);
            }
          if (GET_CODE (start_insn) == CODE_LABEL
              && CODE_LABEL_NUMBER (start_insn) == arm_target_label
              && LABEL_NUSES (start_insn) == 1)
            reverse = TRUE;
          else
            return;
        }
      else if (GET_CODE (body) == RETURN)
        {
          start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == BARRIER)
            start_insn = next_nonnote_insn (start_insn);
          if (GET_CODE (start_insn) == CODE_LABEL
              && CODE_LABEL_NUMBER (start_insn) == arm_target_label
              && LABEL_NUSES (start_insn) == 1)
            {
              reverse = TRUE;
              seeking_return = 1;
            }
          else
            return;
        }
      else
        return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know.  */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
          || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
          && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
        {
          /* The code below is wrong for these, and I haven't time to
             fix it now.  So we just do the safe thing and return.  This
             whole function needs re-writing anyway.  */
          jump_clobbers = 1;
          return;
        }

      /* Register the insn jumped to.  */
      if (reverse)
        {
          if (!seeking_return)
            label = XEXP (SET_SRC (body), 0);
        }
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
        label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
        {
          label = XEXP (XEXP (SET_SRC (body), 2), 0);
          then_not_else = FALSE;
        }
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
        seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
        {
          seeking_return = 1;
          then_not_else = FALSE;
        }
      else
        abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
         insns are okay, and the label or unconditional branch to the same
         label is not too far away, succeed.  */
      for (insns_skipped = 0;
           !fail && !succeed && insns_skipped++ < max_insns_skipped;)
        {
          rtx scanbody;

          this_insn = next_nonnote_insn (this_insn);
          if (!this_insn)
            break;

          switch (GET_CODE (this_insn))
            {
            case CODE_LABEL:
              /* Succeed if it is the target label, otherwise fail since
                 control falls in from somewhere else.  */
              if (this_insn == label)
                {
                  if (jump_clobbers)
                    {
                      arm_ccfsm_state = 2;
                      this_insn = next_nonnote_insn (this_insn);
                    }
                  else
                    arm_ccfsm_state = 1;
                  succeed = TRUE;
                }
              else
                fail = TRUE;
              break;

            case BARRIER:
              /* Succeed if the following insn is the target label.
                 Otherwise fail.
                 If return insns are used then the last insn in a function
                 will be a barrier.  */
              this_insn = next_nonnote_insn (this_insn);
              if (this_insn && this_insn == label)
                {
                  if (jump_clobbers)
                    {
                      arm_ccfsm_state = 2;
                      this_insn = next_nonnote_insn (this_insn);
                    }
                  else
                    arm_ccfsm_state = 1;
                  succeed = TRUE;
                }
              else
                fail = TRUE;
              break;

            case CALL_INSN:
              /* If using 32-bit addresses the cc is not preserved over
                 calls.  */
              if (TARGET_APCS_32)
                {
                  /* Succeed if the following insn is the target label,
                     or if the following two insns are a barrier and
                     the target label.  */
                  this_insn = next_nonnote_insn (this_insn);
                  if (this_insn && GET_CODE (this_insn) == BARRIER)
                    this_insn = next_nonnote_insn (this_insn);

                  if (this_insn && this_insn == label
                      && insns_skipped < max_insns_skipped)
                    {
                      if (jump_clobbers)
                        {
                          arm_ccfsm_state = 2;
                          this_insn = next_nonnote_insn (this_insn);
                        }
                      else
                        arm_ccfsm_state = 1;
                      succeed = TRUE;
                    }
                  else
                    fail = TRUE;
                }
              break;

            case JUMP_INSN:
              /* If this is an unconditional branch to the same label, succeed.
                 If it is to another label, do nothing.  If it is conditional,
                 fail.  */
              /* XXX Probably, the tests for SET and the PC are unnecessary.  */

              scanbody = PATTERN (this_insn);
              if (GET_CODE (scanbody) == SET
                  && GET_CODE (SET_DEST (scanbody)) == PC)
                {
                  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
                      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
                    {
                      arm_ccfsm_state = 2;
                      succeed = TRUE;
                    }
                  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
                    fail = TRUE;
                }
              /* Fail if a conditional return is undesirable (e.g. on a
                 StrongARM), but still allow this if optimizing for size.  */
              else if (GET_CODE (scanbody) == RETURN
                       && ! use_return_insn (TRUE)
                       && ! optimize_size)
                fail = TRUE;
              else if (GET_CODE (scanbody) == RETURN
                       && seeking_return)
                {
                  arm_ccfsm_state = 2;
                  succeed = TRUE;
                }
              else if (GET_CODE (scanbody) == PARALLEL)
                {
                  switch (get_attr_conds (this_insn))
                    {
                    case CONDS_NOCOND:
                      break;
                    default:
                      fail = TRUE;
                      break;
                    }
                }
              break;

            case INSN:
              /* Instructions using or affecting the condition codes make it
                 fail.  */
              scanbody = PATTERN (this_insn);
              if (! (GET_CODE (scanbody) == SET
                     || GET_CODE (scanbody) == PARALLEL)
                  || get_attr_conds (this_insn) != CONDS_NOCOND)
                fail = TRUE;
              break;

            default:
              break;
            }
        }
      if (succeed)
        {
          if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
            arm_target_label = CODE_LABEL_NUMBER (label);
          else if (seeking_return || arm_ccfsm_state == 2)
            {
              while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
                {
                  this_insn = next_nonnote_insn (this_insn);
                  if (this_insn && (GET_CODE (this_insn) == BARRIER
                                    || GET_CODE (this_insn) == CODE_LABEL))
                    abort ();
                }
              if (!this_insn)
                {
                  /* Oh dear!  We ran off the end.  Give up.  */
                  recog (PATTERN (insn), insn, NULL_PTR);
                  arm_ccfsm_state = 0;
                  arm_target_insn = NULL;
                  return;
                }
              arm_target_insn = this_insn;
            }
          else
            abort ();

          if (jump_clobbers)
            {
              if (reverse)
                abort ();
              arm_current_cc =
                get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
                                                          0), 0), 1));
              if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
                arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
              if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
                arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
            }
          else
            {
              /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
                 what it was.  */
              if (!reverse)
                arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
                                                               0));
            }

          if (reverse || then_not_else)
            arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
        }

      /* Restore recog_operand.  Getting the attributes of other insns can
         destroy this array, but final.c assumes that it remains intact
         across this call; since the insn has been recognized already we
         call recog directly.  */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}

#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

rtx aof_pic_label = NULL_RTX;
struct pic_chain
{
  struct pic_chain *next;
  char *symname;
};

static struct pic_chain *aof_pic_chain = NULL;

rtx
aof_pic_entry (x)
     rtx x;
{
  struct pic_chain **chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    {
      /* This needs to persist throughout the compilation.  */
      end_temporary_allocation ();
      aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
      resume_temporary_allocation ();
    }

  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}

void
aof_dump_pic_table (f)
     FILE *f;
{
  struct pic_chain *chain;

  if (aof_pic_chain == NULL)
    return;

  fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
           reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
           reg_names[PIC_OFFSET_TABLE_REGNUM]);
  fputs ("|x$adcons|\n", f);

  for (chain = aof_pic_chain; chain; chain = chain->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, chain->symname);
      fputs ("\n", f);
    }
}
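
/* With entries for "foo" and "bar", and assuming sl is the PIC register
   and REGISTER_PREFIX is empty, the dump above would produce roughly:

        AREA |sl$$adcons|, BASED sl
   |x$adcons|
        DCD foo
        DCD bar
 */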

int arm_text_section_count = 1;

char *
aof_text_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
           arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}
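
/* Each call names a fresh code area: the second call, for instance,
   returns "\tAREA |C$$code2|, CODE, READONLY", with ", PIC, REENTRANT"
   appended when compiling PIC.  */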

static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}

/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

struct import
{
  struct import *next;
  char *name;
};

static struct import *imports_list = NULL;

void
aof_add_import (name)
     char *name;
{
  struct import *new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}

void
aof_delete_import (name)
     char *name;
{
  struct import **old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
        {
          *old = (*old)->next;
          return;
        }
    }
}

int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE *f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */