/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "tm_p.h"

#ifndef Mmode
#define Mmode enum machine_mode
#endif

/* Some function declarations.  */
static HOST_WIDE_INT int_log2 PARAMS ((HOST_WIDE_INT));
static char * output_multi_immediate PARAMS ((rtx *, char *, char *, int, HOST_WIDE_INT));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, HOST_WIDE_INT, rtx, rtx, int, int));
static int arm_naked_function_p PARAMS ((tree));
static void init_fpa_table PARAMS ((void));
static enum machine_mode select_dominance_cc_mode PARAMS ((rtx, rtx, HOST_WIDE_INT));
static HOST_WIDE_INT add_minipool_constant PARAMS ((rtx, Mmode));
static void dump_minipool PARAMS ((rtx));
static rtx find_barrier PARAMS ((rtx, int));
static void push_minipool_fix PARAMS ((rtx, int, rtx *, Mmode, rtx));
static void push_minipool_barrier PARAMS ((rtx, int));
static void note_invalid_constants PARAMS ((rtx, int));
static char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static int eliminate_lr2ip PARAMS ((rtx *));
static char * shift_op PARAMS ((rtx, HOST_WIDE_INT *));
static int pattern_really_clobbers_lr PARAMS ((rtx));
static int function_really_clobbers_lr PARAMS ((rtx));
static void emit_multi_reg_push PARAMS ((int));
static void emit_sfm PARAMS ((int, int));
static enum arm_cond_code get_arm_condition_code PARAMS ((rtx));
static int const_ok_for_op PARAMS ((HOST_WIDE_INT, enum rtx_code));
static void arm_add_gc_roots PARAMS ((void));

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */

/* The bits in this mask specify which instructions we are allowed to
   generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
static int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

static enum arm_cond_code get_arm_condition_code ();

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  char *       name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm720",                     FL_MODE26 | FL_MODE32 },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,         "-mcpu=",       all_cores  },
  { NULL,         "-march=",      all_architectures },
  { NULL,         "-mtune=",      all_cores }
};

/* Return the number of bits set in VALUE.  */
static unsigned int
bit_count (value)
     signed int value;
{
  unsigned int count = 0;

  while (value)
    {
      /* Clear the least significant bit that is set.  */
      value &= ~(value & - value);
      ++ count;
    }

  return count;
}
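
/* Editor's note (illustrative, not in the original source): VALUE & -VALUE
   isolates the lowest set bit of a two's complement word, so each loop
   iteration above clears exactly one bit.  For VALUE == 0x14 (binary 10100)
   the loop runs twice, 0x14 -> 0x10 -> 0x0, and the function returns 2.  */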

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel ++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int    cpu;
        char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def ++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel ++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switch that requires certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~ FL_MODE26;
        }

      if (! TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel ++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int        current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel ++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (! TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  arm_is_6_or_7     = ((tune_flags & (FL_MODE26 | FL_MODE32))
                       && !(tune_flags & FL_ARCH4)) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (! flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */
  /* XXX: What about the minipool tables?  */
}

\f
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  if (!reload_completed
      || current_function_pretend_args_size
      || current_function_anonymous_args
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !(TARGET_APCS && frame_pointer_needed)))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;
  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno < 16; regno++)
        if (regs_ever_live[regno] && ! call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 16; regno < 24; regno++)
    if (regs_ever_live[regno] && ! call_used_regs[regno])
      return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
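
/* Editor's note (illustrative): when this returns 1 the epilogue can be
   replaced by a single instruction, typically "mov pc, lr" (or one ldm
   that restores pc), instead of a full stack-frame tear-down.  */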

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffffUL) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffffUL)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffffUL)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffffUL) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffffUL)
                       >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffffUL);
    } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
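
/* Worked examples (editor's note, not from the original source): an ARM
   data-processing immediate is an 8-bit value rotated right by an even
   amount, which is exactly what the rotating MASK above tests.  So 0xFF,
   0xFF0 and 0xFF000000 are all valid immediates, while 0x101 is not: its
   two set bits are 8 positions apart and cannot fit inside any 8-bit
   window under an even rotation.  */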

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
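
/* Illustrative examples (editor's note): PLUS with 0xFFFFFF00 is accepted
   because its negation 0x100 is encodable (the add can become a sub), and
   AND with 0xFFFFFF00 is accepted because its complement 0xFF is encodable
   (the and can become a bic).  */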

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (! after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffffUL;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffffUL)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffffUL)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffffUL)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000UL)
        {
          temp1 = remainder & 0xffff0000UL;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffffUL) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffffUL
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffffUL);

          if ((remainder | shift_mask) != 0xffffffffUL)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffffUL)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffffUL;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffffUL;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }
  return insns;
}
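
/* Worked example (editor's illustration, not from the original source):
   for code == SET and val == 0xFFFF, neither the value nor its complement
   is a valid immediate, so the block scan above splits it into two 8-bit
   rotated chunks, emitting roughly "mov rD, #0xff00" followed by
   "add rD, rD, #0xff", and the function returns 2.  */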

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
                - 1)
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i+1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
        {
          *op1 = GEN_INT (i-1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
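
/* Example (editor's illustration): (GT x 4095) is rewritten as
   (GE x 4096); 4095 (0xFFF) is not a valid ARM immediate, whereas
   4096 (0x1000) is, so the comparison no longer needs a separate
   constant load.  */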

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (! AGGREGATE_TYPE_P (type))
    {
      /* All simple types are returned in registers.  */
      return 0;
    }
  else if (int_size_in_bytes (type) > 4)
    {
      /* All structures/unions bigger than one word are returned in memory.  */
      return 1;
    }
  else if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register...  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (! DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}
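
/* Examples (editor's illustration): `struct { int i; }' meets the APCS
   "integer like" rule and is returned in a register, while
   `struct { short s; char c; }' goes to memory even though it fits in a
   word, because `c' is a second addressable (non bit-field) member.  */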

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname  ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (! named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
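
/* Editor's note (illustrative): under the APCS the first NUM_ARG_REGS
   argument words are passed in r0-r3, so for `int f (int a, int b)'
   this yields r0 and then r1; unnamed variadic words and any overflow
   go to the stack (NULL_RTX).  When the function returns an aggregate
   in memory, arm_init_cumulative_args above reserves r0 for the
   return-value pointer.  */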
\f
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,          /* No #pragma [no_]long_calls is in effect.  */
  LONG,         /* #pragma long_calls is in effect.  */
  SHORT         /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

/* Handle pragmas for compatibility with Intel's compilers.
   FIXME: This is incomplete, since it does not handle all
   the pragmas that the Intel compilers understand.  */
int
arm_process_pragma (p_getc, p_ungetc, pname)
     int (* p_getc) PARAMS ((void)) ATTRIBUTE_UNUSED;
     void (* p_ungetc) PARAMS ((int)) ATTRIBUTE_UNUSED;
     char * pname;
{
  /* Should be pragma 'far' or equivalent for callx/balx here.  */
  if (strcmp (pname, "long_calls") == 0)
    arm_pragma_long_calls = LONG;
  else if (strcmp (pname, "no_long_calls") == 0)
    arm_pragma_long_calls = SHORT;
  else if (strcmp (pname, "long_calls_off") == 0)
    arm_pragma_long_calls = OFF;
  else
    return 0;

  return 1;
}
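
/* Usage example (editor's illustration):

     #pragma long_calls
     void far_away (void);    -- gets the long_call attribute
     #pragma no_long_calls
     void nearby (void);      -- gets the short_call attribute
     #pragma long_calls_off
     void normal (void);      -- back to the command line default

   The attributes themselves are attached by
   arm_set_default_type_attributes below.  */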
\f
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  */
int
arm_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  if (   TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  if (is_attribute_p ("long_call", identifier))
    return (args == NULL_TREE);

  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  if (is_attribute_p ("short_call", identifier))
    return (args == NULL_TREE);

  return 0;
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
arm_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same attribute.  */
      if ((l1 != l2) || (s1 != s2))
        return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
        return 0;
    }

  return 1;
}
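
/* Example (editor's illustration): assigning a function declared
   short_call to a function pointer whose type carries the long_call
   attribute makes this return 0, so the two pointer types are
   diagnosed as incompatible.  */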

/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */
void
arm_encode_call_attribute (decl, flag)
     tree decl;
     char flag;
{
  char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int    len = strlen (str);
  char * newstr;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  if (ggc_p)
    newstr = ggc_alloc_string (NULL, len + 2);
  else
    newstr = permalloc (len + 2);

  sprintf (newstr, "%c%s", flag, str);

  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}

/* Assign default attributes to a newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */
void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions, when
     inside #pragma long_calls, or __attribute__ ((short_call)),
     when inside #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
        attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
        attr_name = get_identifier ("short_call");
      else
        return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}
1688\f
1689/* Return 1 if the operand is a SYMBOL_REF for a function known to be
1690 defined within the current compilation unit. If this caanot be
1691 determined, then 0 is returned. */
1692static int
1693current_file_function_operand (sym_ref)
1694 rtx sym_ref;
1695{
1696 /* This is a bit of a fib. A function will have a short call flag
1697 applied to its name if it has the short call attribute, or it has
1698 already been defined within the current compilation unit. */
1699 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
1700 return 1;
1701
1702 /* The current funciton is always defined within the current compilation
1703 unit. if it s a weak defintion however, then this may not be the real
1704 defintion of the function, and so we have to say no. */
1705 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
1706 && ! DECL_WEAK (current_function_decl))
1707 return 1;
1708
1709 /* We cannot make the determination - default to returning 0. */
1710 return 0;
1711}
1712
1713/* Return non-zero if a 32 bit "long_call" should be generated for
1714 this call. We generate a long_call if the function:
1715
1716 a. has an __attribute__ ((long_call))
1717 or b. is within the scope of a #pragma long_calls
1718 or c. the -mlong-calls command line switch has been specified
1719
1720 However we do not generate a long call if the function:
1721
1722 d. has an __attribute__ ((short_call))
1723 or e. is inside the scope of a #pragma no_long_calls
1724 or f. has an __attribute__ ((section))
1725 or g. is defined within the current compilation unit.
1726
1727 This function will be called by C fragments contained in the machine
1728 description file. CALL_REF and CALL_COOKIE correspond to the matched
1729 rtl operands. CALL_SYMBOL is used to distinguish between
1730 two different callers of the function. It is set to 1 in the
1731 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
1732 and "call_value" patterns. This is because of the difference in the
1733 SYM_REFs passed by these patterns. */
1734int
1735arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
1736 rtx sym_ref;
1737 int call_cookie;
1738 int call_symbol;
1739{
1740 if (! call_symbol)
1741 {
1742 if (GET_CODE (sym_ref) != MEM)
1743 return 0;
1744
1745 sym_ref = XEXP (sym_ref, 0);
1746 }
1747
1748 if (GET_CODE (sym_ref) != SYMBOL_REF)
1749 return 0;
1750
1751 if (call_cookie & CALL_SHORT)
1752 return 0;
1753
1754 if (TARGET_LONG_CALLS && flag_function_sections)
1755 return 1;
1756
1757 if (current_file_function_operand (sym_ref))
1758 return 0;
1759
1760 return (call_cookie & CALL_LONG)
1761 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
1762 || TARGET_LONG_CALLS;
1763}
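/* When this returns non-zero, the call patterns load the callee's
   address into a register and branch through it instead of using a
   single bl, since bl encodes only a 24-bit (+/-32MB) pc-relative
   offset.  A sketch of the long form (illustrative; the actual
   pattern lives in the machine description):

     ldr ip, =callee
     mov lr, pc
     mov pc, ip                                                  */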
82e9d970 1764\f
32de079a
RE
1765int
1766legitimate_pic_operand_p (x)
1767 rtx x;
1768{
1769 if (CONSTANT_P (x) && flag_pic
1770 && (GET_CODE (x) == SYMBOL_REF
1771 || (GET_CODE (x) == CONST
1772 && GET_CODE (XEXP (x, 0)) == PLUS
1773 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1774 return 0;
1775
1776 return 1;
1777}
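/* e.g. under -fpic a bare (symbol_ref "x"), or a
   (const (plus (symbol_ref "x") (const_int 4))), is rejected here
   and must first be put through legitimize_pic_address below,
   whereas a (const_int 5) or a register is fine.  */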
1778
1779rtx
1780legitimize_pic_address (orig, mode, reg)
1781 rtx orig;
1782 enum machine_mode mode;
1783 rtx reg;
1784{
1785 if (GET_CODE (orig) == SYMBOL_REF)
1786 {
1787 rtx pic_ref, address;
1788 rtx insn;
1789 int subregs = 0;
1790
1791 if (reg == 0)
1792 {
1793 if (reload_in_progress || reload_completed)
1794 abort ();
1795 else
1796 reg = gen_reg_rtx (Pmode);
1797
1798 subregs = 1;
1799 }
1800
1801#ifdef AOF_ASSEMBLER
1802 /* The AOF assembler can generate relocations for these directly, and
1803 understands that the PIC register has to be added into the offset.
1804 */
1805 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
1806#else
1807 if (subregs)
1808 address = gen_reg_rtx (Pmode);
1809 else
1810 address = reg;
1811
1812 emit_insn (gen_pic_load_addr (address, orig));
1813
43cffd11
RE
1814 pic_ref = gen_rtx_MEM (Pmode,
1815 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1816 address));
32de079a
RE
1817 RTX_UNCHANGING_P (pic_ref) = 1;
1818 insn = emit_move_insn (reg, pic_ref);
1819#endif
1820 current_function_uses_pic_offset_table = 1;
1821 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1822 by loop. */
43cffd11
RE
1823 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
1824 REG_NOTES (insn));
32de079a
RE
1825 return reg;
1826 }
1827 else if (GET_CODE (orig) == CONST)
1828 {
1829 rtx base, offset;
1830
1831 if (GET_CODE (XEXP (orig, 0)) == PLUS
1832 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
1833 return orig;
1834
1835 if (reg == 0)
1836 {
1837 if (reload_in_progress || reload_completed)
1838 abort ();
1839 else
1840 reg = gen_reg_rtx (Pmode);
1841 }
1842
1843 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1844 {
1845 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
1846 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1847 base == reg ? 0 : reg);
1848 }
1849 else
1850 abort ();
1851
1852 if (GET_CODE (offset) == CONST_INT)
1853 {
1854 /* The base register doesn't really matter; we only want to
1855 test the index for the appropriate mode. */
1856 GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
1857
1858 if (! reload_in_progress && ! reload_completed)
1859 offset = force_reg (Pmode, offset);
1860 else
1861 abort ();
1862
1863 win:
1864 if (GET_CODE (offset) == CONST_INT)
1865 return plus_constant_for_output (base, INTVAL (offset));
1866 }
1867
1868 if (GET_MODE_SIZE (mode) > 4
1869 && (GET_MODE_CLASS (mode) == MODE_INT
1870 || TARGET_SOFT_FLOAT))
1871 {
1872 emit_insn (gen_addsi3 (reg, base, offset));
1873 return reg;
1874 }
1875
43cffd11 1876 return gen_rtx_PLUS (Pmode, base, offset);
32de079a
RE
1877 }
1878 else if (GET_CODE (orig) == LABEL_REF)
82e9d970
PB
1879 {
1880 current_function_uses_pic_offset_table = 1;
1881
1882 if (NEED_GOT_RELOC)
1883 {
1884 rtx pic_ref, address = gen_reg_rtx (Pmode);
1885
1886 emit_insn (gen_pic_load_addr (address, orig));
1887 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
1888
1889 emit_move_insn (address, pic_ref);
1890 return address;
1891 }
1892 }
32de079a
RE
1893
1894 return orig;
1895}
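/* For a global symbol under -fpic the net effect is roughly (a
   sketch; register names are illustrative and the AOF case differs
   as noted above):

     ldr rT, Lsym        @ constant-pool word: GOT offset of sym
     ldr rD, [rPIC, rT]  @ fetch sym's address from the GOT

   with a REG_EQUAL note of the original SYMBOL_REF left on the
   final move so later passes can still see what was loaded.  */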
1896
1897static rtx pic_rtx;
1898
1899int
62b10bbc 1900is_pic (x)
32de079a
RE
1901 rtx x;
1902{
1903 if (x == pic_rtx)
1904 return 1;
1905 return 0;
1906}
1907
1908void
1909arm_finalize_pic ()
1910{
1911#ifndef AOF_ASSEMBLER
1912 rtx l1, pic_tmp, pic_tmp2, seq;
1913 rtx global_offset_table;
1914
ed0e6530 1915 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
1916 return;
1917
1918 if (! flag_pic)
1919 abort ();
1920
1921 start_sequence ();
1922 l1 = gen_label_rtx ();
1923
43cffd11 1924 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768
RE
1925 /* On the ARM the PC register contains 'dot + 8' at the time of the
1926 addition. */
1927 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), 8);
84306176
PB
1928 if (GOT_PCREL)
1929 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 1930 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
1931 else
1932 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
1933
1934 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 1935
32de079a 1936 emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
dfa08768 1937 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
32de079a
RE
1938
1939 seq = gen_sequence ();
1940 end_sequence ();
1941 emit_insn_after (seq, get_insns ());
1942
1943 /* Need to emit this whether or not we obey regdecls,
1944 since setjmp/longjmp can cause life info to screw up. */
43cffd11 1945 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
1946#endif /* AOF_ASSEMBLER */
1947}
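/* The sequence constructed above amounts to (sketch; register and
   label names are illustrative):

       ldr rPIC, L2          @ L2: word holding GOT - (L1 + 8)
     L1:
       add rPIC, pc, rPIC    @ pc reads as L1 + 8 here

   leaving the address of _GLOBAL_OFFSET_TABLE_ in the PIC base
   register.  */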
1948
e2c671ba
RE
1949#define REG_OR_SUBREG_REG(X) \
1950 (GET_CODE (X) == REG \
1951 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
1952
1953#define REG_OR_SUBREG_RTX(X) \
1954 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
1955
1956#define ARM_FRAME_RTX(X) \
1957 ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
1958 || (X) == arg_pointer_rtx)
1959
1960int
74bbc178 1961arm_rtx_costs (x, code)
e2c671ba 1962 rtx x;
74bbc178 1963 enum rtx_code code;
e2c671ba
RE
1964{
1965 enum machine_mode mode = GET_MODE (x);
1966 enum rtx_code subcode;
1967 int extra_cost;
1968
1969 switch (code)
1970 {
1971 case MEM:
1972 /* Memory costs quite a lot for the first word, but subsequent words
1973 load at the equivalent of a single insn each. */
1974 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
1975 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
1976
1977 case DIV:
1978 case MOD:
1979 return 100;
1980
1981 case ROTATE:
1982 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
1983 return 4;
1984 /* Fall through */
1985 case ROTATERT:
1986 if (mode != SImode)
1987 return 8;
1988 /* Fall through */
1989 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
1990 if (mode == DImode)
1991 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
1992 + ((GET_CODE (XEXP (x, 0)) == REG
1993 || (GET_CODE (XEXP (x, 0)) == SUBREG
1994 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1995 ? 0 : 8));
1996 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
1997 || (GET_CODE (XEXP (x, 0)) == SUBREG
1998 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1999 ? 0 : 4)
2000 + ((GET_CODE (XEXP (x, 1)) == REG
2001 || (GET_CODE (XEXP (x, 1)) == SUBREG
2002 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2003 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2004 ? 0 : 4));
2005
2006 case MINUS:
2007 if (mode == DImode)
2008 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2009 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2010 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2011 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2012 ? 0 : 8));
2013
2014 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2015 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2016 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2017 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2018 ? 0 : 8)
2019 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2020 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2021 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2022 ? 0 : 8));
2023
2024 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2025 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2026 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2027 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2028 || subcode == ASHIFTRT || subcode == LSHIFTRT
2029 || subcode == ROTATE || subcode == ROTATERT
2030 || (subcode == MULT
2031 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2032 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2033 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2034 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2035 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2036 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2037 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2038 return 1;
2039 /* Fall through */
2040
2041 case PLUS:
2042 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2043 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2044 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2045 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2046 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2047 ? 0 : 8));
2048
2049 /* Fall through */
2050 case AND: case XOR: case IOR:
2051 extra_cost = 0;
2052
2053 /* Normally the frame registers will be spilt into reg+const during
2054 reload, so it is a bad idea to combine them with other instructions,
2055 since then they might not be moved outside of loops. As a compromise
2056 we allow integration with ops that have a constant as their second
2057 operand. */
2058 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2059 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2060 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2061 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2062 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2063 extra_cost = 4;
2064
2065 if (mode == DImode)
2066 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2067 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2068 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2069 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2070 ? 0 : 8));
2071
2072 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2073 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2074 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2075 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2076 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2077 ? 0 : 4));
2078
2079 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2080 return (1 + extra_cost
2081 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2082 || subcode == LSHIFTRT || subcode == ASHIFTRT
2083 || subcode == ROTATE || subcode == ROTATERT
2084 || (subcode == MULT
2085 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2086 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 2087 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
2088 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2089 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 2090 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
2091 ? 0 : 4));
2092
2093 return 8;
2094
2095 case MULT:
b111229a
RE
2096 /* There is no point basing this on the tuning, since it is always the
2097 fast variant if it exists at all. */
2b835d68
RE
2098 if (arm_fast_multiply && mode == DImode
2099 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2100 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2101 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2102 return 8;
2103
e2c671ba
RE
2104 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2105 || mode == DImode)
2106 return 30;
2107
2108 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2109 {
2b835d68 2110 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
b39e1240 2111 & (unsigned HOST_WIDE_INT) 0xffffffffUL);
e2c671ba
RE
2112 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2113 int j;
b111229a 2114 /* Tune as appropriate */
aec3cfba 2115 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 2116
2b835d68 2117 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 2118 {
2b835d68 2119 i >>= booth_unit_size;
e2c671ba
RE
2120 add_cost += 2;
2121 }
2122
2123 return add_cost;
2124 }
2125
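/* A worked example of the loop above (a sketch): multiplying by
   0xff00 on a core with FL_FAST_MULT gives booth_unit_size == 8;
   const_ok_for_arm (0xff00) holds, so add_cost starts at 4, and
   the loop runs twice (0xff00 -> 0xff -> 0), giving 4 + 2 + 2 = 8. */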
aec3cfba 2126 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 2127 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
2128 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2129
56636818
JL
2130 case TRUNCATE:
2131 if (arm_fast_multiply && mode == SImode
2132 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2133 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2134 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2135 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2136 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2137 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2138 return 8;
2139 return 99;
2140
e2c671ba
RE
2141 case NEG:
2142 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2143 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2144 /* Fall through */
2145 case NOT:
2146 if (mode == DImode)
2147 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2148
2149 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2150
2151 case IF_THEN_ELSE:
2152 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2153 return 14;
2154 return 2;
2155
2156 case COMPARE:
2157 return 1;
2158
2159 case ABS:
2160 return 4 + (mode == DImode ? 4 : 0);
2161
2162 case SIGN_EXTEND:
2163 if (GET_MODE (XEXP (x, 0)) == QImode)
2164 return (4 + (mode == DImode ? 4 : 0)
2165 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2166 /* Fall through */
2167 case ZERO_EXTEND:
2168 switch (GET_MODE (XEXP (x, 0)))
2169 {
2170 case QImode:
2171 return (1 + (mode == DImode ? 4 : 0)
2172 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2173
2174 case HImode:
2175 return (4 + (mode == DImode ? 4 : 0)
2176 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2177
2178 case SImode:
2179 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
2180
2181 default:
2182 break;
e2c671ba
RE
2183 }
2184 abort ();
2185
2186 default:
2187 return 99;
2188 }
2189}
32de079a
RE
2190
2191int
2192arm_adjust_cost (insn, link, dep, cost)
2193 rtx insn;
2194 rtx link;
2195 rtx dep;
2196 int cost;
2197{
2198 rtx i_pat, d_pat;
2199
b36ba79f
RE
2200 /* XXX This is not strictly true for the FPA. */
2201 if (REG_NOTE_KIND(link) == REG_DEP_ANTI
2202 || REG_NOTE_KIND(link) == REG_DEP_OUTPUT)
2203 return 0;
2204
32de079a
RE
2205 if ((i_pat = single_set (insn)) != NULL
2206 && GET_CODE (SET_SRC (i_pat)) == MEM
2207 && (d_pat = single_set (dep)) != NULL
2208 && GET_CODE (SET_DEST (d_pat)) == MEM)
2209 {
2210 /* This is a load after a store; there is no conflict if the load reads
2211 from a cached area.  Assume that loads from the stack and from the
2212 constant pool are cached, and that others will miss.  This is a
2213 hack. */
2214
32de079a
RE
2215 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2216 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2217 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2218 || reg_mentioned_p (hard_frame_pointer_rtx,
2219 XEXP (SET_SRC (i_pat), 0)))
949d79eb 2220 return 1;
32de079a
RE
2221 }
2222
2223 return cost;
2224}
2225
ff9940b0
RE
2226/* This code has been fixed for cross compilation. */
2227
2228static int fpa_consts_inited = 0;
2229
62b10bbc
NC
2230char * strings_fpa[8] =
2231{
2b835d68
RE
2232 "0", "1", "2", "3",
2233 "4", "5", "0.5", "10"
2234};
ff9940b0
RE
2235
2236static REAL_VALUE_TYPE values_fpa[8];
2237
2238static void
2239init_fpa_table ()
2240{
2241 int i;
2242 REAL_VALUE_TYPE r;
2243
2244 for (i = 0; i < 8; i++)
2245 {
2246 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2247 values_fpa[i] = r;
2248 }
f3bb6135 2249
ff9940b0
RE
2250 fpa_consts_inited = 1;
2251}
2252
cce8749e
CH
2253/* Return TRUE if rtx X is a valid immediate FPU constant. */
2254
2255int
2256const_double_rtx_ok_for_fpu (x)
2257 rtx x;
2258{
ff9940b0
RE
2259 REAL_VALUE_TYPE r;
2260 int i;
2261
2262 if (!fpa_consts_inited)
2263 init_fpa_table ();
2264
2265 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2266 if (REAL_VALUE_MINUS_ZERO (r))
2267 return 0;
f3bb6135 2268
ff9940b0
RE
2269 for (i = 0; i < 8; i++)
2270 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2271 return 1;
f3bb6135 2272
ff9940b0 2273 return 0;
f3bb6135 2274}
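/* So, for instance, 2.0 and 0.5 can be used as FPA immediates,
   while 6.0 and -0.0 (minus zero) cannot, and must be loaded from
   memory or synthesised.  */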
ff9940b0
RE
2275
2276/* Return TRUE if rtx X is a valid immediate FPU constant. */
2277
2278int
2279neg_const_double_rtx_ok_for_fpu (x)
2280 rtx x;
2281{
2282 REAL_VALUE_TYPE r;
2283 int i;
2284
2285 if (!fpa_consts_inited)
2286 init_fpa_table ();
2287
2288 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2289 r = REAL_VALUE_NEGATE (r);
2290 if (REAL_VALUE_MINUS_ZERO (r))
2291 return 0;
f3bb6135 2292
ff9940b0
RE
2293 for (i = 0; i < 8; i++)
2294 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2295 return 1;
f3bb6135 2296
ff9940b0 2297 return 0;
f3bb6135 2298}
cce8749e
CH
2299\f
2300/* Predicates for `match_operand' and `match_operator'. */
2301
ff9940b0 2302/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
2303 (SUBREG (MEM)...).
2304
2305 This function exists because at the time it was put in it led to better
2306 code. SUBREG(MEM) always needs a reload in the places where
2307 s_register_operand is used, and this seemed to lead to excessive
2308 reloading. */
ff9940b0
RE
2309
2310int
2311s_register_operand (op, mode)
2312 register rtx op;
2313 enum machine_mode mode;
2314{
2315 if (GET_MODE (op) != mode && mode != VOIDmode)
2316 return 0;
2317
2318 if (GET_CODE (op) == SUBREG)
f3bb6135 2319 op = SUBREG_REG (op);
ff9940b0
RE
2320
2321 /* We don't consider registers whose class is NO_REGS
2322 to be a register operand. */
2323 return (GET_CODE (op) == REG
2324 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2325 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2326}
2327
e2c671ba
RE
2328/* Only accept reg, subreg(reg), const_int. */
2329
2330int
2331reg_or_int_operand (op, mode)
2332 register rtx op;
2333 enum machine_mode mode;
2334{
2335 if (GET_CODE (op) == CONST_INT)
2336 return 1;
2337
2338 if (GET_MODE (op) != mode && mode != VOIDmode)
2339 return 0;
2340
2341 if (GET_CODE (op) == SUBREG)
2342 op = SUBREG_REG (op);
2343
2344 /* We don't consider registers whose class is NO_REGS
2345 to be a register operand. */
2346 return (GET_CODE (op) == REG
2347 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2348 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2349}
2350
ff9940b0
RE
2351/* Return 1 if OP is an item in memory, given that we are in reload. */
2352
2353int
2354reload_memory_operand (op, mode)
2355 rtx op;
74bbc178 2356 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2357{
2358 int regno = true_regnum (op);
2359
2360 return (! CONSTANT_P (op)
2361 && (regno == -1
2362 || (GET_CODE (op) == REG
2363 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2364}
2365
4d818c85
RE
2366/* Return 1 if OP is a valid memory address, but not valid for a signed byte
2367 memory access (architecture V4) */
2368int
2369bad_signed_byte_operand (op, mode)
2370 rtx op;
2371 enum machine_mode mode;
2372{
2373 if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
2374 return 0;
2375
2376 op = XEXP (op, 0);
2377
2378 /* A sum of anything more complex than reg + reg or reg + const is bad */
2379 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
9c8cc54f
RE
2380 && (! s_register_operand (XEXP (op, 0), VOIDmode)
2381 || (! s_register_operand (XEXP (op, 1), VOIDmode)
2382 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
2383 return 1;
2384
2385 /* Big constants are also bad */
2386 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2387 && (INTVAL (XEXP (op, 1)) > 0xff
2388 || -INTVAL (XEXP (op, 1)) > 0xff))
2389 return 1;
2390
2391 /* Everything else is good, or will automatically be made so. */
2392 return 0;
2393}
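/* e.g. (illustrative): [r0, r1] and [r0, #200] are acceptable for
   an ldrsb, whereas [r0, r1, lsl #2] or [r0, #4096] are "bad" here,
   since the v4 signed-byte loads take only a plain register offset
   or an 8-bit immediate.  */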
2394
cce8749e
CH
2395/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2396
2397int
2398arm_rhs_operand (op, mode)
2399 rtx op;
2400 enum machine_mode mode;
2401{
ff9940b0 2402 return (s_register_operand (op, mode)
cce8749e 2403 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2404}
cce8749e 2405
ff9940b0
RE
2406/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
2407 */
2408
2409int
2410arm_rhsm_operand (op, mode)
2411 rtx op;
2412 enum machine_mode mode;
2413{
2414 return (s_register_operand (op, mode)
2415 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2416 || memory_operand (op, mode));
f3bb6135 2417}
ff9940b0
RE
2418
2419/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
2420 constant that is valid when negated. */
2421
2422int
2423arm_add_operand (op, mode)
2424 rtx op;
2425 enum machine_mode mode;
2426{
2427 return (s_register_operand (op, mode)
2428 || (GET_CODE (op) == CONST_INT
2429 && (const_ok_for_arm (INTVAL (op))
2430 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2431}
ff9940b0
RE
2432
2433int
2434arm_not_operand (op, mode)
2435 rtx op;
2436 enum machine_mode mode;
2437{
2438 return (s_register_operand (op, mode)
2439 || (GET_CODE (op) == CONST_INT
2440 && (const_ok_for_arm (INTVAL (op))
2441 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 2442}
ff9940b0 2443
5165176d
RE
2444/* Return TRUE if the operand is a memory reference which contains an
2445 offsettable address. */
2446int
2447offsettable_memory_operand (op, mode)
2448 register rtx op;
2449 enum machine_mode mode;
2450{
2451 if (mode == VOIDmode)
2452 mode = GET_MODE (op);
2453
2454 return (mode == GET_MODE (op)
2455 && GET_CODE (op) == MEM
2456 && offsettable_address_p (reload_completed | reload_in_progress,
2457 mode, XEXP (op, 0)));
2458}
2459
2460/* Return TRUE if the operand is a memory reference which is, or can be
2461 made word aligned by adjusting the offset. */
2462int
2463alignable_memory_operand (op, mode)
2464 register rtx op;
2465 enum machine_mode mode;
2466{
2467 rtx reg;
2468
2469 if (mode == VOIDmode)
2470 mode = GET_MODE (op);
2471
2472 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2473 return 0;
2474
2475 op = XEXP (op, 0);
2476
2477 return ((GET_CODE (reg = op) == REG
2478 || (GET_CODE (op) == SUBREG
2479 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2480 || (GET_CODE (op) == PLUS
2481 && GET_CODE (XEXP (op, 1)) == CONST_INT
2482 && (GET_CODE (reg = XEXP (op, 0)) == REG
2483 || (GET_CODE (XEXP (op, 0)) == SUBREG
2484 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
2485 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
2486}
2487
b111229a
RE
2488/* Similar to s_register_operand, but does not allow hard integer
2489 registers. */
2490int
2491f_register_operand (op, mode)
2492 register rtx op;
2493 enum machine_mode mode;
2494{
2495 if (GET_MODE (op) != mode && mode != VOIDmode)
2496 return 0;
2497
2498 if (GET_CODE (op) == SUBREG)
2499 op = SUBREG_REG (op);
2500
2501 /* We don't consider registers whose class is NO_REGS
2502 to be a register operand. */
2503 return (GET_CODE (op) == REG
2504 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2505 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2506}
2507
cce8749e
CH
2508/* Return TRUE for valid operands for the rhs of an FPU instruction. */
2509
2510int
2511fpu_rhs_operand (op, mode)
2512 rtx op;
2513 enum machine_mode mode;
2514{
ff9940b0 2515 if (s_register_operand (op, mode))
f3bb6135 2516 return TRUE;
9ce71c6f
BS
2517
2518 if (GET_MODE (op) != mode && mode != VOIDmode)
2519 return FALSE;
2520
2521 if (GET_CODE (op) == CONST_DOUBLE)
2522 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
2523
2524 return FALSE;
2525}
cce8749e 2526
ff9940b0
RE
2527int
2528fpu_add_operand (op, mode)
2529 rtx op;
2530 enum machine_mode mode;
2531{
2532 if (s_register_operand (op, mode))
f3bb6135 2533 return TRUE;
9ce71c6f
BS
2534
2535 if (GET_MODE (op) != mode && mode != VOIDmode)
2536 return FALSE;
2537
2538 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
2539 return (const_double_rtx_ok_for_fpu (op)
2540 || neg_const_double_rtx_ok_for_fpu (op));
2541
2542 return FALSE;
ff9940b0
RE
2543}
2544
cce8749e
CH
2545/* Return nonzero if OP is a constant power of two. */
2546
2547int
2548power_of_two_operand (op, mode)
2549 rtx op;
74bbc178 2550 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
2551{
2552 if (GET_CODE (op) == CONST_INT)
2553 {
f3bb6135
RE
2554 HOST_WIDE_INT value = INTVAL(op);
2555 return value != 0 && (value & (value - 1)) == 0;
cce8749e 2556 }
f3bb6135
RE
2557 return FALSE;
2558}
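/* The value & (value - 1) idiom clears the lowest set bit: for 8
   (binary 1000) AND 7 (0111) the result is 0, while for 12 (1100)
   AND 11 (1011) it is 1000, correctly rejecting non-powers.  */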
cce8749e
CH
2559
2560/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 2561 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
2562 Note that this disallows MEM(REG+REG), but allows
2563 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
2564
2565int
2566di_operand (op, mode)
2567 rtx op;
2568 enum machine_mode mode;
2569{
ff9940b0 2570 if (s_register_operand (op, mode))
f3bb6135 2571 return TRUE;
cce8749e 2572
9ce71c6f
BS
2573 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2574 return FALSE;
2575
e9c6b69b
NC
2576 if (GET_CODE (op) == SUBREG)
2577 op = SUBREG_REG (op);
2578
cce8749e
CH
2579 switch (GET_CODE (op))
2580 {
2581 case CONST_DOUBLE:
2582 case CONST_INT:
f3bb6135
RE
2583 return TRUE;
2584
cce8749e 2585 case MEM:
f3bb6135
RE
2586 return memory_address_p (DImode, XEXP (op, 0));
2587
cce8749e 2588 default:
f3bb6135 2589 return FALSE;
cce8749e 2590 }
f3bb6135 2591}
cce8749e 2592
f3139301 2593/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 2594 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
f3139301
DE
2595 Note that this disallows MEM(REG+REG), but allows
2596 MEM(PRE/POST_INC/DEC(REG)). */
2597
2598int
2599soft_df_operand (op, mode)
2600 rtx op;
2601 enum machine_mode mode;
2602{
2603 if (s_register_operand (op, mode))
2604 return TRUE;
2605
9ce71c6f
BS
2606 if (mode != VOIDmode && GET_MODE (op) != mode)
2607 return FALSE;
2608
37b80d2e
BS
2609 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
2610 return FALSE;
2611
e9c6b69b
NC
2612 if (GET_CODE (op) == SUBREG)
2613 op = SUBREG_REG (op);
9ce71c6f 2614
f3139301
DE
2615 switch (GET_CODE (op))
2616 {
2617 case CONST_DOUBLE:
2618 return TRUE;
2619
2620 case MEM:
2621 return memory_address_p (DFmode, XEXP (op, 0));
2622
2623 default:
2624 return FALSE;
2625 }
2626}
2627
cce8749e
CH
2628/* Return TRUE for valid index operands. */
2629
2630int
2631index_operand (op, mode)
2632 rtx op;
2633 enum machine_mode mode;
2634{
ff9940b0
RE
2635 return (s_register_operand(op, mode)
2636 || (immediate_operand (op, mode)
2637 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
f3bb6135 2638}
cce8749e 2639
ff9940b0
RE
2640/* Return TRUE for valid shifts by a constant. This also accepts any
2641 power of two on the (somewhat overly relaxed) assumption that the
2642 shift operator in this case was a mult. */
2643
2644int
2645const_shift_operand (op, mode)
2646 rtx op;
2647 enum machine_mode mode;
2648{
2649 return (power_of_two_operand (op, mode)
2650 || (immediate_operand (op, mode)
2651 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
f3bb6135 2652}
ff9940b0 2653
cce8749e
CH
2654/* Return TRUE for arithmetic operators which can be combined with a multiply
2655 (shift). */
2656
2657int
2658shiftable_operator (x, mode)
2659 rtx x;
2660 enum machine_mode mode;
2661{
2662 if (GET_MODE (x) != mode)
2663 return FALSE;
2664 else
2665 {
2666 enum rtx_code code = GET_CODE (x);
2667
2668 return (code == PLUS || code == MINUS
2669 || code == IOR || code == XOR || code == AND);
2670 }
f3bb6135 2671}
cce8749e 2672
6ab589e0
JL
2673/* Return TRUE for binary logical operators. */
2674
2675int
2676logical_binary_operator (x, mode)
2677 rtx x;
2678 enum machine_mode mode;
2679{
2680 if (GET_MODE (x) != mode)
2681 return FALSE;
2682 else
2683 {
2684 enum rtx_code code = GET_CODE (x);
2685
2686 return (code == IOR || code == XOR || code == AND);
2687 }
2688}
2689
cce8749e
CH
2690/* Return TRUE for shift operators. */
2691
2692int
2693shift_operator (x, mode)
2694 rtx x;
2695 enum machine_mode mode;
2696{
2697 if (GET_MODE (x) != mode)
2698 return FALSE;
2699 else
2700 {
2701 enum rtx_code code = GET_CODE (x);
2702
ff9940b0 2703 if (code == MULT)
aec3cfba 2704 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 2705
e2c671ba
RE
2706 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2707 || code == ROTATERT);
cce8749e 2708 }
f3bb6135 2709}
ff9940b0
RE
2710
2711int equality_operator (x, mode)
f3bb6135 2712 rtx x;
74bbc178 2713 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2714{
f3bb6135 2715 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
2716}
2717
2718/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
2719
2720int
2721minmax_operator (x, mode)
2722 rtx x;
2723 enum machine_mode mode;
2724{
2725 enum rtx_code code = GET_CODE (x);
2726
2727 if (GET_MODE (x) != mode)
2728 return FALSE;
f3bb6135 2729
ff9940b0 2730 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 2731}
ff9940b0
RE
2732
2733 /* Return TRUE if X is EQ or NE. */
2734
2735 /* Return TRUE if this is the condition code register; if we aren't given
2736 a mode, accept any class CCmode register. */
2737
2738int
2739cc_register (x, mode)
f3bb6135
RE
2740 rtx x;
2741 enum machine_mode mode;
ff9940b0
RE
2742{
2743 if (mode == VOIDmode)
2744 {
2745 mode = GET_MODE (x);
2746 if (GET_MODE_CLASS (mode) != MODE_CC)
2747 return FALSE;
2748 }
f3bb6135 2749
ff9940b0
RE
2750 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2751 return TRUE;
f3bb6135 2752
ff9940b0
RE
2753 return FALSE;
2754}
5bbe2d40
RE
2755
2756/* Return TRUE if this is the condition code register; if we aren't given
84ed5e79
RE
2757 a mode, accept any class CCmode register which indicates a dominance
2758 expression. */
5bbe2d40
RE
2759
2760int
84ed5e79 2761dominant_cc_register (x, mode)
5bbe2d40
RE
2762 rtx x;
2763 enum machine_mode mode;
2764{
2765 if (mode == VOIDmode)
2766 {
2767 mode = GET_MODE (x);
84ed5e79 2768 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
2769 return FALSE;
2770 }
2771
84ed5e79
RE
2772 if (mode != CC_DNEmode && mode != CC_DEQmode
2773 && mode != CC_DLEmode && mode != CC_DLTmode
2774 && mode != CC_DGEmode && mode != CC_DGTmode
2775 && mode != CC_DLEUmode && mode != CC_DLTUmode
2776 && mode != CC_DGEUmode && mode != CC_DGTUmode)
2777 return FALSE;
2778
5bbe2d40
RE
2779 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2780 return TRUE;
2781
2782 return FALSE;
2783}
2784
2b835d68
RE
2785/* Return TRUE if X references a SYMBOL_REF. */
2786int
2787symbol_mentioned_p (x)
2788 rtx x;
2789{
6f7d635c 2790 register const char * fmt;
2b835d68
RE
2791 register int i;
2792
2793 if (GET_CODE (x) == SYMBOL_REF)
2794 return 1;
2795
2796 fmt = GET_RTX_FORMAT (GET_CODE (x));
2797 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2798 {
2799 if (fmt[i] == 'E')
2800 {
2801 register int j;
2802
2803 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2804 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2805 return 1;
2806 }
2807 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2808 return 1;
2809 }
2810
2811 return 0;
2812}
2813
2814/* Return TRUE if X references a LABEL_REF. */
2815int
2816label_mentioned_p (x)
2817 rtx x;
2818{
6f7d635c 2819 register const char * fmt;
2b835d68
RE
2820 register int i;
2821
2822 if (GET_CODE (x) == LABEL_REF)
2823 return 1;
2824
2825 fmt = GET_RTX_FORMAT (GET_CODE (x));
2826 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2827 {
2828 if (fmt[i] == 'E')
2829 {
2830 register int j;
2831
2832 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2833 if (label_mentioned_p (XVECEXP (x, i, j)))
2834 return 1;
2835 }
2836 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2837 return 1;
2838 }
2839
2840 return 0;
2841}
2842
ff9940b0
RE
2843enum rtx_code
2844minmax_code (x)
f3bb6135 2845 rtx x;
ff9940b0
RE
2846{
2847 enum rtx_code code = GET_CODE (x);
2848
2849 if (code == SMAX)
2850 return GE;
f3bb6135 2851 else if (code == SMIN)
ff9940b0 2852 return LE;
f3bb6135 2853 else if (code == UMIN)
ff9940b0 2854 return LEU;
f3bb6135 2855 else if (code == UMAX)
ff9940b0 2856 return GEU;
f3bb6135 2857
ff9940b0
RE
2858 abort ();
2859}
2860
2861/* Return 1 if memory locations are adjacent */
2862
f3bb6135 2863int
ff9940b0
RE
2864adjacent_mem_locations (a, b)
2865 rtx a, b;
2866{
2867 int val0 = 0, val1 = 0;
2868 int reg0, reg1;
2869
2870 if ((GET_CODE (XEXP (a, 0)) == REG
2871 || (GET_CODE (XEXP (a, 0)) == PLUS
2872 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2873 && (GET_CODE (XEXP (b, 0)) == REG
2874 || (GET_CODE (XEXP (b, 0)) == PLUS
2875 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
2876 {
2877 if (GET_CODE (XEXP (a, 0)) == PLUS)
2878 {
2879 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2880 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2881 }
2882 else
2883 reg0 = REGNO (XEXP (a, 0));
2884 if (GET_CODE (XEXP (b, 0)) == PLUS)
2885 {
2886 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2887 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2888 }
2889 else
2890 reg1 = REGNO (XEXP (b, 0));
2891 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2892 }
2893 return 0;
2894}
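/* e.g. (mem (reg 4)) and (mem (plus (reg 4) (const_int 4))) count
   as adjacent, in either order -- only a distance of exactly four
   bytes between the two addresses is accepted.  */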
2895
2896/* Return 1 if OP is a load multiple operation. It is known to be
2897 parallel and the first section will be tested. */
2898
f3bb6135 2899int
ff9940b0
RE
2900load_multiple_operation (op, mode)
2901 rtx op;
74bbc178 2902 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2903{
f3bb6135 2904 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
2905 int dest_regno;
2906 rtx src_addr;
f3bb6135 2907 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
2908 rtx elt;
2909
2910 if (count <= 1
2911 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2912 return 0;
2913
2914 /* Check to see if this might be a write-back */
2915 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2916 {
2917 i++;
2918 base = 1;
2919
2920 /* Now check it more carefully */
2921 if (GET_CODE (SET_DEST (elt)) != REG
2922 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2923 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2924 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2925 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2926 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2927 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2928 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2929 != REGNO (SET_DEST (elt)))
2930 return 0;
f3bb6135 2931
ff9940b0
RE
2932 count--;
2933 }
2934
2935 /* Perform a quick check so we don't blow up below. */
2936 if (count <= i
2937 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2938 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2939 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
2940 return 0;
2941
2942 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2943 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2944
2945 for (; i < count; i++)
2946 {
ed4c4348 2947 elt = XVECEXP (op, 0, i);
ff9940b0
RE
2948
2949 if (GET_CODE (elt) != SET
2950 || GET_CODE (SET_DEST (elt)) != REG
2951 || GET_MODE (SET_DEST (elt)) != SImode
2952 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2953 || GET_CODE (SET_SRC (elt)) != MEM
2954 || GET_MODE (SET_SRC (elt)) != SImode
2955 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2956 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2957 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2958 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2959 return 0;
2960 }
2961
2962 return 1;
2963}
2964
2965/* Return 1 if OP is a store multiple operation. It is known to be
2966 parallel and the first section will be tested. */
2967
f3bb6135 2968int
ff9940b0
RE
2969store_multiple_operation (op, mode)
2970 rtx op;
74bbc178 2971 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2972{
f3bb6135 2973 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
2974 int src_regno;
2975 rtx dest_addr;
f3bb6135 2976 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
2977 rtx elt;
2978
2979 if (count <= 1
2980 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2981 return 0;
2982
2983 /* Check to see if this might be a write-back */
2984 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2985 {
2986 i++;
2987 base = 1;
2988
2989 /* Now check it more carefully */
2990 if (GET_CODE (SET_DEST (elt)) != REG
2991 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2992 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2993 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2994 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2995 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2996 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2997 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2998 != REGNO (SET_DEST (elt)))
2999 return 0;
f3bb6135 3000
ff9940b0
RE
3001 count--;
3002 }
3003
3004 /* Perform a quick check so we don't blow up below. */
3005 if (count <= i
3006 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3007 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3008 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3009 return 0;
3010
3011 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3012 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3013
3014 for (; i < count; i++)
3015 {
3016 elt = XVECEXP (op, 0, i);
3017
3018 if (GET_CODE (elt) != SET
3019 || GET_CODE (SET_SRC (elt)) != REG
3020 || GET_MODE (SET_SRC (elt)) != SImode
3021 || REGNO (SET_SRC (elt)) != src_regno + i - base
3022 || GET_CODE (SET_DEST (elt)) != MEM
3023 || GET_MODE (SET_DEST (elt)) != SImode
3024 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3025 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3026 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3027 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3028 return 0;
3029 }
3030
3031 return 1;
3032}
e2c671ba 3033
84ed5e79
RE
3034int
3035load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3036 rtx * operands;
84ed5e79 3037 int nops;
62b10bbc
NC
3038 int * regs;
3039 int * base;
3040 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3041{
3042 int unsorted_regs[4];
3043 HOST_WIDE_INT unsorted_offsets[4];
3044 int order[4];
ad076f4e 3045 int base_reg = -1;
84ed5e79
RE
3046 int i;
3047
3048 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3049 extended if required. */
3050 if (nops < 2 || nops > 4)
3051 abort ();
3052
3053 /* Loop over the operands and check that the memory references are
3054 suitable (ie immediate offsets from the same base register). At
3055 the same time, extract the target register, and the memory
3056 offsets. */
3057 for (i = 0; i < nops; i++)
3058 {
3059 rtx reg;
3060 rtx offset;
3061
56636818
JL
3062 /* Convert a subreg of a mem into the mem itself. */
3063 if (GET_CODE (operands[nops + i]) == SUBREG)
3064 operands[nops + i] = alter_subreg (operands[nops + i]);
3065
84ed5e79
RE
3066 if (GET_CODE (operands[nops + i]) != MEM)
3067 abort ();
3068
3069 /* Don't reorder volatile memory references; it doesn't seem worth
3070 looking for the case where the order is ok anyway. */
3071 if (MEM_VOLATILE_P (operands[nops + i]))
3072 return 0;
3073
3074 offset = const0_rtx;
3075
3076 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3077 || (GET_CODE (reg) == SUBREG
3078 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3079 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3080 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3081 == REG)
3082 || (GET_CODE (reg) == SUBREG
3083 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3084 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3085 == CONST_INT)))
3086 {
3087 if (i == 0)
3088 {
3089 base_reg = REGNO (reg);
3090 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3091 ? REGNO (operands[i])
3092 : REGNO (SUBREG_REG (operands[i])));
3093 order[0] = 0;
3094 }
3095 else
3096 {
3097 if (base_reg != REGNO (reg))
3098 /* Not addressed from the same base register. */
3099 return 0;
3100
3101 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3102 ? REGNO (operands[i])
3103 : REGNO (SUBREG_REG (operands[i])));
3104 if (unsorted_regs[i] < unsorted_regs[order[0]])
3105 order[0] = i;
3106 }
3107
3108 /* If it isn't an integer register, or if it overwrites the
3109 base register but isn't the last insn in the list, then
3110 we can't do this. */
3111 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3112 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3113 return 0;
3114
3115 unsorted_offsets[i] = INTVAL (offset);
3116 }
3117 else
3118 /* Not a suitable memory address. */
3119 return 0;
3120 }
3121
3122 /* All the useful information has now been extracted from the
3123 operands into unsorted_regs and unsorted_offsets; additionally,
3124 order[0] has been set to the lowest numbered register in the
3125 list. Sort the registers into order, and check that the memory
3126 offsets are ascending and adjacent. */
3127
3128 for (i = 1; i < nops; i++)
3129 {
3130 int j;
3131
3132 order[i] = order[i - 1];
3133 for (j = 0; j < nops; j++)
3134 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3135 && (order[i] == order[i - 1]
3136 || unsorted_regs[j] < unsorted_regs[order[i]]))
3137 order[i] = j;
3138
3139 /* Have we found a suitable register? If not, one must be used more
3140 than once. */
3141 if (order[i] == order[i - 1])
3142 return 0;
3143
3144 /* Is the memory address adjacent and ascending? */
3145 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3146 return 0;
3147 }
3148
3149 if (base)
3150 {
3151 *base = base_reg;
3152
3153 for (i = 0; i < nops; i++)
3154 regs[i] = unsorted_regs[order[i]];
3155
3156 *load_offset = unsorted_offsets[order[0]];
3157 }
3158
3159 if (unsorted_offsets[order[0]] == 0)
3160 return 1; /* ldmia */
3161
3162 if (unsorted_offsets[order[0]] == 4)
3163 return 2; /* ldmib */
3164
3165 if (unsorted_offsets[order[nops - 1]] == 0)
3166 return 3; /* ldmda */
3167
3168 if (unsorted_offsets[order[nops - 1]] == -4)
3169 return 4; /* ldmdb */
3170
949d79eb
RE
3171 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3172 if the offset isn't small enough. The reason 2 ldrs are faster
3173 is because these ARMs are able to do more than one cache access
3174 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3175 whilst the ARM8 has a double bandwidth cache. This means that
3176 these cores can do both an instruction fetch and a data fetch in
3177 a single cycle, so the trick of calculating the address into a
3178 scratch register (one of the result regs) and then doing a load
3179 multiple actually becomes slower (and no smaller in code size).
3180 That is the transformation
6cc8c0b3
NC
3181
3182 ldr rd1, [rbase + offset]
3183 ldr rd2, [rbase + offset + 4]
3184
3185 to
3186
3187 add rd1, rbase, offset
3188 ldmia rd1, {rd1, rd2}
3189
949d79eb
RE
3190 produces worse code -- '3 cycles + any stalls on rd2' instead of
3191 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3192 access per cycle, the first sequence could never complete in less
3193 than 6 cycles, whereas the ldm sequence would only take 5 and
3194 would make better use of sequential accesses if not hitting the
3195 cache.
3196
3197 We cheat here and test 'arm_ld_sched' which we currently know to
3198 only be true for the ARM8, ARM9 and StrongARM. If this ever
3199 changes, then the test below needs to be reworked. */
f5a1b0d2 3200 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
3201 return 0;
3202
84ed5e79
RE
3203 /* Can't do it without setting up the offset, only do this if it takes
3204 no more than one insn. */
3205 return (const_ok_for_arm (unsorted_offsets[order[0]])
3206 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3207}
3208
3209char *
3210emit_ldm_seq (operands, nops)
62b10bbc 3211 rtx * operands;
84ed5e79
RE
3212 int nops;
3213{
3214 int regs[4];
3215 int base_reg;
3216 HOST_WIDE_INT offset;
3217 char buf[100];
3218 int i;
3219
3220 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3221 {
3222 case 1:
3223 strcpy (buf, "ldm%?ia\t");
3224 break;
3225
3226 case 2:
3227 strcpy (buf, "ldm%?ib\t");
3228 break;
3229
3230 case 3:
3231 strcpy (buf, "ldm%?da\t");
3232 break;
3233
3234 case 4:
3235 strcpy (buf, "ldm%?db\t");
3236 break;
3237
3238 case 5:
3239 if (offset >= 0)
3240 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3241 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3242 (long) offset);
3243 else
3244 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3245 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3246 (long) -offset);
3247 output_asm_insn (buf, operands);
3248 base_reg = regs[0];
3249 strcpy (buf, "ldm%?ia\t");
3250 break;
3251
3252 default:
3253 abort ();
3254 }
3255
3256 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3257 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3258
3259 for (i = 1; i < nops; i++)
3260 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3261 reg_names[regs[i]]);
3262
3263 strcat (buf, "}\t%@ phole ldm");
3264
3265 output_asm_insn (buf, operands);
3266 return "";
3267}
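/* For instance, with operands describing loads of r0 and r1 from
   [r4] and [r4, #4], this would emit (a sketch, assuming an empty
   REGISTER_PREFIX):

     ldmia r4, {r0, r1} @ phole ldm                              */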
3268
3269int
3270store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3271 rtx * operands;
84ed5e79 3272 int nops;
62b10bbc
NC
3273 int * regs;
3274 int * base;
3275 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3276{
3277 int unsorted_regs[4];
3278 HOST_WIDE_INT unsorted_offsets[4];
3279 int order[4];
ad076f4e 3280 int base_reg = -1;
84ed5e79
RE
3281 int i;
3282
3283 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3284 extended if required. */
3285 if (nops < 2 || nops > 4)
3286 abort ();
3287
3288 /* Loop over the operands and check that the memory references are
3289 suitable (ie immediate offsets from the same base register). At
3290 the same time, extract the target register, and the memory
3291 offsets. */
3292 for (i = 0; i < nops; i++)
3293 {
3294 rtx reg;
3295 rtx offset;
3296
56636818
JL
3297 /* Convert a subreg of a mem into the mem itself. */
3298 if (GET_CODE (operands[nops + i]) == SUBREG)
3299 operands[nops + i] = alter_subreg (operands[nops + i]);
3300
84ed5e79
RE
3301 if (GET_CODE (operands[nops + i]) != MEM)
3302 abort ();
3303
3304 /* Don't reorder volatile memory references; it doesn't seem worth
3305 looking for the case where the order is ok anyway. */
3306 if (MEM_VOLATILE_P (operands[nops + i]))
3307 return 0;
3308
3309 offset = const0_rtx;
3310
3311 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3312 || (GET_CODE (reg) == SUBREG
3313 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3314 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3315 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3316 == REG)
3317 || (GET_CODE (reg) == SUBREG
3318 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3319 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3320 == CONST_INT)))
3321 {
3322 if (i == 0)
3323 {
62b10bbc 3324 base_reg = REGNO (reg);
84ed5e79
RE
3325 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3326 ? REGNO (operands[i])
3327 : REGNO (SUBREG_REG (operands[i])));
3328 order[0] = 0;
3329 }
3330 else
3331 {
3332 if (base_reg != REGNO (reg))
3333 /* Not addressed from the same base register. */
3334 return 0;
3335
3336 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3337 ? REGNO (operands[i])
3338 : REGNO (SUBREG_REG (operands[i])));
3339 if (unsorted_regs[i] < unsorted_regs[order[0]])
3340 order[0] = i;
3341 }
3342
3343 /* If it isn't an integer register, then we can't do this. */
3344 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3345 return 0;
3346
3347 unsorted_offsets[i] = INTVAL (offset);
3348 }
3349 else
3350 /* Not a suitable memory address. */
3351 return 0;
3352 }
3353
3354 /* All the useful information has now been extracted from the
3355 operands into unsorted_regs and unsorted_offsets; additionally,
3356 order[0] has been set to the lowest numbered register in the
3357 list. Sort the registers into order, and check that the memory
3358 offsets are ascending and adjacent. */
3359
3360 for (i = 1; i < nops; i++)
3361 {
3362 int j;
3363
3364 order[i] = order[i - 1];
3365 for (j = 0; j < nops; j++)
3366 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3367 && (order[i] == order[i - 1]
3368 || unsorted_regs[j] < unsorted_regs[order[i]]))
3369 order[i] = j;
3370
3371 /* Have we found a suitable register? If not, one must be used more
3372 than once. */
3373 if (order[i] == order[i - 1])
3374 return 0;
3375
3376 /* Is the memory address adjacent and ascending? */
3377 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3378 return 0;
3379 }
3380
3381 if (base)
3382 {
3383 *base = base_reg;
3384
3385 for (i = 0; i < nops; i++)
3386 regs[i] = unsorted_regs[order[i]];
3387
3388 *load_offset = unsorted_offsets[order[0]];
3389 }
3390
3391 if (unsorted_offsets[order[0]] == 0)
3392 return 1; /* stmia */
3393
3394 if (unsorted_offsets[order[0]] == 4)
3395 return 2; /* stmib */
3396
3397 if (unsorted_offsets[order[nops - 1]] == 0)
3398 return 3; /* stmda */
3399
3400 if (unsorted_offsets[order[nops - 1]] == -4)
3401 return 4; /* stmdb */
3402
3403 return 0;
3404}
3405
3406char *
3407emit_stm_seq (operands, nops)
62b10bbc 3408 rtx * operands;
84ed5e79
RE
3409 int nops;
3410{
3411 int regs[4];
3412 int base_reg;
3413 HOST_WIDE_INT offset;
3414 char buf[100];
3415 int i;
3416
3417 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3418 {
3419 case 1:
3420 strcpy (buf, "stm%?ia\t");
3421 break;
3422
3423 case 2:
3424 strcpy (buf, "stm%?ib\t");
3425 break;
3426
3427 case 3:
3428 strcpy (buf, "stm%?da\t");
3429 break;
3430
3431 case 4:
3432 strcpy (buf, "stm%?db\t");
3433 break;
3434
3435 default:
3436 abort ();
3437 }
3438
3439 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3440 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3441
3442 for (i = 1; i < nops; i++)
3443 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3444 reg_names[regs[i]]);
3445
3446 strcat (buf, "}\t%@ phole stm");
3447
3448 output_asm_insn (buf, operands);
3449 return "";
3450}
3451
e2c671ba
RE
3452int
3453multi_register_push (op, mode)
0a81f500 3454 rtx op;
74bbc178 3455 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3456{
3457 if (GET_CODE (op) != PARALLEL
3458 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3459 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3460 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3461 return 0;
3462
3463 return 1;
3464}
3465
ff9940b0 3466\f
f3bb6135
RE
3467/* Routines for use with attributes */
3468
31fdb4d5
DE
3469/* Return nonzero if ATTR is a valid attribute for DECL.
3470 ATTRIBUTES are any existing attributes and ARGS are the arguments
3471 supplied with ATTR.
3472
3473 Supported attributes:
3474
3475 naked: don't output any prologue or epilogue code, the user is assumed
3476 to do the right thing. */
3477
3478int
74bbc178 3479arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3480 tree decl;
31fdb4d5
DE
3481 tree attr;
3482 tree args;
3483{
3484 if (args != NULL_TREE)
3485 return 0;
3486
3487 if (is_attribute_p ("naked", attr))
3488 return TREE_CODE (decl) == FUNCTION_DECL;
3489 return 0;
3490}
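/* e.g. in user code (hypothetical):

     void fiq_handler (void) __attribute__ ((naked));

   no prologue or epilogue is emitted for fiq_handler; its body is
   expected to be written as inline assembler.  */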
3491
3492/* Return non-zero if FUNC is a naked function. */
3493
3494static int
3495arm_naked_function_p (func)
3496 tree func;
3497{
3498 tree a;
3499
3500 if (TREE_CODE (func) != FUNCTION_DECL)
3501 abort ();
2e943e99 3502
31fdb4d5
DE
3503 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3504 return a != NULL_TREE;
3505}
f3bb6135 3506\f
ff9940b0
RE
3507/* Routines for use in generating RTL */
3508
f3bb6135 3509rtx
56636818 3510arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3511 in_struct_p, scalar_p)
ff9940b0
RE
3512 int base_regno;
3513 int count;
3514 rtx from;
3515 int up;
3516 int write_back;
56636818
JL
3517 int unchanging_p;
3518 int in_struct_p;
c6df88cb 3519 int scalar_p;
ff9940b0
RE
3520{
3521 int i = 0, j;
3522 rtx result;
3523 int sign = up ? 1 : -1;
56636818 3524 rtx mem;
ff9940b0 3525
43cffd11
RE
3526 result = gen_rtx_PARALLEL (VOIDmode,
3527 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3528 if (write_back)
f3bb6135 3529 {
ff9940b0 3530 XVECEXP (result, 0, 0)
43cffd11
RE
3531 = gen_rtx_SET (GET_MODE (from), from,
3532 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3533 i = 1;
3534 count++;
f3bb6135
RE
3535 }
3536
ff9940b0 3537 for (j = 0; i < count; i++, j++)
f3bb6135 3538 {
43cffd11 3539 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3540 RTX_UNCHANGING_P (mem) = unchanging_p;
3541 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3542 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3543 XVECEXP (result, 0, i)
3544 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3545 }
3546
ff9940b0 3547 if (write_back)
43cffd11 3548 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, from);
ff9940b0
RE
3549
3550 return result;
3551}
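/* A sketch of the RTL produced for, say,
   arm_gen_load_multiple (0, 2, from, TRUE, FALSE, ...):

     (parallel [(set (reg:SI 0) (mem:SI from))
                (set (reg:SI 1) (mem:SI (plus from (const_int 4))))])

   with an extra leading SET of FROM itself (and a trailing CLOBBER)
   when WRITE_BACK is requested.  */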
3552
f3bb6135 3553rtx
56636818 3554arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3555 in_struct_p, scalar_p)
ff9940b0
RE
3556 int base_regno;
3557 int count;
3558 rtx to;
3559 int up;
3560 int write_back;
56636818
JL
3561 int unchanging_p;
3562 int in_struct_p;
c6df88cb 3563 int scalar_p;
ff9940b0
RE
3564{
3565 int i = 0, j;
3566 rtx result;
3567 int sign = up ? 1 : -1;
56636818 3568 rtx mem;
ff9940b0 3569
43cffd11
RE
3570 result = gen_rtx_PARALLEL (VOIDmode,
3571 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3572 if (write_back)
f3bb6135 3573 {
ff9940b0 3574 XVECEXP (result, 0, 0)
43cffd11
RE
3575 = gen_rtx_SET (GET_MODE (to), to,
3576 plus_constant (to, count * 4 * sign));
ff9940b0
RE
3577 i = 1;
3578 count++;
f3bb6135
RE
3579 }
3580
ff9940b0 3581 for (j = 0; i < count; i++, j++)
f3bb6135 3582 {
43cffd11 3583 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
3584 RTX_UNCHANGING_P (mem) = unchanging_p;
3585 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3586 MEM_SCALAR_P (mem) = scalar_p;
56636818 3587
43cffd11
RE
3588 XVECEXP (result, 0, i)
3589 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
3590 }
3591
ff9940b0 3592 if (write_back)
43cffd11 3593 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, to);
ff9940b0
RE
3594
3595 return result;
3596}
3597
880e2516
RE
3598int
3599arm_gen_movstrqi (operands)
62b10bbc 3600 rtx * operands;
880e2516
RE
3601{
3602 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 3603 int i;
880e2516 3604 rtx src, dst;
ad076f4e 3605 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 3606 rtx part_bytes_reg = NULL;
56636818
JL
3607 rtx mem;
3608 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 3609 int dst_scalar_p, src_scalar_p;
880e2516
RE
3610
3611 if (GET_CODE (operands[2]) != CONST_INT
3612 || GET_CODE (operands[3]) != CONST_INT
3613 || INTVAL (operands[2]) > 64
3614 || INTVAL (operands[3]) & 3)
3615 return 0;
3616
3617 st_dst = XEXP (operands[0], 0);
3618 st_src = XEXP (operands[1], 0);
56636818
JL
3619
3620 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3621 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 3622 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
3623 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3624 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 3625 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 3626
880e2516
RE
3627 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3628 fin_src = src = copy_to_mode_reg (SImode, st_src);
3629
3630 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
3631 out_words_to_go = INTVAL (operands[2]) / 4;
3632 last_bytes = INTVAL (operands[2]) & 3;
3633
3634 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 3635 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
3636
3637 for (i = 0; in_words_to_go >= 2; i+=4)
3638 {
bd9c7e23 3639 if (in_words_to_go > 4)
56636818 3640 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
3641 src_unchanging_p,
3642 src_in_struct_p,
3643 src_scalar_p));
bd9c7e23
RE
3644 else
3645 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 3646 FALSE, src_unchanging_p,
c6df88cb 3647 src_in_struct_p, src_scalar_p));
bd9c7e23 3648
880e2516
RE
3649 if (out_words_to_go)
3650 {
bd9c7e23 3651 if (out_words_to_go > 4)
56636818
JL
3652 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3653 dst_unchanging_p,
c6df88cb
MM
3654 dst_in_struct_p,
3655 dst_scalar_p));
bd9c7e23
RE
3656 else if (out_words_to_go != 1)
3657 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3658 dst, TRUE,
3659 (last_bytes == 0
56636818
JL
3660 ? FALSE : TRUE),
3661 dst_unchanging_p,
c6df88cb
MM
3662 dst_in_struct_p,
3663 dst_scalar_p));
880e2516
RE
3664 else
3665 {
43cffd11 3666 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
3667 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3668 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3669 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3670 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
3671 if (last_bytes != 0)
3672 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
3673 }
3674 }
3675
3676 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3677 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
3678 }
3679
3680 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
3681 if (out_words_to_go)
62b10bbc
NC
3682 {
3683 rtx sreg;
3684
3685 mem = gen_rtx_MEM (SImode, src);
3686 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3687 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
3688 MEM_SCALAR_P (mem) = src_scalar_p;
3689 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
3690 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
3691
3692 mem = gen_rtx_MEM (SImode, dst);
3693 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3694 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3695 MEM_SCALAR_P (mem) = dst_scalar_p;
3696 emit_move_insn (mem, sreg);
3697 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
3698 in_words_to_go--;
3699
3700 if (in_words_to_go) /* Sanity check */
3701 abort ();
3702 }
880e2516
RE
3703
3704 if (in_words_to_go)
3705 {
3706 if (in_words_to_go < 0)
3707 abort ();
3708
43cffd11 3709 mem = gen_rtx_MEM (SImode, src);
56636818
JL
3710 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3711 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 3712 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 3713 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
3714 }
3715
3716 if (BYTES_BIG_ENDIAN && last_bytes)
3717 {
3718 rtx tmp = gen_reg_rtx (SImode);
3719
3720 if (part_bytes_reg == NULL)
3721 abort ();
3722
3723 /* The bytes we want are in the top end of the word */
bee06f3d
RE
3724 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
3725 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
3726 part_bytes_reg = tmp;
3727
3728 while (last_bytes)
3729 {
43cffd11 3730 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
3731 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3732 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3733 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3734 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 3735
880e2516
RE
3736 if (--last_bytes)
3737 {
3738 tmp = gen_reg_rtx (SImode);
3739 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3740 part_bytes_reg = tmp;
3741 }
3742 }
3743
3744 }
3745 else
3746 {
3747 while (last_bytes)
3748 {
3749 if (part_bytes_reg == NULL)
3750 abort ();
3751
43cffd11 3752 mem = gen_rtx_MEM (QImode, dst);
56636818
JL
3753 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3754 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3755 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3756 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 3757
880e2516
RE
3758 if (--last_bytes)
3759 {
3760 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23
RE
3761
3762 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
880e2516
RE
3763 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3764 part_bytes_reg = tmp;
3765 }
3766 }
3767 }
3768
3769 return 1;
3770}
3771
5165176d
RE
3772/* Generate a memory reference for a half word, such that it will be loaded
3773 into the top 16 bits of the word. We can assume that the address is
3774 known to be alignable and of the form reg, or plus (reg, const). */
3775rtx
3776gen_rotated_half_load (memref)
3777 rtx memref;
3778{
3779 HOST_WIDE_INT offset = 0;
3780 rtx base = XEXP (memref, 0);
3781
3782 if (GET_CODE (base) == PLUS)
3783 {
3784 offset = INTVAL (XEXP (base, 1));
3785 base = XEXP (base, 0);
3786 }
3787
956d6950 3788 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 3789 if (TARGET_MMU_TRAPS
5165176d
RE
3790 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
3791 return NULL;
3792
43cffd11 3793 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
3794
3795 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
3796 return base;
3797
43cffd11 3798 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
3799}
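/* Illustrative behaviour of the above: on a little-endian target a
   half word at an aligned address is loaded into the bottom 16 bits
   of the word and must be rotated by 16 to reach the top; one at
   offset 2 is loaded into the top half directly, so no ROTATE is
   generated.  Big-endian targets are the mirror image.  */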
3800
84ed5e79 3801static enum machine_mode
74bbc178 3802select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
3803 rtx x;
3804 rtx y;
3805 HOST_WIDE_INT cond_or;
3806{
3807 enum rtx_code cond1, cond2;
3808 int swapped = 0;
3809
3810 /* Currently we will probably get the wrong result if the individual
3811 comparisons are not simple. This also ensures that it is safe to
956d6950 3812 reverse a comparison if necessary. */
84ed5e79
RE
3813 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
3814 != CCmode)
3815 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
3816 != CCmode))
3817 return CCmode;
3818
3819 if (cond_or)
3820 cond1 = reverse_condition (cond1);
3821
3822 /* If the comparisons are not equal, and one doesn't dominate the other,
3823 then we can't do this. */
3824 if (cond1 != cond2
3825 && ! comparison_dominates_p (cond1, cond2)
3826 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
3827 return CCmode;
3828
3829 if (swapped)
3830 {
3831 enum rtx_code temp = cond1;
3832 cond1 = cond2;
3833 cond2 = temp;
3834 }
3835
3836 switch (cond1)
3837 {
3838 case EQ:
3839 if (cond2 == EQ || ! cond_or)
3840 return CC_DEQmode;
3841
3842 switch (cond2)
3843 {
3844 case LE: return CC_DLEmode;
3845 case LEU: return CC_DLEUmode;
3846 case GE: return CC_DGEmode;
3847 case GEU: return CC_DGEUmode;
ad076f4e 3848 default: break;
84ed5e79
RE
3849 }
3850
3851 break;
3852
3853 case LT:
3854 if (cond2 == LT || ! cond_or)
3855 return CC_DLTmode;
3856 if (cond2 == LE)
3857 return CC_DLEmode;
3858 if (cond2 == NE)
3859 return CC_DNEmode;
3860 break;
3861
3862 case GT:
3863 if (cond2 == GT || ! cond_or)
3864 return CC_DGTmode;
3865 if (cond2 == GE)
3866 return CC_DGEmode;
3867 if (cond2 == NE)
3868 return CC_DNEmode;
3869 break;
3870
3871 case LTU:
3872 if (cond2 == LTU || ! cond_or)
3873 return CC_DLTUmode;
3874 if (cond2 == LEU)
3875 return CC_DLEUmode;
3876 if (cond2 == NE)
3877 return CC_DNEmode;
3878 break;
3879
3880 case GTU:
3881 if (cond2 == GTU || ! cond_or)
3882 return CC_DGTUmode;
3883 if (cond2 == GEU)
3884 return CC_DGEUmode;
3885 if (cond2 == NE)
3886 return CC_DNEmode;
3887 break;
3888
3889 /* The remaining cases only occur when both comparisons are the
3890 same. */
3891 case NE:
3892 return CC_DNEmode;
3893
3894 case LE:
3895 return CC_DLEmode;
3896
3897 case GE:
3898 return CC_DGEmode;
3899
3900 case LEU:
3901 return CC_DLEUmode;
3902
3903 case GEU:
3904 return CC_DGEUmode;
ad076f4e
RE
3905
3906 default:
3907 break;
84ed5e79
RE
3908 }
3909
3910 abort ();
3911}
3912
3913enum machine_mode
3914arm_select_cc_mode (op, x, y)
3915 enum rtx_code op;
3916 rtx x;
3917 rtx y;
3918{
3919 /* All floating point compares return CCFP if it is an equality
3920 comparison, and CCFPE otherwise. */
3921 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3922 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
3923
3924 /* A compare with a shifted operand. Because of canonicalization, the
3925 comparison will have to be swapped when we emit the assembler. */
3926 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
3927 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3928 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
3929 || GET_CODE (x) == ROTATERT))
3930 return CC_SWPmode;
3931
956d6950
JL
3932 /* This is a special case that is used by combine to allow a
3933 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 3934 followed by a comparison of the shifted integer (only valid for
956d6950 3935 equalities and unsigned inequalities). */
84ed5e79
RE
3936 if (GET_MODE (x) == SImode
3937 && GET_CODE (x) == ASHIFT
3938 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
3939 && GET_CODE (XEXP (x, 0)) == SUBREG
3940 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
3941 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
3942 && (op == EQ || op == NE
3943 || op == GEU || op == GTU || op == LTU || op == LEU)
3944 && GET_CODE (y) == CONST_INT)
3945 return CC_Zmode;
3946
3947 /* An operation that sets the condition codes as a side-effect does
3948 not set the V flag correctly, so we can only use comparisons where
3949 this doesn't matter. (For LT and GE we can use "mi" and "pl"
3950 instead.) */
3951 if (GET_MODE (x) == SImode
3952 && y == const0_rtx
3953 && (op == EQ || op == NE || op == LT || op == GE)
3954 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
3955 || GET_CODE (x) == AND || GET_CODE (x) == IOR
3956 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
3957 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
3958 || GET_CODE (x) == LSHIFTRT
3959 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3960 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
3961 return CC_NOOVmode;
3962
3963 /* A construct for a conditional compare, if the false arm contains
3964 0, then both conditions must be true, otherwise either condition
3965 must be true. Not all conditions are possible, so CCmode is
3966 returned if it can't be done. */
3967 if (GET_CODE (x) == IF_THEN_ELSE
3968 && (XEXP (x, 2) == const0_rtx
3969 || XEXP (x, 2) == const1_rtx)
3970 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3971 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
74bbc178 3972 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
84ed5e79
RE
3973 INTVAL (XEXP (x, 2)));
3974
3975 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
3976 return CC_Zmode;
3977
bd9c7e23
RE
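 /* (Illustrative note: this recognizes the unsigned-overflow idiom
    (a + b) < a, which is true exactly when the addition carries, so
    only the C flag needs to be valid.)  */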
3978 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
3979 && GET_CODE (x) == PLUS
3980 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
3981 return CC_Cmode;
3982
84ed5e79
RE
3983 return CCmode;
3984}
3985
ff9940b0
RE
3986/* X and Y are two things to compare using CODE. Emit the compare insn
3987 and return the rtx for the condition code register in the proper mode;
3988 SELECT_CC_MODE picks a CCFP mode for floating point comparisons. */
3989
3990rtx
74bbc178 3991gen_compare_reg (code, x, y)
ff9940b0
RE
3992 enum rtx_code code;
3993 rtx x, y;
3994{
3995 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
43cffd11 3996 rtx cc_reg = gen_rtx_REG (mode, 24);
ff9940b0 3997
43cffd11
RE
3998 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
3999 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4000
4001 return cc_reg;
4002}
4003
0a81f500
RE
4004void
4005arm_reload_in_hi (operands)
62b10bbc 4006 rtx * operands;
0a81f500 4007{
f9cc092a
RE
4008 rtx ref = operands[1];
4009 rtx base, scratch;
4010 HOST_WIDE_INT offset = 0;
4011
4012 if (GET_CODE (ref) == SUBREG)
4013 {
4014 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4015 if (BYTES_BIG_ENDIAN)
4016 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4017 - MIN (UNITS_PER_WORD,
4018 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4019 ref = SUBREG_REG (ref);
4020 }
4021
4022 if (GET_CODE (ref) == REG)
4023 {
4024 /* We have a pseudo which has been spilt onto the stack; there
4025 are two cases here: the first where there is a simple
4026 stack-slot replacement and a second where the stack-slot is
4027 out of range, or is used as a subreg. */
4028 if (reg_equiv_mem[REGNO (ref)])
4029 {
4030 ref = reg_equiv_mem[REGNO (ref)];
4031 base = find_replacement (&XEXP (ref, 0));
4032 }
4033 else
4034 /* The slot is out of range, or was dressed up in a SUBREG */
4035 base = reg_equiv_address[REGNO (ref)];
4036 }
4037 else
4038 base = find_replacement (&XEXP (ref, 0));
0a81f500 4039
e5e809f4
JL
4040 /* Handle the case where the address is too complex to be offset by 1. */
4041 if (GET_CODE (base) == MINUS
4042 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4043 {
f9cc092a 4044 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 4045
43cffd11 4046 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
4047 base = base_plus;
4048 }
f9cc092a
RE
4049 else if (GET_CODE (base) == PLUS)
4050 {
4051 /* The addend must be CONST_INT, or we would have dealt with it above */
4052 HOST_WIDE_INT hi, lo;
4053
4054 offset += INTVAL (XEXP (base, 1));
4055 base = XEXP (base, 0);
4056
4057 /* Rework the address into a legal sequence of insns */
4058 /* Valid range for lo is -4095 -> 4095 */
4059 lo = (offset >= 0
4060 ? (offset & 0xfff)
4061 : -((-offset) & 0xfff));
4062
4063 /* Corner case: if lo is the max offset, then we would be out of range
4064 once we have added the additional 1 below, so bump the msb into the
4065 pre-loading insn(s). */
4066 if (lo == 4095)
4067 lo &= 0x7ff;
4068
b39e1240
NC
4069 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFFUL)
4070 ^ (HOST_WIDE_INT) 0x80000000UL)
4071 - (HOST_WIDE_INT) 0x80000000UL);
f9cc092a
RE
4072
4073 if (hi + lo != offset)
4074 abort ();
4075
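 /* Worked example (illustrative): offset = 4095 would give lo = 4095,
    which goes out of range once the byte at offset + 1 is accessed;
    after the mask, lo = 2047 and hi = 2048, so the base register
    absorbs 2048 and both byte offsets (2047 and 2048) stay inside
    the legal +/-4095 range.  */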
4076 if (hi != 0)
4077 {
4078 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4079
4080 /* Get the base address; addsi3 knows how to handle constants
4081 that require more than one insn */
4082 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4083 base = base_plus;
4084 offset = lo;
4085 }
4086 }
e5e809f4 4087
f9cc092a
RE
4088 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4089 emit_insn (gen_zero_extendqisi2 (scratch,
4090 gen_rtx_MEM (QImode,
4091 plus_constant (base,
4092 offset))));
43cffd11
RE
4093 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4094 gen_rtx_MEM (QImode,
f9cc092a
RE
4095 plus_constant (base,
4096 offset + 1))));
b3b15f14 4097 if (! BYTES_BIG_ENDIAN)
43cffd11
RE
4098 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4099 gen_rtx_IOR (SImode,
4100 gen_rtx_ASHIFT
4101 (SImode,
4102 gen_rtx_SUBREG (SImode, operands[0], 0),
4103 GEN_INT (8)),
f9cc092a 4104 scratch)));
0a81f500 4105 else
43cffd11
RE
4106 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4107 gen_rtx_IOR (SImode,
f9cc092a 4108 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
4109 GEN_INT (8)),
4110 gen_rtx_SUBREG (SImode, operands[0],
4111 0))));
0a81f500
RE
4112}
4113
f9cc092a
RE
4114/* Handle storing a half-word to memory during reload by synthesising as two
4115 byte stores. Take care not to clobber the input values until after we
4116 have moved them somewhere safe. This code assumes that if the DImode
4117 scratch in operands[2] overlaps either the input value or output address
4118 in some way, then that value must die in this insn (we absolutely need
4119 two scratch registers for some corner cases). */
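/* (Illustrative: on a little-endian target the low byte of the value
   is stored at [base, #offset] and the value shifted right by 8
   supplies [base, #offset + 1]; a big-endian target simply swaps the
   two addresses, as the code below shows.)  */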
f3bb6135 4120void
af48348a 4121arm_reload_out_hi (operands)
62b10bbc 4122 rtx * operands;
af48348a 4123{
f9cc092a
RE
4124 rtx ref = operands[0];
4125 rtx outval = operands[1];
4126 rtx base, scratch;
4127 HOST_WIDE_INT offset = 0;
4128
4129 if (GET_CODE (ref) == SUBREG)
4130 {
4131 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4132 if (BYTES_BIG_ENDIAN)
4133 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4134 - MIN (UNITS_PER_WORD,
4135 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4136 ref = SUBREG_REG (ref);
4137 }
4138
4139
4140 if (GET_CODE (ref) == REG)
4141 {
4142 /* We have a pseudo which has been spilt onto the stack; there
4143 are two cases here: the first where there is a simple
4144 stack-slot replacement and a second where the stack-slot is
4145 out of range, or is used as a subreg. */
4146 if (reg_equiv_mem[REGNO (ref)])
4147 {
4148 ref = reg_equiv_mem[REGNO (ref)];
4149 base = find_replacement (&XEXP (ref, 0));
4150 }
4151 else
4152 /* The slot is out of range, or was dressed up in a SUBREG */
4153 base = reg_equiv_address[REGNO (ref)];
4154 }
4155 else
4156 base = find_replacement (&XEXP (ref, 0));
4157
4158 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4159
4160 /* Handle the case where the address is too complex to be offset by 1. */
4161 if (GET_CODE (base) == MINUS
4162 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4163 {
4164 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4165
4166 /* Be careful not to destroy OUTVAL. */
4167 if (reg_overlap_mentioned_p (base_plus, outval))
4168 {
4169 /* Updating base_plus might destroy outval, see if we can
4170 swap the scratch and base_plus. */
4171 if (! reg_overlap_mentioned_p (scratch, outval))
4172 {
4173 rtx tmp = scratch;
4174 scratch = base_plus;
4175 base_plus = tmp;
4176 }
4177 else
4178 {
4179 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4180
4181 /* Be conservative and copy OUTVAL into the scratch now,
4182 this should only be necessary if outval is a subreg
4183 of something larger than a word. */
4184 /* XXX Might this clobber base? I can't see how it can,
4185 since scratch is known to overlap with OUTVAL, and
4186 must be wider than a word. */
4187 emit_insn (gen_movhi (scratch_hi, outval));
4188 outval = scratch_hi;
4189 }
4190 }
4191
4192 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4193 base = base_plus;
4194 }
4195 else if (GET_CODE (base) == PLUS)
4196 {
4197 /* The addend must be CONST_INT, or we would have dealt with it above */
4198 HOST_WIDE_INT hi, lo;
4199
4200 offset += INTVAL (XEXP (base, 1));
4201 base = XEXP (base, 0);
4202
4203 /* Rework the address into a legal sequence of insns */
4204 /* Valid range for lo is -4095 -> 4095 */
4205 lo = (offset >= 0
4206 ? (offset & 0xfff)
4207 : -((-offset) & 0xfff));
4208
4209 /* Corner case: if lo is the max offset, then we would be out of range
4210 once we have added the additional 1 below, so bump the msb into the
4211 pre-loading insn(s). */
4212 if (lo == 4095)
4213 lo &= 0x7ff;
4214
b39e1240
NC
4215 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFFUL)
4216 ^ (HOST_WIDE_INT) 0x80000000UL)
4217 - (HOST_WIDE_INT) 0x80000000UL);
f9cc092a
RE
4218
4219 if (hi + lo != offset)
4220 abort ();
4221
4222 if (hi != 0)
4223 {
4224 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4225
4226 /* Be careful not to destroy OUTVAL. */
4227 if (reg_overlap_mentioned_p (base_plus, outval))
4228 {
4229 /* Updating base_plus might destroy outval, see if we
4230 can swap the scratch and base_plus. */
4231 if (! reg_overlap_mentioned_p (scratch, outval))
4232 {
4233 rtx tmp = scratch;
4234 scratch = base_plus;
4235 base_plus = tmp;
4236 }
4237 else
4238 {
4239 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4240
4241 /* Be conservative and copy outval into scratch now,
4242 this should only be necessary if outval is a
4243 subreg of something larger than a word. */
4244 /* XXX Might this clobber base? I can't see how it
4245 can, since scratch is known to overlap with
4246 outval. */
4247 emit_insn (gen_movhi (scratch_hi, outval));
4248 outval = scratch_hi;
4249 }
4250 }
4251
4252 /* Get the base address; addsi3 knows how to handle constants
4253 that require more than one insn */
4254 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4255 base = base_plus;
4256 offset = lo;
4257 }
4258 }
af48348a 4259
b5cc037f
RE
4260 if (BYTES_BIG_ENDIAN)
4261 {
f9cc092a
RE
4262 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4263 plus_constant (base, offset + 1)),
4264 gen_rtx_SUBREG (QImode, outval, 0)));
4265 emit_insn (gen_lshrsi3 (scratch,
4266 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4267 GEN_INT (8)));
f9cc092a
RE
4268 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4269 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f
RE
4270 }
4271 else
4272 {
f9cc092a
RE
4273 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4274 gen_rtx_SUBREG (QImode, outval, 0)));
4275 emit_insn (gen_lshrsi3 (scratch,
4276 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4277 GEN_INT (8)));
f9cc092a
RE
4278 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4279 plus_constant (base, offset + 1)),
4280 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f 4281 }
af48348a 4282}
2b835d68
RE
4283\f
4284/* Routines for manipulation of the constant pool. */
2b835d68 4285
949d79eb
RE
4286/* Arm instructions cannot load a large constant directly into a
4287 register; they have to come from a pc relative load. The constant
4288 must therefore be placed in the addressable range of the pc
4289 relative load. Depending on the precise pc relative load
4290 instruction the range is somewhere between 256 bytes and 4k. This
4291 means that we often have to dump a constant inside a function, and
2b835d68
RE
4292 generate code to branch around it.
4293
949d79eb
RE
4294 It is important to minimize this, since the branches will slow
4295 things down and make the code larger.
2b835d68 4296
949d79eb
RE
4297 Normally we can hide the table after an existing unconditional
4298 branch so that there is no interruption of the flow, but in the
4299 worst case the code looks like this:
2b835d68
RE
4300
4301 ldr rn, L1
949d79eb 4302 ...
2b835d68
RE
4303 b L2
4304 align
4305 L1: .long value
4306 L2:
949d79eb 4307 ...
2b835d68 4308
2b835d68 4309 ldr rn, L3
949d79eb 4310 ...
2b835d68
RE
4311 b L4
4312 align
2b835d68
RE
4313 L3: .long value
4314 L4:
949d79eb
RE
4315 ...
4316
4317 We fix this by performing a scan after scheduling, which notices
4318 which instructions need to have their operands fetched from the
4319 constant table and builds the table.
4320
4321 The algorithm starts by building a table of all the constants that
4322 need fixing up and all the natural barriers in the function (places
4323 where a constant table can be dropped without breaking the flow).
4324 For each fixup we note how far the pc-relative replacement will be
4325 able to reach and the offset of the instruction into the function.
4326
4327 Having built the table we then group the fixes together to form
4328 tables that are as large as possible (subject to addressing
4329 constraints) and emit each table of constants after the last
4330 barrier that is within range of all the instructions in the group.
4331 If a group does not contain a barrier, then we forcibly create one
4332 by inserting a jump instruction into the flow. Once the table has
4333 been inserted, the insns are then modified to reference the
4334 relevant entry in the pool.
4335
4336 Possible enhancements to the algorithm (not implemented) are:
4337
4338 1) ARM instructions (but not thumb) can use negative offsets, so we
4339 could reference back to a previous pool rather than forwards to a
4340 new one. For large functions this may reduce the number of pools
4341 required.
4342
4343 2) For some processors and object formats, there may be benefit in
4344 aligning the pools to the start of cache lines; this alignment
4345 would need to be taken into account when calculating addressability
4346 of a pool.
2b835d68
RE
4347
4348 */
4349
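/* An illustrative walk-through with assumed numbers: suppose fix F1
   at address 100 and fix F2 at address 2000, each with a 4k range,
   and a barrier B at address 3000.  Both fixes can reach B, so a
   single pool is dumped after B and both insns are rewritten to load
   from it.  Had B lain beyond F1's reach, a jump around a forcibly
   inserted barrier would have been generated instead.  */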
4350typedef struct
4351{
4352 rtx value; /* Value in table */
4353 HOST_WIDE_INT next_offset;
4354 enum machine_mode mode; /* Mode of value */
949d79eb 4355} minipool_node;
2b835d68
RE
4356
4357/* The maximum number of constants that can fit into one pool, since
949d79eb
RE
4358 the pc relative range is 0...4092 bytes and constants are at least 4
4359 bytes long. */
2b835d68 4360
949d79eb
RE
4361#define MAX_MINIPOOL_SIZE (4092/4)
4362static minipool_node minipool_vector[MAX_MINIPOOL_SIZE];
4363static int minipool_size;
4364static rtx minipool_vector_label;
2b835d68 4365
332072db
RE
4366/* Add a constant to the pool and return its offset within the current
4367 pool.
4368
4369 X is the value we want to place in the pool (the constant whose
4370 in-insn reference the caller will rewrite into a load from the
4371 pool). MODE is its mode. */
2b835d68 4372static HOST_WIDE_INT
949d79eb 4373add_minipool_constant (x, mode)
2b835d68
RE
4374 rtx x;
4375 enum machine_mode mode;
4376{
4377 int i;
2b835d68 4378 HOST_WIDE_INT offset;
da6558fd 4379
949d79eb
RE
4380 /* First, see if we've already got it. */
4381 for (i = 0; i < minipool_size; i++)
2b835d68 4382 {
949d79eb
RE
4383 if (GET_CODE (x) == minipool_vector[i].value->code
4384 && mode == minipool_vector[i].mode)
2b835d68
RE
4385 {
4386 if (GET_CODE (x) == CODE_LABEL)
4387 {
949d79eb 4388 if (XINT (x, 3) != XINT (minipool_vector[i].value, 3))
2b835d68
RE
4389 continue;
4390 }
949d79eb
RE
4391 if (rtx_equal_p (x, minipool_vector[i].value))
4392 return minipool_vector[i].next_offset - GET_MODE_SIZE (mode);
2b835d68
RE
4393 }
4394 }
4395
4396 /* Need a new one */
949d79eb 4397 minipool_vector[minipool_size].next_offset = GET_MODE_SIZE (mode);
2b835d68 4398 offset = 0;
949d79eb
RE
4399 if (minipool_size == 0)
4400 minipool_vector_label = gen_label_rtx ();
2b835d68 4401 else
949d79eb
RE
4402 minipool_vector[minipool_size].next_offset
4403 += (offset = minipool_vector[minipool_size - 1].next_offset);
2b835d68 4404
949d79eb
RE
4405 minipool_vector[minipool_size].value = x;
4406 minipool_vector[minipool_size].mode = mode;
4407 minipool_size++;
2b835d68
RE
4408 return offset;
4409}
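/* (Illustrative: the first SImode constant added receives offset 0
   and next_offset 4; a DFmode constant added next receives offset 4
   and next_offset 12, the offsets accumulating by GET_MODE_SIZE.)  */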
4410
4411/* Output the literal table */
4412static void
949d79eb 4413dump_minipool (scan)
2b835d68
RE
4414 rtx scan;
4415{
4416 int i;
4417
4418 scan = emit_label_after (gen_label_rtx (), scan);
4419 scan = emit_insn_after (gen_align_4 (), scan);
949d79eb 4420 scan = emit_label_after (minipool_vector_label, scan);
2b835d68 4421
949d79eb 4422 for (i = 0; i < minipool_size; i++)
2b835d68 4423 {
949d79eb 4424 minipool_node *p = minipool_vector + i;
2b835d68
RE
4425
4426 switch (GET_MODE_SIZE (p->mode))
4427 {
4428 case 4:
4429 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
4430 break;
4431
4432 case 8:
4433 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
4434 break;
4435
4436 default:
4437 abort ();
4438 break;
4439 }
4440 }
4441
4442 scan = emit_insn_after (gen_consttable_end (), scan);
4443 scan = emit_barrier_after (scan);
949d79eb 4444 minipool_size = 0;
2b835d68
RE
4445}
4446
949d79eb
RE
4447/* Find the last barrier less than MAX_COUNT bytes from FROM, or
4448 create one. */
2b835d68
RE
4449static rtx
4450find_barrier (from, max_count)
4451 rtx from;
4452 int max_count;
4453{
4454 int count = 0;
4455 rtx found_barrier = 0;
e5e809f4 4456 rtx last = from;
2b835d68
RE
4457
4458 while (from && count < max_count)
4459 {
7551cbc7 4460 rtx tmp;
da6558fd 4461
2b835d68 4462 if (GET_CODE (from) == BARRIER)
7551cbc7 4463 found_barrier = from;
2b835d68
RE
4464
4465 /* Count the length of this insn */
949d79eb
RE
4466 if (GET_CODE (from) == JUMP_INSN
4467 && JUMP_LABEL (from) != 0
4468 && ((tmp = next_real_insn (JUMP_LABEL (from)))
4469 == next_real_insn (from))
4470 && tmp != NULL
4471 && GET_CODE (tmp) == JUMP_INSN
4472 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
4473 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
7551cbc7
RE
4474 {
4475 int elt = GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC ? 1 : 0;
4476 count += (get_attr_length (from)
4477 + GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (tmp), elt));
4478 /* Continue after the dispatch table. */
4479 last = from;
4480 from = NEXT_INSN (tmp);
4481 continue;
4482 }
2b835d68
RE
4483 else
4484 count += get_attr_length (from);
4485
e5e809f4 4486 last = from;
2b835d68
RE
4487 from = NEXT_INSN (from);
4488 }
4489
da6558fd 4490 if (! found_barrier)
2b835d68
RE
4491 {
4492 /* We didn't find a barrier in time to
da6558fd 4493 dump our stuff, so we'll make one. */
2b835d68 4494 rtx label = gen_label_rtx ();
da6558fd 4495
2b835d68 4496 if (from)
e5e809f4 4497 from = PREV_INSN (last);
2b835d68
RE
4498 else
4499 from = get_last_insn ();
da6558fd
NC
4500
4501 /* Walk back to be just before any jump. */
2b835d68 4502 while (GET_CODE (from) == JUMP_INSN
25b1c156 4503 || GET_CODE (from) == NOTE
2b835d68
RE
4504 || GET_CODE (from) == CODE_LABEL)
4505 from = PREV_INSN (from);
da6558fd 4506
2b835d68
RE
4507 from = emit_jump_insn_after (gen_jump (label), from);
4508 JUMP_LABEL (from) = label;
4509 found_barrier = emit_barrier_after (from);
4510 emit_label_after (label, found_barrier);
2b835d68
RE
4511 }
4512
4513 return found_barrier;
4514}
4515
949d79eb
RE
4516struct minipool_fixup
4517{
4518 struct minipool_fixup *next;
4519 rtx insn;
4520 int address;
4521 rtx *loc;
4522 enum machine_mode mode;
4523 rtx value;
4524 int range;
4525};
4526
4527struct minipool_fixup *minipool_fix_head;
4528struct minipool_fixup *minipool_fix_tail;
4529
4530static void
4531push_minipool_barrier (insn, address)
2b835d68 4532 rtx insn;
949d79eb 4533 int address;
2b835d68 4534{
949d79eb
RE
4535 struct minipool_fixup *fix
4536 = (struct minipool_fixup *) oballoc (sizeof (struct minipool_fixup));
ad076f4e 4537
949d79eb
RE
4538 fix->insn = insn;
4539 fix->address = address;
2b835d68 4540
949d79eb
RE
4541 fix->next = NULL;
4542 if (minipool_fix_head != NULL)
4543 minipool_fix_tail->next = fix;
4544 else
4545 minipool_fix_head = fix;
4546
4547 minipool_fix_tail = fix;
4548}
2b835d68 4549
949d79eb
RE
4550static void
4551push_minipool_fix (insn, address, loc, mode, value)
4552 rtx insn;
4553 int address;
4554 rtx *loc;
4555 enum machine_mode mode;
4556 rtx value;
4557{
4558 struct minipool_fixup *fix
4559 = (struct minipool_fixup *) oballoc (sizeof (struct minipool_fixup));
4560
4561#ifdef AOF_ASSEMBLER
4562 /* PIC symbol references need to be converted into offsets into the
4563 based area. */
4564 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
4565 value = aof_pic_entry (value);
4566#endif /* AOF_ASSEMBLER */
4567
4568 fix->insn = insn;
4569 fix->address = address;
4570 fix->loc = loc;
4571 fix->mode = mode;
4572 fix->value = value;
4573 fix->range = get_attr_pool_range (insn);
4574
4575 /* If an insn doesn't have a range defined for it, then it isn't
4576 expecting to be reworked by this code. Better to abort now than
4577 to generate duff assembly code. */
4578 if (fix->range == 0)
4579 abort ();
4580
4581 /* Add it to the chain of fixes */
4582 fix->next = NULL;
4583 if (minipool_fix_head != NULL)
4584 minipool_fix_tail->next = fix;
4585 else
4586 minipool_fix_head = fix;
4587
4588 minipool_fix_tail = fix;
4589}
4590
4591static void
4592note_invalid_constants (insn, address)
4593 rtx insn;
4594 int address;
4595{
4596 int opno;
4597
4598 /* Extract the operands of the insn */
4599 extract_insn (insn);
4600
949d79eb
RE
4601 /* Find the alternative selected */
4602 if (! constrain_operands (1))
4603 fatal_insn_not_found (insn);
4604
4605 /* Preprocess the constraints, to extract some useful information. */
4606 preprocess_constraints ();
4607
1ccbefce 4608 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb
RE
4609 {
4610 /* Things we need to fix can only occur in inputs */
36ab44c7 4611 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
4612 continue;
4613
4614 /* If this alternative is a memory reference, then any mention
4615 of constants in this alternative is really to fool reload
4616 into allowing us to accept one there. We need to fix them up
4617 now so that we output the right code. */
4618 if (recog_op_alt[opno][which_alternative].memory_ok)
4619 {
1ccbefce 4620 rtx op = recog_data.operand[opno];
949d79eb
RE
4621
4622 if (CONSTANT_P (op))
1ccbefce
RH
4623 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
4624 recog_data.operand_mode[opno], op);
949d79eb
RE
4625#ifndef AOF_ASSEMBLER
4626 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
1ccbefce
RH
4627 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
4628 recog_data.operand_mode[opno],
4629 XVECEXP (op, 0, 0));
949d79eb 4630#endif
1ccbefce 4631 else if (recog_data.operand_mode[opno] == SImode
949d79eb
RE
4632 && GET_CODE (op) == MEM
4633 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
4634 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
1ccbefce
RH
4635 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
4636 recog_data.operand_mode[opno],
949d79eb
RE
4637 get_pool_constant (XEXP (op, 0)));
4638 }
2b835d68 4639 }
2b835d68
RE
4640}
4641
4642void
4643arm_reorg (first)
4644 rtx first;
4645{
4646 rtx insn;
949d79eb
RE
4647 int address = 0;
4648 struct minipool_fixup *fix;
ad076f4e 4649
949d79eb 4650 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 4651
949d79eb
RE
4652 /* The first insn must always be a note, or the code below won't
4653 scan it properly. */
4654 if (GET_CODE (first) != NOTE)
4655 abort ();
4656
4657 /* Scan all the insns and record the operands that will need fixing. */
4658 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 4659 {
2b835d68 4660
949d79eb
RE
4661 if (GET_CODE (insn) == BARRIER)
4662 push_minipool_barrier (insn, address);
4663 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
4664 || GET_CODE (insn) == JUMP_INSN)
4665 {
4666 rtx table;
4667
4668 note_invalid_constants (insn, address);
4669 address += get_attr_length (insn);
4670 /* If the insn is a vector jump, add the size of the table
4671 and skip the table. */
4672 if (GET_CODE (insn) == JUMP_INSN
4673 && JUMP_LABEL (insn) != NULL
4674 && ((table = next_real_insn (JUMP_LABEL (insn)))
4675 == next_real_insn (insn))
4676 && table != NULL
4677 && GET_CODE (table) == JUMP_INSN
4678 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4679 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2b835d68 4680 {
949d79eb 4681 int elt = GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 4682
949d79eb
RE
4683 address += GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (table),
4684 elt);
4685 insn = table;
4686 }
4687 }
4688 }
332072db 4689
949d79eb
RE
4690 /* Now scan the fixups and perform the required changes. */
4691 for (fix = minipool_fix_head; fix; fix = fix->next)
4692 {
4693 struct minipool_fixup *ftmp;
4694 struct minipool_fixup *last_barrier = NULL;
4695 int max_range;
4696 rtx barrier;
4697 struct minipool_fixup *this_fix;
4698 int new_minipool_size = 0;
4699
4700 /* Skip any further barriers before the next fix. */
4701 while (fix && GET_CODE (fix->insn) == BARRIER)
4702 fix = fix->next;
4703
4704 if (fix == NULL)
4705 break;
332072db 4706
949d79eb
RE
4707 ftmp = fix;
4708 max_range = fix->address + fix->range;
2b835d68 4709
949d79eb
RE
4710 /* Find all the other fixes that can live in the same pool. */
4711 while (ftmp->next && ftmp->next->address < max_range
4712 && (GET_CODE (ftmp->next->insn) == BARRIER
4713 /* Ensure we can reach the constant inside the pool. */
4714 || ftmp->next->range > new_minipool_size))
4715 {
4716 ftmp = ftmp->next;
4717 if (GET_CODE (ftmp->insn) == BARRIER)
4718 last_barrier = ftmp;
4719 else
4720 {
4721 /* Does this fix constrain the range we can search? */
4722 if (ftmp->address + ftmp->range - new_minipool_size < max_range)
4723 max_range = ftmp->address + ftmp->range - new_minipool_size;
2b835d68 4724
949d79eb 4725 new_minipool_size += GET_MODE_SIZE (ftmp->mode);
2b835d68 4726 }
2b835d68 4727 }
949d79eb
RE
4728
4729 /* If we found a barrier, drop back to that; any fixes that we could
4730 have reached but come after the barrier will now go in the next
4731 mini-pool. */
4732 if (last_barrier != NULL)
4733 {
4734 barrier = last_barrier->insn;
4735 ftmp = last_barrier;
4736 }
2bfa88dc
RE
4737 /* ftmp is the last fix that we can fit into this pool and we
4738 failed to find a barrier that we could use. Insert a new
4739 barrier in the code and arrange to jump around it. */
949d79eb 4740 else
2bfa88dc
RE
4741 {
4742 /* Check that there isn't another fix that is in range that
4743 we couldn't fit into this pool because the pool was
4744 already too large: we need to put the pool before such an
4745 instruction. */
4746 if (ftmp->next && ftmp->next->address < max_range)
4747 max_range = ftmp->address;
4748
4749 barrier = find_barrier (ftmp->insn, max_range - ftmp->address);
4750 }
949d79eb
RE
4751
4752 /* Scan over the fixes we have identified for this pool, fixing them
4753 up and adding the constants to the pool itself. */
4754 for (this_fix = fix; this_fix && ftmp->next != this_fix;
4755 this_fix = this_fix->next)
4756 if (GET_CODE (this_fix->insn) != BARRIER)
4757 {
4758 int offset = add_minipool_constant (this_fix->value,
4759 this_fix->mode);
4760 rtx addr
4761 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
4762 minipool_vector_label),
4763 offset);
4764 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
4765 }
4766
4767 dump_minipool (barrier);
4768 fix = ftmp;
2b835d68 4769 }
4b632bf1 4770
949d79eb
RE
4771 /* From now on we must synthesize any constants that we can't handle
4772 directly. This can happen if the RTL gets split during final
4773 instruction generation. */
4b632bf1 4774 after_arm_reorg = 1;
2b835d68
RE
4775}
4776
cce8749e
CH
4777\f
4778/* Routines to output assembly language. */
4779
f3bb6135 4780/* If the rtx is the correct value then return the string of the number.
ff9940b0
RE
4781 In this way we can ensure that valid double constants are generated even
4782 when cross compiling. */
4783char *
4784fp_immediate_constant (x)
b5cc037f 4785 rtx x;
ff9940b0
RE
4786{
4787 REAL_VALUE_TYPE r;
4788 int i;
4789
4790 if (!fpa_consts_inited)
4791 init_fpa_table ();
4792
4793 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
4794 for (i = 0; i < 8; i++)
4795 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
4796 return strings_fpa[i];
f3bb6135 4797
ff9940b0
RE
4798 abort ();
4799}
4800
9997d19d
RE
4801/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
4802static char *
4803fp_const_from_val (r)
62b10bbc 4804 REAL_VALUE_TYPE * r;
9997d19d
RE
4805{
4806 int i;
4807
4808 if (! fpa_consts_inited)
4809 init_fpa_table ();
4810
4811 for (i = 0; i < 8; i++)
4812 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
4813 return strings_fpa[i];
4814
4815 abort ();
4816}
ff9940b0 4817
cce8749e
CH
4818/* Output the operands of a LDM/STM instruction to STREAM.
4819 MASK is the ARM register set mask of which only bits 0-15 are important.
4820 INSTR is the instruction template, into which the base register REG
4821 is printed. HAT is non-zero if a `^' must follow the register list. */
4822
4823void
dd18ae56 4824print_multi_reg (stream, instr, reg, mask, hat)
62b10bbc
NC
4825 FILE * stream;
4826 char * instr;
dd18ae56
NC
4827 int reg;
4828 int mask;
4829 int hat;
cce8749e
CH
4830{
4831 int i;
4832 int not_first = FALSE;
4833
1d5473cb 4834 fputc ('\t', stream);
dd18ae56 4835 asm_fprintf (stream, instr, reg);
1d5473cb 4836 fputs (", {", stream);
62b10bbc 4837
cce8749e
CH
4838 for (i = 0; i < 16; i++)
4839 if (mask & (1 << i))
4840 {
4841 if (not_first)
4842 fprintf (stream, ", ");
62b10bbc 4843
dd18ae56 4844 asm_fprintf (stream, "%r", i);
cce8749e
CH
4845 not_first = TRUE;
4846 }
f3bb6135 4847
cce8749e 4848 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 4849}
cce8749e
CH
4850
4851/* Output a 'call' insn. */
4852
4853char *
4854output_call (operands)
62b10bbc 4855 rtx * operands;
cce8749e 4856{
cce8749e
CH
4857 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
4858
62b10bbc 4859 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 4860 {
62b10bbc 4861 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 4862 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 4863 }
62b10bbc 4864
1d5473cb 4865 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 4866
6cfc7210 4867 if (TARGET_INTERWORK)
da6558fd
NC
4868 output_asm_insn ("bx%?\t%0", operands);
4869 else
4870 output_asm_insn ("mov%?\t%|pc, %0", operands);
4871
f3bb6135
RE
4872 return "";
4873}
cce8749e 4874
ff9940b0
RE
4875static int
4876eliminate_lr2ip (x)
62b10bbc 4877 rtx * x;
ff9940b0
RE
4878{
4879 int something_changed = 0;
62b10bbc 4880 rtx x0 = * x;
ff9940b0
RE
4881 int code = GET_CODE (x0);
4882 register int i, j;
6f7d635c 4883 register const char * fmt;
ff9940b0
RE
4884
4885 switch (code)
4886 {
4887 case REG:
62b10bbc 4888 if (REGNO (x0) == LR_REGNUM)
ff9940b0 4889 {
62b10bbc 4890 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
4891 return 1;
4892 }
4893 return 0;
4894 default:
4895 /* Scan through the sub-elements and change any references there */
4896 fmt = GET_RTX_FORMAT (code);
62b10bbc 4897
ff9940b0
RE
4898 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4899 if (fmt[i] == 'e')
4900 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
4901 else if (fmt[i] == 'E')
4902 for (j = 0; j < XVECLEN (x0, i); j++)
4903 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 4904
ff9940b0
RE
4905 return something_changed;
4906 }
4907}
4908
4909/* Output a 'call' insn that is a reference in memory. */
4910
4911char *
4912output_call_mem (operands)
62b10bbc 4913 rtx * operands;
ff9940b0
RE
4914{
4915 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
4916 /* Handle calls using lr by using ip (which may be clobbered in
4917 subr anyway). */
4918 if (eliminate_lr2ip (&operands[0]))
1d5473cb 4919 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 4920
6cfc7210 4921 if (TARGET_INTERWORK)
da6558fd
NC
4922 {
4923 output_asm_insn ("ldr%?\t%|ip, %0", operands);
4924 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
4925 output_asm_insn ("bx%?\t%|ip", operands);
4926 }
4927 else
4928 {
4929 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
4930 output_asm_insn ("ldr%?\t%|pc, %0", operands);
4931 }
4932
f3bb6135
RE
4933 return "";
4934}
ff9940b0
RE
4935
4936
4937/* Output a move from arm registers to an fpu register.
4938 OPERANDS[0] is an fpu register.
4939 OPERANDS[1] is the first register of an arm register pair. */
4940
4941char *
4942output_mov_long_double_fpu_from_arm (operands)
62b10bbc 4943 rtx * operands;
ff9940b0
RE
4944{
4945 int arm_reg0 = REGNO (operands[1]);
4946 rtx ops[3];
4947
62b10bbc
NC
4948 if (arm_reg0 == IP_REGNUM)
4949 abort ();
f3bb6135 4950
43cffd11
RE
4951 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4952 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
4953 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 4954
1d5473cb
RE
4955 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
4956 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 4957
f3bb6135
RE
4958 return "";
4959}
ff9940b0
RE
4960
4961/* Output a move from an fpu register to arm registers.
4962 OPERANDS[0] is the first registers of an arm register pair.
4963 OPERANDS[1] is an fpu register. */
4964
4965char *
4966output_mov_long_double_arm_from_fpu (operands)
62b10bbc 4967 rtx * operands;
ff9940b0
RE
4968{
4969 int arm_reg0 = REGNO (operands[0]);
4970 rtx ops[3];
4971
62b10bbc
NC
4972 if (arm_reg0 == IP_REGNUM)
4973 abort ();
f3bb6135 4974
43cffd11
RE
4975 ops[0] = gen_rtx_REG (SImode, arm_reg0);
4976 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
4977 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 4978
1d5473cb
RE
4979 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
4980 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
4981 return "";
4982}
ff9940b0
RE
4983
4984/* Output a move from arm registers to arm registers of a long double.
4985 OPERANDS[0] is the destination.
4986 OPERANDS[1] is the source. */
4987char *
4988output_mov_long_double_arm_from_arm (operands)
62b10bbc 4989 rtx * operands;
ff9940b0
RE
4990{
4991 /* We have to be careful here because the two might overlap */
4992 int dest_start = REGNO (operands[0]);
4993 int src_start = REGNO (operands[1]);
4994 rtx ops[2];
4995 int i;
4996
4997 if (dest_start < src_start)
4998 {
4999 for (i = 0; i < 3; i++)
5000 {
43cffd11
RE
5001 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5002 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5003 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5004 }
5005 }
5006 else
5007 {
5008 for (i = 2; i >= 0; i--)
5009 {
43cffd11
RE
5010 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5011 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5012 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5013 }
5014 }
f3bb6135 5015
ff9940b0
RE
5016 return "";
5017}
5018
5019
cce8749e
CH
5020/* Output a move from arm registers to an fpu register.
5021 OPERANDS[0] is an fpu register.
5022 OPERANDS[1] is the first register of an arm register pair. */
5023
5024char *
5025output_mov_double_fpu_from_arm (operands)
62b10bbc 5026 rtx * operands;
cce8749e
CH
5027{
5028 int arm_reg0 = REGNO (operands[1]);
5029 rtx ops[2];
5030
62b10bbc
NC
5031 if (arm_reg0 == IP_REGNUM)
5032 abort ();
5033
43cffd11
RE
5034 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5035 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5036 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
5037 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
5038 return "";
5039}
cce8749e
CH
5040
5041/* Output a move from an fpu register to arm registers.
5042 OPERANDS[0] is the first register of an arm register pair.
5043 OPERANDS[1] is an fpu register. */
5044
5045char *
5046output_mov_double_arm_from_fpu (operands)
62b10bbc 5047 rtx * operands;
cce8749e
CH
5048{
5049 int arm_reg0 = REGNO (operands[0]);
5050 rtx ops[2];
5051
62b10bbc
NC
5052 if (arm_reg0 == IP_REGNUM)
5053 abort ();
f3bb6135 5054
43cffd11
RE
5055 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5056 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5057 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
5058 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
5059 return "";
5060}
cce8749e
CH
5061
5062/* Output a move between double words.
5063 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
5064 or MEM<-REG and all MEMs must be offsettable addresses. */
5065
5066char *
5067output_move_double (operands)
aec3cfba 5068 rtx * operands;
cce8749e
CH
5069{
5070 enum rtx_code code0 = GET_CODE (operands[0]);
5071 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 5072 rtx otherops[3];
cce8749e
CH
5073
5074 if (code0 == REG)
5075 {
5076 int reg0 = REGNO (operands[0]);
5077
43cffd11 5078 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 5079
cce8749e
CH
5080 if (code1 == REG)
5081 {
5082 int reg1 = REGNO (operands[1]);
62b10bbc
NC
5083 if (reg1 == IP_REGNUM)
5084 abort ();
f3bb6135 5085
cce8749e 5086 /* Ensure the second source is not overwritten */
c1c2bc04 5087 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 5088 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 5089 else
6cfc7210 5090 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
5091 }
5092 else if (code1 == CONST_DOUBLE)
5093 {
226a5051
RE
5094 if (GET_MODE (operands[1]) == DFmode)
5095 {
5096 long l[2];
5097 union real_extract u;
5098
5099 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
5100 sizeof (u));
5101 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
5102 otherops[1] = GEN_INT (l[1]);
5103 operands[1] = GEN_INT (l[0]);
5104 }
c1c2bc04
RE
5105 else if (GET_MODE (operands[1]) != VOIDmode)
5106 abort ();
5107 else if (WORDS_BIG_ENDIAN)
5108 {
5109
5110 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5111 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5112 }
226a5051
RE
5113 else
5114 {
c1c2bc04 5115
226a5051
RE
5116 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5117 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5118 }
6cfc7210 5119
c1c2bc04
RE
5120 output_mov_immediate (operands);
5121 output_mov_immediate (otherops);
cce8749e
CH
5122 }
5123 else if (code1 == CONST_INT)
5124 {
56636818
JL
5125#if HOST_BITS_PER_WIDE_INT > 32
5126 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
5127 what the upper word is. */
5128 if (WORDS_BIG_ENDIAN)
5129 {
5130 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5131 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5132 }
5133 else
5134 {
5135 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5136 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5137 }
5138#else
5139 /* Sign extend the intval into the high-order word */
c1c2bc04
RE
5140 if (WORDS_BIG_ENDIAN)
5141 {
5142 otherops[1] = operands[1];
5143 operands[1] = (INTVAL (operands[1]) < 0
5144 ? constm1_rtx : const0_rtx);
5145 }
ff9940b0 5146 else
c1c2bc04 5147 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 5148#endif
c1c2bc04
RE
5149 output_mov_immediate (otherops);
5150 output_mov_immediate (operands);
cce8749e
CH
5151 }
5152 else if (code1 == MEM)
5153 {
ff9940b0 5154 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 5155 {
ff9940b0 5156 case REG:
9997d19d 5157 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 5158 break;
2b835d68 5159
ff9940b0 5160 case PRE_INC:
2b835d68 5161 abort (); /* Should never happen now */
ff9940b0 5162 break;
2b835d68 5163
ff9940b0 5164 case PRE_DEC:
2b835d68 5165 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 5166 break;
2b835d68 5167
ff9940b0 5168 case POST_INC:
9997d19d 5169 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 5170 break;
2b835d68 5171
ff9940b0 5172 case POST_DEC:
2b835d68 5173 abort (); /* Should never happen now */
ff9940b0 5174 break;
2b835d68
RE
5175
5176 case LABEL_REF:
5177 case CONST:
5178 output_asm_insn ("adr%?\t%0, %1", operands);
5179 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
5180 break;
5181
ff9940b0 5182 default:
aec3cfba
NC
5183 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
5184 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 5185 {
2b835d68
RE
5186 otherops[0] = operands[0];
5187 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
5188 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
5189 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
5190 {
5191 if (GET_CODE (otherops[2]) == CONST_INT)
5192 {
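 /* (Illustrative: a double word at [rb, #-8] occupies rb-8 and
    rb-4, which is exactly what ldm%?db fetches; likewise #-4
    maps onto da and #4 onto ib, so these three offsets need no
    address arithmetic at all.)  */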
5193 switch (INTVAL (otherops[2]))
5194 {
5195 case -8:
5196 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
5197 return "";
5198 case -4:
5199 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
5200 return "";
5201 case 4:
5202 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
5203 return "";
5204 }
5205 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
5206 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
5207 else
5208 output_asm_insn ("add%?\t%0, %1, %2", otherops);
5209 }
5210 else
5211 output_asm_insn ("add%?\t%0, %1, %2", otherops);
5212 }
5213 else
5214 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 5215
2b835d68
RE
5216 return "ldm%?ia\t%0, %M0";
5217 }
5218 else
5219 {
5220 otherops[1] = adj_offsettable_operand (operands[1], 4);
5221 /* Take care of overlapping base/data reg. */
5222 if (reg_mentioned_p (operands[0], operands[1]))
5223 {
5224 output_asm_insn ("ldr%?\t%0, %1", otherops);
5225 output_asm_insn ("ldr%?\t%0, %1", operands);
5226 }
5227 else
5228 {
5229 output_asm_insn ("ldr%?\t%0, %1", operands);
5230 output_asm_insn ("ldr%?\t%0, %1", otherops);
5231 }
cce8749e
CH
5232 }
5233 }
5234 }
2b835d68 5235 else
62b10bbc 5236 abort (); /* Constraints should prevent this */
cce8749e
CH
5237 }
5238 else if (code0 == MEM && code1 == REG)
5239 {
62b10bbc
NC
5240 if (REGNO (operands[1]) == IP_REGNUM)
5241 abort ();
2b835d68 5242
ff9940b0
RE
5243 switch (GET_CODE (XEXP (operands[0], 0)))
5244 {
5245 case REG:
9997d19d 5246 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 5247 break;
2b835d68 5248
ff9940b0 5249 case PRE_INC:
2b835d68 5250 abort (); /* Should never happen now */
ff9940b0 5251 break;
2b835d68 5252
ff9940b0 5253 case PRE_DEC:
2b835d68 5254 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 5255 break;
2b835d68 5256
ff9940b0 5257 case POST_INC:
9997d19d 5258 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 5259 break;
2b835d68 5260
ff9940b0 5261 case POST_DEC:
2b835d68 5262 abort (); /* Should never happen now */
ff9940b0 5263 break;
2b835d68
RE
5264
5265 case PLUS:
5266 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
5267 {
5268 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
5269 {
5270 case -8:
5271 output_asm_insn ("stm%?db\t%m0, %M1", operands);
5272 return "";
5273
5274 case -4:
5275 output_asm_insn ("stm%?da\t%m0, %M1", operands);
5276 return "";
5277
5278 case 4:
5279 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
5280 return "";
5281 }
5282 }
5283 /* Fall through */
5284
ff9940b0 5285 default:
cce8749e 5286 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 5287 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
5288 output_asm_insn ("str%?\t%1, %0", operands);
5289 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
5290 }
5291 }
2b835d68 5292 else
62b10bbc 5293 abort (); /* Constraints should prevent this */
cce8749e 5294
9997d19d
RE
5295 return "";
5296}
cce8749e
CH
5297
5298
5299/* Output an arbitrary MOV reg, #n.
5300 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
5301
5302char *
5303output_mov_immediate (operands)
62b10bbc 5304 rtx * operands;
cce8749e 5305{
f3bb6135 5306 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
5307 int n_ones = 0;
5308 int i;
5309
5310 /* Try to use one MOV */
cce8749e 5311 if (const_ok_for_arm (n))
f3bb6135 5312 {
9997d19d 5313 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
5314 return "";
5315 }
cce8749e
CH
5316
5317 /* Try to use one MVN */
f3bb6135 5318 if (const_ok_for_arm (~n))
cce8749e 5319 {
f3bb6135 5320 operands[1] = GEN_INT (~n);
9997d19d 5321 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 5322 return "";
cce8749e
CH
5323 }
5324
5325 /* If all else fails, make it out of ORRs or BICs as appropriate. */
5326
5327 for (i = 0; i < 32; i++)
5328 if (n & 1 << i)
5329 n_ones++;
5330
5331 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
9997d19d
RE
5332 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
5333 ~n);
cce8749e 5334 else
9997d19d
RE
5335 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
5336 n);
f3bb6135
RE
5337
5338 return "";
5339}
cce8749e
CH
5340
5341
5342/* Output an ADD r, s, #n where n may be too big for one instruction. If
5343 adding zero to one register, output nothing. */
5344
5345char *
5346output_add_immediate (operands)
62b10bbc 5347 rtx * operands;
cce8749e 5348{
f3bb6135 5349 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
5350
5351 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
5352 {
5353 if (n < 0)
5354 output_multi_immediate (operands,
9997d19d
RE
5355 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
5356 -n);
cce8749e
CH
5357 else
5358 output_multi_immediate (operands,
9997d19d
RE
5359 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
5360 n);
cce8749e 5361 }
f3bb6135
RE
5362
5363 return "";
5364}
cce8749e 5365
cce8749e
CH
5366/* Output a multiple immediate operation.
5367 OPERANDS is the vector of operands referred to in the output patterns.
5368 INSTR1 is the output pattern to use for the first constant.
5369 INSTR2 is the output pattern to use for subsequent constants.
5370 IMMED_OP is the index of the constant slot in OPERANDS.
5371 N is the constant value. */
5372
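/* Worked example (illustrative, with an assumed register r0 and the
   mov/orr templates): n = 0x00ff0ff0 is emitted as

	mov	r0, #0x00000ff0
	orr	r0, r0, #0x00ff0000

   each chunk being an 8-bit value at an even rotation, which is what
   the ARM immediate field can encode.  */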
18af7313 5373static char *
cce8749e 5374output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc
NC
5375 rtx * operands;
5376 char * instr1, * instr2;
f3bb6135
RE
5377 int immed_op;
5378 HOST_WIDE_INT n;
cce8749e 5379{
f3bb6135
RE
5380#if HOST_BITS_PER_WIDE_INT > 32
5381 n &= 0xffffffff;
5382#endif
5383
cce8749e
CH
5384 if (n == 0)
5385 {
5386 operands[immed_op] = const0_rtx;
f3bb6135 5387 output_asm_insn (instr1, operands); /* Quick and easy output */
cce8749e
CH
5388 }
5389 else
5390 {
5391 int i;
5392 char *instr = instr1;
5393
5394 /* Note that n is never zero here (which would give no output) */
cce8749e
CH
5395 for (i = 0; i < 32; i += 2)
5396 {
5397 if (n & (3 << i))
5398 {
f3bb6135
RE
5399 operands[immed_op] = GEN_INT (n & (255 << i));
5400 output_asm_insn (instr, operands);
cce8749e
CH
5401 instr = instr2;
5402 i += 6;
5403 }
5404 }
5405 }
f3bb6135 5406 return "";
9997d19d 5407}
cce8749e
CH
5408
5409
5410/* Return the appropriate ARM instruction for the operation code.
5411 The returned result should not be overwritten. OP is the rtx of the
5412 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
5413 was shifted. */
5414
5415char *
5416arithmetic_instr (op, shift_first_arg)
5417 rtx op;
f3bb6135 5418 int shift_first_arg;
cce8749e 5419{
9997d19d 5420 switch (GET_CODE (op))
cce8749e
CH
5421 {
5422 case PLUS:
f3bb6135
RE
5423 return "add";
5424
cce8749e 5425 case MINUS:
f3bb6135
RE
5426 return shift_first_arg ? "rsb" : "sub";
5427
cce8749e 5428 case IOR:
f3bb6135
RE
5429 return "orr";
5430
cce8749e 5431 case XOR:
f3bb6135
RE
5432 return "eor";
5433
cce8749e 5434 case AND:
f3bb6135
RE
5435 return "and";
5436
cce8749e 5437 default:
f3bb6135 5438 abort ();
cce8749e 5439 }
f3bb6135 5440}
cce8749e
CH
5441
5442
5443/* Ensure valid constant shifts and return the appropriate shift mnemonic
5444 for the operation code. The returned result should not be overwritten.
5445 OP is the rtx code of the shift.
9997d19d
RE
 5446   On exit, *AMOUNTP will be -1 if the shift is by a register, or the
 5447   constant shift amount if the shift is by a constant. */
cce8749e 5448
9997d19d
RE
5449static char *
5450shift_op (op, amountp)
5451 rtx op;
5452 HOST_WIDE_INT *amountp;
cce8749e 5453{
62b10bbc 5454 char * mnem;
e2c671ba 5455 enum rtx_code code = GET_CODE (op);
cce8749e 5456
9997d19d
RE
5457 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
5458 *amountp = -1;
5459 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
5460 *amountp = INTVAL (XEXP (op, 1));
5461 else
5462 abort ();
5463
e2c671ba 5464 switch (code)
cce8749e
CH
5465 {
5466 case ASHIFT:
5467 mnem = "asl";
5468 break;
f3bb6135 5469
cce8749e
CH
5470 case ASHIFTRT:
5471 mnem = "asr";
cce8749e 5472 break;
f3bb6135 5473
cce8749e
CH
5474 case LSHIFTRT:
5475 mnem = "lsr";
cce8749e 5476 break;
f3bb6135 5477
9997d19d
RE
5478 case ROTATERT:
5479 mnem = "ror";
9997d19d
RE
5480 break;
5481
ff9940b0 5482 case MULT:
e2c671ba
RE
5483 /* We never have to worry about the amount being other than a
5484 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
5485 if (*amountp != -1)
5486 *amountp = int_log2 (*amountp);
5487 else
5488 abort ();
f3bb6135
RE
5489 return "asl";
5490
cce8749e 5491 default:
f3bb6135 5492 abort ();
cce8749e
CH
5493 }
5494
e2c671ba
RE
5495 if (*amountp != -1)
5496 {
5497 /* This is not 100% correct, but follows from the desire to merge
5498 multiplication by a power of 2 with the recognizer for a
 5499	 shift.  >=32 is not a valid shift for "asl", so we must try to
5500 output a shift that produces the correct arithmetical result.
ddd5a7c1 5501 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
5502 is not set correctly if we set the flags; but we never use the
5503 carry bit from such an operation, so we can ignore that. */
5504 if (code == ROTATERT)
5505 *amountp &= 31; /* Rotate is just modulo 32 */
5506 else if (*amountp != (*amountp & 31))
5507 {
5508 if (code == ASHIFT)
5509 mnem = "lsr";
5510 *amountp = 32;
5511 }
5512
5513 /* Shifts of 0 are no-ops. */
5514 if (*amountp == 0)
5515 return NULL;
5516 }
5517
9997d19d
RE
5518 return mnem;
5519}
cce8749e
CH
5520
5521
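/* Illustrative sketch (not GCC code): the normalization rules that
   shift_op applies to constant shift amounts, restated standalone.
   MNEM is "asl", "asr", "lsr" or "ror".  A rotate is reduced modulo
   32; any other out-of-range shift is clamped to 32, and since
   "asl #32" is not encodable it is rewritten as "lsr #32" (both yield
   zero); a shift of zero is a no-op, signalled here by NULL.  */

#include <stddef.h>
#include <string.h>

static const char *
normalize_shift (const char *mnem, int *amount)
{
  if (strcmp (mnem, "ror") == 0)
    *amount &= 31;                      /* Rotate is just modulo 32.  */
  else if (*amount != (*amount & 31))
    {
      if (strcmp (mnem, "asl") == 0)
        mnem = "lsr";                   /* asl #32 is not valid.  */
      *amount = 32;
    }

  return *amount == 0 ? NULL : mnem;    /* Shift of 0 is a no-op.  */
}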
 5522/* Return the shift count (log2) of POWER, which must be a power of two. */
5523
18af7313 5524static HOST_WIDE_INT
cce8749e 5525int_log2 (power)
f3bb6135 5526 HOST_WIDE_INT power;
cce8749e 5527{
f3bb6135 5528 HOST_WIDE_INT shift = 0;
cce8749e 5529
2b835d68 5530 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
5531 {
5532 if (shift > 31)
f3bb6135 5533 abort ();
cce8749e
CH
5534 shift++;
5535 }
f3bb6135
RE
5536
5537 return shift;
5538}
cce8749e 5539
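/* Worked example: int_log2 above returns the position of the lowest
   set bit, so int_log2 (1) == 0 and int_log2 (8) == 3.  Its callers
   guarantee POWER is a power of two in the range 1..2^31; this is how
   a MULT by 8 in shift_op becomes "asl" with *amountp == 3.  */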
cce8749e
CH
5540/* Output a .ascii pseudo-op, keeping track of lengths. This is because
5541 /bin/as is horribly restrictive. */
6cfc7210 5542#define MAX_ASCII_LEN 51
cce8749e
CH
5543
5544void
5545output_ascii_pseudo_op (stream, p, len)
62b10bbc
NC
5546 FILE * stream;
5547 unsigned char * p;
cce8749e
CH
5548 int len;
5549{
5550 int i;
6cfc7210 5551 int len_so_far = 0;
cce8749e 5552
6cfc7210
NC
5553 fputs ("\t.ascii\t\"", stream);
5554
cce8749e
CH
5555 for (i = 0; i < len; i++)
5556 {
5557 register int c = p[i];
5558
6cfc7210 5559 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 5560 {
6cfc7210 5561 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 5562 len_so_far = 0;
cce8749e
CH
5563 }
5564
6cfc7210 5565 switch (c)
cce8749e 5566 {
6cfc7210
NC
5567 case TARGET_TAB:
5568 fputs ("\\t", stream);
5569 len_so_far += 2;
5570 break;
5571
5572 case TARGET_FF:
5573 fputs ("\\f", stream);
5574 len_so_far += 2;
5575 break;
5576
5577 case TARGET_BS:
5578 fputs ("\\b", stream);
5579 len_so_far += 2;
5580 break;
5581
5582 case TARGET_CR:
5583 fputs ("\\r", stream);
5584 len_so_far += 2;
5585 break;
5586
5587 case TARGET_NEWLINE:
5588 fputs ("\\n", stream);
5589 c = p [i + 1];
5590 if ((c >= ' ' && c <= '~')
5591 || c == TARGET_TAB)
5592 /* This is a good place for a line break. */
5593 len_so_far = MAX_ASCII_LEN;
5594 else
5595 len_so_far += 2;
5596 break;
5597
5598 case '\"':
5599 case '\\':
5600 putc ('\\', stream);
5601 len_so_far ++;
5602 /* drop through. */
f3bb6135 5603
6cfc7210
NC
5604 default:
5605 if (c >= ' ' && c <= '~')
5606 {
5607 putc (c, stream);
5608 len_so_far ++;
5609 }
5610 else
5611 {
5612 fprintf (stream, "\\%03o", c);
5613 len_so_far += 4;
5614 }
5615 break;
cce8749e 5616 }
cce8749e 5617 }
f3bb6135 5618
cce8749e 5619 fputs ("\"\n", stream);
f3bb6135 5620}
cce8749e 5621\f
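/* Illustrative sketch (not GCC code) of the per-character escaping
   rules above.  The real routine tests TARGET_TAB, TARGET_NEWLINE
   etc. because the host and target character sets may differ; this
   sketch assumes both are ASCII.  The return value is the number of
   characters written, which the caller accumulates against
   MAX_ASCII_LEN.  */

#include <stdio.h>

static int
emit_ascii_char (FILE *stream, int c)
{
  switch (c)
    {
    case '\t': fputs ("\\t", stream); return 2;
    case '\f': fputs ("\\f", stream); return 2;
    case '\b': fputs ("\\b", stream); return 2;
    case '\r': fputs ("\\r", stream); return 2;
    case '\n': fputs ("\\n", stream); return 2;

    case '\"':
    case '\\':
      putc ('\\', stream);
      putc (c, stream);
      return 2;

    default:
      if (c >= ' ' && c <= '~')
        {
          putc (c, stream);             /* Printable: pass through.  */
          return 1;
        }
      fprintf (stream, "\\%03o", c);    /* Everything else: octal.  */
      return 4;
    }
}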
ff9940b0
RE
5622
5623/* Try to determine whether a pattern really clobbers the link register.
5624 This information is useful when peepholing, so that lr need not be pushed
0e84b556
RK
5625 if we combine a call followed by a return.
5626 NOTE: This code does not check for side-effect expressions in a SET_SRC:
5627 such a check should not be needed because these only update an existing
5628 value within a register; the register must still be set elsewhere within
5629 the function. */
ff9940b0
RE
5630
5631static int
5632pattern_really_clobbers_lr (x)
f3bb6135 5633 rtx x;
ff9940b0
RE
5634{
5635 int i;
5636
5637 switch (GET_CODE (x))
5638 {
5639 case SET:
5640 switch (GET_CODE (SET_DEST (x)))
5641 {
5642 case REG:
62b10bbc 5643 return REGNO (SET_DEST (x)) == LR_REGNUM;
f3bb6135 5644
ff9940b0
RE
5645 case SUBREG:
5646 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
62b10bbc 5647 return REGNO (XEXP (SET_DEST (x), 0)) == LR_REGNUM;
f3bb6135 5648
0e84b556
RK
5649 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
5650 return 0;
ff9940b0 5651 abort ();
f3bb6135 5652
ff9940b0
RE
5653 default:
5654 return 0;
5655 }
f3bb6135 5656
ff9940b0
RE
5657 case PARALLEL:
5658 for (i = 0; i < XVECLEN (x, 0); i++)
5659 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
5660 return 1;
5661 return 0;
f3bb6135 5662
ff9940b0
RE
5663 case CLOBBER:
5664 switch (GET_CODE (XEXP (x, 0)))
5665 {
5666 case REG:
62b10bbc 5667 return REGNO (XEXP (x, 0)) == LR_REGNUM;
f3bb6135 5668
ff9940b0
RE
5669 case SUBREG:
5670 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
62b10bbc 5671 return REGNO (XEXP (XEXP (x, 0), 0)) == LR_REGNUM;
ff9940b0 5672 abort ();
f3bb6135 5673
ff9940b0
RE
5674 default:
5675 return 0;
5676 }
f3bb6135 5677
ff9940b0
RE
5678 case UNSPEC:
5679 return 1;
f3bb6135 5680
ff9940b0
RE
5681 default:
5682 return 0;
5683 }
5684}
5685
5686static int
5687function_really_clobbers_lr (first)
f3bb6135 5688 rtx first;
ff9940b0
RE
5689{
5690 rtx insn, next;
5691
5692 for (insn = first; insn; insn = next_nonnote_insn (insn))
5693 {
5694 switch (GET_CODE (insn))
5695 {
5696 case BARRIER:
5697 case NOTE:
5698 case CODE_LABEL:
5699 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
ff9940b0 5700 break;
f3bb6135 5701
ff9940b0
RE
5702 case INSN:
5703 if (pattern_really_clobbers_lr (PATTERN (insn)))
5704 return 1;
5705 break;
f3bb6135 5706
ff9940b0
RE
5707 case CALL_INSN:
5708 /* Don't yet know how to handle those calls that are not to a
5709 SYMBOL_REF */
5710 if (GET_CODE (PATTERN (insn)) != PARALLEL)
5711 abort ();
f3bb6135 5712
ff9940b0
RE
5713 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
5714 {
5715 case CALL:
5716 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
5717 != SYMBOL_REF)
5718 return 1;
5719 break;
f3bb6135 5720
ff9940b0
RE
5721 case SET:
5722 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
5723 0, 0)), 0), 0))
5724 != SYMBOL_REF)
5725 return 1;
5726 break;
f3bb6135 5727
ff9940b0
RE
5728 default: /* Don't recognize it, be safe */
5729 return 1;
5730 }
f3bb6135 5731
ff9940b0
RE
5732 /* A call can be made (by peepholing) not to clobber lr iff it is
5733 followed by a return. There may, however, be a use insn iff
5734 we are returning the result of the call.
5735 If we run off the end of the insn chain, then that means the
5736 call was at the end of the function. Unfortunately we don't
5737 have a return insn for the peephole to recognize, so we
5738 must reject this. (Can this be fixed by adding our own insn?) */
5739 if ((next = next_nonnote_insn (insn)) == NULL)
5740 return 1;
f3bb6135 5741
32de079a
RE
5742 /* No need to worry about lr if the call never returns */
5743 if (GET_CODE (next) == BARRIER)
5744 break;
5745
ff9940b0
RE
5746 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
5747 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
5748 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
5749 == REGNO (XEXP (PATTERN (next), 0))))
5750 if ((next = next_nonnote_insn (next)) == NULL)
5751 return 1;
f3bb6135 5752
ff9940b0
RE
5753 if (GET_CODE (next) == JUMP_INSN
5754 && GET_CODE (PATTERN (next)) == RETURN)
5755 break;
5756 return 1;
f3bb6135 5757
ff9940b0
RE
5758 default:
5759 abort ();
5760 }
5761 }
f3bb6135 5762
ff9940b0
RE
5763 /* We have reached the end of the chain so lr was _not_ clobbered */
5764 return 0;
5765}
5766
5767char *
84ed5e79 5768output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
5769 rtx operand;
5770 int really_return;
84ed5e79 5771 int reverse;
ff9940b0
RE
5772{
5773 char instr[100];
5774 int reg, live_regs = 0;
e2c671ba
RE
5775 int volatile_func = (optimize > 0
5776 && TREE_THIS_VOLATILE (current_function_decl));
5777
5778 return_used_this_function = 1;
ff9940b0 5779
62b10bbc 5780 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 5781 {
e2c671ba 5782 /* If this function was declared non-returning, and we have found a tail
3a5a4282
PB
5783 call, then we have to trust that the called function won't return. */
5784 if (really_return)
5785 {
5786 rtx ops[2];
5787
5788 /* Otherwise, trap an attempted return by aborting. */
5789 ops[0] = operand;
5790 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
5791 : "abort");
5792 assemble_external_libcall (ops[1]);
5793 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
5794 }
5795
e2c671ba
RE
5796 return "";
5797 }
5798
f3bb6135 5799 if (current_function_calls_alloca && ! really_return)
62b10bbc 5800 abort ();
ff9940b0 5801
f3bb6135
RE
5802 for (reg = 0; reg <= 10; reg++)
5803 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0
RE
5804 live_regs++;
5805
ed0e6530
PB
5806 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
5807 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
5808 live_regs++;
5809
62b10bbc 5810 if (live_regs || (regs_ever_live[LR_REGNUM] && ! lr_save_eliminated))
ff9940b0
RE
5811 live_regs++;
5812
5813 if (frame_pointer_needed)
5814 live_regs += 4;
5815
3a5a4282
PB
5816 /* On some ARM architectures it is faster to use LDR rather than LDM to
5817 load a single register. On other architectures, the cost is the same. */
5818 if (live_regs == 1
5819 && regs_ever_live[LR_REGNUM]
5820 && ! lr_save_eliminated
5821 /* FIXME: We ought to handle the case TARGET_APCS_32 is true,
5822 really_return is true, and only the PC needs restoring. */
5823 && ! really_return)
5824 {
5825 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
5826 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
5827 }
5828 else if (live_regs)
ff9940b0 5829 {
62b10bbc 5830 if (lr_save_eliminated || ! regs_ever_live[LR_REGNUM])
ff9940b0 5831 live_regs++;
f3bb6135 5832
ff9940b0 5833 if (frame_pointer_needed)
84ed5e79
RE
5834 strcpy (instr,
5835 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
ff9940b0 5836 else
84ed5e79
RE
5837 strcpy (instr,
5838 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
5839
5840 for (reg = 0; reg <= 10; reg++)
62b10bbc 5841 if (regs_ever_live[reg]
6ed30148 5842 && (! call_used_regs[reg]
ed0e6530
PB
5843 || (flag_pic && ! TARGET_SINGLE_PIC_BASE
5844 && reg == PIC_OFFSET_TABLE_REGNUM)))
ff9940b0 5845 {
1d5473cb 5846 strcat (instr, "%|");
ff9940b0
RE
5847 strcat (instr, reg_names[reg]);
5848 if (--live_regs)
5849 strcat (instr, ", ");
5850 }
f3bb6135 5851
ff9940b0
RE
5852 if (frame_pointer_needed)
5853 {
1d5473cb 5854 strcat (instr, "%|");
ff9940b0
RE
5855 strcat (instr, reg_names[11]);
5856 strcat (instr, ", ");
1d5473cb 5857 strcat (instr, "%|");
ff9940b0
RE
5858 strcat (instr, reg_names[13]);
5859 strcat (instr, ", ");
1d5473cb 5860 strcat (instr, "%|");
6cfc7210 5861 strcat (instr, TARGET_INTERWORK || (! really_return)
62b10bbc 5862 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
ff9940b0
RE
5863 }
5864 else
1d5473cb
RE
5865 {
5866 strcat (instr, "%|");
6cfc7210 5867 if (TARGET_INTERWORK && really_return)
62b10bbc 5868 strcat (instr, reg_names[IP_REGNUM]);
da6558fd 5869 else
62b10bbc 5870 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
1d5473cb 5871 }
2b835d68 5872 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 5873 output_asm_insn (instr, &operand);
da6558fd 5874
6cfc7210 5875 if (TARGET_INTERWORK && really_return)
da6558fd
NC
5876 {
5877 strcpy (instr, "bx%?");
5878 strcat (instr, reverse ? "%D0" : "%d0");
5879 strcat (instr, "\t%|");
5880 strcat (instr, frame_pointer_needed ? "lr" : "ip");
5881
5882 output_asm_insn (instr, & operand);
5883 }
ff9940b0
RE
5884 }
5885 else if (really_return)
5886 {
6cfc7210 5887 if (TARGET_INTERWORK)
25b1c156 5888 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
b111229a
RE
5889 else
5890 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
5891 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
da6558fd
NC
5892
5893 output_asm_insn (instr, & operand);
ff9940b0 5894 }
f3bb6135 5895
ff9940b0
RE
5896 return "";
5897}
5898
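/* Illustrative examples (assuming TARGET_APCS_32 and no interworking)
   of what output_return_instruction emits:

     leaf, nothing saved:           mov     pc, lr
     r4-r6 saved, frame pointer:    ldmea   fp, {r4, r5, r6, fp, sp, pc}
     r4 saved, no frame pointer:    ldmfd   sp!, {r4, pc}

   With -mthumb-interwork the load targets lr or ip instead and a
   "bx" follows; on 26-bit targets (! TARGET_APCS_32) the flags are
   restored as well, via "movs pc, lr" or the "}^" form of ldm.  */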
e82ea128
DE
5899/* Return nonzero if optimizing and the current function is volatile.
5900 Such functions never return, and many memory cycles can be saved
5901 by not storing register values that will never be needed again.
5902 This optimization was added to speed up context switching in a
5903 kernel application. */
a0b2ce4c 5904
e2c671ba
RE
5905int
5906arm_volatile_func ()
5907{
5908 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
5909}
5910
ef179a26
NC
5911/* Write the function name into the code section, directly preceding
5912 the function prologue.
5913
5914 Code will be output similar to this:
5915 t0
5916 .ascii "arm_poke_function_name", 0
5917 .align
5918 t1
5919 .word 0xff000000 + (t1 - t0)
5920 arm_poke_function_name
5921 mov ip, sp
5922 stmfd sp!, {fp, ip, lr, pc}
5923 sub fp, ip, #4
5924
5925 When performing a stack backtrace, code can inspect the value
5926 of 'pc' stored at 'fp' + 0. If the trace function then looks
5927 at location pc - 12 and the top 8 bits are set, then we know
5928 that there is a function name embedded immediately preceding this
 5929   location, whose length is given by ((pc[-3]) & ~0xff000000).
5930
5931 We assume that pc is declared as a pointer to an unsigned long.
5932
5933 It is of no benefit to output the function name if we are assembling
5934 a leaf function. These function types will not contain a stack
5935 backtrace structure, therefore it is not possible to determine the
5936 function name. */
5937
5938void
5939arm_poke_function_name (stream, name)
5940 FILE * stream;
5941 char * name;
5942{
5943 unsigned long alignlength;
5944 unsigned long length;
5945 rtx x;
5946
949d79eb
RE
5947 length = strlen (name) + 1;
5948 alignlength = (length + 3) & ~3;
ef179a26 5949
949d79eb 5950 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26
NC
5951 ASM_OUTPUT_ALIGN (stream, 2);
5952 x = GEN_INT (0xff000000UL + alignlength);
5953 ASM_OUTPUT_INT (stream, x);
5954}
5955
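/* Illustrative sketch (not GCC code): how a stack backtracer might
   recover the name emitted above.  PC is the saved pc value fetched
   from the backtrace structure at fp + 0; the marker word lives at
   pc - 12 and carries 0xff in its top byte, with the padded name
   length in its low 24 bits.  */

#include <stddef.h>

static const char *
recover_function_name (const unsigned long *pc)
{
  unsigned long marker = pc[-3];        /* The word at pc - 12.  */

  if ((marker & 0xff000000UL) != 0xff000000UL)
    return NULL;                        /* No embedded name here.  */

  /* The NUL-terminated, word-aligned name immediately precedes the
     marker word.  */
  return (const char *) &pc[-3] - (marker & 0x00ffffffUL);
}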
ff9940b0
RE
5956/* The amount of stack adjustment that happens here, in output_return and in
5957 output_epilogue must be exactly the same as was calculated during reload,
5958 or things will point to the wrong place. The only time we can safely
5959 ignore this constraint is when a function has no arguments on the stack,
 5960   no stack frame requirement and no live registers except for `lr'.  If we
5961 can guarantee that by making all function calls into tail calls and that
5962 lr is not clobbered in any other way, then there is no need to push lr
5963 onto the stack. */
5964
cce8749e 5965void
f3bb6135 5966output_func_prologue (f, frame_size)
6cfc7210 5967 FILE * f;
cce8749e
CH
5968 int frame_size;
5969{
f3bb6135 5970 int reg, live_regs_mask = 0;
e2c671ba
RE
5971 int volatile_func = (optimize > 0
5972 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 5973
cce8749e
CH
5974 /* Nonzero if we must stuff some register arguments onto the stack as if
5975 they were passed there. */
5976 int store_arg_regs = 0;
5977
abaa26e5
RE
5978 if (arm_ccfsm_state || arm_target_insn)
5979 abort (); /* Sanity check */
31fdb4d5
DE
5980
5981 if (arm_naked_function_p (current_function_decl))
5982 return;
5983
ff9940b0
RE
5984 return_used_this_function = 0;
5985 lr_save_eliminated = 0;
5986
dd18ae56
NC
5987 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
5988 current_function_args_size,
5989 current_function_pretend_args_size, frame_size);
5990 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
5991 frame_pointer_needed,
5992 current_function_anonymous_args);
cce8749e 5993
e2c671ba 5994 if (volatile_func)
dd18ae56 5995 asm_fprintf (f, "\t%@ Volatile function.\n");
e2c671ba 5996
cce8749e
CH
5997 if (current_function_anonymous_args && current_function_pretend_args_size)
5998 store_arg_regs = 1;
5999
f3bb6135
RE
6000 for (reg = 0; reg <= 10; reg++)
6001 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e
CH
6002 live_regs_mask |= (1 << reg);
6003
dd18ae56 6004 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
ed0e6530 6005 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6006 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6007
ff9940b0 6008 if (frame_pointer_needed)
e2c671ba 6009 live_regs_mask |= 0xD800;
62b10bbc 6010 else if (regs_ever_live[LR_REGNUM])
ff9940b0
RE
6011 {
6012 if (! current_function_args_size
f3bb6135 6013 && ! function_really_clobbers_lr (get_insns ()))
e2c671ba 6014 lr_save_eliminated = 1;
ff9940b0 6015 else
62b10bbc 6016 live_regs_mask |= 1 << LR_REGNUM;
ff9940b0 6017 }
cce8749e 6018
cce8749e
CH
6019 if (live_regs_mask)
6020 {
ff9940b0
RE
 6021      /* If a DImode load/store multiple is used, and the base register
 6022	 is r3, then r4 can become an ever-live register without lr
 6023	 doing so; in this case we need to push lr as well, or we
 6024	 will fail to get a proper return. */
6025
62b10bbc 6026 live_regs_mask |= 1 << LR_REGNUM;
ff9940b0 6027 lr_save_eliminated = 0;
f3bb6135 6028
cce8749e
CH
6029 }
6030
e2c671ba 6031 if (lr_save_eliminated)
dd18ae56 6032 asm_fprintf (f,"\t%@ I don't think this function clobbers lr\n");
32de079a
RE
6033
6034#ifdef AOF_ASSEMBLER
6035 if (flag_pic)
dd18ae56 6036 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 6037#endif
f3bb6135 6038}
cce8749e 6039
949d79eb
RE
6040char *
6041arm_output_epilogue ()
cce8749e 6042{
949d79eb
RE
6043 int reg;
6044 int live_regs_mask = 0;
6045 /* If we need this, then it will always be at least this much */
b111229a 6046 int floats_offset = 12;
cce8749e 6047 rtx operands[3];
949d79eb
RE
6048 int frame_size = get_frame_size ();
6049 FILE *f = asm_out_file;
e2c671ba
RE
6050 int volatile_func = (optimize > 0
6051 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 6052
b36ba79f 6053 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 6054 return "";
cce8749e 6055
31fdb4d5
DE
6056 /* Naked functions don't have epilogues. */
6057 if (arm_naked_function_p (current_function_decl))
949d79eb 6058 return "";
31fdb4d5 6059
e2c671ba 6060 /* A volatile function should never return. Call abort. */
c11145f6 6061 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6062 {
86efdc8e 6063 rtx op;
ed0e6530 6064 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 6065 assemble_external_libcall (op);
e2c671ba 6066 output_asm_insn ("bl\t%a0", &op);
949d79eb 6067 return "";
e2c671ba
RE
6068 }
6069
f3bb6135
RE
6070 for (reg = 0; reg <= 10; reg++)
6071 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 6072 {
ff9940b0
RE
6073 live_regs_mask |= (1 << reg);
6074 floats_offset += 4;
cce8749e
CH
6075 }
6076
ed0e6530
PB
6077 /* If we aren't loading the PIC register, don't stack it even though it may
6078 be live. */
6079 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6080 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6081 {
6082 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6083 floats_offset += 4;
6084 }
6085
ff9940b0 6086 if (frame_pointer_needed)
cce8749e 6087 {
b111229a
RE
6088 if (arm_fpu_arch == FP_SOFT2)
6089 {
6090 for (reg = 23; reg > 15; reg--)
6091 if (regs_ever_live[reg] && ! call_used_regs[reg])
6092 {
6093 floats_offset += 12;
dd18ae56
NC
6094 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6095 reg, FP_REGNUM, floats_offset);
b111229a
RE
6096 }
6097 }
6098 else
6099 {
6100 int start_reg = 23;
6101
6102 for (reg = 23; reg > 15; reg--)
6103 {
6104 if (regs_ever_live[reg] && ! call_used_regs[reg])
6105 {
6106 floats_offset += 12;
6cfc7210 6107
b111229a
RE
6108 /* We can't unstack more than four registers at once */
6109 if (start_reg - reg == 3)
6110 {
dd18ae56
NC
6111 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6112 reg, FP_REGNUM, floats_offset);
b111229a
RE
6113 start_reg = reg - 1;
6114 }
6115 }
6116 else
6117 {
6118 if (reg != start_reg)
dd18ae56
NC
6119 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6120 reg + 1, start_reg - reg,
6121 FP_REGNUM, floats_offset);
b111229a
RE
6122 start_reg = reg - 1;
6123 }
6124 }
6125
6126 /* Just in case the last register checked also needs unstacking. */
6127 if (reg != start_reg)
dd18ae56
NC
6128 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6129 reg + 1, start_reg - reg,
6130 FP_REGNUM, floats_offset);
b111229a 6131 }
da6558fd 6132
6cfc7210 6133 if (TARGET_INTERWORK)
b111229a
RE
6134 {
6135 live_regs_mask |= 0x6800;
dd18ae56
NC
6136 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
6137 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
b111229a
RE
6138 }
6139 else
6140 {
6141 live_regs_mask |= 0xA800;
dd18ae56 6142 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
6143 TARGET_APCS_32 ? FALSE : TRUE);
6144 }
cce8749e
CH
6145 }
6146 else
6147 {
d2288d8d 6148 /* Restore stack pointer if necessary. */
56636818 6149 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
6150 {
6151 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
6152 operands[2] = GEN_INT (frame_size
6153 + current_function_outgoing_args_size);
d2288d8d
TG
6154 output_add_immediate (operands);
6155 }
6156
b111229a
RE
6157 if (arm_fpu_arch == FP_SOFT2)
6158 {
6159 for (reg = 16; reg < 24; reg++)
6160 if (regs_ever_live[reg] && ! call_used_regs[reg])
dd18ae56
NC
6161 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6162 reg, SP_REGNUM);
b111229a
RE
6163 }
6164 else
6165 {
6166 int start_reg = 16;
6167
6168 for (reg = 16; reg < 24; reg++)
6169 {
6170 if (regs_ever_live[reg] && ! call_used_regs[reg])
6171 {
6172 if (reg - start_reg == 3)
6173 {
dd18ae56
NC
6174 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6175 start_reg, SP_REGNUM);
b111229a
RE
6176 start_reg = reg + 1;
6177 }
6178 }
6179 else
6180 {
6181 if (reg != start_reg)
dd18ae56
NC
6182 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6183 start_reg, reg - start_reg,
6184 SP_REGNUM);
6cfc7210 6185
b111229a
RE
6186 start_reg = reg + 1;
6187 }
6188 }
6189
6190 /* Just in case the last register checked also needs unstacking. */
6191 if (reg != start_reg)
dd18ae56
NC
6192 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6193 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
6194 }
6195
62b10bbc 6196 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 6197 {
6cfc7210 6198 if (TARGET_INTERWORK)
b111229a
RE
6199 {
6200 if (! lr_save_eliminated)
62b10bbc 6201 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2
NC
6202
6203 if (live_regs_mask != 0)
dd18ae56 6204 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask, FALSE);
6cfc7210 6205
dd18ae56 6206 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
b111229a
RE
6207 }
6208 else if (lr_save_eliminated)
6cfc7210
NC
6209 asm_fprintf (f, "\tmov%c\t%r, %r\n",
6210 TARGET_APCS_32 ? ' ' : 's',
dd18ae56 6211 PC_REGNUM, LR_REGNUM);
32de079a 6212 else
dd18ae56 6213 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask | 0x8000,
32de079a 6214 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
6215 }
6216 else
6217 {
62b10bbc 6218 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 6219 {
32de079a
RE
6220 /* Restore the integer regs, and the return address into lr */
6221 if (! lr_save_eliminated)
62b10bbc 6222 live_regs_mask |= 1 << LR_REGNUM;
32de079a
RE
6223
6224 if (live_regs_mask != 0)
dd18ae56 6225 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask, FALSE);
cce8749e 6226 }
b111229a 6227
cce8749e
CH
6228 if (current_function_pretend_args_size)
6229 {
32de079a 6230 /* Unwind the pre-pushed regs */
cce8749e 6231 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 6232 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
6233 output_add_immediate (operands);
6234 }
32de079a 6235 /* And finally, go home */
6cfc7210 6236 if (TARGET_INTERWORK)
dd18ae56 6237 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
25b1c156 6238 else if (TARGET_APCS_32)
dd18ae56 6239 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
b111229a 6240 else
dd18ae56 6241 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
cce8749e
CH
6242 }
6243 }
f3bb6135 6244
949d79eb
RE
6245 return "";
6246}
6247
6248void
eb3921e8 6249output_func_epilogue (frame_size)
949d79eb
RE
6250 int frame_size;
6251{
6252 if (use_return_insn (FALSE) && return_used_this_function
6253 && (frame_size + current_function_outgoing_args_size) != 0
6254 && ! (frame_pointer_needed && TARGET_APCS))
914a3b8c 6255 abort ();
f3bb6135 6256
4b632bf1 6257 /* Reset the ARM-specific per-function variables. */
cce8749e 6258 current_function_anonymous_args = 0;
4b632bf1 6259 after_arm_reorg = 0;
f3bb6135 6260}
e2c671ba
RE
6261
6262static void
6263emit_multi_reg_push (mask)
6264 int mask;
6265{
6266 int num_regs = 0;
6267 int i, j;
6268 rtx par;
6269
6270 for (i = 0; i < 16; i++)
6271 if (mask & (1 << i))
6272 num_regs++;
6273
6274 if (num_regs == 0 || num_regs > 16)
6275 abort ();
6276
43cffd11 6277 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
e2c671ba
RE
6278
6279 for (i = 0; i < 16; i++)
6280 {
6281 if (mask & (1 << i))
6282 {
6283 XVECEXP (par, 0, 0)
43cffd11
RE
6284 = gen_rtx_SET (VOIDmode,
6285 gen_rtx_MEM (BLKmode,
6286 gen_rtx_PRE_DEC (BLKmode,
6287 stack_pointer_rtx)),
6288 gen_rtx_UNSPEC (BLKmode,
6289 gen_rtvec (1,
6290 gen_rtx_REG (SImode, i)),
6291 2));
e2c671ba
RE
6292 break;
6293 }
6294 }
6295
6296 for (j = 1, i++; j < num_regs; i++)
6297 {
6298 if (mask & (1 << i))
6299 {
6300 XVECEXP (par, 0, j)
43cffd11 6301 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, i));
e2c671ba
RE
6302 j++;
6303 }
6304 }
b111229a
RE
6305
6306 emit_insn (par);
6307}
6308
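/* Illustrative use of emit_multi_reg_push: pushing r4, r5 and lr is
   emit_multi_reg_push ((1 << 4) | (1 << 5) | (1 << 14)).  The
   PARALLEL built above -- one SET through a PRE_DEC of sp, plus one
   USE per additional register -- is matched by the store-multiple
   pattern and assembles to a single "stmfd sp!, {r4, r5, lr}".  */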
6309static void
6310emit_sfm (base_reg, count)
6311 int base_reg;
6312 int count;
6313{
6314 rtx par;
6315 int i;
6316
43cffd11
RE
6317 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
6318
6319 XVECEXP (par, 0, 0)
6320 = gen_rtx_SET (VOIDmode,
6321 gen_rtx_MEM (BLKmode,
6322 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
6323 gen_rtx_UNSPEC (BLKmode,
6324 gen_rtvec (1, gen_rtx_REG (XFmode,
6325 base_reg++)),
6326 2));
b111229a 6327 for (i = 1; i < count; i++)
43cffd11
RE
6328 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode,
6329 gen_rtx_REG (XFmode, base_reg++));
b111229a 6330
e2c671ba
RE
6331 emit_insn (par);
6332}
6333
6334void
6335arm_expand_prologue ()
6336{
6337 int reg;
56636818
JL
6338 rtx amount = GEN_INT (-(get_frame_size ()
6339 + current_function_outgoing_args_size));
e2c671ba
RE
6340 int live_regs_mask = 0;
6341 int store_arg_regs = 0;
949d79eb
RE
6342 /* If this function doesn't return, then there is no need to push
6343 the call-saved regs. */
e2c671ba
RE
6344 int volatile_func = (optimize > 0
6345 && TREE_THIS_VOLATILE (current_function_decl));
6346
31fdb4d5
DE
6347 /* Naked functions don't have prologues. */
6348 if (arm_naked_function_p (current_function_decl))
6349 return;
6350
e2c671ba
RE
6351 if (current_function_anonymous_args && current_function_pretend_args_size)
6352 store_arg_regs = 1;
6353
6354 if (! volatile_func)
6ed30148
RE
6355 {
6356 for (reg = 0; reg <= 10; reg++)
6357 if (regs_ever_live[reg] && ! call_used_regs[reg])
6358 live_regs_mask |= 1 << reg;
6359
6360 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6361 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
e2c671ba 6362
62b10bbc
NC
6363 if (regs_ever_live[LR_REGNUM])
6364 live_regs_mask |= 1 << LR_REGNUM;
6ed30148 6365 }
e2c671ba
RE
6366
6367 if (frame_pointer_needed)
6368 {
6369 live_regs_mask |= 0xD800;
62b10bbc 6370 emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
e2c671ba
RE
6371 stack_pointer_rtx));
6372 }
6373
6374 if (current_function_pretend_args_size)
6375 {
6376 if (store_arg_regs)
6377 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
6378 & 0xf);
6379 else
6380 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
6381 GEN_INT (-current_function_pretend_args_size)));
6382 }
6383
6384 if (live_regs_mask)
6385 {
6386 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 6387 we won't get a proper return. */
62b10bbc 6388 live_regs_mask |= 1 << LR_REGNUM;
e2c671ba
RE
6389 emit_multi_reg_push (live_regs_mask);
6390 }
6391
 6392  /* For now the integer regs are still popped in arm_output_epilogue ().  */
6393
6394 if (! volatile_func)
b111229a
RE
6395 {
6396 if (arm_fpu_arch == FP_SOFT2)
6397 {
6398 for (reg = 23; reg > 15; reg--)
6399 if (regs_ever_live[reg] && ! call_used_regs[reg])
43cffd11
RE
6400 emit_insn (gen_rtx_SET
6401 (VOIDmode,
6402 gen_rtx_MEM (XFmode,
6403 gen_rtx_PRE_DEC (XFmode,
6404 stack_pointer_rtx)),
6405 gen_rtx_REG (XFmode, reg)));
b111229a
RE
6406 }
6407 else
6408 {
6409 int start_reg = 23;
6410
6411 for (reg = 23; reg > 15; reg--)
6412 {
6413 if (regs_ever_live[reg] && ! call_used_regs[reg])
6414 {
6415 if (start_reg - reg == 3)
6416 {
6417 emit_sfm (reg, 4);
6418 start_reg = reg - 1;
6419 }
6420 }
6421 else
6422 {
6423 if (start_reg != reg)
6424 emit_sfm (reg + 1, start_reg - reg);
6425 start_reg = reg - 1;
6426 }
6427 }
6428
6429 if (start_reg != reg)
6430 emit_sfm (reg + 1, start_reg - reg);
6431 }
6432 }
e2c671ba
RE
6433
6434 if (frame_pointer_needed)
62b10bbc 6435 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx_REG (SImode, IP_REGNUM),
e2c671ba
RE
6436 (GEN_INT
6437 (-(4 + current_function_pretend_args_size)))));
6438
6439 if (amount != const0_rtx)
6440 {
6441 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
43cffd11
RE
6442 emit_insn (gen_rtx_CLOBBER (VOIDmode,
6443 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
e2c671ba
RE
6444 }
6445
6446 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
6447 the call to mcount. Similarly if the user has requested no
6448 scheduling in the prolog. */
6449 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba
RE
6450 emit_insn (gen_blockage ());
6451}
6452
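/* Illustrative example: for a function that needs a frame pointer and
   saves r4, the RTL emitted by arm_expand_prologue corresponds to the
   classic APCS entry sequence:

        mov     ip, sp
        stmfd   sp!, {r4, fp, ip, lr, pc}
        sub     fp, ip, #4
        sub     sp, sp, #<locals + outgoing args>

   (0xD800 above is exactly the fp/ip/lr/pc bits of the save mask.)  */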
cce8749e 6453\f
9997d19d
RE
 6454/* If CODE is 'd', then X is a condition operand and the instruction
 6455   should only be executed if the condition is true.
ddd5a7c1 6456   If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
6457 should only be executed if the condition is false: however, if the mode
6458 of the comparison is CCFPEmode, then always execute the instruction -- we
6459 do this because in these circumstances !GE does not necessarily imply LT;
6460 in these cases the instruction pattern will take care to make sure that
6461 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 6462 doing this instruction unconditionally.
9997d19d
RE
6463 If CODE is 'N' then X is a floating point operand that must be negated
6464 before output.
6465 If CODE is 'B' then output a bitwise inverted value of X (a const int).
6466 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
6467
6468void
6469arm_print_operand (stream, x, code)
62b10bbc 6470 FILE * stream;
9997d19d
RE
6471 rtx x;
6472 int code;
6473{
6474 switch (code)
6475 {
6476 case '@':
f3139301 6477 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
6478 return;
6479
6480 case '|':
f3139301 6481 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
6482 return;
6483
6484 case '?':
6485 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
6486 fputs (arm_condition_codes[arm_current_cc], stream);
6487 return;
6488
6489 case 'N':
6490 {
6491 REAL_VALUE_TYPE r;
6492 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6493 r = REAL_VALUE_NEGATE (r);
6494 fprintf (stream, "%s", fp_const_from_val (&r));
6495 }
6496 return;
6497
6498 case 'B':
6499 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
6500 {
6501 HOST_WIDE_INT val;
6502 val = ARM_SIGN_EXTEND (~ INTVAL (x));
36ba9cb8 6503 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6504 }
9997d19d
RE
6505 else
6506 {
6507 putc ('~', stream);
6508 output_addr_const (stream, x);
6509 }
6510 return;
6511
6512 case 'i':
6513 fprintf (stream, "%s", arithmetic_instr (x, 1));
6514 return;
6515
6516 case 'I':
6517 fprintf (stream, "%s", arithmetic_instr (x, 0));
6518 return;
6519
6520 case 'S':
6521 {
6522 HOST_WIDE_INT val;
4bc74ece 6523 char * shift = shift_op (x, & val);
9997d19d 6524
e2c671ba
RE
6525 if (shift)
6526 {
4bc74ece 6527 fprintf (stream, ", %s ", shift_op (x, & val));
e2c671ba
RE
6528 if (val == -1)
6529 arm_print_operand (stream, XEXP (x, 1), 0);
6530 else
4bc74ece
NC
6531 {
6532 fputc ('#', stream);
36ba9cb8 6533 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6534 }
e2c671ba 6535 }
9997d19d
RE
6536 }
6537 return;
6538
c1c2bc04
RE
6539 case 'Q':
6540 if (REGNO (x) > 15)
6541 abort ();
6542 fputs (REGISTER_PREFIX, stream);
6543 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
6544 return;
6545
9997d19d
RE
6546 case 'R':
6547 if (REGNO (x) > 15)
6548 abort ();
f3139301 6549 fputs (REGISTER_PREFIX, stream);
c1c2bc04 6550 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
9997d19d
RE
6551 return;
6552
6553 case 'm':
f3139301 6554 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
6555 if (GET_CODE (XEXP (x, 0)) == REG)
6556 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
6557 else
6558 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
6559 return;
6560
6561 case 'M':
dd18ae56
NC
6562 asm_fprintf (stream, "{%r-%r}",
6563 REGNO (x), REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
6564 return;
6565
6566 case 'd':
6567 if (x)
6568 fputs (arm_condition_codes[get_arm_condition_code (x)],
6569 stream);
6570 return;
6571
6572 case 'D':
84ed5e79 6573 if (x)
9997d19d
RE
6574 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
6575 (get_arm_condition_code (x))],
6576 stream);
6577 return;
6578
6579 default:
6580 if (x == 0)
6581 abort ();
6582
6583 if (GET_CODE (x) == REG)
1d5473cb 6584 {
f3139301 6585 fputs (REGISTER_PREFIX, stream);
1d5473cb
RE
6586 fputs (reg_names[REGNO (x)], stream);
6587 }
9997d19d
RE
6588 else if (GET_CODE (x) == MEM)
6589 {
6590 output_memory_reference_mode = GET_MODE (x);
6591 output_address (XEXP (x, 0));
6592 }
6593 else if (GET_CODE (x) == CONST_DOUBLE)
6594 fprintf (stream, "#%s", fp_immediate_constant (x));
6595 else if (GET_CODE (x) == NEG)
6596 abort (); /* This should never happen now. */
6597 else
6598 {
6599 fputc ('#', stream);
6600 output_addr_const (stream, x);
6601 }
6602 }
6603}
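/* Illustrative examples of the codes handled above: with operand 2
   being (const_int 5), a template such as "bic%?\t%0, %1, #%B2"
   prints the bitwise inverse, giving "bic r0, r1, #-6"; "%M0" on a
   DImode value held in r4 prints the register list "{r4-r5}"; and
   "%?" expands to the current condition code ("eq", "ne", ...) while
   the conditional-execution state machine below is in state 3 or 4.  */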
cce8749e
CH
6604\f
6605/* A finite state machine takes care of noticing whether or not instructions
 6606   can be conditionally executed, and thus decreases execution time and code
6607 size by deleting branch instructions. The fsm is controlled by
6608 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
6609
 6610/* The states of the fsm controlling condition codes are:
6611 0: normal, do nothing special
6612 1: make ASM_OUTPUT_OPCODE not output this instruction
6613 2: make ASM_OUTPUT_OPCODE not output this instruction
6614 3: make instructions conditional
6615 4: make instructions conditional
6616
6617 State transitions (state->state by whom under condition):
6618 0 -> 1 final_prescan_insn if the `target' is a label
6619 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
6620 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
6621 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
6622 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
6623 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
6624 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
6625 (the target insn is arm_target_insn).
6626
ff9940b0
RE
6627 If the jump clobbers the conditions then we use states 2 and 4.
6628
6629 A similar thing can be done with conditional return insns.
6630
cce8749e
CH
6631 XXX In case the `target' is an unconditional branch, this conditionalising
6632 of the instructions always reduces code size, but not always execution
6633 time. But then, I want to reduce the code size to somewhere near what
6634 /bin/cc produces. */
6635
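/* Illustrative example of the transformation this FSM performs.  A
   conditional branch skipping two insns,

        cmp     r0, #0
        beq     .L1
        add     r1, r1, #1
        mov     r2, #3
   .L1: ...

   is rewritten so that the branch is suppressed and the skipped insns
   are predicated with the inverse condition:

        cmp     r0, #0
        addne   r1, r1, #1
        movne   r2, #3
   .L1: ...  */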
cce8749e
CH
6636/* Returns the index of the ARM condition code string in
6637 `arm_condition_codes'. COMPARISON should be an rtx like
6638 `(eq (...) (...))'. */
6639
84ed5e79 6640static enum arm_cond_code
cce8749e
CH
6641get_arm_condition_code (comparison)
6642 rtx comparison;
6643{
5165176d 6644 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
6645 register int code;
6646 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
6647
6648 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 6649 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
6650 XEXP (comparison, 1));
6651
6652 switch (mode)
cce8749e 6653 {
84ed5e79
RE
6654 case CC_DNEmode: code = ARM_NE; goto dominance;
6655 case CC_DEQmode: code = ARM_EQ; goto dominance;
6656 case CC_DGEmode: code = ARM_GE; goto dominance;
6657 case CC_DGTmode: code = ARM_GT; goto dominance;
6658 case CC_DLEmode: code = ARM_LE; goto dominance;
6659 case CC_DLTmode: code = ARM_LT; goto dominance;
6660 case CC_DGEUmode: code = ARM_CS; goto dominance;
6661 case CC_DGTUmode: code = ARM_HI; goto dominance;
6662 case CC_DLEUmode: code = ARM_LS; goto dominance;
6663 case CC_DLTUmode: code = ARM_CC;
6664
6665 dominance:
6666 if (comp_code != EQ && comp_code != NE)
6667 abort ();
6668
6669 if (comp_code == EQ)
6670 return ARM_INVERSE_CONDITION_CODE (code);
6671 return code;
6672
5165176d 6673 case CC_NOOVmode:
84ed5e79 6674 switch (comp_code)
5165176d 6675 {
84ed5e79
RE
6676 case NE: return ARM_NE;
6677 case EQ: return ARM_EQ;
6678 case GE: return ARM_PL;
6679 case LT: return ARM_MI;
5165176d
RE
6680 default: abort ();
6681 }
6682
6683 case CC_Zmode:
6684 case CCFPmode:
84ed5e79 6685 switch (comp_code)
5165176d 6686 {
84ed5e79
RE
6687 case NE: return ARM_NE;
6688 case EQ: return ARM_EQ;
5165176d
RE
6689 default: abort ();
6690 }
6691
6692 case CCFPEmode:
84ed5e79
RE
6693 switch (comp_code)
6694 {
6695 case GE: return ARM_GE;
6696 case GT: return ARM_GT;
6697 case LE: return ARM_LS;
6698 case LT: return ARM_MI;
6699 default: abort ();
6700 }
6701
6702 case CC_SWPmode:
6703 switch (comp_code)
6704 {
6705 case NE: return ARM_NE;
6706 case EQ: return ARM_EQ;
6707 case GE: return ARM_LE;
6708 case GT: return ARM_LT;
6709 case LE: return ARM_GE;
6710 case LT: return ARM_GT;
6711 case GEU: return ARM_LS;
6712 case GTU: return ARM_CC;
6713 case LEU: return ARM_CS;
6714 case LTU: return ARM_HI;
6715 default: abort ();
6716 }
6717
bd9c7e23
RE
6718 case CC_Cmode:
6719 switch (comp_code)
6720 {
6721 case LTU: return ARM_CS;
6722 case GEU: return ARM_CC;
6723 default: abort ();
6724 }
6725
5165176d 6726 case CCmode:
84ed5e79 6727 switch (comp_code)
5165176d 6728 {
84ed5e79
RE
6729 case NE: return ARM_NE;
6730 case EQ: return ARM_EQ;
6731 case GE: return ARM_GE;
6732 case GT: return ARM_GT;
6733 case LE: return ARM_LE;
6734 case LT: return ARM_LT;
6735 case GEU: return ARM_CS;
6736 case GTU: return ARM_HI;
6737 case LEU: return ARM_LS;
6738 case LTU: return ARM_CC;
5165176d
RE
6739 default: abort ();
6740 }
6741
cce8749e
CH
6742 default: abort ();
6743 }
84ed5e79
RE
6744
6745 abort ();
f3bb6135 6746}
cce8749e
CH
6747
6748
6749void
74bbc178 6750arm_final_prescan_insn (insn)
cce8749e 6751 rtx insn;
cce8749e
CH
6752{
6753 /* BODY will hold the body of INSN. */
6754 register rtx body = PATTERN (insn);
6755
6756 /* This will be 1 if trying to repeat the trick, and things need to be
6757 reversed if it appears to fail. */
6758 int reverse = 0;
6759
ff9940b0
RE
 6760  /* JUMP_CLOBBERS being one implies that the conditions are clobbered
 6761     if a branch is taken, even if the rtl suggests otherwise.  It also
6762 means that we have to grub around within the jump expression to find
6763 out what the conditions are when the jump isn't taken. */
6764 int jump_clobbers = 0;
6765
6766 /* If we start with a return insn, we only succeed if we find another one. */
6767 int seeking_return = 0;
6768
cce8749e
CH
6769 /* START_INSN will hold the insn from where we start looking. This is the
6770 first insn after the following code_label if REVERSE is true. */
6771 rtx start_insn = insn;
6772
6773 /* If in state 4, check if the target branch is reached, in order to
6774 change back to state 0. */
6775 if (arm_ccfsm_state == 4)
6776 {
6777 if (insn == arm_target_insn)
f5a1b0d2
NC
6778 {
6779 arm_target_insn = NULL;
6780 arm_ccfsm_state = 0;
6781 }
cce8749e
CH
6782 return;
6783 }
6784
6785 /* If in state 3, it is possible to repeat the trick, if this insn is an
6786 unconditional branch to a label, and immediately following this branch
6787 is the previous target label which is only used once, and the label this
6788 branch jumps to is not too far off. */
6789 if (arm_ccfsm_state == 3)
6790 {
6791 if (simplejump_p (insn))
6792 {
6793 start_insn = next_nonnote_insn (start_insn);
6794 if (GET_CODE (start_insn) == BARRIER)
6795 {
6796 /* XXX Isn't this always a barrier? */
6797 start_insn = next_nonnote_insn (start_insn);
6798 }
6799 if (GET_CODE (start_insn) == CODE_LABEL
6800 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
6801 && LABEL_NUSES (start_insn) == 1)
6802 reverse = TRUE;
6803 else
6804 return;
6805 }
ff9940b0
RE
6806 else if (GET_CODE (body) == RETURN)
6807 {
6808 start_insn = next_nonnote_insn (start_insn);
6809 if (GET_CODE (start_insn) == BARRIER)
6810 start_insn = next_nonnote_insn (start_insn);
6811 if (GET_CODE (start_insn) == CODE_LABEL
6812 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
6813 && LABEL_NUSES (start_insn) == 1)
6814 {
6815 reverse = TRUE;
6816 seeking_return = 1;
6817 }
6818 else
6819 return;
6820 }
cce8749e
CH
6821 else
6822 return;
6823 }
6824
6825 if (arm_ccfsm_state != 0 && !reverse)
6826 abort ();
6827 if (GET_CODE (insn) != JUMP_INSN)
6828 return;
6829
ddd5a7c1 6830  /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
 6831     the jump should always come first. */
6832 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
6833 body = XVECEXP (body, 0, 0);
6834
6835#if 0
6836 /* If this is a conditional return then we don't want to know */
6837 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
6838 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
6839 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
6840 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
6841 return;
6842#endif
6843
cce8749e
CH
6844 if (reverse
6845 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
6846 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
6847 {
bd9c7e23
RE
6848 int insns_skipped;
6849 int fail = FALSE, succeed = FALSE;
cce8749e
CH
6850 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
6851 int then_not_else = TRUE;
ff9940b0 6852 rtx this_insn = start_insn, label = 0;
cce8749e 6853
ff9940b0 6854 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40
RE
6855 {
6856 /* The code below is wrong for these, and I haven't time to
6857 fix it now. So we just do the safe thing and return. This
6858 whole function needs re-writing anyway. */
6859 jump_clobbers = 1;
6860 return;
6861 }
ff9940b0 6862
cce8749e
CH
6863 /* Register the insn jumped to. */
6864 if (reverse)
ff9940b0
RE
6865 {
6866 if (!seeking_return)
6867 label = XEXP (SET_SRC (body), 0);
6868 }
cce8749e
CH
6869 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
6870 label = XEXP (XEXP (SET_SRC (body), 1), 0);
6871 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
6872 {
6873 label = XEXP (XEXP (SET_SRC (body), 2), 0);
6874 then_not_else = FALSE;
6875 }
ff9940b0
RE
6876 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
6877 seeking_return = 1;
6878 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
6879 {
6880 seeking_return = 1;
6881 then_not_else = FALSE;
6882 }
cce8749e
CH
6883 else
6884 abort ();
6885
6886 /* See how many insns this branch skips, and what kind of insns. If all
6887 insns are okay, and the label or unconditional branch to the same
6888 label is not too far away, succeed. */
6889 for (insns_skipped = 0;
b36ba79f 6890 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
6891 {
6892 rtx scanbody;
6893
6894 this_insn = next_nonnote_insn (this_insn);
6895 if (!this_insn)
6896 break;
6897
cce8749e
CH
6898 switch (GET_CODE (this_insn))
6899 {
6900 case CODE_LABEL:
6901 /* Succeed if it is the target label, otherwise fail since
6902 control falls in from somewhere else. */
6903 if (this_insn == label)
6904 {
ff9940b0
RE
6905 if (jump_clobbers)
6906 {
6907 arm_ccfsm_state = 2;
6908 this_insn = next_nonnote_insn (this_insn);
6909 }
6910 else
6911 arm_ccfsm_state = 1;
cce8749e
CH
6912 succeed = TRUE;
6913 }
6914 else
6915 fail = TRUE;
6916 break;
6917
ff9940b0 6918 case BARRIER:
cce8749e 6919 /* Succeed if the following insn is the target label.
ff9940b0
RE
6920 Otherwise fail.
6921 If return insns are used then the last insn in a function
6922 will be a barrier. */
cce8749e 6923 this_insn = next_nonnote_insn (this_insn);
ff9940b0 6924 if (this_insn && this_insn == label)
cce8749e 6925 {
ff9940b0
RE
6926 if (jump_clobbers)
6927 {
6928 arm_ccfsm_state = 2;
6929 this_insn = next_nonnote_insn (this_insn);
6930 }
6931 else
6932 arm_ccfsm_state = 1;
cce8749e
CH
6933 succeed = TRUE;
6934 }
6935 else
6936 fail = TRUE;
6937 break;
6938
ff9940b0 6939 case CALL_INSN:
2b835d68 6940 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 6941 calls. */
2b835d68 6942 if (TARGET_APCS_32)
bd9c7e23
RE
6943 {
6944 /* Succeed if the following insn is the target label,
6945 or if the following two insns are a barrier and
6946 the target label. */
6947 this_insn = next_nonnote_insn (this_insn);
6948 if (this_insn && GET_CODE (this_insn) == BARRIER)
6949 this_insn = next_nonnote_insn (this_insn);
6950
6951 if (this_insn && this_insn == label
b36ba79f 6952 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
6953 {
6954 if (jump_clobbers)
6955 {
6956 arm_ccfsm_state = 2;
6957 this_insn = next_nonnote_insn (this_insn);
6958 }
6959 else
6960 arm_ccfsm_state = 1;
6961 succeed = TRUE;
6962 }
6963 else
6964 fail = TRUE;
6965 }
ff9940b0 6966 break;
2b835d68 6967
cce8749e
CH
6968 case JUMP_INSN:
6969 /* If this is an unconditional branch to the same label, succeed.
6970 If it is to another label, do nothing. If it is conditional,
6971 fail. */
914a3b8c 6972 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 6973
ed4c4348 6974 scanbody = PATTERN (this_insn);
ff9940b0
RE
6975 if (GET_CODE (scanbody) == SET
6976 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
6977 {
6978 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
6979 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
6980 {
6981 arm_ccfsm_state = 2;
6982 succeed = TRUE;
6983 }
6984 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
6985 fail = TRUE;
6986 }
b36ba79f
RE
6987 /* Fail if a conditional return is undesirable (eg on a
6988 StrongARM), but still allow this if optimizing for size. */
6989 else if (GET_CODE (scanbody) == RETURN
6990 && ! use_return_insn (TRUE)
6991 && ! optimize_size)
6992 fail = TRUE;
ff9940b0
RE
6993 else if (GET_CODE (scanbody) == RETURN
6994 && seeking_return)
6995 {
6996 arm_ccfsm_state = 2;
6997 succeed = TRUE;
6998 }
6999 else if (GET_CODE (scanbody) == PARALLEL)
7000 {
7001 switch (get_attr_conds (this_insn))
7002 {
7003 case CONDS_NOCOND:
7004 break;
7005 default:
7006 fail = TRUE;
7007 break;
7008 }
7009 }
cce8749e
CH
7010 break;
7011
7012 case INSN:
ff9940b0
RE
7013 /* Instructions using or affecting the condition codes make it
7014 fail. */
ed4c4348 7015 scanbody = PATTERN (this_insn);
74641843
RE
7016 if (! (GET_CODE (scanbody) == SET
7017 || GET_CODE (scanbody) == PARALLEL)
7018 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
7019 fail = TRUE;
7020 break;
7021
7022 default:
7023 break;
7024 }
7025 }
7026 if (succeed)
7027 {
ff9940b0 7028 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 7029 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
7030 else if (seeking_return || arm_ccfsm_state == 2)
7031 {
7032 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
7033 {
7034 this_insn = next_nonnote_insn (this_insn);
7035 if (this_insn && (GET_CODE (this_insn) == BARRIER
7036 || GET_CODE (this_insn) == CODE_LABEL))
7037 abort ();
7038 }
7039 if (!this_insn)
7040 {
 7041	      /* Oh, dear!  We ran off the end... give up.  */
7042 recog (PATTERN (insn), insn, NULL_PTR);
7043 arm_ccfsm_state = 0;
abaa26e5 7044 arm_target_insn = NULL;
ff9940b0
RE
7045 return;
7046 }
7047 arm_target_insn = this_insn;
7048 }
cce8749e
CH
7049 else
7050 abort ();
ff9940b0
RE
7051 if (jump_clobbers)
7052 {
7053 if (reverse)
7054 abort ();
7055 arm_current_cc =
7056 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
7057 0), 0), 1));
7058 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
7059 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7060 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
7061 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7062 }
7063 else
7064 {
7065 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
7066 what it was. */
7067 if (!reverse)
7068 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
7069 0));
7070 }
cce8749e 7071
cce8749e
CH
7072 if (reverse || then_not_else)
7073 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7074 }
1ccbefce
RH
7075
7076 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 7077 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 7078 across this call; since the insn has been recognized already we
b020fd92 7079     call recog directly). */
ff9940b0 7080 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 7081 }
f3bb6135 7082}
cce8749e 7083
c27ba912
DM
7084/* Return the length of a function name prefix
7085 that starts with the character 'c'. */
7086static int
7087arm_get_strip_length (char c)
7088{
7089 switch (c)
7090 {
7091 ARM_NAME_ENCODING_LENGTHS
7092 default: return 0;
7093 }
7094}
7095
7096/* Return a pointer to a function's name with any
7097 and all prefix encodings stripped from it. */
7098const char *
7099arm_strip_name_encoding (const char * name)
7100{
7101 int skip;
7102
7103 while ((skip = arm_get_strip_length (* name)))
7104 name += skip;
7105
7106 return name;
7107}
7108
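/* Illustrative example: assuming '*' appears in
   ARM_NAME_ENCODING_LENGTHS with length 1, then
   arm_strip_name_encoding ("*foo") returns "foo"; the loop strips any
   run of recognized prefix characters.  */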
2b835d68
RE
7109#ifdef AOF_ASSEMBLER
7110/* Special functions only needed when producing AOF syntax assembler. */
7111
32de079a
RE
7112rtx aof_pic_label = NULL_RTX;
7113struct pic_chain
7114{
62b10bbc
NC
7115 struct pic_chain * next;
7116 char * symname;
32de079a
RE
7117};
7118
62b10bbc 7119static struct pic_chain * aof_pic_chain = NULL;
32de079a
RE
7120
7121rtx
7122aof_pic_entry (x)
7123 rtx x;
7124{
62b10bbc 7125 struct pic_chain ** chainp;
32de079a
RE
7126 int offset;
7127
7128 if (aof_pic_label == NULL_RTX)
7129 {
92a432f4
RE
7130 /* We mark this here and not in arm_add_gc_roots() to avoid
7131 polluting even more code with ifdefs, and because it never
7132 contains anything useful until we assign to it here. */
7133 ggc_add_rtx_root (&aof_pic_label, 1);
32de079a
RE
7134 /* This needs to persist throughout the compilation. */
7135 end_temporary_allocation ();
43cffd11 7136 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
7137 resume_temporary_allocation ();
7138 }
7139
7140 for (offset = 0, chainp = &aof_pic_chain; *chainp;
7141 offset += 4, chainp = &(*chainp)->next)
7142 if ((*chainp)->symname == XSTR (x, 0))
7143 return plus_constant (aof_pic_label, offset);
7144
7145 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
7146 (*chainp)->next = NULL;
7147 (*chainp)->symname = XSTR (x, 0);
7148 return plus_constant (aof_pic_label, offset);
7149}
7150
7151void
7152aof_dump_pic_table (f)
62b10bbc 7153 FILE * f;
32de079a 7154{
62b10bbc 7155 struct pic_chain * chain;
32de079a
RE
7156
7157 if (aof_pic_chain == NULL)
7158 return;
7159
dd18ae56
NC
7160 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
7161 PIC_OFFSET_TABLE_REGNUM,
7162 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
7163 fputs ("|x$adcons|\n", f);
7164
7165 for (chain = aof_pic_chain; chain; chain = chain->next)
7166 {
7167 fputs ("\tDCD\t", f);
7168 assemble_name (f, chain->symname);
7169 fputs ("\n", f);
7170 }
7171}
7172
2b835d68
RE
7173int arm_text_section_count = 1;
7174
7175char *
84ed5e79 7176aof_text_section ()
2b835d68
RE
7177{
7178 static char buf[100];
2b835d68
RE
7179 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
7180 arm_text_section_count++);
7181 if (flag_pic)
7182 strcat (buf, ", PIC, REENTRANT");
7183 return buf;
7184}
7185
7186static int arm_data_section_count = 1;
7187
7188char *
7189aof_data_section ()
7190{
7191 static char buf[100];
7192 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
7193 return buf;
7194}
7195
7196/* The AOF assembler is religiously strict about declarations of
7197 imported and exported symbols, so that it is impossible to declare
956d6950 7198 a function as imported near the beginning of the file, and then to
2b835d68
RE
7199 export it later on. It is, however, possible to delay the decision
7200 until all the functions in the file have been compiled. To get
7201 around this, we maintain a list of the imports and exports, and
7202 delete from it any that are subsequently defined. At the end of
7203 compilation we spit the remainder of the list out before the END
7204 directive. */
7205
7206struct import
7207{
62b10bbc
NC
7208 struct import * next;
7209 char * name;
2b835d68
RE
7210};
7211
62b10bbc 7212static struct import * imports_list = NULL;
2b835d68
RE
7213
7214void
7215aof_add_import (name)
62b10bbc 7216 char * name;
2b835d68 7217{
62b10bbc 7218 struct import * new;
2b835d68
RE
7219
7220 for (new = imports_list; new; new = new->next)
7221 if (new->name == name)
7222 return;
7223
7224 new = (struct import *) xmalloc (sizeof (struct import));
7225 new->next = imports_list;
7226 imports_list = new;
7227 new->name = name;
7228}
7229
7230void
7231aof_delete_import (name)
62b10bbc 7232 char * name;
2b835d68 7233{
62b10bbc 7234 struct import ** old;
2b835d68
RE
7235
7236 for (old = &imports_list; *old; old = & (*old)->next)
7237 {
7238 if ((*old)->name == name)
7239 {
7240 *old = (*old)->next;
7241 return;
7242 }
7243 }
7244}
7245
7246int arm_main_function = 0;
7247
7248void
7249aof_dump_imports (f)
62b10bbc 7250 FILE * f;
2b835d68
RE
7251{
7252 /* The AOF assembler needs this to cause the startup code to be extracted
 7253   from the library.  Bringing in __main causes the whole thing to work
7254 automagically. */
7255 if (arm_main_function)
7256 {
7257 text_section ();
7258 fputs ("\tIMPORT __main\n", f);
7259 fputs ("\tDCD __main\n", f);
7260 }
7261
7262 /* Now dump the remaining imports. */
7263 while (imports_list)
7264 {
7265 fprintf (f, "\tIMPORT\t");
7266 assemble_name (f, imports_list->name);
7267 fputc ('\n', f);
7268 imports_list = imports_list->next;
7269 }
7270}
7271#endif /* AOF_ASSEMBLER */