]> gcc.gnu.org Git - gcc.git/blame - gcc/config/arm/arm.c
Daily bump.
[gcc.git] / gcc / config / arm / arm.c
CommitLineData
b36ba79f 1/* Output routines for GCC for ARM.
914a3b8c 2 Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
cce8749e 3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 4 and Martin Simmons (@harleqn.co.uk).
b36ba79f 5 More major hacks by Richard Earnshaw (rearnsha@arm.com).
cce8749e
CH
6
7This file is part of GNU CC.
8
9GNU CC is free software; you can redistribute it and/or modify
10it under the terms of the GNU General Public License as published by
11the Free Software Foundation; either version 2, or (at your option)
12any later version.
13
14GNU CC is distributed in the hope that it will be useful,
15but WITHOUT ANY WARRANTY; without even the implied warranty of
16MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17GNU General Public License for more details.
18
19You should have received a copy of the GNU General Public License
20along with GNU CC; see the file COPYING. If not, write to
8fb289e7
RK
21the Free Software Foundation, 59 Temple Place - Suite 330,
22Boston, MA 02111-1307, USA. */
ff9940b0 23
56636818 24#include "config.h"
43cffd11 25#include "system.h"
cce8749e
CH
26#include "rtl.h"
27#include "regs.h"
28#include "hard-reg-set.h"
29#include "real.h"
30#include "insn-config.h"
31#include "conditions.h"
32#include "insn-flags.h"
33#include "output.h"
34#include "insn-attr.h"
35#include "flags.h"
af48348a 36#include "reload.h"
e2c671ba 37#include "tree.h"
49ad7cfa 38#include "function.h"
bee06f3d 39#include "expr.h"
ad076f4e 40#include "toplev.h"
aec3cfba 41#include "recog.h"
92a432f4 42#include "ggc.h"
c27ba912 43#include "tm_p.h"
cce8749e 44
eb3921e8
NC
/* Shorthand for the machine-mode enumeration; used only to keep the
   static function prototypes below readable.  */
#ifndef Mmode
#define Mmode enum machine_mode
#endif
48
c27ba912 49/* Some function declarations. */
299d06ad
KG
50static HOST_WIDE_INT int_log2 PARAMS ((HOST_WIDE_INT));
51static char * output_multi_immediate PARAMS ((rtx *, char *, char *, int, HOST_WIDE_INT));
52static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, HOST_WIDE_INT, rtx, rtx, int, int));
53static int arm_naked_function_p PARAMS ((tree));
54static void init_fpa_table PARAMS ((void));
55static enum machine_mode select_dominance_cc_mode PARAMS ((rtx, rtx, HOST_WIDE_INT));
56static HOST_WIDE_INT add_minipool_constant PARAMS ((rtx, Mmode));
57static void dump_minipool PARAMS ((rtx));
58static rtx find_barrier PARAMS ((rtx, int));
59static void push_minipool_fix PARAMS ((rtx, int, rtx *, Mmode, rtx));
60static void push_minipool_barrier PARAMS ((rtx, int));
61static void note_invalid_constants PARAMS ((rtx, int));
62static char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
63static int eliminate_lr2ip PARAMS ((rtx *));
64static char * shift_op PARAMS ((rtx, HOST_WIDE_INT *));
65static int pattern_really_clobbers_lr PARAMS ((rtx));
66static int function_really_clobbers_lr PARAMS ((rtx));
6354dc9b 67static rtx emit_multi_reg_push PARAMS ((int));
2c849145 68static rtx emit_sfm PARAMS ((int, int));
299d06ad
KG
69static enum arm_cond_code get_arm_condition_code PARAMS ((rtx));
70static int const_ok_for_op PARAMS ((HOST_WIDE_INT, enum rtx_code));
71static void arm_add_gc_roots PARAMS ((void));
f3bb6135 72
c27ba912
DM
/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

/* The assembler output stream, defined in the driver.  */
extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */

/* The bits in this mask specify which instructions we are allowed to
   generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must setup `fp'.
   NOTE(review): the comment looks stale -- the name suggests this flags
   functions with anonymous (variadic) arguments; confirm against uses.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
static int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

/* Old-style forward declaration; duplicates the PARAMS prototype above.
   NOTE(review): could probably be removed -- confirm no ordering issue.  */
static enum arm_cond_code get_arm_condition_code ();

/* Convenience string-equality test.  */
#define streq(string1, string2) (strcmp (string1, string2) == 0)
2b835d68 186\f
/* Initialization code.  */

/* A processor or architecture name together with the FL_* capability
   flags that describe it.  */
struct processors
{
  char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm720",                     FL_MODE26 | FL_MODE32 },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                       FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",                  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",        FL_CO_PROC | FL_MODE26 },
  { "armv2a",       FL_CO_PROC | FL_MODE26 },
  { "armv3",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",       FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",        FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
268
aec3cfba
NC
/* Return the number of bits set in VALUE.  */
static unsigned int
bit_count (value)
     signed int value;
{
  /* Work on an unsigned copy of VALUE: the previous implementation
     computed "value & - value" on the signed operand, and negating
     INT_MIN is undefined behaviour; unsigned arithmetic is fully
     defined and yields the same bit pattern.  */
  unsigned int remaining = value;
  unsigned int count = 0;

  while (remaining != 0)
    {
      /* Clear the least significant set bit.  */
      remaining &= remaining - 1;
      ++ count;
    }

  return count;
}
284
2b835d68
RE
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.

   Order matters throughout: CPU/architecture flags are resolved first,
   then tuning, then the APCS / interworking / PIC / FP interactions,
   and finally the derived booleans used by arm.md.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.
     Iterates -mtune= (i == 2) before -march= and -mcpu=.  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
	{
	  const struct processors * sel;

	  for (sel = ptr->processors; sel->name != NULL; sel ++)
	    if (streq (ptr->string, sel->name))
	      {
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
	int cpu;
	char * name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2, "arm2" },
	{ TARGET_CPU_arm6, "arm6" },
	{ TARGET_CPU_arm610, "arm610" },
	{ TARGET_CPU_arm710, "arm710" },
	{ TARGET_CPU_arm7m, "arm7m" },
	{ TARGET_CPU_arm7500fe, "arm7500fe" },
	{ TARGET_CPU_arm7tdmi, "arm7tdmi" },
	{ TARGET_CPU_arm8, "arm8" },
	{ TARGET_CPU_arm810, "arm810" },
	{ TARGET_CPU_arm9, "arm9" },
	{ TARGET_CPU_strongarm, "strongarm" },
	{ TARGET_CPU_generic, "arm" },
	{ 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def ++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel ++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
	 switch that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There are no ARM processor that supports both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~ FL_MODE26;
	}

      if (! TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.
	     NOTE(review): if insn_flags has bits outside 'sought', the
	     equality below can never hold (the left side only has bits
	     in 'sought'), so we always fall into the best-fit scan --
	     confirm this is the intended behaviour.  */
	  for (sel = all_cores; sel->name != NULL; sel ++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned int current_bit_count = 0;
	      struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel ++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned int count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32" );
      target_flags &= ~ ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (! TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  arm_is_6_or_7     = ((tune_flags & (FL_MODE26 | FL_MODE32))
		       && !(tune_flags & FL_ARCH4)) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FP_SOFT3;
      else
	fatal ("Invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  /* NOTE(review): the file-scope variable is declared above as
     'arm_prgmode' but written here as 'arm_prog_mode' -- confirm which
     spelling is correct.  */
  arm_prog_mode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (! flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
	  || pic_register == HARD_FRAME_POINTER_REGNUM
	  || pic_register == STACK_POINTER_REGNUM
	  || pic_register >= PC_REGNUM)
	error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
596
/* Register with the garbage collector the file-scope rtx variables that
   must be kept alive across collections.  */
static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root */
  /* XXX: What about the minipool tables?  */
}
92a432f4 605
cce8749e 606\f
6354dc9b 607/* Return 1 if it is possible to return using a single instruction. */
ff9940b0 608int
b36ba79f
RE
609use_return_insn (iscond)
610 int iscond;
ff9940b0
RE
611{
612 int regno;
613
f5a1b0d2
NC
614 if (!reload_completed
615 || current_function_pretend_args_size
ff9940b0 616 || current_function_anonymous_args
56636818 617 || ((get_frame_size () + current_function_outgoing_args_size != 0)
f5a1b0d2 618 && !(TARGET_APCS && frame_pointer_needed)))
ff9940b0
RE
619 return 0;
620
b111229a 621 /* Can't be done if interworking with Thumb, and any registers have been
b36ba79f
RE
622 stacked. Similarly, on StrongARM, conditional returns are expensive
623 if they aren't taken and registers have been stacked. */
f5a1b0d2 624 if (iscond && arm_is_strong && frame_pointer_needed)
b36ba79f 625 return 0;
f5a1b0d2 626 if ((iscond && arm_is_strong)
6cfc7210 627 || TARGET_INTERWORK)
6ed30148
RE
628 {
629 for (regno = 0; regno < 16; regno++)
630 if (regs_ever_live[regno] && ! call_used_regs[regno])
631 return 0;
632
633 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
b111229a 634 return 0;
6ed30148 635 }
b111229a 636
ff9940b0 637 /* Can't be done if any of the FPU regs are pushed, since this also
6354dc9b 638 requires an insn. */
b111229a
RE
639 for (regno = 16; regno < 24; regno++)
640 if (regs_ever_live[regno] && ! call_used_regs[regno])
ff9940b0
RE
641 return 0;
642
31fdb4d5
DE
643 /* If a function is naked, don't use the "return" insn. */
644 if (arm_naked_function_p (current_function_decl))
645 return 0;
646
ff9940b0
RE
647 return 1;
648}
649
cce8749e
CH
650/* Return TRUE if int I is a valid immediate ARM constant. */
651
652int
653const_ok_for_arm (i)
ff9940b0 654 HOST_WIDE_INT i;
cce8749e 655{
ed4c4348 656 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 657
56636818
JL
658 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
659 be all zero, or all one. */
b39e1240
NC
660 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffffUL) != 0
661 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffffUL)
ed4c4348 662 != ((~(unsigned HOST_WIDE_INT) 0)
b39e1240 663 & ~(unsigned HOST_WIDE_INT) 0xffffffffUL)))
56636818
JL
664 return FALSE;
665
e2c671ba
RE
666 /* Fast return for 0 and powers of 2 */
667 if ((i & (i - 1)) == 0)
668 return TRUE;
669
cce8749e
CH
670 do
671 {
b39e1240 672 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffffUL) == 0)
f3bb6135 673 return TRUE;
abaa26e5 674 mask =
b39e1240
NC
675 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffffUL)
676 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffffUL);
ed4c4348 677 } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 678
f3bb6135
RE
679 return FALSE;
680}
cce8749e 681
6354dc9b 682/* Return true if I is a valid constant for the operation CODE. */
74bbc178
NC
683static int
684const_ok_for_op (i, code)
e2c671ba
RE
685 HOST_WIDE_INT i;
686 enum rtx_code code;
e2c671ba
RE
687{
688 if (const_ok_for_arm (i))
689 return 1;
690
691 switch (code)
692 {
693 case PLUS:
694 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
695
696 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
697 case XOR:
698 case IOR:
699 return 0;
700
701 case AND:
702 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
703
704 default:
705 abort ();
706 }
707}
708
709/* Emit a sequence of insns to handle a large constant.
710 CODE is the code of the operation required, it can be any of SET, PLUS,
711 IOR, AND, XOR, MINUS;
712 MODE is the mode in which the operation is being performed;
713 VAL is the integer to operate on;
714 SOURCE is the other operand (a register, or a null-pointer for SET);
715 SUBTARGETS means it is safe to create scratch registers if that will
2b835d68
RE
716 either produce a simpler sequence, or we will want to cse the values.
717 Return value is the number of insns emitted. */
e2c671ba
RE
718
719int
720arm_split_constant (code, mode, val, target, source, subtargets)
721 enum rtx_code code;
722 enum machine_mode mode;
723 HOST_WIDE_INT val;
724 rtx target;
725 rtx source;
726 int subtargets;
2b835d68
RE
727{
728 if (subtargets || code == SET
729 || (GET_CODE (target) == REG && GET_CODE (source) == REG
730 && REGNO (target) != REGNO (source)))
731 {
4b632bf1
RE
732 /* After arm_reorg has been called, we can't fix up expensive
733 constants by pushing them into memory so we must synthesise
734 them in-line, regardless of the cost. This is only likely to
735 be more costly on chips that have load delay slots and we are
736 compiling without running the scheduler (so no splitting
aec3cfba
NC
737 occurred before the final instruction emission).
738
739 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
aec3cfba 740 */
4b632bf1
RE
741 if (! after_arm_reorg
742 && (arm_gen_constant (code, mode, val, target, source, 1, 0)
743 > arm_constant_limit + (code != SET)))
2b835d68
RE
744 {
745 if (code == SET)
746 {
747 /* Currently SET is the only monadic value for CODE, all
748 the rest are diadic. */
43cffd11 749 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
2b835d68
RE
750 return 1;
751 }
752 else
753 {
754 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
755
43cffd11 756 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
2b835d68
RE
757 /* For MINUS, the value is subtracted from, since we never
758 have subtraction of a constant. */
759 if (code == MINUS)
43cffd11
RE
760 emit_insn (gen_rtx_SET (VOIDmode, target,
761 gen_rtx (code, mode, temp, source)));
2b835d68 762 else
43cffd11
RE
763 emit_insn (gen_rtx_SET (VOIDmode, target,
764 gen_rtx (code, mode, source, temp)));
2b835d68
RE
765 return 2;
766 }
767 }
768 }
769
770 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
771}
772
773/* As above, but extra parameter GENERATE which, if clear, suppresses
774 RTL generation. */
775int
776arm_gen_constant (code, mode, val, target, source, subtargets, generate)
777 enum rtx_code code;
778 enum machine_mode mode;
779 HOST_WIDE_INT val;
780 rtx target;
781 rtx source;
782 int subtargets;
783 int generate;
e2c671ba 784{
e2c671ba
RE
785 int can_invert = 0;
786 int can_negate = 0;
787 int can_negate_initial = 0;
788 int can_shift = 0;
789 int i;
790 int num_bits_set = 0;
791 int set_sign_bit_copies = 0;
792 int clear_sign_bit_copies = 0;
793 int clear_zero_bit_copies = 0;
794 int set_zero_bit_copies = 0;
795 int insns = 0;
e2c671ba 796 unsigned HOST_WIDE_INT temp1, temp2;
b39e1240 797 unsigned HOST_WIDE_INT remainder = val & 0xffffffffUL;
e2c671ba
RE
798
799 /* find out which operations are safe for a given CODE. Also do a quick
800 check for degenerate cases; these can occur when DImode operations
801 are split. */
802 switch (code)
803 {
804 case SET:
805 can_invert = 1;
806 can_shift = 1;
807 can_negate = 1;
808 break;
809
810 case PLUS:
811 can_negate = 1;
812 can_negate_initial = 1;
813 break;
814
815 case IOR:
b39e1240 816 if (remainder == 0xffffffffUL)
e2c671ba 817 {
2b835d68 818 if (generate)
43cffd11
RE
819 emit_insn (gen_rtx_SET (VOIDmode, target,
820 GEN_INT (ARM_SIGN_EXTEND (val))));
e2c671ba
RE
821 return 1;
822 }
823 if (remainder == 0)
824 {
825 if (reload_completed && rtx_equal_p (target, source))
826 return 0;
2b835d68 827 if (generate)
43cffd11 828 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
829 return 1;
830 }
831 break;
832
833 case AND:
834 if (remainder == 0)
835 {
2b835d68 836 if (generate)
43cffd11 837 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
e2c671ba
RE
838 return 1;
839 }
b39e1240 840 if (remainder == 0xffffffffUL)
e2c671ba
RE
841 {
842 if (reload_completed && rtx_equal_p (target, source))
843 return 0;
2b835d68 844 if (generate)
43cffd11 845 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
846 return 1;
847 }
848 can_invert = 1;
849 break;
850
851 case XOR:
852 if (remainder == 0)
853 {
854 if (reload_completed && rtx_equal_p (target, source))
855 return 0;
2b835d68 856 if (generate)
43cffd11 857 emit_insn (gen_rtx_SET (VOIDmode, target, source));
e2c671ba
RE
858 return 1;
859 }
b39e1240 860 if (remainder == 0xffffffffUL)
e2c671ba 861 {
2b835d68 862 if (generate)
43cffd11
RE
863 emit_insn (gen_rtx_SET (VOIDmode, target,
864 gen_rtx_NOT (mode, source)));
e2c671ba
RE
865 return 1;
866 }
867
868 /* We don't know how to handle this yet below. */
869 abort ();
870
871 case MINUS:
872 /* We treat MINUS as (val - source), since (source - val) is always
873 passed as (source + (-val)). */
874 if (remainder == 0)
875 {
2b835d68 876 if (generate)
43cffd11
RE
877 emit_insn (gen_rtx_SET (VOIDmode, target,
878 gen_rtx_NEG (mode, source)));
e2c671ba
RE
879 return 1;
880 }
881 if (const_ok_for_arm (val))
882 {
2b835d68 883 if (generate)
43cffd11
RE
884 emit_insn (gen_rtx_SET (VOIDmode, target,
885 gen_rtx_MINUS (mode, GEN_INT (val),
886 source)));
e2c671ba
RE
887 return 1;
888 }
889 can_negate = 1;
890
891 break;
892
893 default:
894 abort ();
895 }
896
6354dc9b 897 /* If we can do it in one insn get out quickly. */
e2c671ba
RE
898 if (const_ok_for_arm (val)
899 || (can_negate_initial && const_ok_for_arm (-val))
900 || (can_invert && const_ok_for_arm (~val)))
901 {
2b835d68 902 if (generate)
43cffd11
RE
903 emit_insn (gen_rtx_SET (VOIDmode, target,
904 (source ? gen_rtx (code, mode, source,
905 GEN_INT (val))
906 : GEN_INT (val))));
e2c671ba
RE
907 return 1;
908 }
909
e2c671ba 910 /* Calculate a few attributes that may be useful for specific
6354dc9b 911 optimizations. */
e2c671ba
RE
912 for (i = 31; i >= 0; i--)
913 {
914 if ((remainder & (1 << i)) == 0)
915 clear_sign_bit_copies++;
916 else
917 break;
918 }
919
920 for (i = 31; i >= 0; i--)
921 {
922 if ((remainder & (1 << i)) != 0)
923 set_sign_bit_copies++;
924 else
925 break;
926 }
927
928 for (i = 0; i <= 31; i++)
929 {
930 if ((remainder & (1 << i)) == 0)
931 clear_zero_bit_copies++;
932 else
933 break;
934 }
935
936 for (i = 0; i <= 31; i++)
937 {
938 if ((remainder & (1 << i)) != 0)
939 set_zero_bit_copies++;
940 else
941 break;
942 }
943
944 switch (code)
945 {
946 case SET:
947 /* See if we can do this by sign_extending a constant that is known
948 to be negative. This is a good, way of doing it, since the shift
949 may well merge into a subsequent insn. */
950 if (set_sign_bit_copies > 1)
951 {
952 if (const_ok_for_arm
953 (temp1 = ARM_SIGN_EXTEND (remainder
954 << (set_sign_bit_copies - 1))))
955 {
2b835d68
RE
956 if (generate)
957 {
d499463f 958 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
959 emit_insn (gen_rtx_SET (VOIDmode, new_src,
960 GEN_INT (temp1)));
2b835d68
RE
961 emit_insn (gen_ashrsi3 (target, new_src,
962 GEN_INT (set_sign_bit_copies - 1)));
963 }
e2c671ba
RE
964 return 2;
965 }
966 /* For an inverted constant, we will need to set the low bits,
967 these will be shifted out of harm's way. */
968 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
969 if (const_ok_for_arm (~temp1))
970 {
2b835d68
RE
971 if (generate)
972 {
d499463f 973 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
974 emit_insn (gen_rtx_SET (VOIDmode, new_src,
975 GEN_INT (temp1)));
2b835d68
RE
976 emit_insn (gen_ashrsi3 (target, new_src,
977 GEN_INT (set_sign_bit_copies - 1)));
978 }
e2c671ba
RE
979 return 2;
980 }
981 }
982
983 /* See if we can generate this by setting the bottom (or the top)
984 16 bits, and then shifting these into the other half of the
985 word. We only look for the simplest cases, to do more would cost
986 too much. Be careful, however, not to generate this when the
987 alternative would take fewer insns. */
b39e1240 988 if (val & 0xffff0000UL)
e2c671ba 989 {
b39e1240 990 temp1 = remainder & 0xffff0000UL;
e2c671ba
RE
991 temp2 = remainder & 0x0000ffff;
992
6354dc9b 993 /* Overlaps outside this range are best done using other methods. */
e2c671ba
RE
994 for (i = 9; i < 24; i++)
995 {
b39e1240 996 if ((((temp2 | (temp2 << i)) & 0xffffffffUL) == remainder)
e2c671ba
RE
997 && ! const_ok_for_arm (temp2))
998 {
d499463f
RE
999 rtx new_src = (subtargets
1000 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1001 : target);
1002 insns = arm_gen_constant (code, mode, temp2, new_src,
2b835d68 1003 source, subtargets, generate);
e2c671ba 1004 source = new_src;
2b835d68 1005 if (generate)
43cffd11
RE
1006 emit_insn (gen_rtx_SET
1007 (VOIDmode, target,
1008 gen_rtx_IOR (mode,
1009 gen_rtx_ASHIFT (mode, source,
1010 GEN_INT (i)),
1011 source)));
e2c671ba
RE
1012 return insns + 1;
1013 }
1014 }
1015
6354dc9b 1016 /* Don't duplicate cases already considered. */
e2c671ba
RE
1017 for (i = 17; i < 24; i++)
1018 {
1019 if (((temp1 | (temp1 >> i)) == remainder)
1020 && ! const_ok_for_arm (temp1))
1021 {
d499463f
RE
1022 rtx new_src = (subtargets
1023 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1024 : target);
1025 insns = arm_gen_constant (code, mode, temp1, new_src,
2b835d68 1026 source, subtargets, generate);
e2c671ba 1027 source = new_src;
2b835d68 1028 if (generate)
43cffd11
RE
1029 emit_insn
1030 (gen_rtx_SET (VOIDmode, target,
1031 gen_rtx_IOR
1032 (mode,
1033 gen_rtx_LSHIFTRT (mode, source,
1034 GEN_INT (i)),
1035 source)));
e2c671ba
RE
1036 return insns + 1;
1037 }
1038 }
1039 }
1040 break;
1041
1042 case IOR:
1043 case XOR:
7b64da89
RE
1044 /* If we have IOR or XOR, and the constant can be loaded in a
1045 single instruction, and we can find a temporary to put it in,
e2c671ba
RE
1046 then this can be done in two instructions instead of 3-4. */
1047 if (subtargets
d499463f 1048 /* TARGET can't be NULL if SUBTARGETS is 0 */
e2c671ba
RE
1049 || (reload_completed && ! reg_mentioned_p (target, source)))
1050 {
1051 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
1052 {
2b835d68
RE
1053 if (generate)
1054 {
1055 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
e2c671ba 1056
43cffd11
RE
1057 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1058 emit_insn (gen_rtx_SET (VOIDmode, target,
1059 gen_rtx (code, mode, source, sub)));
2b835d68 1060 }
e2c671ba
RE
1061 return 2;
1062 }
1063 }
1064
1065 if (code == XOR)
1066 break;
1067
1068 if (set_sign_bit_copies > 8
1069 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1070 {
2b835d68
RE
1071 if (generate)
1072 {
1073 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1074 rtx shift = GEN_INT (set_sign_bit_copies);
1075
43cffd11
RE
1076 emit_insn (gen_rtx_SET (VOIDmode, sub,
1077 gen_rtx_NOT (mode,
1078 gen_rtx_ASHIFT (mode,
1079 source,
f5a1b0d2 1080 shift))));
43cffd11
RE
1081 emit_insn (gen_rtx_SET (VOIDmode, target,
1082 gen_rtx_NOT (mode,
1083 gen_rtx_LSHIFTRT (mode, sub,
1084 shift))));
2b835d68 1085 }
e2c671ba
RE
1086 return 2;
1087 }
1088
1089 if (set_zero_bit_copies > 8
1090 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1091 {
2b835d68
RE
1092 if (generate)
1093 {
1094 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1095 rtx shift = GEN_INT (set_zero_bit_copies);
1096
43cffd11
RE
1097 emit_insn (gen_rtx_SET (VOIDmode, sub,
1098 gen_rtx_NOT (mode,
1099 gen_rtx_LSHIFTRT (mode,
1100 source,
f5a1b0d2 1101 shift))));
43cffd11
RE
1102 emit_insn (gen_rtx_SET (VOIDmode, target,
1103 gen_rtx_NOT (mode,
1104 gen_rtx_ASHIFT (mode, sub,
f5a1b0d2 1105 shift))));
2b835d68 1106 }
e2c671ba
RE
1107 return 2;
1108 }
1109
1110 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
1111 {
2b835d68
RE
1112 if (generate)
1113 {
1114 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
43cffd11
RE
1115 emit_insn (gen_rtx_SET (VOIDmode, sub,
1116 gen_rtx_NOT (mode, source)));
2b835d68
RE
1117 source = sub;
1118 if (subtargets)
1119 sub = gen_reg_rtx (mode);
43cffd11
RE
1120 emit_insn (gen_rtx_SET (VOIDmode, sub,
1121 gen_rtx_AND (mode, source,
1122 GEN_INT (temp1))));
1123 emit_insn (gen_rtx_SET (VOIDmode, target,
1124 gen_rtx_NOT (mode, sub)));
2b835d68 1125 }
e2c671ba
RE
1126 return 3;
1127 }
1128 break;
1129
1130 case AND:
1131 /* See if two shifts will do 2 or more insn's worth of work. */
1132 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1133 {
914a3b8c 1134 HOST_WIDE_INT shift_mask = ((0xffffffffUL
e2c671ba 1135 << (32 - clear_sign_bit_copies))
b39e1240 1136 & 0xffffffffUL);
e2c671ba 1137
b39e1240 1138 if ((remainder | shift_mask) != 0xffffffffUL)
e2c671ba 1139 {
2b835d68
RE
1140 if (generate)
1141 {
d499463f 1142 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68 1143 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1144 new_src, source, subtargets, 1);
1145 source = new_src;
2b835d68
RE
1146 }
1147 else
d499463f
RE
1148 {
1149 rtx targ = subtargets ? NULL_RTX : target;
1150 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1151 targ, source, subtargets, 0);
1152 }
2b835d68
RE
1153 }
1154
1155 if (generate)
1156 {
d499463f
RE
1157 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1158 rtx shift = GEN_INT (clear_sign_bit_copies);
1159
1160 emit_insn (gen_ashlsi3 (new_src, source, shift));
1161 emit_insn (gen_lshrsi3 (target, new_src, shift));
e2c671ba
RE
1162 }
1163
e2c671ba
RE
1164 return insns + 2;
1165 }
1166
1167 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1168 {
1169 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
e2c671ba 1170
b39e1240 1171 if ((remainder | shift_mask) != 0xffffffffUL)
e2c671ba 1172 {
2b835d68
RE
1173 if (generate)
1174 {
d499463f
RE
1175 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1176
2b835d68 1177 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
1178 new_src, source, subtargets, 1);
1179 source = new_src;
2b835d68
RE
1180 }
1181 else
d499463f
RE
1182 {
1183 rtx targ = subtargets ? NULL_RTX : target;
1184
1185 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1186 targ, source, subtargets, 0);
1187 }
2b835d68
RE
1188 }
1189
1190 if (generate)
1191 {
d499463f
RE
1192 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1193 rtx shift = GEN_INT (clear_zero_bit_copies);
1194
1195 emit_insn (gen_lshrsi3 (new_src, source, shift));
1196 emit_insn (gen_ashlsi3 (target, new_src, shift));
e2c671ba
RE
1197 }
1198
e2c671ba
RE
1199 return insns + 2;
1200 }
1201
1202 break;
1203
1204 default:
1205 break;
1206 }
1207
1208 for (i = 0; i < 32; i++)
1209 if (remainder & (1 << i))
1210 num_bits_set++;
1211
1212 if (code == AND || (can_invert && num_bits_set > 16))
b39e1240 1213 remainder = (~remainder) & 0xffffffffUL;
e2c671ba 1214 else if (code == PLUS && num_bits_set > 16)
b39e1240 1215 remainder = (-remainder) & 0xffffffffUL;
e2c671ba
RE
1216 else
1217 {
1218 can_invert = 0;
1219 can_negate = 0;
1220 }
1221
1222 /* Now try and find a way of doing the job in either two or three
1223 instructions.
1224 We start by looking for the largest block of zeros that are aligned on
1225 a 2-bit boundary, we then fill up the temps, wrapping around to the
1226 top of the word when we drop off the bottom.
6354dc9b 1227 In the worst case this code should produce no more than four insns. */
e2c671ba
RE
1228 {
1229 int best_start = 0;
1230 int best_consecutive_zeros = 0;
1231
1232 for (i = 0; i < 32; i += 2)
1233 {
1234 int consecutive_zeros = 0;
1235
1236 if (! (remainder & (3 << i)))
1237 {
1238 while ((i < 32) && ! (remainder & (3 << i)))
1239 {
1240 consecutive_zeros += 2;
1241 i += 2;
1242 }
1243 if (consecutive_zeros > best_consecutive_zeros)
1244 {
1245 best_consecutive_zeros = consecutive_zeros;
1246 best_start = i - consecutive_zeros;
1247 }
1248 i -= 2;
1249 }
1250 }
1251
1252 /* Now start emitting the insns, starting with the one with the highest
1253 bit set: we do this so that the smallest number will be emitted last;
6354dc9b 1254 this is more likely to be combinable with addressing insns. */
e2c671ba
RE
1255 i = best_start;
1256 do
1257 {
1258 int end;
1259
1260 if (i <= 0)
1261 i += 32;
1262 if (remainder & (3 << (i - 2)))
1263 {
1264 end = i - 8;
1265 if (end < 0)
1266 end += 32;
1267 temp1 = remainder & ((0x0ff << end)
1268 | ((i < end) ? (0xff >> (32 - end)) : 0));
1269 remainder &= ~temp1;
1270
d499463f 1271 if (generate)
e2c671ba 1272 {
d499463f
RE
1273 rtx new_src;
1274
1275 if (code == SET)
43cffd11
RE
1276 emit_insn (gen_rtx_SET (VOIDmode,
1277 new_src = (subtargets
1278 ? gen_reg_rtx (mode)
1279 : target),
1280 GEN_INT (can_invert
1281 ? ~temp1 : temp1)));
d499463f 1282 else if (code == MINUS)
43cffd11
RE
1283 emit_insn (gen_rtx_SET (VOIDmode,
1284 new_src = (subtargets
1285 ? gen_reg_rtx (mode)
1286 : target),
1287 gen_rtx (code, mode, GEN_INT (temp1),
1288 source)));
d499463f 1289 else
43cffd11
RE
1290 emit_insn (gen_rtx_SET (VOIDmode,
1291 new_src = (remainder
1292 ? (subtargets
1293 ? gen_reg_rtx (mode)
1294 : target)
1295 : target),
1296 gen_rtx (code, mode, source,
1297 GEN_INT (can_invert ? ~temp1
1298 : (can_negate
1299 ? -temp1
1300 : temp1)))));
d499463f 1301 source = new_src;
e2c671ba
RE
1302 }
1303
d499463f
RE
1304 if (code == SET)
1305 {
1306 can_invert = 0;
1307 code = PLUS;
1308 }
1309 else if (code == MINUS)
1310 code = PLUS;
1311
e2c671ba 1312 insns++;
e2c671ba
RE
1313 i -= 6;
1314 }
1315 i -= 2;
1316 } while (remainder);
1317 }
1318 return insns;
1319}
1320
bd9c7e23
RE
1321/* Canonicalize a comparison so that we are more likely to recognize it.
1322 This can be done for a few constant compares, where we can make the
1323 immediate value easier to load. */
1324enum rtx_code
1325arm_canonicalize_comparison (code, op1)
1326 enum rtx_code code;
62b10bbc 1327 rtx * op1;
bd9c7e23 1328{
ad076f4e 1329 unsigned HOST_WIDE_INT i = INTVAL (*op1);
bd9c7e23
RE
1330
1331 switch (code)
1332 {
1333 case EQ:
1334 case NE:
1335 return code;
1336
1337 case GT:
1338 case LE:
ad076f4e
RE
1339 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
1340 - 1)
bd9c7e23
RE
1341 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1342 {
1343 *op1 = GEN_INT (i+1);
1344 return code == GT ? GE : LT;
1345 }
1346 break;
1347
1348 case GE:
1349 case LT:
ad076f4e 1350 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
bd9c7e23
RE
1351 && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
1352 {
1353 *op1 = GEN_INT (i-1);
1354 return code == GE ? GT : LE;
1355 }
1356 break;
1357
1358 case GTU:
1359 case LEU:
ad076f4e 1360 if (i != ~((unsigned HOST_WIDE_INT) 0)
bd9c7e23
RE
1361 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1362 {
1363 *op1 = GEN_INT (i + 1);
1364 return code == GTU ? GEU : LTU;
1365 }
1366 break;
1367
1368 case GEU:
1369 case LTU:
1370 if (i != 0
1371 && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
1372 {
1373 *op1 = GEN_INT (i - 1);
1374 return code == GEU ? GTU : LEU;
1375 }
1376 break;
1377
1378 default:
1379 abort ();
1380 }
1381
1382 return code;
1383}
bd9c7e23 1384
f5a1b0d2
NC
1385/* Decide whether a type should be returned in memory (true)
1386 or in a register (false). This is called by the macro
1387 RETURN_IN_MEMORY. */
2b835d68
RE
1388int
1389arm_return_in_memory (type)
1390 tree type;
1391{
f5a1b0d2 1392 if (! AGGREGATE_TYPE_P (type))
9e291dbe 1393 /* All simple types are returned in registers. */
d7d01975
NC
1394 return 0;
1395
1396 if (int_size_in_bytes (type) > 4)
9e291dbe 1397 /* All structures/unions bigger than one word are returned in memory. */
d7d01975
NC
1398 return 1;
1399
d7d01975 1400 if (TREE_CODE (type) == RECORD_TYPE)
2b835d68
RE
1401 {
1402 tree field;
1403
3a2ea258
RE
1404 /* For a struct the APCS says that we only return in a register
1405 if the type is 'integer like' and every addressable element
1406 has an offset of zero. For practical purposes this means
1407 that the structure can have at most one non bit-field element
1408 and that this element must be the first one in the structure. */
1409
f5a1b0d2
NC
1410 /* Find the first field, ignoring non FIELD_DECL things which will
1411 have been created by C++. */
1412 for (field = TYPE_FIELDS (type);
1413 field && TREE_CODE (field) != FIELD_DECL;
1414 field = TREE_CHAIN (field))
1415 continue;
1416
1417 if (field == NULL)
9e291dbe 1418 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
f5a1b0d2 1419
3a2ea258 1420 /* Check that the first field is valid for returning in a register... */
9e291dbe 1421 if (FLOAT_TYPE_P (TREE_TYPE (field)))
3a2ea258
RE
1422 return 1;
1423
9e291dbe 1424 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
3a2ea258
RE
1425 return 1;
1426
1427 /* Now check the remaining fields, if any. Only bitfields are allowed,
1428 since they are not addressable. */
f5a1b0d2
NC
1429 for (field = TREE_CHAIN (field);
1430 field;
1431 field = TREE_CHAIN (field))
1432 {
1433 if (TREE_CODE (field) != FIELD_DECL)
1434 continue;
1435
1436 if (! DECL_BIT_FIELD_TYPE (field))
1437 return 1;
1438 }
2b835d68
RE
1439
1440 return 0;
1441 }
d7d01975
NC
1442
1443 if (TREE_CODE (type) == UNION_TYPE)
2b835d68
RE
1444 {
1445 tree field;
1446
1447 /* Unions can be returned in registers if every element is
1448 integral, or can be returned in an integer register. */
f5a1b0d2
NC
1449 for (field = TYPE_FIELDS (type);
1450 field;
1451 field = TREE_CHAIN (field))
2b835d68 1452 {
f5a1b0d2
NC
1453 if (TREE_CODE (field) != FIELD_DECL)
1454 continue;
1455
6cc8c0b3
NC
1456 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1457 return 1;
1458
f5a1b0d2 1459 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
2b835d68
RE
1460 return 1;
1461 }
f5a1b0d2 1462
2b835d68
RE
1463 return 0;
1464 }
f5a1b0d2 1465
6354dc9b 1466 /* Put other aggregates in memory. */
2b835d68
RE
1467 return 1;
1468}
1469
82e9d970
PB
1470/* Initialize a variable CUM of type CUMULATIVE_ARGS
1471 for a call to a function whose data type is FNTYPE.
1472 For a library call, FNTYPE is NULL. */
1473void
1474arm_init_cumulative_args (pcum, fntype, libname, indirect)
1475 CUMULATIVE_ARGS * pcum;
1476 tree fntype;
1477 rtx libname ATTRIBUTE_UNUSED;
1478 int indirect ATTRIBUTE_UNUSED;
1479{
1480 /* On the ARM, the offset starts at 0. */
c27ba912
DM
1481 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1482
82e9d970
PB
1483 pcum->call_cookie = CALL_NORMAL;
1484
1485 if (TARGET_LONG_CALLS)
1486 pcum->call_cookie = CALL_LONG;
1487
1488 /* Check for long call/short call attributes. The attributes
1489 override any command line option. */
1490 if (fntype)
1491 {
1492 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1493 pcum->call_cookie = CALL_SHORT;
1494 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1495 pcum->call_cookie = CALL_LONG;
1496 }
1497}
1498
1499/* Determine where to put an argument to a function.
1500 Value is zero to push the argument on the stack,
1501 or a hard register in which to store the argument.
1502
1503 MODE is the argument's machine mode.
1504 TYPE is the data type of the argument (as a tree).
1505 This is null for libcalls where that information may
1506 not be available.
1507 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1508 the preceding args and about the function being called.
1509 NAMED is nonzero if this argument is a named parameter
1510 (otherwise it is an extra parameter matching an ellipsis). */
1511rtx
1512arm_function_arg (pcum, mode, type, named)
1513 CUMULATIVE_ARGS * pcum;
1514 enum machine_mode mode;
1515 tree type ATTRIBUTE_UNUSED;
1516 int named;
1517{
1518 if (mode == VOIDmode)
1519 /* Compute operand 2 of the call insn. */
1520 return GEN_INT (pcum->call_cookie);
1521
1522 if (! named || pcum->nregs >= NUM_ARG_REGS)
1523 return NULL_RTX;
1524
1525 return gen_rtx_REG (mode, pcum->nregs);
1526}
82e9d970 1527\f
c27ba912
DM
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,		/* No #pragma [no_]long_calls is in effect.  */
  LONG,		/* #pragma long_calls is in effect.  */
  SHORT		/* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

/* The state selected by the most recently seen pragma, consulted by
   arm_set_default_type_attributes when new function types are built.  */
static arm_pragma_enum arm_pragma_long_calls = OFF;
82e9d970 1537
c27ba912
DM
1538/* Handle pragmas for compatibility with Intel's compilers.
1539 FIXME: This is incomplete, since it does not handle all
1540 the pragmas that the Intel compilers understand. */
82e9d970 1541int
c27ba912
DM
1542arm_process_pragma (p_getc, p_ungetc, pname)
1543 int (* p_getc) PARAMS ((void)) ATTRIBUTE_UNUSED;
1544 void (* p_ungetc) PARAMS ((int)) ATTRIBUTE_UNUSED;
1545 char * pname;
82e9d970 1546{
c27ba912
DM
1547 /* Should be pragma 'far' or equivalent for callx/balx here. */
1548 if (strcmp (pname, "long_calls") == 0)
1549 arm_pragma_long_calls = LONG;
1550 else if (strcmp (pname, "no_long_calls") == 0)
1551 arm_pragma_long_calls = SHORT;
1552 else if (strcmp (pname, "long_calls_off") == 0)
1553 arm_pragma_long_calls = OFF;
1554 else
82e9d970
PB
1555 return 0;
1556
c27ba912 1557 return 1;
82e9d970
PB
1558}
1559\f
1560/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
1561 attribute for TYPE. The attributes in ATTRIBUTES have previously been
1562 assigned to TYPE. */
1563int
1564arm_valid_type_attribute_p (type, attributes, identifier, args)
1565 tree type;
1566 tree attributes ATTRIBUTE_UNUSED;
1567 tree identifier;
1568 tree args;
1569{
1570 if ( TREE_CODE (type) != FUNCTION_TYPE
1571 && TREE_CODE (type) != METHOD_TYPE
1572 && TREE_CODE (type) != FIELD_DECL
1573 && TREE_CODE (type) != TYPE_DECL)
1574 return 0;
1575
1576 /* Function calls made to this symbol must be done indirectly, because
1577 it may lie outside of the 26 bit addressing range of a normal function
1578 call. */
1579 if (is_attribute_p ("long_call", identifier))
1580 return (args == NULL_TREE);
c27ba912 1581
82e9d970
PB
1582 /* Whereas these functions are always known to reside within the 26 bit
1583 addressing range. */
1584 if (is_attribute_p ("short_call", identifier))
1585 return (args == NULL_TREE);
1586
1587 return 0;
1588}
1589
1590/* Return 0 if the attributes for two types are incompatible, 1 if they
1591 are compatible, and 2 if they are nearly compatible (which causes a
1592 warning to be generated). */
1593int
1594arm_comp_type_attributes (type1, type2)
1595 tree type1;
1596 tree type2;
1597{
1cb8d58a 1598 int l1, l2, s1, s2;
bd7fc26f 1599
82e9d970
PB
1600 /* Check for mismatch of non-default calling convention. */
1601 if (TREE_CODE (type1) != FUNCTION_TYPE)
1602 return 1;
1603
1604 /* Check for mismatched call attributes. */
1cb8d58a
NC
1605 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
1606 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
1607 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
1608 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
1609
1610 /* Only bother to check if an attribute is defined. */
1611 if (l1 | l2 | s1 | s2)
1612 {
1613 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 1614 if ((l1 != l2) || (s1 != s2))
bd7fc26f 1615 return 0;
82e9d970 1616
bd7fc26f
NC
1617 /* Disallow mixed attributes. */
1618 if ((l1 & s2) || (l2 & s1))
1619 return 0;
1620 }
1621
1622 return 1;
82e9d970
PB
1623}
1624
d7d01975
NC
1625/* Check the ARM specific attributes on the given function decl.
1626 If any of them would prevent the function from being inlined,
1627 return a tesxtual description of why not. Otherwise return NULL. */
1628const char *
1629arm_function_attribute_inlineable_p (fndecl)
1630 tree fndecl;
1631{
1632 if (lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (fndecl)))
1633 return "naked functions cannot be inlined";
1634
1635 /* Allow functions with short_call and long_call attributes to be inlined. */
1636 return NULL;
1637}
1638
c27ba912
DM
1639/* Encode long_call or short_call attribute by prefixing
1640 symbol name in DECL with a special character FLAG. */
1641void
1642arm_encode_call_attribute (decl, flag)
1643 tree decl;
1644 char flag;
1645{
3cce094d 1646 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b
NC
1647 int len = strlen (str);
1648 char * newstr;
c27ba912
DM
1649
1650 if (TREE_CODE (decl) != FUNCTION_DECL)
1651 return;
1652
1653 /* Do not allow weak functions to be treated as short call. */
1654 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
1655 return;
1656
1657 if (ggc_p)
1658 newstr = ggc_alloc_string (NULL, len + 2);
1659 else
1660 newstr = permalloc (len + 2);
1661
1662 sprintf (newstr, "%c%s", flag, str);
1663
1664 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
1665}
1666
1667/* Assigns default attributes to newly defined type. This is used to
1668 set short_call/long_call attributes for function types of
1669 functions defined inside corresponding #pragma scopes. */
1670void
1671arm_set_default_type_attributes (type)
1672 tree type;
1673{
1674 /* Add __attribute__ ((long_call)) to all functions, when
1675 inside #pragma long_calls or __attribute__ ((short_call)),
1676 when inside #pragma no_long_calls. */
1677 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1678 {
1679 tree type_attr_list, attr_name;
1680 type_attr_list = TYPE_ATTRIBUTES (type);
1681
1682 if (arm_pragma_long_calls == LONG)
1683 attr_name = get_identifier ("long_call");
1684 else if (arm_pragma_long_calls == SHORT)
1685 attr_name = get_identifier ("short_call");
1686 else
1687 return;
1688
1689 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
1690 TYPE_ATTRIBUTES (type) = type_attr_list;
1691 }
1692}
1693\f
1694/* Return 1 if the operand is a SYMBOL_REF for a function known to be
1695 defined within the current compilation unit. If this caanot be
1696 determined, then 0 is returned. */
1697static int
1698current_file_function_operand (sym_ref)
1699 rtx sym_ref;
1700{
1701 /* This is a bit of a fib. A function will have a short call flag
1702 applied to its name if it has the short call attribute, or it has
1703 already been defined within the current compilation unit. */
1704 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
1705 return 1;
1706
1707 /* The current funciton is always defined within the current compilation
1708 unit. if it s a weak defintion however, then this may not be the real
1709 defintion of the function, and so we have to say no. */
1710 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
1711 && ! DECL_WEAK (current_function_decl))
1712 return 1;
1713
1714 /* We cannot make the determination - default to returning 0. */
1715 return 0;
1716}
1717
1718/* Return non-zero if a 32 bit "long_call" should be generated for
1719 this call. We generate a long_call if the function:
1720
1721 a. has an __attribute__((long call))
1722 or b. is within the scope of a #pragma long_calls
1723 or c. the -mlong-calls command line switch has been specified
1724
1725 However we do not generate a long call if the function:
1726
1727 d. has an __attribute__ ((short_call))
1728 or e. is inside the scope of a #pragma no_long_calls
1729 or f. has an __attribute__ ((section))
1730 or g. is defined within the current compilation unit.
1731
1732 This function will be called by C fragments contained in the machine
1733 description file. CALL_REF and CALL_COOKIE correspond to the matched
1734 rtl operands. CALL_SYMBOL is used to distinguish between
1735 two different callers of the function. It is set to 1 in the
1736 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
1737 and "call_value" patterns. This is because of the difference in the
1738 SYM_REFs passed by these patterns. */
1739int
1740arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
1741 rtx sym_ref;
1742 int call_cookie;
1743 int call_symbol;
1744{
1745 if (! call_symbol)
1746 {
1747 if (GET_CODE (sym_ref) != MEM)
1748 return 0;
1749
1750 sym_ref = XEXP (sym_ref, 0);
1751 }
1752
1753 if (GET_CODE (sym_ref) != SYMBOL_REF)
1754 return 0;
1755
1756 if (call_cookie & CALL_SHORT)
1757 return 0;
1758
1759 if (TARGET_LONG_CALLS && flag_function_sections)
1760 return 1;
1761
1762 if (current_file_function_operand (sym_ref, VOIDmode))
1763 return 0;
1764
1765 return (call_cookie & CALL_LONG)
1766 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
1767 || TARGET_LONG_CALLS;
1768}
82e9d970 1769\f
32de079a
RE
1770int
1771legitimate_pic_operand_p (x)
1772 rtx x;
1773{
1774 if (CONSTANT_P (x) && flag_pic
1775 && (GET_CODE (x) == SYMBOL_REF
1776 || (GET_CODE (x) == CONST
1777 && GET_CODE (XEXP (x, 0)) == PLUS
1778 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1779 return 0;
1780
1781 return 1;
1782}
1783
/* Convert ORIG, a constant address, into a form that is legitimate when
   compiling PIC code, emitting any insns needed to load it.  MODE is the
   mode of the value being addressed; REG, if non-null, is a register the
   caller allows us to use as scratch/result.  Returns the legitimized
   address (possibly ORIG itself if nothing needed to be done).  */
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      /* No scratch register supplied: we may only create one before
	 reload has started.  */
      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      /* Load the GOT offset of the symbol, then load the symbol's
	 address from the GOT slot at (PIC register + offset).  */
      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
			     gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					   address));
      /* The GOT entry does not change during the function.  */
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* Already relative to the PIC register: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize (symbol + offset) piecewise; only PLUS is expected
	 inside a CONST here.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  */
	  GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  /* The constant offset is out of range for this mode: force
	     it into a register (only possible before reload).  */
	  if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      /* Multi-word values (or soft-float wide values) cannot use a
	 reg+reg address; add the parts into REG instead.  */
      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      current_function_uses_pic_offset_table = 1;

      /* When GOT relocations are needed even for labels, compute the
	 label's address relative to the PIC register explicitly.  */
      if (NEED_GOT_RELOC)
	{
	  rtx pic_ref, address = gen_reg_rtx (Pmode);

	  emit_insn (gen_pic_load_addr (address, orig));
	  pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);

	  emit_move_insn (address, pic_ref);
	  return address;
	}
    }

  return orig;
}
1900
1901static rtx pic_rtx;
1902
1903int
62b10bbc 1904is_pic (x)
32de079a
RE
1905 rtx x;
1906{
1907 if (x == pic_rtx)
1908 return 1;
1909 return 0;
1910}
1911
/* Emit, at the start of the current function, the insn sequence that
   initializes the PIC register (load of _GLOBAL_OFFSET_TABLE_ followed
   by the pc-relative fixup add).  Does nothing if the function never
   uses the PIC register, or when a single global PIC base is in use.  */
void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  /* Using the PIC register without -fpic is a compiler bug.  */
  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), 8);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
			    gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  /* The value loaded is (GOT [possibly pc-relative]) - (label + 8); adding
     the pc at the label cancels the correction, yielding the GOT address.  */
  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));

  /* Place the whole sequence before the function's first insn.  */
  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
1952
e2c671ba
RE
/* TRUE if X is a hard/pseudo register, or a SUBREG of one.  */
#define REG_OR_SUBREG_REG(X)					\
  (GET_CODE (X) == REG						\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

/* The underlying register of X: X itself if a REG, otherwise the
   register inside the SUBREG.  Only valid when REG_OR_SUBREG_REG (X).  */
#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

/* TRUE if X is one of the frame-related pointer registers, which are
   special-cased in the cost function below.  */
#define ARM_FRAME_RTX(X)				\
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx	\
   || (X) == arg_pointer_rtx)
1963
/* Return an approximate cost (in abstract units, roughly proportional
   to insn count/latency) for the rtx X, whose outermost code is CODE.
   Used by the middle end (via RTX_COSTS) to choose between
   alternative expansions.  Larger is more expensive.  */
int
arm_rtx_costs (x, code)
     rtx x;
     enum rtx_code code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      /* No hardware divide: this is always a library call.  */
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      /* DImode shifts need multiple insns; non-register operands add
	 the cost of loading them first.  */
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      /* SImode shifts fold into the shifter operand of a single insn.  */
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
		       && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	/* FPA subtract; operands that are not registers or encodable
	   FPU immediates must be loaded first.  */
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      /* Cheap cases: RSB with an encodable immediate, or a subtract
	 whose second operand is a (free) shifter operand.  */
      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      /* NOTE(review): the second arm of this || subsumes the first
	 (it is the first minus the CONST_INT test), so the CONST_INT
	 exemption described above never actually applies — verify
	 whether the second arm was meant to be on XEXP (x, 1).  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	/* First operand may be a free shifter operand (shift or
	   multiply by a power of two).  */
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  /* Estimate the number of Booth steps the multiplier needs
	     for this constant (low 32 bits only).  */
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffffUL);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;

	  /* Tune as appropriate.  */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      /* (truncate (lshiftrt (mult (extend) (extend)))) is the
	 high-part multiply pattern — cheap when smull/umull exist.  */
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      /* Conditional branch vs. conditional execution.  */
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      /* Sign-extending a QImode value needs shifts (no ldrsb before
	 arch v4); HImode/SImode fall through to the shared table.  */
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    default:
      return 99;
    }
}
32de079a
RE
2195
/* Adjust the scheduling cost of the dependence LINK between INSN and
   the insn DEP it depends on.  COST is the default cost.  Returns 0
   for anti/output dependences (results available immediately), 1 for
   a load after a store when the load is likely to hit the cache, and
   COST otherwise.  */
int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  /* XXX This is not strictly true for the FPA.  */
  if (REG_NOTE_KIND(link) == REG_DEP_ANTI
      || REG_NOTE_KIND(link) == REG_DEP_OUTPUT)
    return 0;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store, there is no conflict if the load reads
	 from a cached area.  Assume that loads from the stack, and from the
	 constant pool are cached, and that others will miss.  This is a
	 hack.  */
      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
	  || reg_mentioned_p (hard_frame_pointer_rtx,
			      XEXP (SET_SRC (i_pat), 0)))
	return 1;
    }

  return cost;
}
2230
6354dc9b 2231/* This code has been fixed for cross compilation. */
ff9940b0
RE
2232
/* Nonzero once values_fpa has been populated by init_fpa_table.  */
static int fpa_consts_inited = 0;

/* Decimal spellings of the eight constants the FPA coprocessor can
   encode as instruction immediates.  */
char * strings_fpa[8] =
{
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

/* REAL_VALUE_TYPE equivalents of strings_fpa, filled in lazily.  */
static REAL_VALUE_TYPE values_fpa[8];

/* Populate values_fpa by parsing each entry of strings_fpa as a
   DFmode constant.  Done lazily (and host/target independently, via
   REAL_VALUE_ATOF) so cross compilation works.  */
static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}
2257
6354dc9b 2258/* Return TRUE if rtx X is a valid immediate FPU constant. */
cce8749e
CH
2259
2260int
2261const_double_rtx_ok_for_fpu (x)
2262 rtx x;
2263{
ff9940b0
RE
2264 REAL_VALUE_TYPE r;
2265 int i;
2266
2267 if (!fpa_consts_inited)
2268 init_fpa_table ();
2269
2270 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2271 if (REAL_VALUE_MINUS_ZERO (r))
2272 return 0;
f3bb6135 2273
ff9940b0
RE
2274 for (i = 0; i < 8; i++)
2275 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2276 return 1;
f3bb6135 2277
ff9940b0 2278 return 0;
f3bb6135 2279}
ff9940b0 2280
/* Return TRUE if rtx X is a valid immediate FPU constant when negated
   (i.e. -X is one of the eight encodable FPA constants, so the
   operation can be rewritten to use the opposite sign).  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  /* -0.0 compares equal to 0 but is not encodable.  */
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
cce8749e
CH
2304\f
2305/* Predicates for `match_operand' and `match_operator'. */
2306
ff9940b0 2307/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
2308 (SUBREG (MEM)...).
2309
2310 This function exists because at the time it was put in it led to better
2311 code. SUBREG(MEM) always needs a reload in the places where
2312 s_register_operand is used, and this seemed to lead to excessive
2313 reloading. */
ff9940b0
RE
2314
2315int
2316s_register_operand (op, mode)
2317 register rtx op;
2318 enum machine_mode mode;
2319{
2320 if (GET_MODE (op) != mode && mode != VOIDmode)
2321 return 0;
2322
2323 if (GET_CODE (op) == SUBREG)
f3bb6135 2324 op = SUBREG_REG (op);
ff9940b0
RE
2325
2326 /* We don't consider registers whose class is NO_REGS
2327 to be a register operand. */
2328 return (GET_CODE (op) == REG
2329 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2330 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2331}
2332
e2c671ba
RE
2333/* Only accept reg, subreg(reg), const_int. */
2334
2335int
2336reg_or_int_operand (op, mode)
2337 register rtx op;
2338 enum machine_mode mode;
2339{
2340 if (GET_CODE (op) == CONST_INT)
2341 return 1;
2342
2343 if (GET_MODE (op) != mode && mode != VOIDmode)
2344 return 0;
2345
2346 if (GET_CODE (op) == SUBREG)
2347 op = SUBREG_REG (op);
2348
2349 /* We don't consider registers whose class is NO_REGS
2350 to be a register operand. */
2351 return (GET_CODE (op) == REG
2352 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2353 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2354}
2355
ff9940b0
RE
2356/* Return 1 if OP is an item in memory, given that we are in reload. */
2357
2358int
2359reload_memory_operand (op, mode)
2360 rtx op;
74bbc178 2361 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2362{
2363 int regno = true_regnum (op);
2364
2365 return (! CONSTANT_P (op)
2366 && (regno == -1
2367 || (GET_CODE (op) == REG
2368 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2369}
2370
4d818c85
RE
2371/* Return 1 if OP is a valid memory address, but not valid for a signed byte
2372 memory access (architecture V4) */
/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4): ldrsb only supports reg+reg and
   reg+small-const addressing, so anything more complex must be
   rejected and legitimized first.  */
int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad.  */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && (! s_register_operand (XEXP (op, 0), VOIDmode)
	  || (! s_register_operand (XEXP (op, 1), VOIDmode)
	      && GET_CODE (XEXP (op, 1)) != CONST_INT)))
    return 1;

  /* Big constants are also bad: ldrsb offsets are limited to 8 bits.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
	  || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so.  */
  return 0;
}
2399
cce8749e
CH
2400/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2401
2402int
2403arm_rhs_operand (op, mode)
2404 rtx op;
2405 enum machine_mode mode;
2406{
ff9940b0 2407 return (s_register_operand (op, mode)
cce8749e 2408 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2409}
cce8749e 2410
ff9940b0
RE
2411/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
2412 */
2413
2414int
2415arm_rhsm_operand (op, mode)
2416 rtx op;
2417 enum machine_mode mode;
2418{
2419 return (s_register_operand (op, mode)
2420 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2421 || memory_operand (op, mode));
f3bb6135 2422}
ff9940b0
RE
2423
2424/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
2425 constant that is valid when negated. */
2426
2427int
2428arm_add_operand (op, mode)
2429 rtx op;
2430 enum machine_mode mode;
2431{
2432 return (s_register_operand (op, mode)
2433 || (GET_CODE (op) == CONST_INT
2434 && (const_ok_for_arm (INTVAL (op))
2435 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2436}
ff9940b0
RE
2437
2438int
2439arm_not_operand (op, mode)
2440 rtx op;
2441 enum machine_mode mode;
2442{
2443 return (s_register_operand (op, mode)
2444 || (GET_CODE (op) == CONST_INT
2445 && (const_ok_for_arm (INTVAL (op))
2446 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 2447}
ff9940b0 2448
5165176d
RE
2449/* Return TRUE if the operand is a memory reference which contains an
2450 offsettable address. */
/* Return TRUE if the operand is a memory reference which contains an
   offsettable address (one to which a small constant can be added
   while remaining valid).  */
int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* The strictness flag is nonzero during/after reload, when only
     actually-valid hard-register addresses may be accepted.  */
  return (mode == GET_MODE (op)
	  && GET_CODE (op) == MEM
	  && offsettable_address_p (reload_completed | reload_in_progress,
				    mode, XEXP (op, 0)));
}
2464
2465/* Return TRUE if the operand is a memory reference which is, or can be
2466 made word aligned by adjusting the offset. */
/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset: its address must be a
   (possibly SUBREG'd) register, or register plus constant, where the
   register is known to be at least 32-bit aligned.  */
int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* The embedded assignments pick out the base register from each
     accepted address shape; REG is only used when the match succeeds.  */
  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
}
2492
b111229a
RE
2493/* Similar to s_register_operand, but does not allow hard integer
2494 registers. */
2495int
2496f_register_operand (op, mode)
2497 register rtx op;
2498 enum machine_mode mode;
2499{
2500 if (GET_MODE (op) != mode && mode != VOIDmode)
2501 return 0;
2502
2503 if (GET_CODE (op) == SUBREG)
2504 op = SUBREG_REG (op);
2505
2506 /* We don't consider registers whose class is NO_REGS
2507 to be a register operand. */
2508 return (GET_CODE (op) == REG
2509 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2510 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2511}
2512
cce8749e
CH
2513/* Return TRUE for valid operands for the rhs of an FPU instruction. */
2514
2515int
2516fpu_rhs_operand (op, mode)
2517 rtx op;
2518 enum machine_mode mode;
2519{
ff9940b0 2520 if (s_register_operand (op, mode))
f3bb6135 2521 return TRUE;
9ce71c6f
BS
2522
2523 if (GET_MODE (op) != mode && mode != VOIDmode)
2524 return FALSE;
2525
2526 if (GET_CODE (op) == CONST_DOUBLE)
2527 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
2528
2529 return FALSE;
2530}
cce8749e 2531
ff9940b0
RE
2532int
2533fpu_add_operand (op, mode)
2534 rtx op;
2535 enum machine_mode mode;
2536{
2537 if (s_register_operand (op, mode))
f3bb6135 2538 return TRUE;
9ce71c6f
BS
2539
2540 if (GET_MODE (op) != mode && mode != VOIDmode)
2541 return FALSE;
2542
2543 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
2544 return (const_double_rtx_ok_for_fpu (op)
2545 || neg_const_double_rtx_ok_for_fpu (op));
2546
2547 return FALSE;
ff9940b0
RE
2548}
2549
cce8749e
CH
2550/* Return nonzero if OP is a constant power of two. */
2551
2552int
2553power_of_two_operand (op, mode)
2554 rtx op;
74bbc178 2555 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
2556{
2557 if (GET_CODE (op) == CONST_INT)
2558 {
f3bb6135
RE
2559 HOST_WIDE_INT value = INTVAL(op);
2560 return value != 0 && (value & (value - 1)) == 0;
cce8749e 2561 }
f3bb6135
RE
2562 return FALSE;
2563}
cce8749e
CH
2564
2565/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 2566 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
2567 Note that this disallows MEM(REG+REG), but allows
2568 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
2569
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */
int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  /* Constants may carry VOIDmode; anything else must be DImode.  */
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
cce8749e 2597
f3139301 2598/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 2599 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
2600 Note that this disallows MEM(REG+REG), but allows
2601 MEM(PRE/POST_INC/DEC(REG)). */
2602
/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */
int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  /* A SUBREG of a constant is not a valid operand here.  */
  if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
2632
6354dc9b 2633/* Return TRUE for valid index operands. */
cce8749e
CH
2634
/* Return TRUE for valid index operands: a register, or an immediate
   in the range (-4096, 4096) exclusive.  */
int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* NOTE(review): INTVAL is applied to whatever immediate_operand
     accepts, which for non-VOID modes can include SYMBOL_REF/CONST as
     well as CONST_INT — verify all callers only pass integer modes
     where this is safe.  */
  return (s_register_operand(op, mode)
	  || (immediate_operand (op, mode)
	      && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}
cce8749e 2644
ff9940b0
RE
2645/* Return TRUE for valid shifts by a constant. This also accepts any
2646 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 2647 shift operator in this case was a mult. */
ff9940b0
RE
2648
2649int
2650const_shift_operand (op, mode)
2651 rtx op;
2652 enum machine_mode mode;
2653{
2654 return (power_of_two_operand (op, mode)
2655 || (immediate_operand (op, mode)
2656 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
f3bb6135 2657}
ff9940b0 2658
cce8749e
CH
2659/* Return TRUE for arithmetic operators which can be combined with a multiply
2660 (shift). */
2661
2662int
2663shiftable_operator (x, mode)
2664 rtx x;
2665 enum machine_mode mode;
2666{
2667 if (GET_MODE (x) != mode)
2668 return FALSE;
2669 else
2670 {
2671 enum rtx_code code = GET_CODE (x);
2672
2673 return (code == PLUS || code == MINUS
2674 || code == IOR || code == XOR || code == AND);
2675 }
f3bb6135 2676}
cce8749e 2677
6ab589e0
JL
2678/* Return TRUE for binary logical operators. */
2679
2680int
2681logical_binary_operator (x, mode)
2682 rtx x;
2683 enum machine_mode mode;
2684{
2685 if (GET_MODE (x) != mode)
2686 return FALSE;
2687 else
2688 {
2689 enum rtx_code code = GET_CODE (x);
2690
2691 return (code == IOR || code == XOR || code == AND);
2692 }
2693}
2694
6354dc9b 2695/* Return TRUE for shift operators. */
cce8749e
CH
2696
2697int
2698shift_operator (x, mode)
2699 rtx x;
2700 enum machine_mode mode;
2701{
2702 if (GET_MODE (x) != mode)
2703 return FALSE;
2704 else
2705 {
2706 enum rtx_code code = GET_CODE (x);
2707
ff9940b0 2708 if (code == MULT)
aec3cfba 2709 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 2710
e2c671ba
RE
2711 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2712 || code == ROTATERT);
cce8749e 2713 }
f3bb6135 2714}
ff9940b0 2715
6354dc9b
NC
2716/* Return TRUE if x is EQ or NE. */
2717int
2718equality_operator (x, mode)
f3bb6135 2719 rtx x;
74bbc178 2720 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2721{
f3bb6135 2722 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
2723}
2724
6354dc9b 2725/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
ff9940b0
RE
2726int
2727minmax_operator (x, mode)
2728 rtx x;
2729 enum machine_mode mode;
2730{
2731 enum rtx_code code = GET_CODE (x);
2732
2733 if (GET_MODE (x) != mode)
2734 return FALSE;
f3bb6135 2735
ff9940b0 2736 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 2737}
ff9940b0 2738
ff9940b0 2739/* Return TRUE if this is the condition code register, if we aren't given
6354dc9b 2740 a mode, accept any class CCmode register. */
ff9940b0
RE
/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */
int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  /* NOTE(review): 24 is presumably the hard register number of the CC
     register on this target — confirm against the register definitions
     and consider a named macro.  */
  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
5bbe2d40
RE
2758
2759/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
2760 a mode, accept any class CCmode register which indicates a dominance
2761 expression. */
/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression (the CC_D* modes produced when combining two comparisons).  */
int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  /* Only the dominance CC modes qualify.  */
  if (mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  /* NOTE(review): 24 is presumably the CC hard register number —
     confirm, as in cc_register above.  */
  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
2786
2b835d68
RE
2787/* Return TRUE if X references a SYMBOL_REF. */
2788int
2789symbol_mentioned_p (x)
2790 rtx x;
2791{
6f7d635c 2792 register const char * fmt;
2b835d68
RE
2793 register int i;
2794
2795 if (GET_CODE (x) == SYMBOL_REF)
2796 return 1;
2797
2798 fmt = GET_RTX_FORMAT (GET_CODE (x));
2799 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2800 {
2801 if (fmt[i] == 'E')
2802 {
2803 register int j;
2804
2805 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2806 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2807 return 1;
2808 }
2809 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2810 return 1;
2811 }
2812
2813 return 0;
2814}
2815
2816/* Return TRUE if X references a LABEL_REF. */
2817int
2818label_mentioned_p (x)
2819 rtx x;
2820{
6f7d635c 2821 register const char * fmt;
2b835d68
RE
2822 register int i;
2823
2824 if (GET_CODE (x) == LABEL_REF)
2825 return 1;
2826
2827 fmt = GET_RTX_FORMAT (GET_CODE (x));
2828 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2829 {
2830 if (fmt[i] == 'E')
2831 {
2832 register int j;
2833
2834 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2835 if (label_mentioned_p (XVECEXP (x, i, j)))
2836 return 1;
2837 }
2838 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2839 return 1;
2840 }
2841
2842 return 0;
2843}
2844
ff9940b0
RE
2845enum rtx_code
2846minmax_code (x)
f3bb6135 2847 rtx x;
ff9940b0
RE
2848{
2849 enum rtx_code code = GET_CODE (x);
2850
2851 if (code == SMAX)
2852 return GE;
f3bb6135 2853 else if (code == SMIN)
ff9940b0 2854 return LE;
f3bb6135 2855 else if (code == UMIN)
ff9940b0 2856 return LEU;
f3bb6135 2857 else if (code == UMAX)
ff9940b0 2858 return GEU;
f3bb6135 2859
ff9940b0
RE
2860 abort ();
2861}
2862
6354dc9b 2863/* Return 1 if memory locations are adjacent. */
/* Return 1 if memory locations A and B are adjacent words: both are
   REG or REG+CONST addresses off the same base register, and their
   offsets differ by exactly 4 (in either order).  */
int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      /* Extract base register and offset for each address; a bare REG
	 has offset 0.  */
      if (GET_CODE (XEXP (a, 0)) == PLUS)
        {
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
        }
      else
	reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
        {
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
        }
      else
	reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
2896
2897/* Return 1 if OP is a load multiple operation. It is known to be
6354dc9b 2898 parallel and the first section will be tested. */
ff9940b0 2899
/* Return 1 if OP is a load multiple operation (suitable for an ldm).
   It is known to be a PARALLEL and the first section will be tested:
   each element must be a SET of consecutive registers from consecutive
   word offsets off a common base address, optionally preceded by a
   base-register write-back SET and trailed by a matching CLOBBER.  */
int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	      != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  /* First load establishes the starting register and base address.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  /* Each subsequent element must load the next register from the next
     word (base + (i - base) * 4).  */
  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
2965
2966/* Return 1 if OP is a store multiple operation. It is known to be
6354dc9b 2967 parallel and the first section will be tested. */
/* Return 1 if OP is a store multiple operation (suitable for an stm).
   It is known to be a PARALLEL and the first section will be tested;
   the mirror image of load_multiple_operation: consecutive registers
   stored to consecutive word offsets off a common base address, with
   an optional write-back SET and trailing CLOBBER.  */
int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	      != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  /* First store establishes the starting register and base address.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  /* Each subsequent element must store the next register to the next
     word (base + (i - base) * 4).  */
  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
e2c671ba 3033
/* Analyse the NOPS register loads described by OPERANDS (destination
   registers in operands[0..nops-1], memory sources in
   operands[nops..2*nops-1]) and decide whether they can be merged into
   a single load-multiple instruction.  Returns 0 if they cannot,
   otherwise a code selecting the addressing mode:
     1 = ldmia, 2 = ldmib, 3 = ldmda, 4 = ldmdb,
     5 = add/sub the offset into the lowest destination register first,
	 then ldmia from it.
   If BASE is non-null, *BASE, REGS[0..nops-1] and *LOAD_OFFSET are
   filled in with the base register number, the destination registers
   sorted into ascending order, and the lowest memory offset.  */
int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* NB: the embedded assignments capture the base register (REG) and
	 the constant offset while the address shape is being checked.
	 Accepted shapes: (reg), (subreg reg), or (plus reg const_int).  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO(reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != (int) REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      /* order[0] tracks the lowest-numbered destination register.  */
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, or if it overwrites the
	     base register but isn't the last insn in the list, then
	     we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort: find the smallest register strictly greater
	 than the previous one; order[i] == order[i - 1] acts as the
	 "not found yet" sentinel.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
     if the offset isn't small enough.  The reason 2 ldrs are faster
     is because these ARMs are able to do more than one cache access
     in a single cycle.  The ARM9 and StrongARM have Harvard caches,
     whilst the ARM8 has a double bandwidth cache.  This means that
     these cores can do both an instruction fetch and a data fetch in
     a single cycle, so the trick of calculating the address into a
     scratch register (one of the result regs) and then doing a load
     multiple actually becomes slower (and no smaller in code size).
     That is the transformation

	ldr	rd1, [rbase + offset]
	ldr	rd2, [rbase + offset + 4]

     to

	add	rd1, rbase, offset
	ldmia	rd1, {rd1, rd2}

     produces worse code -- '3 cycles + any stalls on rd2' instead of
     '2 cycles + any stalls on rd2'.  On ARMs with only one cache
     access per cycle, the first sequence could never complete in less
     than 6 cycles, whereas the ldm sequence would only take 5 and
     would make better use of sequential accesses if not hitting the
     cache.

     We cheat here and test 'arm_ld_sched' which we currently know to
     only be true for the ARM8, ARM9 and StrongARM.  If this ever
     changes, then the test below needs to be reworked.  */
  if (nops == 2 && arm_ld_sched)
    return 0;

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
3208
/* Emit the assembler text for the load-multiple described by the NOPS
   loads in OPERANDS (see load_multiple_sequence for the layout).
   Returns "" so it can be used directly from an output template.  The
   caller must already have checked that load_multiple_sequence accepts
   the operands; an unrecognized sequence aborts.  */
char *
emit_ldm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  /* Classify the sequence; the code selects the ldm addressing mode.  */
  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      /* Fold the offset into the lowest destination register with an
	 add/sub, then load-multiple from that register.  */
      if (offset >= 0)
	sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) offset);
      else
	sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  /* Append the base register and the sorted register list.  */
  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
3268
/* Analyse the NOPS register stores described by OPERANDS (source
   registers in operands[0..nops-1], memory destinations in
   operands[nops..2*nops-1]) and decide whether they can be merged into
   a single store-multiple instruction.  Returns 0 if they cannot,
   otherwise a code selecting the addressing mode:
     1 = stmia, 2 = stmib, 3 = stmda, 4 = stmdb.
   If BASE is non-null, *BASE, REGS[0..nops-1] and *LOAD_OFFSET are
   filled in with the base register number, the source registers sorted
   into ascending order, and the lowest memory offset.  */
int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* NB: the embedded assignments capture the base register (REG) and
	 the constant offset while the address shape is being checked.
	 Accepted shapes: (reg), (subreg reg), or (plus reg const_int).  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != (int) REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      /* order[0] tracks the lowest-numbered source register.  */
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, then we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort: find the smallest register strictly greater
	 than the previous one; order[i] == order[i - 1] acts as the
	 "not found yet" sentinel.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}
3405
/* Emit the assembler text for the store-multiple described by the NOPS
   stores in OPERANDS (see store_multiple_sequence for the layout).
   Returns "" so it can be used directly from an output template.  The
   caller must already have checked that store_multiple_sequence accepts
   the operands; an unrecognized sequence aborts.  */
char *
emit_stm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  /* Classify the sequence; the code selects the stm addressing mode.  */
  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      abort ();
    }

  /* Append the base register and the sorted register list.  */
  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}
3451
e2c671ba
RE
3452int
3453multi_register_push (op, mode)
0a81f500 3454 rtx op;
74bbc178 3455 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3456{
3457 if (GET_CODE (op) != PARALLEL
3458 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3459 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3460 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3461 return 0;
3462
3463 return 1;
3464}
3465
ff9940b0 3466\f
d7d01975 3467/* Routines for use with attributes. */
f3bb6135 3468
31fdb4d5 3469/* Return nonzero if ATTR is a valid attribute for DECL.
d7d01975
NC
3470 ATTRIBUTES are any existing attributes and ARGS are
3471 the arguments supplied with ATTR.
31fdb4d5
DE
3472
3473 Supported attributes:
3474
d7d01975
NC
3475 naked: don't output any prologue or epilogue code,
3476 the user is assumed to do the right thing.
3477*/
31fdb4d5 3478int
74bbc178 3479arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3480 tree decl;
31fdb4d5
DE
3481 tree attr;
3482 tree args;
3483{
3484 if (args != NULL_TREE)
3485 return 0;
3486
3487 if (is_attribute_p ("naked", attr))
3488 return TREE_CODE (decl) == FUNCTION_DECL;
3489 return 0;
3490}
3491
3492/* Return non-zero if FUNC is a naked function. */
3493
3494static int
3495arm_naked_function_p (func)
3496 tree func;
3497{
3498 tree a;
3499
3500 if (TREE_CODE (func) != FUNCTION_DECL)
3501 abort ();
2e943e99 3502
31fdb4d5
DE
3503 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3504 return a != NULL_TREE;
3505}
f3bb6135 3506\f
6354dc9b 3507/* Routines for use in generating RTL. */
f3bb6135 3508rtx
56636818 3509arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3510 in_struct_p, scalar_p)
ff9940b0
RE
3511 int base_regno;
3512 int count;
3513 rtx from;
3514 int up;
3515 int write_back;
56636818
JL
3516 int unchanging_p;
3517 int in_struct_p;
c6df88cb 3518 int scalar_p;
ff9940b0
RE
3519{
3520 int i = 0, j;
3521 rtx result;
3522 int sign = up ? 1 : -1;
56636818 3523 rtx mem;
ff9940b0 3524
43cffd11
RE
3525 result = gen_rtx_PARALLEL (VOIDmode,
3526 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3527 if (write_back)
f3bb6135 3528 {
ff9940b0 3529 XVECEXP (result, 0, 0)
43cffd11
RE
3530 = gen_rtx_SET (GET_MODE (from), from,
3531 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3532 i = 1;
3533 count++;
f3bb6135
RE
3534 }
3535
ff9940b0 3536 for (j = 0; i < count; i++, j++)
f3bb6135 3537 {
43cffd11 3538 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3539 RTX_UNCHANGING_P (mem) = unchanging_p;
3540 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3541 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3542 XVECEXP (result, 0, i)
3543 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3544 }
3545
ff9940b0 3546 if (write_back)
43cffd11 3547 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, from);
ff9940b0
RE
3548
3549 return result;
3550}
3551
f3bb6135 3552rtx
56636818 3553arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3554 in_struct_p, scalar_p)
ff9940b0
RE
3555 int base_regno;
3556 int count;
3557 rtx to;
3558 int up;
3559 int write_back;
56636818
JL
3560 int unchanging_p;
3561 int in_struct_p;
c6df88cb 3562 int scalar_p;
ff9940b0
RE
3563{
3564 int i = 0, j;
3565 rtx result;
3566 int sign = up ? 1 : -1;
56636818 3567 rtx mem;
ff9940b0 3568
43cffd11
RE
3569 result = gen_rtx_PARALLEL (VOIDmode,
3570 rtvec_alloc (count + (write_back ? 2 : 0)));
ff9940b0 3571 if (write_back)
f3bb6135 3572 {
ff9940b0 3573 XVECEXP (result, 0, 0)
43cffd11
RE
3574 = gen_rtx_SET (GET_MODE (to), to,
3575 plus_constant (to, count * 4 * sign));
ff9940b0
RE
3576 i = 1;
3577 count++;
f3bb6135
RE
3578 }
3579
ff9940b0 3580 for (j = 0; i < count; i++, j++)
f3bb6135 3581 {
43cffd11 3582 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
3583 RTX_UNCHANGING_P (mem) = unchanging_p;
3584 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3585 MEM_SCALAR_P (mem) = scalar_p;
56636818 3586
43cffd11
RE
3587 XVECEXP (result, 0, i)
3588 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
3589 }
3590
ff9940b0 3591 if (write_back)
43cffd11 3592 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, to);
ff9940b0
RE
3593
3594 return result;
3595}
3596
/* Expand a block move (movstrqi): OPERANDS[0]/[1] are the destination
   and source MEMs, OPERANDS[2] the byte count and OPERANDS[3] the
   alignment.  Returns 1 on success and 0 if the operands are not
   suitable (non-constant count, more than 64 bytes, or alignment not a
   multiple of 4).  Whole words are moved with load/store-multiples
   through registers r0-r3; any trailing 1-3 bytes are stored a byte at
   a time.  */
int
arm_gen_movstrqi (operands)
     rtx * operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;	/* Holds the word containing trailing bytes.  */
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
  int dst_scalar_p, src_scalar_p;

  /* Only handle small, word-aligned, constant-length copies.  */
  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  /* Remember the memory attributes so they can be copied onto every
     MEM generated below.  */
  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  dst_scalar_p = MEM_SCALAR_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
  src_scalar_p = MEM_SCALAR_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;	/* Words to load.  */
  out_words_to_go = INTVAL (operands[2]) / 4;		/* Words to store.  */
  last_bytes = INTVAL (operands[2]) & 3;		/* Trailing bytes.  */

  /* If the count is not an exact number of words, the final (partial)
     word lands in the register after the last full word's register.  */
  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);

  /* Move up to four words per iteration through r0-r3, with write-back
     while more remains.  */
  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  src_unchanging_p,
					  src_in_struct_p,
					  src_scalar_p));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
					  FALSE, src_unchanging_p,
					  src_in_struct_p, src_scalar_p));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE,
					       (last_bytes == 0
						? FALSE : TRUE),
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else
	    {
	      /* A single word: use a plain store, advancing dst by hand
		 if trailing bytes still need to be written.  */
	      mem = gen_rtx_MEM (SImode, dst);
	      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	      MEM_SCALAR_P (mem) = dst_scalar_p;
	      emit_move_insn (mem, gen_rtx_REG (SImode, 0));
	      if (last_bytes != 0)
		emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      /* Move the one remaining full word through a fresh pseudo.  */
      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx_MEM (SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      MEM_SCALAR_P (mem) = dst_scalar_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check */
	abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      /* Load the word containing the trailing bytes.  */
      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
	abort ();

      /* The bytes we want are in the top end of the word.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      /* Store bytes from the high address downwards, shifting the next
	 byte into the bottom each time.  */
      while (last_bytes)
	{
	  mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));

	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}

    }
  else
    {
      /* Little-endian: store bytes from the low address upwards,
	 advancing dst and shifting the register down each time.  */
      while (last_bytes)
	{
	  if (part_bytes_reg == NULL)
	    abort ();

	  mem = gen_rtx_MEM (QImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));

	  if (--last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (dst, dst, const1_rtx));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }

  return 1;
}
3770
/* Generate a memory reference for a half word, such that it will be loaded
   into the top 16 bits of the word.  We can assume that the address is
   known to be alignable and of the form reg, or plus (reg, const).
   Returns NULL if an unaligned word access would be required but the
   target traps on them (TARGET_MMU_TRAPS).  */
rtx
gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  /* Split a reg+const address into its components.  */
  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail.  */
  if (TARGET_MMU_TRAPS
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  /* Reference the whole word containing the half word (bit 1 cleared).  */
  base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));

  /* If the half word already sits in the top half, no rotate is needed.  */
  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  /* Otherwise rotate by 16 to bring it to the top.  */
  return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
}
3799
/* Select the condition-code mode for a combined pair of comparisons X
   and Y (as used by conditional-compare patterns).  COND_OR is non-zero
   when either condition may be true (OR), zero when both must be (AND).
   Returns one of the CC_D* "dominance" modes, or CCmode if the
   combination cannot be handled.  */
static enum machine_mode
select_dominance_cc_mode (x, y, cond_or)
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
	  != CCmode))
    return CCmode;

  /* For an OR, reverse the first condition so both cases can be
     handled as "both must hold" below.  */
  if (cond_or)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this.  NB: the comma expression sets SWAPPED as a
     side effect before trying domination the other way round.  */
  if (cond1 != cond2
      && ! comparison_dominates_p (cond1, cond2)
      && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  /* COND1 now dominates COND2; pick the mode encoding the pair.  */
  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || ! cond_or)
	return CC_DEQmode;

      switch (cond2)
	{
	case LE: return CC_DLEmode;
	case LEU: return CC_DLEUmode;
	case GE: return CC_DGEmode;
	case GEU: return CC_DGEUmode;
	default: break;
	}

      break;

    case LT:
      if (cond2 == LT || ! cond_or)
	return CC_DLTmode;
      if (cond2 == LE)
	return CC_DLEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || ! cond_or)
	return CC_DGTmode;
      if (cond2 == GE)
	return CC_DGEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || ! cond_or)
	return CC_DLTUmode;
      if (cond2 == LEU)
	return CC_DLEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || ! cond_or)
	return CC_DGTUmode;
      if (cond2 == GEU)
	return CC_DGEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same.  */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}
3911
/* Choose the machine mode of the condition-code register needed for a
   comparison of X with Y using operator OP (implements
   SELECT_CC_MODE).  Falls back to plain CCmode when no special mode
   applies.  */
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* An operation that sets the condition codes as a side-effect, the
     V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.)  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
				     INTVAL (XEXP (x, 2)));

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  /* An unsigned comparison of a sum against one of its addends tests
     the carry flag.  */
  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
3984
ff9940b0
RE
3985/* X and Y are two things to compare using CODE. Emit the compare insn and
3986 return the rtx for register 0 in the proper mode. FP means this is a
3987 floating point compare: I don't think that it is needed on the arm. */
3988
3989rtx
74bbc178 3990gen_compare_reg (code, x, y)
ff9940b0
RE
3991 enum rtx_code code;
3992 rtx x, y;
3993{
3994 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
43cffd11 3995 rtx cc_reg = gen_rtx_REG (mode, 24);
ff9940b0 3996
43cffd11
RE
3997 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
3998 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
3999
4000 return cc_reg;
4001}
4002
0a81f500
RE
/* Handle loading a half-word from memory during reload, by synthesising
   it as two QImode loads plus a shift/OR merge.  OPERANDS[0] is the
   destination register, OPERANDS[1] the memory source (possibly a spilt
   pseudo or a SUBREG of one), and OPERANDS[2] a DImode scratch register
   pair that may be clobbered freely.  */
void
arm_reload_in_hi (operands)
     rtx * operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      /* Convert the SUBREG word offset to a byte offset, adjusting for
	 big-endian layout of sub-word values.  */
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
		   - MIN (UNITS_PER_WORD,
			  GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      /* Materialise the whole address into the second scratch register.  */
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend (offset - lo) from 32 bits, portably even when
	 HOST_WIDE_INT is wider than 32 bits.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFFUL)
	     ^ (HOST_WIDE_INT) 0x80000000UL)
	    - (HOST_WIDE_INT) 0x80000000UL);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Load the two bytes (byte at OFFSET into the scratch, byte at
     OFFSET + 1 into the destination), then merge them with a
     shift-by-8 and OR, the direction depending on endianness.  */
  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  if (! BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			gen_rtx_IOR (SImode,
				     gen_rtx_ASHIFT
				     (SImode,
				      gen_rtx_SUBREG (SImode, operands[0], 0),
				      GEN_INT (8)),
				     scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT (SImode, scratch,
							 GEN_INT (8)),
					 gen_rtx_SUBREG (SImode, operands[0],
							 0))));
}
4112
f9cc092a
RE
/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */
void
arm_reload_out_hi (operands)
     rtx * operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      /* Convert the SUBREG word offset to a byte offset, adjusting for
	 big-endian layout of sub-word values.  */
      offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
		   - MIN (UNITS_PER_WORD,
			  GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
      ref = SUBREG_REG (ref);
    }


  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Updating base_plus might destroy outval, see if we can
	     swap the scratch and base_plus.  */
	  if (! reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      /* Be conservative and copy OUTVAL into the scratch now,
		 this should only be necessary if outval is a subreg
		 of something larger than a word.  */
	      /* XXX Might this clobber base?  I can't see how it can,
		 since scratch is known to overlap with OUTVAL, and
		 must be wider than a word.  */
	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      /* Sign-extend (offset - lo) from 32 bits, portably even when
	 HOST_WIDE_INT is wider than 32 bits.  */
      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xFFFFFFFFUL)
	     ^ (HOST_WIDE_INT) 0x80000000UL)
	    - (HOST_WIDE_INT) 0x80000000UL);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Be careful not to destroy OUTVAL.  */
	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Updating base_plus might destroy outval, see if we
		 can swap the scratch and base_plus.  */
	      if (! reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  /* Be conservative and copy outval into scratch now,
		     this should only be necessary if outval is a
		     subreg of something larger than a word.  */
		  /* XXX Might this clobber base?  I can't see how it
		     can, since scratch is known to overlap with
		     outval.  */
		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  /* Store the two bytes: low byte of OUTVAL first (or second on
     big-endian), then OUTVAL shifted right by 8 via the scratch.  */
  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_rtx_SUBREG (QImode, scratch, 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_rtx_SUBREG (QImode, outval, 0)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_rtx_SUBREG (QImode, scratch, 0)));
    }
}
2b835d68
RE
4282\f
4283/* Routines for manipulation of the constant pool. */
2b835d68 4284
949d79eb
RE
4285/* Arm instructions cannot load a large constant directly into a
4286 register; they have to come from a pc relative load. The constant
4287 must therefore be placed in the addressable range of the pc
4288 relative load. Depending on the precise pc relative load
4289 instruction the range is somewhere between 256 bytes and 4k. This
4290 means that we often have to dump a constant inside a function, and
2b835d68
RE
4291 generate code to branch around it.
4292
949d79eb
RE
4293 It is important to minimize this, since the branches will slow
4294 things down and make the code larger.
2b835d68 4295
949d79eb
RE
4296 Normally we can hide the table after an existing unconditional
4297 branch so that there is no interruption of the flow, but in the
4298 worst case the code looks like this:
2b835d68
RE
4299
4300 ldr rn, L1
949d79eb 4301 ...
2b835d68
RE
4302 b L2
4303 align
4304 L1: .long value
4305 L2:
949d79eb 4306 ...
2b835d68 4307
2b835d68 4308 ldr rn, L3
949d79eb 4309 ...
2b835d68
RE
4310 b L4
4311 align
2b835d68
RE
4312 L3: .long value
4313 L4:
949d79eb
RE
4314 ...
4315
4316 We fix this by performing a scan after scheduling, which notices
4317 which instructions need to have their operands fetched from the
4318 constant table and builds the table.
4319
4320 The algorithm starts by building a table of all the constants that
4321 need fixing up and all the natural barriers in the function (places
4322 where a constant table can be dropped without breaking the flow).
4323 For each fixup we note how far the pc-relative replacement will be
4324 able to reach and the offset of the instruction into the function.
4325
4326 Having built the table we then group the fixes together to form
4327 tables that are as large as possible (subject to addressing
4328 constraints) and emit each table of constants after the last
4329 barrier that is within range of all the instructions in the group.
4330 If a group does not contain a barrier, then we forcibly create one
4331 by inserting a jump instruction into the flow. Once the table has
4332 been inserted, the insns are then modified to reference the
4333 relevant entry in the pool.
4334
6354dc9b 4335 Possible enhancements to the algorithm (not implemented) are:
949d79eb 4336
6354dc9b 4337 1) ARM instructions (but not Thumb) can use negative offsets, so we
949d79eb
RE
4338 could reference back to a previous pool rather than forwards to a
4339 new one. For large functions this may reduce the number of pools
4340 required.
4341
4342 2) For some processors and object formats, there may be benefit in
4343 aligning the pools to the start of cache lines; this alignment
4344 would need to be taken into account when calculating addressability
6354dc9b 4345 of a pool. */
2b835d68
RE
4346
/* One entry in a minipool.  NEXT_OFFSET holds the running total of the
   pool's size up to and including this entry, i.e. the offset at which
   the NEXT entry will start.  */
typedef struct
{
  rtx value;			/* Value in table */
  HOST_WIDE_INT next_offset;
  enum machine_mode mode;	/* Mode of value */
} minipool_node;

/* The maximum number of constants that can fit into one pool, since
   the pc relative range is 0...4092 bytes and constants are at least 4
   bytes long.  */

#define MAX_MINIPOOL_SIZE (4092/4)
/* The pool currently being accumulated, its entry count, and the label
   that will mark its start in the output.  */
static minipool_node minipool_vector[MAX_MINIPOOL_SIZE];
static int minipool_size;
static rtx minipool_vector_label;
2b835d68 4361
332072db
RE
/* Add a constant to the pool and return its offset within the current
   pool.

   X is the rtx we want to replace.  MODE is its mode.  */
static HOST_WIDE_INT
add_minipool_constant (x, mode)
     rtx x;
     enum machine_mode mode;
{
  int i;
  HOST_WIDE_INT offset;
  
  /* First, see if we've already got it.  */
  for (i = 0; i < minipool_size; i++)
    {
      if (GET_CODE (x) == minipool_vector[i].value->code
	  && mode == minipool_vector[i].mode)
	{
	  /* Labels must additionally match on field 3 before
	     rtx_equal_p is consulted.  NOTE(review): presumably that
	     field is the unique label number — confirm against rtl.def.  */
	  if (GET_CODE (x) == CODE_LABEL)
	    {
	      if (XINT (x, 3) != XINT (minipool_vector[i].value, 3))
		continue;
	    }
	  /* NEXT_OFFSET is the end of this entry, so subtract the
	     entry's own size to get its start offset.  */
	  if (rtx_equal_p (x, minipool_vector[i].value))
	    return minipool_vector[i].next_offset - GET_MODE_SIZE (mode);
	}
    }

  /* Need a new one.  */
  /* NOTE(review): there is no bounds check against MAX_MINIPOOL_SIZE
     here; the range grouping in arm_reorg presumably keeps pools small
     enough — confirm.  */
  minipool_vector[minipool_size].next_offset = GET_MODE_SIZE (mode);
  offset = 0;
  /* Create the pool's label lazily, on first use.  */
  if (minipool_size == 0)
    minipool_vector_label = gen_label_rtx ();
  else
    /* Accumulate: this entry starts where the previous one ended.  */
    minipool_vector[minipool_size].next_offset
      += (offset = minipool_vector[minipool_size - 1].next_offset);

  minipool_vector[minipool_size].value = x;
  minipool_vector[minipool_size].mode = mode;
  minipool_size++;

  return offset;
}
4406
6354dc9b 4407/* Output the literal table. */
2b835d68 4408static void
949d79eb 4409dump_minipool (scan)
2b835d68
RE
4410 rtx scan;
4411{
4412 int i;
4413
4414 scan = emit_label_after (gen_label_rtx (), scan);
4415 scan = emit_insn_after (gen_align_4 (), scan);
949d79eb 4416 scan = emit_label_after (minipool_vector_label, scan);
2b835d68 4417
949d79eb 4418 for (i = 0; i < minipool_size; i++)
2b835d68 4419 {
949d79eb 4420 minipool_node *p = minipool_vector + i;
2b835d68
RE
4421
4422 switch (GET_MODE_SIZE (p->mode))
4423 {
4424 case 4:
4425 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
4426 break;
4427
4428 case 8:
4429 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
4430 break;
4431
4432 default:
4433 abort ();
4434 break;
4435 }
4436 }
4437
4438 scan = emit_insn_after (gen_consttable_end (), scan);
4439 scan = emit_barrier_after (scan);
949d79eb 4440 minipool_size = 0;
2b835d68
RE
4441}
4442
949d79eb
RE
/* Find the last barrier less than MAX_COUNT bytes from FROM, or
   create one.  */
static rtx
find_barrier (from, max_count)
     rtx from;
     int max_count;
{
  int count = 0;
  rtx found_barrier = 0;
  rtx last = from;

  while (from && count < max_count)
    {
      rtx tmp;
      
      if (GET_CODE (from) == BARRIER)
	found_barrier = from;

      /* Count the length of this insn.  */
      /* A jump immediately followed by its dispatch table counts as
	 the jump plus the whole table; skip over the table too.  */
      if (GET_CODE (from) == JUMP_INSN
	  && JUMP_LABEL (from) != 0
	  && ((tmp = next_real_insn (JUMP_LABEL (from)))
	      == next_real_insn (from))
	  && tmp != NULL
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  int elt = GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC ? 1 : 0;
	  count += (get_attr_length (from)
		    + GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (tmp), elt));
	  /* Continue after the dispatch table.  */
	  last = from;
	  from = NEXT_INSN (tmp);
	  continue;
	}
      else
	count += get_attr_length (from);

      last = from;
      from = NEXT_INSN (from);
    }

  if (! found_barrier)
    {
      /* We didn't find a barrier in time to
	 dump our stuff, so we'll make one.  */
      rtx label = gen_label_rtx ();
      
      if (from)
	from = PREV_INSN (last);
      else
	from = get_last_insn ();
      
      /* Walk back to be just before any jump.  */
      while (GET_CODE (from) == JUMP_INSN
	     || GET_CODE (from) == NOTE
	     || GET_CODE (from) == CODE_LABEL)
	from = PREV_INSN (from);
      
      /* Emit an unconditional jump around the spot where the pool will
	 go; the barrier after the jump is where the pool is dumped.  */
      from = emit_jump_insn_after (gen_jump (label), from);
      JUMP_LABEL (from) = label;
      found_barrier = emit_barrier_after (from);
      emit_label_after (label, found_barrier);
    }

  return found_barrier;
}
4511
949d79eb
RE
/* One pending fixup: either an insn whose operand must be rewritten to
   load from a minipool, or a barrier recorded as a candidate placement
   point (barrier entries use only INSN and ADDRESS).  */
struct minipool_fixup
{
  struct minipool_fixup *next;	/* Next fixup, in address order.  */
  rtx insn;			/* The insn (or barrier) concerned.  */
  int address;			/* Byte offset of INSN in the function.  */
  rtx *loc;			/* Location of the operand to patch.  */
  enum machine_mode mode;	/* Mode of the constant.  */
  rtx value;			/* The constant itself.  */
  int range;			/* Reach of the insn's pc-relative load.  */
};

/* Head and tail of the fixup chain built during the scan in arm_reorg.  */
struct minipool_fixup *minipool_fix_head;
struct minipool_fixup *minipool_fix_tail;
4525
4526static void
4527push_minipool_barrier (insn, address)
2b835d68 4528 rtx insn;
949d79eb 4529 int address;
2b835d68 4530{
949d79eb
RE
4531 struct minipool_fixup *fix
4532 = (struct minipool_fixup *) oballoc (sizeof (struct minipool_fixup));
ad076f4e 4533
949d79eb
RE
4534 fix->insn = insn;
4535 fix->address = address;
2b835d68 4536
949d79eb
RE
4537 fix->next = NULL;
4538 if (minipool_fix_head != NULL)
4539 minipool_fix_tail->next = fix;
4540 else
4541 minipool_fix_head = fix;
4542
4543 minipool_fix_tail = fix;
4544}
2b835d68 4545
949d79eb
RE
4546static void
4547push_minipool_fix (insn, address, loc, mode, value)
4548 rtx insn;
4549 int address;
4550 rtx *loc;
4551 enum machine_mode mode;
4552 rtx value;
4553{
4554 struct minipool_fixup *fix
4555 = (struct minipool_fixup *) oballoc (sizeof (struct minipool_fixup));
4556
4557#ifdef AOF_ASSEMBLER
4558 /* PIC symbol refereneces need to be converted into offsets into the
4559 based area. */
4560 if (flag_pic && GET_MODE == SYMBOL_REF)
4561 value = aof_pic_entry (value);
4562#endif /* AOF_ASSEMBLER */
4563
4564 fix->insn = insn;
4565 fix->address = address;
4566 fix->loc = loc;
4567 fix->mode = mode;
4568 fix->value = value;
4569 fix->range = get_attr_pool_range (insn);
4570
4571 /* If an insn doesn't have a range defined for it, then it isn't
4572 expecting to be reworked by this code. Better to abort now than
4573 to generate duff assembly code. */
4574 if (fix->range == 0)
4575 abort ();
4576
6354dc9b 4577 /* Add it to the chain of fixes. */
949d79eb
RE
4578 fix->next = NULL;
4579 if (minipool_fix_head != NULL)
4580 minipool_fix_tail->next = fix;
4581 else
4582 minipool_fix_head = fix;
4583
4584 minipool_fix_tail = fix;
4585}
4586
/* Scan INSN (whose first byte is at offset ADDRESS within the
   function) for operands that cannot remain as inline constants and
   will have to be loaded from a minipool; record a fixup for each.  */
static void
note_invalid_constants (insn, address)
     rtx insn;
     int address;
{
  int opno;

  /* Extract the operands of the insn.  */
  extract_insn(insn);

  /* Find the alternative selected.  */
  if (! constrain_operands (1))
    fatal_insn_not_found (insn);

  /* Preprocess the constraints, to extract some useful information.  */
  preprocess_constraints ();

  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things we need to fix can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
	continue;

      /* If this alternative is a memory reference, then any mention
	 of constants in this alternative is really to fool reload
	 into allowing us to accept one there.  We need to fix them up
	 now so that we output the right code.  */
      if (recog_op_alt[opno][which_alternative].memory_ok)
	{
	  rtx op = recog_data.operand[opno];

	  if (CONSTANT_P (op))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno], op);
#ifndef AOF_ASSEMBLER
	  /* NOTE(review): unspec number 3 is matched literally here —
	     presumably the PIC symbol-reference wrapper; confirm
	     against the UNSPEC definitions in arm.md.  */
	  else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       XVECEXP (op, 0, 0));
#endif
	  /* A reference into the function's own constant pool must be
	     replaced by a reference into a minipool instead.  */
	  else if (recog_data.operand_mode[opno] == SImode
		   && GET_CODE (op) == MEM
		   && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       get_pool_constant (XEXP (op, 0)));
	}
    }
}
4637
/* Machine-dependent reorg pass, run after scheduling: scan the whole
   function starting at FIRST, record every insn operand that must be
   fetched from a constant pool, group the constants into minipools,
   and dump each pool after a barrier within range of all its users
   (creating a barrier when none exists).  See the block comment above
   the minipool code for the full algorithm.  */
void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  int address = 0;
  struct minipool_fixup *fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  if (GET_CODE (first) != NOTE)
    abort ();

  /* Scan all the insns and record the operands that will need fixing.  */
  for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
    {

      if (GET_CODE (insn) == BARRIER)
	push_minipool_barrier(insn, address);
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  rtx table;

	  note_invalid_constants (insn, address);
	  address += get_attr_length (insn);
	  /* If the insn is a vector jump, add the size of the table
	     and skip the table.  */
	  if (GET_CODE (insn) == JUMP_INSN
	      && JUMP_LABEL (insn) != NULL
	      && ((table = next_real_insn (JUMP_LABEL (insn)))
		  == next_real_insn (insn))
	      && table != NULL
	      && GET_CODE (table) == JUMP_INSN
	      && (GET_CODE (PATTERN (table)) == ADDR_VEC
		  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
	    {
	      int elt = GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC ? 1 : 0;

	      address += GET_MODE_SIZE (SImode) * XVECLEN (PATTERN (table),
							   elt);
	      insn = table;
	    }
	}
    }

  /* Now scan the fixups and perform the required changes.  */
  for (fix = minipool_fix_head; fix; fix = fix->next)
    {
      struct minipool_fixup *ftmp;
      struct minipool_fixup *last_barrier = NULL;
      int max_range;
      rtx barrier;
      struct minipool_fixup *this_fix;
      int new_minipool_size = 0;

      /* Skip any further barriers before the next fix.  */
      while (fix && GET_CODE (fix->insn) == BARRIER)
	fix = fix->next;

      if (fix == NULL)
	break;

      ftmp = fix;
      /* The pool must be emitted before this address or the first fix
	 cannot reach it.  */
      max_range = fix->address + fix->range;

      /* Find all the other fixes that can live in the same pool.  */
      while (ftmp->next && ftmp->next->address < max_range
	     && (GET_CODE (ftmp->next->insn) == BARRIER
		 /* Ensure we can reach the constant inside the pool.  */
		 || ftmp->next->range > new_minipool_size))
	{
	  ftmp = ftmp->next;
	  if (GET_CODE (ftmp->insn) == BARRIER)
	    last_barrier = ftmp;
	  else
	    {
	      /* Does this fix constrain the range we can search?  */
	      if (ftmp->address + ftmp->range - new_minipool_size < max_range)
		max_range = ftmp->address + ftmp->range - new_minipool_size;

	      new_minipool_size += GET_MODE_SIZE (ftmp->mode);
	    }
	}

      /* If we found a barrier, drop back to that; any fixes that we could
	 have reached but come after the barrier will now go in the next
	 mini-pool.  */
      if (last_barrier != NULL)
	{
	  barrier = last_barrier->insn;
	  ftmp = last_barrier;
	}
      /* ftmp is last fix that we can fit into this pool and we
	 failed to find a barrier that we could use.  Insert a new
	 barrier in the code and arrange to jump around it.  */
      else
	{
	  /* Check that there isn't another fix that is in range that
	     we couldn't fit into this pool because the pool was
	     already too large: we need to put the pool before such an
	     instruction.  */
	  if (ftmp->next && ftmp->next->address < max_range)
	    max_range = ftmp->address;

	  barrier = find_barrier (ftmp->insn, max_range - ftmp->address);
	}

      /* Scan over the fixes we have identified for this pool, fixing them
	 up and adding the constants to the pool itself.  */
      for (this_fix = fix; this_fix && ftmp->next != this_fix;
	   this_fix = this_fix->next)
	if (GET_CODE (this_fix->insn) != BARRIER)
	  {
	    /* Rewrite the operand as a pc-relative load from the
	       entry's offset within the pool.  */
	    int offset = add_minipool_constant (this_fix->value,
						this_fix->mode);
	    rtx addr
	      = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
						  minipool_vector_label),
			       offset);
	    *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
	  }

      dump_minipool (barrier);
      fix = ftmp;
    }

  /* From now on we must synthesize any constants that we can't handle
     directly.  This can happen if the RTL gets split during final
     instruction generation.  */
  after_arm_reorg = 1;
}
4772
cce8749e
CH
4773\f
4774/* Routines to output assembly language. */
4775
f3bb6135 4776/* If the rtx is the correct value then return the string of the number.
ff9940b0 4777 In this way we can ensure that valid double constants are generated even
6354dc9b 4778 when cross compiling. */
ff9940b0
RE
4779char *
4780fp_immediate_constant (x)
b5cc037f 4781 rtx x;
ff9940b0
RE
4782{
4783 REAL_VALUE_TYPE r;
4784 int i;
4785
4786 if (!fpa_consts_inited)
4787 init_fpa_table ();
4788
4789 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
4790 for (i = 0; i < 8; i++)
4791 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
4792 return strings_fpa[i];
f3bb6135 4793
ff9940b0
RE
4794 abort ();
4795}
4796
9997d19d
RE
4797/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
4798static char *
4799fp_const_from_val (r)
62b10bbc 4800 REAL_VALUE_TYPE * r;
9997d19d
RE
4801{
4802 int i;
4803
4804 if (! fpa_consts_inited)
4805 init_fpa_table ();
4806
4807 for (i = 0; i < 8; i++)
4808 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
4809 return strings_fpa[i];
4810
4811 abort ();
4812}
ff9940b0 4813
cce8749e
CH
4814/* Output the operands of a LDM/STM instruction to STREAM.
4815 MASK is the ARM register set mask of which only bits 0-15 are important.
4816 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
4817 must follow the register list. */
4818
4819void
dd18ae56 4820print_multi_reg (stream, instr, reg, mask, hat)
62b10bbc
NC
4821 FILE * stream;
4822 char * instr;
dd18ae56
NC
4823 int reg;
4824 int mask;
4825 int hat;
cce8749e
CH
4826{
4827 int i;
4828 int not_first = FALSE;
4829
1d5473cb 4830 fputc ('\t', stream);
dd18ae56 4831 asm_fprintf (stream, instr, reg);
1d5473cb 4832 fputs (", {", stream);
62b10bbc 4833
cce8749e
CH
4834 for (i = 0; i < 16; i++)
4835 if (mask & (1 << i))
4836 {
4837 if (not_first)
4838 fprintf (stream, ", ");
62b10bbc 4839
dd18ae56 4840 asm_fprintf (stream, "%r", i);
cce8749e
CH
4841 not_first = TRUE;
4842 }
f3bb6135 4843
cce8749e 4844 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 4845}
cce8749e 4846
/* Output a 'call' insn.  OPERANDS[0] is the register holding the
   target address.  Returns "" since the asm is emitted directly.  */
char *
output_call (operands)
     rtx * operands;
{
  /* Handle calls to lr using ip (which may be clobbered in subr anyway).  */
  if (REGNO (operands[0]) == LR_REGNUM)
    {
      operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }

  /* Set up the return address in lr from pc, then branch.  */
  output_asm_insn ("mov%?\t%|lr, %|pc", operands);
  
  if (TARGET_INTERWORK)
    /* bx preserves ARM/Thumb interworking.  */
    output_asm_insn ("bx%?\t%0", operands);
  else
    output_asm_insn ("mov%?\t%|pc, %0", operands);
  
  return "";
}
cce8749e 4870
ff9940b0
RE
4871static int
4872eliminate_lr2ip (x)
62b10bbc 4873 rtx * x;
ff9940b0
RE
4874{
4875 int something_changed = 0;
62b10bbc 4876 rtx x0 = * x;
ff9940b0
RE
4877 int code = GET_CODE (x0);
4878 register int i, j;
6f7d635c 4879 register const char * fmt;
ff9940b0
RE
4880
4881 switch (code)
4882 {
4883 case REG:
62b10bbc 4884 if (REGNO (x0) == LR_REGNUM)
ff9940b0 4885 {
62b10bbc 4886 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
4887 return 1;
4888 }
4889 return 0;
4890 default:
6354dc9b 4891 /* Scan through the sub-elements and change any references there. */
ff9940b0 4892 fmt = GET_RTX_FORMAT (code);
62b10bbc 4893
ff9940b0
RE
4894 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4895 if (fmt[i] == 'e')
4896 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
4897 else if (fmt[i] == 'E')
4898 for (j = 0; j < XVECLEN (x0, i); j++)
4899 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 4900
ff9940b0
RE
4901 return something_changed;
4902 }
4903}
4904
/* Output a 'call' insn that is a reference in memory.  OPERANDS[0] is
   the memory operand holding the target address.  */
char *
output_call_mem (operands)
     rtx * operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful.  */
  /* Handle calls using lr by using ip (which may be clobbered in subr anyway).  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  if (TARGET_INTERWORK)
    {
      /* Load the target first so bx can be used for interworking.  */
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else
    {
      /* Set lr from pc, then load the target straight into pc.  */
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}
ff9940b0
RE
4930
4931
/* Output a move from arm registers to an fpu registers.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first registers of an arm register pair.  */

char *
output_mov_long_double_fpu_from_arm (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[3];

  /* The value occupies a triple starting at arm_reg0; starting at ip
     would run into sp/pc, so that is rejected outright.  */
  if (arm_reg0 == IP_REGNUM)
    abort ();
  
  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
  
  /* Transfer via the stack: push the three ARM registers, then pop
     the 12-byte extended value straight into the FPU register.  */
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
  output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);

  return "";
}
ff9940b0
RE
4955
/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first registers of an arm register pair.
   OPERANDS[1] is an fpu register.  */

char *
output_mov_long_double_arm_from_fpu (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[3];

  /* The destination triple must not start at ip (would overlap sp/pc).  */
  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  /* Transfer via the stack: store the 12-byte extended value, then
     pop it into the three consecutive ARM registers.  */
  output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
  return "";
}
ff9940b0
RE
4978
4979/* Output a move from arm registers to arm registers of a long double
4980 OPERANDS[0] is the destination.
4981 OPERANDS[1] is the source. */
4982char *
4983output_mov_long_double_arm_from_arm (operands)
62b10bbc 4984 rtx * operands;
ff9940b0 4985{
6354dc9b 4986 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
4987 int dest_start = REGNO (operands[0]);
4988 int src_start = REGNO (operands[1]);
4989 rtx ops[2];
4990 int i;
4991
4992 if (dest_start < src_start)
4993 {
4994 for (i = 0; i < 3; i++)
4995 {
43cffd11
RE
4996 ops[0] = gen_rtx_REG (SImode, dest_start + i);
4997 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 4998 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4999 }
5000 }
5001 else
5002 {
5003 for (i = 2; i >= 0; i--)
5004 {
43cffd11
RE
5005 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5006 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5007 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5008 }
5009 }
f3bb6135 5010
ff9940b0
RE
5011 return "";
5012}
5013
5014
cce8749e
CH
5015/* Output a move from arm registers to an fpu registers.
5016 OPERANDS[0] is an fpu register.
5017 OPERANDS[1] is the first registers of an arm register pair. */
5018
char *
output_mov_double_fpu_from_arm (operands)
     rtx * operands;
{
  /* Emit assembly to move a double held in an ARM core register pair
     (starting at OPERANDS[1]) into the FPU register OPERANDS[0].
     The transfer bounces through the stack: STMFD pushes the pair,
     then LDFD pops the 8 bytes into the FPU register.  */
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[2];

  /* A pair starting at ip would overlap the scratch register.  */
  if (arm_reg0 == IP_REGNUM)
    abort ();

  /* The two consecutive source core registers.  */
  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);

  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
  output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);

  return "";
}
cce8749e
CH
5035
5036/* Output a move from an fpu register to arm registers.
5037 OPERANDS[0] is the first registers of an arm register pair.
5038 OPERANDS[1] is an fpu register. */
5039
char *
output_mov_double_arm_from_fpu (operands)
     rtx * operands;
{
  /* Emit assembly to move the FPU double OPERANDS[1] into an ARM core
     register pair starting at OPERANDS[0].  Mirror image of
     output_mov_double_fpu_from_arm: STFD pushes 8 bytes, LDMFD pops
     them into the register pair.  */
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[2];

  /* A pair starting at ip would overlap the scratch register.  */
  if (arm_reg0 == IP_REGNUM)
    abort ();

  /* The two consecutive destination core registers.  */
  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);

  output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);

  return "";
}
cce8749e
CH
5056
5057/* Output a move between double words.
5058 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
5059 or MEM<-REG and all MEMs must be offsettable addresses. */
5060
char *
output_move_double (operands)
     rtx * operands;
{
  /* Emit assembly for a move between double words (two 32-bit words).
     Supported shapes: REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT,
     REG<-MEM and MEM<-REG; any MEM must be an offsettable address.
     Returns either "" (everything already emitted) or a final template
     for the caller to output.  */
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      /* Register holding the second (high-address) word.  */
      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  if (reg1 == IP_REGNUM)
	    abort ();

	  /* Ensure the second source is not overwritten.  Pick the copy
	     order so the word of the source that overlaps the first
	     destination write is moved before it is clobbered.  */
	  if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
	    output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
	  else
	    output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
	}
      else if (code1 == CONST_DOUBLE)
	{
	  /* Split the constant into two 32-bit immediate moves:
	     operands[1] gets the first word, otherops[1] the second.  */
	  if (GET_MODE (operands[1]) == DFmode)
	    {
	      long l[2];
	      union real_extract u;

	      bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
		     sizeof (u));
	      REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
	      otherops[1] = GEN_INT(l[1]);
	      operands[1] = GEN_INT(l[0]);
	    }
	  else if (GET_MODE (operands[1]) != VOIDmode)
	    abort ();
	  else if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	    }
	  else
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	    }

	  output_mov_immediate (operands);
	  output_mov_immediate (otherops);
	}
      else if (code1 == CONST_INT)
	{
#if HOST_BITS_PER_WIDE_INT > 32
	  /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
	     what the upper word is.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	      operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	    }
	  else
	    {
	      otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	      operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	    }
#else
	  /* Sign extend the intval into the high-order word.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = operands[1];
	      operands[1] = (INTVAL (operands[1]) < 0
			     ? constm1_rtx : const0_rtx);
	    }
	  else
	    otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
	  output_mov_immediate (otherops);
	  output_mov_immediate (operands);
	}
      else if (code1 == MEM)
	{
	  /* Dispatch on the addressing mode of the source memory.  */
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;

	    case PRE_INC:
	      abort (); /* Should never happen now.  */
	      break;

	    case PRE_DEC:
	      output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	      break;

	    case POST_INC:
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;

	    case POST_DEC:
	      abort (); /* Should never happen now.  */
	      break;

	    case LABEL_REF:
	    case CONST:
	      /* Materialize the address with ADR, then load both words.  */
	      output_asm_insn ("adr%?\t%0, %1", operands);
	      output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	      break;

	    default:
	      /* base +/- offset.  If the offset can be folded into an
		 LDM variant or an add/sub, use the multi-word load;
		 otherwise fall back to two single LDRs.  */
	      if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
				   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
		{
		  otherops[0] = operands[0];
		  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
		  otherops[2] = XEXP (XEXP (operands[1], 0), 1);
		  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		    {
		      if (GET_CODE (otherops[2]) == CONST_INT)
			{
			  /* Small offsets map directly onto LDM
			     addressing modes.  */
			  switch (INTVAL (otherops[2]))
			    {
			    case -8:
			      output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			      return "";
			    case -4:
			      output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			      return "";
			    case 4:
			      output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			      return "";
			    }
			  if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			    output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
			  else
			    output_asm_insn ("add%?\t%0, %1, %2", otherops);
			}
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("sub%?\t%0, %1, %2", otherops);

		  /* Caller outputs the final multi-word load.  */
		  return "ldm%?ia\t%0, %M0";
		}
	      else
		{
		  otherops[1] = adj_offsettable_operand (operands[1], 4);
		  /* Take care of overlapping base/data reg: load the
		     word that does not kill the base register first.  */
		  if (reg_mentioned_p (operands[0], operands[1]))
		    {
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		    }
		  else
		    {
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		    }
		}
	    }
	}
      else
	abort (); /* Constraints should prevent this.  */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == IP_REGNUM)
	abort ();

      /* Dispatch on the addressing mode of the destination memory.  */
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  abort (); /* Should never happen now.  */
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  abort (); /* Should never happen now.  */
	  break;

	case PLUS:
	  if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
	    {
	      /* Small offsets map directly onto STM addressing modes.  */
	      switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  /* Fall through */

	default:
	  /* General case: two single-word stores.  */
	  otherops[0] = adj_offsettable_operand (operands[0], 4);
	  otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else
    abort (); /* Constraints should prevent this */

  return "";
}
cce8749e
CH
5292
5293
5294/* Output an arbitrary MOV reg, #n.
5295 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
5296
5297char *
5298output_mov_immediate (operands)
62b10bbc 5299 rtx * operands;
cce8749e 5300{
f3bb6135 5301 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
5302 int n_ones = 0;
5303 int i;
5304
5305 /* Try to use one MOV */
cce8749e 5306 if (const_ok_for_arm (n))
f3bb6135 5307 {
9997d19d 5308 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
5309 return "";
5310 }
cce8749e
CH
5311
5312 /* Try to use one MVN */
f3bb6135 5313 if (const_ok_for_arm (~n))
cce8749e 5314 {
f3bb6135 5315 operands[1] = GEN_INT (~n);
9997d19d 5316 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 5317 return "";
cce8749e
CH
5318 }
5319
6354dc9b 5320 /* If all else fails, make it out of ORRs or BICs as appropriate. */
cce8749e
CH
5321
5322 for (i=0; i < 32; i++)
5323 if (n & 1 << i)
5324 n_ones++;
5325
6354dc9b 5326 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
9997d19d
RE
5327 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
5328 ~n);
cce8749e 5329 else
9997d19d
RE
5330 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
5331 n);
f3bb6135
RE
5332
5333 return "";
5334}
cce8749e
CH
5335
5336
5337/* Output an ADD r, s, #n where n may be too big for one instruction. If
5338 adding zero to one register, output nothing. */
5339
5340char *
5341output_add_immediate (operands)
62b10bbc 5342 rtx * operands;
cce8749e 5343{
f3bb6135 5344 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
5345
5346 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
5347 {
5348 if (n < 0)
5349 output_multi_immediate (operands,
9997d19d
RE
5350 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
5351 -n);
cce8749e
CH
5352 else
5353 output_multi_immediate (operands,
9997d19d
RE
5354 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
5355 n);
cce8749e 5356 }
f3bb6135
RE
5357
5358 return "";
5359}
cce8749e 5360
cce8749e
CH
5361/* Output a multiple immediate operation.
5362 OPERANDS is the vector of operands referred to in the output patterns.
5363 INSTR1 is the output pattern to use for the first constant.
5364 INSTR2 is the output pattern to use for subsequent constants.
5365 IMMED_OP is the index of the constant slot in OPERANDS.
5366 N is the constant value. */
5367
static char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx * operands;
     char * instr1, * instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
  /* Emit a constant as a sequence of instructions.
     OPERANDS is the operand vector for the output patterns.
     INSTR1 is the pattern for the first piece (e.g. mov/mvn),
     INSTR2 for each subsequent piece (e.g. orr/bic).
     IMMED_OP is the index of the constant slot in OPERANDS.
     N is the 32-bit constant value to emit.  */
#if HOST_BITS_PER_WIDE_INT > 32
  /* Only the low 32 bits are meaningful on the target.  */
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands); /* Quick and easy output.  */
    }
  else
    {
      int i;
      char *instr = instr1;

      /* Note that n is never zero here (which would give no output).
	 Scan in steps of 2 bits, since an ARM data-processing immediate
	 is an 8-bit value rotated right by an even amount; whenever a
	 set bit is found, peel off the aligned 8-bit chunk (255 << i)
	 and skip past it (i += 6 plus the loop's i += 2).  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      /* After the first piece, switch to the accumulate form.  */
	      instr = instr2;
	      i += 6;
	    }
	}
    }

  return "";
}
cce8749e
CH
5403
5404
5405/* Return the appropriate ARM instruction for the operation code.
5406 The returned result should not be overwritten. OP is the rtx of the
5407 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
5408 was shifted. */
5409
5410char *
5411arithmetic_instr (op, shift_first_arg)
5412 rtx op;
f3bb6135 5413 int shift_first_arg;
cce8749e 5414{
9997d19d 5415 switch (GET_CODE (op))
cce8749e
CH
5416 {
5417 case PLUS:
f3bb6135
RE
5418 return "add";
5419
cce8749e 5420 case MINUS:
f3bb6135
RE
5421 return shift_first_arg ? "rsb" : "sub";
5422
cce8749e 5423 case IOR:
f3bb6135
RE
5424 return "orr";
5425
cce8749e 5426 case XOR:
f3bb6135
RE
5427 return "eor";
5428
cce8749e 5429 case AND:
f3bb6135
RE
5430 return "and";
5431
cce8749e 5432 default:
f3bb6135 5433 abort ();
cce8749e 5434 }
f3bb6135 5435}
cce8749e
CH
5436
5437
5438/* Ensure valid constant shifts and return the appropriate shift mnemonic
5439 for the operation code. The returned result should not be overwritten.
5440 OP is the rtx code of the shift.
9997d19d 5441 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
6354dc9b 5442 shift. */
cce8749e 5443
9997d19d
RE
static char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  /* Return the shift mnemonic for rtx OP and store the shift amount in
     *AMOUNTP (-1 if the amount is a register rather than a constant).
     Returns NULL for a shift of 0, which is a no-op.  */
  char * mnem;
  enum rtx_code code = GET_CODE (op);

  /* Classify the shift amount: register/subreg -> -1, constant -> its
     value, anything else is malformed.  */
  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	*amountp &= 31;		/* Rotate is just modulo 32 */
      else if (*amountp != (*amountp & 31))
	{
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
cce8749e
CH
5515
5516
6354dc9b 5517/* Obtain the shift from the POWER of two. */
18af7313 5518static HOST_WIDE_INT
cce8749e 5519int_log2 (power)
f3bb6135 5520 HOST_WIDE_INT power;
cce8749e 5521{
f3bb6135 5522 HOST_WIDE_INT shift = 0;
cce8749e 5523
2b835d68 5524 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
5525 {
5526 if (shift > 31)
f3bb6135 5527 abort ();
cce8749e
CH
5528 shift++;
5529 }
f3bb6135
RE
5530
5531 return shift;
5532}
cce8749e 5533
cce8749e
CH
5534/* Output a .ascii pseudo-op, keeping track of lengths. This is because
5535 /bin/as is horribly restrictive. */
6cfc7210 5536#define MAX_ASCII_LEN 51
cce8749e
CH
5537
5538void
5539output_ascii_pseudo_op (stream, p, len)
62b10bbc 5540 FILE * stream;
3cce094d 5541 const unsigned char * p;
cce8749e
CH
5542 int len;
5543{
5544 int i;
6cfc7210 5545 int len_so_far = 0;
cce8749e 5546
6cfc7210
NC
5547 fputs ("\t.ascii\t\"", stream);
5548
cce8749e
CH
5549 for (i = 0; i < len; i++)
5550 {
5551 register int c = p[i];
5552
6cfc7210 5553 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 5554 {
6cfc7210 5555 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 5556 len_so_far = 0;
cce8749e
CH
5557 }
5558
6cfc7210 5559 switch (c)
cce8749e 5560 {
6cfc7210
NC
5561 case TARGET_TAB:
5562 fputs ("\\t", stream);
5563 len_so_far += 2;
5564 break;
5565
5566 case TARGET_FF:
5567 fputs ("\\f", stream);
5568 len_so_far += 2;
5569 break;
5570
5571 case TARGET_BS:
5572 fputs ("\\b", stream);
5573 len_so_far += 2;
5574 break;
5575
5576 case TARGET_CR:
5577 fputs ("\\r", stream);
5578 len_so_far += 2;
5579 break;
5580
5581 case TARGET_NEWLINE:
5582 fputs ("\\n", stream);
5583 c = p [i + 1];
5584 if ((c >= ' ' && c <= '~')
5585 || c == TARGET_TAB)
5586 /* This is a good place for a line break. */
5587 len_so_far = MAX_ASCII_LEN;
5588 else
5589 len_so_far += 2;
5590 break;
5591
5592 case '\"':
5593 case '\\':
5594 putc ('\\', stream);
5595 len_so_far ++;
5596 /* drop through. */
f3bb6135 5597
6cfc7210
NC
5598 default:
5599 if (c >= ' ' && c <= '~')
5600 {
5601 putc (c, stream);
5602 len_so_far ++;
5603 }
5604 else
5605 {
5606 fprintf (stream, "\\%03o", c);
5607 len_so_far += 4;
5608 }
5609 break;
cce8749e 5610 }
cce8749e 5611 }
f3bb6135 5612
cce8749e 5613 fputs ("\"\n", stream);
f3bb6135 5614}
cce8749e 5615\f
ff9940b0
RE
5616
5617/* Try to determine whether a pattern really clobbers the link register.
5618 This information is useful when peepholing, so that lr need not be pushed
0e84b556
RK
5619 if we combine a call followed by a return.
5620 NOTE: This code does not check for side-effect expressions in a SET_SRC:
5621 such a check should not be needed because these only update an existing
5622 value within a register; the register must still be set elsewhere within
6354dc9b 5623 the function. */
ff9940b0
RE
static int
pattern_really_clobbers_lr (x)
     rtx x;
{
  /* Return nonzero if the rtl pattern X writes or clobbers the link
     register.  Used when peepholing so that lr need not be pushed if a
     call followed by a return can be combined.
     NOTE: side-effect expressions in a SET_SRC are not checked; they
     only update an existing register value, which must therefore be
     set elsewhere in the function.  */
  int i;

  switch (GET_CODE (x))
    {
    case SET:
      switch (GET_CODE (SET_DEST (x)))
	{
	case REG:
	  return REGNO (SET_DEST (x)) == LR_REGNUM;

	case SUBREG:
	  if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
	    return REGNO (XEXP (SET_DEST (x), 0)) == LR_REGNUM;

	  /* A subreg of memory is a store; that cannot hit lr.  */
	  if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
	    return 0;
	  abort ();

	default:
	  return 0;
	}

    case PARALLEL:
      /* Any element of the parallel may clobber lr.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
	  return 1;
      return 0;

    case CLOBBER:
      switch (GET_CODE (XEXP (x, 0)))
	{
	case REG:
	  return REGNO (XEXP (x, 0)) == LR_REGNUM;

	case SUBREG:
	  if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
	    return REGNO (XEXP (XEXP (x, 0), 0)) == LR_REGNUM;
	  abort ();

	default:
	  return 0;
	}

    case UNSPEC:
      /* Unknown semantics: assume the worst.  */
      return 1;

    default:
      return 0;
    }
}
5678
static int
function_really_clobbers_lr (first)
     rtx first;
{
  /* Walk the insn chain starting at FIRST and return nonzero if any
     insn clobbers the link register in a way that forces lr to be
     saved in the prologue.  Returns 0 only when every call is
     immediately followed by a return (optionally separated by a USE of
     the call's result), so the call/return pair can be peepholed.  */
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	case NOTE:
	case CODE_LABEL:
	case JUMP_INSN:		/* Jump insns only change the PC (and conds) */
	  break;

	case INSN:
	  if (pattern_really_clobbers_lr (PATTERN (insn)))
	    return 1;
	  break;

	case CALL_INSN:
	  /* Don't yet know how to handle those calls that are not to a
	     SYMBOL_REF.  */
	  if (GET_CODE (PATTERN (insn)) != PARALLEL)
	    abort ();

	  switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
	    {
	    case CALL:
	      if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    case SET:
	      /* Call whose result is used: the callee address is inside
		 the SET_SRC.  */
	      if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
							  0, 0)), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    default:	/* Don't recognize it, be safe.  */
	      return 1;
	    }

	  /* A call can be made (by peepholing) not to clobber lr iff it is
	     followed by a return.  There may, however, be a use insn iff
	     we are returning the result of the call.
	     If we run off the end of the insn chain, then that means the
	     call was at the end of the function.  Unfortunately we don't
	     have a return insn for the peephole to recognize, so we
	     must reject this.  (Can this be fixed by adding our own insn?)  */
	  if ((next = next_nonnote_insn (insn)) == NULL)
	    return 1;

	  /* No need to worry about lr if the call never returns.  */
	  if (GET_CODE (next) == BARRIER)
	    break;

	  /* Skip over a USE of the call's result register, if any.  */
	  if (GET_CODE (next) == INSN
	      && GET_CODE (PATTERN (next)) == USE
	      && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	      && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
		  == REGNO (XEXP (PATTERN (next), 0))))
	    if ((next = next_nonnote_insn (next)) == NULL)
	      return 1;

	  /* The call is only safe if it is directly followed by a
	     return.  */
	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == RETURN)
	    break;
	  return 1;

	default:
	  abort ();
	}
    }

  /* We have reached the end of the chain so lr was _not_ clobbered.  */
  return 0;
}
5760
char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  /* Emit the assembly for a function return.
     OPERAND is the condition operand; REVERSE selects the inverted
     condition (%D0 vs %d0).  REALLY_RETURN is zero when only the
     register restore should be emitted (the actual return happens
     elsewhere).  Sets the global return_used_this_function.  */
  char instr[100];
  int reg, live_regs = 0;
  int volatile_func = arm_volatile_func ();

  return_used_this_function = 1;

  if (TARGET_ABORT_NORETURN && volatile_func)
    {
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return.  */
      if (really_return)
	{
	  rtx ops[2];

	  /* Otherwise, trap an attempted return by aborting.  */
	  ops[0] = operand;
	  ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
				       : "abort");
	  assemble_external_libcall (ops[1]);
	  output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
	}

      return "";
    }

  if (current_function_calls_alloca && ! really_return)
    abort ();

  /* Count the callee-saved core registers (r0-r10) that must be
     restored.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs++;

  /* The PIC base register also counts when it was saved.  */
  if (flag_pic && ! TARGET_SINGLE_PIC_BASE
      && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    live_regs++;

  if (live_regs || (regs_ever_live[LR_REGNUM] && ! lr_save_eliminated))
    live_regs++;

  /* With a frame pointer, fp/sp/lr/pc are part of the restore set.  */
  if (frame_pointer_needed)
    live_regs += 4;

  /* On some ARM architectures it is faster to use LDR rather than LDM to
     load a single register.  On other architectures, the cost is the same.  */
  if (live_regs == 1
      && regs_ever_live[LR_REGNUM]
      && ! lr_save_eliminated
      /* FIXME: We ought to handle the case TARGET_APCS_32 is true,
	 really_return is true, and only the PC needs restoring.  */
      && ! really_return)
    {
      output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
		       : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
    }
  else if (live_regs)
    {
      if (lr_save_eliminated || ! regs_ever_live[LR_REGNUM])
	live_regs++;

      /* Build the LDM instruction incrementally in INSTR.  */
      if (frame_pointer_needed)
	strcpy (instr,
		reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
      else
	strcpy (instr,
		reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");

      /* List the callee-saved core registers (and the PIC register
	 when it was saved).  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg]
	    && (! call_used_regs[reg]
		|| (flag_pic && ! TARGET_SINGLE_PIC_BASE
		    && reg == PIC_OFFSET_TABLE_REGNUM)))
	  {
	    strcat (instr, "%|");
	    strcat (instr, reg_names[reg]);
	    if (--live_regs)
	      strcat (instr, ", ");
	  }

      if (frame_pointer_needed)
	{
	  /* Restore fp, sp and then either lr or pc.  Interworking
	     must go through lr so a BX can be emitted afterwards.  */
	  strcat (instr, "%|");
	  strcat (instr, reg_names[11]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, reg_names[13]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, TARGET_INTERWORK || (! really_return)
		  ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
	}
      else
	{
	  strcat (instr, "%|");
	  if (TARGET_INTERWORK && really_return)
	    strcat (instr, reg_names[IP_REGNUM]);
	  else
	    strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
	}
      /* "}^" restores SPSR as well, for 26-bit (non-APCS-32) returns.  */
      strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);

      /* Interworking return: branch-and-exchange through lr or ip.  */
      if (TARGET_INTERWORK && really_return)
	{
	  strcpy (instr, "bx%?");
	  strcat (instr, reverse ? "%D0" : "%d0");
	  strcat (instr, "\t%|");
	  strcat (instr, frame_pointer_needed ? "lr" : "ip");

	  output_asm_insn (instr, & operand);
	}
    }
  else if (really_return)
    {
      /* Nothing to restore: return directly via lr.  */
      if (TARGET_INTERWORK)
	sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
      else
	sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
		 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");

      output_asm_insn (instr, & operand);
    }

  return "";
}
5891
e82ea128
DE
5892/* Return nonzero if optimizing and the current function is volatile.
5893 Such functions never return, and many memory cycles can be saved
5894 by not storing register values that will never be needed again.
5895 This optimization was added to speed up context switching in a
6354dc9b 5896 kernel application. */
e2c671ba
RE
5897int
5898arm_volatile_func ()
5899{
6354dc9b
NC
5900 return (optimize > 0
5901 && current_function_nothrow
46406379 5902 && TREE_THIS_VOLATILE (current_function_decl));
e2c671ba
RE
5903}
5904
ef179a26
NC
5905/* Write the function name into the code section, directly preceding
5906 the function prologue.
5907
5908 Code will be output similar to this:
5909 t0
5910 .ascii "arm_poke_function_name", 0
5911 .align
5912 t1
5913 .word 0xff000000 + (t1 - t0)
5914 arm_poke_function_name
5915 mov ip, sp
5916 stmfd sp!, {fp, ip, lr, pc}
5917 sub fp, ip, #4
5918
5919 When performing a stack backtrace, code can inspect the value
5920 of 'pc' stored at 'fp' + 0. If the trace function then looks
5921 at location pc - 12 and the top 8 bits are set, then we know
5922 that there is a function name embedded immediately preceding this
5923 location and has length ((pc[-3]) & 0xff000000).
5924
5925 We assume that pc is declared as a pointer to an unsigned long.
5926
5927 It is of no benefit to output the function name if we are assembling
5928 a leaf function. These function types will not contain a stack
5929 backtrace structure, therefore it is not possible to determine the
5930 function name. */
5931
void
arm_poke_function_name (stream, name)
     FILE * stream;
     char * name;
{
  /* Embed NAME in the code section immediately before the function
     prologue, followed by a marker word 0xff000000 + padded length, so
     a stack-backtrace routine can recover the function name (see the
     comment block above for the exact layout).  */
  unsigned long alignlength;
  unsigned long length;
  rtx x;

  /* Include the terminating NUL, then pad to a 4-byte boundary.  */
  length      = strlen (name) + 1;
  alignlength = (length + 3) & ~3;

  ASM_OUTPUT_ASCII (stream, name, length);
  ASM_OUTPUT_ALIGN (stream, 2);
  /* Marker word: top 8 bits set, low bits hold the padded length.  */
  x = GEN_INT (0xff000000UL + alignlength);
  ASM_OUTPUT_INT (stream, x);
}
5949
ff9940b0
RE
5950/* The amount of stack adjustment that happens here, in output_return and in
5951 output_epilogue must be exactly the same as was calculated during reload,
5952 or things will point to the wrong place. The only time we can safely
5953 ignore this constraint is when a function has no arguments on the stack,
5954 no stack frame requirement and no live registers execpt for `lr'. If we
5955 can guarantee that by making all function calls into tail calls and that
5956 lr is not clobbered in any other way, then there is no need to push lr
6354dc9b 5957 onto the stack. */
cce8749e 5958void
f3bb6135 5959output_func_prologue (f, frame_size)
6cfc7210 5960 FILE * f;
cce8749e
CH
5961 int frame_size;
5962{
f3bb6135 5963 int reg, live_regs_mask = 0;
46406379 5964 int volatile_func = arm_volatile_func ();
cce8749e 5965
cce8749e
CH
5966 /* Nonzero if we must stuff some register arguments onto the stack as if
5967 they were passed there. */
5968 int store_arg_regs = 0;
5969
abaa26e5 5970 if (arm_ccfsm_state || arm_target_insn)
6354dc9b 5971 abort (); /* Sanity check. */
31fdb4d5
DE
5972
5973 if (arm_naked_function_p (current_function_decl))
5974 return;
5975
ff9940b0
RE
5976 return_used_this_function = 0;
5977 lr_save_eliminated = 0;
5978
dd18ae56
NC
5979 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
5980 current_function_args_size,
5981 current_function_pretend_args_size, frame_size);
5982 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
5983 frame_pointer_needed,
5984 current_function_anonymous_args);
cce8749e 5985
e2c671ba 5986 if (volatile_func)
dd18ae56 5987 asm_fprintf (f, "\t%@ Volatile function.\n");
e2c671ba 5988
cce8749e
CH
5989 if (current_function_anonymous_args && current_function_pretend_args_size)
5990 store_arg_regs = 1;
5991
f3bb6135
RE
5992 for (reg = 0; reg <= 10; reg++)
5993 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e
CH
5994 live_regs_mask |= (1 << reg);
5995
dd18ae56 5996 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
ed0e6530 5997 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
5998 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
5999
ff9940b0 6000 if (frame_pointer_needed)
e2c671ba 6001 live_regs_mask |= 0xD800;
62b10bbc 6002 else if (regs_ever_live[LR_REGNUM])
ff9940b0
RE
6003 {
6004 if (! current_function_args_size
f3bb6135 6005 && ! function_really_clobbers_lr (get_insns ()))
e2c671ba 6006 lr_save_eliminated = 1;
ff9940b0 6007 else
62b10bbc 6008 live_regs_mask |= 1 << LR_REGNUM;
ff9940b0 6009 }
cce8749e 6010
cce8749e
CH
6011 if (live_regs_mask)
6012 {
6354dc9b 6013 /* If a di mode load/store multiple is used, and the base register
ff9940b0
RE
6014 is r3, then r4 can become an ever live register without lr
6015 doing so, in this case we need to push lr as well, or we
6354dc9b 6016 will fail to get a proper return. */
62b10bbc 6017 live_regs_mask |= 1 << LR_REGNUM;
ff9940b0 6018 lr_save_eliminated = 0;
f3bb6135 6019
cce8749e
CH
6020 }
6021
e2c671ba 6022 if (lr_save_eliminated)
dd18ae56 6023 asm_fprintf (f,"\t%@ I don't think this function clobbers lr\n");
32de079a
RE
6024
6025#ifdef AOF_ASSEMBLER
6026 if (flag_pic)
dd18ae56 6027 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 6028#endif
f3bb6135 6029}
cce8749e 6030
949d79eb
RE
6031char *
6032arm_output_epilogue ()
cce8749e 6033{
949d79eb
RE
6034 int reg;
6035 int live_regs_mask = 0;
6354dc9b 6036 /* If we need this, then it will always be at least this much. */
b111229a 6037 int floats_offset = 12;
cce8749e 6038 rtx operands[3];
949d79eb
RE
6039 int frame_size = get_frame_size ();
6040 FILE *f = asm_out_file;
46406379 6041 int volatile_func = arm_volatile_func ();
cce8749e 6042
b36ba79f 6043 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 6044 return "";
cce8749e 6045
31fdb4d5
DE
6046 /* Naked functions don't have epilogues. */
6047 if (arm_naked_function_p (current_function_decl))
949d79eb 6048 return "";
31fdb4d5 6049
e2c671ba 6050 /* A volatile function should never return. Call abort. */
c11145f6 6051 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6052 {
86efdc8e 6053 rtx op;
ed0e6530 6054 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 6055 assemble_external_libcall (op);
e2c671ba 6056 output_asm_insn ("bl\t%a0", &op);
949d79eb 6057 return "";
e2c671ba
RE
6058 }
6059
f3bb6135
RE
6060 for (reg = 0; reg <= 10; reg++)
6061 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 6062 {
ff9940b0
RE
6063 live_regs_mask |= (1 << reg);
6064 floats_offset += 4;
cce8749e
CH
6065 }
6066
ed0e6530
PB
6067 /* If we aren't loading the PIC register, don't stack it even though it may
6068 be live. */
6069 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6070 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6071 {
6072 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6073 floats_offset += 4;
6074 }
6075
ff9940b0 6076 if (frame_pointer_needed)
cce8749e 6077 {
b111229a
RE
6078 if (arm_fpu_arch == FP_SOFT2)
6079 {
6080 for (reg = 23; reg > 15; reg--)
6081 if (regs_ever_live[reg] && ! call_used_regs[reg])
6082 {
6083 floats_offset += 12;
dd18ae56
NC
6084 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6085 reg, FP_REGNUM, floats_offset);
b111229a
RE
6086 }
6087 }
6088 else
6089 {
6090 int start_reg = 23;
6091
6092 for (reg = 23; reg > 15; reg--)
6093 {
6094 if (regs_ever_live[reg] && ! call_used_regs[reg])
6095 {
6096 floats_offset += 12;
6cfc7210 6097
6354dc9b 6098 /* We can't unstack more than four registers at once. */
b111229a
RE
6099 if (start_reg - reg == 3)
6100 {
dd18ae56
NC
6101 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6102 reg, FP_REGNUM, floats_offset);
b111229a
RE
6103 start_reg = reg - 1;
6104 }
6105 }
6106 else
6107 {
6108 if (reg != start_reg)
dd18ae56
NC
6109 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6110 reg + 1, start_reg - reg,
6111 FP_REGNUM, floats_offset);
b111229a
RE
6112 start_reg = reg - 1;
6113 }
6114 }
6115
6116 /* Just in case the last register checked also needs unstacking. */
6117 if (reg != start_reg)
dd18ae56
NC
6118 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6119 reg + 1, start_reg - reg,
6120 FP_REGNUM, floats_offset);
b111229a 6121 }
da6558fd 6122
6cfc7210 6123 if (TARGET_INTERWORK)
b111229a
RE
6124 {
6125 live_regs_mask |= 0x6800;
dd18ae56
NC
6126 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
6127 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
b111229a
RE
6128 }
6129 else
6130 {
6131 live_regs_mask |= 0xA800;
dd18ae56 6132 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
6133 TARGET_APCS_32 ? FALSE : TRUE);
6134 }
cce8749e
CH
6135 }
6136 else
6137 {
d2288d8d 6138 /* Restore stack pointer if necessary. */
56636818 6139 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
6140 {
6141 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
6142 operands[2] = GEN_INT (frame_size
6143 + current_function_outgoing_args_size);
d2288d8d
TG
6144 output_add_immediate (operands);
6145 }
6146
b111229a
RE
6147 if (arm_fpu_arch == FP_SOFT2)
6148 {
6149 for (reg = 16; reg < 24; reg++)
6150 if (regs_ever_live[reg] && ! call_used_regs[reg])
dd18ae56
NC
6151 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6152 reg, SP_REGNUM);
b111229a
RE
6153 }
6154 else
6155 {
6156 int start_reg = 16;
6157
6158 for (reg = 16; reg < 24; reg++)
6159 {
6160 if (regs_ever_live[reg] && ! call_used_regs[reg])
6161 {
6162 if (reg - start_reg == 3)
6163 {
dd18ae56
NC
6164 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6165 start_reg, SP_REGNUM);
b111229a
RE
6166 start_reg = reg + 1;
6167 }
6168 }
6169 else
6170 {
6171 if (reg != start_reg)
dd18ae56
NC
6172 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6173 start_reg, reg - start_reg,
6174 SP_REGNUM);
6cfc7210 6175
b111229a
RE
6176 start_reg = reg + 1;
6177 }
6178 }
6179
6180 /* Just in case the last register checked also needs unstacking. */
6181 if (reg != start_reg)
dd18ae56
NC
6182 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6183 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
6184 }
6185
62b10bbc 6186 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 6187 {
6cfc7210 6188 if (TARGET_INTERWORK)
b111229a
RE
6189 {
6190 if (! lr_save_eliminated)
62b10bbc 6191 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2
NC
6192
6193 if (live_regs_mask != 0)
dd18ae56 6194 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask, FALSE);
6cfc7210 6195
dd18ae56 6196 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
b111229a
RE
6197 }
6198 else if (lr_save_eliminated)
d7d01975
NC
6199 asm_fprintf (f,
6200 TARGET_APCS_32 ? "\tmov\t%r, %r\n" : "\tmovs\t%r, %r\n",
dd18ae56 6201 PC_REGNUM, LR_REGNUM);
32de079a 6202 else
dd18ae56 6203 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask | 0x8000,
32de079a 6204 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
6205 }
6206 else
6207 {
62b10bbc 6208 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 6209 {
6354dc9b 6210 /* Restore the integer regs, and the return address into lr. */
32de079a 6211 if (! lr_save_eliminated)
62b10bbc 6212 live_regs_mask |= 1 << LR_REGNUM;
32de079a
RE
6213
6214 if (live_regs_mask != 0)
dd18ae56 6215 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask, FALSE);
cce8749e 6216 }
b111229a 6217
cce8749e
CH
6218 if (current_function_pretend_args_size)
6219 {
6354dc9b 6220 /* Unwind the pre-pushed regs. */
cce8749e 6221 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 6222 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
6223 output_add_immediate (operands);
6224 }
6354dc9b 6225 /* And finally, go home. */
6cfc7210 6226 if (TARGET_INTERWORK)
dd18ae56 6227 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
25b1c156 6228 else if (TARGET_APCS_32)
dd18ae56 6229 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
b111229a 6230 else
dd18ae56 6231 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
cce8749e
CH
6232 }
6233 }
f3bb6135 6234
949d79eb
RE
6235 return "";
6236}
6237
6238void
eb3921e8 6239output_func_epilogue (frame_size)
949d79eb
RE
6240 int frame_size;
6241{
6242 if (use_return_insn (FALSE) && return_used_this_function
6243 && (frame_size + current_function_outgoing_args_size) != 0
6244 && ! (frame_pointer_needed && TARGET_APCS))
914a3b8c 6245 abort ();
f3bb6135 6246
4b632bf1 6247 /* Reset the ARM-specific per-function variables. */
cce8749e 6248 current_function_anonymous_args = 0;
4b632bf1 6249 after_arm_reorg = 0;
f3bb6135 6250}
e2c671ba 6251
2c849145
JM
6252/* Generate and emit an insn that we will recognize as a push_multi.
6253 Unfortunately, since this insn does not reflect very well the actual
6254 semantics of the operation, we need to annotate the insn for the benefit
6255 of DWARF2 frame unwind information. */
2c849145 6256static rtx
e2c671ba
RE
6257emit_multi_reg_push (mask)
6258 int mask;
6259{
6260 int num_regs = 0;
6261 int i, j;
6262 rtx par;
2c849145
JM
6263 rtx dwarf;
6264 rtx tmp, reg;
e2c671ba
RE
6265
6266 for (i = 0; i < 16; i++)
6267 if (mask & (1 << i))
6268 num_regs++;
6269
6270 if (num_regs == 0 || num_regs > 16)
6271 abort ();
6272
43cffd11 6273 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
2c849145
JM
6274 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
6275 RTX_FRAME_RELATED_P (dwarf) = 1;
e2c671ba
RE
6276
6277 for (i = 0; i < 16; i++)
6278 {
6279 if (mask & (1 << i))
6280 {
2c849145
JM
6281 reg = gen_rtx_REG (SImode, i);
6282
e2c671ba 6283 XVECEXP (par, 0, 0)
43cffd11
RE
6284 = gen_rtx_SET (VOIDmode,
6285 gen_rtx_MEM (BLKmode,
6286 gen_rtx_PRE_DEC (BLKmode,
6287 stack_pointer_rtx)),
6288 gen_rtx_UNSPEC (BLKmode,
2c849145 6289 gen_rtvec (1, reg),
43cffd11 6290 2));
2c849145
JM
6291
6292 tmp = gen_rtx_SET (VOIDmode,
6293 gen_rtx_MEM (SImode,
6294 gen_rtx_PRE_DEC (BLKmode,
6295 stack_pointer_rtx)),
6296 reg);
6297 RTX_FRAME_RELATED_P (tmp) = 1;
6298 XVECEXP (dwarf, 0, num_regs - 1) = tmp;
6299
e2c671ba
RE
6300 break;
6301 }
6302 }
6303
6304 for (j = 1, i++; j < num_regs; i++)
6305 {
6306 if (mask & (1 << i))
6307 {
2c849145
JM
6308 reg = gen_rtx_REG (SImode, i);
6309
6310 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
6311
6312 tmp = gen_rtx_SET (VOIDmode,
6313 gen_rtx_MEM (SImode,
6314 gen_rtx_PRE_DEC (BLKmode,
6315 stack_pointer_rtx)),
6316 reg);
6317 RTX_FRAME_RELATED_P (tmp) = 1;
6318 XVECEXP (dwarf, 0, num_regs - j - 1) = tmp;
6319
e2c671ba
RE
6320 j++;
6321 }
6322 }
b111229a 6323
2c849145
JM
6324 par = emit_insn (par);
6325 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
6326 REG_NOTES (par));
6327 return par;
b111229a
RE
6328}
6329
2c849145 6330static rtx
b111229a
RE
6331emit_sfm (base_reg, count)
6332 int base_reg;
6333 int count;
6334{
6335 rtx par;
2c849145
JM
6336 rtx dwarf;
6337 rtx tmp, reg;
b111229a
RE
6338 int i;
6339
43cffd11 6340 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
6341 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
6342 RTX_FRAME_RELATED_P (dwarf) = 1;
6343
6344 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
6345
6346 XVECEXP (par, 0, 0)
6347 = gen_rtx_SET (VOIDmode,
6348 gen_rtx_MEM (BLKmode,
6349 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
6350 gen_rtx_UNSPEC (BLKmode,
2c849145 6351 gen_rtvec (1, reg),
43cffd11 6352 2));
2c849145
JM
6353 tmp
6354 = gen_rtx_SET (VOIDmode,
6355 gen_rtx_MEM (XFmode,
6356 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
6357 reg);
6358 RTX_FRAME_RELATED_P (tmp) = 1;
6359 XVECEXP (dwarf, 0, count - 1) = tmp;
6360
b111229a 6361 for (i = 1; i < count; i++)
2c849145
JM
6362 {
6363 reg = gen_rtx_REG (XFmode, base_reg++);
6364 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
6365
6366 tmp = gen_rtx_SET (VOIDmode,
6367 gen_rtx_MEM (XFmode,
6368 gen_rtx_PRE_DEC (BLKmode,
6369 stack_pointer_rtx)),
6370 reg);
6371 RTX_FRAME_RELATED_P (tmp) = 1;
6372 XVECEXP (dwarf, 0, count - i - 1) = tmp;
6373 }
b111229a 6374
2c849145
JM
6375 par = emit_insn (par);
6376 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
6377 REG_NOTES (par));
6378 return par;
e2c671ba
RE
6379}
6380
6381void
6382arm_expand_prologue ()
6383{
6384 int reg;
56636818
JL
6385 rtx amount = GEN_INT (-(get_frame_size ()
6386 + current_function_outgoing_args_size));
e2c671ba
RE
6387 int live_regs_mask = 0;
6388 int store_arg_regs = 0;
949d79eb
RE
6389 /* If this function doesn't return, then there is no need to push
6390 the call-saved regs. */
46406379 6391 int volatile_func = arm_volatile_func ();
2c849145 6392 rtx insn;
e2c671ba 6393
31fdb4d5
DE
6394 /* Naked functions don't have prologues. */
6395 if (arm_naked_function_p (current_function_decl))
6396 return;
6397
e2c671ba
RE
6398 if (current_function_anonymous_args && current_function_pretend_args_size)
6399 store_arg_regs = 1;
6400
6401 if (! volatile_func)
6ed30148
RE
6402 {
6403 for (reg = 0; reg <= 10; reg++)
6404 if (regs_ever_live[reg] && ! call_used_regs[reg])
6405 live_regs_mask |= 1 << reg;
6406
6407 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6408 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
e2c671ba 6409
62b10bbc
NC
6410 if (regs_ever_live[LR_REGNUM])
6411 live_regs_mask |= 1 << LR_REGNUM;
6ed30148 6412 }
e2c671ba
RE
6413
6414 if (frame_pointer_needed)
6415 {
6416 live_regs_mask |= 0xD800;
2c849145
JM
6417 insn = emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
6418 stack_pointer_rtx));
6419 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
6420 }
6421
6422 if (current_function_pretend_args_size)
6423 {
6424 if (store_arg_regs)
2c849145
JM
6425 insn = emit_multi_reg_push
6426 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
e2c671ba 6427 else
2c849145
JM
6428 insn = emit_insn
6429 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
6430 GEN_INT (-current_function_pretend_args_size)));
6431 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
6432 }
6433
6434 if (live_regs_mask)
6435 {
6436 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 6437 we won't get a proper return. */
62b10bbc 6438 live_regs_mask |= 1 << LR_REGNUM;
2c849145
JM
6439 insn = emit_multi_reg_push (live_regs_mask);
6440 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
6441 }
6442
2c849145 6443 /* And now the floating point regs. */
e2c671ba 6444 if (! volatile_func)
b111229a
RE
6445 {
6446 if (arm_fpu_arch == FP_SOFT2)
6447 {
6448 for (reg = 23; reg > 15; reg--)
6449 if (regs_ever_live[reg] && ! call_used_regs[reg])
2c849145
JM
6450 {
6451 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
6452 insn = gen_rtx_MEM (XFmode, insn);
6453 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
6454 gen_rtx_REG (XFmode, reg)));
6455 RTX_FRAME_RELATED_P (insn) = 1;
6456 }
b111229a
RE
6457 }
6458 else
6459 {
6460 int start_reg = 23;
6461
6462 for (reg = 23; reg > 15; reg--)
6463 {
6464 if (regs_ever_live[reg] && ! call_used_regs[reg])
6465 {
6466 if (start_reg - reg == 3)
6467 {
2c849145
JM
6468 insn = emit_sfm (reg, 4);
6469 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
6470 start_reg = reg - 1;
6471 }
6472 }
6473 else
6474 {
6475 if (start_reg != reg)
2c849145
JM
6476 {
6477 insn = emit_sfm (reg + 1, start_reg - reg);
6478 RTX_FRAME_RELATED_P (insn) = 1;
6479 }
b111229a
RE
6480 start_reg = reg - 1;
6481 }
6482 }
6483
6484 if (start_reg != reg)
2c849145
JM
6485 {
6486 insn = emit_sfm (reg + 1, start_reg - reg);
6487 RTX_FRAME_RELATED_P (insn) = 1;
6488 }
b111229a
RE
6489 }
6490 }
e2c671ba
RE
6491
6492 if (frame_pointer_needed)
2c849145
JM
6493 {
6494 insn = GEN_INT (-(4 + current_function_pretend_args_size));
6495 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx,
6496 gen_rtx_REG (SImode, IP_REGNUM),
6497 insn));
6498 RTX_FRAME_RELATED_P (insn) = 1;
6499 }
e2c671ba
RE
6500
6501 if (amount != const0_rtx)
6502 {
2c849145
JM
6503 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
6504 amount));
6505 RTX_FRAME_RELATED_P (insn) = 1;
43cffd11
RE
6506 emit_insn (gen_rtx_CLOBBER (VOIDmode,
6507 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
e2c671ba
RE
6508 }
6509
6510 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
6511 the call to mcount. Similarly if the user has requested no
6512 scheduling in the prolog. */
6513 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba
RE
6514 emit_insn (gen_blockage ());
6515}
6516
cce8749e 6517\f
9997d19d
RE
6518/* If CODE is 'd', then the X is a condition operand and the instruction
6519 should only be executed if the condition is true.
ddd5a7c1 6520 if CODE is 'D', then the X is a condition operand and the instruction
9997d19d
RE
6521 should only be executed if the condition is false: however, if the mode
6522 of the comparison is CCFPEmode, then always execute the instruction -- we
6523 do this because in these circumstances !GE does not necessarily imply LT;
6524 in these cases the instruction pattern will take care to make sure that
6525 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 6526 doing this instruction unconditionally.
9997d19d
RE
6527 If CODE is 'N' then X is a floating point operand that must be negated
6528 before output.
6529 If CODE is 'B' then output a bitwise inverted value of X (a const int).
6530 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
6531
6532void
6533arm_print_operand (stream, x, code)
62b10bbc 6534 FILE * stream;
9997d19d
RE
6535 rtx x;
6536 int code;
6537{
6538 switch (code)
6539 {
6540 case '@':
f3139301 6541 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
6542 return;
6543
6544 case '|':
f3139301 6545 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
6546 return;
6547
6548 case '?':
6549 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
6550 fputs (arm_condition_codes[arm_current_cc], stream);
6551 return;
6552
6553 case 'N':
6554 {
6555 REAL_VALUE_TYPE r;
6556 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6557 r = REAL_VALUE_NEGATE (r);
6558 fprintf (stream, "%s", fp_const_from_val (&r));
6559 }
6560 return;
6561
6562 case 'B':
6563 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
6564 {
6565 HOST_WIDE_INT val;
6566 val = ARM_SIGN_EXTEND (~ INTVAL (x));
36ba9cb8 6567 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6568 }
9997d19d
RE
6569 else
6570 {
6571 putc ('~', stream);
6572 output_addr_const (stream, x);
6573 }
6574 return;
6575
6576 case 'i':
6577 fprintf (stream, "%s", arithmetic_instr (x, 1));
6578 return;
6579
6580 case 'I':
6581 fprintf (stream, "%s", arithmetic_instr (x, 0));
6582 return;
6583
6584 case 'S':
6585 {
6586 HOST_WIDE_INT val;
4bc74ece 6587 char * shift = shift_op (x, & val);
9997d19d 6588
e2c671ba
RE
6589 if (shift)
6590 {
4bc74ece 6591 fprintf (stream, ", %s ", shift_op (x, & val));
e2c671ba
RE
6592 if (val == -1)
6593 arm_print_operand (stream, XEXP (x, 1), 0);
6594 else
4bc74ece
NC
6595 {
6596 fputc ('#', stream);
36ba9cb8 6597 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 6598 }
e2c671ba 6599 }
9997d19d
RE
6600 }
6601 return;
6602
c1c2bc04
RE
6603 case 'Q':
6604 if (REGNO (x) > 15)
6605 abort ();
6606 fputs (REGISTER_PREFIX, stream);
6607 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
6608 return;
6609
9997d19d
RE
6610 case 'R':
6611 if (REGNO (x) > 15)
6612 abort ();
f3139301 6613 fputs (REGISTER_PREFIX, stream);
c1c2bc04 6614 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
9997d19d
RE
6615 return;
6616
6617 case 'm':
f3139301 6618 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
6619 if (GET_CODE (XEXP (x, 0)) == REG)
6620 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
6621 else
6622 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
6623 return;
6624
6625 case 'M':
dd18ae56
NC
6626 asm_fprintf (stream, "{%r-%r}",
6627 REGNO (x), REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
6628 return;
6629
6630 case 'd':
6631 if (x)
6632 fputs (arm_condition_codes[get_arm_condition_code (x)],
6633 stream);
6634 return;
6635
6636 case 'D':
84ed5e79 6637 if (x)
9997d19d
RE
6638 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
6639 (get_arm_condition_code (x))],
6640 stream);
6641 return;
6642
6643 default:
6644 if (x == 0)
6645 abort ();
6646
6647 if (GET_CODE (x) == REG)
1d5473cb 6648 {
f3139301 6649 fputs (REGISTER_PREFIX, stream);
1d5473cb
RE
6650 fputs (reg_names[REGNO (x)], stream);
6651 }
9997d19d
RE
6652 else if (GET_CODE (x) == MEM)
6653 {
6654 output_memory_reference_mode = GET_MODE (x);
6655 output_address (XEXP (x, 0));
6656 }
6657 else if (GET_CODE (x) == CONST_DOUBLE)
6658 fprintf (stream, "#%s", fp_immediate_constant (x));
6659 else if (GET_CODE (x) == NEG)
6354dc9b 6660 abort (); /* This should never happen now. */
9997d19d
RE
6661 else
6662 {
6663 fputc ('#', stream);
6664 output_addr_const (stream, x);
6665 }
6666 }
6667}
cce8749e
CH
6668\f
6669/* A finite state machine takes care of noticing whether or not instructions
6670 can be conditionally executed, and thus decrease execution time and code
6671 size by deleting branch instructions. The fsm is controlled by
6672 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
6673
6674/* The state of the fsm controlling condition codes are:
6675 0: normal, do nothing special
6676 1: make ASM_OUTPUT_OPCODE not output this instruction
6677 2: make ASM_OUTPUT_OPCODE not output this instruction
6678 3: make instructions conditional
6679 4: make instructions conditional
6680
6681 State transitions (state->state by whom under condition):
6682 0 -> 1 final_prescan_insn if the `target' is a label
6683 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
6684 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
6685 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
6686 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
6687 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
6688 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
6689 (the target insn is arm_target_insn).
6690
ff9940b0
RE
6691 If the jump clobbers the conditions then we use states 2 and 4.
6692
6693 A similar thing can be done with conditional return insns.
6694
cce8749e
CH
6695 XXX In case the `target' is an unconditional branch, this conditionalising
6696 of the instructions always reduces code size, but not always execution
6697 time. But then, I want to reduce the code size to somewhere near what
6698 /bin/cc produces. */
6699
cce8749e
CH
6700/* Returns the index of the ARM condition code string in
6701 `arm_condition_codes'. COMPARISON should be an rtx like
6702 `(eq (...) (...))'. */
6703
84ed5e79 6704static enum arm_cond_code
cce8749e
CH
6705get_arm_condition_code (comparison)
6706 rtx comparison;
6707{
5165176d 6708 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
6709 register int code;
6710 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
6711
6712 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 6713 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
6714 XEXP (comparison, 1));
6715
6716 switch (mode)
cce8749e 6717 {
84ed5e79
RE
6718 case CC_DNEmode: code = ARM_NE; goto dominance;
6719 case CC_DEQmode: code = ARM_EQ; goto dominance;
6720 case CC_DGEmode: code = ARM_GE; goto dominance;
6721 case CC_DGTmode: code = ARM_GT; goto dominance;
6722 case CC_DLEmode: code = ARM_LE; goto dominance;
6723 case CC_DLTmode: code = ARM_LT; goto dominance;
6724 case CC_DGEUmode: code = ARM_CS; goto dominance;
6725 case CC_DGTUmode: code = ARM_HI; goto dominance;
6726 case CC_DLEUmode: code = ARM_LS; goto dominance;
6727 case CC_DLTUmode: code = ARM_CC;
6728
6729 dominance:
6730 if (comp_code != EQ && comp_code != NE)
6731 abort ();
6732
6733 if (comp_code == EQ)
6734 return ARM_INVERSE_CONDITION_CODE (code);
6735 return code;
6736
5165176d 6737 case CC_NOOVmode:
84ed5e79 6738 switch (comp_code)
5165176d 6739 {
84ed5e79
RE
6740 case NE: return ARM_NE;
6741 case EQ: return ARM_EQ;
6742 case GE: return ARM_PL;
6743 case LT: return ARM_MI;
5165176d
RE
6744 default: abort ();
6745 }
6746
6747 case CC_Zmode:
6748 case CCFPmode:
84ed5e79 6749 switch (comp_code)
5165176d 6750 {
84ed5e79
RE
6751 case NE: return ARM_NE;
6752 case EQ: return ARM_EQ;
5165176d
RE
6753 default: abort ();
6754 }
6755
6756 case CCFPEmode:
84ed5e79
RE
6757 switch (comp_code)
6758 {
6759 case GE: return ARM_GE;
6760 case GT: return ARM_GT;
6761 case LE: return ARM_LS;
6762 case LT: return ARM_MI;
6763 default: abort ();
6764 }
6765
6766 case CC_SWPmode:
6767 switch (comp_code)
6768 {
6769 case NE: return ARM_NE;
6770 case EQ: return ARM_EQ;
6771 case GE: return ARM_LE;
6772 case GT: return ARM_LT;
6773 case LE: return ARM_GE;
6774 case LT: return ARM_GT;
6775 case GEU: return ARM_LS;
6776 case GTU: return ARM_CC;
6777 case LEU: return ARM_CS;
6778 case LTU: return ARM_HI;
6779 default: abort ();
6780 }
6781
bd9c7e23
RE
6782 case CC_Cmode:
6783 switch (comp_code)
6784 {
6785 case LTU: return ARM_CS;
6786 case GEU: return ARM_CC;
6787 default: abort ();
6788 }
6789
5165176d 6790 case CCmode:
84ed5e79 6791 switch (comp_code)
5165176d 6792 {
84ed5e79
RE
6793 case NE: return ARM_NE;
6794 case EQ: return ARM_EQ;
6795 case GE: return ARM_GE;
6796 case GT: return ARM_GT;
6797 case LE: return ARM_LE;
6798 case LT: return ARM_LT;
6799 case GEU: return ARM_CS;
6800 case GTU: return ARM_HI;
6801 case LEU: return ARM_LS;
6802 case LTU: return ARM_CC;
5165176d
RE
6803 default: abort ();
6804 }
6805
cce8749e
CH
6806 default: abort ();
6807 }
84ed5e79
RE
6808
6809 abort ();
f3bb6135 6810}
cce8749e
CH
6811
6812
6813void
74bbc178 6814arm_final_prescan_insn (insn)
cce8749e 6815 rtx insn;
cce8749e
CH
6816{
6817 /* BODY will hold the body of INSN. */
6818 register rtx body = PATTERN (insn);
6819
6820 /* This will be 1 if trying to repeat the trick, and things need to be
6821 reversed if it appears to fail. */
6822 int reverse = 0;
6823
ff9940b0
RE
6824 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
6825 taken are clobbered, even if the rtl suggests otherwise. It also
6826 means that we have to grub around within the jump expression to find
6827 out what the conditions are when the jump isn't taken. */
6828 int jump_clobbers = 0;
6829
6354dc9b 6830 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
6831 int seeking_return = 0;
6832
cce8749e
CH
6833 /* START_INSN will hold the insn from where we start looking. This is the
6834 first insn after the following code_label if REVERSE is true. */
6835 rtx start_insn = insn;
6836
6837 /* If in state 4, check if the target branch is reached, in order to
6838 change back to state 0. */
6839 if (arm_ccfsm_state == 4)
6840 {
6841 if (insn == arm_target_insn)
f5a1b0d2
NC
6842 {
6843 arm_target_insn = NULL;
6844 arm_ccfsm_state = 0;
6845 }
cce8749e
CH
6846 return;
6847 }
6848
6849 /* If in state 3, it is possible to repeat the trick, if this insn is an
6850 unconditional branch to a label, and immediately following this branch
6851 is the previous target label which is only used once, and the label this
6852 branch jumps to is not too far off. */
6853 if (arm_ccfsm_state == 3)
6854 {
6855 if (simplejump_p (insn))
6856 {
6857 start_insn = next_nonnote_insn (start_insn);
6858 if (GET_CODE (start_insn) == BARRIER)
6859 {
6860 /* XXX Isn't this always a barrier? */
6861 start_insn = next_nonnote_insn (start_insn);
6862 }
6863 if (GET_CODE (start_insn) == CODE_LABEL
6864 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
6865 && LABEL_NUSES (start_insn) == 1)
6866 reverse = TRUE;
6867 else
6868 return;
6869 }
ff9940b0
RE
6870 else if (GET_CODE (body) == RETURN)
6871 {
6872 start_insn = next_nonnote_insn (start_insn);
6873 if (GET_CODE (start_insn) == BARRIER)
6874 start_insn = next_nonnote_insn (start_insn);
6875 if (GET_CODE (start_insn) == CODE_LABEL
6876 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
6877 && LABEL_NUSES (start_insn) == 1)
6878 {
6879 reverse = TRUE;
6880 seeking_return = 1;
6881 }
6882 else
6883 return;
6884 }
cce8749e
CH
6885 else
6886 return;
6887 }
6888
6889 if (arm_ccfsm_state != 0 && !reverse)
6890 abort ();
6891 if (GET_CODE (insn) != JUMP_INSN)
6892 return;
6893
ddd5a7c1 6894 /* This jump might be paralleled with a clobber of the condition codes
ff9940b0
RE
6895 the jump should always come first */
6896 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
6897 body = XVECEXP (body, 0, 0);
6898
6899#if 0
6900 /* If this is a conditional return then we don't want to know */
6901 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
6902 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
6903 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
6904 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
6905 return;
6906#endif
6907
cce8749e
CH
6908 if (reverse
6909 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
6910 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
6911 {
bd9c7e23
RE
6912 int insns_skipped;
6913 int fail = FALSE, succeed = FALSE;
cce8749e
CH
6914 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
6915 int then_not_else = TRUE;
ff9940b0 6916 rtx this_insn = start_insn, label = 0;
cce8749e 6917
ff9940b0 6918 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40
RE
6919 {
6920 /* The code below is wrong for these, and I haven't time to
6921 fix it now. So we just do the safe thing and return. This
6922 whole function needs re-writing anyway. */
6923 jump_clobbers = 1;
6924 return;
6925 }
ff9940b0 6926
cce8749e
CH
6927 /* Register the insn jumped to. */
6928 if (reverse)
ff9940b0
RE
6929 {
6930 if (!seeking_return)
6931 label = XEXP (SET_SRC (body), 0);
6932 }
cce8749e
CH
6933 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
6934 label = XEXP (XEXP (SET_SRC (body), 1), 0);
6935 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
6936 {
6937 label = XEXP (XEXP (SET_SRC (body), 2), 0);
6938 then_not_else = FALSE;
6939 }
ff9940b0
RE
6940 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
6941 seeking_return = 1;
6942 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
6943 {
6944 seeking_return = 1;
6945 then_not_else = FALSE;
6946 }
cce8749e
CH
6947 else
6948 abort ();
6949
6950 /* See how many insns this branch skips, and what kind of insns. If all
6951 insns are okay, and the label or unconditional branch to the same
6952 label is not too far away, succeed. */
6953 for (insns_skipped = 0;
b36ba79f 6954 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
6955 {
6956 rtx scanbody;
6957
6958 this_insn = next_nonnote_insn (this_insn);
6959 if (!this_insn)
6960 break;
6961
cce8749e
CH
6962 switch (GET_CODE (this_insn))
6963 {
6964 case CODE_LABEL:
6965 /* Succeed if it is the target label, otherwise fail since
6966 control falls in from somewhere else. */
6967 if (this_insn == label)
6968 {
ff9940b0
RE
6969 if (jump_clobbers)
6970 {
6971 arm_ccfsm_state = 2;
6972 this_insn = next_nonnote_insn (this_insn);
6973 }
6974 else
6975 arm_ccfsm_state = 1;
cce8749e
CH
6976 succeed = TRUE;
6977 }
6978 else
6979 fail = TRUE;
6980 break;
6981
ff9940b0 6982 case BARRIER:
cce8749e 6983 /* Succeed if the following insn is the target label.
ff9940b0
RE
6984 Otherwise fail.
6985 If return insns are used then the last insn in a function
6354dc9b 6986 will be a barrier. */
cce8749e 6987 this_insn = next_nonnote_insn (this_insn);
ff9940b0 6988 if (this_insn && this_insn == label)
cce8749e 6989 {
ff9940b0
RE
6990 if (jump_clobbers)
6991 {
6992 arm_ccfsm_state = 2;
6993 this_insn = next_nonnote_insn (this_insn);
6994 }
6995 else
6996 arm_ccfsm_state = 1;
cce8749e
CH
6997 succeed = TRUE;
6998 }
6999 else
7000 fail = TRUE;
7001 break;
7002
ff9940b0 7003 case CALL_INSN:
2b835d68 7004 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 7005 calls. */
2b835d68 7006 if (TARGET_APCS_32)
bd9c7e23
RE
7007 {
7008 /* Succeed if the following insn is the target label,
7009 or if the following two insns are a barrier and
7010 the target label. */
7011 this_insn = next_nonnote_insn (this_insn);
7012 if (this_insn && GET_CODE (this_insn) == BARRIER)
7013 this_insn = next_nonnote_insn (this_insn);
7014
7015 if (this_insn && this_insn == label
b36ba79f 7016 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
7017 {
7018 if (jump_clobbers)
7019 {
7020 arm_ccfsm_state = 2;
7021 this_insn = next_nonnote_insn (this_insn);
7022 }
7023 else
7024 arm_ccfsm_state = 1;
7025 succeed = TRUE;
7026 }
7027 else
7028 fail = TRUE;
7029 }
ff9940b0 7030 break;
2b835d68 7031
cce8749e
CH
7032 case JUMP_INSN:
7033 /* If this is an unconditional branch to the same label, succeed.
7034 If it is to another label, do nothing. If it is conditional,
7035 fail. */
914a3b8c 7036 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 7037
ed4c4348 7038 scanbody = PATTERN (this_insn);
ff9940b0
RE
7039 if (GET_CODE (scanbody) == SET
7040 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
7041 {
7042 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
7043 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
7044 {
7045 arm_ccfsm_state = 2;
7046 succeed = TRUE;
7047 }
7048 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
7049 fail = TRUE;
7050 }
b36ba79f
RE
7051 /* Fail if a conditional return is undesirable (eg on a
7052 StrongARM), but still allow this if optimizing for size. */
7053 else if (GET_CODE (scanbody) == RETURN
7054 && ! use_return_insn (TRUE)
7055 && ! optimize_size)
7056 fail = TRUE;
ff9940b0
RE
7057 else if (GET_CODE (scanbody) == RETURN
7058 && seeking_return)
7059 {
7060 arm_ccfsm_state = 2;
7061 succeed = TRUE;
7062 }
7063 else if (GET_CODE (scanbody) == PARALLEL)
7064 {
7065 switch (get_attr_conds (this_insn))
7066 {
7067 case CONDS_NOCOND:
7068 break;
7069 default:
7070 fail = TRUE;
7071 break;
7072 }
7073 }
cce8749e
CH
7074 break;
7075
7076 case INSN:
ff9940b0
RE
7077 /* Instructions using or affecting the condition codes make it
7078 fail. */
ed4c4348 7079 scanbody = PATTERN (this_insn);
74641843
RE
7080 if (! (GET_CODE (scanbody) == SET
7081 || GET_CODE (scanbody) == PARALLEL)
7082 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
7083 fail = TRUE;
7084 break;
7085
7086 default:
7087 break;
7088 }
7089 }
7090 if (succeed)
7091 {
ff9940b0 7092 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 7093 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
7094 else if (seeking_return || arm_ccfsm_state == 2)
7095 {
7096 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
7097 {
7098 this_insn = next_nonnote_insn (this_insn);
7099 if (this_insn && (GET_CODE (this_insn) == BARRIER
7100 || GET_CODE (this_insn) == CODE_LABEL))
7101 abort ();
7102 }
7103 if (!this_insn)
7104 {
7105 /* Oh, dear! we ran off the end.. give up */
7106 recog (PATTERN (insn), insn, NULL_PTR);
7107 arm_ccfsm_state = 0;
abaa26e5 7108 arm_target_insn = NULL;
ff9940b0
RE
7109 return;
7110 }
7111 arm_target_insn = this_insn;
7112 }
cce8749e
CH
7113 else
7114 abort ();
ff9940b0
RE
7115 if (jump_clobbers)
7116 {
7117 if (reverse)
7118 abort ();
7119 arm_current_cc =
7120 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
7121 0), 0), 1));
7122 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
7123 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7124 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
7125 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7126 }
7127 else
7128 {
7129 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
7130 what it was. */
7131 if (!reverse)
7132 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
7133 0));
7134 }
cce8749e 7135
cce8749e
CH
7136 if (reverse || then_not_else)
7137 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7138 }
1ccbefce
RH
7139
7140 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 7141 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 7142 across this call; since the insn has been recognized already we
b020fd92 7143 call recog direct). */
ff9940b0 7144 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 7145 }
f3bb6135 7146}
cce8749e 7147
c27ba912
DM
7148/* Return the length of a function name prefix
7149 that starts with the character 'c'. */
7150static int
7151arm_get_strip_length (char c)
7152{
7153 switch (c)
7154 {
7155 ARM_NAME_ENCODING_LENGTHS
7156 default: return 0;
7157 }
7158}
7159
/* Skip over every encoding prefix at the start of NAME and return a
   pointer to the first character of the real symbol name.  */
const char *
arm_strip_name_encoding (const char * name)
{
  for (;;)
    {
      int skip = arm_get_strip_length (* name);

      if (skip == 0)
	break;

      name += skip;
    }

  return name;
}
7172
2b835d68 7173#ifdef AOF_ASSEMBLER
6354dc9b 7174/* Special functions only needed when producing AOF syntax assembler. */
2b835d68 7175
32de079a
RE
7176rtx aof_pic_label = NULL_RTX;
7177struct pic_chain
7178{
62b10bbc
NC
7179 struct pic_chain * next;
7180 char * symname;
32de079a
RE
7181};
7182
62b10bbc 7183static struct pic_chain * aof_pic_chain = NULL;
32de079a
RE
7184
7185rtx
7186aof_pic_entry (x)
7187 rtx x;
7188{
62b10bbc 7189 struct pic_chain ** chainp;
32de079a
RE
7190 int offset;
7191
7192 if (aof_pic_label == NULL_RTX)
7193 {
92a432f4
RE
7194 /* We mark this here and not in arm_add_gc_roots() to avoid
7195 polluting even more code with ifdefs, and because it never
7196 contains anything useful until we assign to it here. */
7197 ggc_add_rtx_root (&aof_pic_label, 1);
32de079a
RE
7198 /* This needs to persist throughout the compilation. */
7199 end_temporary_allocation ();
43cffd11 7200 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
32de079a
RE
7201 resume_temporary_allocation ();
7202 }
7203
7204 for (offset = 0, chainp = &aof_pic_chain; *chainp;
7205 offset += 4, chainp = &(*chainp)->next)
7206 if ((*chainp)->symname == XSTR (x, 0))
7207 return plus_constant (aof_pic_label, offset);
7208
7209 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
7210 (*chainp)->next = NULL;
7211 (*chainp)->symname = XSTR (x, 0);
7212 return plus_constant (aof_pic_label, offset);
7213}
7214
7215void
7216aof_dump_pic_table (f)
62b10bbc 7217 FILE * f;
32de079a 7218{
62b10bbc 7219 struct pic_chain * chain;
32de079a
RE
7220
7221 if (aof_pic_chain == NULL)
7222 return;
7223
dd18ae56
NC
7224 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
7225 PIC_OFFSET_TABLE_REGNUM,
7226 PIC_OFFSET_TABLE_REGNUM);
32de079a
RE
7227 fputs ("|x$adcons|\n", f);
7228
7229 for (chain = aof_pic_chain; chain; chain = chain->next)
7230 {
7231 fputs ("\tDCD\t", f);
7232 assemble_name (f, chain->symname);
7233 fputs ("\n", f);
7234 }
7235}
7236
2b835d68
RE
7237int arm_text_section_count = 1;
7238
7239char *
84ed5e79 7240aof_text_section ()
2b835d68
RE
7241{
7242 static char buf[100];
2b835d68
RE
7243 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
7244 arm_text_section_count++);
7245 if (flag_pic)
7246 strcat (buf, ", PIC, REENTRANT");
7247 return buf;
7248}
7249
/* Sequence number giving each data AREA a unique name.  */
static int arm_data_section_count = 1;

/* Return the assembler directive that opens a fresh, uniquely named
   data section.  The result lives in a static buffer that is
   overwritten on each call.  */
char *
aof_data_section ()
{
  static char buf[100];

  sprintf (buf, "\tAREA |C$$data%d|, DATA",
	   arm_data_section_count++);
  return buf;
}
7259
7260/* The AOF assembler is religiously strict about declarations of
7261 imported and exported symbols, so that it is impossible to declare
956d6950 7262 a function as imported near the beginning of the file, and then to
2b835d68
RE
7263 export it later on. It is, however, possible to delay the decision
7264 until all the functions in the file have been compiled. To get
7265 around this, we maintain a list of the imports and exports, and
7266 delete from it any that are subsequently defined. At the end of
7267 compilation we spit the remainder of the list out before the END
7268 directive. */
7269
7270struct import
7271{
62b10bbc
NC
7272 struct import * next;
7273 char * name;
2b835d68
RE
7274};
7275
62b10bbc 7276static struct import * imports_list = NULL;
2b835d68
RE
7277
7278void
7279aof_add_import (name)
62b10bbc 7280 char * name;
2b835d68 7281{
62b10bbc 7282 struct import * new;
2b835d68
RE
7283
7284 for (new = imports_list; new; new = new->next)
7285 if (new->name == name)
7286 return;
7287
7288 new = (struct import *) xmalloc (sizeof (struct import));
7289 new->next = imports_list;
7290 imports_list = new;
7291 new->name = name;
7292}
7293
7294void
7295aof_delete_import (name)
62b10bbc 7296 char * name;
2b835d68 7297{
62b10bbc 7298 struct import ** old;
2b835d68
RE
7299
7300 for (old = &imports_list; *old; old = & (*old)->next)
7301 {
7302 if ((*old)->name == name)
7303 {
7304 *old = (*old)->next;
7305 return;
7306 }
7307 }
7308}
7309
7310int arm_main_function = 0;
7311
7312void
7313aof_dump_imports (f)
62b10bbc 7314 FILE * f;
2b835d68
RE
7315{
7316 /* The AOF assembler needs this to cause the startup code to be extracted
7317 from the library. Brining in __main causes the whole thing to work
7318 automagically. */
7319 if (arm_main_function)
7320 {
7321 text_section ();
7322 fputs ("\tIMPORT __main\n", f);
7323 fputs ("\tDCD __main\n", f);
7324 }
7325
7326 /* Now dump the remaining imports. */
7327 while (imports_list)
7328 {
7329 fprintf (f, "\tIMPORT\t");
7330 assemble_name (f, imports_list->name);
7331 fputc ('\n', f);
7332 imports_list = imports_list->next;
7333 }
7334}
7335#endif /* AOF_ASSEMBLER */
This page took 1.625051 seconds and 5 git commands to generate.