/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"

/* Forward definitions of types.  */
typedef struct minipool_node  Mnode;
typedef struct minipool_fixup Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint  HOST_WIDE_INT
#define Mmode enum machine_mode
#define Ulong unsigned long

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static int arm_naked_function_p PARAMS ((tree));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
static const char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static const char * output_multi_immediate PARAMS ((rtx *, const char *, const char *, int, Hint));
static void print_multi_reg PARAMS ((FILE *, const char *, int, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static const char * shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static const char * thumb_condition_code PARAMS ((rtx, int));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
\f
#undef Hint
#undef Mmode
#undef Ulong

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)  /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)  /* Fast multiply */
#define FL_MODE26     (1 << 2)  /* 26-bit mode support */
#define FL_MODE32     (1 << 3)  /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)  /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)  /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)  /* Thumb aware */
#define FL_LDSCHED    (1 << 7)  /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)  /* StrongARM */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
\f
/* Initialization code.  */

struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",        FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",        FL_MODE26 | FL_MODE32 },
  {"arm720",        FL_MODE26 | FL_MODE32 },
  {"arm710c",       FL_MODE26 | FL_MODE32 },
  {"arm7100",       FL_MODE26 | FL_MODE32 },
  {"arm7500",       FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",    FL_CO_PROC | FL_MODE26 },
  { "armv2a",   FL_CO_PROC | FL_MODE26 },
  { "armv3",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,         "-mcpu=",       all_cores },
  { NULL,         "-march=",      all_architectures },
  { NULL,         "-mtune=",      all_cores }
};

/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}

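/* Editor's note (illustrative, not part of the original source): the
   expression (value & -value) isolates the least significant set bit of
   a two's-complement value, so the loop above clears one set bit per
   iteration.  For example, bit_count (0x29) walks
   0x29 -> 0x28 -> 0x20 -> 0x00 and returns 3.  */
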
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int cpu;
        const char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switch that requires certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (! TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used");  */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (! TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && ! TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (! flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */
  /* XXX: What about the minipool tables?  */
}
\f
/* Return 1 if it is possible to return using a single instruction.  */
int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  /* Never use a return instruction before reload has run.  */
  if (! reload_completed
      /* Or if the function is variadic.  */
      || current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || cfun->machine->eh_epilogue_sp_ofs != NULL
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && ! frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && ! call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && ! call_used_regs[regno])
        return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}

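/* Editor's note (illustrative, not part of the original source): when
   use_return_insn returns 1 the epilogue can collapse into the return
   itself.  For a simple leaf function such as

     int add (int a, int b) { return a + b; }

   no registers are stacked and no stack adjustment is needed, so the
   function can return with a single return instruction such as
   "mov pc, lr".  */
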
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~ HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~ HOST_UINT (0xffffffff)) != 0
      && ((i & ~ HOST_UINT (0xffffffff))
          != ((~ HOST_UINT (0))
              & ~ HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                       >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    } while (mask != ~ HOST_UINT (0xFF));

  return FALSE;
}

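/* Editor's example (illustrative, not part of the original source): an
   ARM data-processing immediate is an 8-bit value rotated right by an
   even amount, so the loop above slides an inverted 8-bit window around
   the word two bits at a time.  E.g.:
     const_ok_for_arm (0xFF)       -> TRUE   (8-bit value, rotation 0)
     const_ok_for_arm (0xFF000000) -> TRUE   (0xFF rotated right by 8)
     const_ok_for_arm (0x3FC)      -> TRUE   (0xFF << 2, an even rotation)
     const_ok_for_arm (0x101)      -> FALSE  (the set bits span more than
                                              8 bits for every even
                                              rotation)  */
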
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

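/* Editor's note (illustrative, not part of the original source): an
   operation can absorb a constant that is not itself encodable when a
   related instruction takes its negation or complement; e.g. PLUS with
   i == -1 can be emitted as SUB rD, rN, #1, and AND with i == ~0xFF
   (0xFFFFFF00, not a valid immediate) can become BIC rD, rN, #0xFF.  */
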
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (! after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

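/* Editor's note (illustrative, not part of the original source): before
   arm_reorg has run, a constant that would take too many insns to build
   is emitted here as a single SET of the full value; arm_reorg can later
   push it into a minipool and replace it with a PC-relative load.  After
   arm_reorg that escape hatch is gone, so the constant is synthesised
   in-line whatever the cost.  */
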
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
                                       << (32 - clear_sign_bit_copies))
                                      & HOST_UINT (0xffffffff));

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary; we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }

  return insns;
}

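/* Editor's example (illustrative, not part of the original source): the
   closing loop emits one data-processing insn per 8-bit chunk aligned on
   an even bit position.  A SET of 0x00FF00FF has no single-insn encoding,
   but splits into the valid immediates 0x000000FF and 0x00FF0000, giving
   a two-insn sequence along the lines of:

     mov rD, #0x000000FF
     add rD, rD, #0x00FF0000  */
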
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
                - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (- (i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~ (HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (- (i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}

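/* Editor's example (illustrative, not part of the original source):
   0xFFFFFF is not a valid rotated immediate, so (x > 0xFFFFFF) would
   need the constant built in a register; rewriting it as
   (x >= 0x1000000) uses a power of two that loads in one insn, which is
   why GT/LE move to the adjacent constant i+1 and GE/LT to i-1.  */
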
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (! AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE

  if (int_size_in_bytes (type) > 4)
    /* All structures/unions bigger than one word are returned in memory.  */
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (! DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}

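/* Editor's examples (illustrative, not part of the original source),
   under the default (non-WinCE) APCS rules above:

     struct s1 { char c; };        -- register: fits in one word, single
                                      non-bit-field member
     struct s2 { int i; int j; };  -- memory: bigger than one word
     struct s3 { float f; };       -- memory: first field is a float  */
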
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (! named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
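
/* Editor's note (illustrative, not part of the original source): under
   the APCS the argument registers are r0-r3 (NUM_ARG_REGS of them).  When
   the callee returns an aggregate in memory, arm_init_cumulative_args
   starts pcum->nregs at 1, reserving r0 for the hidden address of the
   return value, so the first named argument is passed in r1.  */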
\f
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,		/* No #pragma [no_]long_calls is in effect.  */
  LONG,		/* #pragma long_calls is in effect.  */
  SHORT		/* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

/* Handle pragmas for compatibility with Intel's compilers.
   FIXME: This is incomplete, since it does not handle all
   the pragmas that the Intel compilers understand.  */
int
arm_process_pragma (p_getc, p_ungetc, pname)
     int (* p_getc) PARAMS ((void)) ATTRIBUTE_UNUSED;
     void (* p_ungetc) PARAMS ((int)) ATTRIBUTE_UNUSED;
     char * pname;
{
  /* Should be pragma 'far' or equivalent for callx/balx here.  */
  if (strcmp (pname, "long_calls") == 0)
    arm_pragma_long_calls = LONG;
  else if (strcmp (pname, "no_long_calls") == 0)
    arm_pragma_long_calls = SHORT;
  else if (strcmp (pname, "long_calls_off") == 0)
    arm_pragma_long_calls = OFF;
  else
    return 0;

  return 1;
}
\f
1637/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
1638 attribute for TYPE. The attributes in ATTRIBUTES have previously been
1639 assigned to TYPE. */
1640int
1641arm_valid_type_attribute_p (type, attributes, identifier, args)
1642 tree type;
1643 tree attributes ATTRIBUTE_UNUSED;
1644 tree identifier;
1645 tree args;
1646{
1647 if ( TREE_CODE (type) != FUNCTION_TYPE
1648 && TREE_CODE (type) != METHOD_TYPE
1649 && TREE_CODE (type) != FIELD_DECL
1650 && TREE_CODE (type) != TYPE_DECL)
1651 return 0;
1652
1653 /* Function calls made to this symbol must be done indirectly, because
1654 it may lie outside of the 26 bit addressing range of a normal function
1655 call. */
1656 if (is_attribute_p ("long_call", identifier))
1657 return (args == NULL_TREE);
c27ba912 1658
82e9d970
PB
1659 /* Whereas these functions are always known to reside within the 26 bit
1660 addressing range. */
1661 if (is_attribute_p ("short_call", identifier))
1662 return (args == NULL_TREE);
1663
1664 return 0;
1665}
1666
1667/* Return 0 if the attributes for two types are incompatible, 1 if they
1668 are compatible, and 2 if they are nearly compatible (which causes a
1669 warning to be generated). */
1670int
1671arm_comp_type_attributes (type1, type2)
1672 tree type1;
1673 tree type2;
1674{
1cb8d58a 1675 int l1, l2, s1, s2;
bd7fc26f 1676
82e9d970
PB
1677 /* Check for mismatch of non-default calling convention. */
1678 if (TREE_CODE (type1) != FUNCTION_TYPE)
1679 return 1;
1680
1681 /* Check for mismatched call attributes. */
1cb8d58a
NC
1682 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
1683 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
1684 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
1685 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
bd7fc26f
NC
1686
1687 /* Only bother to check if an attribute is defined. */
1688 if (l1 | l2 | s1 | s2)
1689 {
1690 /* If one type has an attribute, the other must have the same attribute. */
1cb8d58a 1691 if ((l1 != l2) || (s1 != s2))
bd7fc26f 1692 return 0;
82e9d970 1693
bd7fc26f
NC
1694 /* Disallow mixed attributes. */
1695 if ((l1 & s2) || (l2 & s1))
1696 return 0;
1697 }
1698
1699 return 1;
82e9d970
PB
1700}
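/* For example (a sketch, with hypothetical typedefs): given

       typedef void (*long_fn) (void) __attribute__ ((long_call));
       typedef void (*short_fn) (void) __attribute__ ((short_call));

   assigning a long_fn value to a short_fn pointer sets l1 and s2
   above, so the attribute tests return 0 and the assignment is
   diagnosed as incompatible.  */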
1701
c27ba912
DM
1702/* Encode long_call or short_call attribute by prefixing
1703 symbol name in DECL with a special character FLAG. */
1704void
1705arm_encode_call_attribute (decl, flag)
1706 tree decl;
cd2b33d0 1707 int flag;
c27ba912 1708{
3cce094d 1709 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6354dc9b
NC
1710 int len = strlen (str);
1711 char * newstr;
c27ba912
DM
1712
1713 if (TREE_CODE (decl) != FUNCTION_DECL)
1714 return;
1715
1716 /* Do not allow weak functions to be treated as short call. */
1717 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
1718 return;
1719
1720 if (ggc_p)
1721 newstr = ggc_alloc_string (NULL, len + 2);
1722 else
1723 newstr = permalloc (len + 2);
1724
1725 sprintf (newstr, "%c%s", flag, str);
1726
1727 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
1728}
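/* For instance, with FLAG == SHORT_CALL_FLAG_CHAR a function named
   "foo" has its assembler name rewritten to "<flag>foo", where <flag>
   is whatever character SHORT_CALL_FLAG_CHAR expands to; the
   ENCODED_*_CALL_ATTR_P predicates used elsewhere in this file simply
   test for that leading character.  */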
1729
1730 /* Assign default attributes to a newly defined type. This is used to
1731 set short_call/long_call attributes for function types of
1732 functions defined inside corresponding #pragma scopes. */
1733void
1734arm_set_default_type_attributes (type)
1735 tree type;
1736{
1737 /* Add __attribute__ ((long_call)) to all functions when
1738 inside #pragma long_calls, or __attribute__ ((short_call))
1739 when inside #pragma no_long_calls. */
1740 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1741 {
1742 tree type_attr_list, attr_name;
1743 type_attr_list = TYPE_ATTRIBUTES (type);
1744
1745 if (arm_pragma_long_calls == LONG)
1746 attr_name = get_identifier ("long_call");
1747 else if (arm_pragma_long_calls == SHORT)
1748 attr_name = get_identifier ("short_call");
1749 else
1750 return;
1751
1752 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
1753 TYPE_ATTRIBUTES (type) = type_attr_list;
1754 }
1755}
1756\f
1757/* Return 1 if the operand is a SYMBOL_REF for a function known to be
1758 defined within the current compilation unit. If this cannot be
1759 determined, then 0 is returned. */
1760static int
1761current_file_function_operand (sym_ref)
1762 rtx sym_ref;
1763{
1764 /* This is a bit of a fib. A function will have a short call flag
1765 applied to its name if it has the short call attribute, or it has
1766 already been defined within the current compilation unit. */
1767 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
1768 return 1;
1769
1770 /* The current function is always defined within the current compilation
1771 unit. If it is a weak definition, however, then this may not be the real
1772 definition of the function, and so we have to say no. */
1773 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
1774 && ! DECL_WEAK (current_function_decl))
1775 return 1;
1776
1777 /* We cannot make the determination - default to returning 0. */
1778 return 0;
1779}
1780
1781/* Return non-zero if a 32 bit "long_call" should be generated for
1782 this call. We generate a long_call if the function:
1783
1784 a. has an __attribute__ ((long_call))
1785 or b. is within the scope of a #pragma long_calls
1786 or c. the -mlong-calls command line switch has been specified
1787
1788 However we do not generate a long call if the function:
1789
1790 d. has an __attribute__ ((short_call))
1791 or e. is inside the scope of a #pragma no_long_calls
1792 or f. has an __attribute__ ((section))
1793 or g. is defined within the current compilation unit.
1794
1795 This function will be called by C fragments contained in the machine
1796 description file. CALL_REF and CALL_COOKIE correspond to the matched
1797 rtl operands. CALL_SYMBOL is used to distinguish between
1798 two different callers of the function. It is set to 1 in the
1799 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
1800 and "call_value" patterns. This is because of the difference in the
1801 SYM_REFs passed by these patterns. */
1802int
1803arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
1804 rtx sym_ref;
1805 int call_cookie;
1806 int call_symbol;
1807{
1808 if (! call_symbol)
1809 {
1810 if (GET_CODE (sym_ref) != MEM)
1811 return 0;
1812
1813 sym_ref = XEXP (sym_ref, 0);
1814 }
1815
1816 if (GET_CODE (sym_ref) != SYMBOL_REF)
1817 return 0;
1818
1819 if (call_cookie & CALL_SHORT)
1820 return 0;
1821
1822 if (TARGET_LONG_CALLS && flag_function_sections)
1823 return 1;
1824
1825 if (current_file_function_operand (sym_ref))
1826 return 0;
1827
1828 return (call_cookie & CALL_LONG)
1829 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
1830 || TARGET_LONG_CALLS;
1831}
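/* Worked example (a sketch): with -mlong-calls in effect, a call to an
   extern function "bar" not yet defined in this unit reaches the final
   return above with (call_cookie & CALL_LONG) clear but
   TARGET_LONG_CALLS set, so a 32 bit long call is generated; once "bar"
   has been defined, current_file_function_operand recognizes it and the
   call reverts to a normal BL.  */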
82e9d970 1832\f
32de079a
RE
1833int
1834legitimate_pic_operand_p (x)
1835 rtx x;
1836{
d5b7b3ae
RE
1837 if (CONSTANT_P (x)
1838 && flag_pic
32de079a
RE
1839 && (GET_CODE (x) == SYMBOL_REF
1840 || (GET_CODE (x) == CONST
1841 && GET_CODE (XEXP (x, 0)) == PLUS
1842 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1843 return 0;
1844
1845 return 1;
1846}
1847
1848rtx
1849legitimize_pic_address (orig, mode, reg)
1850 rtx orig;
1851 enum machine_mode mode;
1852 rtx reg;
1853{
1854 if (GET_CODE (orig) == SYMBOL_REF)
1855 {
1856 rtx pic_ref, address;
1857 rtx insn;
1858 int subregs = 0;
1859
1860 if (reg == 0)
1861 {
1862 if (reload_in_progress || reload_completed)
1863 abort ();
1864 else
1865 reg = gen_reg_rtx (Pmode);
1866
1867 subregs = 1;
1868 }
1869
1870#ifdef AOF_ASSEMBLER
1871 /* The AOF assembler can generate relocations for these directly, and
6354dc9b 1872 understands that the PIC register has to be added into the offset. */
32de079a
RE
1873 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
1874#else
1875 if (subregs)
1876 address = gen_reg_rtx (Pmode);
1877 else
1878 address = reg;
1879
1880 emit_insn (gen_pic_load_addr (address, orig));
1881
43cffd11
RE
1882 pic_ref = gen_rtx_MEM (Pmode,
1883 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1884 address));
32de079a
RE
1885 RTX_UNCHANGING_P (pic_ref) = 1;
1886 insn = emit_move_insn (reg, pic_ref);
1887#endif
1888 current_function_uses_pic_offset_table = 1;
1889 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1890 by loop. */
43cffd11
RE
1891 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
1892 REG_NOTES (insn));
32de079a
RE
1893 return reg;
1894 }
1895 else if (GET_CODE (orig) == CONST)
1896 {
1897 rtx base, offset;
1898
1899 if (GET_CODE (XEXP (orig, 0)) == PLUS
1900 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
1901 return orig;
1902
1903 if (reg == 0)
1904 {
1905 if (reload_in_progress || reload_completed)
1906 abort ();
1907 else
1908 reg = gen_reg_rtx (Pmode);
1909 }
1910
1911 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1912 {
1913 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
1914 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1915 base == reg ? 0 : reg);
1916 }
1917 else
1918 abort ();
1919
1920 if (GET_CODE (offset) == CONST_INT)
1921 {
1922 /* The base register doesn't really matter; we only want to
1923 test the index for the appropriate mode. */
1924 GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
1925
1926 if (! reload_in_progress && ! reload_completed)
1927 offset = force_reg (Pmode, offset);
1928 else
1929 abort ();
1930
1931 win:
1932 if (GET_CODE (offset) == CONST_INT)
1933 return plus_constant_for_output (base, INTVAL (offset));
1934 }
1935
1936 if (GET_MODE_SIZE (mode) > 4
1937 && (GET_MODE_CLASS (mode) == MODE_INT
1938 || TARGET_SOFT_FLOAT))
1939 {
1940 emit_insn (gen_addsi3 (reg, base, offset));
1941 return reg;
1942 }
1943
43cffd11 1944 return gen_rtx_PLUS (Pmode, base, offset);
32de079a
RE
1945 }
1946 else if (GET_CODE (orig) == LABEL_REF)
82e9d970
PB
1947 {
1948 current_function_uses_pic_offset_table = 1;
1949
1950 if (NEED_GOT_RELOC)
d5b7b3ae
RE
1951 {
1952 rtx pic_ref, address = gen_reg_rtx (Pmode);
1953
1954 emit_insn (gen_pic_load_addr (address, orig));
1955 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
1956
1957 emit_move_insn (address, pic_ref);
1958 return address;
1959 }
82e9d970 1960 }
32de079a
RE
1961
1962 return orig;
1963}
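/* Illustrative RTL sketch (register numbers are arbitrary): for
   "extern int x;" compiled with -fpic, the SYMBOL_REF arm above emits
   roughly

       (set (reg A) (symbol_ref "x"))                 ; gen_pic_load_addr
       (set (reg D) (mem (plus (reg PIC) (reg A))))   ; the GOT load

   where (reg PIC) is pic_offset_table_rtx, the MEM is marked
   RTX_UNCHANGING_P, and a REG_EQUAL note of the original SYMBOL_REF is
   attached so that loop can optimize the load.  */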
1964
1965static rtx pic_rtx;
1966
1967int
62b10bbc 1968is_pic (x)
32de079a
RE
1969 rtx x;
1970{
1971 if (x == pic_rtx)
1972 return 1;
1973 return 0;
1974}
1975
1976void
1977arm_finalize_pic ()
1978{
1979#ifndef AOF_ASSEMBLER
1980 rtx l1, pic_tmp, pic_tmp2, seq;
1981 rtx global_offset_table;
1982
ed0e6530 1983 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
32de079a
RE
1984 return;
1985
1986 if (! flag_pic)
1987 abort ();
1988
1989 start_sequence ();
1990 l1 = gen_label_rtx ();
1991
43cffd11 1992 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
dfa08768 1993 /* On the ARM the PC register contains 'dot + 8' at the time of the
d5b7b3ae
RE
1994 addition, on the Thumb it is 'dot + 4'. */
1995 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
84306176
PB
1996 if (GOT_PCREL)
1997 pic_tmp2 = gen_rtx_CONST (VOIDmode,
43cffd11 1998 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
84306176
PB
1999 else
2000 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
43cffd11
RE
2001
2002 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
f5a1b0d2 2003
32de079a 2004 emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
d5b7b3ae
RE
2005 if (TARGET_ARM)
2006 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2007 else
2008 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
32de079a
RE
2009
2010 seq = gen_sequence ();
2011 end_sequence ();
2012 emit_insn_after (seq, get_insns ());
2013
2014 /* Need to emit this whether or not we obey regdecls,
2015 since setjmp/longjmp can cause life info to screw up. */
43cffd11 2016 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
32de079a
RE
2017#endif /* AOF_ASSEMBLER */
2018}
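/* The sequence emitted above corresponds roughly to this assembly
   (ARM state, where the pc reads as 'dot + 8'):

       ldr     rPIC, =_GLOBAL_OFFSET_TABLE_ - (L1 + 8)
   L1: add     rPIC, pc, rPIC

   with rPIC standing for pic_offset_table_rtx; on Thumb the constant
   uses 'dot + 4' instead.  */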
2019
e2c671ba
RE
2020#define REG_OR_SUBREG_REG(X) \
2021 (GET_CODE (X) == REG \
2022 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2023
2024#define REG_OR_SUBREG_RTX(X) \
2025 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2026
d5b7b3ae
RE
2027#ifndef COSTS_N_INSNS
2028#define COSTS_N_INSNS(N) ((N) * 4 - 2)
2029#endif
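/* With this fallback definition COSTS_N_INSNS (1) == 2 and each extra
   instruction adds 4, so COSTS_N_INSNS (3) == 10; the values returned
   below assume that weighting.  */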
e2c671ba
RE
2030
2031int
d5b7b3ae 2032arm_rtx_costs (x, code, outer)
e2c671ba 2033 rtx x;
74bbc178 2034 enum rtx_code code;
d5b7b3ae 2035 enum rtx_code outer;
e2c671ba
RE
2036{
2037 enum machine_mode mode = GET_MODE (x);
2038 enum rtx_code subcode;
2039 int extra_cost;
2040
d5b7b3ae
RE
2041 if (TARGET_THUMB)
2042 {
2043 switch (code)
2044 {
2045 case ASHIFT:
2046 case ASHIFTRT:
2047 case LSHIFTRT:
2048 case ROTATERT:
2049 case PLUS:
2050 case MINUS:
2051 case COMPARE:
2052 case NEG:
2053 case NOT:
2054 return COSTS_N_INSNS (1);
2055
2056 case MULT:
2057 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2058 {
2059 int cycles = 0;
2060 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2061
2062 while (i)
2063 {
2064 i >>= 2;
2065 cycles ++;
2066 }
2067 return COSTS_N_INSNS (2) + cycles;
2068 }
2069 return COSTS_N_INSNS (1) + 16;
2070
2071 case SET:
2072 return (COSTS_N_INSNS (1)
2073 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2074 + (GET_CODE (SET_DEST (x)) == MEM)));
2075
2076 case CONST_INT:
2077 if (outer == SET)
2078 {
2079 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2080 return 0;
2081 if (thumb_shiftable_const (INTVAL (x)))
2082 return COSTS_N_INSNS (2);
2083 return COSTS_N_INSNS (3);
2084 }
2085 else if (outer == PLUS
2086 && INTVAL (x) < 256 && INTVAL (x) > -256)
2087 return 0;
2088 else if (outer == COMPARE
2089 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2090 return 0;
2091 else if (outer == ASHIFT || outer == ASHIFTRT
2092 || outer == LSHIFTRT)
2093 return 0;
2094 return COSTS_N_INSNS (2);
2095
2096 case CONST:
2097 case CONST_DOUBLE:
2098 case LABEL_REF:
2099 case SYMBOL_REF:
2100 return COSTS_N_INSNS (3);
2101
2102 case UDIV:
2103 case UMOD:
2104 case DIV:
2105 case MOD:
2106 return 100;
2107
2108 case TRUNCATE:
2109 return 99;
2110
2111 case AND:
2112 case XOR:
2113 case IOR:
2114 /* XXX guess. */
2115 return 8;
2116
2117 case ADDRESSOF:
2118 case MEM:
2119 /* XXX another guess. */
2120 /* Memory costs quite a lot for the first word, but subsequent words
2121 load at the equivalent of a single insn each. */
2122 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2123 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2124
2125 case IF_THEN_ELSE:
2126 /* XXX a guess. */
2127 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2128 return 14;
2129 return 2;
2130
2131 case ZERO_EXTEND:
2132 /* XXX still guessing. */
2133 switch (GET_MODE (XEXP (x, 0)))
2134 {
2135 case QImode:
2136 return (1 + (mode == DImode ? 4 : 0)
2137 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2138
2139 case HImode:
2140 return (4 + (mode == DImode ? 4 : 0)
2141 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2142
2143 case SImode:
2144 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2145
2146 default:
2147 return 99;
2148 }
2149
2150 default:
2151 return 99;
2152#if 0
2153 case FFS:
2154 case FLOAT:
2155 case FIX:
2156 case UNSIGNED_FIX:
2157 /* XXX guess */
2158 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2159 rtx_name[code]);
2160 abort ();
2161#endif
2162 }
2163 }
2164
e2c671ba
RE
2165 switch (code)
2166 {
2167 case MEM:
2168 /* Memory costs quite a lot for the first word, but subsequent words
2169 load at the equivalent of a single insn each. */
2170 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2171 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2172
2173 case DIV:
2174 case MOD:
2175 return 100;
2176
2177 case ROTATE:
2178 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2179 return 4;
2180 /* Fall through */
2181 case ROTATERT:
2182 if (mode != SImode)
2183 return 8;
2184 /* Fall through */
2185 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2186 if (mode == DImode)
2187 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2188 + ((GET_CODE (XEXP (x, 0)) == REG
2189 || (GET_CODE (XEXP (x, 0)) == SUBREG
2190 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2191 ? 0 : 8));
2192 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2193 || (GET_CODE (XEXP (x, 0)) == SUBREG
2194 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2195 ? 0 : 4)
2196 + ((GET_CODE (XEXP (x, 1)) == REG
2197 || (GET_CODE (XEXP (x, 1)) == SUBREG
2198 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2199 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2200 ? 0 : 4));
2201
2202 case MINUS:
2203 if (mode == DImode)
2204 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2205 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2206 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2207 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2208 ? 0 : 8));
2209
2210 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2211 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2212 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2213 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2214 ? 0 : 8)
2215 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2216 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2217 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2218 ? 0 : 8));
2219
2220 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2221 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2222 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2223 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2224 || subcode == ASHIFTRT || subcode == LSHIFTRT
2225 || subcode == ROTATE || subcode == ROTATERT
2226 || (subcode == MULT
2227 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2228 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2229 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2230 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2231 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2232 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2233 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2234 return 1;
2235 /* Fall through */
2236
2237 case PLUS:
2238 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2239 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2240 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2241 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2242 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2243 ? 0 : 8));
2244
2245 /* Fall through */
2246 case AND: case XOR: case IOR:
2247 extra_cost = 0;
2248
2249 /* Normally the frame registers will be split into reg+const during
2250 reload, so it is a bad idea to combine them with other instructions,
2251 since then they might not be moved outside of loops. As a compromise
2252 we allow integration with ops that have a constant as their second
2253 operand. */
2254 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2255 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2256 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2257 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2258 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2259 extra_cost = 4;
2260
2261 if (mode == DImode)
2262 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2263 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2264 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2265 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2266 ? 0 : 8));
2267
2268 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2269 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2270 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2271 || (GET_CODE (XEXP (x, 1)) == CONST_INT
74bbc178 2272 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
e2c671ba
RE
2273 ? 0 : 4));
2274
2275 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2276 return (1 + extra_cost
2277 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2278 || subcode == LSHIFTRT || subcode == ASHIFTRT
2279 || subcode == ROTATE || subcode == ROTATERT
2280 || (subcode == MULT
2281 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2282 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 2283 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
2284 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2285 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 2286 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
2287 ? 0 : 4));
2288
2289 return 8;
2290
2291 case MULT:
b111229a 2292 /* There is no point basing this on the tuning, since it is always the
6354dc9b 2293 fast variant if it exists at all. */
2b835d68
RE
2294 if (arm_fast_multiply && mode == DImode
2295 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2296 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2297 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2298 return 8;
2299
e2c671ba
RE
2300 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2301 || mode == DImode)
2302 return 30;
2303
2304 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2305 {
2b835d68 2306 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
e5951263 2307 & HOST_UINT (0xffffffff));
e2c671ba
RE
2308 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2309 int j;
6354dc9b
NC
2310
2311 /* Tune as appropriate. */
aec3cfba 2312 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2a5307b1 2313
2b835d68 2314 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 2315 {
2b835d68 2316 i >>= booth_unit_size;
e2c671ba
RE
2317 add_cost += 2;
2318 }
2319
2320 return add_cost;
2321 }
2322
aec3cfba 2323 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 2324 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
2325 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2326
56636818
JL
2327 case TRUNCATE:
2328 if (arm_fast_multiply && mode == SImode
2329 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2330 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2331 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2332 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2333 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2334 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2335 return 8;
2336 return 99;
2337
e2c671ba
RE
2338 case NEG:
2339 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2340 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2341 /* Fall through */
2342 case NOT:
2343 if (mode == DImode)
2344 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2345
2346 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2347
2348 case IF_THEN_ELSE:
2349 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2350 return 14;
2351 return 2;
2352
2353 case COMPARE:
2354 return 1;
2355
2356 case ABS:
2357 return 4 + (mode == DImode ? 4 : 0);
2358
2359 case SIGN_EXTEND:
2360 if (GET_MODE (XEXP (x, 0)) == QImode)
2361 return (4 + (mode == DImode ? 4 : 0)
2362 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2363 /* Fall through */
2364 case ZERO_EXTEND:
2365 switch (GET_MODE (XEXP (x, 0)))
2366 {
2367 case QImode:
2368 return (1 + (mode == DImode ? 4 : 0)
2369 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2370
2371 case HImode:
2372 return (4 + (mode == DImode ? 4 : 0)
2373 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2374
2375 case SImode:
2376 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
2377
2378 default:
2379 break;
e2c671ba
RE
2380 }
2381 abort ();
2382
d5b7b3ae
RE
2383 case CONST_INT:
2384 if (const_ok_for_arm (INTVAL (x)))
2385 return outer == SET ? 2 : -1;
2386 else if (outer == AND
2387 && const_ok_for_arm (~ INTVAL (x)))
2388 return -1;
2389 else if ((outer == COMPARE
2390 || outer == PLUS || outer == MINUS)
2391 && const_ok_for_arm (- INTVAL (x)))
2392 return -1;
2393 else
2394 return 5;
2395
2396 case CONST:
2397 case LABEL_REF:
2398 case SYMBOL_REF:
2399 return 6;
2400
2401 case CONST_DOUBLE:
2402 if (const_double_rtx_ok_for_fpu (x))
2403 return outer == SET ? 2 : -1;
2404 else if ((outer == COMPARE || outer == PLUS)
2405 && neg_const_double_rtx_ok_for_fpu (x))
2406 return -1;
2407 return 7;
2408
e2c671ba
RE
2409 default:
2410 return 99;
2411 }
2412}
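/* Worked example (a sketch): on Thumb, costing
   (mult (reg) (const_int 255)) walks the constant two bits per
   iteration, so cycles == 4 and the result is COSTS_N_INSNS (2) + 4; a
   full 32 bit constant would need 16 iterations, close to the
   COSTS_N_INSNS (1) + 16 charged for a non-constant multiply.  */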
32de079a
RE
2413
2414int
2415arm_adjust_cost (insn, link, dep, cost)
2416 rtx insn;
2417 rtx link;
2418 rtx dep;
2419 int cost;
2420{
2421 rtx i_pat, d_pat;
2422
6354dc9b 2423 /* XXX This is not strictly true for the FPA. */
d5b7b3ae
RE
2424 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2425 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
b36ba79f
RE
2426 return 0;
2427
d5b7b3ae
RE
2428 /* Call insns don't incur a stall, even if they follow a load. */
2429 if (REG_NOTE_KIND (link) == 0
2430 && GET_CODE (insn) == CALL_INSN)
2431 return 1;
2432
32de079a
RE
2433 if ((i_pat = single_set (insn)) != NULL
2434 && GET_CODE (SET_SRC (i_pat)) == MEM
2435 && (d_pat = single_set (dep)) != NULL
2436 && GET_CODE (SET_DEST (d_pat)) == MEM)
2437 {
2438 /* This is a load after a store, there is no conflict if the load reads
2439 from a cached area. Assume that loads from the stack, and from the
2440 constant pool are cached, and that others will miss. This is a
6354dc9b 2441 hack. */
32de079a 2442
32de079a
RE
2443 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2444 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2445 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2446 || reg_mentioned_p (hard_frame_pointer_rtx,
2447 XEXP (SET_SRC (i_pat), 0)))
949d79eb 2448 return 1;
32de079a
RE
2449 }
2450
2451 return cost;
2452}
2453
6354dc9b 2454/* This code has been fixed for cross compilation. */
ff9940b0
RE
2455
2456static int fpa_consts_inited = 0;
2457
cd2b33d0 2458static const char * strings_fpa[8] =
62b10bbc 2459{
2b835d68
RE
2460 "0", "1", "2", "3",
2461 "4", "5", "0.5", "10"
2462};
ff9940b0
RE
2463
2464static REAL_VALUE_TYPE values_fpa[8];
2465
2466static void
2467init_fpa_table ()
2468{
2469 int i;
2470 REAL_VALUE_TYPE r;
2471
2472 for (i = 0; i < 8; i++)
2473 {
2474 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2475 values_fpa[i] = r;
2476 }
f3bb6135 2477
ff9940b0
RE
2478 fpa_consts_inited = 1;
2479}
2480
6354dc9b 2481/* Return TRUE if rtx X is a valid immediate FPU constant. */
cce8749e
CH
2482
2483int
2484const_double_rtx_ok_for_fpu (x)
2485 rtx x;
2486{
ff9940b0
RE
2487 REAL_VALUE_TYPE r;
2488 int i;
2489
2490 if (!fpa_consts_inited)
2491 init_fpa_table ();
2492
2493 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2494 if (REAL_VALUE_MINUS_ZERO (r))
2495 return 0;
f3bb6135 2496
ff9940b0
RE
2497 for (i = 0; i < 8; i++)
2498 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2499 return 1;
f3bb6135 2500
ff9940b0 2501 return 0;
f3bb6135 2502}
ff9940b0 2503
6354dc9b 2504/* Return TRUE if rtx X is a valid immediate FPU constant. */
ff9940b0
RE
2505
2506int
2507neg_const_double_rtx_ok_for_fpu (x)
2508 rtx x;
2509{
2510 REAL_VALUE_TYPE r;
2511 int i;
2512
2513 if (!fpa_consts_inited)
2514 init_fpa_table ();
2515
2516 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2517 r = REAL_VALUE_NEGATE (r);
2518 if (REAL_VALUE_MINUS_ZERO (r))
2519 return 0;
f3bb6135 2520
ff9940b0
RE
2521 for (i = 0; i < 8; i++)
2522 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2523 return 1;
f3bb6135 2524
ff9940b0 2525 return 0;
f3bb6135 2526}
cce8749e
CH
2527\f
2528/* Predicates for `match_operand' and `match_operator'. */
2529
ff9940b0 2530/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
2531 (SUBREG (MEM)...).
2532
2533 This function exists because at the time it was put in it led to better
2534 code. SUBREG(MEM) always needs a reload in the places where
2535 s_register_operand is used, and this seemed to lead to excessive
2536 reloading. */
ff9940b0
RE
2537
2538int
2539s_register_operand (op, mode)
2540 register rtx op;
2541 enum machine_mode mode;
2542{
2543 if (GET_MODE (op) != mode && mode != VOIDmode)
2544 return 0;
2545
2546 if (GET_CODE (op) == SUBREG)
f3bb6135 2547 op = SUBREG_REG (op);
ff9940b0
RE
2548
2549 /* We don't consider registers whose class is NO_REGS
2550 to be a register operand. */
d5b7b3ae 2551 /* XXX might have to check for lo regs only for thumb ??? */
ff9940b0
RE
2552 return (GET_CODE (op) == REG
2553 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2554 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2555}
2556
e2c671ba
RE
2557/* Only accept reg, subreg(reg), const_int. */
2558
2559int
2560reg_or_int_operand (op, mode)
2561 register rtx op;
2562 enum machine_mode mode;
2563{
2564 if (GET_CODE (op) == CONST_INT)
2565 return 1;
2566
2567 if (GET_MODE (op) != mode && mode != VOIDmode)
2568 return 0;
2569
2570 if (GET_CODE (op) == SUBREG)
2571 op = SUBREG_REG (op);
2572
2573 /* We don't consider registers whose class is NO_REGS
2574 to be a register operand. */
2575 return (GET_CODE (op) == REG
2576 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2577 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2578}
2579
ff9940b0
RE
2580/* Return 1 if OP is an item in memory, given that we are in reload. */
2581
2582int
d5b7b3ae 2583arm_reload_memory_operand (op, mode)
ff9940b0 2584 rtx op;
74bbc178 2585 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0
RE
2586{
2587 int regno = true_regnum (op);
2588
2589 return (! CONSTANT_P (op)
2590 && (regno == -1
2591 || (GET_CODE (op) == REG
2592 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2593}
2594
4d818c85 2595/* Return 1 if OP is a valid memory address, but not valid for a signed byte
d5b7b3ae
RE
2596 memory access (architecture V4).
2597 MODE is QImode if called when computing constraints, or VOIDmode when
2598 emitting patterns. In this latter case we cannot use memory_operand()
2599 because it will fail on badly formed MEMs, which is precisely what we are
2600 trying to catch. */
4d818c85
RE
2601int
2602bad_signed_byte_operand (op, mode)
2603 rtx op;
d5b7b3ae 2604 enum machine_mode mode ATTRIBUTE_UNUSED;
4d818c85 2605{
d5b7b3ae
RE
2606#if 0
2607 if ((mode == QImode && ! memory_operand (op, mode)) || GET_CODE (op) != MEM)
2608 return 0;
2609#endif
2610 if (GET_CODE (op) != MEM)
4d818c85
RE
2611 return 0;
2612
2613 op = XEXP (op, 0);
2614
6354dc9b 2615 /* A sum of anything more complex than reg + reg or reg + const is bad. */
4d818c85 2616 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
9c8cc54f
RE
2617 && (! s_register_operand (XEXP (op, 0), VOIDmode)
2618 || (! s_register_operand (XEXP (op, 1), VOIDmode)
2619 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
4d818c85
RE
2620 return 1;
2621
6354dc9b 2622 /* Big constants are also bad. */
4d818c85
RE
2623 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2624 && (INTVAL (XEXP (op, 1)) > 0xff
2625 || -INTVAL (XEXP (op, 1)) > 0xff))
2626 return 1;
2627
6354dc9b 2628 /* Everything else is good, or will automatically be made so. */
4d818c85
RE
2629 return 0;
2630}
2631
cce8749e
CH
2632/* Return TRUE for valid operands for the rhs of an ARM instruction. */
2633
2634int
2635arm_rhs_operand (op, mode)
2636 rtx op;
2637 enum machine_mode mode;
2638{
ff9940b0 2639 return (s_register_operand (op, mode)
cce8749e 2640 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 2641}
cce8749e 2642
ff9940b0
RE
2643 /* Return TRUE for valid operands for the rhs of an ARM instruction,
2644 or a load. */
2645
2646int
2647arm_rhsm_operand (op, mode)
2648 rtx op;
2649 enum machine_mode mode;
2650{
2651 return (s_register_operand (op, mode)
2652 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2653 || memory_operand (op, mode));
f3bb6135 2654}
ff9940b0
RE
2655
2656/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
2657 constant that is valid when negated. */
2658
2659int
2660arm_add_operand (op, mode)
2661 rtx op;
2662 enum machine_mode mode;
2663{
d5b7b3ae
RE
2664 if (TARGET_THUMB)
2665 return thumb_cmp_operand (op, mode);
2666
ff9940b0
RE
2667 return (s_register_operand (op, mode)
2668 || (GET_CODE (op) == CONST_INT
2669 && (const_ok_for_arm (INTVAL (op))
2670 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 2671}
ff9940b0
RE
2672
2673int
2674arm_not_operand (op, mode)
2675 rtx op;
2676 enum machine_mode mode;
2677{
2678 return (s_register_operand (op, mode)
2679 || (GET_CODE (op) == CONST_INT
2680 && (const_ok_for_arm (INTVAL (op))
2681 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 2682}
ff9940b0 2683
5165176d
RE
2684/* Return TRUE if the operand is a memory reference which contains an
2685 offsettable address. */
2686int
2687offsettable_memory_operand (op, mode)
2688 register rtx op;
2689 enum machine_mode mode;
2690{
2691 if (mode == VOIDmode)
2692 mode = GET_MODE (op);
2693
2694 return (mode == GET_MODE (op)
2695 && GET_CODE (op) == MEM
2696 && offsettable_address_p (reload_completed | reload_in_progress,
2697 mode, XEXP (op, 0)));
2698}
2699
2700/* Return TRUE if the operand is a memory reference which is, or can be
2701 made word aligned by adjusting the offset. */
2702int
2703alignable_memory_operand (op, mode)
2704 register rtx op;
2705 enum machine_mode mode;
2706{
2707 rtx reg;
2708
2709 if (mode == VOIDmode)
2710 mode = GET_MODE (op);
2711
2712 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2713 return 0;
2714
2715 op = XEXP (op, 0);
2716
2717 return ((GET_CODE (reg = op) == REG
2718 || (GET_CODE (op) == SUBREG
2719 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2720 || (GET_CODE (op) == PLUS
2721 && GET_CODE (XEXP (op, 1)) == CONST_INT
2722 && (GET_CODE (reg = XEXP (op, 0)) == REG
2723 || (GET_CODE (XEXP (op, 0)) == SUBREG
2724 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
bdb429a5 2725 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
5165176d
RE
2726}
2727
b111229a
RE
2728/* Similar to s_register_operand, but does not allow hard integer
2729 registers. */
2730int
2731f_register_operand (op, mode)
2732 register rtx op;
2733 enum machine_mode mode;
2734{
2735 if (GET_MODE (op) != mode && mode != VOIDmode)
2736 return 0;
2737
2738 if (GET_CODE (op) == SUBREG)
2739 op = SUBREG_REG (op);
2740
2741 /* Only accept pseudos, or hard registers in the FPU
2742 register class. */
2743 return (GET_CODE (op) == REG
2744 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2745 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2746}
2747
cce8749e
CH
2748/* Return TRUE for valid operands for the rhs of an FPU instruction. */
2749
2750int
2751fpu_rhs_operand (op, mode)
2752 rtx op;
2753 enum machine_mode mode;
2754{
ff9940b0 2755 if (s_register_operand (op, mode))
f3bb6135 2756 return TRUE;
9ce71c6f
BS
2757
2758 if (GET_MODE (op) != mode && mode != VOIDmode)
2759 return FALSE;
2760
2761 if (GET_CODE (op) == CONST_DOUBLE)
2762 return const_double_rtx_ok_for_fpu (op);
f3bb6135
RE
2763
2764 return FALSE;
2765}
cce8749e 2766
ff9940b0
RE
2767int
2768fpu_add_operand (op, mode)
2769 rtx op;
2770 enum machine_mode mode;
2771{
2772 if (s_register_operand (op, mode))
f3bb6135 2773 return TRUE;
9ce71c6f
BS
2774
2775 if (GET_MODE (op) != mode && mode != VOIDmode)
2776 return FALSE;
2777
2778 if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
2779 return (const_double_rtx_ok_for_fpu (op)
2780 || neg_const_double_rtx_ok_for_fpu (op));
2781
2782 return FALSE;
ff9940b0
RE
2783}
2784
cce8749e
CH
2785/* Return nonzero if OP is a constant power of two. */
2786
2787int
2788power_of_two_operand (op, mode)
2789 rtx op;
74bbc178 2790 enum machine_mode mode ATTRIBUTE_UNUSED;
cce8749e
CH
2791{
2792 if (GET_CODE (op) == CONST_INT)
2793 {
d5b7b3ae 2794 HOST_WIDE_INT value = INTVAL (op);
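      /* A power of two has exactly one bit set, so ANDing with
	 value - 1 clears it to zero: e.g. 8 & 7 == 0, whereas
	 12 & 11 == 8.  */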
f3bb6135 2795 return value != 0 && (value & (value - 1)) == 0;
cce8749e 2796 }
f3bb6135
RE
2797 return FALSE;
2798}
cce8749e
CH
2799
2800/* Return TRUE for a valid operand of a DImode operation.
e9c6b69b 2801 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
ff9940b0
RE
2802 Note that this disallows MEM(REG+REG), but allows
2803 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
2804
2805int
2806di_operand (op, mode)
2807 rtx op;
2808 enum machine_mode mode;
2809{
ff9940b0 2810 if (s_register_operand (op, mode))
f3bb6135 2811 return TRUE;
cce8749e 2812
9ce71c6f
BS
2813 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2814 return FALSE;
2815
e9c6b69b
NC
2816 if (GET_CODE (op) == SUBREG)
2817 op = SUBREG_REG (op);
2818
cce8749e
CH
2819 switch (GET_CODE (op))
2820 {
2821 case CONST_DOUBLE:
2822 case CONST_INT:
f3bb6135
RE
2823 return TRUE;
2824
cce8749e 2825 case MEM:
f3bb6135
RE
2826 return memory_address_p (DImode, XEXP (op, 0));
2827
cce8749e 2828 default:
f3bb6135 2829 return FALSE;
cce8749e 2830 }
f3bb6135 2831}
cce8749e 2832
d5b7b3ae
RE
2833/* Like di_operand, but don't accept constants. */
2834int
2835nonimmediate_di_operand (op, mode)
2836 rtx op;
2837 enum machine_mode mode;
2838{
2839 if (s_register_operand (op, mode))
2840 return TRUE;
2841
2842 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2843 return FALSE;
2844
2845 if (GET_CODE (op) == SUBREG)
2846 op = SUBREG_REG (op);
2847
2848 if (GET_CODE (op) == MEM)
2849 return memory_address_p (DImode, XEXP (op, 0));
2850
2851 return FALSE;
2852}
2853
f3139301 2854/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
e9c6b69b 2855 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
f3139301
DE
2856 Note that this disallows MEM(REG+REG), but allows
2857 MEM(PRE/POST_INC/DEC(REG)). */
2858
2859int
2860soft_df_operand (op, mode)
2861 rtx op;
2862 enum machine_mode mode;
2863{
2864 if (s_register_operand (op, mode))
2865 return TRUE;
2866
9ce71c6f
BS
2867 if (mode != VOIDmode && GET_MODE (op) != mode)
2868 return FALSE;
2869
37b80d2e
BS
2870 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
2871 return FALSE;
2872
e9c6b69b
NC
2873 if (GET_CODE (op) == SUBREG)
2874 op = SUBREG_REG (op);
9ce71c6f 2875
f3139301
DE
2876 switch (GET_CODE (op))
2877 {
2878 case CONST_DOUBLE:
2879 return TRUE;
2880
2881 case MEM:
2882 return memory_address_p (DFmode, XEXP (op, 0));
2883
2884 default:
2885 return FALSE;
2886 }
2887}
2888
d5b7b3ae
RE
2889/* Like soft_df_operand, but don't accept constants. */
2890int
2891nonimmediate_soft_df_operand (op, mode)
2892 rtx op;
2893 enum machine_mode mode;
2894{
2895 if (s_register_operand (op, mode))
2896 return TRUE;
2897
2898 if (mode != VOIDmode && GET_MODE (op) != mode)
2899 return FALSE;
2900
2901 if (GET_CODE (op) == SUBREG)
2902 op = SUBREG_REG (op);
2903
2904 if (GET_CODE (op) == MEM)
2905 return memory_address_p (DFmode, XEXP (op, 0));
2906 return FALSE;
2907}
cce8749e 2908
d5b7b3ae 2909/* Return TRUE for valid index operands. */
cce8749e
CH
2910int
2911index_operand (op, mode)
2912 rtx op;
2913 enum machine_mode mode;
2914{
d5b7b3ae 2915 return (s_register_operand (op, mode)
ff9940b0 2916 || (immediate_operand (op, mode)
d5b7b3ae
RE
2917 && (GET_CODE (op) != CONST_INT
2918 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
f3bb6135 2919}
cce8749e 2920
ff9940b0
RE
2921/* Return TRUE for valid shifts by a constant. This also accepts any
2922 power of two on the (somewhat overly relaxed) assumption that the
6354dc9b 2923 shift operator in this case was a mult. */
ff9940b0
RE
2924
2925int
2926const_shift_operand (op, mode)
2927 rtx op;
2928 enum machine_mode mode;
2929{
2930 return (power_of_two_operand (op, mode)
2931 || (immediate_operand (op, mode)
d5b7b3ae
RE
2932 && (GET_CODE (op) != CONST_INT
2933 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
f3bb6135 2934}
ff9940b0 2935
cce8749e
CH
2936/* Return TRUE for arithmetic operators which can be combined with a multiply
2937 (shift). */
2938
2939int
2940shiftable_operator (x, mode)
2941 rtx x;
2942 enum machine_mode mode;
2943{
2944 if (GET_MODE (x) != mode)
2945 return FALSE;
2946 else
2947 {
2948 enum rtx_code code = GET_CODE (x);
2949
2950 return (code == PLUS || code == MINUS
2951 || code == IOR || code == XOR || code == AND);
2952 }
f3bb6135 2953}
cce8749e 2954
6ab589e0
JL
2955/* Return TRUE for binary logical operators. */
2956
2957int
2958logical_binary_operator (x, mode)
2959 rtx x;
2960 enum machine_mode mode;
2961{
2962 if (GET_MODE (x) != mode)
2963 return FALSE;
2964 else
2965 {
2966 enum rtx_code code = GET_CODE (x);
2967
2968 return (code == IOR || code == XOR || code == AND);
2969 }
2970}
2971
6354dc9b 2972/* Return TRUE for shift operators. */
cce8749e
CH
2973
2974int
2975shift_operator (x, mode)
2976 rtx x;
2977 enum machine_mode mode;
2978{
2979 if (GET_MODE (x) != mode)
2980 return FALSE;
2981 else
2982 {
2983 enum rtx_code code = GET_CODE (x);
2984
ff9940b0 2985 if (code == MULT)
aec3cfba 2986 return power_of_two_operand (XEXP (x, 1), mode);
f3bb6135 2987
e2c671ba
RE
2988 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2989 || code == ROTATERT);
cce8749e 2990 }
f3bb6135 2991}
ff9940b0 2992
6354dc9b
NC
2993/* Return TRUE if x is EQ or NE. */
2994int
2995equality_operator (x, mode)
f3bb6135 2996 rtx x;
74bbc178 2997 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 2998{
f3bb6135 2999 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
3000}
3001
6354dc9b 3002/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
ff9940b0
RE
3003int
3004minmax_operator (x, mode)
3005 rtx x;
3006 enum machine_mode mode;
3007{
3008 enum rtx_code code = GET_CODE (x);
3009
3010 if (GET_MODE (x) != mode)
3011 return FALSE;
f3bb6135 3012
ff9940b0 3013 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 3014}
ff9940b0 3015
ff9940b0 3016/* Return TRUE if this is the condition code register; if we aren't given
6354dc9b 3017 a mode, accept any class CCmode register. */
ff9940b0
RE
3018int
3019cc_register (x, mode)
f3bb6135
RE
3020 rtx x;
3021 enum machine_mode mode;
ff9940b0
RE
3022{
3023 if (mode == VOIDmode)
3024 {
3025 mode = GET_MODE (x);
d5b7b3ae 3026
ff9940b0
RE
3027 if (GET_MODE_CLASS (mode) != MODE_CC)
3028 return FALSE;
3029 }
f3bb6135 3030
d5b7b3ae
RE
3031 if ( GET_MODE (x) == mode
3032 && GET_CODE (x) == REG
3033 && REGNO (x) == CC_REGNUM)
ff9940b0 3034 return TRUE;
f3bb6135 3035
ff9940b0
RE
3036 return FALSE;
3037}
5bbe2d40
RE
3038
3039 /* Return TRUE if this is the condition code register; if we aren't given
84ed5e79
RE
3040 a mode, accept any class CCmode register which indicates a dominance
3041 expression. */
5bbe2d40 3042int
84ed5e79 3043dominant_cc_register (x, mode)
5bbe2d40
RE
3044 rtx x;
3045 enum machine_mode mode;
3046{
3047 if (mode == VOIDmode)
3048 {
3049 mode = GET_MODE (x);
d5b7b3ae 3050
84ed5e79 3051 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
3052 return FALSE;
3053 }
3054
d5b7b3ae 3055 if ( mode != CC_DNEmode && mode != CC_DEQmode
84ed5e79
RE
3056 && mode != CC_DLEmode && mode != CC_DLTmode
3057 && mode != CC_DGEmode && mode != CC_DGTmode
3058 && mode != CC_DLEUmode && mode != CC_DLTUmode
3059 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3060 return FALSE;
3061
d5b7b3ae 3062 return cc_register (x, mode);
5bbe2d40
RE
3063}
3064
2b835d68
RE
3065/* Return TRUE if X references a SYMBOL_REF. */
3066int
3067symbol_mentioned_p (x)
3068 rtx x;
3069{
6f7d635c 3070 register const char * fmt;
2b835d68
RE
3071 register int i;
3072
3073 if (GET_CODE (x) == SYMBOL_REF)
3074 return 1;
3075
3076 fmt = GET_RTX_FORMAT (GET_CODE (x));
d5b7b3ae 3077
2b835d68
RE
3078 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3079 {
3080 if (fmt[i] == 'E')
3081 {
3082 register int j;
3083
3084 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3085 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3086 return 1;
3087 }
3088 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3089 return 1;
3090 }
3091
3092 return 0;
3093}
3094
3095/* Return TRUE if X references a LABEL_REF. */
3096int
3097label_mentioned_p (x)
3098 rtx x;
3099{
6f7d635c 3100 register const char * fmt;
2b835d68
RE
3101 register int i;
3102
3103 if (GET_CODE (x) == LABEL_REF)
3104 return 1;
3105
3106 fmt = GET_RTX_FORMAT (GET_CODE (x));
3107 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3108 {
3109 if (fmt[i] == 'E')
3110 {
3111 register int j;
3112
3113 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3114 if (label_mentioned_p (XVECEXP (x, i, j)))
3115 return 1;
3116 }
3117 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3118 return 1;
3119 }
3120
3121 return 0;
3122}
3123
ff9940b0
RE
3124enum rtx_code
3125minmax_code (x)
f3bb6135 3126 rtx x;
ff9940b0
RE
3127{
3128 enum rtx_code code = GET_CODE (x);
3129
3130 if (code == SMAX)
3131 return GE;
f3bb6135 3132 else if (code == SMIN)
ff9940b0 3133 return LE;
f3bb6135 3134 else if (code == UMIN)
ff9940b0 3135 return LEU;
f3bb6135 3136 else if (code == UMAX)
ff9940b0 3137 return GEU;
f3bb6135 3138
ff9940b0
RE
3139 abort ();
3140}
3141
6354dc9b 3142/* Return 1 if memory locations are adjacent. */
f3bb6135 3143int
ff9940b0
RE
3144adjacent_mem_locations (a, b)
3145 rtx a, b;
3146{
3147 int val0 = 0, val1 = 0;
3148 int reg0, reg1;
3149
3150 if ((GET_CODE (XEXP (a, 0)) == REG
3151 || (GET_CODE (XEXP (a, 0)) == PLUS
3152 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3153 && (GET_CODE (XEXP (b, 0)) == REG
3154 || (GET_CODE (XEXP (b, 0)) == PLUS
3155 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3156 {
3157 if (GET_CODE (XEXP (a, 0)) == PLUS)
3158 {
3159 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3160 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3161 }
3162 else
3163 reg0 = REGNO (XEXP (a, 0));
3164 if (GET_CODE (XEXP (b, 0)) == PLUS)
3165 {
3166 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3167 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3168 }
3169 else
3170 reg1 = REGNO (XEXP (b, 0));
3171 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3172 }
3173 return 0;
3174}
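/* For example, (mem (reg r4)) and (mem (plus (reg r4) (const_int 4)))
   are adjacent in either order, while references off different base
   registers, or 8 bytes apart, are not.  */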
3175
3176/* Return 1 if OP is a load multiple operation. It is known to be
6354dc9b 3177 parallel and the first section will be tested. */
f3bb6135 3178int
ff9940b0
RE
3179load_multiple_operation (op, mode)
3180 rtx op;
74bbc178 3181 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3182{
f3bb6135 3183 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3184 int dest_regno;
3185 rtx src_addr;
f3bb6135 3186 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3187 rtx elt;
3188
3189 if (count <= 1
3190 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3191 return 0;
3192
6354dc9b 3193 /* Check to see if this might be a write-back. */
ff9940b0
RE
3194 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3195 {
3196 i++;
3197 base = 1;
3198
6354dc9b 3199 /* Now check it more carefully. */
ff9940b0
RE
3200 if (GET_CODE (SET_DEST (elt)) != REG
3201 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3202 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3203 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3204 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3205 return 0;
ff9940b0
RE
3206 }
3207
3208 /* Perform a quick check so we don't blow up below. */
3209 if (count <= i
3210 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3211 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3212 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3213 return 0;
3214
3215 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3216 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3217
3218 for (; i < count; i++)
3219 {
ed4c4348 3220 elt = XVECEXP (op, 0, i);
ff9940b0
RE
3221
3222 if (GET_CODE (elt) != SET
3223 || GET_CODE (SET_DEST (elt)) != REG
3224 || GET_MODE (SET_DEST (elt)) != SImode
6354dc9b 3225 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
ff9940b0
RE
3226 || GET_CODE (SET_SRC (elt)) != MEM
3227 || GET_MODE (SET_SRC (elt)) != SImode
3228 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3229 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3230 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3231 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3232 return 0;
3233 }
3234
3235 return 1;
3236}
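/* For reference, a two-register load multiple without write-back has
   the shape

     (parallel
       [(set (reg:SI rd)   (mem:SI (reg:SI rb)))
        (set (reg:SI rd+1) (mem:SI (plus:SI (reg:SI rb) (const_int 4))))])

   and the write-back form adds a leading
   (set (reg rb) (plus (reg rb) (const_int (count - 1) * 4))).  */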
3237
3238/* Return 1 if OP is a store multiple operation. It is known to be
6354dc9b 3239 parallel and the first section will be tested. */
f3bb6135 3240int
ff9940b0
RE
3241store_multiple_operation (op, mode)
3242 rtx op;
74bbc178 3243 enum machine_mode mode ATTRIBUTE_UNUSED;
ff9940b0 3244{
f3bb6135 3245 HOST_WIDE_INT count = XVECLEN (op, 0);
ff9940b0
RE
3246 int src_regno;
3247 rtx dest_addr;
f3bb6135 3248 HOST_WIDE_INT i = 1, base = 0;
ff9940b0
RE
3249 rtx elt;
3250
3251 if (count <= 1
3252 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3253 return 0;
3254
6354dc9b 3255 /* Check to see if this might be a write-back. */
ff9940b0
RE
3256 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3257 {
3258 i++;
3259 base = 1;
3260
6354dc9b 3261 /* Now check it more carefully. */
ff9940b0
RE
3262 if (GET_CODE (SET_DEST (elt)) != REG
3263 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3264 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3265 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
41e3f998 3266 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
ff9940b0 3267 return 0;
ff9940b0
RE
3268 }
3269
3270 /* Perform a quick check so we don't blow up below. */
3271 if (count <= i
3272 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3273 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3274 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3275 return 0;
3276
3277 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3278 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3279
3280 for (; i < count; i++)
3281 {
3282 elt = XVECEXP (op, 0, i);
3283
3284 if (GET_CODE (elt) != SET
3285 || GET_CODE (SET_SRC (elt)) != REG
3286 || GET_MODE (SET_SRC (elt)) != SImode
6354dc9b 3287 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
ff9940b0
RE
3288 || GET_CODE (SET_DEST (elt)) != MEM
3289 || GET_MODE (SET_DEST (elt)) != SImode
3290 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3291 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3292 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3293 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3294 return 0;
3295 }
3296
3297 return 1;
3298}
e2c671ba 3299
84ed5e79
RE
3300int
3301load_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3302 rtx * operands;
84ed5e79 3303 int nops;
62b10bbc
NC
3304 int * regs;
3305 int * base;
3306 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3307{
3308 int unsorted_regs[4];
3309 HOST_WIDE_INT unsorted_offsets[4];
3310 int order[4];
ad076f4e 3311 int base_reg = -1;
84ed5e79
RE
3312 int i;
3313
3314 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3315 extended if required. */
3316 if (nops < 2 || nops > 4)
3317 abort ();
3318
3319 /* Loop over the operands and check that the memory references are
3320 suitable (ie immediate offsets from the same base register). At
3321 the same time, extract the target register, and the memory
3322 offsets. */
3323 for (i = 0; i < nops; i++)
3324 {
3325 rtx reg;
3326 rtx offset;
3327
56636818
JL
3328 /* Convert a subreg of a mem into the mem itself. */
3329 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3330 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3331
84ed5e79
RE
3332 if (GET_CODE (operands[nops + i]) != MEM)
3333 abort ();
3334
3335 /* Don't reorder volatile memory references; it doesn't seem worth
3336 looking for the case where the order is ok anyway. */
3337 if (MEM_VOLATILE_P (operands[nops + i]))
3338 return 0;
3339
3340 offset = const0_rtx;
3341
3342 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3343 || (GET_CODE (reg) == SUBREG
3344 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3345 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3346 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3347 == REG)
3348 || (GET_CODE (reg) == SUBREG
3349 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3350 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3351 == CONST_INT)))
3352 {
3353 if (i == 0)
3354 {
d5b7b3ae 3355 base_reg = REGNO (reg);
84ed5e79
RE
3356 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3357 ? REGNO (operands[i])
3358 : REGNO (SUBREG_REG (operands[i])));
3359 order[0] = 0;
3360 }
3361 else
3362 {
6354dc9b 3363 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3364 /* Not addressed from the same base register. */
3365 return 0;
3366
3367 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3368 ? REGNO (operands[i])
3369 : REGNO (SUBREG_REG (operands[i])));
3370 if (unsorted_regs[i] < unsorted_regs[order[0]])
3371 order[0] = i;
3372 }
3373
3374 /* If it isn't an integer register, or if it overwrites the
3375 base register but isn't the last insn in the list, then
3376 we can't do this. */
3377 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3378 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3379 return 0;
3380
3381 unsorted_offsets[i] = INTVAL (offset);
3382 }
3383 else
3384 /* Not a suitable memory address. */
3385 return 0;
3386 }
3387
3388 /* All the useful information has now been extracted from the
3389 operands into unsorted_regs and unsorted_offsets; additionally,
3390 order[0] has been set to the lowest numbered register in the
3391 list. Sort the registers into order, and check that the memory
3392 offsets are ascending and adjacent. */
3393
3394 for (i = 1; i < nops; i++)
3395 {
3396 int j;
3397
3398 order[i] = order[i - 1];
3399 for (j = 0; j < nops; j++)
3400 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3401 && (order[i] == order[i - 1]
3402 || unsorted_regs[j] < unsorted_regs[order[i]]))
3403 order[i] = j;
3404
3405 /* Have we found a suitable register? If not, one must be used more
3406 than once. */
3407 if (order[i] == order[i - 1])
3408 return 0;
3409
3410 /* Is the memory address adjacent and ascending? */
3411 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3412 return 0;
3413 }
3414
3415 if (base)
3416 {
3417 *base = base_reg;
3418
3419 for (i = 0; i < nops; i++)
3420 regs[i] = unsorted_regs[order[i]];
3421
3422 *load_offset = unsorted_offsets[order[0]];
3423 }
3424
3425 if (unsorted_offsets[order[0]] == 0)
3426 return 1; /* ldmia */
3427
3428 if (unsorted_offsets[order[0]] == 4)
3429 return 2; /* ldmib */
3430
3431 if (unsorted_offsets[order[nops - 1]] == 0)
3432 return 3; /* ldmda */
3433
3434 if (unsorted_offsets[order[nops - 1]] == -4)
3435 return 4; /* ldmdb */
3436
949d79eb
RE
3437 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3438 if the offset isn't small enough. The reason 2 ldrs are faster
3439 is because these ARMs are able to do more than one cache access
3440 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3441 whilst the ARM8 has a double bandwidth cache. This means that
3442 these cores can do both an instruction fetch and a data fetch in
3443 a single cycle, so the trick of calculating the address into a
3444 scratch register (one of the result regs) and then doing a load
3445 multiple actually becomes slower (and no smaller in code size).
3446 That is the transformation
6cc8c0b3
NC
3447
3448 ldr rd1, [rbase + offset]
3449 ldr rd2, [rbase + offset + 4]
3450
3451 to
3452
3453 add rd1, rbase, offset
3454 ldmia rd1, {rd1, rd2}
3455
949d79eb
RE
3456 produces worse code -- '3 cycles + any stalls on rd2' instead of
3457 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3458 access per cycle, the first sequence could never complete in less
3459 than 6 cycles, whereas the ldm sequence would only take 5 and
3460 would make better use of sequential accesses if not hitting the
3461 cache.
3462
3463 We cheat here and test 'arm_ld_sched' which we currently know to
3464 only be true for the ARM8, ARM9 and StrongARM. If this ever
3465 changes, then the test below needs to be reworked. */
f5a1b0d2 3466 if (nops == 2 && arm_ld_sched)
b36ba79f
RE
3467 return 0;
3468
84ed5e79
RE
3469 /* Can't do it without setting up the offset, only do this if it takes
3470 no more than one insn. */
3471 return (const_ok_for_arm (unsorted_offsets[order[0]])
3472 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3473}
3474
cd2b33d0 3475const char *
84ed5e79 3476emit_ldm_seq (operands, nops)
62b10bbc 3477 rtx * operands;
84ed5e79
RE
3478 int nops;
3479{
3480 int regs[4];
3481 int base_reg;
3482 HOST_WIDE_INT offset;
3483 char buf[100];
3484 int i;
3485
3486 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3487 {
3488 case 1:
3489 strcpy (buf, "ldm%?ia\t");
3490 break;
3491
3492 case 2:
3493 strcpy (buf, "ldm%?ib\t");
3494 break;
3495
3496 case 3:
3497 strcpy (buf, "ldm%?da\t");
3498 break;
3499
3500 case 4:
3501 strcpy (buf, "ldm%?db\t");
3502 break;
3503
3504 case 5:
3505 if (offset >= 0)
3506 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3507 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3508 (long) offset);
3509 else
3510 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3511 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3512 (long) -offset);
3513 output_asm_insn (buf, operands);
3514 base_reg = regs[0];
3515 strcpy (buf, "ldm%?ia\t");
3516 break;
3517
3518 default:
3519 abort ();
3520 }
3521
3522 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3523 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3524
3525 for (i = 1; i < nops; i++)
3526 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3527 reg_names[regs[i]]);
3528
3529 strcat (buf, "}\t%@ phole ldm");
3530
3531 output_asm_insn (buf, operands);
3532 return "";
3533}
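/* For example (a sketch): a two-register sequence with base r0,
   registers r1 and r2, and a zero offset takes case 1 above and builds
   "ldm%?ia\tr0, {r1, r2}\t%@ phole ldm", which prints as
   "ldmia r0, {r1, r2} @ phole ldm" once the %-escapes are expanded
   (%? emits any condition code, %@ the comment character).  */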
3534
3535int
3536store_multiple_sequence (operands, nops, regs, base, load_offset)
62b10bbc 3537 rtx * operands;
84ed5e79 3538 int nops;
62b10bbc
NC
3539 int * regs;
3540 int * base;
3541 HOST_WIDE_INT * load_offset;
84ed5e79
RE
3542{
3543 int unsorted_regs[4];
3544 HOST_WIDE_INT unsorted_offsets[4];
3545 int order[4];
ad076f4e 3546 int base_reg = -1;
84ed5e79
RE
3547 int i;
3548
3549 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3550 extended if required. */
3551 if (nops < 2 || nops > 4)
3552 abort ();
3553
3554 /* Loop over the operands and check that the memory references are
3555 suitable (ie immediate offsets from the same base register). At
3556 the same time, extract the target register, and the memory
3557 offsets. */
3558 for (i = 0; i < nops; i++)
3559 {
3560 rtx reg;
3561 rtx offset;
3562
56636818
JL
3563 /* Convert a subreg of a mem into the mem itself. */
3564 if (GET_CODE (operands[nops + i]) == SUBREG)
d5b7b3ae 3565 operands[nops + i] = alter_subreg (operands[nops + i]);
56636818 3566
84ed5e79
RE
3567 if (GET_CODE (operands[nops + i]) != MEM)
3568 abort ();
3569
3570 /* Don't reorder volatile memory references; it doesn't seem worth
3571 looking for the case where the order is ok anyway. */
3572 if (MEM_VOLATILE_P (operands[nops + i]))
3573 return 0;
3574
3575 offset = const0_rtx;
3576
3577 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3578 || (GET_CODE (reg) == SUBREG
3579 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3580 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3581 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3582 == REG)
3583 || (GET_CODE (reg) == SUBREG
3584 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3585 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3586 == CONST_INT)))
3587 {
3588 if (i == 0)
3589 {
62b10bbc 3590 base_reg = REGNO (reg);
84ed5e79
RE
3591 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3592 ? REGNO (operands[i])
3593 : REGNO (SUBREG_REG (operands[i])));
3594 order[0] = 0;
3595 }
3596 else
3597 {
6354dc9b 3598 if (base_reg != (int) REGNO (reg))
84ed5e79
RE
3599 /* Not addressed from the same base register. */
3600 return 0;
3601
3602 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3603 ? REGNO (operands[i])
3604 : REGNO (SUBREG_REG (operands[i])));
3605 if (unsorted_regs[i] < unsorted_regs[order[0]])
3606 order[0] = i;
3607 }
3608
3609 /* If it isn't an integer register, then we can't do this. */
3610 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3611 return 0;
3612
3613 unsorted_offsets[i] = INTVAL (offset);
3614 }
3615 else
3616 /* Not a suitable memory address. */
3617 return 0;
3618 }
3619
3620 /* All the useful information has now been extracted from the
3621 operands into unsorted_regs and unsorted_offsets; additionally,
3622 order[0] has been set to the lowest numbered register in the
3623 list. Sort the registers into order, and check that the memory
3624 offsets are ascending and adjacent. */
3625
3626 for (i = 1; i < nops; i++)
3627 {
3628 int j;
3629
3630 order[i] = order[i - 1];
3631 for (j = 0; j < nops; j++)
3632 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3633 && (order[i] == order[i - 1]
3634 || unsorted_regs[j] < unsorted_regs[order[i]]))
3635 order[i] = j;
3636
3637	      /* Have we found a suitable register?  If not, one must be used more
3638		 than once.  */
3639 if (order[i] == order[i - 1])
3640 return 0;
3641
3642 /* Is the memory address adjacent and ascending? */
3643 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3644 return 0;
3645 }
3646
3647 if (base)
3648 {
3649 *base = base_reg;
3650
3651 for (i = 0; i < nops; i++)
3652 regs[i] = unsorted_regs[order[i]];
3653
3654 *load_offset = unsorted_offsets[order[0]];
3655 }
3656
3657 if (unsorted_offsets[order[0]] == 0)
3658 return 1; /* stmia */
3659
3660 if (unsorted_offsets[order[0]] == 4)
3661 return 2; /* stmib */
3662
3663 if (unsorted_offsets[order[nops - 1]] == 0)
3664 return 3; /* stmda */
3665
3666 if (unsorted_offsets[order[nops - 1]] == -4)
3667 return 4; /* stmdb */
3668
3669 return 0;
3670}
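
/* A sketch of the return-value mapping above: offsets 0,4,8 from the
   base give 1 (stmia); 4,8,12 give 2 (stmib); -8,-4,0 give 3 (stmda);
   and -12,-8,-4 give 4 (stmdb).  */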
3671
cd2b33d0 3672const char *
84ed5e79 3673emit_stm_seq (operands, nops)
62b10bbc 3674 rtx * operands;
84ed5e79
RE
3675 int nops;
3676{
3677 int regs[4];
3678 int base_reg;
3679 HOST_WIDE_INT offset;
3680 char buf[100];
3681 int i;
3682
3683 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3684 {
3685 case 1:
3686 strcpy (buf, "stm%?ia\t");
3687 break;
3688
3689 case 2:
3690 strcpy (buf, "stm%?ib\t");
3691 break;
3692
3693 case 3:
3694 strcpy (buf, "stm%?da\t");
3695 break;
3696
3697 case 4:
3698 strcpy (buf, "stm%?db\t");
3699 break;
3700
3701 default:
3702 abort ();
3703 }
3704
3705 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3706 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3707
3708 for (i = 1; i < nops; i++)
3709 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3710 reg_names[regs[i]]);
3711
3712 strcat (buf, "}\t%@ phole stm");
3713
3714 output_asm_insn (buf, operands);
3715 return "";
3716}
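
/* Analogous to the emit_ldm_seq sketch above; e.g. a case-1 result
   comes out as

       stmia	r4, {r0, r1, r2, r3}	@ phole stm  */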
3717
e2c671ba
RE
3718int
3719multi_register_push (op, mode)
0a81f500 3720 rtx op;
74bbc178 3721 enum machine_mode mode ATTRIBUTE_UNUSED;
e2c671ba
RE
3722{
3723 if (GET_CODE (op) != PARALLEL
3724 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3725 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3726 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3727 return 0;
3728
3729 return 1;
3730}
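
/* A rough sketch of the only shape this predicate accepts: a PARALLEL
   whose first element is a SET from an UNSPEC with index 2, e.g.

       (parallel [(set (mem ...) (unspec [...] 2))
                  ...])  */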
ff9940b0 3731\f
d7d01975 3732/* Routines for use with attributes. */
f3bb6135 3733
31fdb4d5 3734/* Return nonzero if ATTR is a valid attribute for DECL.
d7d01975
NC
3735 ATTRIBUTES are any existing attributes and ARGS are
3736 the arguments supplied with ATTR.
31fdb4d5
DE
3737
3738 Supported attributes:
3739
d5b7b3ae
RE
3740 naked:
3741	   Don't output any prologue or epilogue code; the user is assumed
3742	   to do the right thing.
3743
3744 interfacearm:
3745 Always assume that this function will be entered in ARM mode,
3746 not Thumb mode, and that the caller wishes to be returned to in
3747 ARM mode. */
31fdb4d5 3748int
74bbc178 3749arm_valid_machine_decl_attribute (decl, attr, args)
31fdb4d5 3750 tree decl;
31fdb4d5
DE
3751 tree attr;
3752 tree args;
3753{
3754 if (args != NULL_TREE)
3755 return 0;
3756
3757 if (is_attribute_p ("naked", attr))
3758 return TREE_CODE (decl) == FUNCTION_DECL;
d5b7b3ae
RE
3759
3760#ifdef ARM_PE
3761 if (is_attribute_p ("interfacearm", attr))
3762 return TREE_CODE (decl) == FUNCTION_DECL;
3763#endif /* ARM_PE */
3764
31fdb4d5
DE
3765 return 0;
3766}
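
/* A usage sketch (the function names are hypothetical): in user code
   these attributes are written as

       void handler (void) __attribute__ ((naked));

   and, on ARM_PE targets,

       void entry (void) __attribute__ ((interfacearm));  */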
3767
3768/* Return non-zero if FUNC is a naked function. */
31fdb4d5
DE
3769static int
3770arm_naked_function_p (func)
3771 tree func;
3772{
3773 tree a;
3774
3775 if (TREE_CODE (func) != FUNCTION_DECL)
3776 abort ();
2e943e99 3777
31fdb4d5
DE
3778 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3779 return a != NULL_TREE;
3780}
f3bb6135 3781\f
6354dc9b 3782/* Routines for use in generating RTL. */
f3bb6135 3783rtx
56636818 3784arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
c6df88cb 3785 in_struct_p, scalar_p)
ff9940b0
RE
3786 int base_regno;
3787 int count;
3788 rtx from;
3789 int up;
3790 int write_back;
56636818
JL
3791 int unchanging_p;
3792 int in_struct_p;
c6df88cb 3793 int scalar_p;
ff9940b0
RE
3794{
3795 int i = 0, j;
3796 rtx result;
3797 int sign = up ? 1 : -1;
56636818 3798 rtx mem;
ff9940b0 3799
43cffd11 3800 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 3801 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 3802 if (write_back)
f3bb6135 3803 {
ff9940b0 3804 XVECEXP (result, 0, 0)
43cffd11
RE
3805 = gen_rtx_SET (GET_MODE (from), from,
3806 plus_constant (from, count * 4 * sign));
ff9940b0
RE
3807 i = 1;
3808 count++;
f3bb6135
RE
3809 }
3810
ff9940b0 3811 for (j = 0; i < count; i++, j++)
f3bb6135 3812 {
43cffd11 3813 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
56636818
JL
3814 RTX_UNCHANGING_P (mem) = unchanging_p;
3815 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3816 MEM_SCALAR_P (mem) = scalar_p;
43cffd11
RE
3817 XVECEXP (result, 0, i)
3818 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
f3bb6135
RE
3819 }
3820
ff9940b0
RE
3821 return result;
3822}
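
/* Roughly, arm_gen_load_multiple (0, 2, from, 1, 1, ...) builds a
   PARALLEL of the form

       (parallel [(set (reg from) (plus (reg from) (const_int 8)))
                  (set (reg:SI 0) (mem:SI (reg from)))
                  (set (reg:SI 1) (mem:SI (plus (reg from) (const_int 4))))])

   which is intended to match the load-multiple patterns in arm.md.  */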
3823
f3bb6135 3824rtx
56636818 3825arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
c6df88cb 3826 in_struct_p, scalar_p)
ff9940b0
RE
3827 int base_regno;
3828 int count;
3829 rtx to;
3830 int up;
3831 int write_back;
56636818
JL
3832 int unchanging_p;
3833 int in_struct_p;
c6df88cb 3834 int scalar_p;
ff9940b0
RE
3835{
3836 int i = 0, j;
3837 rtx result;
3838 int sign = up ? 1 : -1;
56636818 3839 rtx mem;
ff9940b0 3840
43cffd11 3841 result = gen_rtx_PARALLEL (VOIDmode,
41e3f998 3842 rtvec_alloc (count + (write_back ? 1 : 0)));
ff9940b0 3843 if (write_back)
f3bb6135 3844 {
ff9940b0 3845 XVECEXP (result, 0, 0)
43cffd11
RE
3846 = gen_rtx_SET (GET_MODE (to), to,
3847 plus_constant (to, count * 4 * sign));
ff9940b0
RE
3848 i = 1;
3849 count++;
f3bb6135
RE
3850 }
3851
ff9940b0 3852 for (j = 0; i < count; i++, j++)
f3bb6135 3853 {
43cffd11 3854 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
56636818
JL
3855 RTX_UNCHANGING_P (mem) = unchanging_p;
3856 MEM_IN_STRUCT_P (mem) = in_struct_p;
c6df88cb 3857 MEM_SCALAR_P (mem) = scalar_p;
56636818 3858
43cffd11
RE
3859 XVECEXP (result, 0, i)
3860 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
f3bb6135
RE
3861 }
3862
ff9940b0
RE
3863 return result;
3864}
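
/* The mirror image of the load case above: here the PARALLEL sets the
   MEMs from (reg:SI base_regno + j), matching store-multiple.  */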
3865
880e2516
RE
3866int
3867arm_gen_movstrqi (operands)
62b10bbc 3868 rtx * operands;
880e2516
RE
3869{
3870 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
ad076f4e 3871 int i;
880e2516 3872 rtx src, dst;
ad076f4e 3873 rtx st_src, st_dst, fin_src, fin_dst;
880e2516 3874 rtx part_bytes_reg = NULL;
56636818
JL
3875 rtx mem;
3876 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
c6df88cb 3877 int dst_scalar_p, src_scalar_p;
880e2516
RE
3878
3879 if (GET_CODE (operands[2]) != CONST_INT
3880 || GET_CODE (operands[3]) != CONST_INT
3881 || INTVAL (operands[2]) > 64
3882 || INTVAL (operands[3]) & 3)
3883 return 0;
3884
3885 st_dst = XEXP (operands[0], 0);
3886 st_src = XEXP (operands[1], 0);
56636818
JL
3887
3888 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3889 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
c6df88cb 3890 dst_scalar_p = MEM_SCALAR_P (operands[0]);
56636818
JL
3891 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3892 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
c6df88cb 3893 src_scalar_p = MEM_SCALAR_P (operands[1]);
56636818 3894
880e2516
RE
3895 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3896 fin_src = src = copy_to_mode_reg (SImode, st_src);
3897
d5b7b3ae 3898 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
880e2516
RE
3899 out_words_to_go = INTVAL (operands[2]) / 4;
3900 last_bytes = INTVAL (operands[2]) & 3;
3901
3902 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
43cffd11 3903 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
880e2516
RE
3904
3905 for (i = 0; in_words_to_go >= 2; i+=4)
3906 {
bd9c7e23 3907 if (in_words_to_go > 4)
56636818 3908 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
c6df88cb
MM
3909 src_unchanging_p,
3910 src_in_struct_p,
3911 src_scalar_p));
bd9c7e23
RE
3912 else
3913 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
56636818 3914 FALSE, src_unchanging_p,
c6df88cb 3915 src_in_struct_p, src_scalar_p));
bd9c7e23 3916
880e2516
RE
3917 if (out_words_to_go)
3918 {
bd9c7e23 3919 if (out_words_to_go > 4)
56636818
JL
3920 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3921 dst_unchanging_p,
c6df88cb
MM
3922 dst_in_struct_p,
3923 dst_scalar_p));
bd9c7e23
RE
3924 else if (out_words_to_go != 1)
3925 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3926 dst, TRUE,
3927 (last_bytes == 0
56636818
JL
3928 ? FALSE : TRUE),
3929 dst_unchanging_p,
c6df88cb
MM
3930 dst_in_struct_p,
3931 dst_scalar_p));
880e2516
RE
3932 else
3933 {
43cffd11 3934 mem = gen_rtx_MEM (SImode, dst);
56636818
JL
3935 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3936 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 3937 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 3938 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
bd9c7e23
RE
3939 if (last_bytes != 0)
3940 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
880e2516
RE
3941 }
3942 }
3943
3944 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3945 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
3946 }
3947
3948 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
3949 if (out_words_to_go)
62b10bbc
NC
3950 {
3951 rtx sreg;
3952
3953 mem = gen_rtx_MEM (SImode, src);
3954 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3955 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
3956 MEM_SCALAR_P (mem) = src_scalar_p;
3957 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
3958 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
3959
3960 mem = gen_rtx_MEM (SImode, dst);
3961 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3962 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3963 MEM_SCALAR_P (mem) = dst_scalar_p;
3964 emit_move_insn (mem, sreg);
3965 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
3966 in_words_to_go--;
3967
3968 if (in_words_to_go) /* Sanity check */
3969 abort ();
3970 }
880e2516
RE
3971
3972 if (in_words_to_go)
3973 {
3974 if (in_words_to_go < 0)
3975 abort ();
3976
43cffd11 3977 mem = gen_rtx_MEM (SImode, src);
56636818
JL
3978 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3979 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
c6df88cb 3980 MEM_SCALAR_P (mem) = src_scalar_p;
56636818 3981 part_bytes_reg = copy_to_mode_reg (SImode, mem);
880e2516
RE
3982 }
3983
d5b7b3ae
RE
3984 if (last_bytes && part_bytes_reg == NULL)
3985 abort ();
3986
880e2516
RE
3987 if (BYTES_BIG_ENDIAN && last_bytes)
3988 {
3989 rtx tmp = gen_reg_rtx (SImode);
3990
6354dc9b 3991 /* The bytes we want are in the top end of the word. */
bee06f3d
RE
3992 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
3993 GEN_INT (8 * (4 - last_bytes))));
880e2516
RE
3994 part_bytes_reg = tmp;
3995
3996 while (last_bytes)
3997 {
43cffd11 3998 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
56636818
JL
3999 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4000 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4001 MEM_SCALAR_P (mem) = dst_scalar_p;
43cffd11 4002 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
62b10bbc 4003
880e2516
RE
4004 if (--last_bytes)
4005 {
4006 tmp = gen_reg_rtx (SImode);
4007 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4008 part_bytes_reg = tmp;
4009 }
4010 }
4011
4012 }
4013 else
4014 {
d5b7b3ae 4015 if (last_bytes > 1)
880e2516 4016 {
d5b7b3ae 4017 mem = gen_rtx_MEM (HImode, dst);
56636818
JL
4018 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4019 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
c6df88cb 4020 MEM_SCALAR_P (mem) = dst_scalar_p;
d5b7b3ae
RE
4021 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4022 last_bytes -= 2;
4023 if (last_bytes)
880e2516
RE
4024 {
4025 rtx tmp = gen_reg_rtx (SImode);
bd9c7e23 4026
d5b7b3ae
RE
4027 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4028 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
880e2516
RE
4029 part_bytes_reg = tmp;
4030 }
4031 }
d5b7b3ae
RE
4032
4033 if (last_bytes)
4034 {
4035 mem = gen_rtx_MEM (QImode, dst);
4036 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4037 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4038 MEM_SCALAR_P (mem) = dst_scalar_p;
4039 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4040 }
880e2516
RE
4041 }
4042
4043 return 1;
4044}
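
/* A worked sketch: for a 10-byte, word-aligned copy (INTVAL (operands[2])
   == 10) the code above emits a 3-word load-multiple from the source, a
   2-word store-multiple (with write-back) to the destination, and then,
   on a little-endian target, a halfword store of the remaining 2 bytes.  */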
4045
5165176d
RE
4046/* Generate a memory reference for a half word, such that it will be loaded
4047 into the top 16 bits of the word. We can assume that the address is
4048 known to be alignable and of the form reg, or plus (reg, const). */
4049rtx
d5b7b3ae 4050arm_gen_rotated_half_load (memref)
5165176d
RE
4051 rtx memref;
4052{
4053 HOST_WIDE_INT offset = 0;
4054 rtx base = XEXP (memref, 0);
4055
4056 if (GET_CODE (base) == PLUS)
4057 {
4058 offset = INTVAL (XEXP (base, 1));
4059 base = XEXP (base, 0);
4060 }
4061
956d6950 4062 /* If we aren't allowed to generate unaligned addresses, then fail. */
5f1e6755 4063 if (TARGET_MMU_TRAPS
5165176d
RE
4064 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4065 return NULL;
4066
43cffd11 4067 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5165176d
RE
4068
4069 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4070 return base;
4071
43cffd11 4072 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5165176d
RE
4073}
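
/* E.g. on a little-endian target the halfword at offset 2 within an
   aligned word is already in the top 16 bits of the loaded word, so the
   plain word load is returned; for offset 0 the load is wrapped as

       (rotate (mem:SI ...) (const_int 16))  */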
4074
84ed5e79 4075static enum machine_mode
74bbc178 4076select_dominance_cc_mode (x, y, cond_or)
84ed5e79
RE
4077 rtx x;
4078 rtx y;
4079 HOST_WIDE_INT cond_or;
4080{
4081 enum rtx_code cond1, cond2;
4082 int swapped = 0;
4083
4084 /* Currently we will probably get the wrong result if the individual
4085 comparisons are not simple. This also ensures that it is safe to
956d6950 4086 reverse a comparison if necessary. */
84ed5e79
RE
4087 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4088 != CCmode)
4089 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4090 != CCmode))
4091 return CCmode;
4092
4093 if (cond_or)
4094 cond1 = reverse_condition (cond1);
4095
4096 /* If the comparisons are not equal, and one doesn't dominate the other,
4097 then we can't do this. */
4098 if (cond1 != cond2
4099 && ! comparison_dominates_p (cond1, cond2)
4100 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
4101 return CCmode;
4102
4103 if (swapped)
4104 {
4105 enum rtx_code temp = cond1;
4106 cond1 = cond2;
4107 cond2 = temp;
4108 }
4109
4110 switch (cond1)
4111 {
4112 case EQ:
4113 if (cond2 == EQ || ! cond_or)
4114 return CC_DEQmode;
4115
4116 switch (cond2)
4117 {
4118 case LE: return CC_DLEmode;
4119 case LEU: return CC_DLEUmode;
4120 case GE: return CC_DGEmode;
4121 case GEU: return CC_DGEUmode;
ad076f4e 4122 default: break;
84ed5e79
RE
4123 }
4124
4125 break;
4126
4127 case LT:
4128 if (cond2 == LT || ! cond_or)
4129 return CC_DLTmode;
4130 if (cond2 == LE)
4131 return CC_DLEmode;
4132 if (cond2 == NE)
4133 return CC_DNEmode;
4134 break;
4135
4136 case GT:
4137 if (cond2 == GT || ! cond_or)
4138 return CC_DGTmode;
4139 if (cond2 == GE)
4140 return CC_DGEmode;
4141 if (cond2 == NE)
4142 return CC_DNEmode;
4143 break;
4144
4145 case LTU:
4146 if (cond2 == LTU || ! cond_or)
4147 return CC_DLTUmode;
4148 if (cond2 == LEU)
4149 return CC_DLEUmode;
4150 if (cond2 == NE)
4151 return CC_DNEmode;
4152 break;
4153
4154 case GTU:
4155 if (cond2 == GTU || ! cond_or)
4156 return CC_DGTUmode;
4157 if (cond2 == GEU)
4158 return CC_DGEUmode;
4159 if (cond2 == NE)
4160 return CC_DNEmode;
4161 break;
4162
4163 /* The remaining cases only occur when both comparisons are the
4164 same. */
4165 case NE:
4166 return CC_DNEmode;
4167
4168 case LE:
4169 return CC_DLEmode;
4170
4171 case GE:
4172 return CC_DGEmode;
4173
4174 case LEU:
4175 return CC_DLEUmode;
4176
4177 case GEU:
4178 return CC_DGEUmode;
ad076f4e
RE
4179
4180 default:
4181 break;
84ed5e79
RE
4182 }
4183
4184 abort ();
4185}
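
/* A sketch of the dominance rule above, for the cond_or == 0 (AND)
   case: combining an LT test with an LE test of the same values yields
   CC_DLEmode, since an LT result implies the LE result; two EQ tests
   yield CC_DEQmode.  */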
4186
4187enum machine_mode
4188arm_select_cc_mode (op, x, y)
4189 enum rtx_code op;
4190 rtx x;
4191 rtx y;
4192{
4193 /* All floating point compares return CCFP if it is an equality
4194 comparison, and CCFPE otherwise. */
4195 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4196 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
4197
4198 /* A compare with a shifted operand. Because of canonicalization, the
4199 comparison will have to be swapped when we emit the assembler. */
4200 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4201 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4202 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4203 || GET_CODE (x) == ROTATERT))
4204 return CC_SWPmode;
4205
956d6950
JL
4206 /* This is a special case that is used by combine to allow a
4207 comparison of a shifted byte load to be split into a zero-extend
84ed5e79 4208 followed by a comparison of the shifted integer (only valid for
956d6950 4209 equalities and unsigned inequalities). */
84ed5e79
RE
4210 if (GET_MODE (x) == SImode
4211 && GET_CODE (x) == ASHIFT
4212 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4213 && GET_CODE (XEXP (x, 0)) == SUBREG
4214 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4215 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4216 && (op == EQ || op == NE
4217 || op == GEU || op == GTU || op == LTU || op == LEU)
4218 && GET_CODE (y) == CONST_INT)
4219 return CC_Zmode;
4220
4221	  /* For an operation that sets the condition codes as a side-effect,
4222	     the V flag is not set correctly, so we can only use comparisons where
4223	     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
4224	     instead.)  */
4225 if (GET_MODE (x) == SImode
4226 && y == const0_rtx
4227 && (op == EQ || op == NE || op == LT || op == GE)
4228 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4229 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4230 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4231 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4232 || GET_CODE (x) == LSHIFTRT
4233 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4234 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4235 return CC_NOOVmode;
4236
4237 /* A construct for a conditional compare, if the false arm contains
4238 0, then both conditions must be true, otherwise either condition
4239 must be true. Not all conditions are possible, so CCmode is
4240 returned if it can't be done. */
4241 if (GET_CODE (x) == IF_THEN_ELSE
4242 && (XEXP (x, 2) == const0_rtx
4243 || XEXP (x, 2) == const1_rtx)
4244 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4245 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
74bbc178 4246 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
84ed5e79
RE
4247 INTVAL (XEXP (x, 2)));
4248
4249 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4250 return CC_Zmode;
4251
bd9c7e23
RE
4252 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4253 && GET_CODE (x) == PLUS
4254 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4255 return CC_Cmode;
4256
84ed5e79
RE
4257 return CCmode;
4258}
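
/* E.g. (eq (and x y) (const_int 0)) selects CC_NOOVmode, while
   comparing a QImode value for equality selects CC_Zmode.  */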
4259
ff9940b0
RE
4260/* X and Y are two things to compare using CODE. Emit the compare insn and
4261 return the rtx for register 0 in the proper mode. FP means this is a
4262 floating point compare: I don't think that it is needed on the arm. */
4263
4264rtx
d5b7b3ae 4265arm_gen_compare_reg (code, x, y)
ff9940b0
RE
4266 enum rtx_code code;
4267 rtx x, y;
4268{
4269 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
d5b7b3ae 4270 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
ff9940b0 4271
43cffd11
RE
4272 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4273 gen_rtx_COMPARE (mode, x, y)));
ff9940b0
RE
4274
4275 return cc_reg;
4276}
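
/* A minimal usage sketch: a conditional-branch expander would typically
   do

       rtx cc_reg = arm_gen_compare_reg (GT, x, y);

   and then use (gt cc_reg (const_int 0)) as the condition of the
   emitted jump.  */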
4277
0a81f500
RE
4278void
4279arm_reload_in_hi (operands)
62b10bbc 4280 rtx * operands;
0a81f500 4281{
f9cc092a
RE
4282 rtx ref = operands[1];
4283 rtx base, scratch;
4284 HOST_WIDE_INT offset = 0;
4285
4286 if (GET_CODE (ref) == SUBREG)
4287 {
4288 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4289 if (BYTES_BIG_ENDIAN)
4290 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4291 - MIN (UNITS_PER_WORD,
4292 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4293 ref = SUBREG_REG (ref);
4294 }
4295
4296 if (GET_CODE (ref) == REG)
4297 {
4298 /* We have a pseudo which has been spilt onto the stack; there
4299 are two cases here: the first where there is a simple
4300 stack-slot replacement and a second where the stack-slot is
4301 out of range, or is used as a subreg. */
4302 if (reg_equiv_mem[REGNO (ref)])
4303 {
4304 ref = reg_equiv_mem[REGNO (ref)];
4305 base = find_replacement (&XEXP (ref, 0));
4306 }
4307 else
6354dc9b 4308 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4309 base = reg_equiv_address[REGNO (ref)];
4310 }
4311 else
4312 base = find_replacement (&XEXP (ref, 0));
0a81f500 4313
e5e809f4
JL
4314 /* Handle the case where the address is too complex to be offset by 1. */
4315 if (GET_CODE (base) == MINUS
4316 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4317 {
f9cc092a 4318 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
e5e809f4 4319
43cffd11 4320 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
e5e809f4
JL
4321 base = base_plus;
4322 }
f9cc092a
RE
4323 else if (GET_CODE (base) == PLUS)
4324 {
6354dc9b 4325 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4326 HOST_WIDE_INT hi, lo;
4327
4328 offset += INTVAL (XEXP (base, 1));
4329 base = XEXP (base, 0);
4330
6354dc9b 4331 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4332 /* Valid range for lo is -4095 -> 4095 */
4333 lo = (offset >= 0
4334 ? (offset & 0xfff)
4335 : -((-offset) & 0xfff));
4336
4337	      /* Corner case: if lo is the max offset then we would be out of range
4338 once we have added the additional 1 below, so bump the msb into the
4339 pre-loading insn(s). */
4340 if (lo == 4095)
4341 lo &= 0x7ff;
4342
e5951263
NC
4343 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4344 ^ HOST_INT (0x80000000))
4345 - HOST_INT (0x80000000));
f9cc092a
RE
4346
4347 if (hi + lo != offset)
4348 abort ();
4349
4350 if (hi != 0)
4351 {
4352 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4353
4354 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4355 that require more than one insn. */
f9cc092a
RE
4356 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4357 base = base_plus;
4358 offset = lo;
4359 }
4360 }
e5e809f4 4361
f9cc092a
RE
4362 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4363 emit_insn (gen_zero_extendqisi2 (scratch,
4364 gen_rtx_MEM (QImode,
4365 plus_constant (base,
4366 offset))));
43cffd11
RE
4367 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4368 gen_rtx_MEM (QImode,
f9cc092a
RE
4369 plus_constant (base,
4370 offset + 1))));
b3b15f14 4371 if (! BYTES_BIG_ENDIAN)
43cffd11
RE
4372 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4373 gen_rtx_IOR (SImode,
4374 gen_rtx_ASHIFT
4375 (SImode,
4376 gen_rtx_SUBREG (SImode, operands[0], 0),
4377 GEN_INT (8)),
f9cc092a 4378 scratch)));
0a81f500 4379 else
43cffd11
RE
4380 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4381 gen_rtx_IOR (SImode,
f9cc092a 4382 gen_rtx_ASHIFT (SImode, scratch,
43cffd11
RE
4383 GEN_INT (8)),
4384 gen_rtx_SUBREG (SImode, operands[0],
4385 0))));
0a81f500
RE
4386}
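
/* An illustrative little-endian output sequence, assuming a simple base:

       ldrb	scratch, [base, #offset]
       ldrb	out, [base, #offset + 1]
       orr	out, scratch, out, lsl #8  */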
4387
f9cc092a
RE
4388/* Handle storing a half-word to memory during reload by synthesising it as two
4389 byte stores. Take care not to clobber the input values until after we
4390 have moved them somewhere safe. This code assumes that if the DImode
4391 scratch in operands[2] overlaps either the input value or output address
4392 in some way, then that value must die in this insn (we absolutely need
4393 two scratch registers for some corner cases). */
f3bb6135 4394void
af48348a 4395arm_reload_out_hi (operands)
62b10bbc 4396 rtx * operands;
af48348a 4397{
f9cc092a
RE
4398 rtx ref = operands[0];
4399 rtx outval = operands[1];
4400 rtx base, scratch;
4401 HOST_WIDE_INT offset = 0;
4402
4403 if (GET_CODE (ref) == SUBREG)
4404 {
4405 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4406 if (BYTES_BIG_ENDIAN)
4407 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4408 - MIN (UNITS_PER_WORD,
4409 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4410 ref = SUBREG_REG (ref);
4411 }
4412
4413
4414 if (GET_CODE (ref) == REG)
4415 {
4416 /* We have a pseudo which has been spilt onto the stack; there
4417 are two cases here: the first where there is a simple
4418 stack-slot replacement and a second where the stack-slot is
4419 out of range, or is used as a subreg. */
4420 if (reg_equiv_mem[REGNO (ref)])
4421 {
4422 ref = reg_equiv_mem[REGNO (ref)];
4423 base = find_replacement (&XEXP (ref, 0));
4424 }
4425 else
6354dc9b 4426 /* The slot is out of range, or was dressed up in a SUBREG. */
f9cc092a
RE
4427 base = reg_equiv_address[REGNO (ref)];
4428 }
4429 else
4430 base = find_replacement (&XEXP (ref, 0));
4431
4432 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4433
4434 /* Handle the case where the address is too complex to be offset by 1. */
4435 if (GET_CODE (base) == MINUS
4436 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4437 {
4438 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4439
4440 /* Be careful not to destroy OUTVAL. */
4441 if (reg_overlap_mentioned_p (base_plus, outval))
4442 {
4443 /* Updating base_plus might destroy outval, see if we can
4444 swap the scratch and base_plus. */
4445 if (! reg_overlap_mentioned_p (scratch, outval))
4446 {
4447 rtx tmp = scratch;
4448 scratch = base_plus;
4449 base_plus = tmp;
4450 }
4451 else
4452 {
4453 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4454
4455 /* Be conservative and copy OUTVAL into the scratch now,
4456 this should only be necessary if outval is a subreg
4457 of something larger than a word. */
4458 /* XXX Might this clobber base? I can't see how it can,
4459 since scratch is known to overlap with OUTVAL, and
4460 must be wider than a word. */
4461 emit_insn (gen_movhi (scratch_hi, outval));
4462 outval = scratch_hi;
4463 }
4464 }
4465
4466 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4467 base = base_plus;
4468 }
4469 else if (GET_CODE (base) == PLUS)
4470 {
6354dc9b 4471 /* The addend must be CONST_INT, or we would have dealt with it above. */
f9cc092a
RE
4472 HOST_WIDE_INT hi, lo;
4473
4474 offset += INTVAL (XEXP (base, 1));
4475 base = XEXP (base, 0);
4476
6354dc9b 4477 /* Rework the address into a legal sequence of insns. */
f9cc092a
RE
4478 /* Valid range for lo is -4095 -> 4095 */
4479 lo = (offset >= 0
4480 ? (offset & 0xfff)
4481 : -((-offset) & 0xfff));
4482
4483	      /* Corner case: if lo is the max offset then we would be out of range
4484 once we have added the additional 1 below, so bump the msb into the
4485 pre-loading insn(s). */
4486 if (lo == 4095)
4487 lo &= 0x7ff;
4488
e5951263
NC
4489 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4490 ^ HOST_INT (0x80000000))
4491 - HOST_INT (0x80000000));
f9cc092a
RE
4492
4493 if (hi + lo != offset)
4494 abort ();
4495
4496 if (hi != 0)
4497 {
4498 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4499
4500 /* Be careful not to destroy OUTVAL. */
4501 if (reg_overlap_mentioned_p (base_plus, outval))
4502 {
4503 /* Updating base_plus might destroy outval, see if we
4504 can swap the scratch and base_plus. */
4505 if (! reg_overlap_mentioned_p (scratch, outval))
4506 {
4507 rtx tmp = scratch;
4508 scratch = base_plus;
4509 base_plus = tmp;
4510 }
4511 else
4512 {
4513 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4514
4515 /* Be conservative and copy outval into scratch now,
4516 this should only be necessary if outval is a
4517 subreg of something larger than a word. */
4518 /* XXX Might this clobber base? I can't see how it
4519 can, since scratch is known to overlap with
4520 outval. */
4521 emit_insn (gen_movhi (scratch_hi, outval));
4522 outval = scratch_hi;
4523 }
4524 }
4525
4526 /* Get the base address; addsi3 knows how to handle constants
6354dc9b 4527 that require more than one insn. */
f9cc092a
RE
4528 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4529 base = base_plus;
4530 offset = lo;
4531 }
4532 }
af48348a 4533
b5cc037f
RE
4534 if (BYTES_BIG_ENDIAN)
4535 {
f9cc092a
RE
4536 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4537 plus_constant (base, offset + 1)),
4538 gen_rtx_SUBREG (QImode, outval, 0)));
4539 emit_insn (gen_lshrsi3 (scratch,
4540 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4541 GEN_INT (8)));
f9cc092a
RE
4542 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4543 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f
RE
4544 }
4545 else
4546 {
f9cc092a
RE
4547 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4548 gen_rtx_SUBREG (QImode, outval, 0)));
4549 emit_insn (gen_lshrsi3 (scratch,
4550 gen_rtx_SUBREG (SImode, outval, 0),
b5cc037f 4551 GEN_INT (8)));
f9cc092a
RE
4552 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4553 plus_constant (base, offset + 1)),
4554 gen_rtx_SUBREG (QImode, scratch, 0)));
b5cc037f 4555 }
af48348a 4556}
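
/* Similarly, an illustrative little-endian store sequence:

       strb	outval, [base, #offset]
       mov	scratch, outval, lsr #8
       strb	scratch, [base, #offset + 1]  */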
2b835d68 4557\f
d5b7b3ae
RE
4558/* Print a symbolic form of X to the debug file, F. */
4559static void
4560arm_print_value (f, x)
4561 FILE * f;
4562 rtx x;
4563{
4564 switch (GET_CODE (x))
4565 {
4566 case CONST_INT:
4567 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
4568 return;
4569
4570 case CONST_DOUBLE:
4571 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
4572 return;
4573
4574 case CONST_STRING:
4575 fprintf (f, "\"%s\"", XSTR (x, 0));
4576 return;
4577
4578 case SYMBOL_REF:
4579 fprintf (f, "`%s'", XSTR (x, 0));
4580 return;
4581
4582 case LABEL_REF:
4583 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
4584 return;
4585
4586 case CONST:
4587 arm_print_value (f, XEXP (x, 0));
4588 return;
4589
4590 case PLUS:
4591 arm_print_value (f, XEXP (x, 0));
4592 fprintf (f, "+");
4593 arm_print_value (f, XEXP (x, 1));
4594 return;
4595
4596 case PC:
4597 fprintf (f, "pc");
4598 return;
4599
4600 default:
4601 fprintf (f, "????");
4602 return;
4603 }
4604}
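
/* E.g. (const (plus (symbol_ref "foo") (const_int 4))) prints as
   `foo'+0x4.  */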
4605\f
2b835d68 4606/* Routines for manipulation of the constant pool. */
2b835d68 4607
949d79eb
RE
4608/* Arm instructions cannot load a large constant directly into a
4609 register; they have to come from a pc relative load. The constant
4610 must therefore be placed in the addressable range of the pc
4611 relative load. Depending on the precise pc relative load
4612 instruction the range is somewhere between 256 bytes and 4k. This
4613 means that we often have to dump a constant inside a function, and
2b835d68
RE
4614 generate code to branch around it.
4615
949d79eb
RE
4616 It is important to minimize this, since the branches will slow
4617 things down and make the code larger.
2b835d68 4618
949d79eb
RE
4619 Normally we can hide the table after an existing unconditional
4620 branch so that there is no interruption of the flow, but in the
4621 worst case the code looks like this:
2b835d68
RE
4622
4623 ldr rn, L1
949d79eb 4624 ...
2b835d68
RE
4625 b L2
4626 align
4627 L1: .long value
4628 L2:
949d79eb 4629 ...
2b835d68 4630
2b835d68 4631 ldr rn, L3
949d79eb 4632 ...
2b835d68
RE
4633 b L4
4634 align
2b835d68
RE
4635 L3: .long value
4636 L4:
949d79eb
RE
4637 ...
4638
4639 We fix this by performing a scan after scheduling, which notices
4640 which instructions need to have their operands fetched from the
4641 constant table and builds the table.
4642
4643 The algorithm starts by building a table of all the constants that
4644 need fixing up and all the natural barriers in the function (places
4645 where a constant table can be dropped without breaking the flow).
4646 For each fixup we note how far the pc-relative replacement will be
4647 able to reach and the offset of the instruction into the function.
4648
4649 Having built the table we then group the fixes together to form
4650 tables that are as large as possible (subject to addressing
4651 constraints) and emit each table of constants after the last
4652 barrier that is within range of all the instructions in the group.
4653 If a group does not contain a barrier, then we forcibly create one
4654 by inserting a jump instruction into the flow. Once the table has
4655 been inserted, the insns are then modified to reference the
4656 relevant entry in the pool.
4657
6354dc9b 4658 Possible enhancements to the algorithm (not implemented) are:
949d79eb 4659
d5b7b3ae 4660 1) For some processors and object formats, there may be benefit in
949d79eb
RE
4661 aligning the pools to the start of cache lines; this alignment
4662 would need to be taken into account when calculating addressability
6354dc9b 4663 of a pool. */
2b835d68 4664
d5b7b3ae
RE
4665/* These typedefs are located at the start of this file, so that
4666 they can be used in the prototypes there. This comment is to
4667 remind readers of that fact so that the following structures
4668 can be understood more easily.
4669
4670 typedef struct minipool_node Mnode;
4671 typedef struct minipool_fixup Mfix; */
4672
4673struct minipool_node
4674{
4675 /* Doubly linked chain of entries. */
4676 Mnode * next;
4677 Mnode * prev;
4678	  /* The maximum offset into the code at which this entry can be placed.  While
4679 pushing fixes for forward references, all entries are sorted in order
4680 of increasing max_address. */
4681 HOST_WIDE_INT max_address;
4682	  /* Similarly for an entry inserted for a backwards ref.  */
4683 HOST_WIDE_INT min_address;
4684 /* The number of fixes referencing this entry. This can become zero
4685 if we "unpush" an entry. In this case we ignore the entry when we
4686 come to emit the code. */
4687 int refcount;
4688 /* The offset from the start of the minipool. */
4689 HOST_WIDE_INT offset;
4690	  /* The value in the table.  */
4691 rtx value;
4692 /* The mode of value. */
4693 enum machine_mode mode;
4694 int fix_size;
4695};
4696
4697struct minipool_fixup
2b835d68 4698{
d5b7b3ae
RE
4699 Mfix * next;
4700 rtx insn;
4701 HOST_WIDE_INT address;
4702 rtx * loc;
4703 enum machine_mode mode;
4704 int fix_size;
4705 rtx value;
4706 Mnode * minipool;
4707 HOST_WIDE_INT forwards;
4708 HOST_WIDE_INT backwards;
4709};
2b835d68 4710
d5b7b3ae
RE
4711/* Fixes less than a word need padding out to a word boundary. */
4712#define MINIPOOL_FIX_SIZE(mode) \
4713 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
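/* E.g. a QImode or HImode fix still occupies 4 bytes; a DImode fix
   needs 8.  */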
2b835d68 4714
d5b7b3ae
RE
4715static Mnode * minipool_vector_head;
4716static Mnode * minipool_vector_tail;
4717static rtx minipool_vector_label;
332072db 4718
d5b7b3ae
RE
4719/* The linked list of all minipool fixes required for this function. */
4720Mfix * minipool_fix_head;
4721Mfix * minipool_fix_tail;
4722/* The fix entry for the current minipool, once it has been placed. */
4723Mfix * minipool_barrier;
4724
4725/* Determines if INSN is the start of a jump table. Returns the end
4726 of the TABLE or NULL_RTX. */
4727static rtx
4728is_jump_table (insn)
4729 rtx insn;
2b835d68 4730{
d5b7b3ae 4731 rtx table;
da6558fd 4732
d5b7b3ae
RE
4733 if (GET_CODE (insn) == JUMP_INSN
4734 && JUMP_LABEL (insn) != NULL
4735 && ((table = next_real_insn (JUMP_LABEL (insn)))
4736 == next_real_insn (insn))
4737 && table != NULL
4738 && GET_CODE (table) == JUMP_INSN
4739 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4740 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
4741 return table;
4742
4743 return NULL_RTX;
2b835d68
RE
4744}
4745
d5b7b3ae
RE
4746static HOST_WIDE_INT
4747get_jump_table_size (insn)
4748 rtx insn;
2b835d68 4749{
d5b7b3ae
RE
4750 rtx body = PATTERN (insn);
4751 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
2b835d68 4752
d5b7b3ae
RE
4753 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
4754}
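
/* E.g. an HImode ADDR_DIFF_VEC with 10 elements occupies 20 bytes.  */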
2b835d68 4755
d5b7b3ae
RE
4756/* Move a minipool fix MP from its current location to before MAX_MP.
4757 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
4758	   constraints may need updating.  */
4759static Mnode *
4760move_minipool_fix_forward_ref (mp, max_mp, max_address)
4761 Mnode * mp;
4762 Mnode * max_mp;
4763 HOST_WIDE_INT max_address;
4764{
4765 /* This should never be true and the code below assumes these are
4766 different. */
4767 if (mp == max_mp)
4768 abort ();
4769
4770 if (max_mp == NULL)
4771 {
4772 if (max_address < mp->max_address)
4773 mp->max_address = max_address;
4774 }
4775 else
2b835d68 4776 {
d5b7b3ae
RE
4777 if (max_address > max_mp->max_address - mp->fix_size)
4778 mp->max_address = max_mp->max_address - mp->fix_size;
4779 else
4780 mp->max_address = max_address;
2b835d68 4781
d5b7b3ae
RE
4782 /* Unlink MP from its current position. Since max_mp is non-null,
4783 mp->prev must be non-null. */
4784 mp->prev->next = mp->next;
4785 if (mp->next != NULL)
4786 mp->next->prev = mp->prev;
4787 else
4788 minipool_vector_tail = mp->prev;
2b835d68 4789
d5b7b3ae
RE
4790 /* Re-insert it before MAX_MP. */
4791 mp->next = max_mp;
4792 mp->prev = max_mp->prev;
4793 max_mp->prev = mp;
4794
4795 if (mp->prev != NULL)
4796 mp->prev->next = mp;
4797 else
4798 minipool_vector_head = mp;
4799 }
2b835d68 4800
d5b7b3ae
RE
4801 /* Save the new entry. */
4802 max_mp = mp;
4803
4804	  /* Scan over the preceding entries and adjust their addresses as
4805 required. */
4806 while (mp->prev != NULL
4807 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4808 {
4809 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4810 mp = mp->prev;
2b835d68
RE
4811 }
4812
d5b7b3ae 4813 return max_mp;
2b835d68
RE
4814}
4815
d5b7b3ae
RE
4816/* Add a constant to the minipool for a forward reference. Returns the
4817 node added or NULL if the constant will not fit in this pool. */
4818static Mnode *
4819add_minipool_forward_ref (fix)
4820 Mfix * fix;
4821{
4822 /* If set, max_mp is the first pool_entry that has a lower
4823 constraint than the one we are trying to add. */
4824 Mnode * max_mp = NULL;
4825 HOST_WIDE_INT max_address = fix->address + fix->forwards;
4826 Mnode * mp;
4827
4828 /* If this fix's address is greater than the address of the first
4829 entry, then we can't put the fix in this pool. We subtract the
4830 size of the current fix to ensure that if the table is fully
4831	     packed we still have enough room to insert this value by shuffling
4832 the other fixes forwards. */
4833 if (minipool_vector_head &&
4834 fix->address >= minipool_vector_head->max_address - fix->fix_size)
4835 return NULL;
2b835d68 4836
d5b7b3ae
RE
4837 /* Scan the pool to see if a constant with the same value has
4838 already been added. While we are doing this, also note the
4839 location where we must insert the constant if it doesn't already
4840 exist. */
4841 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4842 {
4843 if (GET_CODE (fix->value) == GET_CODE (mp->value)
4844 && fix->mode == mp->mode
4845 && (GET_CODE (fix->value) != CODE_LABEL
4846 || (CODE_LABEL_NUMBER (fix->value)
4847 == CODE_LABEL_NUMBER (mp->value)))
4848 && rtx_equal_p (fix->value, mp->value))
4849 {
4850 /* More than one fix references this entry. */
4851 mp->refcount++;
4852 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
4853 }
4854
4855 /* Note the insertion point if necessary. */
4856 if (max_mp == NULL
4857 && mp->max_address > max_address)
4858 max_mp = mp;
4859 }
4860
4861 /* The value is not currently in the minipool, so we need to create
4862 a new entry for it. If MAX_MP is NULL, the entry will be put on
4863 the end of the list since the placement is less constrained than
4864 any existing entry. Otherwise, we insert the new fix before
4865	     MAX_MP and, if necessary, adjust the constraints on the other
4866 entries. */
4867 mp = xmalloc (sizeof (* mp));
4868 mp->fix_size = fix->fix_size;
4869 mp->mode = fix->mode;
4870 mp->value = fix->value;
4871 mp->refcount = 1;
4872 /* Not yet required for a backwards ref. */
4873 mp->min_address = -65536;
4874
4875 if (max_mp == NULL)
4876 {
4877 mp->max_address = max_address;
4878 mp->next = NULL;
4879 mp->prev = minipool_vector_tail;
4880
4881 if (mp->prev == NULL)
4882 {
4883 minipool_vector_head = mp;
4884 minipool_vector_label = gen_label_rtx ();
7551cbc7 4885 }
2b835d68 4886 else
d5b7b3ae 4887 mp->prev->next = mp;
2b835d68 4888
d5b7b3ae
RE
4889 minipool_vector_tail = mp;
4890 }
4891 else
4892 {
4893 if (max_address > max_mp->max_address - mp->fix_size)
4894 mp->max_address = max_mp->max_address - mp->fix_size;
4895 else
4896 mp->max_address = max_address;
4897
4898 mp->next = max_mp;
4899 mp->prev = max_mp->prev;
4900 max_mp->prev = mp;
4901 if (mp->prev != NULL)
4902 mp->prev->next = mp;
4903 else
4904 minipool_vector_head = mp;
4905 }
4906
4907 /* Save the new entry. */
4908 max_mp = mp;
4909
4910	  /* Scan over the preceding entries and adjust their addresses as
4911 required. */
4912 while (mp->prev != NULL
4913 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4914 {
4915 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4916 mp = mp->prev;
2b835d68
RE
4917 }
4918
d5b7b3ae
RE
4919 return max_mp;
4920}
4921
4922static Mnode *
4923move_minipool_fix_backward_ref (mp, min_mp, min_address)
4924 Mnode * mp;
4925 Mnode * min_mp;
4926 HOST_WIDE_INT min_address;
4927{
4928 HOST_WIDE_INT offset;
4929
4930 /* This should never be true, and the code below assumes these are
4931 different. */
4932 if (mp == min_mp)
4933 abort ();
4934
4935 if (min_mp == NULL)
2b835d68 4936 {
d5b7b3ae
RE
4937 if (min_address > mp->min_address)
4938 mp->min_address = min_address;
4939 }
4940 else
4941 {
4942 /* We will adjust this below if it is too loose. */
4943 mp->min_address = min_address;
4944
4945 /* Unlink MP from its current position. Since min_mp is non-null,
4946 mp->next must be non-null. */
4947 mp->next->prev = mp->prev;
4948 if (mp->prev != NULL)
4949 mp->prev->next = mp->next;
4950 else
4951 minipool_vector_head = mp->next;
4952
4953 /* Reinsert it after MIN_MP. */
4954 mp->prev = min_mp;
4955 mp->next = min_mp->next;
4956 min_mp->next = mp;
4957 if (mp->next != NULL)
4958 mp->next->prev = mp;
2b835d68 4959 else
d5b7b3ae
RE
4960 minipool_vector_tail = mp;
4961 }
4962
4963 min_mp = mp;
4964
4965 offset = 0;
4966 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4967 {
4968 mp->offset = offset;
4969 if (mp->refcount > 0)
4970 offset += mp->fix_size;
4971
4972 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
4973 mp->next->min_address = mp->min_address + mp->fix_size;
4974 }
4975
4976 return min_mp;
4977}
4978
4979/* Add a constant to the minipool for a backward reference. Returns the
4980 node added or NULL if the constant will not fit in this pool.
4981
4982 Note that the code for insertion for a backwards reference can be
4983 somewhat confusing because the calculated offsets for each fix do
4984	   not take into account the size of the pool (which is still under
4985	   construction).  */
4986static Mnode *
4987add_minipool_backward_ref (fix)
4988 Mfix * fix;
4989{
4990 /* If set, min_mp is the last pool_entry that has a lower constraint
4991 than the one we are trying to add. */
4992 Mnode * min_mp = NULL;
4993 /* This can be negative, since it is only a constraint. */
4994 HOST_WIDE_INT min_address = fix->address - fix->backwards;
4995 Mnode * mp;
4996
4997 /* If we can't reach the current pool from this insn, or if we can't
4998 insert this entry at the end of the pool without pushing other
4999 fixes out of range, then we don't try. This ensures that we
5000 can't fail later on. */
5001 if (min_address >= minipool_barrier->address
5002 || (minipool_vector_tail->min_address + fix->fix_size
5003 >= minipool_barrier->address))
5004 return NULL;
5005
5006 /* Scan the pool to see if a constant with the same value has
5007 already been added. While we are doing this, also note the
5008 location where we must insert the constant if it doesn't already
5009 exist. */
5010 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5011 {
5012 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5013 && fix->mode == mp->mode
5014 && (GET_CODE (fix->value) != CODE_LABEL
5015 || (CODE_LABEL_NUMBER (fix->value)
5016 == CODE_LABEL_NUMBER (mp->value)))
5017 && rtx_equal_p (fix->value, mp->value)
5018 /* Check that there is enough slack to move this entry to the
5019 end of the table (this is conservative). */
5020 && (mp->max_address
5021 > (minipool_barrier->address
5022 + minipool_vector_tail->offset
5023 + minipool_vector_tail->fix_size)))
5024 {
5025 mp->refcount++;
5026 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5027 }
5028
5029 if (min_mp != NULL)
5030 mp->min_address += fix->fix_size;
5031 else
5032 {
5033 /* Note the insertion point if necessary. */
5034 if (mp->min_address < min_address)
5035 min_mp = mp;
5036 else if (mp->max_address
5037 < minipool_barrier->address + mp->offset + fix->fix_size)
5038 {
5039 /* Inserting before this entry would push the fix beyond
5040 its maximum address (which can happen if we have
5041 re-located a forwards fix); force the new fix to come
5042 after it. */
5043 min_mp = mp;
5044 min_address = mp->min_address + fix->fix_size;
5045 }
5046 }
5047 }
5048
5049 /* We need to create a new entry. */
5050 mp = xmalloc (sizeof (* mp));
5051 mp->fix_size = fix->fix_size;
5052 mp->mode = fix->mode;
5053 mp->value = fix->value;
5054 mp->refcount = 1;
5055 mp->max_address = minipool_barrier->address + 65536;
5056
5057 mp->min_address = min_address;
5058
5059 if (min_mp == NULL)
5060 {
5061 mp->prev = NULL;
5062 mp->next = minipool_vector_head;
5063
5064 if (mp->next == NULL)
5065 {
5066 minipool_vector_tail = mp;
5067 minipool_vector_label = gen_label_rtx ();
5068 }
5069 else
5070 mp->next->prev = mp;
5071
5072 minipool_vector_head = mp;
5073 }
5074 else
5075 {
5076 mp->next = min_mp->next;
5077 mp->prev = min_mp;
5078 min_mp->next = mp;
da6558fd 5079
d5b7b3ae
RE
5080 if (mp->next != NULL)
5081 mp->next->prev = mp;
5082 else
5083 minipool_vector_tail = mp;
5084 }
5085
5086 /* Save the new entry. */
5087 min_mp = mp;
5088
5089 if (mp->prev)
5090 mp = mp->prev;
5091 else
5092 mp->offset = 0;
5093
5094 /* Scan over the following entries and adjust their offsets. */
5095 while (mp->next != NULL)
5096 {
5097 if (mp->next->min_address < mp->min_address + mp->fix_size)
5098 mp->next->min_address = mp->min_address + mp->fix_size;
5099
5100 if (mp->refcount)
5101 mp->next->offset = mp->offset + mp->fix_size;
5102 else
5103 mp->next->offset = mp->offset;
5104
5105 mp = mp->next;
5106 }
5107
5108 return min_mp;
5109}
5110
5111static void
5112assign_minipool_offsets (barrier)
5113 Mfix * barrier;
5114{
5115 HOST_WIDE_INT offset = 0;
5116 Mnode * mp;
5117
5118 minipool_barrier = barrier;
5119
5120 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5121 {
5122 mp->offset = offset;
da6558fd 5123
d5b7b3ae
RE
5124 if (mp->refcount > 0)
5125 offset += mp->fix_size;
5126 }
5127}
5128
5129/* Output the literal table.  */
5130static void
5131dump_minipool (scan)
5132 rtx scan;
5133{
5134 Mnode * mp;
5135 Mnode * nmp;
5136
5137 if (rtl_dump_file)
5138 fprintf (rtl_dump_file,
5139 ";; Emitting minipool after insn %u; address %ld\n",
5140 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5141
5142 scan = emit_label_after (gen_label_rtx (), scan);
5143 scan = emit_insn_after (gen_align_4 (), scan);
5144 scan = emit_label_after (minipool_vector_label, scan);
5145
5146 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5147 {
5148 if (mp->refcount > 0)
5149 {
5150 if (rtl_dump_file)
5151 {
5152 fprintf (rtl_dump_file,
5153 ";; Offset %u, min %ld, max %ld ",
5154 (unsigned) mp->offset, (unsigned long) mp->min_address,
5155 (unsigned long) mp->max_address);
5156 arm_print_value (rtl_dump_file, mp->value);
5157 fputc ('\n', rtl_dump_file);
5158 }
5159
5160 switch (mp->fix_size)
5161 {
5162#ifdef HAVE_consttable_1
5163 case 1:
5164 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5165 break;
5166
5167#endif
5168#ifdef HAVE_consttable_2
5169 case 2:
5170 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5171 break;
5172
5173#endif
5174#ifdef HAVE_consttable_4
5175 case 4:
5176 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5177 break;
5178
5179#endif
5180#ifdef HAVE_consttable_8
5181 case 8:
5182 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5183 break;
5184
5185#endif
5186 default:
5187 abort ();
5188 break;
5189 }
5190 }
5191
5192 nmp = mp->next;
5193 free (mp);
2b835d68
RE
5194 }
5195
d5b7b3ae
RE
5196 minipool_vector_head = minipool_vector_tail = NULL;
5197 scan = emit_insn_after (gen_consttable_end (), scan);
5198 scan = emit_barrier_after (scan);
2b835d68
RE
5199}
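
/* When RTL dumping is enabled, the debug output above looks something
   like this (the numbers are hypothetical):

       ;; Emitting minipool after insn 57; address 1024
       ;; Offset 0, min 960, max 2040 0x2a  */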
5200
d5b7b3ae
RE
5201/* Return the cost of forcibly inserting a barrier after INSN. */
5202static int
5203arm_barrier_cost (insn)
5204 rtx insn;
949d79eb 5205{
d5b7b3ae
RE
5206 /* Basing the location of the pool on the loop depth is preferable,
5207 but at the moment, the basic block information seems to be
5208 corrupt by this stage of the compilation. */
5209 int base_cost = 50;
5210 rtx next = next_nonnote_insn (insn);
5211
5212 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5213 base_cost -= 20;
5214
5215 switch (GET_CODE (insn))
5216 {
5217 case CODE_LABEL:
5218 /* It will always be better to place the table before the label, rather
5219 than after it. */
5220 return 50;
949d79eb 5221
d5b7b3ae
RE
5222 case INSN:
5223 case CALL_INSN:
5224 return base_cost;
5225
5226 case JUMP_INSN:
5227 return base_cost - 10;
5228
5229 default:
5230 return base_cost + 10;
5231 }
5232}
5233
5234/* Find the best place in the insn stream in the range
5235 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5236 Create the barrier by inserting a jump and add a new fix entry for
5237 it. */
5238static Mfix *
5239create_fix_barrier (fix, max_address)
5240 Mfix * fix;
5241 HOST_WIDE_INT max_address;
5242{
5243 HOST_WIDE_INT count = 0;
5244 rtx barrier;
5245 rtx from = fix->insn;
5246 rtx selected = from;
5247 int selected_cost;
5248 HOST_WIDE_INT selected_address;
5249 Mfix * new_fix;
5250 HOST_WIDE_INT max_count = max_address - fix->address;
5251 rtx label = gen_label_rtx ();
5252
5253 selected_cost = arm_barrier_cost (from);
5254 selected_address = fix->address;
5255
5256 while (from && count < max_count)
5257 {
5258 rtx tmp;
5259 int new_cost;
5260
5261 /* This code shouldn't have been called if there was a natural barrier
5262 within range. */
5263 if (GET_CODE (from) == BARRIER)
5264 abort ();
5265
5266 /* Count the length of this insn. */
5267 count += get_attr_length (from);
5268
5269 /* If there is a jump table, add its length. */
5270 tmp = is_jump_table (from);
5271 if (tmp != NULL)
5272 {
5273 count += get_jump_table_size (tmp);
5274
5275 /* Jump tables aren't in a basic block, so base the cost on
5276 the dispatch insn. If we select this location, we will
5277 still put the pool after the table. */
5278 new_cost = arm_barrier_cost (from);
5279
5280 if (count < max_count && new_cost <= selected_cost)
5281 {
5282 selected = tmp;
5283 selected_cost = new_cost;
5284 selected_address = fix->address + count;
5285 }
5286
5287 /* Continue after the dispatch table. */
5288 from = NEXT_INSN (tmp);
5289 continue;
5290 }
5291
5292 new_cost = arm_barrier_cost (from);
5293
5294 if (count < max_count && new_cost <= selected_cost)
5295 {
5296 selected = from;
5297 selected_cost = new_cost;
5298 selected_address = fix->address + count;
5299 }
5300
5301 from = NEXT_INSN (from);
5302 }
5303
5304 /* Create a new JUMP_INSN that branches around a barrier. */
5305 from = emit_jump_insn_after (gen_jump (label), selected);
5306 JUMP_LABEL (from) = label;
5307 barrier = emit_barrier_after (from);
5308 emit_label_after (label, barrier);
5309
5310 /* Create a minipool barrier entry for the new barrier. */
5311 new_fix = (Mfix *) oballoc (sizeof (* new_fix));
5312 new_fix->insn = barrier;
5313 new_fix->address = selected_address;
5314 new_fix->next = fix->next;
5315 fix->next = new_fix;
5316
5317 return new_fix;
5318}
5319
5320/* Record that there is a natural barrier in the insn stream at
5321 ADDRESS. */
949d79eb
RE
5322static void
5323push_minipool_barrier (insn, address)
2b835d68 5324 rtx insn;
d5b7b3ae 5325 HOST_WIDE_INT address;
2b835d68 5326{
d5b7b3ae 5327 Mfix * fix = (Mfix *) oballoc (sizeof (* fix));
ad076f4e 5328
949d79eb
RE
5329 fix->insn = insn;
5330 fix->address = address;
2b835d68 5331
949d79eb
RE
5332 fix->next = NULL;
5333 if (minipool_fix_head != NULL)
5334 minipool_fix_tail->next = fix;
5335 else
5336 minipool_fix_head = fix;
5337
5338 minipool_fix_tail = fix;
5339}
2b835d68 5340
d5b7b3ae
RE
5341/* Record INSN, which will need fixing up to load a value from the
5342 minipool. ADDRESS is the offset of the insn since the start of the
5343 function; LOC is a pointer to the part of the insn which requires
5344 fixing; VALUE is the constant that must be loaded, which is of type
5345 MODE. */
949d79eb
RE
5346static void
5347push_minipool_fix (insn, address, loc, mode, value)
5348 rtx insn;
d5b7b3ae
RE
5349 HOST_WIDE_INT address;
5350 rtx * loc;
949d79eb
RE
5351 enum machine_mode mode;
5352 rtx value;
5353{
d5b7b3ae 5354 Mfix * fix = (Mfix *) oballoc (sizeof (* fix));
949d79eb
RE
5355
5356#ifdef AOF_ASSEMBLER
5357	  /* PIC symbol references need to be converted into offsets into the
5358	     based area.  */
d5b7b3ae
RE
5359 /* XXX This shouldn't be done here. */
5360 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
949d79eb
RE
5361 value = aof_pic_entry (value);
5362#endif /* AOF_ASSEMBLER */
5363
5364 fix->insn = insn;
5365 fix->address = address;
5366 fix->loc = loc;
5367 fix->mode = mode;
d5b7b3ae 5368 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
949d79eb 5369 fix->value = value;
d5b7b3ae
RE
5370 fix->forwards = get_attr_pool_range (insn);
5371 fix->backwards = get_attr_neg_pool_range (insn);
5372 fix->minipool = NULL;
949d79eb
RE
5373
5374 /* If an insn doesn't have a range defined for it, then it isn't
5375 expecting to be reworked by this code. Better to abort now than
5376 to generate duff assembly code. */
d5b7b3ae 5377 if (fix->forwards == 0 && fix->backwards == 0)
949d79eb
RE
5378 abort ();
5379
d5b7b3ae
RE
5380 if (rtl_dump_file)
5381 {
5382 fprintf (rtl_dump_file,
5383 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5384 GET_MODE_NAME (mode),
5385 INSN_UID (insn), (unsigned long) address,
5386 -1 * (long)fix->backwards, (long)fix->forwards);
5387 arm_print_value (rtl_dump_file, fix->value);
5388 fprintf (rtl_dump_file, "\n");
5389 }
5390
6354dc9b 5391 /* Add it to the chain of fixes. */
949d79eb 5392 fix->next = NULL;
d5b7b3ae 5393
949d79eb
RE
5394 if (minipool_fix_head != NULL)
5395 minipool_fix_tail->next = fix;
5396 else
5397 minipool_fix_head = fix;
5398
5399 minipool_fix_tail = fix;
5400}
5401
d5b7b3ae 5402/* Scan INSN and note any of its operands that need fixing. */
949d79eb
RE
5403static void
5404note_invalid_constants (insn, address)
5405 rtx insn;
d5b7b3ae 5406 HOST_WIDE_INT address;
949d79eb
RE
5407{
5408 int opno;
5409
d5b7b3ae 5410 extract_insn (insn);
949d79eb 5411
949d79eb
RE
5412 if (! constrain_operands (1))
5413 fatal_insn_not_found (insn);
5414
d5b7b3ae
RE
5415 /* Fill in recog_op_alt with information about the constraints of this
5416 insn. */
949d79eb
RE
5417 preprocess_constraints ();
5418
1ccbefce 5419 for (opno = 0; opno < recog_data.n_operands; opno++)
949d79eb 5420 {
6354dc9b 5421 /* Things we need to fix can only occur in inputs. */
36ab44c7 5422 if (recog_data.operand_type[opno] != OP_IN)
949d79eb
RE
5423 continue;
5424
5425 /* If this alternative is a memory reference, then any mention
5426 of constants in this alternative is really to fool reload
5427 into allowing us to accept one there. We need to fix them up
5428 now so that we output the right code. */
5429 if (recog_op_alt[opno][which_alternative].memory_ok)
5430 {
1ccbefce 5431 rtx op = recog_data.operand[opno];
949d79eb
RE
5432
5433 if (CONSTANT_P (op))
1ccbefce
RH
5434 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5435 recog_data.operand_mode[opno], op);
d5b7b3ae
RE
5436#if 0
5437 /* RWE: Now that we look correctly at the operands of the insn,
5438 this shouldn't be needed any more. */
949d79eb 5439#ifndef AOF_ASSEMBLER
d5b7b3ae 5440 /* XXX Is this still needed? */
949d79eb 5441 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
1ccbefce
RH
5442 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5443 recog_data.operand_mode[opno],
5444 XVECEXP (op, 0, 0));
949d79eb 5445#endif
d5b7b3ae
RE
5446#endif
5447 else if (GET_CODE (op) == MEM
949d79eb
RE
5448 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5449 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
1ccbefce
RH
5450 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5451 recog_data.operand_mode[opno],
949d79eb
RE
5452 get_pool_constant (XEXP (op, 0)));
5453 }
2b835d68 5454 }
2b835d68
RE
5455}
5456
5457void
5458arm_reorg (first)
5459 rtx first;
5460{
5461 rtx insn;
d5b7b3ae
RE
5462 HOST_WIDE_INT address = 0;
5463 Mfix * fix;
ad076f4e 5464
949d79eb 5465 minipool_fix_head = minipool_fix_tail = NULL;
2b835d68 5466
949d79eb
RE
5467 /* The first insn must always be a note, or the code below won't
5468 scan it properly. */
5469 if (GET_CODE (first) != NOTE)
5470 abort ();
5471
5472 /* Scan all the insns and record the operands that will need fixing. */
5473 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
2b835d68 5474 {
2b835d68 5475
949d79eb 5476 if (GET_CODE (insn) == BARRIER)
d5b7b3ae 5477 push_minipool_barrier (insn, address);
949d79eb
RE
5478 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5479 || GET_CODE (insn) == JUMP_INSN)
5480 {
5481 rtx table;
5482
5483 note_invalid_constants (insn, address);
5484 address += get_attr_length (insn);
d5b7b3ae 5485
949d79eb
RE
5486 /* If the insn is a vector jump, add the size of the table
5487 and skip the table. */
d5b7b3ae 5488 if ((table = is_jump_table (insn)) != NULL)
2b835d68 5489 {
d5b7b3ae 5490 address += get_jump_table_size (table);
949d79eb
RE
5491 insn = table;
5492 }
5493 }
5494 }
332072db 5495
d5b7b3ae
RE
5496 fix = minipool_fix_head;
5497
949d79eb 5498 /* Now scan the fixups and perform the required changes. */
d5b7b3ae 5499 while (fix)
949d79eb 5500 {
d5b7b3ae
RE
5501 Mfix * ftmp;
5502 Mfix * fdel;
5503 Mfix * last_added_fix;
5504 Mfix * last_barrier = NULL;
5505 Mfix * this_fix;
949d79eb
RE
5506
5507 /* Skip any further barriers before the next fix. */
5508 while (fix && GET_CODE (fix->insn) == BARRIER)
5509 fix = fix->next;
5510
d5b7b3ae 5511 /* No more fixes. */
949d79eb
RE
5512 if (fix == NULL)
5513 break;
332072db 5514
d5b7b3ae 5515 last_added_fix = NULL;
2b835d68 5516
d5b7b3ae 5517 for (ftmp = fix; ftmp; ftmp = ftmp->next)
949d79eb 5518 {
949d79eb 5519 if (GET_CODE (ftmp->insn) == BARRIER)
949d79eb 5520 {
d5b7b3ae
RE
5521 if (ftmp->address >= minipool_vector_head->max_address)
5522 break;
2b835d68 5523
d5b7b3ae 5524 last_barrier = ftmp;
2b835d68 5525 }
d5b7b3ae
RE
5526 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5527 break;
5528
5529 last_added_fix = ftmp; /* Keep track of the last fix added. */
2b835d68 5530 }
949d79eb 5531
d5b7b3ae
RE
5532 /* If we found a barrier, drop back to that; any fixes that we
5533 could have reached but come after the barrier will now go in
5534 the next mini-pool. */
949d79eb
RE
5535 if (last_barrier != NULL)
5536 {
d5b7b3ae
RE
5537 /* Reduce the refcount for those fixes that won't go into this
5538 pool after all. */
5539 for (fdel = last_barrier->next;
5540 fdel && fdel != ftmp;
5541 fdel = fdel->next)
5542 {
5543 fdel->minipool->refcount--;
5544 fdel->minipool = NULL;
5545 }
5546
949d79eb
RE
5547 ftmp = last_barrier;
5548 }
5549 else
2bfa88dc 5550 {
d5b7b3ae
RE
5551 /* ftmp is the first fix that we can't fit into this pool and
5552 there are no natural barriers that we could use. Insert a
5553 new barrier in the code somewhere between the previous
5554 fix and this one, and arrange to jump around it. */
5555 HOST_WIDE_INT max_address;
5556
5557 /* The last item on the list of fixes must be a barrier, so
5558 we can never run off the end of the list of fixes without
5559 last_barrier being set. */
5560 if (ftmp == NULL)
5561 abort ();
5562
5563 max_address = minipool_vector_head->max_address;
2bfa88dc
RE
5564 /* Check that there isn't another fix that is in range that
5565 we couldn't fit into this pool because the pool was
5566 already too large: we need to put the pool before such an
5567 instruction. */
d5b7b3ae
RE
5568 if (ftmp->address < max_address)
5569 max_address = ftmp->address;
5570
5571 last_barrier = create_fix_barrier (last_added_fix, max_address);
5572 }
5573
5574 assign_minipool_offsets (last_barrier);
5575
5576 while (ftmp)
5577 {
5578 if (GET_CODE (ftmp->insn) != BARRIER
5579 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
5580 == NULL))
5581 break;
2bfa88dc 5582
d5b7b3ae 5583 ftmp = ftmp->next;
2bfa88dc 5584 }
949d79eb
RE
5585
5586 /* Scan over the fixes we have identified for this pool, fixing them
5587 up and adding the constants to the pool itself. */
d5b7b3ae 5588 for (this_fix = fix; this_fix && ftmp != this_fix;
949d79eb
RE
5589 this_fix = this_fix->next)
5590 if (GET_CODE (this_fix->insn) != BARRIER)
5591 {
949d79eb
RE
5592 rtx addr
5593 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
5594 minipool_vector_label),
d5b7b3ae 5595 this_fix->minipool->offset);
949d79eb
RE
5596 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
5597 }
5598
d5b7b3ae 5599 dump_minipool (last_barrier->insn);
949d79eb 5600 fix = ftmp;
2b835d68 5601 }
4b632bf1 5602
949d79eb
RE
5603 /* From now on we must synthesize any constants that we can't handle
5604 directly. This can happen if the RTL gets split during final
5605 instruction generation. */
4b632bf1 5606 after_arm_reorg = 1;
2b835d68 5607}
cce8749e
CH
5608\f
5609/* Routines to output assembly language. */
5610
f3bb6135 5611/* If the rtx is the correct value then return the string of the number.
ff9940b0 5612 In this way we can ensure that valid double constants are generated even
6354dc9b 5613 when cross compiling. */
cd2b33d0 5614const char *
ff9940b0 5615fp_immediate_constant (x)
b5cc037f 5616 rtx x;
ff9940b0
RE
5617{
5618 REAL_VALUE_TYPE r;
5619 int i;
5620
5621 if (!fpa_consts_inited)
5622 init_fpa_table ();
5623
5624 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5625 for (i = 0; i < 8; i++)
5626 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
5627 return strings_fpa[i];
f3bb6135 5628
ff9940b0
RE
5629 abort ();
5630}
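/* A usage sketch (assumptions: the table built by init_fpa_table ()
 holds the eight FPA immediates 0, 1, 2, 3, 4, 5, 0.5 and 10, and
 CONST_DOUBLE_FROM_REAL_VALUE is available here): */
#if 0
 rtx two = CONST_DOUBLE_FROM_REAL_VALUE (dconst2, DFmode);
 const char * s = fp_immediate_constant (two); /* Returns "2". */
#endif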
5631
9997d19d 5632/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
cd2b33d0 5633static const char *
9997d19d 5634fp_const_from_val (r)
62b10bbc 5635 REAL_VALUE_TYPE * r;
9997d19d
RE
5636{
5637 int i;
5638
5639 if (! fpa_consts_inited)
5640 init_fpa_table ();
5641
5642 for (i = 0; i < 8; i++)
5643 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
5644 return strings_fpa[i];
5645
5646 abort ();
5647}
ff9940b0 5648
cce8749e
CH
5649/* Output the operands of a LDM/STM instruction to STREAM.
5650 MASK is the ARM register set mask of which only bits 0-15 are important.
5651 INSTR is the (possibly suffixed) instruction to emit, with REG as its
5652 base register. HAT is nonzero if a hat must follow the register list. */
5653
d5b7b3ae 5654static void
dd18ae56 5655print_multi_reg (stream, instr, reg, mask, hat)
62b10bbc 5656 FILE * stream;
cd2b33d0 5657 const char * instr;
dd18ae56
NC
5658 int reg;
5659 int mask;
5660 int hat;
cce8749e
CH
5661{
5662 int i;
5663 int not_first = FALSE;
5664
1d5473cb 5665 fputc ('\t', stream);
dd18ae56 5666 asm_fprintf (stream, instr, reg);
1d5473cb 5667 fputs (", {", stream);
62b10bbc 5668
d5b7b3ae 5669 for (i = 0; i <= LAST_ARM_REGNUM; i++)
cce8749e
CH
5670 if (mask & (1 << i))
5671 {
5672 if (not_first)
5673 fprintf (stream, ", ");
62b10bbc 5674
dd18ae56 5675 asm_fprintf (stream, "%r", i);
cce8749e
CH
5676 not_first = TRUE;
5677 }
f3bb6135 5678
cce8749e 5679 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 5680}
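/* Usage sketch (the mask is chosen purely for illustration): with
 bits set for r4, r5 and lr, the call below emits
	ldmfd	sp!, {r4, r5, lr}
 with a trailing `^' appended when HAT is nonzero. */
#if 0
 print_multi_reg (stream, "ldmfd\t%r!", SP_REGNUM,
 (1 << 4) | (1 << 5) | (1 << LR_REGNUM), FALSE);
#endif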
cce8749e 5681
6354dc9b 5682/* Output a 'call' insn. */
cce8749e 5683
cd2b33d0 5684const char *
cce8749e 5685output_call (operands)
62b10bbc 5686 rtx * operands;
cce8749e 5687{
6354dc9b 5688 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
cce8749e 5689
62b10bbc 5690 if (REGNO (operands[0]) == LR_REGNUM)
cce8749e 5691 {
62b10bbc 5692 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
1d5473cb 5693 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 5694 }
62b10bbc 5695
1d5473cb 5696 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
da6558fd 5697
6cfc7210 5698 if (TARGET_INTERWORK)
da6558fd
NC
5699 output_asm_insn ("bx%?\t%0", operands);
5700 else
5701 output_asm_insn ("mov%?\t%|pc, %0", operands);
5702
f3bb6135
RE
5703 return "";
5704}
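/* For example (a sketch): a call through r2 is emitted as
	mov	lr, pc
	mov	pc, r2
 or, when TARGET_INTERWORK, as "mov lr, pc" followed by "bx r2". */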
cce8749e 5705
ff9940b0
RE
5706static int
5707eliminate_lr2ip (x)
62b10bbc 5708 rtx * x;
ff9940b0
RE
5709{
5710 int something_changed = 0;
62b10bbc 5711 rtx x0 = * x;
ff9940b0
RE
5712 int code = GET_CODE (x0);
5713 register int i, j;
6f7d635c 5714 register const char * fmt;
ff9940b0
RE
5715
5716 switch (code)
5717 {
5718 case REG:
62b10bbc 5719 if (REGNO (x0) == LR_REGNUM)
ff9940b0 5720 {
62b10bbc 5721 *x = gen_rtx_REG (SImode, IP_REGNUM);
ff9940b0
RE
5722 return 1;
5723 }
5724 return 0;
5725 default:
6354dc9b 5726 /* Scan through the sub-elements and change any references there. */
ff9940b0 5727 fmt = GET_RTX_FORMAT (code);
62b10bbc 5728
ff9940b0
RE
5729 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5730 if (fmt[i] == 'e')
5731 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
5732 else if (fmt[i] == 'E')
5733 for (j = 0; j < XVECLEN (x0, i); j++)
5734 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
62b10bbc 5735
ff9940b0
RE
5736 return something_changed;
5737 }
5738}
5739
6354dc9b 5740/* Output a 'call' insn that is a reference in memory. */
ff9940b0 5741
cd2b33d0 5742const char *
ff9940b0 5743output_call_mem (operands)
62b10bbc 5744 rtx * operands;
ff9940b0 5745{
6354dc9b
NC
5746 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
5747 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
ff9940b0 5748 if (eliminate_lr2ip (&operands[0]))
1d5473cb 5749 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 5750
6cfc7210 5751 if (TARGET_INTERWORK)
da6558fd
NC
5752 {
5753 output_asm_insn ("ldr%?\t%|ip, %0", operands);
5754 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5755 output_asm_insn ("bx%?\t%|ip", operands);
5756 }
5757 else
5758 {
5759 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5760 output_asm_insn ("ldr%?\t%|pc, %0", operands);
5761 }
5762
f3bb6135
RE
5763 return "";
5764}
ff9940b0
RE
5765
5766
5767/* Output a move from arm registers to an fpu register.
5768 OPERANDS[0] is an fpu register.
5769 OPERANDS[1] is the first register of an arm register pair. */
5770
cd2b33d0 5771const char *
ff9940b0 5772output_mov_long_double_fpu_from_arm (operands)
62b10bbc 5773 rtx * operands;
ff9940b0
RE
5774{
5775 int arm_reg0 = REGNO (operands[1]);
5776 rtx ops[3];
5777
62b10bbc
NC
5778 if (arm_reg0 == IP_REGNUM)
5779 abort ();
f3bb6135 5780
43cffd11
RE
5781 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5782 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5783 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 5784
1d5473cb
RE
5785 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
5786 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
62b10bbc 5787
f3bb6135
RE
5788 return "";
5789}
ff9940b0
RE
5790
5791/* Output a move from an fpu register to arm registers.
5792 OPERANDS[0] is the first register of an arm register pair.
5793 OPERANDS[1] is an fpu register. */
5794
cd2b33d0 5795const char *
ff9940b0 5796output_mov_long_double_arm_from_fpu (operands)
62b10bbc 5797 rtx * operands;
ff9940b0
RE
5798{
5799 int arm_reg0 = REGNO (operands[0]);
5800 rtx ops[3];
5801
62b10bbc
NC
5802 if (arm_reg0 == IP_REGNUM)
5803 abort ();
f3bb6135 5804
43cffd11
RE
5805 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5806 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5807 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
ff9940b0 5808
1d5473cb
RE
5809 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
5810 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
5811 return "";
5812}
ff9940b0
RE
5813
5814/* Output a move from arm registers to arm registers of a long double.
5815 OPERANDS[0] is the destination.
5816 OPERANDS[1] is the source. */
cd2b33d0 5817const char *
ff9940b0 5818output_mov_long_double_arm_from_arm (operands)
62b10bbc 5819 rtx * operands;
ff9940b0 5820{
6354dc9b 5821 /* We have to be careful here because the two might overlap. */
ff9940b0
RE
5822 int dest_start = REGNO (operands[0]);
5823 int src_start = REGNO (operands[1]);
5824 rtx ops[2];
5825 int i;
5826
5827 if (dest_start < src_start)
5828 {
5829 for (i = 0; i < 3; i++)
5830 {
43cffd11
RE
5831 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5832 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5833 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5834 }
5835 }
5836 else
5837 {
5838 for (i = 2; i >= 0; i--)
5839 {
43cffd11
RE
5840 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5841 ops[1] = gen_rtx_REG (SImode, src_start + i);
9997d19d 5842 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
5843 }
5844 }
f3bb6135 5845
ff9940b0
RE
5846 return "";
5847}
5848
5849
cce8749e
CH
5850/* Output a move from arm registers to an fpu register.
5851 OPERANDS[0] is an fpu register.
5852 OPERANDS[1] is the first register of an arm register pair. */
5853
cd2b33d0 5854const char *
cce8749e 5855output_mov_double_fpu_from_arm (operands)
62b10bbc 5856 rtx * operands;
cce8749e
CH
5857{
5858 int arm_reg0 = REGNO (operands[1]);
5859 rtx ops[2];
5860
62b10bbc
NC
5861 if (arm_reg0 == IP_REGNUM)
5862 abort ();
5863
43cffd11
RE
5864 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5865 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5866 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
5867 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
5868 return "";
5869}
cce8749e
CH
5870
5871/* Output a move from an fpu register to arm registers.
5872 OPERANDS[0] is the first register of an arm register pair.
5873 OPERANDS[1] is an fpu register. */
5874
cd2b33d0 5875const char *
cce8749e 5876output_mov_double_arm_from_fpu (operands)
62b10bbc 5877 rtx * operands;
cce8749e
CH
5878{
5879 int arm_reg0 = REGNO (operands[0]);
5880 rtx ops[2];
5881
62b10bbc
NC
5882 if (arm_reg0 == IP_REGNUM)
5883 abort ();
f3bb6135 5884
43cffd11
RE
5885 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5886 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
1d5473cb
RE
5887 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
5888 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
5889 return "";
5890}
cce8749e
CH
5891
5892/* Output a move between double words.
5893 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
5894 or MEM<-REG and all MEMs must be offsettable addresses. */
5895
cd2b33d0 5896const char *
cce8749e 5897output_move_double (operands)
aec3cfba 5898 rtx * operands;
cce8749e
CH
5899{
5900 enum rtx_code code0 = GET_CODE (operands[0]);
5901 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 5902 rtx otherops[3];
cce8749e
CH
5903
5904 if (code0 == REG)
5905 {
5906 int reg0 = REGNO (operands[0]);
5907
43cffd11 5908 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
aec3cfba 5909
cce8749e
CH
5910 if (code1 == REG)
5911 {
5912 int reg1 = REGNO (operands[1]);
62b10bbc
NC
5913 if (reg1 == IP_REGNUM)
5914 abort ();
f3bb6135 5915
6354dc9b 5916 /* Ensure the second source is not overwritten. */
c1c2bc04 5917 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6cfc7210 5918 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 5919 else
6cfc7210 5920 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
5921 }
5922 else if (code1 == CONST_DOUBLE)
5923 {
226a5051
RE
5924 if (GET_MODE (operands[1]) == DFmode)
5925 {
5926 long l[2];
5927 union real_extract u;
5928
5929 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
5930 sizeof (u));
5931 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
d5b7b3ae
RE
5932 otherops[1] = GEN_INT (l[1]);
5933 operands[1] = GEN_INT (l[0]);
226a5051 5934 }
c1c2bc04
RE
5935 else if (GET_MODE (operands[1]) != VOIDmode)
5936 abort ();
5937 else if (WORDS_BIG_ENDIAN)
5938 {
5939
5940 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5941 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5942 }
226a5051
RE
5943 else
5944 {
c1c2bc04 5945
226a5051
RE
5946 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5947 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5948 }
6cfc7210 5949
c1c2bc04
RE
5950 output_mov_immediate (operands);
5951 output_mov_immediate (otherops);
cce8749e
CH
5952 }
5953 else if (code1 == CONST_INT)
5954 {
56636818
JL
5955#if HOST_BITS_PER_WIDE_INT > 32
5956 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
5957 what the upper word is. */
5958 if (WORDS_BIG_ENDIAN)
5959 {
5960 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5961 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5962 }
5963 else
5964 {
5965 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5966 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5967 }
5968#else
6354dc9b 5969 /* Sign extend the intval into the high-order word. */
c1c2bc04
RE
5970 if (WORDS_BIG_ENDIAN)
5971 {
5972 otherops[1] = operands[1];
5973 operands[1] = (INTVAL (operands[1]) < 0
5974 ? constm1_rtx : const0_rtx);
5975 }
ff9940b0 5976 else
c1c2bc04 5977 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 5978#endif
c1c2bc04
RE
5979 output_mov_immediate (otherops);
5980 output_mov_immediate (operands);
cce8749e
CH
5981 }
5982 else if (code1 == MEM)
5983 {
ff9940b0 5984 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 5985 {
ff9940b0 5986 case REG:
9997d19d 5987 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 5988 break;
2b835d68 5989
ff9940b0 5990 case PRE_INC:
6354dc9b 5991 abort (); /* Should never happen now. */
ff9940b0 5992 break;
2b835d68 5993
ff9940b0 5994 case PRE_DEC:
2b835d68 5995 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 5996 break;
2b835d68 5997
ff9940b0 5998 case POST_INC:
9997d19d 5999 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 6000 break;
2b835d68 6001
ff9940b0 6002 case POST_DEC:
6354dc9b 6003 abort (); /* Should never happen now. */
ff9940b0 6004 break;
2b835d68
RE
6005
6006 case LABEL_REF:
6007 case CONST:
6008 output_asm_insn ("adr%?\t%0, %1", operands);
6009 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6010 break;
6011
ff9940b0 6012 default:
aec3cfba
NC
6013 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6014 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
cce8749e 6015 {
2b835d68
RE
6016 otherops[0] = operands[0];
6017 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6018 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6019 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6020 {
6021 if (GET_CODE (otherops[2]) == CONST_INT)
6022 {
6023 switch (INTVAL (otherops[2]))
6024 {
6025 case -8:
6026 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6027 return "";
6028 case -4:
6029 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6030 return "";
6031 case 4:
6032 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6033 return "";
6034 }
6035 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6036 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6037 else
6038 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6039 }
6040 else
6041 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6042 }
6043 else
6044 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6cfc7210 6045
2b835d68
RE
6046 return "ldm%?ia\t%0, %M0";
6047 }
6048 else
6049 {
6050 otherops[1] = adj_offsettable_operand (operands[1], 4);
6051 /* Take care of overlapping base/data reg. */
6052 if (reg_mentioned_p (operands[0], operands[1]))
6053 {
6054 output_asm_insn ("ldr%?\t%0, %1", otherops);
6055 output_asm_insn ("ldr%?\t%0, %1", operands);
6056 }
6057 else
6058 {
6059 output_asm_insn ("ldr%?\t%0, %1", operands);
6060 output_asm_insn ("ldr%?\t%0, %1", otherops);
6061 }
cce8749e
CH
6062 }
6063 }
6064 }
2b835d68 6065 else
6354dc9b 6066 abort (); /* Constraints should prevent this. */
cce8749e
CH
6067 }
6068 else if (code0 == MEM && code1 == REG)
6069 {
62b10bbc
NC
6070 if (REGNO (operands[1]) == IP_REGNUM)
6071 abort ();
2b835d68 6072
ff9940b0
RE
6073 switch (GET_CODE (XEXP (operands[0], 0)))
6074 {
6075 case REG:
9997d19d 6076 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 6077 break;
2b835d68 6078
ff9940b0 6079 case PRE_INC:
6354dc9b 6080 abort (); /* Should never happen now. */
ff9940b0 6081 break;
2b835d68 6082
ff9940b0 6083 case PRE_DEC:
2b835d68 6084 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 6085 break;
2b835d68 6086
ff9940b0 6087 case POST_INC:
9997d19d 6088 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 6089 break;
2b835d68 6090
ff9940b0 6091 case POST_DEC:
6354dc9b 6092 abort (); /* Should never happen now. */
ff9940b0 6093 break;
2b835d68
RE
6094
6095 case PLUS:
6096 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6097 {
6098 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6099 {
6100 case -8:
6101 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6102 return "";
6103
6104 case -4:
6105 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6106 return "";
6107
6108 case 4:
6109 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6110 return "";
6111 }
6112 }
6113 /* Fall through */
6114
ff9940b0 6115 default:
cce8749e 6116 otherops[0] = adj_offsettable_operand (operands[0], 4);
43cffd11 6117 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
9997d19d
RE
6118 output_asm_insn ("str%?\t%1, %0", operands);
6119 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
6120 }
6121 }
2b835d68 6122 else
62b10bbc 6123 abort (); /* Constraints should prevent this. */
cce8749e 6124
9997d19d
RE
6125 return "";
6126}
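/* Sketch of the addressing-mode selection above: a 64-bit load whose
 address is base plus -8, -4 or 4 is emitted directly as
	ldmdb	rb, {rd, rd+1}	@ offset -8
	ldmda	rb, {rd, rd+1}	@ offset -4
	ldmib	rb, {rd, rd+1}	@ offset 4
 while any other offset is first folded into the base register with
 an add or sub. */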
cce8749e
CH
6127
6128
6129/* Output an arbitrary MOV reg, #n.
6130 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6131
cd2b33d0 6132const char *
cce8749e 6133output_mov_immediate (operands)
62b10bbc 6134 rtx * operands;
cce8749e 6135{
f3bb6135 6136 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
6137 int n_ones = 0;
6138 int i;
6139
6140 /* Try to use one MOV. */
cce8749e 6141 if (const_ok_for_arm (n))
f3bb6135 6142 {
9997d19d 6143 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
6144 return "";
6145 }
cce8749e
CH
6146
6147 /* Try to use one MVN. */
f3bb6135 6148 if (const_ok_for_arm (~n))
cce8749e 6149 {
f3bb6135 6150 operands[1] = GEN_INT (~n);
9997d19d 6151 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 6152 return "";
cce8749e
CH
6153 }
6154
6354dc9b 6155 /* If all else fails, make it out of ORRs or BICs as appropriate. */
cce8749e
CH
6156
6157 for (i = 0; i < 32; i++)
6158 if (n & 1 << i)
6159 n_ones++;
6160
6354dc9b 6161 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
e5951263 6162 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
cce8749e 6163 else
d5b7b3ae 6164 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
f3bb6135
RE
6165
6166 return "";
6167}
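/* Worked example (a sketch): 0x00ff00ff is not a valid immediate and
 neither is its complement, so it is built from two 8-bit pieces:
	mov	r0, #255
	orr	r0, r0, #16711680	@ 0x00ff0000
 */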
cce8749e
CH
6168
6169
6170/* Output an ADD r, s, #n where n may be too big for one instruction. If
6171 we are adding zero to the same register, output nothing. */
6172
cd2b33d0 6173const char *
cce8749e 6174output_add_immediate (operands)
62b10bbc 6175 rtx * operands;
cce8749e 6176{
f3bb6135 6177 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
6178
6179 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6180 {
6181 if (n < 0)
6182 output_multi_immediate (operands,
9997d19d
RE
6183 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6184 -n);
cce8749e
CH
6185 else
6186 output_multi_immediate (operands,
9997d19d
RE
6187 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6188 n);
cce8749e 6189 }
f3bb6135
RE
6190
6191 return "";
6192}
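/* For instance (a sketch): no single ADD can encode 0x101, so it is
 split as
	add	r0, r1, #1
	add	r0, r0, #256
 whereas adding zero to a register that is its own destination emits
 nothing at all. */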
cce8749e 6193
cce8749e
CH
6194/* Output a multiple immediate operation.
6195 OPERANDS is the vector of operands referred to in the output patterns.
6196 INSTR1 is the output pattern to use for the first constant.
6197 INSTR2 is the output pattern to use for subsequent constants.
6198 IMMED_OP is the index of the constant slot in OPERANDS.
6199 N is the constant value. */
6200
cd2b33d0 6201static const char *
cce8749e 6202output_multi_immediate (operands, instr1, instr2, immed_op, n)
62b10bbc 6203 rtx * operands;
cd2b33d0
NC
6204 const char * instr1;
6205 const char * instr2;
f3bb6135
RE
6206 int immed_op;
6207 HOST_WIDE_INT n;
cce8749e 6208{
f3bb6135 6209#if HOST_BITS_PER_WIDE_INT > 32
e5951263 6210 n &= HOST_UINT (0xffffffff);
f3bb6135
RE
6211#endif
6212
cce8749e
CH
6213 if (n == 0)
6214 {
6215 operands[immed_op] = const0_rtx;
6354dc9b 6216 output_asm_insn (instr1, operands); /* Quick and easy output. */
cce8749e
CH
6217 }
6218 else
6219 {
6220 int i;
cd2b33d0 6221 const char * instr = instr1;
cce8749e 6222
6354dc9b 6223 /* Note that n is never zero here (which would give no output). */
cce8749e
CH
6224 for (i = 0; i < 32; i += 2)
6225 {
6226 if (n & (3 << i))
6227 {
f3bb6135
RE
6228 operands[immed_op] = GEN_INT (n & (255 << i));
6229 output_asm_insn (instr, operands);
cce8749e
CH
6230 instr = instr2;
6231 i += 6;
6232 }
6233 }
6234 }
cd2b33d0 6235
f3bb6135 6236 return "";
9997d19d 6237}
cce8749e
CH
6238
6239
6240/* Return the appropriate ARM instruction for the operation code.
6241 The returned result should not be overwritten. OP is the rtx of the
6242 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6243 was shifted. */
6244
cd2b33d0 6245const char *
cce8749e
CH
6246arithmetic_instr (op, shift_first_arg)
6247 rtx op;
f3bb6135 6248 int shift_first_arg;
cce8749e 6249{
9997d19d 6250 switch (GET_CODE (op))
cce8749e
CH
6251 {
6252 case PLUS:
f3bb6135
RE
6253 return "add";
6254
cce8749e 6255 case MINUS:
f3bb6135
RE
6256 return shift_first_arg ? "rsb" : "sub";
6257
cce8749e 6258 case IOR:
f3bb6135
RE
6259 return "orr";
6260
cce8749e 6261 case XOR:
f3bb6135
RE
6262 return "eor";
6263
cce8749e 6264 case AND:
f3bb6135
RE
6265 return "and";
6266
cce8749e 6267 default:
f3bb6135 6268 abort ();
cce8749e 6269 }
f3bb6135 6270}
cce8749e
CH
6271
6272
6273/* Ensure valid constant shifts and return the appropriate shift mnemonic
6274 for the operation code. The returned result should not be overwritten.
6275 OP is the rtx code of the shift.
9997d19d 6276 On exit, *AMOUNTP will be -1 if the shift is by a register; otherwise
6354dc9b 6277 it will hold the constant shift amount. */
cce8749e 6278
cd2b33d0 6279static const char *
9997d19d
RE
6280shift_op (op, amountp)
6281 rtx op;
6282 HOST_WIDE_INT *amountp;
cce8749e 6283{
cd2b33d0 6284 const char * mnem;
e2c671ba 6285 enum rtx_code code = GET_CODE (op);
cce8749e 6286
9997d19d
RE
6287 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6288 *amountp = -1;
6289 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6290 *amountp = INTVAL (XEXP (op, 1));
6291 else
6292 abort ();
6293
e2c671ba 6294 switch (code)
cce8749e
CH
6295 {
6296 case ASHIFT:
6297 mnem = "asl";
6298 break;
f3bb6135 6299
cce8749e
CH
6300 case ASHIFTRT:
6301 mnem = "asr";
cce8749e 6302 break;
f3bb6135 6303
cce8749e
CH
6304 case LSHIFTRT:
6305 mnem = "lsr";
cce8749e 6306 break;
f3bb6135 6307
9997d19d
RE
6308 case ROTATERT:
6309 mnem = "ror";
9997d19d
RE
6310 break;
6311
ff9940b0 6312 case MULT:
e2c671ba
RE
6313 /* We never have to worry about the amount being other than a
6314 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
6315 if (*amountp != -1)
6316 *amountp = int_log2 (*amountp);
6317 else
6318 abort ();
f3bb6135
RE
6319 return "asl";
6320
cce8749e 6321 default:
f3bb6135 6322 abort ();
cce8749e
CH
6323 }
6324
e2c671ba
RE
6325 if (*amountp != -1)
6326 {
6327 /* This is not 100% correct, but follows from the desire to merge
6328 multiplication by a power of 2 with the recognizer for a
6329 shift. >=32 is not a valid shift for "asl", so we must try and
6330 output a shift that produces the correct arithmetical result.
ddd5a7c1 6331 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
6332 is not set correctly if we set the flags; but we never use the
6333 carry bit from such an operation, so we can ignore that. */
6334 if (code == ROTATERT)
6335 *amountp &= 31; /* Rotate is just modulo 32. */
6336 else if (*amountp != (*amountp & 31))
6337 {
6338 if (code == ASHIFT)
6339 mnem = "lsr";
6340 *amountp = 32;
6341 }
6342
6343 /* Shifts of 0 are no-ops. */
6344 if (*amountp == 0)
6345 return NULL;
6346 }
6347
9997d19d
RE
6348 return mnem;
6349}
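/* Examples (a sketch): (ashiftrt x (const_int 5)) yields "asr" with
 *amountp == 5; (mult x (const_int 8)) yields "asl" with *amountp
 == 3; and (ashift x (const_int 40)) is rewritten as "lsr" by 32 so
 that the arithmetical result stays correct. */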
cce8749e
CH
6350
6351
6354dc9b 6352/* Obtain the shift from the POWER of two. */
18af7313 6353static HOST_WIDE_INT
cce8749e 6354int_log2 (power)
f3bb6135 6355 HOST_WIDE_INT power;
cce8749e 6356{
f3bb6135 6357 HOST_WIDE_INT shift = 0;
cce8749e 6358
e5951263 6359 while ((((HOST_INT (1)) << shift) & power) == 0)
cce8749e
CH
6360 {
6361 if (shift > 31)
f3bb6135 6362 abort ();
cce8749e
CH
6363 shift++;
6364 }
f3bb6135
RE
6365
6366 return shift;
6367}
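/* E.g. int_log2 (8) returns 3: the loop stops as soon as
 (1 << 3) & 8 is nonzero. */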
cce8749e 6368
cce8749e
CH
6369/* Output a .ascii pseudo-op, keeping track of lengths. This is because
6370 /bin/as is horribly restrictive. */
6cfc7210 6371#define MAX_ASCII_LEN 51
cce8749e
CH
6372
6373void
6374output_ascii_pseudo_op (stream, p, len)
62b10bbc 6375 FILE * stream;
3cce094d 6376 const unsigned char * p;
cce8749e
CH
6377 int len;
6378{
6379 int i;
6cfc7210 6380 int len_so_far = 0;
cce8749e 6381
6cfc7210
NC
6382 fputs ("\t.ascii\t\"", stream);
6383
cce8749e
CH
6384 for (i = 0; i < len; i++)
6385 {
6386 register int c = p[i];
6387
6cfc7210 6388 if (len_so_far >= MAX_ASCII_LEN)
cce8749e 6389 {
6cfc7210 6390 fputs ("\"\n\t.ascii\t\"", stream);
cce8749e 6391 len_so_far = 0;
cce8749e
CH
6392 }
6393
6cfc7210 6394 switch (c)
cce8749e 6395 {
6cfc7210
NC
6396 case TARGET_TAB:
6397 fputs ("\\t", stream);
6398 len_so_far += 2;
6399 break;
6400
6401 case TARGET_FF:
6402 fputs ("\\f", stream);
6403 len_so_far += 2;
6404 break;
6405
6406 case TARGET_BS:
6407 fputs ("\\b", stream);
6408 len_so_far += 2;
6409 break;
6410
6411 case TARGET_CR:
6412 fputs ("\\r", stream);
6413 len_so_far += 2;
6414 break;
6415
6416 case TARGET_NEWLINE:
6417 fputs ("\\n", stream);
6418 c = p [i + 1];
6419 if ((c >= ' ' && c <= '~')
6420 || c == TARGET_TAB)
6421 /* This is a good place for a line break. */
6422 len_so_far = MAX_ASCII_LEN;
6423 else
6424 len_so_far += 2;
6425 break;
6426
6427 case '\"':
6428 case '\\':
6429 putc ('\\', stream);
6430 len_so_far ++;
6431 /* drop through. */
f3bb6135 6432
6cfc7210
NC
6433 default:
6434 if (c >= ' ' && c <= '~')
6435 {
6436 putc (c, stream);
6437 len_so_far ++;
6438 }
6439 else
6440 {
6441 fprintf (stream, "\\%03o", c);
6442 len_so_far += 4;
6443 }
6444 break;
cce8749e 6445 }
cce8749e 6446 }
f3bb6135 6447
cce8749e 6448 fputs ("\"\n", stream);
f3bb6135 6449}
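/* For example (a sketch): the three characters a, TAB and " are
 emitted as
	.ascii	"a\t\""
 and strings longer than MAX_ASCII_LEN are split over several .ascii
 directives. */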
cce8749e 6450\f
ff9940b0 6451
cd2b33d0 6452const char *
84ed5e79 6453output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
6454 rtx operand;
6455 int really_return;
84ed5e79 6456 int reverse;
ff9940b0
RE
6457{
6458 char instr[100];
6459 int reg, live_regs = 0;
46406379 6460 int volatile_func = arm_volatile_func ();
e2c671ba 6461
d5b7b3ae
RE
6462 /* If a function is naked, don't use the "return" insn. */
6463 if (arm_naked_function_p (current_function_decl))
6464 return "";
6465
e2c671ba 6466 return_used_this_function = 1;
d5b7b3ae 6467
62b10bbc 6468 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6469 {
e2c671ba 6470 /* If this function was declared non-returning, and we have found a tail
3a5a4282
PB
6471 call, then we have to trust that the called function won't return. */
6472 if (really_return)
6473 {
6474 rtx ops[2];
6475
6476 /* Otherwise, trap an attempted return by aborting. */
6477 ops[0] = operand;
6478 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
6479 : "abort");
6480 assemble_external_libcall (ops[1]);
6481 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
6482 }
6483
e2c671ba
RE
6484 return "";
6485 }
6486
f3bb6135 6487 if (current_function_calls_alloca && ! really_return)
62b10bbc 6488 abort ();
d5b7b3ae 6489
f3bb6135
RE
6490 for (reg = 0; reg <= 10; reg++)
6491 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0
RE
6492 live_regs++;
6493
d5b7b3ae
RE
6494 if (! TARGET_APCS_FRAME
6495 && ! frame_pointer_needed
6496 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6497 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6498 live_regs++;
6499
ed0e6530
PB
6500 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6501 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6502 live_regs++;
6503
0616531f 6504 if (live_regs || regs_ever_live[LR_REGNUM])
ff9940b0
RE
6505 live_regs++;
6506
6507 if (frame_pointer_needed)
6508 live_regs += 4;
6509
3a5a4282
PB
6510 /* On some ARM architectures it is faster to use LDR rather than LDM to
6511 load a single register. On other architectures, the cost is the same. */
6512 if (live_regs == 1
6513 && regs_ever_live[LR_REGNUM]
3a5a4282 6514 && ! really_return)
d5b7b3ae
RE
6515 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
6516 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
6517 else if (live_regs == 1
6518 && regs_ever_live[LR_REGNUM]
d5b7b3ae
RE
6519 && TARGET_APCS_32)
6520 output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
6521 : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
3a5a4282 6522 else if (live_regs)
ff9940b0 6523 {
0616531f 6524 if (! regs_ever_live[LR_REGNUM])
ff9940b0 6525 live_regs++;
f3bb6135 6526
ff9940b0 6527 if (frame_pointer_needed)
84ed5e79
RE
6528 strcpy (instr,
6529 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
ff9940b0 6530 else
84ed5e79
RE
6531 strcpy (instr,
6532 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
6533
6534 for (reg = 0; reg <= 10; reg++)
62b10bbc 6535 if (regs_ever_live[reg]
6ed30148 6536 && (! call_used_regs[reg]
ed0e6530
PB
6537 || (flag_pic && ! TARGET_SINGLE_PIC_BASE
6538 && reg == PIC_OFFSET_TABLE_REGNUM)))
ff9940b0 6539 {
1d5473cb 6540 strcat (instr, "%|");
ff9940b0
RE
6541 strcat (instr, reg_names[reg]);
6542 if (--live_regs)
6543 strcat (instr, ", ");
6544 }
f3bb6135 6545
ff9940b0
RE
6546 if (frame_pointer_needed)
6547 {
1d5473cb 6548 strcat (instr, "%|");
ff9940b0
RE
6549 strcat (instr, reg_names[11]);
6550 strcat (instr, ", ");
1d5473cb 6551 strcat (instr, "%|");
ff9940b0
RE
6552 strcat (instr, reg_names[13]);
6553 strcat (instr, ", ");
1d5473cb 6554 strcat (instr, "%|");
6cfc7210 6555 strcat (instr, TARGET_INTERWORK || (! really_return)
62b10bbc 6556 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
ff9940b0
RE
6557 }
6558 else
1d5473cb 6559 {
d5b7b3ae
RE
6560 if (! TARGET_APCS_FRAME
6561 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6562 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6563 {
6564 strcat (instr, "%|");
6565 strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
6566 strcat (instr, ", ");
6567 }
6568
1d5473cb 6569 strcat (instr, "%|");
d5b7b3ae 6570
6cfc7210 6571 if (TARGET_INTERWORK && really_return)
62b10bbc 6572 strcat (instr, reg_names[IP_REGNUM]);
da6558fd 6573 else
62b10bbc 6574 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
1d5473cb 6575 }
d5b7b3ae 6576
2b835d68 6577 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 6578 output_asm_insn (instr, &operand);
da6558fd 6579
6cfc7210 6580 if (TARGET_INTERWORK && really_return)
da6558fd
NC
6581 {
6582 strcpy (instr, "bx%?");
6583 strcat (instr, reverse ? "%D0" : "%d0");
6584 strcat (instr, "\t%|");
6585 strcat (instr, frame_pointer_needed ? "lr" : "ip");
6586
6587 output_asm_insn (instr, & operand);
6588 }
ff9940b0
RE
6589 }
6590 else if (really_return)
6591 {
6cfc7210 6592 if (TARGET_INTERWORK)
25b1c156 6593 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
b111229a
RE
6594 else
6595 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
6596 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
da6558fd
NC
6597
6598 output_asm_insn (instr, & operand);
ff9940b0 6599 }
f3bb6135 6600
ff9940b0
RE
6601 return "";
6602}
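/* Typical results (a sketch): a frame-pointer function returns with
	ldmea	fp, {r4, fp, sp, pc}
 while a frameless one that saved r4 and lr returns with
	ldmfd	sp!, {r4, pc}
 ("}^" being substituted in 26-bit mode so the PSR is restored too). */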
6603
e82ea128
DE
6604/* Return nonzero if optimizing and the current function is volatile.
6605 Such functions never return, and many memory cycles can be saved
6606 by not storing register values that will never be needed again.
6607 This optimization was added to speed up context switching in a
6354dc9b 6608 kernel application. */
e2c671ba
RE
6609int
6610arm_volatile_func ()
6611{
6354dc9b
NC
6612 return (optimize > 0
6613 && current_function_nothrow
46406379 6614 && TREE_THIS_VOLATILE (current_function_decl));
e2c671ba
RE
6615}
6616
ef179a26
NC
6617/* Write the function name into the code section, directly preceding
6618 the function prologue.
6619
6620 Code will be output similar to this:
6621 t0
6622 .ascii "arm_poke_function_name", 0
6623 .align
6624 t1
6625 .word 0xff000000 + (t1 - t0)
6626 arm_poke_function_name
6627 mov ip, sp
6628 stmfd sp!, {fp, ip, lr, pc}
6629 sub fp, ip, #4
6630
6631 When performing a stack backtrace, code can inspect the value
6632 of 'pc' stored at 'fp' + 0. If the trace function then looks
6633 at location pc - 12 and the top 8 bits are set, then we know
6634 that there is a function name embedded immediately preceding this
6635 location, whose length is held in the low 24 bits of pc[-3].
6636
6637 We assume that pc is declared as a pointer to an unsigned long.
6638
6639 It is of no benefit to output the function name if we are assembling
6640 a leaf function. These function types will not contain a stack
6641 backtrace structure, so it is not possible to determine the
6642 function name. */
6643
6644void
6645arm_poke_function_name (stream, name)
6646 FILE * stream;
6647 char * name;
6648{
6649 unsigned long alignlength;
6650 unsigned long length;
6651 rtx x;
6652
d5b7b3ae
RE
6653 length = strlen (name) + 1;
6654 alignlength = ROUND_UP (length);
ef179a26 6655
949d79eb 6656 ASM_OUTPUT_ASCII (stream, name, length);
ef179a26 6657 ASM_OUTPUT_ALIGN (stream, 2);
e5951263 6658 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
ef179a26
NC
6659 ASM_OUTPUT_INT (stream, x);
6660}
6661
ff9940b0
RE
6662/* The amount of stack adjustment that happens here, in output_return and in
6663 output_epilogue must be exactly the same as was calculated during reload,
6664 or things will point to the wrong place. The only time we can safely
6665 ignore this constraint is when a function has no arguments on the stack,
6666 no stack frame requirement and no live registers except for `lr'. If we
6667 can guarantee that by making all function calls into tail calls and that
6668 lr is not clobbered in any other way, then there is no need to push lr
6354dc9b 6669 onto the stack. */
cce8749e 6670void
d5b7b3ae 6671output_arm_prologue (f, frame_size)
6cfc7210 6672 FILE * f;
cce8749e
CH
6673 int frame_size;
6674{
f3bb6135 6675 int reg, live_regs_mask = 0;
46406379 6676 int volatile_func = arm_volatile_func ();
cce8749e 6677
cce8749e
CH
6678 /* Nonzero if we must stuff some register arguments onto the stack as if
6679 they were passed there. */
6680 int store_arg_regs = 0;
6681
abaa26e5 6682 if (arm_ccfsm_state || arm_target_insn)
6354dc9b 6683 abort (); /* Sanity check. */
31fdb4d5
DE
6684
6685 if (arm_naked_function_p (current_function_decl))
6686 return;
6687
ff9940b0 6688 return_used_this_function = 0;
ff9940b0 6689
dd18ae56
NC
6690 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
6691 current_function_args_size,
6692 current_function_pretend_args_size, frame_size);
6693 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
6694 frame_pointer_needed,
6695 current_function_anonymous_args);
cce8749e 6696
e2c671ba 6697 if (volatile_func)
dd18ae56 6698 asm_fprintf (f, "\t%@ Volatile function.\n");
e2c671ba 6699
cce8749e
CH
6700 if (current_function_anonymous_args && current_function_pretend_args_size)
6701 store_arg_regs = 1;
6702
f3bb6135
RE
6703 for (reg = 0; reg <= 10; reg++)
6704 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e
CH
6705 live_regs_mask |= (1 << reg);
6706
d5b7b3ae
RE
6707 if (! TARGET_APCS_FRAME
6708 && ! frame_pointer_needed
6709 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6710 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6711 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6712
dd18ae56 6713 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
ed0e6530 6714 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6715 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6716
ff9940b0 6717 if (frame_pointer_needed)
e2c671ba 6718 live_regs_mask |= 0xD800;
62b10bbc 6719 else if (regs_ever_live[LR_REGNUM])
ff9940b0 6720 {
62b10bbc 6721 live_regs_mask |= 1 << LR_REGNUM;
cce8749e
CH
6722 }
6723
0616531f
RE
6724 if (live_regs_mask)
6725 /* If a di mode load/store multiple is used, and the base register
6726 is r3, then r4 can become an ever-live register without lr
6727 doing so; in that case we need to push lr as well, or we
6728 will fail to get a proper return. */
6729 live_regs_mask |= 1 << LR_REGNUM;
32de079a
RE
6730
6731#ifdef AOF_ASSEMBLER
6732 if (flag_pic)
dd18ae56 6733 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
32de079a 6734#endif
f3bb6135 6735}
cce8749e 6736
cd2b33d0 6737const char *
0616531f
RE
6738arm_output_epilogue (really_return)
6739 int really_return;
cce8749e 6740{
949d79eb
RE
6741 int reg;
6742 int live_regs_mask = 0;
6354dc9b 6743 /* If we need this, then it will always be at least this much. */
b111229a 6744 int floats_offset = 12;
cce8749e 6745 rtx operands[3];
949d79eb 6746 int frame_size = get_frame_size ();
d5b7b3ae
RE
6747 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
6748 FILE * f = asm_out_file;
e5951263 6749 int volatile_func = arm_volatile_func ();
d5b7b3ae 6750 int return_regnum;
cce8749e 6751
b36ba79f 6752 if (use_return_insn (FALSE) && return_used_this_function)
949d79eb 6753 return "";
cce8749e 6754
31fdb4d5
DE
6755 /* Naked functions don't have epilogues. */
6756 if (arm_naked_function_p (current_function_decl))
949d79eb 6757 return "";
31fdb4d5 6758
d5b7b3ae
RE
6759 /* If we are throwing an exception, the address we want to jump to is in
6760 R2; otherwise, it's in LR. */
6761 return_regnum = eh_ofs ? 2 : LR_REGNUM;
6762
0616531f
RE
6763 /* If we are throwing an exception, then we really must be doing a return,
6764 so we can't tail-call. */
6765 if (eh_ofs && ! really_return)
6766 abort ();
6767
e2c671ba 6768 /* A volatile function should never return. Call abort. */
c11145f6 6769 if (TARGET_ABORT_NORETURN && volatile_func)
e2c671ba 6770 {
86efdc8e 6771 rtx op;
ed0e6530 6772 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
2b835d68 6773 assemble_external_libcall (op);
e2c671ba 6774 output_asm_insn ("bl\t%a0", &op);
949d79eb 6775 return "";
e2c671ba
RE
6776 }
6777
f3bb6135
RE
6778 for (reg = 0; reg <= 10; reg++)
6779 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 6780 {
ff9940b0
RE
6781 live_regs_mask |= (1 << reg);
6782 floats_offset += 4;
cce8749e
CH
6783 }
6784
d5b7b3ae
RE
6785 /* Handle the frame pointer as a special case. */
6786 if (! TARGET_APCS_FRAME
6787 && ! frame_pointer_needed
6788 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6789 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6790 {
6791 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6792 floats_offset += 4;
6793 }
6794
ed0e6530
PB
6795 /* If we aren't loading the PIC register, don't stack it even though it may
6796 be live. */
6797 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6798 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6ed30148
RE
6799 {
6800 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6801 floats_offset += 4;
6802 }
6803
ff9940b0 6804 if (frame_pointer_needed)
cce8749e 6805 {
b111229a
RE
6806 if (arm_fpu_arch == FP_SOFT2)
6807 {
d5b7b3ae 6808 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a
RE
6809 if (regs_ever_live[reg] && ! call_used_regs[reg])
6810 {
6811 floats_offset += 12;
dd18ae56
NC
6812 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6813 reg, FP_REGNUM, floats_offset);
b111229a
RE
6814 }
6815 }
6816 else
6817 {
d5b7b3ae 6818 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 6819
d5b7b3ae 6820 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
b111229a
RE
6821 {
6822 if (regs_ever_live[reg] && ! call_used_regs[reg])
6823 {
6824 floats_offset += 12;
6cfc7210 6825
6354dc9b 6826 /* We can't unstack more than four registers at once. */
b111229a
RE
6827 if (start_reg - reg == 3)
6828 {
dd18ae56
NC
6829 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6830 reg, FP_REGNUM, floats_offset);
b111229a
RE
6831 start_reg = reg - 1;
6832 }
6833 }
6834 else
6835 {
6836 if (reg != start_reg)
dd18ae56
NC
6837 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6838 reg + 1, start_reg - reg,
6839 FP_REGNUM, floats_offset);
b111229a
RE
6840 start_reg = reg - 1;
6841 }
6842 }
6843
6844 /* Just in case the last register checked also needs unstacking. */
6845 if (reg != start_reg)
dd18ae56
NC
6846 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6847 reg + 1, start_reg - reg,
6848 FP_REGNUM, floats_offset);
b111229a 6849 }
da6558fd 6850
6cfc7210 6851 if (TARGET_INTERWORK)
b111229a
RE
6852 {
6853 live_regs_mask |= 0x6800;
dd18ae56 6854 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
d5b7b3ae
RE
6855 if (eh_ofs)
6856 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6857 REGNO (eh_ofs));
0616531f
RE
6858 if (really_return)
6859 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
d5b7b3ae 6860 }
0616531f 6861 else if (eh_ofs || ! really_return)
d5b7b3ae
RE
6862 {
6863 live_regs_mask |= 0x6800;
6864 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
0616531f
RE
6865 if (eh_ofs)
6866 {
6867 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6868 REGNO (eh_ofs));
6869 /* Even in 26-bit mode we do a mov (rather than a movs)
6870 because we don't have the PSR bits set in the
6871 address. */
6872 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6873 }
b111229a
RE
6874 }
6875 else
6876 {
6877 live_regs_mask |= 0xA800;
dd18ae56 6878 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
b111229a
RE
6879 TARGET_APCS_32 ? FALSE : TRUE);
6880 }
cce8749e
CH
6881 }
6882 else
6883 {
d2288d8d 6884 /* Restore stack pointer if necessary. */
56636818 6885 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
6886 {
6887 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
6888 operands[2] = GEN_INT (frame_size
6889 + current_function_outgoing_args_size);
d2288d8d
TG
6890 output_add_immediate (operands);
6891 }
6892
b111229a
RE
6893 if (arm_fpu_arch == FP_SOFT2)
6894 {
d5b7b3ae 6895 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a 6896 if (regs_ever_live[reg] && ! call_used_regs[reg])
dd18ae56
NC
6897 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6898 reg, SP_REGNUM);
b111229a
RE
6899 }
6900 else
6901 {
d5b7b3ae 6902 int start_reg = FIRST_ARM_FP_REGNUM;
b111229a 6903
d5b7b3ae 6904 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
b111229a
RE
6905 {
6906 if (regs_ever_live[reg] && ! call_used_regs[reg])
6907 {
6908 if (reg - start_reg == 3)
6909 {
dd18ae56
NC
6910 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6911 start_reg, SP_REGNUM);
b111229a
RE
6912 start_reg = reg + 1;
6913 }
6914 }
6915 else
6916 {
6917 if (reg != start_reg)
dd18ae56
NC
6918 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6919 start_reg, reg - start_reg,
6920 SP_REGNUM);
6cfc7210 6921
b111229a
RE
6922 start_reg = reg + 1;
6923 }
6924 }
6925
6926 /* Just in case the last register checked also needs unstacking. */
6927 if (reg != start_reg)
dd18ae56
NC
6928 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6929 start_reg, reg - start_reg, SP_REGNUM);
b111229a
RE
6930 }
6931
62b10bbc 6932 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
cce8749e 6933 {
6cfc7210 6934 if (TARGET_INTERWORK)
b111229a 6935 {
0616531f 6936 live_regs_mask |= 1 << LR_REGNUM;
f5a1b0d2 6937
d5b7b3ae
RE
6938 /* Handle LR on its own. */
6939 if (live_regs_mask == (1 << LR_REGNUM))
6940 {
6941 if (eh_ofs)
6942 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
6943 SP_REGNUM);
6944 else
6945 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
6946 SP_REGNUM);
6947 }
6948 else if (live_regs_mask != 0)
6949 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
6950 FALSE);
6951
6952 if (eh_ofs)
6953 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6954 REGNO (eh_ofs));
6955
0616531f
RE
6956 if (really_return)
6957 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
b111229a 6958 }
d5b7b3ae
RE
6959 else if (eh_ofs)
6960 {
6961 if (live_regs_mask == 0)
6962 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
6963 else
6964 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
6965 live_regs_mask | (1 << LR_REGNUM), FALSE);
6966
6967 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6968 REGNO (eh_ofs));
6969 /* Jump to the target; even in 26-bit mode. */
6970 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6971 }
0616531f
RE
6972 else if (TARGET_APCS_32 && live_regs_mask == 0 && ! really_return)
6973 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
6974 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
d5b7b3ae 6975 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
0616531f
RE
6976 else if (! really_return)
6977 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
6978 live_regs_mask | (1 << LR_REGNUM), FALSE);
32de079a 6979 else
d5b7b3ae
RE
6980 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
6981 live_regs_mask | (1 << PC_REGNUM),
32de079a 6982 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
6983 }
6984 else
6985 {
62b10bbc 6986 if (live_regs_mask || regs_ever_live[LR_REGNUM])
cce8749e 6987 {
6354dc9b 6988 /* Restore the integer regs, and the return address into lr. */
0616531f 6989 live_regs_mask |= 1 << LR_REGNUM;
32de079a 6990
d5b7b3ae
RE
6991 if (live_regs_mask == (1 << LR_REGNUM))
6992 {
6993 if (eh_ofs)
6994 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
6995 SP_REGNUM);
6996 else
6997 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
6998 SP_REGNUM);
6999 }
7000 else if (live_regs_mask != 0)
7001 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7002 FALSE);
cce8749e 7003 }
b111229a 7004
cce8749e
CH
7005 if (current_function_pretend_args_size)
7006 {
6354dc9b 7007 /* Unwind the pre-pushed regs. */
cce8749e 7008 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 7009 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
7010 output_add_immediate (operands);
7011 }
d5b7b3ae
RE
7012
7013 if (eh_ofs)
7014 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7015 REGNO (eh_ofs));
0616531f
RE
7016
7017 if (really_return)
7018 {
7019 /* And finally, go home. */
7020 if (TARGET_INTERWORK)
7021 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7022 else if (TARGET_APCS_32 || eh_ofs)
7023 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7024 else
7025 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7026 }
cce8749e
CH
7027 }
7028 }
f3bb6135 7029
949d79eb
RE
7030 return "";
7031}
7032
7033void
eb3921e8 7034output_func_epilogue (frame_size)
949d79eb
RE
7035 int frame_size;
7036{
d5b7b3ae
RE
7037 if (TARGET_THUMB)
7038 {
7039 /* ??? Probably not safe to set this here, since it assumes that a
7040 function will be emitted as assembly immediately after we generate
7041 RTL for it. This does not happen for inline functions. */
7042 return_used_this_function = 0;
7043 }
7044 else
7045 {
7046 if (use_return_insn (FALSE)
7047 && return_used_this_function
7048 && (frame_size + current_function_outgoing_args_size) != 0
7049 && ! frame_pointer_needed)
7050 abort ();
f3bb6135 7051
d5b7b3ae
RE
7052 /* Reset the ARM-specific per-function variables. */
7053 current_function_anonymous_args = 0;
7054 after_arm_reorg = 0;
7055 }
f3bb6135 7056}
e2c671ba 7057
2c849145
JM
7058/* Generate and emit an insn that we will recognize as a push_multi.
7059 Unfortunately, since this insn does not reflect very well the actual
7060 semantics of the operation, we need to annotate the insn for the benefit
7061 of DWARF2 frame unwind information. */
2c849145 7062static rtx
e2c671ba
RE
7063emit_multi_reg_push (mask)
7064 int mask;
7065{
7066 int num_regs = 0;
7067 int i, j;
7068 rtx par;
2c849145
JM
7069 rtx dwarf;
7070 rtx tmp, reg;
e2c671ba 7071
d5b7b3ae 7072 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba 7073 if (mask & (1 << i))
d5b7b3ae 7074 num_regs ++;
e2c671ba
RE
7075
7076 if (num_regs == 0 || num_regs > 16)
7077 abort ();
7078
43cffd11 7079 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
2c849145
JM
7080 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7081 RTX_FRAME_RELATED_P (dwarf) = 1;
e2c671ba 7082
d5b7b3ae 7083 for (i = 0; i <= LAST_ARM_REGNUM; i++)
e2c671ba
RE
7084 {
7085 if (mask & (1 << i))
7086 {
2c849145
JM
7087 reg = gen_rtx_REG (SImode, i);
7088
e2c671ba 7089 XVECEXP (par, 0, 0)
43cffd11
RE
7090 = gen_rtx_SET (VOIDmode,
7091 gen_rtx_MEM (BLKmode,
7092 gen_rtx_PRE_DEC (BLKmode,
7093 stack_pointer_rtx)),
7094 gen_rtx_UNSPEC (BLKmode,
2c849145 7095 gen_rtvec (1, reg),
43cffd11 7096 2));
2c849145
JM
7097
7098 tmp = gen_rtx_SET (VOIDmode,
7099 gen_rtx_MEM (SImode,
7100 gen_rtx_PRE_DEC (BLKmode,
7101 stack_pointer_rtx)),
7102 reg);
7103 RTX_FRAME_RELATED_P (tmp) = 1;
7104 XVECEXP (dwarf, 0, num_regs - 1) = tmp;
7105
e2c671ba
RE
7106 break;
7107 }
7108 }
7109
7110 for (j = 1, i++; j < num_regs; i++)
7111 {
7112 if (mask & (1 << i))
7113 {
2c849145
JM
7114 reg = gen_rtx_REG (SImode, i);
7115
7116 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7117
7118 tmp = gen_rtx_SET (VOIDmode,
7119 gen_rtx_MEM (SImode,
7120 gen_rtx_PRE_DEC (BLKmode,
7121 stack_pointer_rtx)),
7122 reg);
7123 RTX_FRAME_RELATED_P (tmp) = 1;
7124 XVECEXP (dwarf, 0, num_regs - j - 1) = tmp;
7125
e2c671ba
RE
7126 j++;
7127 }
7128 }
b111229a 7129
2c849145
JM
7130 par = emit_insn (par);
7131 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7132 REG_NOTES (par));
7133 return par;
b111229a
RE
7134}
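/* Shape of the RTL generated above (a sketch, for a mask covering r4
 and lr):
 (parallel [(set (mem:BLK (pre_dec:BLK sp))
 (unspec:BLK [(reg:SI 4)] 2))
 (use (reg:SI 14))])
 with a parallel of plain (set (mem:SI ...) (reg)) expressions
 attached as a REG_FRAME_RELATED_EXPR note for the DWARF2 unwinder. */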
7135
2c849145 7136static rtx
b111229a
RE
7137emit_sfm (base_reg, count)
7138 int base_reg;
7139 int count;
7140{
7141 rtx par;
2c849145
JM
7142 rtx dwarf;
7143 rtx tmp, reg;
b111229a
RE
7144 int i;
7145
43cffd11 7146 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
2c849145
JM
7147 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7148 RTX_FRAME_RELATED_P (dwarf) = 1;
7149
7150 reg = gen_rtx_REG (XFmode, base_reg++);
43cffd11
RE
7151
7152 XVECEXP (par, 0, 0)
7153 = gen_rtx_SET (VOIDmode,
7154 gen_rtx_MEM (BLKmode,
7155 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7156 gen_rtx_UNSPEC (BLKmode,
2c849145 7157 gen_rtvec (1, reg),
43cffd11 7158 2));
2c849145
JM
7159 tmp
7160 = gen_rtx_SET (VOIDmode,
7161 gen_rtx_MEM (XFmode,
7162 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7163 reg);
7164 RTX_FRAME_RELATED_P (tmp) = 1;
7165 XVECEXP (dwarf, 0, count - 1) = tmp;
7166
b111229a 7167 for (i = 1; i < count; i++)
2c849145
JM
7168 {
7169 reg = gen_rtx_REG (XFmode, base_reg++);
7170 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7171
7172 tmp = gen_rtx_SET (VOIDmode,
7173 gen_rtx_MEM (XFmode,
7174 gen_rtx_PRE_DEC (BLKmode,
7175 stack_pointer_rtx)),
7176 reg);
7177 RTX_FRAME_RELATED_P (tmp) = 1;
7178 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7179 }
b111229a 7180
2c849145
JM
7181 par = emit_insn (par);
7182 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7183 REG_NOTES (par));
7184 return par;
e2c671ba
RE
7185}
7186
7187void
7188arm_expand_prologue ()
7189{
7190 int reg;
56636818
JL
7191 rtx amount = GEN_INT (-(get_frame_size ()
7192 + current_function_outgoing_args_size));
e2c671ba
RE
7193 int live_regs_mask = 0;
7194 int store_arg_regs = 0;
949d79eb
RE
7195 /* If this function doesn't return, then there is no need to push
7196 the call-saved regs. */
46406379 7197 int volatile_func = arm_volatile_func ();
2c849145 7198 rtx insn;
e2c671ba 7199
31fdb4d5
DE
7200 /* Naked functions don't have prologues. */
7201 if (arm_naked_function_p (current_function_decl))
7202 return;
7203
e2c671ba
RE
7204 if (current_function_anonymous_args && current_function_pretend_args_size)
7205 store_arg_regs = 1;
7206
7207 if (! volatile_func)
6ed30148
RE
7208 {
7209 for (reg = 0; reg <= 10; reg++)
7210 if (regs_ever_live[reg] && ! call_used_regs[reg])
7211 live_regs_mask |= 1 << reg;
7212
d5b7b3ae
RE
7213 if (! TARGET_APCS_FRAME
7214 && ! frame_pointer_needed
7215 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7216 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7217 live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7218
6ed30148
RE
7219 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7220 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
e2c671ba 7221
62b10bbc
NC
7222 if (regs_ever_live[LR_REGNUM])
7223 live_regs_mask |= 1 << LR_REGNUM;
6ed30148 7224 }
e2c671ba
RE
7225
7226 if (frame_pointer_needed)
7227 {
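      /* 0xD800 sets bits 11, 12, 14 and 15, i.e. the frame pointer (r11),
	 ip, lr and pc.  */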
7228 live_regs_mask |= 0xD800;
2c849145
JM
7229 insn = emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
7230 stack_pointer_rtx));
7231 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7232 }
7233
7234 if (current_function_pretend_args_size)
7235 {
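      /* A worked example of the push mask computed below: a pretend args
	 size of 8 bytes gives (0xf0 >> (8 / 4)) & 0xf == 0xc, i.e.
	 push {r2, r3}, the two highest argument registers.  */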
7236 if (store_arg_regs)
2c849145
JM
7237 insn = emit_multi_reg_push
7238 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
e2c671ba 7239 else
2c849145
JM
7240 insn = emit_insn
7241 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7242 GEN_INT (-current_function_pretend_args_size)));
7243 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7244 }
7245
7246 if (live_regs_mask)
7247 {
7248 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 7249 we won't get a proper return. */
62b10bbc 7250 live_regs_mask |= 1 << LR_REGNUM;
2c849145
JM
7251 insn = emit_multi_reg_push (live_regs_mask);
7252 RTX_FRAME_RELATED_P (insn) = 1;
e2c671ba
RE
7253 }
7254
d5b7b3ae
RE
7255 /* For now the integer regs are still pushed in output_arm_epilogue (). */
7256
e2c671ba 7257 if (! volatile_func)
b111229a
RE
7258 {
7259 if (arm_fpu_arch == FP_SOFT2)
7260 {
d5b7b3ae 7261 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a 7262 if (regs_ever_live[reg] && ! call_used_regs[reg])
2c849145
JM
7263 {
7264 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7265 insn = gen_rtx_MEM (XFmode, insn);
7266 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7267 gen_rtx_REG (XFmode, reg)));
7268 RTX_FRAME_RELATED_P (insn) = 1;
7269 }
b111229a
RE
7270 }
7271 else
7272 {
d5b7b3ae 7273 int start_reg = LAST_ARM_FP_REGNUM;
b111229a 7274
d5b7b3ae 7275 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
b111229a
RE
7276 {
7277 if (regs_ever_live[reg] && ! call_used_regs[reg])
7278 {
7279 if (start_reg - reg == 3)
7280 {
2c849145
JM
7281 insn = emit_sfm (reg, 4);
7282 RTX_FRAME_RELATED_P (insn) = 1;
b111229a
RE
7283 start_reg = reg - 1;
7284 }
7285 }
7286 else
7287 {
7288 if (start_reg != reg)
2c849145
JM
7289 {
7290 insn = emit_sfm (reg + 1, start_reg - reg);
7291 RTX_FRAME_RELATED_P (insn) = 1;
7292 }
b111229a
RE
7293 start_reg = reg - 1;
7294 }
7295 }
7296
7297 if (start_reg != reg)
2c849145
JM
7298 {
7299 insn = emit_sfm (reg + 1, start_reg - reg);
7300 RTX_FRAME_RELATED_P (insn) = 1;
7301 }
b111229a
RE
7302 }
7303 }
e2c671ba
RE
7304
7305 if (frame_pointer_needed)
2c849145
JM
7306 {
7307 insn = GEN_INT (-(4 + current_function_pretend_args_size));
7308 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx,
7309 gen_rtx_REG (SImode, IP_REGNUM),
7310 insn));
7311 RTX_FRAME_RELATED_P (insn) = 1;
7312 }
e2c671ba
RE
7313
7314 if (amount != const0_rtx)
7315 {
2c849145
JM
7316 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7317 amount));
7318 RTX_FRAME_RELATED_P (insn) = 1;
43cffd11
RE
7319 emit_insn (gen_rtx_CLOBBER (VOIDmode,
7320 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
e2c671ba
RE
7321 }
7322
7323 /* If we are profiling, make sure no instructions are scheduled before
f5a1b0d2
NC
7324 the call to mcount. Similarly if the user has requested no
7325 scheduling in the prolog. */
7326 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
e2c671ba
RE
7327 emit_insn (gen_blockage ());
7328}
cce8749e 7329\f
9997d19d
RE
7330/* If CODE is 'd', then X is a condition operand and the instruction
7331 should only be executed if the condition is true.
ddd5a7c1 7332 If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
7333 should only be executed if the condition is false: however, if the mode
7334 of the comparison is CCFPEmode, then always execute the instruction -- we
7335 do this because in these circumstances !GE does not necessarily imply LT;
7336 in these cases the instruction pattern will take care to make sure that
7337 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 7338 doing this instruction unconditionally.
9997d19d
RE
7339 If CODE is 'N' then X is a floating point operand that must be negated
7340 before output.
7341 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7342 If X is a REG and CODE is `M', output an ldm/stm style multi-reg. */
7343
7344void
7345arm_print_operand (stream, x, code)
62b10bbc 7346 FILE * stream;
9997d19d
RE
7347 rtx x;
7348 int code;
7349{
7350 switch (code)
7351 {
7352 case '@':
f3139301 7353 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
7354 return;
7355
d5b7b3ae
RE
7356 case '_':
7357 fputs (user_label_prefix, stream);
7358 return;
7359
9997d19d 7360 case '|':
f3139301 7361 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
7362 return;
7363
7364 case '?':
7365 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
7366 fputs (arm_condition_codes[arm_current_cc], stream);
7367 return;
7368
7369 case 'N':
7370 {
7371 REAL_VALUE_TYPE r;
7372 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7373 r = REAL_VALUE_NEGATE (r);
7374 fprintf (stream, "%s", fp_const_from_val (&r));
7375 }
7376 return;
7377
7378 case 'B':
7379 if (GET_CODE (x) == CONST_INT)
4bc74ece
NC
7380 {
7381 HOST_WIDE_INT val;
7382 val = ARM_SIGN_EXTEND (~ INTVAL (x));
36ba9cb8 7383 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7384 }
9997d19d
RE
7385 else
7386 {
7387 putc ('~', stream);
7388 output_addr_const (stream, x);
7389 }
7390 return;
7391
7392 case 'i':
7393 fprintf (stream, "%s", arithmetic_instr (x, 1));
7394 return;
7395
7396 case 'I':
7397 fprintf (stream, "%s", arithmetic_instr (x, 0));
7398 return;
7399
7400 case 'S':
7401 {
7402 HOST_WIDE_INT val;
cd2b33d0 7403 const char * shift = shift_op (x, & val);
9997d19d 7404
e2c671ba
RE
7405 if (shift)
7406 {
4bc74ece 7407 fprintf (stream, ", %s ", shift);
e2c671ba
RE
7408 if (val == -1)
7409 arm_print_operand (stream, XEXP (x, 1), 0);
7410 else
4bc74ece
NC
7411 {
7412 fputc ('#', stream);
36ba9cb8 7413 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
4bc74ece 7414 }
e2c671ba 7415 }
9997d19d
RE
7416 }
7417 return;
7418
d5b7b3ae
RE
7419 /* An explanation of the 'Q', 'R' and 'H' register operands:
7420
7421 In a pair of registers containing a DI or DF value the 'Q'
7422 operand returns the register number of the register containing
7423 the least signficant part of the value. The 'R' operand returns
7424 the register number of the register containing the most
7425 significant part of the value.
7426
7427 The 'H' operand returns the higher of the two register numbers.
7428 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
7429 same as the 'Q' operand, since the most significant part of the
7430 value is held in the lower number register. The reverse is true
7431 on systems where WORDS_BIG_ENDIAN is false.
7432
7433 The purpose of these operands is to distinguish between cases
7434 where the endian-ness of the values is important (for example
7435 when they are added together), and cases where the endian-ness
7436 is irrelevant, but the order of register operations is important.
7437 For example when loading a value from memory into a register
7438 pair, the endian-ness does not matter. Provided that the value
7439 from the lower memory address is put into the lower numbered
7440 register, and the value from the higher address is put into the
7441 higher numbered register, the load will work regardless of whether
7442 the value being loaded is big-wordian or little-wordian. The
7443 order of the two register loads can matter however, if the address
7444 of the memory location is actually held in one of the registers
7445 being overwritten by the load. */
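    /* For example, for a DImode value held in {r0, r1} on a little-endian
       target, %Q prints r0 (the least significant word), %R prints r1 and
       %H prints r1 (the higher register number).  With WORDS_BIG_ENDIAN
       the words swap, so %Q prints r1 and %R prints r0, while %H still
       prints r1.  */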
c1c2bc04 7446 case 'Q':
d5b7b3ae 7447 if (REGNO (x) > LAST_ARM_REGNUM)
c1c2bc04 7448 abort ();
d5b7b3ae 7449 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
c1c2bc04
RE
7450 return;
7451
9997d19d 7452 case 'R':
d5b7b3ae 7453 if (REGNO (x) > LAST_ARM_REGNUM)
9997d19d 7454 abort ();
d5b7b3ae
RE
7455 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
7456 return;
7457
7458 case 'H':
7459 if (REGNO (x) > LAST_ARM_REGNUM)
7460 abort ();
7461 asm_fprintf (stream, "%r", REGNO (x) + 1);
9997d19d
RE
7462 return;
7463
7464 case 'm':
d5b7b3ae
RE
7465 asm_fprintf (stream, "%r",
7466 GET_CODE (XEXP (x, 0)) == REG
7467 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9997d19d
RE
7468 return;
7469
7470 case 'M':
dd18ae56 7471 asm_fprintf (stream, "{%r-%r}",
d5b7b3ae
RE
7472 REGNO (x),
7473 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
9997d19d
RE
7474 return;
7475
7476 case 'd':
d5b7b3ae
RE
7477 if (! x)
7478 return;
7479
7480 if (TARGET_ARM)
9997d19d
RE
7481 fputs (arm_condition_codes[get_arm_condition_code (x)],
7482 stream);
d5b7b3ae
RE
7483 else
7484 fputs (thumb_condition_code (x, 0), stream);
9997d19d
RE
7485 return;
7486
7487 case 'D':
d5b7b3ae
RE
7488 if (! x)
7489 return;
7490
7491 if (TARGET_ARM)
7492 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
7493 (get_arm_condition_code (x))],
9997d19d 7494 stream);
d5b7b3ae
RE
7495 else
7496 fputs (thumb_condition_code (x, 1), stream);
9997d19d
RE
7497 return;
7498
7499 default:
7500 if (x == 0)
7501 abort ();
7502
7503 if (GET_CODE (x) == REG)
d5b7b3ae 7504 asm_fprintf (stream, "%r", REGNO (x));
9997d19d
RE
7505 else if (GET_CODE (x) == MEM)
7506 {
7507 output_memory_reference_mode = GET_MODE (x);
7508 output_address (XEXP (x, 0));
7509 }
7510 else if (GET_CODE (x) == CONST_DOUBLE)
7511 fprintf (stream, "#%s", fp_immediate_constant (x));
7512 else if (GET_CODE (x) == NEG)
6354dc9b 7513 abort (); /* This should never happen now. */
9997d19d
RE
7514 else
7515 {
7516 fputc ('#', stream);
7517 output_addr_const (stream, x);
7518 }
7519 }
7520}
cce8749e
CH
7521\f
7522/* A finite state machine takes care of noticing whether or not instructions
7523 can be conditionally executed, and thus decrease execution time and code
7524 size by deleting branch instructions. The fsm is controlled by
7525 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
7526
7527/* The state of the fsm controlling condition codes are:
7528 0: normal, do nothing special
7529 1: make ASM_OUTPUT_OPCODE not output this instruction
7530 2: make ASM_OUTPUT_OPCODE not output this instruction
7531 3: make instructions conditional
7532 4: make instructions conditional
7533
7534 State transitions (state->state by whom under condition):
7535 0 -> 1 final_prescan_insn if the `target' is a label
7536 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
7537 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
7538 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
7539 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
7540 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
7541 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
7542 (the target insn is arm_target_insn).
7543
ff9940b0
RE
7544 If the jump clobbers the conditions then we use states 2 and 4.
7545
7546 A similar thing can be done with conditional return insns.
7547
cce8749e
CH
7548 XXX In case the `target' is an unconditional branch, this conditionalising
7549 of the instructions always reduces code size, but not always execution
7550 time. But then, I want to reduce the code size to somewhere near what
7551 /bin/cc produces. */
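
/* As a minimal illustration of the transformation performed (a sketch
   only), a sequence such as

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   can be emitted, with the branch and label deleted, as

	cmp	r0, #0
	addne	r1, r1, #1
*/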
7552
cce8749e
CH
7553/* Returns the index of the ARM condition code string in
7554 `arm_condition_codes'. COMPARISON should be an rtx like
7555 `(eq (...) (...))'. */
7556
84ed5e79 7557static enum arm_cond_code
cce8749e
CH
7558get_arm_condition_code (comparison)
7559 rtx comparison;
7560{
5165176d 7561 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
7562 register int code;
7563 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
7564
7565 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 7566 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
7567 XEXP (comparison, 1));
7568
7569 switch (mode)
cce8749e 7570 {
84ed5e79
RE
7571 case CC_DNEmode: code = ARM_NE; goto dominance;
7572 case CC_DEQmode: code = ARM_EQ; goto dominance;
7573 case CC_DGEmode: code = ARM_GE; goto dominance;
7574 case CC_DGTmode: code = ARM_GT; goto dominance;
7575 case CC_DLEmode: code = ARM_LE; goto dominance;
7576 case CC_DLTmode: code = ARM_LT; goto dominance;
7577 case CC_DGEUmode: code = ARM_CS; goto dominance;
7578 case CC_DGTUmode: code = ARM_HI; goto dominance;
7579 case CC_DLEUmode: code = ARM_LS; goto dominance;
7580 case CC_DLTUmode: code = ARM_CC;
7581
7582 dominance:
7583 if (comp_code != EQ && comp_code != NE)
7584 abort ();
7585
7586 if (comp_code == EQ)
7587 return ARM_INVERSE_CONDITION_CODE (code);
7588 return code;
7589
5165176d 7590 case CC_NOOVmode:
84ed5e79 7591 switch (comp_code)
5165176d 7592 {
84ed5e79
RE
7593 case NE: return ARM_NE;
7594 case EQ: return ARM_EQ;
7595 case GE: return ARM_PL;
7596 case LT: return ARM_MI;
5165176d
RE
7597 default: abort ();
7598 }
7599
7600 case CC_Zmode:
7601 case CCFPmode:
84ed5e79 7602 switch (comp_code)
5165176d 7603 {
84ed5e79
RE
7604 case NE: return ARM_NE;
7605 case EQ: return ARM_EQ;
5165176d
RE
7606 default: abort ();
7607 }
7608
7609 case CCFPEmode:
84ed5e79
RE
7610 switch (comp_code)
7611 {
7612 case GE: return ARM_GE;
7613 case GT: return ARM_GT;
7614 case LE: return ARM_LS;
7615 case LT: return ARM_MI;
7616 default: abort ();
7617 }
7618
7619 case CC_SWPmode:
7620 switch (comp_code)
7621 {
7622 case NE: return ARM_NE;
7623 case EQ: return ARM_EQ;
7624 case GE: return ARM_LE;
7625 case GT: return ARM_LT;
7626 case LE: return ARM_GE;
7627 case LT: return ARM_GT;
7628 case GEU: return ARM_LS;
7629 case GTU: return ARM_CC;
7630 case LEU: return ARM_CS;
7631 case LTU: return ARM_HI;
7632 default: abort ();
7633 }
7634
bd9c7e23
RE
7635 case CC_Cmode:
7636 switch (comp_code)
7637 {
7638 case LTU: return ARM_CS;
7639 case GEU: return ARM_CC;
7640 default: abort ();
7641 }
7642
5165176d 7643 case CCmode:
84ed5e79 7644 switch (comp_code)
5165176d 7645 {
84ed5e79
RE
7646 case NE: return ARM_NE;
7647 case EQ: return ARM_EQ;
7648 case GE: return ARM_GE;
7649 case GT: return ARM_GT;
7650 case LE: return ARM_LE;
7651 case LT: return ARM_LT;
7652 case GEU: return ARM_CS;
7653 case GTU: return ARM_HI;
7654 case LEU: return ARM_LS;
7655 case LTU: return ARM_CC;
5165176d
RE
7656 default: abort ();
7657 }
7658
cce8749e
CH
7659 default: abort ();
7660 }
84ed5e79
RE
7661
7662 abort ();
f3bb6135 7663}
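
/* For illustration: comparisons whose operands were swapped by the
   compiler are given CC_SWPmode, so an rtx such as
   (gt (reg cc) (const_int 0)) in that mode maps to ARM_LT above --
   the condition is reversed to match the original operand order.  */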
cce8749e
CH
7664
7665
7666void
74bbc178 7667arm_final_prescan_insn (insn)
cce8749e 7668 rtx insn;
cce8749e
CH
7669{
7670 /* BODY will hold the body of INSN. */
7671 register rtx body = PATTERN (insn);
7672
7673 /* This will be 1 if trying to repeat the trick, and things need to be
7674 reversed if it appears to fail. */
7675 int reverse = 0;
7676
ff9940b0
RE
7677 /* JUMP_CLOBBERS will be nonzero if the conditions used when a branch is
7678 taken are clobbered, even if the rtl suggests otherwise. It also
7679 means that we have to grub around within the jump expression to find
7680 out what the conditions are when the jump isn't taken. */
7681 int jump_clobbers = 0;
7682
6354dc9b 7683 /* If we start with a return insn, we only succeed if we find another one. */
ff9940b0
RE
7684 int seeking_return = 0;
7685
cce8749e
CH
7686 /* START_INSN will hold the insn from where we start looking. This is the
7687 first insn after the following code_label if REVERSE is true. */
7688 rtx start_insn = insn;
7689
7690 /* If in state 4, check if the target branch is reached, in order to
7691 change back to state 0. */
7692 if (arm_ccfsm_state == 4)
7693 {
7694 if (insn == arm_target_insn)
f5a1b0d2
NC
7695 {
7696 arm_target_insn = NULL;
7697 arm_ccfsm_state = 0;
7698 }
cce8749e
CH
7699 return;
7700 }
7701
7702 /* If in state 3, it is possible to repeat the trick, if this insn is an
7703 unconditional branch to a label, and immediately following this branch
7704 is the previous target label which is only used once, and the label this
7705 branch jumps to is not too far off. */
7706 if (arm_ccfsm_state == 3)
7707 {
7708 if (simplejump_p (insn))
7709 {
7710 start_insn = next_nonnote_insn (start_insn);
7711 if (GET_CODE (start_insn) == BARRIER)
7712 {
7713 /* XXX Isn't this always a barrier? */
7714 start_insn = next_nonnote_insn (start_insn);
7715 }
7716 if (GET_CODE (start_insn) == CODE_LABEL
7717 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7718 && LABEL_NUSES (start_insn) == 1)
7719 reverse = TRUE;
7720 else
7721 return;
7722 }
ff9940b0
RE
7723 else if (GET_CODE (body) == RETURN)
7724 {
7725 start_insn = next_nonnote_insn (start_insn);
7726 if (GET_CODE (start_insn) == BARRIER)
7727 start_insn = next_nonnote_insn (start_insn);
7728 if (GET_CODE (start_insn) == CODE_LABEL
7729 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7730 && LABEL_NUSES (start_insn) == 1)
7731 {
7732 reverse = TRUE;
7733 seeking_return = 1;
7734 }
7735 else
7736 return;
7737 }
cce8749e
CH
7738 else
7739 return;
7740 }
7741
7742 if (arm_ccfsm_state != 0 && !reverse)
7743 abort ();
7744 if (GET_CODE (insn) != JUMP_INSN)
7745 return;
7746
ddd5a7c1 7747 /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
 7748 the jump should always come first. */
7749 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
7750 body = XVECEXP (body, 0, 0);
7751
7752#if 0
7753 /* If this is a conditional return then we don't want to know. */
7754 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7755 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
7756 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
7757 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
7758 return;
7759#endif
7760
cce8749e
CH
7761 if (reverse
7762 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7763 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
7764 {
bd9c7e23
RE
7765 int insns_skipped;
7766 int fail = FALSE, succeed = FALSE;
cce8749e
CH
7767 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
7768 int then_not_else = TRUE;
ff9940b0 7769 rtx this_insn = start_insn, label = 0;
cce8749e 7770
ff9940b0 7771 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40
RE
7772 {
7773 /* The code below is wrong for these, and I haven't time to
7774 fix it now. So we just do the safe thing and return. This
7775 whole function needs re-writing anyway. */
7776 jump_clobbers = 1;
7777 return;
7778 }
ff9940b0 7779
cce8749e
CH
7780 /* Register the insn jumped to. */
7781 if (reverse)
ff9940b0
RE
7782 {
7783 if (!seeking_return)
7784 label = XEXP (SET_SRC (body), 0);
7785 }
cce8749e
CH
7786 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
7787 label = XEXP (XEXP (SET_SRC (body), 1), 0);
7788 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
7789 {
7790 label = XEXP (XEXP (SET_SRC (body), 2), 0);
7791 then_not_else = FALSE;
7792 }
ff9940b0
RE
7793 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
7794 seeking_return = 1;
7795 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
7796 {
7797 seeking_return = 1;
7798 then_not_else = FALSE;
7799 }
cce8749e
CH
7800 else
7801 abort ();
7802
7803 /* See how many insns this branch skips, and what kind of insns. If all
7804 insns are okay, and the label or unconditional branch to the same
7805 label is not too far away, succeed. */
7806 for (insns_skipped = 0;
b36ba79f 7807 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
cce8749e
CH
7808 {
7809 rtx scanbody;
7810
7811 this_insn = next_nonnote_insn (this_insn);
7812 if (!this_insn)
7813 break;
7814
cce8749e
CH
7815 switch (GET_CODE (this_insn))
7816 {
7817 case CODE_LABEL:
7818 /* Succeed if it is the target label, otherwise fail since
7819 control falls in from somewhere else. */
7820 if (this_insn == label)
7821 {
ff9940b0
RE
7822 if (jump_clobbers)
7823 {
7824 arm_ccfsm_state = 2;
7825 this_insn = next_nonnote_insn (this_insn);
7826 }
7827 else
7828 arm_ccfsm_state = 1;
cce8749e
CH
7829 succeed = TRUE;
7830 }
7831 else
7832 fail = TRUE;
7833 break;
7834
ff9940b0 7835 case BARRIER:
cce8749e 7836 /* Succeed if the following insn is the target label.
ff9940b0
RE
7837 Otherwise fail.
7838 If return insns are used then the last insn in a function
6354dc9b 7839 will be a barrier. */
cce8749e 7840 this_insn = next_nonnote_insn (this_insn);
ff9940b0 7841 if (this_insn && this_insn == label)
cce8749e 7842 {
ff9940b0
RE
7843 if (jump_clobbers)
7844 {
7845 arm_ccfsm_state = 2;
7846 this_insn = next_nonnote_insn (this_insn);
7847 }
7848 else
7849 arm_ccfsm_state = 1;
cce8749e
CH
7850 succeed = TRUE;
7851 }
7852 else
7853 fail = TRUE;
7854 break;
7855
ff9940b0 7856 case CALL_INSN:
2b835d68 7857 /* If using 32-bit addresses the cc is not preserved over
914a3b8c 7858 calls. */
2b835d68 7859 if (TARGET_APCS_32)
bd9c7e23
RE
7860 {
7861 /* Succeed if the following insn is the target label,
7862 or if the following two insns are a barrier and
7863 the target label. */
7864 this_insn = next_nonnote_insn (this_insn);
7865 if (this_insn && GET_CODE (this_insn) == BARRIER)
7866 this_insn = next_nonnote_insn (this_insn);
7867
7868 if (this_insn && this_insn == label
b36ba79f 7869 && insns_skipped < max_insns_skipped)
bd9c7e23
RE
7870 {
7871 if (jump_clobbers)
7872 {
7873 arm_ccfsm_state = 2;
7874 this_insn = next_nonnote_insn (this_insn);
7875 }
7876 else
7877 arm_ccfsm_state = 1;
7878 succeed = TRUE;
7879 }
7880 else
7881 fail = TRUE;
7882 }
ff9940b0 7883 break;
2b835d68 7884
cce8749e
CH
7885 case JUMP_INSN:
7886 /* If this is an unconditional branch to the same label, succeed.
7887 If it is to another label, do nothing. If it is conditional,
7888 fail. */
914a3b8c 7889 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 7890
ed4c4348 7891 scanbody = PATTERN (this_insn);
ff9940b0
RE
7892 if (GET_CODE (scanbody) == SET
7893 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
7894 {
7895 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
7896 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
7897 {
7898 arm_ccfsm_state = 2;
7899 succeed = TRUE;
7900 }
7901 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
7902 fail = TRUE;
7903 }
b36ba79f
RE
7904 /* Fail if a conditional return is undesirable (e.g. on a
7905 StrongARM), but still allow this if optimizing for size. */
7906 else if (GET_CODE (scanbody) == RETURN
7907 && ! use_return_insn (TRUE)
7908 && ! optimize_size)
7909 fail = TRUE;
ff9940b0
RE
7910 else if (GET_CODE (scanbody) == RETURN
7911 && seeking_return)
7912 {
7913 arm_ccfsm_state = 2;
7914 succeed = TRUE;
7915 }
7916 else if (GET_CODE (scanbody) == PARALLEL)
7917 {
7918 switch (get_attr_conds (this_insn))
7919 {
7920 case CONDS_NOCOND:
7921 break;
7922 default:
7923 fail = TRUE;
7924 break;
7925 }
7926 }
cce8749e
CH
7927 break;
7928
7929 case INSN:
ff9940b0
RE
7930 /* Instructions using or affecting the condition codes make it
7931 fail. */
ed4c4348 7932 scanbody = PATTERN (this_insn);
74641843
RE
7933 if (! (GET_CODE (scanbody) == SET
7934 || GET_CODE (scanbody) == PARALLEL)
7935 || get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
7936 fail = TRUE;
7937 break;
7938
7939 default:
7940 break;
7941 }
7942 }
7943 if (succeed)
7944 {
ff9940b0 7945 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 7946 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
7947 else if (seeking_return || arm_ccfsm_state == 2)
7948 {
7949 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
7950 {
7951 this_insn = next_nonnote_insn (this_insn);
7952 if (this_insn && (GET_CODE (this_insn) == BARRIER
7953 || GET_CODE (this_insn) == CODE_LABEL))
7954 abort ();
7955 }
7956 if (!this_insn)
7957 {
7958 /* Oh, dear! We ran off the end... give up. */
7959 recog (PATTERN (insn), insn, NULL_PTR);
7960 arm_ccfsm_state = 0;
abaa26e5 7961 arm_target_insn = NULL;
ff9940b0
RE
7962 return;
7963 }
7964 arm_target_insn = this_insn;
7965 }
cce8749e
CH
7966 else
7967 abort ();
ff9940b0
RE
7968 if (jump_clobbers)
7969 {
7970 if (reverse)
7971 abort ();
7972 arm_current_cc =
7973 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
7974 0), 0), 1));
7975 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
7976 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7977 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
7978 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7979 }
7980 else
7981 {
7982 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
7983 what it was. */
7984 if (!reverse)
7985 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
7986 0));
7987 }
cce8749e 7988
cce8749e
CH
7989 if (reverse || then_not_else)
7990 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7991 }
d5b7b3ae 7992
1ccbefce 7993 /* Restore recog_data (getting the attributes of other insns can
ff9940b0 7994 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 7995 across this call; since the insn has been recognized already we
b020fd92 7996 call recog directly). */
ff9940b0 7997 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 7998 }
f3bb6135 7999}
cce8749e 8000
d5b7b3ae
RE
8001int
8002arm_regno_class (regno)
8003 int regno;
8004{
8005 if (TARGET_THUMB)
8006 {
8007 if (regno == STACK_POINTER_REGNUM)
8008 return STACK_REG;
8009 if (regno == CC_REGNUM)
8010 return CC_REG;
8011 if (regno < 8)
8012 return LO_REGS;
8013 return HI_REGS;
8014 }
8015
8016 if ( regno <= LAST_ARM_REGNUM
8017 || regno == FRAME_POINTER_REGNUM
8018 || regno == ARG_POINTER_REGNUM)
8019 return GENERAL_REGS;
8020
8021 if (regno == CC_REGNUM)
8022 return NO_REGS;
8023
8024 return FPU_REGS;
8025}
8026
8027/* Handle a special case when computing the offset
8028 of an argument from the frame pointer. */
8029int
8030arm_debugger_arg_offset (value, addr)
8031 int value;
8032 rtx addr;
8033{
8034 rtx insn;
8035
8036 /* We are only interested if dbxout_parms() failed to compute the offset. */
8037 if (value != 0)
8038 return 0;
8039
8040 /* We can only cope with the case where the address is held in a register. */
8041 if (GET_CODE (addr) != REG)
8042 return 0;
8043
8044 /* If we are using the frame pointer to point at the argument, then
8045 an offset of 0 is correct. */
cd2b33d0 8046 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
d5b7b3ae
RE
8047 return 0;
8048
8049 /* If we are using the stack pointer to point at the
8050 argument, then an offset of 0 is correct. */
8051 if ((TARGET_THUMB || ! frame_pointer_needed)
8052 && REGNO (addr) == SP_REGNUM)
8053 return 0;
8054
8055 /* Oh dear. The argument is pointed to by a register rather
8056 than being held in a register, or being stored at a known
8057 offset from the frame pointer. Since GDB only understands
8058 those two kinds of argument we must translate the address
8059 held in the register into an offset from the frame pointer.
8060 We do this by searching through the insns for the function
8061 looking to see where this register gets its value. If the
8062 register is initialised from the frame pointer plus an offset
8063 then we are in luck and we can continue, otherwise we give up.
8064
8065 This code is exercised by producing debugging information
8066 for a function with arguments like this:
8067
8068 double func (double a, double b, int c, double d) {return d;}
8069
8070 Without this code the stab for parameter 'd' will be set to
8071 an offset of 0 from the frame pointer, rather than 8. */
8072
8073 /* The if() statement says:
8074
8075 If the insn is a normal instruction
8076 and if the insn is setting the value in a register
8077 and if the register being set is the register holding the address of the argument
8078 and if the address is computed by an addition
8079 that involves adding to a register
8080 which is the frame pointer
8081 a constant integer
8082
8083 then... */
8084
8085 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8086 {
8087 if ( GET_CODE (insn) == INSN
8088 && GET_CODE (PATTERN (insn)) == SET
8089 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8090 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8091 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
cd2b33d0 8092 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
d5b7b3ae
RE
8093 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8094 )
8095 {
8096 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8097
8098 break;
8099 }
8100 }
8101
8102 if (value == 0)
8103 {
8104 debug_rtx (addr);
8105 warning ("Unable to compute real location of stacked parameter");
8106 value = 8; /* XXX magic hack */
8107 }
8108
8109 return value;
8110}
8111
8112\f
8113/* Recursively search through all of the blocks in a function
8114 checking to see if any of the variables created in that
8115 function match the RTX called 'orig'. If they do then
8116 replace them with the RTX called 'new'. */
8117
8118static void
8119replace_symbols_in_block (block, orig, new)
8120 tree block;
8121 rtx orig;
8122 rtx new;
8123{
8124 for (; block; block = BLOCK_CHAIN (block))
8125 {
8126 tree sym;
8127
8128 if (! TREE_USED (block))
8129 continue;
8130
8131 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8132 {
8133 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8134 || DECL_IGNORED_P (sym)
8135 || TREE_CODE (sym) != VAR_DECL
8136 || DECL_EXTERNAL (sym)
8137 || ! rtx_equal_p (DECL_RTL (sym), orig)
8138 )
8139 continue;
8140
8141 DECL_RTL (sym) = new;
8142 }
8143
8144 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8145 }
8146}
8147
8148/* Return the number (counting from 0) of the least significant set
8149 bit in MASK. */
8150#ifdef __GNUC__
8151inline
8152#endif
8153static int
8154number_of_first_bit_set (mask)
8155 int mask;
8156{
8157 int bit;
8158
8159 for (bit = 0;
8160 (mask & (1 << bit)) == 0;
8161 ++ bit)
8162 continue;
8163
8164 return bit;
8165}
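
/* For example, number_of_first_bit_set (0x28) is 3, since bit 3 is the
   least significant bit set in binary 101000.  */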
8166
8167/* Generate code to return from a thumb function.
8168 If 'reg_containing_return_addr' is -1, then the return address is
8169 actually on the stack, at the stack pointer. */
8170static void
8171thumb_exit (f, reg_containing_return_addr, eh_ofs)
8172 FILE * f;
8173 int reg_containing_return_addr;
8174 rtx eh_ofs;
8175{
8176 unsigned regs_available_for_popping;
8177 unsigned regs_to_pop;
8178 int pops_needed;
8179 unsigned available;
8180 unsigned required;
8181 int mode;
8182 int size;
8183 int restore_a4 = FALSE;
8184
8185 /* Compute the registers we need to pop. */
8186 regs_to_pop = 0;
8187 pops_needed = 0;
8188
8189 /* There is an assumption here, that if eh_ofs is not NULL, the
8190 normal return address will have been pushed. */
8191 if (reg_containing_return_addr == -1 || eh_ofs)
8192 {
8193 /* When we are generating a return for __builtin_eh_return,
8194 reg_containing_return_addr must specify the return regno. */
8195 if (eh_ofs && reg_containing_return_addr == -1)
8196 abort ();
8197
8198 regs_to_pop |= 1 << LR_REGNUM;
8199 ++ pops_needed;
8200 }
8201
8202 if (TARGET_BACKTRACE)
8203 {
8204 /* Restore the (ARM) frame pointer and stack pointer. */
8205 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8206 pops_needed += 2;
8207 }
8208
8209 /* If there is nothing to pop then just emit the BX instruction and
8210 return. */
8211 if (pops_needed == 0)
8212 {
8213 if (eh_ofs)
8214 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8215
8216 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8217 return;
8218 }
8219 /* Otherwise if we are not supporting interworking and we have not created
8220 a backtrace structure and the function was not entered in ARM mode then
8221 just pop the return address straight into the PC. */
8222 else if ( ! TARGET_INTERWORK
8223 && ! TARGET_BACKTRACE
8224 && ! is_called_in_ARM_mode (current_function_decl))
8225 {
8226 if (eh_ofs)
8227 {
8228 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
8229 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8230 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8231 }
8232 else
8233 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
8234
8235 return;
8236 }
8237
8238 /* Find out how many of the (return) argument registers we can corrupt. */
8239 regs_available_for_popping = 0;
8240
8241 /* If returning via __builtin_eh_return, the bottom three registers
8242 all contain information needed for the return. */
8243 if (eh_ofs)
8244 size = 12;
8245 else
8246 {
8247#ifdef RTX_CODE
8248 /* Deduce the registers used from the function's
8249 return value, if we can. This is more reliable than examining
8250 regs_ever_live[] because that will be set if the register is
8251 ever used in the function, not just if the register is used
8252 to hold a return value. */
8253
8254 if (current_function_return_rtx != 0)
8255 mode = GET_MODE (current_function_return_rtx);
8256 else
8257#endif
8258 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8259
8260 size = GET_MODE_SIZE (mode);
8261
8262 if (size == 0)
8263 {
8264 /* In a void function we can use any argument register.
8265 In a function that returns a structure on the stack
8266 we can use the second and third argument registers. */
8267 if (mode == VOIDmode)
8268 regs_available_for_popping =
8269 (1 << ARG_REGISTER (1))
8270 | (1 << ARG_REGISTER (2))
8271 | (1 << ARG_REGISTER (3));
8272 else
8273 regs_available_for_popping =
8274 (1 << ARG_REGISTER (2))
8275 | (1 << ARG_REGISTER (3));
8276 }
8277 else if (size <= 4)
8278 regs_available_for_popping =
8279 (1 << ARG_REGISTER (2))
8280 | (1 << ARG_REGISTER (3));
8281 else if (size <= 8)
8282 regs_available_for_popping =
8283 (1 << ARG_REGISTER (3));
8284 }
8285
8286 /* Match registers to be popped with registers into which we pop them. */
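  /* Each iteration of the loop below clears the least significant set
     bit of both masks: in two's complement, (x & -x) isolates that bit
     (for example 0x0c & -0x0c == 0x04), so x &= ~(x & -x) removes it.  */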
8287 for (available = regs_available_for_popping,
8288 required = regs_to_pop;
8289 required != 0 && available != 0;
8290 available &= ~(available & - available),
8291 required &= ~(required & - required))
8292 -- pops_needed;
8293
8294 /* If we have any popping registers left over, remove them. */
8295 if (available > 0)
8296 regs_available_for_popping &= ~ available;
8297
8298 /* Otherwise if we need another popping register we can use
8299 the fourth argument register. */
8300 else if (pops_needed)
8301 {
8302 /* If we have not found any free argument registers and
8303 reg a4 contains the return address, we must move it. */
8304 if (regs_available_for_popping == 0
8305 && reg_containing_return_addr == LAST_ARG_REGNUM)
8306 {
8307 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8308 reg_containing_return_addr = LR_REGNUM;
8309 }
8310 else if (size > 12)
8311 {
8312 /* Register a4 is being used to hold part of the return value,
8313 but we have dire need of a free, low register. */
8314 restore_a4 = TRUE;
8315
8316 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
8317 }
8318
8319 if (reg_containing_return_addr != LAST_ARG_REGNUM)
8320 {
8321 /* The fourth argument register is available. */
8322 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
8323
8324 -- pops_needed;
8325 }
8326 }
8327
8328 /* Pop as many registers as we can. */
8329 thumb_pushpop (f, regs_available_for_popping, FALSE);
8330
8331 /* Process the registers we popped. */
8332 if (reg_containing_return_addr == -1)
8333 {
8334 /* The return address was popped into the lowest numbered register. */
8335 regs_to_pop &= ~ (1 << LR_REGNUM);
8336
8337 reg_containing_return_addr =
8338 number_of_first_bit_set (regs_available_for_popping);
8339
8340 /* Remove this register from the mask of available registers, so that
8341 the return address will not be corrupted by further pops. */
8342 regs_available_for_popping &= ~ (1 << reg_containing_return_addr);
8343 }
8344
8345 /* If we popped other registers then handle them here. */
8346 if (regs_available_for_popping)
8347 {
8348 int frame_pointer;
8349
8350 /* Work out which register currently contains the frame pointer. */
8351 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
8352
8353 /* Move it into the correct place. */
8354 asm_fprintf (f, "\tmov\t%r, %r\n",
8355 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
8356
8357 /* (Temporarily) remove it from the mask of popped registers. */
8358 regs_available_for_popping &= ~ (1 << frame_pointer);
8359 regs_to_pop &= ~ (1 << ARM_HARD_FRAME_POINTER_REGNUM);
8360
8361 if (regs_available_for_popping)
8362 {
8363 int stack_pointer;
8364
8365 /* We popped the stack pointer as well,
8366 find the register that contains it. */
8367 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
8368
8369 /* Move it into the stack register. */
8370 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
8371
8372 /* At this point we have popped all necessary registers, so
8373 do not worry about restoring regs_available_for_popping
8374 to its correct value:
8375
8376 assert (pops_needed == 0)
8377 assert (regs_available_for_popping == (1 << frame_pointer))
8378 assert (regs_to_pop == (1 << STACK_POINTER)) */
8379 }
8380 else
8381 {
8382 /* Since we have just moved the popped value into the frame
8383 pointer, the popping register is available for reuse, and
8384 we know that we still have the stack pointer left to pop. */
8385 regs_available_for_popping |= (1 << frame_pointer);
8386 }
8387 }
8388
8389 /* If we still have registers left on the stack, but we no longer have
8390 any registers into which we can pop them, then we must move the return
8391 address into the link register and make available the register that
8392 contained it. */
8393 if (regs_available_for_popping == 0 && pops_needed > 0)
8394 {
8395 regs_available_for_popping |= 1 << reg_containing_return_addr;
8396
8397 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
8398 reg_containing_return_addr);
8399
8400 reg_containing_return_addr = LR_REGNUM;
8401 }
8402
8403 /* If we have registers left on the stack then pop some more.
8404 We know that at most we will want to pop FP and SP. */
8405 if (pops_needed > 0)
8406 {
8407 int popped_into;
8408 int move_to;
8409
8410 thumb_pushpop (f, regs_available_for_popping, FALSE);
8411
8412 /* We have popped either FP or SP.
8413 Move whichever one it is into the correct register. */
8414 popped_into = number_of_first_bit_set (regs_available_for_popping);
8415 move_to = number_of_first_bit_set (regs_to_pop);
8416
8417 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
8418
8419 regs_to_pop &= ~ (1 << move_to);
8420
8421 -- pops_needed;
8422 }
8423
8424 /* If we still have not popped everything then we must have only
8425 had one register available to us and we are now popping the SP. */
8426 if (pops_needed > 0)
8427 {
8428 int popped_into;
8429
8430 thumb_pushpop (f, regs_available_for_popping, FALSE);
8431
8432 popped_into = number_of_first_bit_set (regs_available_for_popping);
8433
8434 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
8435 /*
8436 assert (regs_to_pop == (1 << STACK_POINTER))
8437 assert (pops_needed == 1)
8438 */
8439 }
8440
8441 /* If necessary restore the a4 register. */
8442 if (restore_a4)
8443 {
8444 if (reg_containing_return_addr != LR_REGNUM)
8445 {
8446 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8447 reg_containing_return_addr = LR_REGNUM;
8448 }
8449
8450 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
8451 }
8452
8453 if (eh_ofs)
8454 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8455
8456 /* Return to caller. */
8457 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8458}
8459
8460/* Emit code to push or pop registers to or from the stack. */
8461static void
8462thumb_pushpop (f, mask, push)
8463 FILE * f;
8464 int mask;
8465 int push;
8466{
8467 int regno;
8468 int lo_mask = mask & 0xFF;
8469
8470 if (lo_mask == 0 && ! push && (mask & (1 << 15)))
8471 {
8472 /* Special case. Do not generate a POP PC statement here, do it in
8473 thumb_exit (). */
8474 thumb_exit (f, -1, NULL_RTX);
8475 return;
8476 }
8477
8478 fprintf (f, "\t%s\t{", push ? "push" : "pop");
8479
8480 /* Look at the low registers first. */
8481 for (regno = 0; regno <= LAST_LO_REGNUM; regno ++, lo_mask >>= 1)
8482 {
8483 if (lo_mask & 1)
8484 {
8485 asm_fprintf (f, "%r", regno);
8486
8487 if ((lo_mask & ~1) != 0)
8488 fprintf (f, ", ");
8489 }
8490 }
8491
8492 if (push && (mask & (1 << LR_REGNUM)))
8493 {
8494 /* Catch pushing the LR. */
8495 if (mask & 0xFF)
8496 fprintf (f, ", ");
8497
8498 asm_fprintf (f, "%r", LR_REGNUM);
8499 }
8500 else if (!push && (mask & (1 << PC_REGNUM)))
8501 {
8502 /* Catch popping the PC. */
8503 if (TARGET_INTERWORK || TARGET_BACKTRACE)
8504 {
8505 /* The PC is never popped directly; instead
8506 it is popped into r3 and then BX is used. */
8507 fprintf (f, "}\n");
8508
8509 thumb_exit (f, -1, NULL_RTX);
8510
8511 return;
8512 }
8513 else
8514 {
8515 if (mask & 0xFF)
8516 fprintf (f, ", ");
8517
8518 asm_fprintf (f, "%r", PC_REGNUM);
8519 }
8520 }
8521
8522 fprintf (f, "}\n");
8523}
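
/* For example (illustrative only): thumb_pushpop (f, 0x90 | (1 << LR_REGNUM), 1)
   emits "push {r4, r7, lr}", while a pop whose mask includes the PC
   defers to thumb_exit () when interworking or backtracing is enabled.  */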
8524\f
8525void
8526thumb_final_prescan_insn (insn)
8527 rtx insn;
8528{
8529 extern int * insn_addresses;
8530
8531 if (flag_print_asm_name)
8532 asm_fprintf (asm_out_file, "%@ 0x%04x\n", insn_addresses[INSN_UID (insn)]);
8533}
8534
8535int
8536thumb_shiftable_const (val)
8537 unsigned HOST_WIDE_INT val;
8538{
8539 unsigned HOST_WIDE_INT mask = 0xff;
8540 int i;
8541
8542 if (val == 0) /* XXX */
8543 return 0;
8544
8545 for (i = 0; i < 25; i++)
8546 if ((val & (mask << i)) == val)
8547 return 1;
8548
8549 return 0;
8550}
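
/* For example, thumb_shiftable_const (0x00ff0000) is 1, since
   0x00ff0000 == 0xff << 16, whereas thumb_shiftable_const (0x101) is 0
   because its set bits do not fit within one shifted byte.  */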
8551
8552/* Returns non-zero if the current function contains,
8553 or might contain, a far jump. */
8554int
8555thumb_far_jump_used_p (int in_prologue)
8556{
8557 rtx insn;
8558
8559 /* This test is only important for leaf functions. */
8560 /* assert (! leaf_function_p ()); */
8561
8562 /* If we have already decided that far jumps may be used,
8563 do not bother checking again, and always return true even if
8564 it turns out that they are not being used. Once we have made
8565 the decision that far jumps are present (and that hence the link
8566 register will be pushed onto the stack) we cannot go back on it. */
8567 if (cfun->machine->far_jump_used)
8568 return 1;
8569
8570 /* If this function is not being called from the prologue/epilogue
8571 generation code then it must be being called from the
8572 INITIAL_ELIMINATION_OFFSET macro. */
8573 if (! in_prologue)
8574 {
8575 /* In this case we know that we are being asked about the elimination
8576 of the arg pointer register. If that register is not being used,
8577 then there are no arguments on the stack, and we do not have to
8578 worry that a far jump might force the prologue to push the link
8579 register, changing the stack offsets. In this case we can just
8580 return false, since the presence of far jumps in the function will
8581 not affect stack offsets.
8582
8583 If the arg pointer is live (or if it was live, but has now been
8584 eliminated and so set to dead) then we do have to test to see if
8585 the function might contain a far jump. This test can lead to some
8586 false negatives, since before reload is completed the length of
8587 branch instructions is not known, so gcc defaults to returning their
8588 longest length, which in turn sets the far jump attribute to true.
8589
8590 A false negative will not result in bad code being generated, but it
8591 will result in a needless push and pop of the link register. We
8592 hope that this does not occur too often. */
8593 if (regs_ever_live [ARG_POINTER_REGNUM])
8594 cfun->machine->arg_pointer_live = 1;
8595 else if (! cfun->machine->arg_pointer_live)
8596 return 0;
8597 }
8598
8599 /* Check to see if the function contains a branch
8600 insn with the far jump attribute set. */
8601 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8602 {
8603 if (GET_CODE (insn) == JUMP_INSN
8604 /* Ignore tablejump patterns. */
8605 && GET_CODE (PATTERN (insn)) != ADDR_VEC
8606 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
8607 && get_attr_far_jump (insn) == FAR_JUMP_YES
8608 )
8609 {
8610 /* Record the fact that we have decided that
8611 the function does use far jumps. */
8612 cfun->machine->far_jump_used = 1;
8613 return 1;
8614 }
8615 }
8616
8617 return 0;
8618}
8619
8620/* Return non-zero if FUNC must be entered in ARM mode. */
8621int
8622is_called_in_ARM_mode (func)
8623 tree func;
8624{
8625 if (TREE_CODE (func) != FUNCTION_DECL)
8626 abort ();
8627
8628 /* Ignore the problem of functions whose address is taken. */
8629 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
8630 return TRUE;
8631
8632#ifdef ARM_PE
8633 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
8634#else
8635 return FALSE;
8636#endif
8637}
8638
8639/* The bits which aren't usefully expanded as rtl. */
cd2b33d0 8640const char *
d5b7b3ae
RE
8641thumb_unexpanded_epilogue ()
8642{
8643 int regno;
8644 int live_regs_mask = 0;
8645 int high_regs_pushed = 0;
8646 int leaf_function = leaf_function_p ();
8647 int had_to_push_lr;
8648 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8649
8650 if (return_used_this_function)
8651 return "";
8652
8653 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8654 if (regs_ever_live[regno] && ! call_used_regs[regno]
8655 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8656 live_regs_mask |= 1 << regno;
8657
8658 for (regno = 8; regno < 13; regno++)
8659 {
8660 if (regs_ever_live[regno] && ! call_used_regs[regno]
8661 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8662 high_regs_pushed ++;
8663 }
8664
8665 /* The prolog may have pushed some high registers to use as
8666 work registers, e.g. the testsuite file:
8667 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
8668 compiles to produce:
8669 push {r4, r5, r6, r7, lr}
8670 mov r7, r9
8671 mov r6, r8
8672 push {r6, r7}
8673 as part of the prolog. We have to undo that pushing here. */
8674
8675 if (high_regs_pushed)
8676 {
8677 int mask = live_regs_mask;
8678 int next_hi_reg;
8679 int size;
8680 int mode;
8681
8682#ifdef RTX_CODE
8683 /* Deduce the registers used from the function's return value, if we can.
8684 This is more reliable than examining regs_ever_live[] because that
8685 will be set if the register is ever used in the function, not just if
8686 the register is used to hold a return value. */
8687
8688 if (current_function_return_rtx != 0)
8689 mode = GET_MODE (current_function_return_rtx);
8690 else
8691#endif
8692 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8693
8694 size = GET_MODE_SIZE (mode);
8695
8696 /* Unless we are returning a type of size > 12, register r3 is
8697 available. */
8698 if (size < 13)
8699 mask |= 1 << 3;
8700
8701 if (mask == 0)
8702 /* Oh dear! We have no low registers into which we can pop
8703 high registers! */
8704 fatal ("No low registers available for popping high registers");
8705
8706 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
8707 if (regs_ever_live[next_hi_reg] && ! call_used_regs[next_hi_reg]
8708 && ! (TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
8709 break;
8710
8711 while (high_regs_pushed)
8712 {
8713 /* Find lo register(s) into which the high register(s) can
8714 be popped. */
8715 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8716 {
8717 if (mask & (1 << regno))
8718 high_regs_pushed--;
8719 if (high_regs_pushed == 0)
8720 break;
8721 }
8722
8723 mask &= (2 << regno) - 1; /* Keeps bits 0..regno; a no-op if regno == 8. */
8724
8725 /* Pop the values into the low register(s). */
8726 thumb_pushpop (asm_out_file, mask, 0);
8727
8728 /* Move the value(s) into the high registers. */
8729 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8730 {
8731 if (mask & (1 << regno))
8732 {
8733 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
8734 regno);
8735
8736 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
8737 if (regs_ever_live[next_hi_reg] &&
8738 ! call_used_regs[next_hi_reg]
8739 && ! (TARGET_SINGLE_PIC_BASE
8740 && (next_hi_reg == arm_pic_register)))
8741 break;
8742 }
8743 }
8744 }
8745 }
8746
8747 had_to_push_lr = (live_regs_mask || ! leaf_function
8748 || thumb_far_jump_used_p (1));
8749
8750 if (TARGET_BACKTRACE
8751 && ((live_regs_mask & 0xFF) == 0)
8752 && regs_ever_live [LAST_ARG_REGNUM] != 0)
8753 {
8754 /* The stack backtrace structure creation code had to
8755 push R7 in order to get a work register, so we pop
8756 it now. */
8757 live_regs_mask |= (1 << LAST_LO_REGNUM);
8758 }
8759
8760 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
8761 {
8762 if (had_to_push_lr
8763 && ! is_called_in_ARM_mode (current_function_decl)
8764 && ! eh_ofs)
8765 live_regs_mask |= 1 << PC_REGNUM;
8766
8767 /* Either no argument registers were pushed or a backtrace
8768 structure was created which includes an adjusted stack
8769 pointer, so just pop everything. */
8770 if (live_regs_mask)
8771 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8772
8773 if (eh_ofs)
8774 thumb_exit (asm_out_file, 2, eh_ofs);
8775 /* We have either just popped the return address into the
8776 PC, or it was kept in LR for the entire function, or
8777 it is still on the stack because we do not want to
8778 return by doing a pop {pc}. */
8779 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
8780 thumb_exit (asm_out_file,
8781 (had_to_push_lr
8782 && is_called_in_ARM_mode (current_function_decl)) ?
8783 -1 : LR_REGNUM, NULL_RTX);
8784 }
8785 else
8786 {
8787 /* Pop everything but the return address. */
8788 live_regs_mask &= ~ (1 << PC_REGNUM);
8789
8790 if (live_regs_mask)
8791 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8792
8793 if (had_to_push_lr)
8794 /* Get the return address into a temporary register. */
8795 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
8796
8797 /* Remove the argument registers that were pushed onto the stack. */
8798 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
8799 SP_REGNUM, SP_REGNUM,
8800 current_function_pretend_args_size);
8801
8802 if (eh_ofs)
8803 thumb_exit (asm_out_file, 2, eh_ofs);
8804 else
8805 thumb_exit (asm_out_file,
8806 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
8807 }
8808
8809 return "";
8810}
8811
8812/* Functions to save and restore machine-specific function data. */
8813
8814static void
8815arm_mark_machine_status (p)
8816 struct function * p;
8817{
8818 struct machine_function *machine = p->machine;
8819
8820 ggc_mark_rtx (machine->ra_rtx);
8821 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
8822}
8823
8824static void
8825arm_init_machine_status (p)
8826 struct function * p;
8827{
8828 p->machine =
8829 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
8830}
8831
8832/* Return an RTX indicating where the return address to the
8833 calling function can be found. */
8834rtx
8835arm_return_addr (count, frame)
8836 int count;
8837 rtx frame ATTRIBUTE_UNUSED;
8838{
8839 rtx reg;
8840
8841 if (count != 0)
8842 return NULL_RTX;
8843
8844 reg = cfun->machine->ra_rtx;
8845
8846 if (reg == NULL)
8847 {
8848 rtx init;
8849
8850 /* No rtx yet. Invent one, and initialize it for r14 (lr) in
8851 the prologue. */
8852 reg = gen_reg_rtx (Pmode);
8853 cfun->machine->ra_rtx = reg;
8854
8855 if (! TARGET_APCS_32)
8856 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
8857 GEN_INT (RETURN_ADDR_MASK26));
8858 else
8859 init = gen_rtx_REG (Pmode, LR_REGNUM);
8860
8861 init = gen_rtx_SET (VOIDmode, reg, init);
8862
8863 /* Emit the insn to the prologue with the other argument copies. */
8864 push_topmost_sequence ();
8865 emit_insn_after (init, get_insns ());
8866 pop_topmost_sequence ();
8867 }
8868
8869 return reg;
8870}
8871
8872/* Do anything needed before RTL is emitted for each function. */
8873void
8874arm_init_expanders ()
8875{
8876 /* Arrange to initialize and mark the machine per-function status. */
8877 init_machine_status = arm_init_machine_status;
8878 mark_machine_status = arm_mark_machine_status;
8879}
8880
8881/* Generate the rest of a function's prologue. */
8882void
8883thumb_expand_prologue ()
8884{
8885 HOST_WIDE_INT amount = (get_frame_size ()
8886 + current_function_outgoing_args_size);
8887
8888 /* Naked functions don't have prologues. */
8889 if (arm_naked_function_p (current_function_decl))
8890 return;
8891
8892 if (frame_pointer_needed)
8893 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
8894
8895 if (amount)
8896 {
8897 amount = ROUND_UP (amount);
8898
8899 if (amount < 512)
8900 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8901 GEN_INT (- amount)));
8902 else
8903 {
8904 int regno;
8905 rtx reg;
8906
8907 /* The stack decrement is too big for an immediate value in a single
8908 insn. In theory we could issue multiple subtracts, but after
8909 three of them it becomes more space efficient to place the full
8910 value in the constant pool and load into a register. (Also the
8911 ARM debugger really likes to see only one stack decrement per
8912 function). So instead we look for a scratch register into which
8913 we can load the decrement, and then we subtract this from the
8914 stack pointer. Unfortunately on the thumb the only available
8915 scratch registers are the argument registers, and we cannot use
8916 these as they may hold arguments to the function. Instead we
8917 attempt to locate a call preserved register which is used by this
8918 function. If we can find one, then we know that it will have
8919 been pushed at the start of the prologue and so we can corrupt
8920 it now. */
8921 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
8922 if (regs_ever_live[regno]
8923 && ! call_used_regs[regno] /* Paranoia */
8924 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
8925 && ! (frame_pointer_needed
8926 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
8927 break;
8928
8929 if (regno > LAST_LO_REGNUM) /* Very unlikely */
8930 {
8931 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
8932
8933 /* Choose an arbitrary, non-argument low register. */
8934 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
8935
8936 /* Save it by copying it into a high, scratch register. */
8937 emit_insn (gen_movsi (spare, reg));
8938
8939 /* Decrement the stack. */
8940 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
8941 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8942 reg));
8943
8944 /* Restore the low register's original value. */
8945 emit_insn (gen_movsi (reg, spare));
8946
8947 /* Emit a USE of the restored scratch register, so that flow
8948 analysis will not consider the restore redundant. The
8949 register won't be used again in this function and isn't
8950 restored by the epilogue. */
8951 emit_insn (gen_rtx_USE (VOIDmode, reg));
8952 }
8953 else
8954 {
8955 reg = gen_rtx (REG, SImode, regno);
8956
8957 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
8958 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8959 reg));
8960 }
8961 }
8962 }
8963
8964 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
8965 emit_insn (gen_blockage ());
8966}

void
thumb_expand_epilogue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);

  /* Naked functions don't have epilogues.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (frame_pointer_needed)
    emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
  else if (amount)
    {
      amount = ROUND_UP (amount);

      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (amount)));
      else
	{
	  /* r3 is always free in the epilogue.  */
	  rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);

	  emit_insn (gen_movsi (reg, GEN_INT (amount)));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }

  /* Emit a USE (stack_pointer_rtx), so that
     the stack adjustment will not be deleted.  */
  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));

  if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
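
/* For illustration: with a (rounded) frame of 1024 bytes, the epilogue
   code generated above amounts to roughly

	ldr	r3, .LCx	@ .LCx holds 1024; the label is hypothetical
	add	sp, sp, r3

   r3 (LAST_ARG_REGNUM) can be clobbered freely here because the argument
   registers are dead once the function is returning.  */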

void
output_thumb_prologue (f)
     FILE * f;
{
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int store_arg_regs = 0;
  int regno;

  if (arm_naked_function_p (current_function_decl))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
	abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
	abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate the code sequence that switches us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
	 ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
	 change in instruction sets.  This label is also used by
	 the assembler to bypass the ARM code when this function
	 is called from a Thumb encoded function elsewhere in the
	 same file.  Hence the definition of STUB_NAME here must
	 agree with the definition in gas/config/tc-arm.c.  */

#define STUB_NAME ".real_start_of"

      asm_fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
	name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      asm_fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
	{
	  int num_pushes;

	  asm_fprintf (f, "\tpush\t{");

	  num_pushes = NUM_INTS (current_function_pretend_args_size);

	  for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
	       regno <= LAST_ARG_REGNUM;
	       regno ++)
	    asm_fprintf (f, "%r%s", regno,
			 regno == LAST_ARG_REGNUM ? "" : ", ");

	  asm_fprintf (f, "}\n");
	}
      else
	asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
		     SP_REGNUM, SP_REGNUM,
		     current_function_pretend_args_size);
    }

  for (regno = 0; regno <= LAST_LO_REGNUM; regno ++)
    if (regs_ever_live[regno] && ! call_used_regs[regno]
	&& ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  if (live_regs_mask || ! leaf_function_p () || thumb_far_jump_used_p (1))
    live_regs_mask |= 1 << LR_REGNUM;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register = 0;
      int wr;

      /* We have been asked to create a stack backtrace structure.
	 The code looks like this:

	  0   .align 2
	  0   func:
	  0     sub  SP, #16		Reserve space for 4 registers.
	  2     push {R7}		Get a work register.
	  4     add  R7, SP, #20	Get the stack pointer before the push.
	  6     str  R7, [SP, #8]	Store the stack pointer (before reserving the space).
	  8     mov  R7, PC		Get hold of the start of this code plus 12.
	 10     str  R7, [SP, #16]	Store it.
	 12     mov  R7, FP		Get hold of the current frame pointer.
	 14     str  R7, [SP, #4]	Store it.
	 16     mov  R7, LR		Get hold of the current return address.
	 18     str  R7, [SP, #12]	Store it.
	 20     add  R7, SP, #16	Point at the start of the backtrace structure.
	 22     mov  FP, R7		Put this value into the frame pointer.  */

      if ((live_regs_mask & 0xFF) == 0)
	{
	  /* See if the a4 register is free.  */

	  if (regs_ever_live[LAST_ARG_REGNUM] == 0)
	    work_register = LAST_ARG_REGNUM;
	  else	/* We must push a register of our own.  */
	    live_regs_mask |= (1 << LAST_LO_REGNUM);
	}

      if (work_register == 0)
	{
	  /* Select a register from the list that will be pushed to
	     use as our work register.  */
	  for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
	    if ((1 << work_register) & live_regs_mask)
	      break;
	}

      asm_fprintf
	(f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
	 SP_REGNUM, SP_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (f, live_regs_mask, 1);

      for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
	if (wr & live_regs_mask)
	  offset += 4;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
	 to calculate "start of backtrace creation code + 12".  */
      if (live_regs_mask)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	}
      else
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (live_regs_mask)
    thumb_pushpop (f, live_regs_mask, 1);

  for (regno = 8; regno < 13; regno++)
    {
      if (regs_ever_live[regno] && ! call_used_regs[regno]
	  && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
	high_regs_pushed ++;
    }

  if (high_regs_pushed)
    {
      int pushable_regs = 0;
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
	{
	  if (regs_ever_live[next_hi_reg] && ! call_used_regs[next_hi_reg]
	      && ! (TARGET_SINGLE_PIC_BASE
		    && (next_hi_reg == arm_pic_register)))
	    break;
	}

      pushable_regs = mask;

      if (pushable_regs == 0)
	{
	  /* Desperation time -- this probably will never happen.  */
	  if (regs_ever_live[LAST_ARG_REGNUM]
	      || ! call_used_regs[LAST_ARG_REGNUM])
	    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	  mask = 1 << LAST_ARG_REGNUM;
	}

      while (high_regs_pushed > 0)
	{
	  for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

		  high_regs_pushed --;

		  if (high_regs_pushed)
		    for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
			 next_hi_reg--)
		      {
			if (regs_ever_live[next_hi_reg]
			    && ! call_used_regs[next_hi_reg]
			    && ! (TARGET_SINGLE_PIC_BASE
				  && (next_hi_reg == arm_pic_register)))
			  break;
		      }
		  else
		    {
		      mask &= ~ ((1 << regno) - 1);
		      break;
		    }
		}
	    }

	  thumb_pushpop (f, mask, 1);
	}

      if (pushable_regs == 0
	  && (regs_ever_live[LAST_ARG_REGNUM]
	      || ! call_used_regs[LAST_ARG_REGNUM]))
	asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }
}
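
/* For reference, when a function is compiled so that it is entered in
   ARM state (is_called_in_ARM_mode), the prologue above emits a mode
   switching veneer of roughly this shape -- a sketch only, since the
   inner label's exact spelling depends on STUB_NAME and the target's
   user label prefix:

	.code	32
   foo:
	orr	ip, pc, #1	@ pc reads 8 bytes ahead, i.e. the Thumb code;
	bx	ip		@ bit 0 set in ip selects Thumb state
	.code	16
	.globl	.real_start_of<foo>
	.thumb_func
   .real_start_of<foo>:

   Thumb callers in the same file branch directly to the inner label
   and so bypass the ARM entry sequence.  */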

/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  */

const char *
thumb_load_double_from_address (operands)
     rtx * operands;
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    fatal ("thumb_load_double_from_address: destination is not a register");

  if (GET_CODE (operands[1]) != MEM)
    {
      debug_rtx (operands[1]);
      fatal ("thumb_load_double_from_address: source is not a computed memory address");
    }

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
	{
	  output_asm_insn ("ldr\t%H0, %2", operands);
	  output_asm_insn ("ldr\t%0, %1", operands);
	}
      else
	{
	  output_asm_insn ("ldr\t%0, %1", operands);
	  output_asm_insn ("ldr\t%H0, %2", operands);
	}
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
	base = arg2, offset = arg1;
      else
	base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
	fatal ("thumb_load_double_from_address: base is not a register");

      /* Catch the case of <address> = <reg> + <reg>.  */
      if (GET_CODE (offset) == REG)
	{
	  int reg_offset = REGNO (offset);
	  int reg_base = REGNO (base);
	  int reg_dest = REGNO (operands[0]);

	  /* Add the base and offset registers together into the
	     higher destination register.  */
	  asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
		       reg_dest + 1, reg_base, reg_offset);

	  /* Load the lower destination register from the address in
	     the higher destination register.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
		       reg_dest, reg_dest + 1);

	  /* Load the higher destination register from its own address
	     plus 4.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
		       reg_dest + 1, reg_dest + 1);
	}
      else
	{
	  /* Compute <address> + 4 for the high order load.  */
	  operands[2] = gen_rtx (MEM, SImode,
				 plus_constant (XEXP (operands[1], 0), 4));

	  /* If the computed address is held in the low order register
	     then load the high order register first, otherwise always
	     load the low order register first.  */
	  if (REGNO (operands[0]) == REGNO (base))
	    {
	      output_asm_insn ("ldr\t%H0, %2", operands);
	      output_asm_insn ("ldr\t%0, %1", operands);
	    }
	  else
	    {
	      output_asm_insn ("ldr\t%0, %1", operands);
	      output_asm_insn ("ldr\t%H0, %2", operands);
	    }
	}
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
	 directly.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      debug_rtx (operands[1]);
      fatal ("thumb_load_double_from_address: unhandled address calculation");
      break;
    }

  return "";
}
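
/* As an illustration of the PLUS-with-register case above: loading a
   double word at r2 + r3 into r0/r1 (reg_dest == 0) produces

	add	r1, r2, r3
	ldr	r0, [r1, #0]
	ldr	r1, [r1, #4]

   The low word is fetched first through the high destination register,
   so the computed address survives until the final load overwrites it.
   (The register numbers are only an example.)  */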


const char *
thumb_output_move_mem_multiple (n, operands)
     int n;
     rtx * operands;
{
  rtx tmp;

  switch (n)
    {
    case 2:
      if (REGNO (operands[2]) > REGNO (operands[3]))
	{
	  tmp = operands[2];
	  operands[2] = operands[3];
	  operands[3] = tmp;
	}
      output_asm_insn ("ldmia\t%1!, {%2, %3}", operands);
      output_asm_insn ("stmia\t%0!, {%2, %3}", operands);
      break;

    case 3:
      if (REGNO (operands[2]) > REGNO (operands[3]))
	{
	  tmp = operands[2];
	  operands[2] = operands[3];
	  operands[3] = tmp;
	}
      if (REGNO (operands[3]) > REGNO (operands[4]))
	{
	  tmp = operands[3];
	  operands[3] = operands[4];
	  operands[4] = tmp;
	}
      if (REGNO (operands[2]) > REGNO (operands[3]))
	{
	  tmp = operands[2];
	  operands[2] = operands[3];
	  operands[3] = tmp;
	}

      output_asm_insn ("ldmia\t%1!, {%2, %3, %4}", operands);
      output_asm_insn ("stmia\t%0!, {%2, %3, %4}", operands);
      break;

    default:
      abort ();
    }

  return "";
}
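
/* The compare-and-swap sequences above form a small sorting network:
   ldmia/stmia register lists must name registers in ascending order,
   whatever order the middle-end happened to allocate them in.  For
   example, scratch registers r4, r2, r3 would be emitted as

	ldmia	r1!, {r2, r3, r4}
	stmia	r0!, {r2, r3, r4}

   (the register numbers are illustrative only).  */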

/* Routines for generating rtl.  */

void
thumb_expand_movstrqi (operands)
     rtx * operands;
{
  rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
  rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
  HOST_WIDE_INT len = INTVAL (operands[2]);
  HOST_WIDE_INT offset = 0;

  while (len >= 12)
    {
      emit_insn (gen_movmem12b (out, in));
      len -= 12;
    }

  if (len >= 8)
    {
      emit_insn (gen_movmem8b (out, in));
      len -= 8;
    }

  if (len >= 4)
    {
      rtx reg = gen_reg_rtx (SImode);
      emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
      emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
      len -= 4;
      offset += 4;
    }

  if (len >= 2)
    {
      rtx reg = gen_reg_rtx (HImode);
      emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
			    reg));
      len -= 2;
      offset += 2;
    }

  if (len)
    {
      rtx reg = gen_reg_rtx (QImode);
      emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
			    reg));
    }
}
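
/* A worked example of the decomposition above: a 23 byte block copy
   breaks down as 23 = 12 + 8 + 2 + 1, i.e. one movmem12b and one
   movmem8b (both of which post-increment the pointer registers, which
   is why `offset' stays at zero for them), then a halfword at offset 0
   and a final byte at offset 2.  */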

int
thumb_cmp_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((GET_CODE (op) == CONST_INT
	   && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
	  || register_operand (op, mode));
}
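
/* The 256 bound reflects the 8 bit immediate field of the Thumb CMP
   instruction: `cmp r0, #255' encodes directly, while comparing against
   256 or a negative value forces the constant into a register first.  */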

static const char *
thumb_condition_code (x, invert)
     rtx x;
     int invert;
{
  static const char * conds[] =
  {
    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
    "hi", "ls", "ge", "lt", "gt", "le"
  };
  int val;

  switch (GET_CODE (x))
    {
    case EQ: val = 0; break;
    case NE: val = 1; break;
    case GEU: val = 2; break;
    case LTU: val = 3; break;
    case GTU: val = 8; break;
    case LEU: val = 9; break;
    case GE: val = 10; break;
    case LT: val = 11; break;
    case GT: val = 12; break;
    case LE: val = 13; break;
    default:
      abort ();
    }

  return conds[val ^ invert];
}
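
/* The XOR with `invert' works because the table pairs each condition
   with its logical inverse at the adjacent index: GE, for example, maps
   to index 10 ("ge"), and inverting flips the low bit to give index 11
   ("lt").  */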

/* Handle storing a half-word to memory during reload.  */
void
thumb_reload_out_hi (operands)
     rtx * operands;
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
}

/* Handle loading a half-word from memory during reload.  This case
   never arises on the Thumb, so we just abort.  */
void
thumb_reload_in_hi (operands)
     rtx * operands ATTRIBUTE_UNUSED;
{
  abort ();
}

/* Return the length of a function name prefix
   that starts with the character 'c'.  */
static int
arm_get_strip_length (char c)
{
  switch (c)
    {
    ARM_NAME_ENCODING_LENGTHS
    default: return 0;
    }
}

/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */
const char *
arm_strip_name_encoding (const char * name)
{
  int skip;

  while ((skip = arm_get_strip_length (* name)))
    name += skip;

  return name;
}
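
/* For example, if ARM_NAME_ENCODING_LENGTHS makes arm_get_strip_length
   return 1 for both '*' and '@' (the exact encodings are target
   defined), then arm_strip_name_encoding ("*@foo") walks past both
   prefix characters and returns "foo".  */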

#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

rtx aof_pic_label = NULL_RTX;
struct pic_chain
{
  struct pic_chain * next;
  char * symname;
};

static struct pic_chain * aof_pic_chain = NULL;

rtx
aof_pic_entry (x)
     rtx x;
{
  struct pic_chain ** chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    {
      /* We mark this here and not in arm_add_gc_roots() to avoid
	 polluting even more code with ifdefs, and because it never
	 contains anything useful until we assign to it here.  */
      ggc_add_rtx_root (& aof_pic_label, 1);
      /* This needs to persist throughout the compilation.  */
      end_temporary_allocation ();
      aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
      resume_temporary_allocation ();
    }

  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}
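
/* To illustrate the address constant pool built above: the first symbol
   entered lands at x$adcons + 0, the second at x$adcons + 4, and so on,
   each occupying one DCD word in the table dumped below.  Note that the
   == comparison of symname against XSTR relies on identical symbol
   names sharing the same string storage.  */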

void
aof_dump_pic_table (f)
     FILE * f;
{
  struct pic_chain * chain;

  if (aof_pic_chain == NULL)
    return;

  asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
	       PIC_OFFSET_TABLE_REGNUM,
	       PIC_OFFSET_TABLE_REGNUM);
  fputs ("|x$adcons|\n", f);

  for (chain = aof_pic_chain; chain; chain = chain->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, chain->symname);
      fputs ("\n", f);
    }
}

int arm_text_section_count = 1;

char *
aof_text_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
	   arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}
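
/* So the first text section of a PIC compilation, for instance, is
   introduced by

	AREA |C$$code1|, CODE, READONLY, PIC, REENTRANT

   and each subsequent text section bumps the counter in the area
   name.  */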

static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}

/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

struct import
{
  struct import * next;
  char * name;
};

static struct import * imports_list = NULL;

void
aof_add_import (name)
     char * name;
{
  struct import * new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}

void
aof_delete_import (name)
     char * name;
{
  struct import ** old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
	{
	  *old = (*old)->next;
	  return;
	}
    }
}
int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE * f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */