]> gcc.gnu.org Git - gcc.git/blame - gcc/config/arm/arm.c
arm.c (add_constant): New parameter address_only, change caller.
[gcc.git] / gcc / config / arm / arm.c
CommitLineData
cce8749e 1/* Output routines for GCC for ARM/RISCiX.
e5e809f4 2 Copyright (C) 1991, 93, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
cce8749e 3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 4 and Martin Simmons (@harleqn.co.uk).
ff9940b0 5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
cce8749e
CH
6
7This file is part of GNU CC.
8
9GNU CC is free software; you can redistribute it and/or modify
10it under the terms of the GNU General Public License as published by
11the Free Software Foundation; either version 2, or (at your option)
12any later version.
13
14GNU CC is distributed in the hope that it will be useful,
15but WITHOUT ANY WARRANTY; without even the implied warranty of
16MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17GNU General Public License for more details.
18
19You should have received a copy of the GNU General Public License
20along with GNU CC; see the file COPYING. If not, write to
8fb289e7
RK
21the Free Software Foundation, 59 Temple Place - Suite 330,
22Boston, MA 02111-1307, USA. */
ff9940b0 23
56636818 24#include "config.h"
cce8749e 25#include <stdio.h>
f3bb6135 26#include <string.h>
cce8749e
CH
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "flags.h"
af48348a 37#include "reload.h"
e2c671ba 38#include "tree.h"
bee06f3d 39#include "expr.h"
ad076f4e 40#include "toplev.h"
cce8749e
CH
41
42/* The maximum number of insns skipped which will be conditionalised if
43 possible. */
44#define MAX_INSNS_SKIPPED 5
45
46/* Some function declarations. */
cce8749e 47extern FILE *asm_out_file;
cce8749e 48
18af7313
RE
49static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
50static char *output_multi_immediate PROTO ((rtx *, char *, char *, int,
51 HOST_WIDE_INT));
2b835d68
RE
52static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
53 HOST_WIDE_INT, rtx, rtx, int, int));
18af7313
RE
54static int arm_naked_function_p PROTO ((tree));
55static void init_fpa_table PROTO ((void));
56static enum machine_mode select_dominance_cc_mode PROTO ((enum rtx_code, rtx,
57 rtx, HOST_WIDE_INT));
332072db 58static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode, int *));
18af7313
RE
59static void dump_table PROTO ((rtx));
60static int fixit PROTO ((rtx, enum machine_mode, int));
61static rtx find_barrier PROTO ((rtx, int));
62static int broken_move PROTO ((rtx));
63static char *fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
64static int eliminate_lr2ip PROTO ((rtx *));
65static char *shift_op PROTO ((rtx, HOST_WIDE_INT *));
66static int pattern_really_clobbers_lr PROTO ((rtx));
67static int function_really_clobbers_lr PROTO ((rtx));
68static void emit_multi_reg_push PROTO ((int));
b111229a 69static void emit_sfm PROTO ((int, int));
18af7313 70static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
f3bb6135 71
ff9940b0
RE
72/* Define the information needed to generate branch insns. This is
73 stored from the compare operation. */
74
75rtx arm_compare_op0, arm_compare_op1;
76int arm_compare_fp;
77
78/* What type of cpu are we compiling for? */
ff9940b0
RE
79enum processor_type arm_cpu;
80
b111229a 81/* What type of floating point are we tuning for? */
bee06f3d
RE
82enum floating_point_type arm_fpu;
83
b111229a
RE
84/* What type of floating point instructions are available? */
85enum floating_point_type arm_fpu_arch;
86
2b835d68
RE
87/* What program mode is the cpu running in? 26-bit mode or 32-bit mode */
88enum prog_mode_type arm_prgmode;
89
b111229a
RE
90/* Set by the -mfp=... option */
91char *target_fp_name = NULL;
2b835d68
RE
92
93/* Nonzero if this is an "M" variant of the processor. */
94int arm_fast_multiply = 0;
95
32de079a 96/* Nonzero if this chip supports the ARM Architecture 4 extensions */
2b835d68
RE
97int arm_arch4 = 0;
98
b111229a
RE
99/* Set to the features we should tune the code for (multiply speed etc). */
100int tune_flags = 0;
101
cce8749e
CH
102/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
103 must report the mode of the memory reference from PRINT_OPERAND to
104 PRINT_OPERAND_ADDRESS. */
f3bb6135 105enum machine_mode output_memory_reference_mode;
cce8749e
CH
106
107/* Nonzero if the prologue must setup `fp'. */
108int current_function_anonymous_args;
109
32de079a
RE
110/* The register number to be used for the PIC offset register. */
111int arm_pic_register = 9;
112
cce8749e
CH
113/* Location counter of .text segment. */
114int arm_text_location = 0;
115
ff9940b0
RE
116/* Set to one if we think that lr is only saved because of subroutine calls,
117 but all of these can be `put after' return insns */
118int lr_save_eliminated;
119
ff9940b0
RE
120/* Set to 1 when a return insn is output, this means that the epilogue
121 is not needed. */
122
123static int return_used_this_function;
124
2b835d68
RE
125static int arm_constant_limit = 3;
126
cce8749e
CH
127/* For an explanation of these variables, see final_prescan_insn below. */
128int arm_ccfsm_state;
84ed5e79 129enum arm_cond_code arm_current_cc;
cce8749e
CH
130rtx arm_target_insn;
131int arm_target_label;
9997d19d
RE
132
133/* The condition codes of the ARM, and the inverse function. */
134char *arm_condition_codes[] =
135{
136 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
137 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
138};
139
84ed5e79 140static enum arm_cond_code get_arm_condition_code ();
2b835d68
RE
141
142\f
143/* Initialization code */
144
b111229a 145struct arm_cpu_select arm_select[4] =
bd9c7e23
RE
146{
147 /* switch name, tune arch */
148 { (char *)0, "--with-cpu=", 1, 1 },
149 { (char *)0, "-mcpu=", 1, 1 },
b111229a 150 { (char *)0, "-march=", 0, 1 },
bd9c7e23
RE
151 { (char *)0, "-mtune=", 1, 0 },
152};
153
2b835d68
RE
154#define FL_CO_PROC 0x01 /* Has external co-processor bus */
155#define FL_FAST_MULT 0x02 /* Fast multiply */
156#define FL_MODE26 0x04 /* 26-bit mode support */
157#define FL_MODE32 0x08 /* 32-bit mode support */
158#define FL_ARCH4 0x10 /* Architecture rel 4 */
159#define FL_THUMB 0x20 /* Thumb aware */
32de079a 160
2b835d68
RE
161struct processors
162{
163 char *name;
164 enum processor_type type;
165 unsigned int flags;
166};
167
168/* Not all of these give usefully different compilation alternatives,
169 but there is no simple way of generalizing them. */
170static struct processors all_procs[] =
171{
172 {"arm2", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
173 {"arm250", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
174 {"arm3", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
175 {"arm6", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
2b835d68
RE
176 {"arm600", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
177 {"arm610", PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
2b835d68 178 {"arm7", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
956d6950 179 /* arm7m doesn't exist on its own, only in conjunction with D, (and I), but
32de079a
RE
180 those don't alter the code, so it is sometimes known as the arm7m */
181 {"arm7m", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
182 | FL_MODE26)},
2b835d68
RE
183 {"arm7dm", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
184 | FL_MODE26)},
185 {"arm7dmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
186 | FL_MODE26)},
187 {"arm700", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
2b835d68 188 {"arm710", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
bd9c7e23 189 {"arm7100", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
2b835d68 190 {"arm7500", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
32de079a
RE
191 /* Doesn't really have an external co-proc, but does have embedded fpu */
192 {"arm7500fe", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
2b835d68
RE
193 {"arm7tdmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
194 | FL_ARCH4 | FL_THUMB)},
32de079a
RE
195 {"arm8", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
196 | FL_ARCH4)},
197 {"arm810", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
198 | FL_ARCH4)},
199 {"strongarm", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
200 | FL_ARCH4)},
201 {"strongarm110", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
202 | FL_ARCH4)},
b111229a
RE
203 {"armv2", PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
204 {"armv2a", PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
205 {"armv3", PROCESSOR_NONE, FL_CO_PROC | FL_MODE32 | FL_MODE26},
206 {"armv3m", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
207 | FL_MODE26)},
208 {"armv4", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
209 | FL_MODE26 | FL_ARCH4)},
210 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
211 implementations that support it, so we will leave it out for now. */
212 {"armv4t", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
213 | FL_ARCH4)},
2b835d68
RE
214 {NULL, 0, 0}
215};
216
217/* Fix up any incompatible options that the user has specified.
218 This has now turned into a maze. */
219void
220arm_override_options ()
221{
222 int arm_thumb_aware = 0;
bd9c7e23 223 int flags = 0;
ed4c4348 224 unsigned i;
bd9c7e23 225 struct arm_cpu_select *ptr;
32de079a
RE
226 static struct cpu_default {
227 int cpu;
228 char *name;
229 } cpu_defaults[] = {
230 { TARGET_CPU_arm2, "arm2" },
231 { TARGET_CPU_arm6, "arm6" },
232 { TARGET_CPU_arm610, "arm610" },
233 { TARGET_CPU_arm7dm, "arm7dm" },
234 { TARGET_CPU_arm7500fe, "arm7500fe" },
235 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
236 { TARGET_CPU_arm8, "arm8" },
237 { TARGET_CPU_arm810, "arm810" },
238 { TARGET_CPU_strongarm, "strongarm" },
239 { 0, 0 }
240 };
241 struct cpu_default *def;
242
243 /* Set the default. */
244 for (def = &cpu_defaults[0]; def->name; ++def)
245 if (def->cpu == TARGET_CPU_DEFAULT)
246 break;
247 if (! def->name)
248 abort ();
bd9c7e23 249
32de079a 250 arm_select[0].string = def->name;
bd9c7e23
RE
251
252 for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
253 {
254 ptr = &arm_select[i];
255 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
256 {
257 struct processors *sel;
258
259 for (sel = all_procs; sel->name != NULL; sel++)
260 if (! strcmp (ptr->string, sel->name))
261 {
b111229a
RE
262 /* -march= is the only flag that can take an architecture
263 type, so if we match when the tune bit is set, the
264 option was invalid. */
bd9c7e23 265 if (ptr->set_tune_p)
b111229a
RE
266 {
267 if (sel->type == PROCESSOR_NONE)
268 continue; /* Its an architecture, not a cpu */
269
270 arm_cpu = sel->type;
271 tune_flags = sel->flags;
272 }
bd9c7e23
RE
273
274 if (ptr->set_arch_p)
275 flags = sel->flags;
b111229a 276
bd9c7e23
RE
277 break;
278 }
279
280 if (sel->name == NULL)
281 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
282 }
283 }
2b835d68
RE
284
285 if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
286 warning ("-g with -fomit-frame-pointer may not give sensible debugging");
287
288 if (TARGET_POKE_FUNCTION_NAME)
289 target_flags |= ARM_FLAG_APCS_FRAME;
290
291 if (TARGET_6)
32de079a 292 warning ("Option '-m6' deprecated. Use: '-mapcs-32' or -mcpu=<proc>");
2b835d68
RE
293
294 if (TARGET_3)
32de079a 295 warning ("Option '-m3' deprecated. Use: '-mapcs-26' or -mcpu=<proc>");
2b835d68 296
2b835d68
RE
297 if (TARGET_APCS_REENT && flag_pic)
298 fatal ("-fpic and -mapcs-reent are incompatible");
299
300 if (TARGET_APCS_REENT)
32de079a
RE
301 warning ("APCS reentrant code not supported.");
302
303 /* If stack checking is disabled, we can use r10 as the PIC register,
304 which keeps r9 available. */
305 if (flag_pic && ! TARGET_APCS_STACK)
306 arm_pic_register = 10;
2b835d68 307
32de079a
RE
308 /* Well, I'm about to have a go, but pic is NOT going to be compatible
309 with APCS reentrancy, since that requires too much support in the
310 assembler and linker, and the ARMASM assembler seems to lack some
311 required directives. */
2b835d68 312 if (flag_pic)
b4b68717 313 warning ("Position independent code not supported");
2b835d68
RE
314
315 if (TARGET_APCS_FLOAT)
316 warning ("Passing floating point arguments in fp regs not yet supported");
317
318 if (TARGET_APCS_STACK && ! TARGET_APCS)
319 {
320 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
321 target_flags |= ARM_FLAG_APCS_FRAME;
322 }
323
b111229a 324 /* Default is to tune for an FPA */
2b835d68
RE
325 arm_fpu = FP_HARD;
326
bd9c7e23
RE
327 /* Default value for floating point code... if no co-processor
328 bus, then schedule for emulated floating point. Otherwise,
b111229a
RE
329 assume the user has an FPA.
330 Note: this does not prevent use of floating point instructions,
331 -msoft-float does that. */
ad076f4e 332 if ((tune_flags & FL_CO_PROC) == 0)
bd9c7e23 333 arm_fpu = FP_SOFT3;
b111229a 334
bd9c7e23
RE
335 arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
336 arm_arch4 = (flags & FL_ARCH4) != 0;
337 arm_thumb_aware = (flags & FL_THUMB) != 0;
2b835d68 338
b111229a 339 if (target_fp_name)
2b835d68 340 {
b111229a
RE
341 if (strcmp (target_fp_name, "2") == 0)
342 arm_fpu_arch = FP_SOFT2;
343 else if (strcmp (target_fp_name, "3") == 0)
344 arm_fpu_arch = FP_HARD;
2b835d68 345 else
b111229a
RE
346 fatal ("Invalid floating point emulation option: -mfpe=%s",
347 target_fp_name);
2b835d68 348 }
b111229a
RE
349 else
350 arm_fpu_arch = FP_DEFAULT;
2b835d68
RE
351
352 if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
353 {
354 warning ("This processor variant does not support Thumb interworking");
355 target_flags &= ~ARM_FLAG_THUMB;
356 }
357
358 if (TARGET_FPE && arm_fpu != FP_HARD)
359 arm_fpu = FP_SOFT2;
360
361 /* For arm2/3 there is no need to do any scheduling if there is only
362 a floating point emulator, or we are doing software floating-point. */
363 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
364 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
365
366 arm_prog_mode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
367}
cce8749e 368\f
32de079a 369
ff9940b0
RE
370/* Return 1 if it is possible to return using a single instruction */
371
372int
373use_return_insn ()
374{
375 int regno;
376
377 if (!reload_completed ||current_function_pretend_args_size
378 || current_function_anonymous_args
56636818
JL
379 || ((get_frame_size () + current_function_outgoing_args_size != 0)
380 && !(TARGET_APCS || frame_pointer_needed)))
ff9940b0
RE
381 return 0;
382
b111229a
RE
383 /* Can't be done if interworking with Thumb, and any registers have been
384 stacked */
385 if (TARGET_THUMB_INTERWORK)
386 for (regno = 0; regno < 16; regno++)
387 if (regs_ever_live[regno] && ! call_used_regs[regno])
388 return 0;
389
ff9940b0
RE
390 /* Can't be done if any of the FPU regs are pushed, since this also
391 requires an insn */
b111229a
RE
392 for (regno = 16; regno < 24; regno++)
393 if (regs_ever_live[regno] && ! call_used_regs[regno])
ff9940b0
RE
394 return 0;
395
31fdb4d5
DE
396 /* If a function is naked, don't use the "return" insn. */
397 if (arm_naked_function_p (current_function_decl))
398 return 0;
399
ff9940b0
RE
400 return 1;
401}
402
cce8749e
CH
403/* Return TRUE if int I is a valid immediate ARM constant. */
404
405int
406const_ok_for_arm (i)
ff9940b0 407 HOST_WIDE_INT i;
cce8749e 408{
ed4c4348 409 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 410
56636818
JL
411 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
412 be all zero, or all one. */
413 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
414 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
ed4c4348
RE
415 != ((~(unsigned HOST_WIDE_INT) 0)
416 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
56636818
JL
417 return FALSE;
418
e2c671ba
RE
419 /* Fast return for 0 and powers of 2 */
420 if ((i & (i - 1)) == 0)
421 return TRUE;
422
cce8749e
CH
423 do
424 {
abaa26e5 425 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
f3bb6135 426 return TRUE;
abaa26e5
RE
427 mask =
428 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
429 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
ed4c4348 430 } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 431
f3bb6135
RE
432 return FALSE;
433}
cce8749e 434
e2c671ba
RE
435/* Return true if I is a valid constant for the operation CODE. */
436int
437const_ok_for_op (i, code, mode)
438 HOST_WIDE_INT i;
439 enum rtx_code code;
440 enum machine_mode mode;
441{
442 if (const_ok_for_arm (i))
443 return 1;
444
445 switch (code)
446 {
447 case PLUS:
448 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
449
450 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
451 case XOR:
452 case IOR:
453 return 0;
454
455 case AND:
456 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
457
458 default:
459 abort ();
460 }
461}
462
463/* Emit a sequence of insns to handle a large constant.
464 CODE is the code of the operation required, it can be any of SET, PLUS,
465 IOR, AND, XOR, MINUS;
466 MODE is the mode in which the operation is being performed;
467 VAL is the integer to operate on;
468 SOURCE is the other operand (a register, or a null-pointer for SET);
469 SUBTARGETS means it is safe to create scratch registers if that will
2b835d68
RE
470 either produce a simpler sequence, or we will want to cse the values.
471 Return value is the number of insns emitted. */
e2c671ba
RE
472
473int
474arm_split_constant (code, mode, val, target, source, subtargets)
475 enum rtx_code code;
476 enum machine_mode mode;
477 HOST_WIDE_INT val;
478 rtx target;
479 rtx source;
480 int subtargets;
2b835d68
RE
481{
482 if (subtargets || code == SET
483 || (GET_CODE (target) == REG && GET_CODE (source) == REG
484 && REGNO (target) != REGNO (source)))
485 {
2b835d68
RE
486 if (arm_gen_constant (code, mode, val, target, source, 1, 0)
487 > arm_constant_limit + (code != SET))
488 {
489 if (code == SET)
490 {
491 /* Currently SET is the only monadic value for CODE, all
492 the rest are diadic. */
493 emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
494 return 1;
495 }
496 else
497 {
498 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
499
500 emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
501 /* For MINUS, the value is subtracted from, since we never
502 have subtraction of a constant. */
503 if (code == MINUS)
504 emit_insn (gen_rtx (SET, VOIDmode, target,
505 gen_rtx (code, mode, temp, source)));
506 else
507 emit_insn (gen_rtx (SET, VOIDmode, target,
508 gen_rtx (code, mode, source, temp)));
509 return 2;
510 }
511 }
512 }
513
514 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
515}
516
517/* As above, but extra parameter GENERATE which, if clear, suppresses
518 RTL generation. */
519int
520arm_gen_constant (code, mode, val, target, source, subtargets, generate)
521 enum rtx_code code;
522 enum machine_mode mode;
523 HOST_WIDE_INT val;
524 rtx target;
525 rtx source;
526 int subtargets;
527 int generate;
e2c671ba 528{
e2c671ba
RE
529 int can_invert = 0;
530 int can_negate = 0;
531 int can_negate_initial = 0;
532 int can_shift = 0;
533 int i;
534 int num_bits_set = 0;
535 int set_sign_bit_copies = 0;
536 int clear_sign_bit_copies = 0;
537 int clear_zero_bit_copies = 0;
538 int set_zero_bit_copies = 0;
539 int insns = 0;
e2c671ba
RE
540 unsigned HOST_WIDE_INT temp1, temp2;
541 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
542
543 /* find out which operations are safe for a given CODE. Also do a quick
544 check for degenerate cases; these can occur when DImode operations
545 are split. */
546 switch (code)
547 {
548 case SET:
549 can_invert = 1;
550 can_shift = 1;
551 can_negate = 1;
552 break;
553
554 case PLUS:
555 can_negate = 1;
556 can_negate_initial = 1;
557 break;
558
559 case IOR:
560 if (remainder == 0xffffffff)
561 {
2b835d68
RE
562 if (generate)
563 emit_insn (gen_rtx (SET, VOIDmode, target,
564 GEN_INT (ARM_SIGN_EXTEND (val))));
e2c671ba
RE
565 return 1;
566 }
567 if (remainder == 0)
568 {
569 if (reload_completed && rtx_equal_p (target, source))
570 return 0;
2b835d68
RE
571 if (generate)
572 emit_insn (gen_rtx (SET, VOIDmode, target, source));
e2c671ba
RE
573 return 1;
574 }
575 break;
576
577 case AND:
578 if (remainder == 0)
579 {
2b835d68
RE
580 if (generate)
581 emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
e2c671ba
RE
582 return 1;
583 }
584 if (remainder == 0xffffffff)
585 {
586 if (reload_completed && rtx_equal_p (target, source))
587 return 0;
2b835d68
RE
588 if (generate)
589 emit_insn (gen_rtx (SET, VOIDmode, target, source));
e2c671ba
RE
590 return 1;
591 }
592 can_invert = 1;
593 break;
594
595 case XOR:
596 if (remainder == 0)
597 {
598 if (reload_completed && rtx_equal_p (target, source))
599 return 0;
2b835d68
RE
600 if (generate)
601 emit_insn (gen_rtx (SET, VOIDmode, target, source));
e2c671ba
RE
602 return 1;
603 }
604 if (remainder == 0xffffffff)
605 {
2b835d68
RE
606 if (generate)
607 emit_insn (gen_rtx (SET, VOIDmode, target,
608 gen_rtx (NOT, mode, source)));
e2c671ba
RE
609 return 1;
610 }
611
612 /* We don't know how to handle this yet below. */
613 abort ();
614
615 case MINUS:
616 /* We treat MINUS as (val - source), since (source - val) is always
617 passed as (source + (-val)). */
618 if (remainder == 0)
619 {
2b835d68
RE
620 if (generate)
621 emit_insn (gen_rtx (SET, VOIDmode, target,
622 gen_rtx (NEG, mode, source)));
e2c671ba
RE
623 return 1;
624 }
625 if (const_ok_for_arm (val))
626 {
2b835d68
RE
627 if (generate)
628 emit_insn (gen_rtx (SET, VOIDmode, target,
629 gen_rtx (MINUS, mode, GEN_INT (val), source)));
e2c671ba
RE
630 return 1;
631 }
632 can_negate = 1;
633
634 break;
635
636 default:
637 abort ();
638 }
639
640 /* If we can do it in one insn get out quickly */
641 if (const_ok_for_arm (val)
642 || (can_negate_initial && const_ok_for_arm (-val))
643 || (can_invert && const_ok_for_arm (~val)))
644 {
2b835d68
RE
645 if (generate)
646 emit_insn (gen_rtx (SET, VOIDmode, target,
647 (source ? gen_rtx (code, mode, source,
648 GEN_INT (val))
649 : GEN_INT (val))));
e2c671ba
RE
650 return 1;
651 }
652
653
654 /* Calculate a few attributes that may be useful for specific
655 optimizations. */
656
657 for (i = 31; i >= 0; i--)
658 {
659 if ((remainder & (1 << i)) == 0)
660 clear_sign_bit_copies++;
661 else
662 break;
663 }
664
665 for (i = 31; i >= 0; i--)
666 {
667 if ((remainder & (1 << i)) != 0)
668 set_sign_bit_copies++;
669 else
670 break;
671 }
672
673 for (i = 0; i <= 31; i++)
674 {
675 if ((remainder & (1 << i)) == 0)
676 clear_zero_bit_copies++;
677 else
678 break;
679 }
680
681 for (i = 0; i <= 31; i++)
682 {
683 if ((remainder & (1 << i)) != 0)
684 set_zero_bit_copies++;
685 else
686 break;
687 }
688
689 switch (code)
690 {
691 case SET:
692 /* See if we can do this by sign_extending a constant that is known
693 to be negative. This is a good, way of doing it, since the shift
694 may well merge into a subsequent insn. */
695 if (set_sign_bit_copies > 1)
696 {
697 if (const_ok_for_arm
698 (temp1 = ARM_SIGN_EXTEND (remainder
699 << (set_sign_bit_copies - 1))))
700 {
2b835d68
RE
701 if (generate)
702 {
d499463f 703 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68
RE
704 emit_insn (gen_rtx (SET, VOIDmode, new_src,
705 GEN_INT (temp1)));
706 emit_insn (gen_ashrsi3 (target, new_src,
707 GEN_INT (set_sign_bit_copies - 1)));
708 }
e2c671ba
RE
709 return 2;
710 }
711 /* For an inverted constant, we will need to set the low bits,
712 these will be shifted out of harm's way. */
713 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
714 if (const_ok_for_arm (~temp1))
715 {
2b835d68
RE
716 if (generate)
717 {
d499463f 718 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68
RE
719 emit_insn (gen_rtx (SET, VOIDmode, new_src,
720 GEN_INT (temp1)));
721 emit_insn (gen_ashrsi3 (target, new_src,
722 GEN_INT (set_sign_bit_copies - 1)));
723 }
e2c671ba
RE
724 return 2;
725 }
726 }
727
728 /* See if we can generate this by setting the bottom (or the top)
729 16 bits, and then shifting these into the other half of the
730 word. We only look for the simplest cases, to do more would cost
731 too much. Be careful, however, not to generate this when the
732 alternative would take fewer insns. */
733 if (val & 0xffff0000)
734 {
735 temp1 = remainder & 0xffff0000;
736 temp2 = remainder & 0x0000ffff;
737
738 /* Overlaps outside this range are best done using other methods. */
739 for (i = 9; i < 24; i++)
740 {
741 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
742 && ! const_ok_for_arm (temp2))
743 {
d499463f
RE
744 rtx new_src = (subtargets
745 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
746 : target);
747 insns = arm_gen_constant (code, mode, temp2, new_src,
2b835d68 748 source, subtargets, generate);
e2c671ba 749 source = new_src;
2b835d68
RE
750 if (generate)
751 emit_insn (gen_rtx (SET, VOIDmode, target,
752 gen_rtx (IOR, mode,
753 gen_rtx (ASHIFT, mode, source,
754 GEN_INT (i)),
755 source)));
e2c671ba
RE
756 return insns + 1;
757 }
758 }
759
760 /* Don't duplicate cases already considered. */
761 for (i = 17; i < 24; i++)
762 {
763 if (((temp1 | (temp1 >> i)) == remainder)
764 && ! const_ok_for_arm (temp1))
765 {
d499463f
RE
766 rtx new_src = (subtargets
767 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
768 : target);
769 insns = arm_gen_constant (code, mode, temp1, new_src,
2b835d68 770 source, subtargets, generate);
e2c671ba 771 source = new_src;
2b835d68
RE
772 if (generate)
773 emit_insn (gen_rtx (SET, VOIDmode, target,
774 gen_rtx (IOR, mode,
775 gen_rtx (LSHIFTRT, mode,
776 source, GEN_INT (i)),
777 source)));
e2c671ba
RE
778 return insns + 1;
779 }
780 }
781 }
782 break;
783
784 case IOR:
785 case XOR:
7b64da89
RE
786 /* If we have IOR or XOR, and the constant can be loaded in a
787 single instruction, and we can find a temporary to put it in,
e2c671ba
RE
788 then this can be done in two instructions instead of 3-4. */
789 if (subtargets
d499463f 790 /* TARGET can't be NULL if SUBTARGETS is 0 */
e2c671ba
RE
791 || (reload_completed && ! reg_mentioned_p (target, source)))
792 {
793 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
794 {
2b835d68
RE
795 if (generate)
796 {
797 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
e2c671ba 798
7b64da89 799 emit_insn (gen_rtx (SET, VOIDmode, sub, GEN_INT (val)));
2b835d68
RE
800 emit_insn (gen_rtx (SET, VOIDmode, target,
801 gen_rtx (code, mode, source, sub)));
802 }
e2c671ba
RE
803 return 2;
804 }
805 }
806
807 if (code == XOR)
808 break;
809
810 if (set_sign_bit_copies > 8
811 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
812 {
2b835d68
RE
813 if (generate)
814 {
815 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
816 rtx shift = GEN_INT (set_sign_bit_copies);
817
818 emit_insn (gen_rtx (SET, VOIDmode, sub,
819 gen_rtx (NOT, mode,
820 gen_rtx (ASHIFT, mode, source,
821 shift))));
822 emit_insn (gen_rtx (SET, VOIDmode, target,
823 gen_rtx (NOT, mode,
824 gen_rtx (LSHIFTRT, mode, sub,
825 shift))));
826 }
e2c671ba
RE
827 return 2;
828 }
829
830 if (set_zero_bit_copies > 8
831 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
832 {
2b835d68
RE
833 if (generate)
834 {
835 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
836 rtx shift = GEN_INT (set_zero_bit_copies);
837
838 emit_insn (gen_rtx (SET, VOIDmode, sub,
839 gen_rtx (NOT, mode,
840 gen_rtx (LSHIFTRT, mode, source,
841 shift))));
842 emit_insn (gen_rtx (SET, VOIDmode, target,
843 gen_rtx (NOT, mode,
844 gen_rtx (ASHIFT, mode, sub,
845 shift))));
846 }
e2c671ba
RE
847 return 2;
848 }
849
850 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
851 {
2b835d68
RE
852 if (generate)
853 {
854 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
855 emit_insn (gen_rtx (SET, VOIDmode, sub,
856 gen_rtx (NOT, mode, source)));
857 source = sub;
858 if (subtargets)
859 sub = gen_reg_rtx (mode);
860 emit_insn (gen_rtx (SET, VOIDmode, sub,
861 gen_rtx (AND, mode, source,
862 GEN_INT (temp1))));
863 emit_insn (gen_rtx (SET, VOIDmode, target,
864 gen_rtx (NOT, mode, sub)));
865 }
e2c671ba
RE
866 return 3;
867 }
868 break;
869
870 case AND:
871 /* See if two shifts will do 2 or more insn's worth of work. */
872 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
873 {
874 HOST_WIDE_INT shift_mask = ((0xffffffff
875 << (32 - clear_sign_bit_copies))
876 & 0xffffffff);
e2c671ba
RE
877
878 if ((remainder | shift_mask) != 0xffffffff)
879 {
2b835d68
RE
880 if (generate)
881 {
d499463f 882 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
2b835d68 883 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
884 new_src, source, subtargets, 1);
885 source = new_src;
2b835d68
RE
886 }
887 else
d499463f
RE
888 {
889 rtx targ = subtargets ? NULL_RTX : target;
890 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
891 targ, source, subtargets, 0);
892 }
2b835d68
RE
893 }
894
895 if (generate)
896 {
d499463f
RE
897 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
898 rtx shift = GEN_INT (clear_sign_bit_copies);
899
900 emit_insn (gen_ashlsi3 (new_src, source, shift));
901 emit_insn (gen_lshrsi3 (target, new_src, shift));
e2c671ba
RE
902 }
903
e2c671ba
RE
904 return insns + 2;
905 }
906
907 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
908 {
909 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
e2c671ba
RE
910
911 if ((remainder | shift_mask) != 0xffffffff)
912 {
2b835d68
RE
913 if (generate)
914 {
d499463f
RE
915 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
916
2b835d68 917 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
d499463f
RE
918 new_src, source, subtargets, 1);
919 source = new_src;
2b835d68
RE
920 }
921 else
d499463f
RE
922 {
923 rtx targ = subtargets ? NULL_RTX : target;
924
925 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
926 targ, source, subtargets, 0);
927 }
2b835d68
RE
928 }
929
930 if (generate)
931 {
d499463f
RE
932 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
933 rtx shift = GEN_INT (clear_zero_bit_copies);
934
935 emit_insn (gen_lshrsi3 (new_src, source, shift));
936 emit_insn (gen_ashlsi3 (target, new_src, shift));
e2c671ba
RE
937 }
938
e2c671ba
RE
939 return insns + 2;
940 }
941
942 break;
943
944 default:
945 break;
946 }
947
948 for (i = 0; i < 32; i++)
949 if (remainder & (1 << i))
950 num_bits_set++;
951
952 if (code == AND || (can_invert && num_bits_set > 16))
953 remainder = (~remainder) & 0xffffffff;
954 else if (code == PLUS && num_bits_set > 16)
955 remainder = (-remainder) & 0xffffffff;
956 else
957 {
958 can_invert = 0;
959 can_negate = 0;
960 }
961
962 /* Now try and find a way of doing the job in either two or three
963 instructions.
964 We start by looking for the largest block of zeros that are aligned on
965 a 2-bit boundary, we then fill up the temps, wrapping around to the
966 top of the word when we drop off the bottom.
967 In the worst case this code should produce no more than four insns. */
968 {
969 int best_start = 0;
970 int best_consecutive_zeros = 0;
971
972 for (i = 0; i < 32; i += 2)
973 {
974 int consecutive_zeros = 0;
975
976 if (! (remainder & (3 << i)))
977 {
978 while ((i < 32) && ! (remainder & (3 << i)))
979 {
980 consecutive_zeros += 2;
981 i += 2;
982 }
983 if (consecutive_zeros > best_consecutive_zeros)
984 {
985 best_consecutive_zeros = consecutive_zeros;
986 best_start = i - consecutive_zeros;
987 }
988 i -= 2;
989 }
990 }
991
992 /* Now start emitting the insns, starting with the one with the highest
993 bit set: we do this so that the smallest number will be emitted last;
994 this is more likely to be combinable with addressing insns. */
995 i = best_start;
996 do
997 {
998 int end;
999
1000 if (i <= 0)
1001 i += 32;
1002 if (remainder & (3 << (i - 2)))
1003 {
1004 end = i - 8;
1005 if (end < 0)
1006 end += 32;
1007 temp1 = remainder & ((0x0ff << end)
1008 | ((i < end) ? (0xff >> (32 - end)) : 0));
1009 remainder &= ~temp1;
1010
d499463f 1011 if (generate)
e2c671ba 1012 {
d499463f
RE
1013 rtx new_src;
1014
1015 if (code == SET)
2b835d68
RE
1016 emit_insn (gen_rtx (SET, VOIDmode,
1017 new_src = (subtargets
1018 ? gen_reg_rtx (mode)
1019 : target),
1020 GEN_INT (can_invert ? ~temp1 : temp1)));
d499463f 1021 else if (code == MINUS)
2b835d68
RE
1022 emit_insn (gen_rtx (SET, VOIDmode,
1023 new_src = (subtargets
1024 ? gen_reg_rtx (mode)
1025 : target),
1026 gen_rtx (code, mode, GEN_INT (temp1),
1027 source)));
d499463f 1028 else
2b835d68
RE
1029 emit_insn (gen_rtx (SET, VOIDmode,
1030 new_src = (remainder
1031 ? (subtargets
1032 ? gen_reg_rtx (mode)
1033 : target)
1034 : target),
1035 gen_rtx (code, mode, source,
1036 GEN_INT (can_invert ? ~temp1
1037 : (can_negate
1038 ? -temp1
1039 : temp1)))));
d499463f 1040 source = new_src;
e2c671ba
RE
1041 }
1042
d499463f
RE
1043 if (code == SET)
1044 {
1045 can_invert = 0;
1046 code = PLUS;
1047 }
1048 else if (code == MINUS)
1049 code = PLUS;
1050
e2c671ba 1051 insns++;
e2c671ba
RE
1052 i -= 6;
1053 }
1054 i -= 2;
1055 } while (remainder);
1056 }
1057 return insns;
1058}
1059
bd9c7e23
RE
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  CODE is the comparison code and *OP1
   its constant second operand; the (possibly changed) code is returned
   and *OP1 may be rewritten to an adjacent constant.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      /* Nothing to be gained for (in)equality tests.  */
      return code;

    case GT:
    case LE:
      /* x > C is x >= C+1.  Avoid the most positive value (C+1 would
	 wrap) and only do it when C+1 or -(C+1) is a loadable ARM
	 immediate.  */
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
		- 1)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i+1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      /* x >= C is x > C-1.  Avoid the most negative value.  */
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
	{
	  *op1 = GEN_INT (i-1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      /* Unsigned: x > C is x >= C+1, unless C is all ones.  */
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      /* Unsigned: x >= C is x > C-1, unless C is zero.  */
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
1123
1124
2b835d68
RE
/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  Return nonzero if TYPE
   must be returned in memory rather than in a register.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
	 bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) != FIELD_DECL
	    || ! DECL_BIT_FIELD_TYPE (field))
	  return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL
	      || (AGGREGATE_TYPE_P (TREE_TYPE (field))
		  && RETURN_IN_MEMORY (TREE_TYPE (field)))
	      || FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;
	}
      return 0;
    }
  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}
1164
32de079a
RE
1165int
1166legitimate_pic_operand_p (x)
1167 rtx x;
1168{
1169 if (CONSTANT_P (x) && flag_pic
1170 && (GET_CODE (x) == SYMBOL_REF
1171 || (GET_CODE (x) == CONST
1172 && GET_CODE (XEXP (x, 0)) == PLUS
1173 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1174 return 0;
1175
1176 return 1;
1177}
1178
/* Convert ORIG, a PIC-sensitive address in mode MODE, into a form that
   is legitimate when generating PIC code, emitting any insns needed to
   compute it.  REG, if nonzero, is a register to use as a scratch and
   result; if zero a new pseudo is allocated (which is only valid before
   reload).  Returns the legitimized address.  */
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  /* We cannot create new pseudos during reload.  */
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.
	 */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      /* Use a separate pseudo for the GOT offset when we allocated REG
	 ourselves, so the load and the dereference can be scheduled
	 independently.  */
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      /* GOT entries never change once the program is running, so the
	 load may be hoisted out of loops (RTX_UNCHANGING_P).  */
      pic_ref = gen_rtx (MEM, Pmode,
			 gen_rtx (PLUS, Pmode, pic_offset_table_rtx, address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, orig,
				  REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      /* An address that is already PIC-register-relative needs no work.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      /* Legitimize the two halves of (symbol + offset) separately.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  GO_IF_LEGITIMATE_INDEX
	     jumps to `win' when the constant offset is usable as-is.  */
	  GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      /* Multi-word values cannot use a PLUS address directly; add the
	 parts into REG and use that instead.  */
      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx (PLUS, Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}
1282
1283static rtx pic_rtx;
1284
1285int
1286is_pic(x)
1287 rtx x;
1288{
1289 if (x == pic_rtx)
1290 return 1;
1291 return 0;
1292}
1293
/* Emit, at the start of the current function, the code that loads the
   PIC base register with the address of the global offset table.  Does
   nothing unless the function actually uses the PIC offset table.  */
void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx (SYMBOL_REF, Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* The PC contains 'dot'+8, but the label L1 is on the next
     instruction, so the offset is only 'dot'+4.  */
  pic_tmp = gen_rtx (CONST, VOIDmode,
		     gen_rtx (PLUS, Pmode,
			      gen_rtx (LABEL_REF, VOIDmode, l1),
			      GEN_INT (4)));
  pic_tmp2 = gen_rtx (CONST, VOIDmode,
		      gen_rtx (PLUS, Pmode,
			       global_offset_table,
			       pc_rtx));

  /* pic_rtx = &GOT - (L1 + 4), i.e. the GOT displacement relative to
     the PC value that will be live at L1.  */
  pic_rtx = gen_rtx (CONST, Pmode,
		     gen_rtx (MINUS, Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_jump_insn (gen_pic_add_dot_plus_eight(l1, pic_offset_table_rtx));
  emit_label (l1);

  /* Place the whole sequence at the very start of the function.  */
  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx (USE, VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
1338
e2c671ba
RE
/* Nonzero if X is a register, or a SUBREG of a register.  */
#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

/* The register underlying X, which satisfies REG_OR_SUBREG_REG.  */
#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

/* Nonzero if X is one of the frame-related registers.  */
#define ARM_FRAME_RTX(X)				\
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx	\
   || (X) == arg_pointer_rtx)

/* Return an estimate of the cost of computing rtx X, whose top code is
   CODE, appearing inside an expression with code OUTER_CODE.  Used to
   implement RTX_COSTS.  */
int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      /* No hardware divide; these expand to library calls or loops.  */
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      /* Shifts: free when folded into a data-processing insn; operands
	 that are not registers (or constant shift counts) cost extra.  */
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
		       && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      /* RSB with an immediate, or a subtract whose second operand is a
	 shift (or a multiply by a power of two, which is a shift in
	 disguise), folds into a single insn.  */
      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			     (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;
	  /* Tune as appropriate */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  /* Each Booth step retires booth_unit_size bits of the constant;
	     charge 2 per step actually needed.  */
	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      /* (truncate (lshiftrt (mult (extend) (extend)))) is the high word
	 of a widening multiply -- a single insn on fast-multiply cores.  */
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    default:
      return 99;
    }
}
32de079a
RE
1580
1581int
1582arm_adjust_cost (insn, link, dep, cost)
1583 rtx insn;
1584 rtx link;
1585 rtx dep;
1586 int cost;
1587{
1588 rtx i_pat, d_pat;
1589
1590 if ((i_pat = single_set (insn)) != NULL
1591 && GET_CODE (SET_SRC (i_pat)) == MEM
1592 && (d_pat = single_set (dep)) != NULL
1593 && GET_CODE (SET_DEST (d_pat)) == MEM)
1594 {
1595 /* This is a load after a store, there is no conflict if the load reads
1596 from a cached area. Assume that loads from the stack, and from the
1597 constant pool are cached, and that others will miss. This is a
1598 hack. */
1599
1600/* debug_rtx (insn);
1601 debug_rtx (dep);
1602 debug_rtx (link);
1603 fprintf (stderr, "costs %d\n", cost); */
1604
1605 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
1606 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1607 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1608 || reg_mentioned_p (hard_frame_pointer_rtx,
1609 XEXP (SET_SRC (i_pat), 0)))
1610 {
1611/* fprintf (stderr, "***** Now 1\n"); */
1612 return 1;
1613 }
1614 }
1615
1616 return cost;
1617}
1618
ff9940b0
RE
/* This code has been fixed for cross compilation. */

/* Nonzero once values_fpa has been filled in.  */
static int fpa_consts_inited = 0;

/* Decimal spellings of the eight constants the FPA can encode as
   immediate operands.  */
char *strings_fpa[8] = {
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

/* The same eight constants in REAL_VALUE_TYPE form, parsed on demand
   by init_fpa_table.  */
static REAL_VALUE_TYPE values_fpa[8];

/* Parse strings_fpa into values_fpa and mark the table initialized.
   Called lazily from the const_double predicates below.  */
static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}
1644
cce8749e
CH
/* Return TRUE if rtx X is a valid immediate FPU constant, i.e. equals
   one of the eight values the FPA can encode directly (and is not
   minus zero).  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  /* -0.0 compares equal to 0.0 but has a different encoding; reject it.  */
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
ff9940b0
RE
1667
/* Return TRUE if the negation of rtx X is a valid immediate FPU
   constant (usable with the negating forms of the FPA instructions).  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  /* -0.0 compares equal to 0.0 but has a different encoding; reject it.  */
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
cce8749e
CH
1691\f
/* Predicates for `match_operand' and `match_operator'. */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading. */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand. */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1719
e2c671ba
RE
1720/* Only accept reg, subreg(reg), const_int. */
1721
1722int
1723reg_or_int_operand (op, mode)
1724 register rtx op;
1725 enum machine_mode mode;
1726{
1727 if (GET_CODE (op) == CONST_INT)
1728 return 1;
1729
1730 if (GET_MODE (op) != mode && mode != VOIDmode)
1731 return 0;
1732
1733 if (GET_CODE (op) == SUBREG)
1734 op = SUBREG_REG (op);
1735
1736 /* We don't consider registers whose class is NO_REGS
1737 to be a register operand. */
1738 return (GET_CODE (op) == REG
1739 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1740 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1741}
1742
ff9940b0
RE
/* Return 1 if OP is an item in memory, given that we are in reload.
   Anything that is not a constant and has no hard register number --
   or is a pseudo that did not get a hard register -- will live in
   memory at this point.  MODE is unused.  */

int
reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int regno = true_regnum (op);

  return (! CONSTANT_P (op)
	  && (regno == -1
	      || (GET_CODE (op) == REG
		  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}
1757
4d818c85
RE
/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4) */
int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && ! s_register_operand (XEXP (op, 0), VOIDmode))
    return 1;

  /* Big constants are also bad: the signed-byte forms only take an
     8-bit offset.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
	  || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so. */
  return 0;
}
1784
cce8749e
CH
/* Return TRUE for valid operands for the rhs of an ARM instruction:
   a register, or a constant representable as an immediate operand.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}
cce8749e 1795
ff9940b0
RE
/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
 */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
	  || memory_operand (op, mode));
}
ff9940b0
RE
1808
/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
   constant that is valid when negated (an ADD can then become a SUB).  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (-INTVAL (op)))));
}
ff9940b0
RE
1822
/* Return TRUE for a register, or a constant that is valid either
   directly or when bitwise-inverted (a MOV can then become an MVN).  */
int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (~INTVAL (op)))));
}
ff9940b0 1833
5165176d
RE
/* Return TRUE if the operand is a memory reference which contains an
   offsettable address. */
int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* During or after reload only already-strict addresses qualify, hence
     the strictness flag passed to offsettable_address_p.  */
  return (mode == GET_MODE (op)
	  && GET_CODE (op) == MEM
	  && offsettable_address_p (reload_completed | reload_in_progress,
				    mode, XEXP (op, 0)));
}
1849
/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset. */
int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* Accept REG, SUBREG(REG), or PLUS(REG, CONST_INT) -- in each case the
     embedded assignment leaves REG pointing at the base register whose
     alignment we then test.  */
  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
}
1877
b111229a
RE
/* Similar to s_register_operand, but does not allow hard integer
   registers: only pseudos and FPU-class hard registers qualify.  */
int
f_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand. */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}
1897
cce8749e
CH
1898/* Return TRUE for valid operands for the rhs of an FPU instruction. */
1899
1900int
1901fpu_rhs_operand (op, mode)
1902 rtx op;
1903 enum machine_mode mode;
1904{
ff9940b0 1905 if (s_register_operand (op, mode))
f3bb6135 1906 return TRUE;
cce8749e
CH
1907 else if (GET_CODE (op) == CONST_DOUBLE)
1908 return (const_double_rtx_ok_for_fpu (op));
f3bb6135
RE
1909
1910 return FALSE;
1911}
cce8749e 1912
ff9940b0
RE
/* Return TRUE for valid operands of an FPU add: a register, or a
   CONST_DOUBLE the FPA can encode either directly or negated (the add
   can then use the subtracting form).  */
int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
	    || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}
1926
cce8749e
CH
1927/* Return nonzero if OP is a constant power of two. */
1928
1929int
1930power_of_two_operand (op, mode)
1931 rtx op;
1932 enum machine_mode mode;
1933{
1934 if (GET_CODE (op) == CONST_INT)
1935 {
f3bb6135
RE
1936 HOST_WIDE_INT value = INTVAL(op);
1937 return value != 0 && (value & (value - 1)) == 0;
cce8749e 1938 }
f3bb6135
RE
1939 return FALSE;
1940}
cce8749e
CH
1941
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)). */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
cce8749e 1968
f3139301
DE
/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)). */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
1994
cce8749e
CH
/* Return TRUE for valid index operands: a register, or an immediate in
   the 12-bit range (-4096, 4096) exclusive.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand(op, mode)
	  || (immediate_operand (op, mode)
	      && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}
cce8749e 2006
ff9940b0
RE
/* Return TRUE for valid shifts by a constant. This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult. */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (INTVAL (op) < 32 && INTVAL (op) > 0)));
}
ff9940b0 2020
cce8749e
CH
2021/* Return TRUE for arithmetic operators which can be combined with a multiply
2022 (shift). */
2023
2024int
2025shiftable_operator (x, mode)
2026 rtx x;
2027 enum machine_mode mode;
2028{
2029 if (GET_MODE (x) != mode)
2030 return FALSE;
2031 else
2032 {
2033 enum rtx_code code = GET_CODE (x);
2034
2035 return (code == PLUS || code == MINUS
2036 || code == IOR || code == XOR || code == AND);
2037 }
f3bb6135 2038}
cce8749e
CH
2039
2040/* Return TRUE for shift operators. */
2041
2042int
2043shift_operator (x, mode)
2044 rtx x;
2045 enum machine_mode mode;
2046{
2047 if (GET_MODE (x) != mode)
2048 return FALSE;
2049 else
2050 {
2051 enum rtx_code code = GET_CODE (x);
2052
ff9940b0
RE
2053 if (code == MULT)
2054 return power_of_two_operand (XEXP (x, 1));
f3bb6135 2055
e2c671ba
RE
2056 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2057 || code == ROTATERT);
cce8749e 2058 }
f3bb6135 2059}
ff9940b0
RE
2060
2061int equality_operator (x, mode)
f3bb6135
RE
2062 rtx x;
2063 enum machine_mode mode;
ff9940b0 2064{
f3bb6135 2065 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
2066}
2067
/* Return TRUE for SMIN SMAX UMIN UMAX operators. */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}
ff9940b0
RE
2082
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  /* NOTE(review): 24 is assumed to be the hard register number of the
     condition-code register in this port -- confirm against the
     register definitions in the target header.  */
  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
5bbe2d40
RE
2105
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression. */

int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  /* Only the CC modes produced by combining two comparisons into a
     dominating one qualify.  */
  if (mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  /* NOTE(review): 24 is assumed to be the hard register number of the
     condition-code register in this port -- confirm against the
     register definitions in the target header.  */
  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
2134
2b835d68
RE
2135/* Return TRUE if X references a SYMBOL_REF. */
2136int
2137symbol_mentioned_p (x)
2138 rtx x;
2139{
2140 register char *fmt;
2141 register int i;
2142
2143 if (GET_CODE (x) == SYMBOL_REF)
2144 return 1;
2145
2146 fmt = GET_RTX_FORMAT (GET_CODE (x));
2147 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2148 {
2149 if (fmt[i] == 'E')
2150 {
2151 register int j;
2152
2153 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2154 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2155 return 1;
2156 }
2157 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2158 return 1;
2159 }
2160
2161 return 0;
2162}
2163
2164/* Return TRUE if X references a LABEL_REF. */
2165int
2166label_mentioned_p (x)
2167 rtx x;
2168{
2169 register char *fmt;
2170 register int i;
2171
2172 if (GET_CODE (x) == LABEL_REF)
2173 return 1;
2174
2175 fmt = GET_RTX_FORMAT (GET_CODE (x));
2176 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2177 {
2178 if (fmt[i] == 'E')
2179 {
2180 register int j;
2181
2182 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2183 if (label_mentioned_p (XVECEXP (x, i, j)))
2184 return 1;
2185 }
2186 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2187 return 1;
2188 }
2189
2190 return 0;
2191}
2192
ff9940b0
RE
2193enum rtx_code
2194minmax_code (x)
f3bb6135 2195 rtx x;
ff9940b0
RE
2196{
2197 enum rtx_code code = GET_CODE (x);
2198
2199 if (code == SMAX)
2200 return GE;
f3bb6135 2201 else if (code == SMIN)
ff9940b0 2202 return LE;
f3bb6135 2203 else if (code == UMIN)
ff9940b0 2204 return LEU;
f3bb6135 2205 else if (code == UMAX)
ff9940b0 2206 return GEU;
f3bb6135 2207
ff9940b0
RE
2208 abort ();
2209}
2210
2211/* Return 1 if memory locations are adjacent */
2212
f3bb6135 2213int
ff9940b0
RE
2214adjacent_mem_locations (a, b)
2215 rtx a, b;
2216{
2217 int val0 = 0, val1 = 0;
2218 int reg0, reg1;
2219
2220 if ((GET_CODE (XEXP (a, 0)) == REG
2221 || (GET_CODE (XEXP (a, 0)) == PLUS
2222 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2223 && (GET_CODE (XEXP (b, 0)) == REG
2224 || (GET_CODE (XEXP (b, 0)) == PLUS
2225 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
2226 {
2227 if (GET_CODE (XEXP (a, 0)) == PLUS)
2228 {
2229 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2230 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2231 }
2232 else
2233 reg0 = REGNO (XEXP (a, 0));
2234 if (GET_CODE (XEXP (b, 0)) == PLUS)
2235 {
2236 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2237 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2238 }
2239 else
2240 reg1 = REGNO (XEXP (b, 0));
2241 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2242 }
2243 return 0;
2244}
2245
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  MODE is unused but
   required by the predicate calling convention.

   Accepts an optional leading write-back SET (base := base + const)
   paired with a trailing CLOBBER of the base register, followed by a
   run of SImode register loads from consecutive word offsets off a
   common source address.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  /* Need at least two elements, the first of which must be a SET.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the write-back SET must add
	 (count - 2) * 4 to the base register, and the last element
	 must clobber that same register.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	     != REGNO (SET_DEST (elt)))
	return 0;

      /* Exclude the trailing CLOBBER from the load scan below.  */
      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  /* First load fixes the starting destination register and the base
     address; the remaining loads must follow on from these.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      /* Each element: consecutive register loaded from the next word.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i - base
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
2314
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  MODE is unused but
   required by the predicate calling convention.

   Mirror image of load_multiple_operation: an optional write-back SET
   plus trailing CLOBBER, then a run of SImode register stores to
   consecutive word offsets off a common destination address.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  /* Need at least two elements, the first of which must be a SET.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the write-back SET must add
	 (count - 2) * 4 to the base register, and the last element
	 must clobber that same register.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	     != REGNO (SET_DEST (elt)))
	return 0;

      /* Exclude the trailing CLOBBER from the store scan below.  */
      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  /* First store fixes the starting source register and the base
     address; the remaining stores must follow on from these.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      /* Each element: consecutive register stored to the next word.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i - base
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
e2c671ba 2383
84ed5e79
RE
/* Examine NOPS register-load operations (registers in OPERANDS[0..nops-1],
   memory references in OPERANDS[nops..2*nops-1]) and decide whether they
   can be combined into one load-multiple instruction.

   Returns 0 if not possible, otherwise a code identifying the addressing
   mode: 1 = ldmia, 2 = ldmib, 3 = ldmda, 4 = ldmdb, 5 = ldmia after an
   add/sub that sets up the base.  If BASE is non-null, also fills in
   REGS with the registers sorted into ascending order, *BASE with the
   base register number, and *LOAD_OFFSET with the lowest memory offset.  */
int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx *operands;
     int nops;
     int *regs;
     int *base;
     HOST_WIDE_INT *load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Address must be a (possibly subreg'd) register, or register
	 plus constant offset.  The embedded assignments capture the
	 base register in REG and the offset in OFFSET as we test.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO(reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else 
	    {
	      if (base_reg != REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, or if it overwrites the
	     base register but isn't the last insn in the list, then
	     we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort: find the next-larger register number.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]]) 
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
2526
/* Emit the assembler for a peephole-combined load-multiple covering
   NOPS loads described by OPERANDS (see load_multiple_sequence for the
   operand layout).  Aborts if the sequence does not qualify; callers
   are expected to have checked via load_multiple_sequence first.
   Returns "" since the output is emitted directly.  */
char *
emit_ldm_seq (operands, nops)
     rtx *operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  /* Pick the ldm addressing mode matching the sequence code.  */
  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      /* Offset doesn't match any ldm mode directly: materialize the
	 address in the lowest destination register first, then ldmia
	 from that.  */
      if (offset >= 0)
	sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) offset);
      else
	sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  /* Append "base, {r1, r2, ...}" register list.  */
  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX, 
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
2586
/* Examine NOPS register-store operations (registers in OPERANDS[0..nops-1],
   memory references in OPERANDS[nops..2*nops-1]) and decide whether they
   can be combined into one store-multiple instruction.

   Returns 0 if not possible, otherwise: 1 = stmia, 2 = stmib, 3 = stmda,
   4 = stmdb.  (Unlike the load case there is no add/sub fallback.)
   If BASE is non-null, also fills in REGS, *BASE and *LOAD_OFFSET as for
   load_multiple_sequence.  */
int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx *operands;
     int nops;
     int *regs;
     int *base;
     HOST_WIDE_INT *load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Address must be a (possibly subreg'd) register, or register
	 plus constant offset; REG and OFFSET are captured by the
	 embedded assignments as the tests proceed.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO(reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else 
	    {
	      if (base_reg != REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, then we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort: find the next-larger register number.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}
2723
/* Emit the assembler for a peephole-combined store-multiple covering
   NOPS stores described by OPERANDS (see store_multiple_sequence for
   the operand layout).  Aborts if the sequence does not qualify.
   Returns "" since the output is emitted directly.  */
char *
emit_stm_seq (operands, nops)
     rtx *operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  /* Pick the stm addressing mode matching the sequence code.  */
  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      abort ();
    }

  /* Append "base, {r1, r2, ...}" register list.  */
  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX, 
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}
2769
e2c671ba
RE
2770int
2771multi_register_push (op, mode)
0a81f500
RE
2772 rtx op;
2773 enum machine_mode mode;
e2c671ba
RE
2774{
2775 if (GET_CODE (op) != PARALLEL
2776 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2777 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2778 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2779 return 0;
2780
2781 return 1;
2782}
2783
ff9940b0 2784\f
f3bb6135
RE
2785/* Routines for use with attributes */
2786
31fdb4d5
DE
2787/* Return nonzero if ATTR is a valid attribute for DECL.
2788 ATTRIBUTES are any existing attributes and ARGS are the arguments
2789 supplied with ATTR.
2790
2791 Supported attributes:
2792
2793 naked: don't output any prologue or epilogue code, the user is assumed
2794 to do the right thing. */
2795
2796int
2797arm_valid_machine_decl_attribute (decl, attributes, attr, args)
2798 tree decl;
2799 tree attributes;
2800 tree attr;
2801 tree args;
2802{
2803 if (args != NULL_TREE)
2804 return 0;
2805
2806 if (is_attribute_p ("naked", attr))
2807 return TREE_CODE (decl) == FUNCTION_DECL;
2808 return 0;
2809}
2810
2811/* Return non-zero if FUNC is a naked function. */
2812
2813static int
2814arm_naked_function_p (func)
2815 tree func;
2816{
2817 tree a;
2818
2819 if (TREE_CODE (func) != FUNCTION_DECL)
2820 abort ();
2821
2822 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2823 return a != NULL_TREE;
2824}
f3bb6135 2825\f
ff9940b0
RE
2826/* Routines for use in generating RTL */
2827
f3bb6135 2828rtx
56636818
JL
2829arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
2830 in_struct_p)
ff9940b0
RE
2831 int base_regno;
2832 int count;
2833 rtx from;
2834 int up;
2835 int write_back;
56636818
JL
2836 int unchanging_p;
2837 int in_struct_p;
ff9940b0
RE
2838{
2839 int i = 0, j;
2840 rtx result;
2841 int sign = up ? 1 : -1;
56636818 2842 rtx mem;
ff9940b0
RE
2843
2844 result = gen_rtx (PARALLEL, VOIDmode,
2845 rtvec_alloc (count + (write_back ? 2 : 0)));
2846 if (write_back)
f3bb6135 2847 {
ff9940b0 2848 XVECEXP (result, 0, 0)
f3bb6135
RE
2849 = gen_rtx (SET, GET_MODE (from), from,
2850 plus_constant (from, count * 4 * sign));
ff9940b0
RE
2851 i = 1;
2852 count++;
f3bb6135
RE
2853 }
2854
ff9940b0 2855 for (j = 0; i < count; i++, j++)
f3bb6135 2856 {
56636818
JL
2857 mem = gen_rtx (MEM, SImode, plus_constant (from, j * 4 * sign));
2858 RTX_UNCHANGING_P (mem) = unchanging_p;
2859 MEM_IN_STRUCT_P (mem) = in_struct_p;
2860
2861 XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode,
2862 gen_rtx (REG, SImode, base_regno + j),
2863 mem);
f3bb6135
RE
2864 }
2865
ff9940b0
RE
2866 if (write_back)
2867 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
2868
2869 return result;
2870}
2871
f3bb6135 2872rtx
56636818
JL
2873arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
2874 in_struct_p)
ff9940b0
RE
2875 int base_regno;
2876 int count;
2877 rtx to;
2878 int up;
2879 int write_back;
56636818
JL
2880 int unchanging_p;
2881 int in_struct_p;
ff9940b0
RE
2882{
2883 int i = 0, j;
2884 rtx result;
2885 int sign = up ? 1 : -1;
56636818 2886 rtx mem;
ff9940b0
RE
2887
2888 result = gen_rtx (PARALLEL, VOIDmode,
2889 rtvec_alloc (count + (write_back ? 2 : 0)));
2890 if (write_back)
f3bb6135 2891 {
ff9940b0 2892 XVECEXP (result, 0, 0)
f3bb6135
RE
2893 = gen_rtx (SET, GET_MODE (to), to,
2894 plus_constant (to, count * 4 * sign));
ff9940b0
RE
2895 i = 1;
2896 count++;
f3bb6135
RE
2897 }
2898
ff9940b0 2899 for (j = 0; i < count; i++, j++)
f3bb6135 2900 {
56636818
JL
2901 mem = gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign));
2902 RTX_UNCHANGING_P (mem) = unchanging_p;
2903 MEM_IN_STRUCT_P (mem) = in_struct_p;
2904
2905 XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode, mem,
2906 gen_rtx (REG, SImode, base_regno + j));
f3bb6135
RE
2907 }
2908
ff9940b0
RE
2909 if (write_back)
2910 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
2911
2912 return result;
2913}
2914
880e2516
RE
/* Expand a block move (movstrqi).  OPERANDS[0]/[1] are the destination
   and source MEMs, OPERANDS[2] the byte count, OPERANDS[3] the
   alignment.  Returns 1 on success, 0 if the parameters are unsuitable
   (non-constant count/alignment, more than 64 bytes, or alignment not
   a multiple of 4), in which case no insns have been emitted.

   Strategy: copy whole words with load/store-multiple through r0-r3,
   then move any trailing 1-3 bytes a byte at a time, shifting them out
   of a holding register.  */
int
arm_gen_movstrqi (operands)
     rtx *operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  /* Capture the memory attributes so every MEM we build below can
     carry the same aliasing information.  */
  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  /* Words to load (rounded up), words to store (rounded down), and the
     leftover byte count.  */
  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  /* When the final partial word is loaded as part of a multi-word load,
     it lands in this fixed scratch register.  */
  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);

  /* Copy up to four words per iteration through registers r0-r3, using
     write-back addressing except on the final transfer.  */
  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  src_unchanging_p, src_in_struct_p));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE, 
					  FALSE, src_unchanging_p,
					  src_in_struct_p));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dst_unchanging_p,
					       dst_in_struct_p));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE, 
					       (last_bytes == 0
						? FALSE : TRUE),
					       dst_unchanging_p,
					       dst_in_struct_p));
	  else
	    {
	      /* A single word: plain store, advancing DST by hand only
		 if trailing bytes still need it.  */
	      mem = gen_rtx (MEM, SImode, dst);
	      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	      emit_move_insn (mem, gen_rtx (REG, SImode, 0));
	      if (last_bytes != 0)
		emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      mem = gen_rtx (MEM, SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx (MEM, SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check */
	abort ();
    }

  /* One word still to load: it holds the trailing bytes.  */
  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      mem = gen_rtx (MEM, SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
	abort ();

      /* The bytes we want are in the top end of the word */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      /* Store bytes from highest address downwards, shifting the next
	 byte into the low end each time.  */
      while (last_bytes)
	{
	  mem = gen_rtx (MEM, QImode, plus_constant (dst, last_bytes - 1));
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
	  
    }
  else
    {
      /* Little-endian: store the low byte, shift right, advance DST.  */
      while (last_bytes)
	{
	  if (part_bytes_reg == NULL)
	    abort ();

	  mem = gen_rtx (MEM, QImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (dst, dst, const1_rtx));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }

  return 1;
}
3073
5165176d
RE
/* Generate a memory reference for a half word, such that it will be loaded
   into the top 16 bits of the word.  We can assume that the address is
   known to be alignable and of the form reg, or plus (reg, const).
   Returns NULL if the target forbids the required unaligned access.  */
rtx
gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  /* Split reg+const addresses into their parts.  */
  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail.  */
  if (TARGET_SHORT_BY_BYTES
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  /* Load the whole word containing the halfword (offset rounded down
     to a word boundary).  */
  base = gen_rtx (MEM, SImode, plus_constant (base, offset & ~2));

  /* If the wanted halfword already sits in the top half for this
     endianness, the plain word load suffices; otherwise rotate it up.  */
  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  return gen_rtx (ROTATE, SImode, base, GEN_INT (16));
}
3102
84ed5e79
RE
/* Select the CC mode for a conditional-compare combining the two
   comparisons X and Y.  COND_OR nonzero means either condition may
   hold (the IF_THEN_ELSE false arm was const1_rtx); zero means both
   must hold.  Returns CCmode when no dominance mode applies.  */
static enum machine_mode
select_dominance_cc_mode (op, x, y, cond_or)
     enum rtx_code op;
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
	  != CCmode))
    return CCmode;

  /* An OR of two conditions is handled as the AND of their negations;
     reverse the first condition accordingly.  */
  if (cond_or)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this.  */
  if (cond1 != cond2 
      && ! comparison_dominates_p (cond1, cond2)
      && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
    return CCmode;

  /* Ensure COND1 is the dominating condition.  */
  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || ! cond_or)
	return CC_DEQmode;

      switch (cond2)
	{
	case LE: return CC_DLEmode;
	case LEU: return CC_DLEUmode;
	case GE: return CC_DGEmode;
	case GEU: return CC_DGEUmode;
	default: break;
	}

      break;

    case LT:
      if (cond2 == LT || ! cond_or)
	return CC_DLTmode;
      if (cond2 == LE)
	return CC_DLEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || ! cond_or)
	return CC_DGTmode;
      if (cond2 == GE)
	return CC_DGEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || ! cond_or)
	return CC_DLTUmode;
      if (cond2 == LEU)
	return CC_DLEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || ! cond_or)
	return CC_DGTUmode;
      if (cond2 == GEU)
	return CC_DGEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same.  */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}
3215
/* Choose the machine mode for the condition-code register when
   comparing X against Y with operation OP.  Encodes in the mode which
   flags the comparison actually sets correctly, so that later passes
   know which conditions may be tested.  */
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
  
  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a 
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* An operation that sets the condition codes as a side-effect, the
     V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1), 
				     INTVAL (XEXP (x, 2)));

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  /* Unsigned overflow test of an addition: only the carry flag is
     meaningful.  */
  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
3288
ff9940b0
RE
3289/* X and Y are two things to compare using CODE. Emit the compare insn and
3290 return the rtx for register 0 in the proper mode. FP means this is a
3291 floating point compare: I don't think that it is needed on the arm. */
3292
3293rtx
3294gen_compare_reg (code, x, y, fp)
3295 enum rtx_code code;
3296 rtx x, y;
ed4c4348 3297 int fp;
ff9940b0
RE
3298{
3299 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
3300 rtx cc_reg = gen_rtx (REG, mode, 24);
3301
3302 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
3303 gen_rtx (COMPARE, mode, x, y)));
3304
3305 return cc_reg;
3306}
3307
0a81f500
RE
3308void
3309arm_reload_in_hi (operands)
3310 rtx *operands;
3311{
3312 rtx base = find_replacement (&XEXP (operands[1], 0));
3313
3314 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
e5e809f4
JL
3315 /* Handle the case where the address is too complex to be offset by 1. */
3316 if (GET_CODE (base) == MINUS
3317 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
3318 {
3319 rtx base_plus = gen_rtx (REG, SImode, REGNO (operands[0]));
3320
3321 emit_insn (gen_rtx (SET, VOIDmode, base_plus, base));
3322 base = base_plus;
3323 }
3324
0a81f500
RE
3325 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
3326 gen_rtx (MEM, QImode,
3327 plus_constant (base, 1))));
3328 if (BYTES_BIG_ENDIAN)
3329 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3330 operands[0], 0),
3331 gen_rtx (IOR, SImode,
3332 gen_rtx (ASHIFT, SImode,
3333 gen_rtx (SUBREG, SImode,
3334 operands[0], 0),
3335 GEN_INT (8)),
3336 operands[2])));
3337 else
3338 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3339 operands[0], 0),
3340 gen_rtx (IOR, SImode,
3341 gen_rtx (ASHIFT, SImode,
3342 operands[2],
3343 GEN_INT (8)),
3344 gen_rtx (SUBREG, SImode, operands[0], 0))));
3345}
3346
f3bb6135 3347void
af48348a 3348arm_reload_out_hi (operands)
f3bb6135 3349 rtx *operands;
af48348a
RK
3350{
3351 rtx base = find_replacement (&XEXP (operands[0], 0));
3352
b5cc037f
RE
3353 if (BYTES_BIG_ENDIAN)
3354 {
3355 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3356 gen_rtx (SUBREG, QImode, operands[1], 0)));
3357 emit_insn (gen_lshrsi3 (operands[2],
3358 gen_rtx (SUBREG, SImode, operands[1], 0),
3359 GEN_INT (8)));
3360 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3361 gen_rtx (SUBREG, QImode, operands[2], 0)));
3362 }
3363 else
3364 {
3365 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3366 gen_rtx (SUBREG, QImode, operands[1], 0)));
3367 emit_insn (gen_lshrsi3 (operands[2],
3368 gen_rtx (SUBREG, SImode, operands[1], 0),
3369 GEN_INT (8)));
3370 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3371 gen_rtx (SUBREG, QImode, operands[2], 0)));
3372 }
af48348a 3373}
2b835d68
RE
3374\f
3375/* Routines for manipulation of the constant pool. */
3376/* This is unashamedly hacked from the version in sh.c, since the problem is
3377 extremely similar. */
3378
3379/* Arm instructions cannot load a large constant into a register,
3380 constants have to come from a pc relative load. The reference of a pc
3381 relative load instruction must be less than 1k infront of the instruction.
3382 This means that we often have to dump a constant inside a function, and
3383 generate code to branch around it.
3384
3385 It is important to minimize this, since the branches will slow things
3386 down and make things bigger.
3387
3388 Worst case code looks like:
3389
3390 ldr rn, L1
3391 b L2
3392 align
3393 L1: .long value
3394 L2:
3395 ..
3396
3397 ldr rn, L3
3398 b L4
3399 align
3400 L3: .long value
3401 L4:
3402 ..
3403
3404 We fix this by performing a scan before scheduling, which notices which
3405 instructions need to have their operands fetched from the constant table
3406 and builds the table.
3407
3408
3409 The algorithm is:
3410
3411 scan, find an instruction which needs a pcrel move. Look forward, find th
3412 last barrier which is within MAX_COUNT bytes of the requirement.
3413 If there isn't one, make one. Process all the instructions between
3414 the find and the barrier.
3415
3416 In the above example, we can tell that L3 is within 1k of L1, so
3417 the first move can be shrunk from the 2 insn+constant sequence into
3418 just 1 insn, and the constant moved to L3 to make:
3419
3420 ldr rn, L1
3421 ..
3422 ldr rn, L3
3423 b L4
3424 align
3425 L1: .long value
3426 L3: .long value
3427 L4:
3428
3429 Then the second move becomes the target for the shortening process.
3430
3431 */
3432
3433typedef struct
3434{
3435 rtx value; /* Value in table */
3436 HOST_WIDE_INT next_offset;
3437 enum machine_mode mode; /* Mode of value */
3438} pool_node;
3439
3440/* The maximum number of constants that can fit into one pool, since
3441 the pc relative range is 0...1020 bytes and constants are at least 4
3442 bytes long */
3443
3444#define MAX_POOL_SIZE (1020/4)
3445static pool_node pool_vector[MAX_POOL_SIZE];
3446static int pool_size;
3447static rtx pool_vector_label;
3448
332072db
RE
3449/* Add a constant to the pool and return its offset within the current
3450 pool.
3451
3452 X is the rtx we want to replace. MODE is its mode. On return,
3453 ADDRESS_ONLY will be non-zero if we really want the address of such
3454 a constant, not the constant itself. */
2b835d68 3455static HOST_WIDE_INT
332072db 3456add_constant (x, mode, address_only)
2b835d68
RE
3457 rtx x;
3458 enum machine_mode mode;
332072db 3459 int *address_only;
2b835d68
RE
3460{
3461 int i;
2b835d68
RE
3462 HOST_WIDE_INT offset;
3463
332072db 3464 *address_only = 0;
2b835d68
RE
3465 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
3466 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3467 x = get_pool_constant (XEXP (x, 0));
332072db
RE
3468 else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P(x))
3469 {
3470 *address_only = 1;
3471 x = get_pool_constant (x);
3472 }
2b835d68
RE
3473#ifndef AOF_ASSEMBLER
3474 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
3475 x = XVECEXP (x, 0, 0);
3476#endif
3477
32de079a
RE
3478#ifdef AOF_ASSEMBLER
3479 /* PIC Symbol references need to be converted into offsets into the
3480 based area. */
3481 if (flag_pic && GET_CODE (x) == SYMBOL_REF)
3482 x = aof_pic_entry (x);
3483#endif /* AOF_ASSEMBLER */
3484
2b835d68
RE
3485 /* First see if we've already got it */
3486 for (i = 0; i < pool_size; i++)
3487 {
3488 if (GET_CODE (x) == pool_vector[i].value->code
3489 && mode == pool_vector[i].mode)
3490 {
3491 if (GET_CODE (x) == CODE_LABEL)
3492 {
3493 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
3494 continue;
3495 }
3496 if (rtx_equal_p (x, pool_vector[i].value))
3497 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
3498 }
3499 }
3500
3501 /* Need a new one */
3502 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
3503 offset = 0;
3504 if (pool_size == 0)
3505 pool_vector_label = gen_label_rtx ();
3506 else
3507 pool_vector[pool_size].next_offset
3508 += (offset = pool_vector[pool_size - 1].next_offset);
3509
3510 pool_vector[pool_size].value = x;
3511 pool_vector[pool_size].mode = mode;
3512 pool_size++;
3513 return offset;
3514}
3515
3516/* Output the literal table */
3517static void
3518dump_table (scan)
3519 rtx scan;
3520{
3521 int i;
3522
3523 scan = emit_label_after (gen_label_rtx (), scan);
3524 scan = emit_insn_after (gen_align_4 (), scan);
3525 scan = emit_label_after (pool_vector_label, scan);
3526
3527 for (i = 0; i < pool_size; i++)
3528 {
3529 pool_node *p = pool_vector + i;
3530
3531 switch (GET_MODE_SIZE (p->mode))
3532 {
3533 case 4:
3534 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
3535 break;
3536
3537 case 8:
3538 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
3539 break;
3540
3541 default:
3542 abort ();
3543 break;
3544 }
3545 }
3546
3547 scan = emit_insn_after (gen_consttable_end (), scan);
3548 scan = emit_barrier_after (scan);
3549 pool_size = 0;
3550}
3551
3552/* Non zero if the src operand needs to be fixed up */
3553static int
3554fixit (src, mode, destreg)
3555 rtx src;
3556 enum machine_mode mode;
3557 int destreg;
3558{
3559 if (CONSTANT_P (src))
3560 {
3561 if (GET_CODE (src) == CONST_INT)
3562 return (! const_ok_for_arm (INTVAL (src))
3563 && ! const_ok_for_arm (~INTVAL (src)));
3564 if (GET_CODE (src) == CONST_DOUBLE)
3565 return (GET_MODE (src) == VOIDmode
3566 || destreg < 16
3567 || (! const_double_rtx_ok_for_fpu (src)
3568 && ! neg_const_double_rtx_ok_for_fpu (src)));
3569 return symbol_mentioned_p (src);
3570 }
3571#ifndef AOF_ASSEMBLER
3572 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
3573 return 1;
3574#endif
3575 else
3576 return (mode == SImode && GET_CODE (src) == MEM
3577 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
3578 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
3579}
3580
3581/* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
3582static rtx
3583find_barrier (from, max_count)
3584 rtx from;
3585 int max_count;
3586{
3587 int count = 0;
3588 rtx found_barrier = 0;
e5e809f4 3589 rtx last = from;
2b835d68
RE
3590
3591 while (from && count < max_count)
3592 {
3593 if (GET_CODE (from) == BARRIER)
a2538ff7 3594 return from;
2b835d68
RE
3595
3596 /* Count the length of this insn */
3597 if (GET_CODE (from) == INSN
3598 && GET_CODE (PATTERN (from)) == SET
3599 && CONSTANT_P (SET_SRC (PATTERN (from)))
3600 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
d499463f 3601 count += 8;
2b835d68
RE
3602 else
3603 count += get_attr_length (from);
3604
e5e809f4 3605 last = from;
2b835d68
RE
3606 from = NEXT_INSN (from);
3607 }
3608
3609 if (!found_barrier)
3610 {
3611 /* We didn't find a barrier in time to
3612 dump our stuff, so we'll make one */
3613 rtx label = gen_label_rtx ();
3614
3615 if (from)
e5e809f4 3616 from = PREV_INSN (last);
2b835d68
RE
3617 else
3618 from = get_last_insn ();
3619
3620 /* Walk back to be just before any jump */
3621 while (GET_CODE (from) == JUMP_INSN
3622 || GET_CODE (from) == NOTE
3623 || GET_CODE (from) == CODE_LABEL)
3624 from = PREV_INSN (from);
3625
3626 from = emit_jump_insn_after (gen_jump (label), from);
3627 JUMP_LABEL (from) = label;
3628 found_barrier = emit_barrier_after (from);
3629 emit_label_after (label, found_barrier);
3630 return found_barrier;
3631 }
3632
3633 return found_barrier;
3634}
3635
3636/* Non zero if the insn is a move instruction which needs to be fixed. */
3637static int
3638broken_move (insn)
3639 rtx insn;
3640{
3641 if (!INSN_DELETED_P (insn)
3642 && GET_CODE (insn) == INSN
3643 && GET_CODE (PATTERN (insn)) == SET)
3644 {
3645 rtx pat = PATTERN (insn);
3646 rtx src = SET_SRC (pat);
3647 rtx dst = SET_DEST (pat);
3648 int destreg;
3649 enum machine_mode mode = GET_MODE (dst);
ad076f4e 3650
2b835d68
RE
3651 if (dst == pc_rtx)
3652 return 0;
3653
3654 if (GET_CODE (dst) == REG)
3655 destreg = REGNO (dst);
3656 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
3657 destreg = REGNO (SUBREG_REG (dst));
ad076f4e
RE
3658 else
3659 return 0;
2b835d68
RE
3660
3661 return fixit (src, mode, destreg);
3662 }
3663 return 0;
3664}
3665
3666void
3667arm_reorg (first)
3668 rtx first;
3669{
3670 rtx insn;
3671 int count_size;
2b835d68
RE
3672
3673#if 0
3674 /* The ldr instruction can work with up to a 4k offset, and most constants
3675 will be loaded with one of these instructions; however, the adr
3676 instruction and the ldf instructions only work with a 1k offset. This
3677 code needs to be rewritten to use the 4k offset when possible, and to
3678 adjust when a 1k offset is needed. For now we just use a 1k offset
3679 from the start. */
3680 count_size = 4000;
3681
3682 /* Floating point operands can't work further than 1024 bytes from the
3683 PC, so to make things simple we restrict all loads for such functions.
3684 */
3685 if (TARGET_HARD_FLOAT)
ad076f4e
RE
3686 {
3687 int regno;
3688
3689 for (regno = 16; regno < 24; regno++)
3690 if (regs_ever_live[regno])
3691 {
3692 count_size = 1000;
3693 break;
3694 }
3695 }
2b835d68
RE
3696#else
3697 count_size = 1000;
3698#endif /* 0 */
3699
3700 for (insn = first; insn; insn = NEXT_INSN (insn))
3701 {
3702 if (broken_move (insn))
3703 {
3704 /* This is a broken move instruction, scan ahead looking for
3705 a barrier to stick the constant table behind */
3706 rtx scan;
3707 rtx barrier = find_barrier (insn, count_size);
3708
3709 /* Now find all the moves between the points and modify them */
3710 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
3711 {
3712 if (broken_move (scan))
3713 {
3714 /* This is a broken move instruction, add it to the pool */
3715 rtx pat = PATTERN (scan);
3716 rtx src = SET_SRC (pat);
3717 rtx dst = SET_DEST (pat);
3718 enum machine_mode mode = GET_MODE (dst);
3719 HOST_WIDE_INT offset;
3720 rtx newinsn = scan;
3721 rtx newsrc;
3722 rtx addr;
3723 int scratch;
332072db 3724 int address_only;
2b835d68
RE
3725
3726 /* If this is an HImode constant load, convert it into
3727 an SImode constant load. Since the register is always
3728 32 bits this is safe. We have to do this, since the
3729 load pc-relative instruction only does a 32-bit load. */
3730 if (mode == HImode)
3731 {
3732 mode = SImode;
3733 if (GET_CODE (dst) != REG)
3734 abort ();
3735 PUT_MODE (dst, SImode);
3736 }
3737
332072db 3738 offset = add_constant (src, mode, &address_only);
2b835d68
RE
3739 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
3740 pool_vector_label),
3741 offset);
3742
332072db
RE
3743 /* If we only want the address of the pool entry, or
3744 for wide moves to integer regs we need to split
3745 the address calculation off into a separate insn.
3746 If necessary, the load can then be done with a
3747 load-multiple. This is safe, since we have
3748 already noted the length of such insns to be 8,
3749 and we are immediately over-writing the scratch
3750 we have grabbed with the final result. */
3751 if ((address_only || GET_MODE_SIZE (mode) > 4)
2b835d68
RE
3752 && (scratch = REGNO (dst)) < 16)
3753 {
332072db
RE
3754 rtx reg;
3755
3756 if (mode == SImode)
3757 reg = dst;
3758 else
3759 reg = gen_rtx (REG, SImode, scratch);
3760
2b835d68
RE
3761 newinsn = emit_insn_after (gen_movaddr (reg, addr),
3762 newinsn);
3763 addr = reg;
3764 }
3765
332072db
RE
3766 if (! address_only)
3767 {
3768 newsrc = gen_rtx (MEM, mode, addr);
3769
3770 /* XXX Fixme -- I think the following is bogus. */
3771 /* Build a jump insn wrapper around the move instead
3772 of an ordinary insn, because we want to have room for
3773 the target label rtx in fld[7], which an ordinary
3774 insn doesn't have. */
3775 newinsn = emit_jump_insn_after
3776 (gen_rtx (SET, VOIDmode, dst, newsrc), newinsn);
3777 JUMP_LABEL (newinsn) = pool_vector_label;
3778
3779 /* But it's still an ordinary insn */
3780 PUT_CODE (newinsn, INSN);
3781 }
2b835d68
RE
3782
3783 /* Kill old insn */
3784 delete_insn (scan);
3785 scan = newinsn;
3786 }
3787 }
3788 dump_table (barrier);
3789 insn = scan;
3790 }
3791 }
3792}
3793
cce8749e
CH
3794\f
3795/* Routines to output assembly language. */
3796
f3bb6135 3797/* If the rtx is the correct value then return the string of the number.
ff9940b0
RE
3798 In this way we can ensure that valid double constants are generated even
3799 when cross compiling. */
3800char *
3801fp_immediate_constant (x)
b5cc037f 3802 rtx x;
ff9940b0
RE
3803{
3804 REAL_VALUE_TYPE r;
3805 int i;
3806
3807 if (!fpa_consts_inited)
3808 init_fpa_table ();
3809
3810 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3811 for (i = 0; i < 8; i++)
3812 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3813 return strings_fpa[i];
f3bb6135 3814
ff9940b0
RE
3815 abort ();
3816}
3817
9997d19d
RE
3818/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
3819static char *
3820fp_const_from_val (r)
3821 REAL_VALUE_TYPE *r;
3822{
3823 int i;
3824
3825 if (! fpa_consts_inited)
3826 init_fpa_table ();
3827
3828 for (i = 0; i < 8; i++)
3829 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
3830 return strings_fpa[i];
3831
3832 abort ();
3833}
ff9940b0 3834
cce8749e
CH
3835/* Output the operands of a LDM/STM instruction to STREAM.
3836 MASK is the ARM register set mask of which only bits 0-15 are important.
3837 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
3838 must follow the register list. */
3839
3840void
3841print_multi_reg (stream, instr, mask, hat)
3842 FILE *stream;
3843 char *instr;
3844 int mask, hat;
3845{
3846 int i;
3847 int not_first = FALSE;
3848
1d5473cb 3849 fputc ('\t', stream);
f3139301 3850 fprintf (stream, instr, REGISTER_PREFIX);
1d5473cb 3851 fputs (", {", stream);
cce8749e
CH
3852 for (i = 0; i < 16; i++)
3853 if (mask & (1 << i))
3854 {
3855 if (not_first)
3856 fprintf (stream, ", ");
f3139301 3857 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
cce8749e
CH
3858 not_first = TRUE;
3859 }
f3bb6135 3860
cce8749e 3861 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 3862}
cce8749e
CH
3863
3864/* Output a 'call' insn. */
3865
3866char *
3867output_call (operands)
f3bb6135 3868 rtx *operands;
cce8749e 3869{
cce8749e
CH
3870 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
3871
3872 if (REGNO (operands[0]) == 14)
3873 {
3874 operands[0] = gen_rtx (REG, SImode, 12);
1d5473cb 3875 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 3876 }
1d5473cb
RE
3877 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3878 output_asm_insn ("mov%?\t%|pc, %0", operands);
f3bb6135
RE
3879 return "";
3880}
cce8749e 3881
ff9940b0
RE
3882static int
3883eliminate_lr2ip (x)
f3bb6135 3884 rtx *x;
ff9940b0
RE
3885{
3886 int something_changed = 0;
3887 rtx x0 = *x;
3888 int code = GET_CODE (x0);
3889 register int i, j;
3890 register char *fmt;
3891
3892 switch (code)
3893 {
3894 case REG:
3895 if (REGNO (x0) == 14)
3896 {
3897 *x = gen_rtx (REG, SImode, 12);
3898 return 1;
3899 }
3900 return 0;
3901 default:
3902 /* Scan through the sub-elements and change any references there */
3903 fmt = GET_RTX_FORMAT (code);
3904 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3905 if (fmt[i] == 'e')
3906 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
3907 else if (fmt[i] == 'E')
3908 for (j = 0; j < XVECLEN (x0, i); j++)
3909 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
3910 return something_changed;
3911 }
3912}
3913
3914/* Output a 'call' insn that is a reference in memory. */
3915
3916char *
3917output_call_mem (operands)
f3bb6135 3918 rtx *operands;
ff9940b0
RE
3919{
3920 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
3921 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
3922 */
3923 if (eliminate_lr2ip (&operands[0]))
1d5473cb 3924 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 3925
1d5473cb
RE
3926 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3927 output_asm_insn ("ldr%?\t%|pc, %0", operands);
f3bb6135
RE
3928 return "";
3929}
ff9940b0
RE
3930
3931
3932/* Output a move from arm registers to an fpu registers.
3933 OPERANDS[0] is an fpu register.
3934 OPERANDS[1] is the first registers of an arm register pair. */
3935
3936char *
3937output_mov_long_double_fpu_from_arm (operands)
f3bb6135 3938 rtx *operands;
ff9940b0
RE
3939{
3940 int arm_reg0 = REGNO (operands[1]);
3941 rtx ops[3];
3942
3943 if (arm_reg0 == 12)
3944 abort();
f3bb6135 3945
ff9940b0
RE
3946 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3947 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3948 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3949
1d5473cb
RE
3950 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
3951 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
f3bb6135
RE
3952 return "";
3953}
ff9940b0
RE
3954
3955/* Output a move from an fpu register to arm registers.
3956 OPERANDS[0] is the first registers of an arm register pair.
3957 OPERANDS[1] is an fpu register. */
3958
3959char *
3960output_mov_long_double_arm_from_fpu (operands)
f3bb6135 3961 rtx *operands;
ff9940b0
RE
3962{
3963 int arm_reg0 = REGNO (operands[0]);
3964 rtx ops[3];
3965
3966 if (arm_reg0 == 12)
3967 abort();
f3bb6135 3968
ff9940b0
RE
3969 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3970 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3971 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3972
1d5473cb
RE
3973 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
3974 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
3975 return "";
3976}
ff9940b0
RE
3977
3978/* Output a move from arm registers to arm registers of a long double
3979 OPERANDS[0] is the destination.
3980 OPERANDS[1] is the source. */
3981char *
3982output_mov_long_double_arm_from_arm (operands)
f3bb6135 3983 rtx *operands;
ff9940b0
RE
3984{
3985 /* We have to be careful here because the two might overlap */
3986 int dest_start = REGNO (operands[0]);
3987 int src_start = REGNO (operands[1]);
3988 rtx ops[2];
3989 int i;
3990
3991 if (dest_start < src_start)
3992 {
3993 for (i = 0; i < 3; i++)
3994 {
3995 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3996 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 3997 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
3998 }
3999 }
4000 else
4001 {
4002 for (i = 2; i >= 0; i--)
4003 {
4004 ops[0] = gen_rtx (REG, SImode, dest_start + i);
4005 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 4006 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
4007 }
4008 }
f3bb6135 4009
ff9940b0
RE
4010 return "";
4011}
4012
4013
cce8749e
CH
4014/* Output a move from arm registers to an fpu registers.
4015 OPERANDS[0] is an fpu register.
4016 OPERANDS[1] is the first registers of an arm register pair. */
4017
4018char *
4019output_mov_double_fpu_from_arm (operands)
f3bb6135 4020 rtx *operands;
cce8749e
CH
4021{
4022 int arm_reg0 = REGNO (operands[1]);
4023 rtx ops[2];
4024
4025 if (arm_reg0 == 12)
4026 abort();
4027 ops[0] = gen_rtx (REG, SImode, arm_reg0);
4028 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1d5473cb
RE
4029 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
4030 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
4031 return "";
4032}
cce8749e
CH
4033
4034/* Output a move from an fpu register to arm registers.
4035 OPERANDS[0] is the first registers of an arm register pair.
4036 OPERANDS[1] is an fpu register. */
4037
4038char *
4039output_mov_double_arm_from_fpu (operands)
f3bb6135 4040 rtx *operands;
cce8749e
CH
4041{
4042 int arm_reg0 = REGNO (operands[0]);
4043 rtx ops[2];
4044
4045 if (arm_reg0 == 12)
4046 abort();
f3bb6135 4047
cce8749e
CH
4048 ops[0] = gen_rtx (REG, SImode, arm_reg0);
4049 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1d5473cb
RE
4050 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
4051 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
4052 return "";
4053}
cce8749e
CH
4054
4055/* Output a move between double words.
4056 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
4057 or MEM<-REG and all MEMs must be offsettable addresses. */
4058
4059char *
4060output_move_double (operands)
f3bb6135 4061 rtx *operands;
cce8749e
CH
4062{
4063 enum rtx_code code0 = GET_CODE (operands[0]);
4064 enum rtx_code code1 = GET_CODE (operands[1]);
56636818 4065 rtx otherops[3];
cce8749e
CH
4066
4067 if (code0 == REG)
4068 {
4069 int reg0 = REGNO (operands[0]);
4070
4071 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
4072 if (code1 == REG)
4073 {
4074 int reg1 = REGNO (operands[1]);
4075 if (reg1 == 12)
4076 abort();
f3bb6135 4077
cce8749e 4078 /* Ensure the second source is not overwritten */
c1c2bc04
RE
4079 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
4080 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
cce8749e 4081 else
c1c2bc04 4082 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
cce8749e
CH
4083 }
4084 else if (code1 == CONST_DOUBLE)
4085 {
226a5051
RE
4086 if (GET_MODE (operands[1]) == DFmode)
4087 {
4088 long l[2];
4089 union real_extract u;
4090
4091 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
4092 sizeof (u));
4093 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
4094 otherops[1] = GEN_INT(l[1]);
4095 operands[1] = GEN_INT(l[0]);
4096 }
c1c2bc04
RE
4097 else if (GET_MODE (operands[1]) != VOIDmode)
4098 abort ();
4099 else if (WORDS_BIG_ENDIAN)
4100 {
4101
4102 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4103 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4104 }
226a5051
RE
4105 else
4106 {
c1c2bc04 4107
226a5051
RE
4108 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4109 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4110 }
c1c2bc04
RE
4111 output_mov_immediate (operands);
4112 output_mov_immediate (otherops);
cce8749e
CH
4113 }
4114 else if (code1 == CONST_INT)
4115 {
56636818
JL
4116#if HOST_BITS_PER_WIDE_INT > 32
4117 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
4118 what the upper word is. */
4119 if (WORDS_BIG_ENDIAN)
4120 {
4121 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4122 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4123 }
4124 else
4125 {
4126 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4127 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4128 }
4129#else
4130 /* Sign extend the intval into the high-order word */
c1c2bc04
RE
4131 if (WORDS_BIG_ENDIAN)
4132 {
4133 otherops[1] = operands[1];
4134 operands[1] = (INTVAL (operands[1]) < 0
4135 ? constm1_rtx : const0_rtx);
4136 }
ff9940b0 4137 else
c1c2bc04 4138 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
56636818 4139#endif
c1c2bc04
RE
4140 output_mov_immediate (otherops);
4141 output_mov_immediate (operands);
cce8749e
CH
4142 }
4143 else if (code1 == MEM)
4144 {
ff9940b0 4145 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 4146 {
ff9940b0 4147 case REG:
9997d19d 4148 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 4149 break;
2b835d68 4150
ff9940b0 4151 case PRE_INC:
2b835d68 4152 abort (); /* Should never happen now */
ff9940b0 4153 break;
2b835d68 4154
ff9940b0 4155 case PRE_DEC:
2b835d68 4156 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 4157 break;
2b835d68 4158
ff9940b0 4159 case POST_INC:
9997d19d 4160 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 4161 break;
2b835d68 4162
ff9940b0 4163 case POST_DEC:
2b835d68 4164 abort (); /* Should never happen now */
ff9940b0 4165 break;
2b835d68
RE
4166
4167 case LABEL_REF:
4168 case CONST:
4169 output_asm_insn ("adr%?\t%0, %1", operands);
4170 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
4171 break;
4172
ff9940b0 4173 default:
2b835d68 4174 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
cce8749e 4175 {
2b835d68
RE
4176 otherops[0] = operands[0];
4177 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
4178 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
4179 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
4180 {
4181 if (GET_CODE (otherops[2]) == CONST_INT)
4182 {
4183 switch (INTVAL (otherops[2]))
4184 {
4185 case -8:
4186 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
4187 return "";
4188 case -4:
4189 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
4190 return "";
4191 case 4:
4192 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
4193 return "";
4194 }
4195 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
4196 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
4197 else
4198 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4199 }
4200 else
4201 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4202 }
4203 else
4204 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
4205 return "ldm%?ia\t%0, %M0";
4206 }
4207 else
4208 {
4209 otherops[1] = adj_offsettable_operand (operands[1], 4);
4210 /* Take care of overlapping base/data reg. */
4211 if (reg_mentioned_p (operands[0], operands[1]))
4212 {
4213 output_asm_insn ("ldr%?\t%0, %1", otherops);
4214 output_asm_insn ("ldr%?\t%0, %1", operands);
4215 }
4216 else
4217 {
4218 output_asm_insn ("ldr%?\t%0, %1", operands);
4219 output_asm_insn ("ldr%?\t%0, %1", otherops);
4220 }
cce8749e
CH
4221 }
4222 }
4223 }
2b835d68
RE
4224 else
4225 abort(); /* Constraints should prevent this */
cce8749e
CH
4226 }
4227 else if (code0 == MEM && code1 == REG)
4228 {
4229 if (REGNO (operands[1]) == 12)
4230 abort();
2b835d68 4231
ff9940b0
RE
4232 switch (GET_CODE (XEXP (operands[0], 0)))
4233 {
4234 case REG:
9997d19d 4235 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 4236 break;
2b835d68 4237
ff9940b0 4238 case PRE_INC:
2b835d68 4239 abort (); /* Should never happen now */
ff9940b0 4240 break;
2b835d68 4241
ff9940b0 4242 case PRE_DEC:
2b835d68 4243 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 4244 break;
2b835d68 4245
ff9940b0 4246 case POST_INC:
9997d19d 4247 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 4248 break;
2b835d68 4249
ff9940b0 4250 case POST_DEC:
2b835d68 4251 abort (); /* Should never happen now */
ff9940b0 4252 break;
2b835d68
RE
4253
4254 case PLUS:
4255 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
4256 {
4257 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
4258 {
4259 case -8:
4260 output_asm_insn ("stm%?db\t%m0, %M1", operands);
4261 return "";
4262
4263 case -4:
4264 output_asm_insn ("stm%?da\t%m0, %M1", operands);
4265 return "";
4266
4267 case 4:
4268 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
4269 return "";
4270 }
4271 }
4272 /* Fall through */
4273
ff9940b0 4274 default:
cce8749e
CH
4275 otherops[0] = adj_offsettable_operand (operands[0], 4);
4276 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
9997d19d
RE
4277 output_asm_insn ("str%?\t%1, %0", operands);
4278 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
4279 }
4280 }
2b835d68
RE
4281 else
4282 abort(); /* Constraints should prevent this */
cce8749e 4283
9997d19d
RE
4284 return "";
4285}
cce8749e
CH
4286
4287
4288/* Output an arbitrary MOV reg, #n.
4289 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
4290
4291char *
4292output_mov_immediate (operands)
f3bb6135 4293 rtx *operands;
cce8749e 4294{
f3bb6135 4295 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
4296 int n_ones = 0;
4297 int i;
4298
4299 /* Try to use one MOV */
cce8749e 4300 if (const_ok_for_arm (n))
f3bb6135 4301 {
9997d19d 4302 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
4303 return "";
4304 }
cce8749e
CH
4305
4306 /* Try to use one MVN */
f3bb6135 4307 if (const_ok_for_arm (~n))
cce8749e 4308 {
f3bb6135 4309 operands[1] = GEN_INT (~n);
9997d19d 4310 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 4311 return "";
cce8749e
CH
4312 }
4313
4314 /* If all else fails, make it out of ORRs or BICs as appropriate. */
4315
4316 for (i=0; i < 32; i++)
4317 if (n & 1 << i)
4318 n_ones++;
4319
4320 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
9997d19d
RE
4321 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
4322 ~n);
cce8749e 4323 else
9997d19d
RE
4324 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
4325 n);
f3bb6135
RE
4326
4327 return "";
4328}
cce8749e
CH
4329
4330
4331/* Output an ADD r, s, #n where n may be too big for one instruction. If
4332 adding zero to one register, output nothing. */
4333
4334char *
4335output_add_immediate (operands)
f3bb6135 4336 rtx *operands;
cce8749e 4337{
f3bb6135 4338 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
4339
4340 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
4341 {
4342 if (n < 0)
4343 output_multi_immediate (operands,
9997d19d
RE
4344 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
4345 -n);
cce8749e
CH
4346 else
4347 output_multi_immediate (operands,
9997d19d
RE
4348 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
4349 n);
cce8749e 4350 }
f3bb6135
RE
4351
4352 return "";
4353}
cce8749e 4354
cce8749e
CH
4355/* Output a multiple immediate operation.
4356 OPERANDS is the vector of operands referred to in the output patterns.
4357 INSTR1 is the output pattern to use for the first constant.
4358 INSTR2 is the output pattern to use for subsequent constants.
4359 IMMED_OP is the index of the constant slot in OPERANDS.
4360 N is the constant value. */
4361
18af7313 4362static char *
cce8749e 4363output_multi_immediate (operands, instr1, instr2, immed_op, n)
f3bb6135 4364 rtx *operands;
cce8749e 4365 char *instr1, *instr2;
f3bb6135
RE
4366 int immed_op;
4367 HOST_WIDE_INT n;
cce8749e 4368{
f3bb6135
RE
4369#if HOST_BITS_PER_WIDE_INT > 32
4370 n &= 0xffffffff;
4371#endif
4372
cce8749e
CH
4373 if (n == 0)
4374 {
4375 operands[immed_op] = const0_rtx;
f3bb6135 4376 output_asm_insn (instr1, operands); /* Quick and easy output */
cce8749e
CH
4377 }
4378 else
4379 {
4380 int i;
4381 char *instr = instr1;
4382
4383 /* Note that n is never zero here (which would give no output) */
cce8749e
CH
4384 for (i = 0; i < 32; i += 2)
4385 {
4386 if (n & (3 << i))
4387 {
f3bb6135
RE
4388 operands[immed_op] = GEN_INT (n & (255 << i));
4389 output_asm_insn (instr, operands);
cce8749e
CH
4390 instr = instr2;
4391 i += 6;
4392 }
4393 }
4394 }
f3bb6135 4395 return "";
9997d19d 4396}
cce8749e
CH
4397
4398
4399/* Return the appropriate ARM instruction for the operation code.
4400 The returned result should not be overwritten. OP is the rtx of the
4401 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
4402 was shifted. */
4403
4404char *
4405arithmetic_instr (op, shift_first_arg)
4406 rtx op;
f3bb6135 4407 int shift_first_arg;
cce8749e 4408{
9997d19d 4409 switch (GET_CODE (op))
cce8749e
CH
4410 {
4411 case PLUS:
f3bb6135
RE
4412 return "add";
4413
cce8749e 4414 case MINUS:
f3bb6135
RE
4415 return shift_first_arg ? "rsb" : "sub";
4416
cce8749e 4417 case IOR:
f3bb6135
RE
4418 return "orr";
4419
cce8749e 4420 case XOR:
f3bb6135
RE
4421 return "eor";
4422
cce8749e 4423 case AND:
f3bb6135
RE
4424 return "and";
4425
cce8749e 4426 default:
f3bb6135 4427 abort ();
cce8749e 4428 }
f3bb6135 4429}
cce8749e
CH
4430
4431
/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
   shift.  Returns NULL when the shift amount turns out to be zero (a
   no-op, so no shift should be printed at all).  */

static char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  char *mnem;
  enum rtx_code code = GET_CODE (op);

  /* Extract the shift amount: -1 marks a register-specified shift.  */
  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* Multiplication by a power of two is printed as a left shift.
	 We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	*amountp &= 31;		/* Rotate is just modulo 32 */
      else if (*amountp != (*amountp & 31))
	{
	  /* Out-of-range amount: clamp to 32, turning asl into lsr so the
	     arithmetic result (zero) is still produced.  */
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
cce8749e
CH
4509
4510
4511/* Obtain the shift from the POWER of two. */
4512
18af7313 4513static HOST_WIDE_INT
cce8749e 4514int_log2 (power)
f3bb6135 4515 HOST_WIDE_INT power;
cce8749e 4516{
f3bb6135 4517 HOST_WIDE_INT shift = 0;
cce8749e 4518
2b835d68 4519 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
4520 {
4521 if (shift > 31)
f3bb6135 4522 abort ();
cce8749e
CH
4523 shift++;
4524 }
f3bb6135
RE
4525
4526 return shift;
4527}
cce8749e 4528
cce8749e
CH
/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  Bytes outside the printable ASCII
   range are emitted as three-digit octal escapes; quote and backslash
   are backslash-escaped.  A new .ascii directive is started whenever
   the current one grows long.  */

void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     unsigned char *p;
     int len;
{
  int i;
  int line_len = 1000;		/* Force a fresh directive on the first byte */
  int emitted = 0;		/* Bytes emitted in the current directive */

  for (i = 0; i < len; i++)
    {
      int ch = p[i];

      /* Start a new .ascii directive once the current one is long,
	 closing the previous one if it produced any characters.  */
      if (line_len > 50)
	{
	  if (emitted)
	    fputs ("\"\n", stream);
	  fputs ("\t.ascii\t\"", stream);
	  line_len = 0;
	  emitted = 0;
	}

      /* Quote and backslash need a leading escape.  */
      if (ch == '\"' || ch == '\\')
	{
	  putc ('\\', stream);
	  line_len++;
	}

      if (ch >= ' ' && ch < 0177)
	{
	  putc (ch, stream);
	  line_len++;
	}
      else
	{
	  /* Non-printable bytes go out as octal escapes.  */
	  fprintf (stream, "\\%03o", ch);
	  line_len += 4;
	}

      emitted++;
    }

  fputs ("\"\n", stream);
}
cce8749e 4577\f
ff9940b0
RE
4578
4579/* Try to determine whether a pattern really clobbers the link register.
4580 This information is useful when peepholing, so that lr need not be pushed
0e84b556
RK
4581 if we combine a call followed by a return.
4582 NOTE: This code does not check for side-effect expressions in a SET_SRC:
4583 such a check should not be needed because these only update an existing
4584 value within a register; the register must still be set elsewhere within
4585 the function. */
ff9940b0
RE
4586
4587static int
4588pattern_really_clobbers_lr (x)
f3bb6135 4589 rtx x;
ff9940b0
RE
4590{
4591 int i;
4592
4593 switch (GET_CODE (x))
4594 {
4595 case SET:
4596 switch (GET_CODE (SET_DEST (x)))
4597 {
4598 case REG:
4599 return REGNO (SET_DEST (x)) == 14;
f3bb6135 4600
ff9940b0
RE
4601 case SUBREG:
4602 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
4603 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
f3bb6135 4604
0e84b556
RK
4605 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
4606 return 0;
ff9940b0 4607 abort ();
f3bb6135 4608
ff9940b0
RE
4609 default:
4610 return 0;
4611 }
f3bb6135 4612
ff9940b0
RE
4613 case PARALLEL:
4614 for (i = 0; i < XVECLEN (x, 0); i++)
4615 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
4616 return 1;
4617 return 0;
f3bb6135 4618
ff9940b0
RE
4619 case CLOBBER:
4620 switch (GET_CODE (XEXP (x, 0)))
4621 {
4622 case REG:
4623 return REGNO (XEXP (x, 0)) == 14;
f3bb6135 4624
ff9940b0
RE
4625 case SUBREG:
4626 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
4627 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
4628 abort ();
f3bb6135 4629
ff9940b0
RE
4630 default:
4631 return 0;
4632 }
f3bb6135 4633
ff9940b0
RE
4634 case UNSPEC:
4635 return 1;
f3bb6135 4636
ff9940b0
RE
4637 default:
4638 return 0;
4639 }
4640}
4641
/* Walk the whole insn chain starting at FIRST and return 1 if any insn
   really clobbers the link register, 0 otherwise.  Used to decide whether
   the lr save in the prologue can be eliminated.  */

static int
function_really_clobbers_lr (first)
     rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	case NOTE:
	case CODE_LABEL:
	case JUMP_INSN:		/* Jump insns only change the PC (and conds) */
	case INLINE_HEADER:
	  break;

	case INSN:
	  if (pattern_really_clobbers_lr (PATTERN (insn)))
	    return 1;
	  break;

	case CALL_INSN:
	  /* Don't yet know how to handle those calls that are not to a
	     SYMBOL_REF */
	  if (GET_CODE (PATTERN (insn)) != PARALLEL)
	    abort ();

	  switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
	    {
	    case CALL:
	      /* A call through anything other than a symbol clobbers lr.  */
	      if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    case SET:
	      /* Value-returning call: look inside the SET_SRC for the
		 called address.  */
	      if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
							  0, 0)), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    default:		/* Don't recognize it, be safe */
	      return 1;
	    }

	  /* A call can be made (by peepholing) not to clobber lr iff it is
	     followed by a return.  There may, however, be a use insn iff
	     we are returning the result of the call.
	     If we run off the end of the insn chain, then that means the
	     call was at the end of the function.  Unfortunately we don't
	     have a return insn for the peephole to recognize, so we
	     must reject this.  (Can this be fixed by adding our own insn?) */
	  if ((next = next_nonnote_insn (insn)) == NULL)
	    return 1;

	  /* No need to worry about lr if the call never returns */
	  if (GET_CODE (next) == BARRIER)
	    break;

	  /* Skip over a USE of the call's result register, if present.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
	      && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	      && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
		  == REGNO (XEXP (PATTERN (next), 0))))
	    if ((next = next_nonnote_insn (next)) == NULL)
	      return 1;

	  /* Only a call immediately followed by a RETURN jump is safe.  */
	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == RETURN)
	    break;
	  return 1;

	default:
	  abort ();
	}
    }

  /* We have reached the end of the chain so lr was _not_ clobbered */
  return 0;
}
4723
/* Generate the assembly for a function return, popping any saved registers.
   OPERAND is the condition-code operand of the return insn.
   REALLY_RETURN is nonzero if control really leaves the function here
   (as opposed to merely restoring registers before a sibling call).
   REVERSE is nonzero if the condition should be inverted (%D vs %d).
   Also records (in return_used_this_function) that a return was emitted,
   so the epilogue knows it need not emit one itself.  */

char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char instr[100];
  int reg, live_regs = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  return_used_this_function = 1;

  if (volatile_func)
    {
      rtx ops[2];
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return. */
      if (! really_return)
	return "";

      /* Otherwise, trap an attempted return by aborting. */
      ops[0] = operand;
      ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
      assemble_external_libcall (ops[1]);
      output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
      return "";
    }

  if (current_function_calls_alloca && ! really_return)
    abort();

  /* Count the callee-saved integer registers that were used.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs++;

  /* Account for a saved lr.  */
  if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
    live_regs++;

  /* Frame pointer case pops fp, sp, lr/pc as well (see mask below).  */
  if (frame_pointer_needed)
    live_regs += 4;

  if (live_regs)
    {
      /* NOTE(review): this bumps the count again when lr was NOT pushed;
	 the extra count compensates for the pc/lr slot always appended
	 to the register list below -- confirm against the ldm emission. */
      if (lr_save_eliminated || ! regs_ever_live[14])
	live_regs++;

      /* With a frame pointer we unwind from fp (ldmea); otherwise we pop
	 from sp with writeback (ldmfd sp!).  */
      if (frame_pointer_needed)
	strcpy (instr,
		reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
      else
	strcpy (instr,
		reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");

      /* Build the register list, using --live_regs to know when to stop
	 emitting separators.  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  {
	    strcat (instr, "%|");
	    strcat (instr, reg_names[reg]);
	    if (--live_regs)
	      strcat (instr, ", ");
	  }

      if (frame_pointer_needed)
	{
	  /* Restore fp and sp, then load pc (real return) or lr.  */
	  strcat (instr, "%|");
	  strcat (instr, reg_names[11]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, reg_names[13]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      else
	{
	  strcat (instr, "%|");
	  strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      /* "}^" restores SPSR->CPSR on 26-bit targets when returning.  */
      strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);
    }
  else if (really_return)
    {
      /* Nothing to pop: return with bx (interworking) or mov pc, lr.  */
      if (TARGET_THUMB_INTERWORK)
	sprintf (instr, "bx%%?%%%s\t%%|lr", reverse ? "D" : "d");
      else
	sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
		 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
      output_asm_insn (instr, &operand);
    }

  return "";
}
4818
e82ea128
DE
4819/* Return nonzero if optimizing and the current function is volatile.
4820 Such functions never return, and many memory cycles can be saved
4821 by not storing register values that will never be needed again.
4822 This optimization was added to speed up context switching in a
4823 kernel application. */
a0b2ce4c 4824
e2c671ba
RE
4825int
4826arm_volatile_func ()
4827{
4828 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4829}
4830
ff9940b0
RE
4831/* The amount of stack adjustment that happens here, in output_return and in
4832 output_epilogue must be exactly the same as was calculated during reload,
4833 or things will point to the wrong place. The only time we can safely
4834 ignore this constraint is when a function has no arguments on the stack,
 4835   no stack frame requirement and no live registers except for `lr'.  If we
4836 can guarantee that by making all function calls into tail calls and that
4837 lr is not clobbered in any other way, then there is no need to push lr
4838 onto the stack. */
4839
void
output_func_prologue (f, frame_size)
     FILE *f;
     int frame_size;
{
  int reg, live_regs_mask = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  /* Nonzero if we must stuff some register arguments onto the stack as if
     they were passed there.  */
  int store_arg_regs = 0;

  if (arm_ccfsm_state || arm_target_insn)
    abort ();					/* Sanity check */

  /* Naked functions get no compiler-generated prologue at all.  */
  if (arm_naked_function_p (current_function_decl))
    return;

  return_used_this_function = 0;
  lr_save_eliminated = 0;

  /* Emit informational assembler comments about the frame layout.  */
  fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
	   ASM_COMMENT_START, current_function_args_size,
	   current_function_pretend_args_size, frame_size);
  fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
	   ASM_COMMENT_START, frame_pointer_needed,
	   current_function_anonymous_args);

  if (volatile_func)
    fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Collect the callee-saved integer registers this function uses.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs_mask |= (1 << reg);

  if (frame_pointer_needed)
    live_regs_mask |= 0xD800;	/* fp, ip, lr */
  else if (regs_ever_live[14])
    {
      /* lr need not be saved if nothing in the function clobbers it.  */
      if (! current_function_args_size
	  && ! function_really_clobbers_lr (get_insns ()))
	lr_save_eliminated = 1;
      else
	live_regs_mask |= 0x4000;
    }

  if (live_regs_mask)
    {
      /* if a di mode load/store multiple is used, and the base register
	 is r3, then r4 can become an ever live register without lr
	 doing so, in this case we need to push lr as well, or we
	 will fail to get a proper return. */

      live_regs_mask |= 0x4000;
      lr_save_eliminated = 0;

    }

  if (lr_save_eliminated)
    fprintf (f,"\t%s I don't think this function clobbers lr\n",
	     ASM_COMMENT_START);

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
	     reg_names[PIC_OFFSET_TABLE_REGNUM]);
#endif
}
cce8749e
CH
4912
4913
4914void
f3bb6135 4915output_func_epilogue (f, frame_size)
cce8749e
CH
4916 FILE *f;
4917 int frame_size;
4918{
b111229a
RE
4919 int reg, live_regs_mask = 0;
4920 /* If we need this then it will always be at least this much */
4921 int floats_offset = 12;
cce8749e 4922 rtx operands[3];
e2c671ba
RE
4923 int volatile_func = (optimize > 0
4924 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 4925
ff9940b0 4926 if (use_return_insn() && return_used_this_function)
cce8749e 4927 {
56636818
JL
4928 if ((frame_size + current_function_outgoing_args_size) != 0
4929 && !(frame_pointer_needed || TARGET_APCS))
4930 abort ();
f3bb6135 4931 goto epilogue_done;
cce8749e 4932 }
cce8749e 4933
31fdb4d5
DE
4934 /* Naked functions don't have epilogues. */
4935 if (arm_naked_function_p (current_function_decl))
4936 goto epilogue_done;
4937
e2c671ba
RE
4938 /* A volatile function should never return. Call abort. */
4939 if (volatile_func)
4940 {
4941 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
2b835d68 4942 assemble_external_libcall (op);
e2c671ba 4943 output_asm_insn ("bl\t%a0", &op);
e2c671ba
RE
4944 goto epilogue_done;
4945 }
4946
f3bb6135
RE
4947 for (reg = 0; reg <= 10; reg++)
4948 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 4949 {
ff9940b0
RE
4950 live_regs_mask |= (1 << reg);
4951 floats_offset += 4;
cce8749e
CH
4952 }
4953
ff9940b0 4954 if (frame_pointer_needed)
cce8749e 4955 {
b111229a
RE
4956 if (arm_fpu_arch == FP_SOFT2)
4957 {
4958 for (reg = 23; reg > 15; reg--)
4959 if (regs_ever_live[reg] && ! call_used_regs[reg])
4960 {
4961 floats_offset += 12;
4962 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
4963 reg_names[reg], REGISTER_PREFIX, floats_offset);
4964 }
4965 }
4966 else
4967 {
4968 int start_reg = 23;
4969
4970 for (reg = 23; reg > 15; reg--)
4971 {
4972 if (regs_ever_live[reg] && ! call_used_regs[reg])
4973 {
4974 floats_offset += 12;
4975 /* We can't unstack more than four registers at once */
4976 if (start_reg - reg == 3)
4977 {
4978 fprintf (f, "\tlfm\t%s%s, 4, [%sfp, #-%d]\n",
4979 REGISTER_PREFIX, reg_names[reg],
4980 REGISTER_PREFIX, floats_offset);
4981 start_reg = reg - 1;
4982 }
4983 }
4984 else
4985 {
4986 if (reg != start_reg)
4987 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4988 REGISTER_PREFIX, reg_names[reg + 1],
4989 start_reg - reg, REGISTER_PREFIX, floats_offset);
ff9940b0 4990
b111229a
RE
4991 start_reg = reg - 1;
4992 }
4993 }
4994
4995 /* Just in case the last register checked also needs unstacking. */
4996 if (reg != start_reg)
4997 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4998 REGISTER_PREFIX, reg_names[reg + 1],
4999 start_reg - reg, REGISTER_PREFIX, floats_offset);
5000 }
5001
5002 if (TARGET_THUMB_INTERWORK)
5003 {
5004 live_regs_mask |= 0x6800;
5005 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask, FALSE);
5006 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5007 }
5008 else
5009 {
5010 live_regs_mask |= 0xA800;
5011 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
5012 TARGET_APCS_32 ? FALSE : TRUE);
5013 }
cce8749e
CH
5014 }
5015 else
5016 {
d2288d8d 5017 /* Restore stack pointer if necessary. */
56636818 5018 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
5019 {
5020 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
5021 operands[2] = GEN_INT (frame_size
5022 + current_function_outgoing_args_size);
d2288d8d
TG
5023 output_add_immediate (operands);
5024 }
5025
b111229a
RE
5026 if (arm_fpu_arch == FP_SOFT2)
5027 {
5028 for (reg = 16; reg < 24; reg++)
5029 if (regs_ever_live[reg] && ! call_used_regs[reg])
5030 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
5031 reg_names[reg], REGISTER_PREFIX);
5032 }
5033 else
5034 {
5035 int start_reg = 16;
5036
5037 for (reg = 16; reg < 24; reg++)
5038 {
5039 if (regs_ever_live[reg] && ! call_used_regs[reg])
5040 {
5041 if (reg - start_reg == 3)
5042 {
5043 fprintf (f, "\tlfmfd\t%s%s, 4, [%ssp]!\n",
5044 REGISTER_PREFIX, reg_names[start_reg],
5045 REGISTER_PREFIX);
5046 start_reg = reg + 1;
5047 }
5048 }
5049 else
5050 {
5051 if (reg != start_reg)
5052 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5053 REGISTER_PREFIX, reg_names[start_reg],
5054 reg - start_reg, REGISTER_PREFIX);
5055
5056 start_reg = reg + 1;
5057 }
5058 }
5059
5060 /* Just in case the last register checked also needs unstacking. */
5061 if (reg != start_reg)
5062 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5063 REGISTER_PREFIX, reg_names[start_reg],
5064 reg - start_reg, REGISTER_PREFIX);
5065 }
5066
cce8749e
CH
5067 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
5068 {
b111229a
RE
5069 if (TARGET_THUMB_INTERWORK)
5070 {
5071 if (! lr_save_eliminated)
5072 print_multi_reg(f, "ldmfd\t%ssp!", live_regs_mask | 0x4000,
5073 FALSE);
5074
5075 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5076 }
5077 else if (lr_save_eliminated)
32de079a
RE
5078 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5079 : "\tmovs\t%spc, %slr\n"),
5080 REGISTER_PREFIX, REGISTER_PREFIX, f);
5081 else
5082 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
5083 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
5084 }
5085 else
5086 {
ff9940b0 5087 if (live_regs_mask || regs_ever_live[14])
cce8749e 5088 {
32de079a
RE
5089 /* Restore the integer regs, and the return address into lr */
5090 if (! lr_save_eliminated)
5091 live_regs_mask |= 0x4000;
5092
5093 if (live_regs_mask != 0)
32de079a 5094 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
cce8749e 5095 }
b111229a 5096
cce8749e
CH
5097 if (current_function_pretend_args_size)
5098 {
32de079a 5099 /* Unwind the pre-pushed regs */
cce8749e 5100 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 5101 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
5102 output_add_immediate (operands);
5103 }
32de079a 5104 /* And finally, go home */
b111229a
RE
5105 if (TARGET_THUMB_INTERWORK)
5106 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5107 else
5108 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5109 : "\tmovs\t%spc, %slr\n"),
5110 REGISTER_PREFIX, REGISTER_PREFIX, f);
cce8749e
CH
5111 }
5112 }
f3bb6135 5113
32de079a 5114epilogue_done:
f3bb6135 5115
cce8749e 5116 current_function_anonymous_args = 0;
f3bb6135 5117}
e2c671ba
RE
5118
5119static void
5120emit_multi_reg_push (mask)
5121 int mask;
5122{
5123 int num_regs = 0;
5124 int i, j;
5125 rtx par;
5126
5127 for (i = 0; i < 16; i++)
5128 if (mask & (1 << i))
5129 num_regs++;
5130
5131 if (num_regs == 0 || num_regs > 16)
5132 abort ();
5133
5134 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
5135
5136 for (i = 0; i < 16; i++)
5137 {
5138 if (mask & (1 << i))
5139 {
5140 XVECEXP (par, 0, 0)
5141 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
5142 gen_rtx (PRE_DEC, BLKmode,
5143 stack_pointer_rtx)),
5144 gen_rtx (UNSPEC, BLKmode,
5145 gen_rtvec (1, gen_rtx (REG, SImode, i)),
5146 2));
5147 break;
5148 }
5149 }
5150
5151 for (j = 1, i++; j < num_regs; i++)
5152 {
5153 if (mask & (1 << i))
5154 {
5155 XVECEXP (par, 0, j)
5156 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
5157 j++;
5158 }
5159 }
b111229a
RE
5160
5161 emit_insn (par);
5162}
5163
5164static void
5165emit_sfm (base_reg, count)
5166 int base_reg;
5167 int count;
5168{
5169 rtx par;
5170 int i;
5171
5172 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (count));
5173
5174 XVECEXP (par, 0, 0) = gen_rtx (SET, VOIDmode,
5175 gen_rtx (MEM, BLKmode,
5176 gen_rtx (PRE_DEC, BLKmode,
5177 stack_pointer_rtx)),
5178 gen_rtx (UNSPEC, BLKmode,
5179 gen_rtvec (1, gen_rtx (REG, XFmode,
5180 base_reg++)),
5181 2));
5182 for (i = 1; i < count; i++)
5183 XVECEXP (par, 0, i) = gen_rtx (USE, VOIDmode,
5184 gen_rtx (REG, XFmode, base_reg++));
5185
e2c671ba
RE
5186 emit_insn (par);
5187}
5188
/* Generate RTL for the function prologue: save argument registers,
   callee-saved integer registers and FP registers, set up the frame
   pointer, and allocate the local frame.  The emission order here must
   match what output_func_epilogue undoes.  */

void
arm_expand_prologue ()
{
  int reg;
  /* Negative frame adjustment: locals plus outgoing argument area.  */
  rtx amount = GEN_INT (-(get_frame_size ()
			  + current_function_outgoing_args_size));
  int live_regs_mask = 0;
  int store_arg_regs = 0;
  int volatile_func = (optimize > 0
		       && TREE_THIS_VOLATILE (current_function_decl));

  /* Naked functions don't have prologues. */
  if (arm_naked_function_p (current_function_decl))
    return;

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Volatile (never-returning) functions save nothing.  */
  if (! volatile_func)
    for (reg = 0; reg <= 10; reg++)
      if (regs_ever_live[reg] && ! call_used_regs[reg])
	live_regs_mask |= 1 << reg;

  if (! volatile_func && regs_ever_live[14])
    live_regs_mask |= 0x4000;	/* lr */

  if (frame_pointer_needed)
    {
      live_regs_mask |= 0xD800;	/* fp, ip, lr */
      /* Save the incoming sp in ip; fp is set from it further down.  */
      emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
			    stack_pointer_rtx));
    }

  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
	/* Push the argument registers that overflow onto the stack.  */
	emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
			     & 0xf);
      else
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (-current_function_pretend_args_size)));
    }

  if (live_regs_mask)
    {
      /* If we have to push any regs, then we must push lr as well, or
	 we won't get a proper return. */
      live_regs_mask |= 0x4000;
      emit_multi_reg_push (live_regs_mask);
    }

  /* For now the integer regs are still pushed in output_func_epilogue ().  */

  if (! volatile_func)
    {
      if (arm_fpu_arch == FP_SOFT2)
	{
	  /* Save each live FP register with an individual push.  */
	  for (reg = 23; reg > 15; reg--)
	    if (regs_ever_live[reg] && ! call_used_regs[reg])
	      emit_insn (gen_rtx (SET, VOIDmode,
				  gen_rtx (MEM, XFmode,
					   gen_rtx (PRE_DEC, XFmode,
						    stack_pointer_rtx)),
				  gen_rtx (REG, XFmode, reg)));
	}
      else
	{
	  /* Group consecutive live FP registers into sfm instructions
	     of at most four registers each.  */
	  int start_reg = 23;

	  for (reg = 23; reg > 15; reg--)
	    {
	      if (regs_ever_live[reg] && ! call_used_regs[reg])
		{
		  if (start_reg - reg == 3)
		    {
		      emit_sfm (reg, 4);
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (start_reg != reg)
		    emit_sfm (reg + 1, start_reg - reg);
		  start_reg = reg - 1;
		}
	    }

	  if (start_reg != reg)
	    emit_sfm (reg + 1, start_reg - reg);
	}
    }

  if (frame_pointer_needed)
    /* fp = ip - (4 + pretend size): point at the saved registers.  */
    emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
			   (GEN_INT
			    (-(4 + current_function_pretend_args_size)))));

  if (amount != const0_rtx)
    {
      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
      /* Clobber the stack memory so the scheduler cannot move stack
	 accesses across the adjustment.  */
      emit_insn (gen_rtx (CLOBBER, VOIDmode,
			  gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount. */
  if (profile_flag || profile_block_flag)
    emit_insn (gen_blockage ());
}
5298
cce8749e 5299\f
9997d19d
RE
/* If CODE is 'd', then the X is a condition operand and the instruction
   should only be executed if the condition is true.
   if CODE is 'D', then the X is a condition operand and the instruction
   should only be executed if the condition is false: however, if the mode
   of the comparison is CCFPEmode, then always execute the instruction -- we
   do this because in these circumstances !GE does not necessarily imply LT;
   in these cases the instruction pattern will take care to make sure that
   an instruction containing %d will follow, thereby undoing the effects of
   doing this instruction unconditionally.
   If CODE is 'N' then X is a floating point operand that must be negated
   before output.
   If CODE is 'B' then output a bitwise inverted value of X (a const int).
   If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */

void
arm_print_operand (stream, x, code)
     FILE *stream;
     rtx x;
     int code;
{
  switch (code)
    {
    case '@':			/* Assembler comment leader.  */
      fputs (ASM_COMMENT_START, stream);
      return;

    case '|':			/* Register-name prefix.  */
      fputs (REGISTER_PREFIX, stream);
      return;

    case '?':			/* Current condition code, when conditionalizing.  */
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	fputs (arm_condition_codes[arm_current_cc], stream);
      return;

    case 'N':			/* Negated FP constant.  */
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':			/* Bitwise-inverted integer constant.  */
      if (GET_CODE (x) == CONST_INT)
	fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		 "%d",
#else
		 "%ld",
#endif
		 ARM_SIGN_EXTEND (~ INTVAL (x)));
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':			/* Arithmetic mnemonic, shifted first operand.  */
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':			/* Arithmetic mnemonic, unshifted first operand.  */
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':			/* Shift suffix: ", <mnem> #amount" or register.  */
      {
	HOST_WIDE_INT val;
	char *shift = shift_op (x, &val);

	if (shift)
	  {
	    fprintf (stream, ", %s ", shift_op (x, &val));
	    if (val == -1)
	      /* val == -1 means the shift amount is in a register.  */
	      arm_print_operand (stream, XEXP (x, 1), 0);
	    else
	      fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		       "#%d",
#else
		       "#%ld",
#endif
		       val);
	  }
      }
      return;

    case 'Q':			/* Least-significant word of a two-word value.  */
      if (REGNO (x) > 15)
	abort ();
      fputs (REGISTER_PREFIX, stream);
      fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
      return;

    case 'R':			/* Most-significant word of a two-word value.  */
      if (REGNO (x) > 15)
	abort ();
      fputs (REGISTER_PREFIX, stream);
      fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
      return;

    case 'm':			/* Base register of a memory operand.  */
      fputs (REGISTER_PREFIX, stream);
      if (GET_CODE (XEXP (x, 0)) == REG)
	fputs (reg_names[REGNO (XEXP (x, 0))], stream);
      else
	fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
      return;

    case 'M':			/* ldm/stm style register range {rN-rM}.  */
      fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
	       REGISTER_PREFIX, reg_names[REGNO (x) - 1
					  + ((GET_MODE_SIZE (GET_MODE (x))
					      + GET_MODE_SIZE (SImode) - 1)
					     / GET_MODE_SIZE (SImode))]);
      return;

    case 'd':			/* Condition code of X.  */
      if (x)
	fputs (arm_condition_codes[get_arm_condition_code (x)],
	       stream);
      return;

    case 'D':			/* Inverted condition code of X.  */
      if (x)
	fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				   (get_arm_condition_code (x))],
	       stream);
      return;

    default:
      if (x == 0)
	abort ();

      if (GET_CODE (x) == REG)
	{
	  fputs (REGISTER_PREFIX, stream);
	  fputs (reg_names[REGNO (x)], stream);
	}
      else if (GET_CODE (x) == MEM)
	{
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
	abort (); /* This should never happen now. */
      else
	{
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	}
    }
}
5458
\f
/* A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decrease execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE.  */

/* The state of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: make ASM_OUTPUT_OPCODE not output this instruction
   2: make ASM_OUTPUT_OPCODE not output this instruction
   3: make instructions conditional
   4: make instructions conditional

   State transitions (state->state by whom under condition):
   0 -> 1 final_prescan_insn if the `target' is a label
   0 -> 2 final_prescan_insn if the `target' is an unconditional branch
   1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
   2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
   3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
          (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
   4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
          (the target insn is arm_target_insn).

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   XXX In case the `target' is an unconditional branch, this conditionalising
   of the instructions always reduces code size, but not always execution
   time.  But then, I want to reduce the code size to somewhere near what
   /bin/cc produces.  */

/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

84ed5e79 5495static enum arm_cond_code
cce8749e
CH
5496get_arm_condition_code (comparison)
5497 rtx comparison;
5498{
5165176d 5499 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
5500 register int code;
5501 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
5502
5503 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 5504 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
5505 XEXP (comparison, 1));
5506
5507 switch (mode)
cce8749e 5508 {
84ed5e79
RE
5509 case CC_DNEmode: code = ARM_NE; goto dominance;
5510 case CC_DEQmode: code = ARM_EQ; goto dominance;
5511 case CC_DGEmode: code = ARM_GE; goto dominance;
5512 case CC_DGTmode: code = ARM_GT; goto dominance;
5513 case CC_DLEmode: code = ARM_LE; goto dominance;
5514 case CC_DLTmode: code = ARM_LT; goto dominance;
5515 case CC_DGEUmode: code = ARM_CS; goto dominance;
5516 case CC_DGTUmode: code = ARM_HI; goto dominance;
5517 case CC_DLEUmode: code = ARM_LS; goto dominance;
5518 case CC_DLTUmode: code = ARM_CC;
5519
5520 dominance:
5521 if (comp_code != EQ && comp_code != NE)
5522 abort ();
5523
5524 if (comp_code == EQ)
5525 return ARM_INVERSE_CONDITION_CODE (code);
5526 return code;
5527
5165176d 5528 case CC_NOOVmode:
84ed5e79 5529 switch (comp_code)
5165176d 5530 {
84ed5e79
RE
5531 case NE: return ARM_NE;
5532 case EQ: return ARM_EQ;
5533 case GE: return ARM_PL;
5534 case LT: return ARM_MI;
5165176d
RE
5535 default: abort ();
5536 }
5537
5538 case CC_Zmode:
5539 case CCFPmode:
84ed5e79 5540 switch (comp_code)
5165176d 5541 {
84ed5e79
RE
5542 case NE: return ARM_NE;
5543 case EQ: return ARM_EQ;
5165176d
RE
5544 default: abort ();
5545 }
5546
5547 case CCFPEmode:
84ed5e79
RE
5548 switch (comp_code)
5549 {
5550 case GE: return ARM_GE;
5551 case GT: return ARM_GT;
5552 case LE: return ARM_LS;
5553 case LT: return ARM_MI;
5554 default: abort ();
5555 }
5556
5557 case CC_SWPmode:
5558 switch (comp_code)
5559 {
5560 case NE: return ARM_NE;
5561 case EQ: return ARM_EQ;
5562 case GE: return ARM_LE;
5563 case GT: return ARM_LT;
5564 case LE: return ARM_GE;
5565 case LT: return ARM_GT;
5566 case GEU: return ARM_LS;
5567 case GTU: return ARM_CC;
5568 case LEU: return ARM_CS;
5569 case LTU: return ARM_HI;
5570 default: abort ();
5571 }
5572
bd9c7e23
RE
5573 case CC_Cmode:
5574 switch (comp_code)
5575 {
5576 case LTU: return ARM_CS;
5577 case GEU: return ARM_CC;
5578 default: abort ();
5579 }
5580
5165176d 5581 case CCmode:
84ed5e79 5582 switch (comp_code)
5165176d 5583 {
84ed5e79
RE
5584 case NE: return ARM_NE;
5585 case EQ: return ARM_EQ;
5586 case GE: return ARM_GE;
5587 case GT: return ARM_GT;
5588 case LE: return ARM_LE;
5589 case LT: return ARM_LT;
5590 case GEU: return ARM_CS;
5591 case GTU: return ARM_HI;
5592 case LEU: return ARM_LS;
5593 case LTU: return ARM_CC;
5165176d
RE
5594 default: abort ();
5595 }
5596
cce8749e
CH
5597 default: abort ();
5598 }
84ed5e79
RE
5599
5600 abort ();
f3bb6135 5601}
cce8749e
CH
5602
5603
/* Implements FINAL_PRESCAN_INSN: called by `final' just before INSN is
   output.  Drives the condition-code fsm described above: when INSN is a
   short forward conditional branch (or a conditional branch to a return)
   whose skipped insns can all be conditionally executed, it arranges for
   the branch itself to be suppressed (states 1/2) and for the skipped
   insns to be conditionalised (states 3/4) by ASM_OUTPUT_OPCODE.
   OPVEC and NOPERANDS are part of the target-hook interface but are not
   used here.  */

void
final_prescan_insn (insn, opvec, noperands)
     rtx insn;
     rtx *opvec;
     int noperands;
{
  /* BODY will hold the body of INSN.  */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick (i.e. chaining a second
     conditionalisation onto state 3), and things need to be reversed if
     it appears to fail.  */
  int reverse = 0;

  /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
     taken are clobbered, even if the rtl suggests otherwise.  It also
     means that we have to grub around within the jump expression to find
     out what the conditions are when the jump isn't taken.  */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another
     one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  /* Same trick, but the branch being chained is a return insn:
	     from here on we look for a matching return, not a label.  */
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  /* Only states 0 and 3 (the latter having just set REVERSE) can reach
     this point; state 4 returned above and states 1/2 are consumed by
     ASM_OUTPUT_OPCODE.  */
  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know.  */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
	  || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  /* The code below is wrong for these, and I haven't time to
	     fix it now.  So we just do the safe thing and return.  This
	     whole function needs re-writing anyway.  */
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to.  */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* If using 32-bit addresses the cc is not preserved over
		 calls.  */
	      if (TARGET_APCS_32)
		{
		  /* Succeed if the following insn is the target label,
		     or if the following two insns are a barrier and
		     the target label.  */
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && GET_CODE (this_insn) == BARRIER)
		    this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && this_insn == label
		      && insns_skipped < MAX_INSNS_SKIPPED)
		    {
		      if (jump_clobbers)
			{
			  arm_ccfsm_state = 2;
			  this_insn = next_nonnote_insn (this_insn);
			}
		      else
			arm_ccfsm_state = 1;
		      succeed = TRUE;
		    }
		  else
		    fail = TRUE;
		}
	      break;

	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* XXX Probably, the tests for SET and the PC are unnecessary.  */

	      scanbody = PATTERN (this_insn);
	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      /* A RETURN matches if we were seeking one.  */
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail.  */
	      scanbody = PATTERN (this_insn);
	      if ((GET_CODE (scanbody) == SET
		   || GET_CODE (scanbody) == PARALLEL)
		  && get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}
      if (succeed)
	{
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      /* Skip over any USE insns to find the branch/return whose
		 arrival will reset the fsm (recorded in arm_target_insn).  */
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh, dear! we ran off the end.. give up.  */
		  recog (PATTERN (insn), insn, NULL_PTR);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
		}
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();
	  if (jump_clobbers)
	    {
	      /* Dig the condition out of the jump expression; see the
		 comment on JUMP_CLOBBERS above.  */
	      if (reverse)
		abort ();
	      arm_current_cc =
		get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							  0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was.  */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));
	    }

	  if (reverse || then_not_else)
	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	}
      /* restore recog_operand (getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call; since the insn has been recognized already we
	 call recog direct).  */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}
cce8749e 5933
2b835d68
RE
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */
5936
32de079a
RE
5937rtx aof_pic_label = NULL_RTX;
5938struct pic_chain
5939{
5940 struct pic_chain *next;
5941 char *symname;
5942};
5943
5944static struct pic_chain *aof_pic_chain = NULL;
5945
5946rtx
5947aof_pic_entry (x)
5948 rtx x;
5949{
5950 struct pic_chain **chainp;
5951 int offset;
5952
5953 if (aof_pic_label == NULL_RTX)
5954 {
5955 /* This needs to persist throughout the compilation. */
5956 end_temporary_allocation ();
5957 aof_pic_label = gen_rtx (SYMBOL_REF, Pmode, "x$adcons");
5958 resume_temporary_allocation ();
5959 }
5960
5961 for (offset = 0, chainp = &aof_pic_chain; *chainp;
5962 offset += 4, chainp = &(*chainp)->next)
5963 if ((*chainp)->symname == XSTR (x, 0))
5964 return plus_constant (aof_pic_label, offset);
5965
5966 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
5967 (*chainp)->next = NULL;
5968 (*chainp)->symname = XSTR (x, 0);
5969 return plus_constant (aof_pic_label, offset);
5970}
5971
5972void
5973aof_dump_pic_table (f)
5974 FILE *f;
5975{
5976 struct pic_chain *chain;
5977
5978 if (aof_pic_chain == NULL)
5979 return;
5980
5981 fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
5982 reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
5983 reg_names[PIC_OFFSET_TABLE_REGNUM]);
5984 fputs ("|x$adcons|\n", f);
5985
5986 for (chain = aof_pic_chain; chain; chain = chain->next)
5987 {
5988 fputs ("\tDCD\t", f);
5989 assemble_name (f, chain->symname);
5990 fputs ("\n", f);
5991 }
5992}
5993
2b835d68
RE
5994int arm_text_section_count = 1;
5995
5996char *
84ed5e79 5997aof_text_section ()
2b835d68
RE
5998{
5999 static char buf[100];
2b835d68
RE
6000 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
6001 arm_text_section_count++);
6002 if (flag_pic)
6003 strcat (buf, ", PIC, REENTRANT");
6004 return buf;
6005}
6006
static int arm_data_section_count = 1;

/* Return the assembler directive opening a fresh, uniquely numbered
   data AREA.  The result lives in a static buffer that the next call
   overwrites.  */

char *
aof_data_section ()
{
  static char area_buf[100];

  sprintf (area_buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return area_buf;
}
6016
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */
6026
6027struct import
6028{
6029 struct import *next;
6030 char *name;
6031};
6032
6033static struct import *imports_list = NULL;
6034
6035void
6036aof_add_import (name)
6037 char *name;
6038{
6039 struct import *new;
6040
6041 for (new = imports_list; new; new = new->next)
6042 if (new->name == name)
6043 return;
6044
6045 new = (struct import *) xmalloc (sizeof (struct import));
6046 new->next = imports_list;
6047 imports_list = new;
6048 new->name = name;
6049}
6050
6051void
6052aof_delete_import (name)
6053 char *name;
6054{
6055 struct import **old;
6056
6057 for (old = &imports_list; *old; old = & (*old)->next)
6058 {
6059 if ((*old)->name == name)
6060 {
6061 *old = (*old)->next;
6062 return;
6063 }
6064 }
6065}
6066
6067int arm_main_function = 0;
6068
6069void
6070aof_dump_imports (f)
6071 FILE *f;
6072{
6073 /* The AOF assembler needs this to cause the startup code to be extracted
6074 from the library. Brining in __main causes the whole thing to work
6075 automagically. */
6076 if (arm_main_function)
6077 {
6078 text_section ();
6079 fputs ("\tIMPORT __main\n", f);
6080 fputs ("\tDCD __main\n", f);
6081 }
6082
6083 /* Now dump the remaining imports. */
6084 while (imports_list)
6085 {
6086 fprintf (f, "\tIMPORT\t");
6087 assemble_name (f, imports_list->name);
6088 fputc ('\n', f);
6089 imports_list = imports_list->next;
6090 }
6091}
6092#endif /* AOF_ASSEMBLER */
This page took 1.049935 seconds and 5 git commands to generate.