]> gcc.gnu.org Git - gcc.git/blame - gcc/config/arm/arm.c
Daily bump.
[gcc.git] / gcc / config / arm / arm.c
CommitLineData
cce8749e 1/* Output routines for GCC for ARM/RISCiX.
e5e809f4 2 Copyright (C) 1991, 93, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
cce8749e 3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
956d6950 4 and Martin Simmons (@harleqn.co.uk).
ff9940b0 5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
cce8749e
CH
6
7This file is part of GNU CC.
8
9GNU CC is free software; you can redistribute it and/or modify
10it under the terms of the GNU General Public License as published by
11the Free Software Foundation; either version 2, or (at your option)
12any later version.
13
14GNU CC is distributed in the hope that it will be useful,
15but WITHOUT ANY WARRANTY; without even the implied warranty of
16MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17GNU General Public License for more details.
18
19You should have received a copy of the GNU General Public License
20along with GNU CC; see the file COPYING. If not, write to
8fb289e7
RK
21the Free Software Foundation, 59 Temple Place - Suite 330,
22Boston, MA 02111-1307, USA. */
ff9940b0 23
56636818 24#include "config.h"
cce8749e 25#include <stdio.h>
f3bb6135 26#include <string.h>
cce8749e
CH
27#include "rtl.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "real.h"
31#include "insn-config.h"
32#include "conditions.h"
33#include "insn-flags.h"
34#include "output.h"
35#include "insn-attr.h"
36#include "flags.h"
af48348a 37#include "reload.h"
e2c671ba 38#include "tree.h"
bee06f3d 39#include "expr.h"
ad076f4e 40#include "toplev.h"
cce8749e
CH
41
42/* The maximum number of insns skipped which will be conditionalised if
43 possible. */
44#define MAX_INSNS_SKIPPED 5
45
46/* Some function declarations. */
cce8749e 47extern FILE *asm_out_file;
cce8749e 48
18af7313
RE
49static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
50static char *output_multi_immediate PROTO ((rtx *, char *, char *, int,
51 HOST_WIDE_INT));
2b835d68
RE
52static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
53 HOST_WIDE_INT, rtx, rtx, int, int));
18af7313
RE
54static int arm_naked_function_p PROTO ((tree));
55static void init_fpa_table PROTO ((void));
56static enum machine_mode select_dominance_cc_mode PROTO ((enum rtx_code, rtx,
57 rtx, HOST_WIDE_INT));
58static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode));
59static void dump_table PROTO ((rtx));
60static int fixit PROTO ((rtx, enum machine_mode, int));
61static rtx find_barrier PROTO ((rtx, int));
62static int broken_move PROTO ((rtx));
63static char *fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
64static int eliminate_lr2ip PROTO ((rtx *));
65static char *shift_op PROTO ((rtx, HOST_WIDE_INT *));
66static int pattern_really_clobbers_lr PROTO ((rtx));
67static int function_really_clobbers_lr PROTO ((rtx));
68static void emit_multi_reg_push PROTO ((int));
b111229a 69static void emit_sfm PROTO ((int, int));
18af7313 70static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
f3bb6135 71
ff9940b0
RE
72/* Define the information needed to generate branch insns. This is
73 stored from the compare operation. */
74
75rtx arm_compare_op0, arm_compare_op1;
76int arm_compare_fp;
77
78/* What type of cpu are we compiling for? */
ff9940b0
RE
79enum processor_type arm_cpu;
80
b111229a 81/* What type of floating point are we tuning for? */
bee06f3d
RE
82enum floating_point_type arm_fpu;
83
b111229a
RE
84/* What type of floating point instructions are available? */
85enum floating_point_type arm_fpu_arch;
86
2b835d68
RE
87/* What program mode is the cpu running in? 26-bit mode or 32-bit mode */
88enum prog_mode_type arm_prgmode;
89
b111229a
RE
90/* Set by the -mfp=... option */
91char *target_fp_name = NULL;
2b835d68
RE
92
93/* Nonzero if this is an "M" variant of the processor. */
94int arm_fast_multiply = 0;
95
32de079a 96/* Nonzero if this chip supports the ARM Architecture 4 extensions */
2b835d68
RE
97int arm_arch4 = 0;
98
b111229a
RE
99/* Set to the features we should tune the code for (multiply speed etc). */
100int tune_flags = 0;
101
cce8749e
CH
102/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
103 must report the mode of the memory reference from PRINT_OPERAND to
104 PRINT_OPERAND_ADDRESS. */
f3bb6135 105enum machine_mode output_memory_reference_mode;
cce8749e
CH
106
107/* Nonzero if the prologue must setup `fp'. */
108int current_function_anonymous_args;
109
32de079a
RE
110/* The register number to be used for the PIC offset register. */
111int arm_pic_register = 9;
112
cce8749e
CH
113/* Location counter of .text segment. */
114int arm_text_location = 0;
115
ff9940b0
RE
116/* Set to one if we think that lr is only saved because of subroutine calls,
117 but all of these can be `put after' return insns */
118int lr_save_eliminated;
119
ff9940b0
RE
120/* Set to 1 when a return insn is output, this means that the epilogue
121 is not needed. */
122
123static int return_used_this_function;
124
2b835d68
RE
125static int arm_constant_limit = 3;
126
cce8749e
CH
127/* For an explanation of these variables, see final_prescan_insn below. */
128int arm_ccfsm_state;
84ed5e79 129enum arm_cond_code arm_current_cc;
cce8749e
CH
130rtx arm_target_insn;
131int arm_target_label;
9997d19d
RE
132
/* The condition codes of the ARM, and the inverse function.
   NOTE(review): presumably indexed by enum arm_cond_code (see
   arm_current_cc above) with each entry at the index of its code,
   so that inversion is index ^ 1 -- confirm against arm.h.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};
139
84ed5e79 140static enum arm_cond_code get_arm_condition_code ();
2b835d68
RE
141
142\f
143/* Initialization code */
144
/* Command-line switches that can select a CPU or architecture.
   Entry 0 is seeded with the configure-time default by
   arm_override_options; the whole table is then scanned there.
   Fields per entry: user-supplied string (filled in by option
   parsing), switch prefix, set_tune_p, set_arch_p.  */
struct arm_cpu_select arm_select[4] =
{
  /* switch name, tune arch */
  { (char *)0, "--with-cpu=", 1, 1 },
  { (char *)0, "-mcpu=", 1, 1 },
  { (char *)0, "-march=", 0, 1 },  /* selects an architecture, not a tuning target */
  { (char *)0, "-mtune=", 1, 0 },
};
153
2b835d68
RE
154#define FL_CO_PROC 0x01 /* Has external co-processor bus */
155#define FL_FAST_MULT 0x02 /* Fast multiply */
156#define FL_MODE26 0x04 /* 26-bit mode support */
157#define FL_MODE32 0x08 /* 32-bit mode support */
158#define FL_ARCH4 0x10 /* Architecture rel 4 */
159#define FL_THUMB 0x20 /* Thumb aware */
32de079a 160
2b835d68
RE
/* Description of a supported -mcpu=/-march= argument: its name, the
   scheduling/processor class it maps to, and its FL_* capability flags.  */
struct processors
{
  char *name;
  enum processor_type type;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_procs[] =
{
  {"arm2", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm250", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm3", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm6", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm600", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm610", PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
  {"arm7", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  /* arm7m doesn't exist on its own, only in conjunction with D, (and I), but
     those don't alter the code, so it is sometimes known as the arm7m */
  {"arm7m", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
			     | FL_MODE26)},
  {"arm7dm", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
			      | FL_MODE26)},
  {"arm7dmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
			       | FL_MODE26)},
  {"arm700", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm710", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7100", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7500", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  /* Doesn't really have an external co-proc, but does have embedded fpu */
  {"arm7500fe", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7tdmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
				| FL_ARCH4 | FL_THUMB)},
  {"arm8", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
			    | FL_ARCH4)},
  {"arm810", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
			      | FL_ARCH4)},
  {"strongarm", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
				  | FL_ARCH4)},
  {"strongarm110", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
				     | FL_ARCH4)},
  /* Pure architecture names (PROCESSOR_NONE): valid for -march= only.  */
  {"armv2", PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
  {"armv2a", PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
  {"armv3", PROCESSOR_NONE, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"armv3m", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
			      | FL_MODE26)},
  {"armv4", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
			     | FL_MODE26 | FL_ARCH4)},
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now. */
  {"armv4t", PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
			      | FL_ARCH4)},
  {NULL, 0, 0}
};
216
217/* Fix up any incompatible options that the user has specified.
218 This has now turned into a maze. */
219void
220arm_override_options ()
221{
222 int arm_thumb_aware = 0;
bd9c7e23 223 int flags = 0;
ed4c4348 224 unsigned i;
bd9c7e23 225 struct arm_cpu_select *ptr;
32de079a
RE
226 static struct cpu_default {
227 int cpu;
228 char *name;
229 } cpu_defaults[] = {
230 { TARGET_CPU_arm2, "arm2" },
231 { TARGET_CPU_arm6, "arm6" },
232 { TARGET_CPU_arm610, "arm610" },
233 { TARGET_CPU_arm7dm, "arm7dm" },
234 { TARGET_CPU_arm7500fe, "arm7500fe" },
235 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
236 { TARGET_CPU_arm8, "arm8" },
237 { TARGET_CPU_arm810, "arm810" },
238 { TARGET_CPU_strongarm, "strongarm" },
239 { 0, 0 }
240 };
241 struct cpu_default *def;
242
243 /* Set the default. */
244 for (def = &cpu_defaults[0]; def->name; ++def)
245 if (def->cpu == TARGET_CPU_DEFAULT)
246 break;
247 if (! def->name)
248 abort ();
bd9c7e23 249
32de079a 250 arm_select[0].string = def->name;
bd9c7e23
RE
251
252 for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
253 {
254 ptr = &arm_select[i];
255 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
256 {
257 struct processors *sel;
258
259 for (sel = all_procs; sel->name != NULL; sel++)
260 if (! strcmp (ptr->string, sel->name))
261 {
b111229a
RE
262 /* -march= is the only flag that can take an architecture
263 type, so if we match when the tune bit is set, the
264 option was invalid. */
bd9c7e23 265 if (ptr->set_tune_p)
b111229a
RE
266 {
267 if (sel->type == PROCESSOR_NONE)
268 continue; /* Its an architecture, not a cpu */
269
270 arm_cpu = sel->type;
271 tune_flags = sel->flags;
272 }
bd9c7e23
RE
273
274 if (ptr->set_arch_p)
275 flags = sel->flags;
b111229a 276
bd9c7e23
RE
277 break;
278 }
279
280 if (sel->name == NULL)
281 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
282 }
283 }
2b835d68
RE
284
285 if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
286 warning ("-g with -fomit-frame-pointer may not give sensible debugging");
287
288 if (TARGET_POKE_FUNCTION_NAME)
289 target_flags |= ARM_FLAG_APCS_FRAME;
290
291 if (TARGET_6)
32de079a 292 warning ("Option '-m6' deprecated. Use: '-mapcs-32' or -mcpu=<proc>");
2b835d68
RE
293
294 if (TARGET_3)
32de079a 295 warning ("Option '-m3' deprecated. Use: '-mapcs-26' or -mcpu=<proc>");
2b835d68 296
2b835d68
RE
297 if (TARGET_APCS_REENT && flag_pic)
298 fatal ("-fpic and -mapcs-reent are incompatible");
299
300 if (TARGET_APCS_REENT)
32de079a
RE
301 warning ("APCS reentrant code not supported.");
302
303 /* If stack checking is disabled, we can use r10 as the PIC register,
304 which keeps r9 available. */
305 if (flag_pic && ! TARGET_APCS_STACK)
306 arm_pic_register = 10;
2b835d68 307
32de079a
RE
308 /* Well, I'm about to have a go, but pic is NOT going to be compatible
309 with APCS reentrancy, since that requires too much support in the
310 assembler and linker, and the ARMASM assembler seems to lack some
311 required directives. */
2b835d68 312 if (flag_pic)
b4b68717 313 warning ("Position independent code not supported");
2b835d68
RE
314
315 if (TARGET_APCS_FLOAT)
316 warning ("Passing floating point arguments in fp regs not yet supported");
317
318 if (TARGET_APCS_STACK && ! TARGET_APCS)
319 {
320 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
321 target_flags |= ARM_FLAG_APCS_FRAME;
322 }
323
b111229a 324 /* Default is to tune for an FPA */
2b835d68
RE
325 arm_fpu = FP_HARD;
326
bd9c7e23
RE
327 /* Default value for floating point code... if no co-processor
328 bus, then schedule for emulated floating point. Otherwise,
b111229a
RE
329 assume the user has an FPA.
330 Note: this does not prevent use of floating point instructions,
331 -msoft-float does that. */
ad076f4e 332 if ((tune_flags & FL_CO_PROC) == 0)
bd9c7e23 333 arm_fpu = FP_SOFT3;
b111229a 334
bd9c7e23
RE
335 arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
336 arm_arch4 = (flags & FL_ARCH4) != 0;
337 arm_thumb_aware = (flags & FL_THUMB) != 0;
2b835d68 338
b111229a 339 if (target_fp_name)
2b835d68 340 {
b111229a
RE
341 if (strcmp (target_fp_name, "2") == 0)
342 arm_fpu_arch = FP_SOFT2;
343 else if (strcmp (target_fp_name, "3") == 0)
344 arm_fpu_arch = FP_HARD;
2b835d68 345 else
b111229a
RE
346 fatal ("Invalid floating point emulation option: -mfpe=%s",
347 target_fp_name);
2b835d68 348 }
b111229a
RE
349 else
350 arm_fpu_arch = FP_DEFAULT;
2b835d68
RE
351
352 if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
353 {
354 warning ("This processor variant does not support Thumb interworking");
355 target_flags &= ~ARM_FLAG_THUMB;
356 }
357
358 if (TARGET_FPE && arm_fpu != FP_HARD)
359 arm_fpu = FP_SOFT2;
360
361 /* For arm2/3 there is no need to do any scheduling if there is only
362 a floating point emulator, or we are doing software floating-point. */
363 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
364 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
365
366 arm_prog_mode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
367}
cce8749e 368\f
32de079a 369
ff9940b0
RE
370/* Return 1 if it is possible to return using a single instruction */
371
372int
373use_return_insn ()
374{
375 int regno;
376
377 if (!reload_completed ||current_function_pretend_args_size
378 || current_function_anonymous_args
56636818
JL
379 || ((get_frame_size () + current_function_outgoing_args_size != 0)
380 && !(TARGET_APCS || frame_pointer_needed)))
ff9940b0
RE
381 return 0;
382
b111229a
RE
383 /* Can't be done if interworking with Thumb, and any registers have been
384 stacked */
385 if (TARGET_THUMB_INTERWORK)
386 for (regno = 0; regno < 16; regno++)
387 if (regs_ever_live[regno] && ! call_used_regs[regno])
388 return 0;
389
ff9940b0
RE
390 /* Can't be done if any of the FPU regs are pushed, since this also
391 requires an insn */
b111229a
RE
392 for (regno = 16; regno < 24; regno++)
393 if (regs_ever_live[regno] && ! call_used_regs[regno])
ff9940b0
RE
394 return 0;
395
31fdb4d5
DE
396 /* If a function is naked, don't use the "return" insn. */
397 if (arm_naked_function_p (current_function_decl))
398 return 0;
399
ff9940b0
RE
400 return 1;
401}
402
cce8749e
CH
403/* Return TRUE if int I is a valid immediate ARM constant. */
404
405int
406const_ok_for_arm (i)
ff9940b0 407 HOST_WIDE_INT i;
cce8749e 408{
ed4c4348 409 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
cce8749e 410
56636818
JL
411 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
412 be all zero, or all one. */
413 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
414 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
ed4c4348
RE
415 != ((~(unsigned HOST_WIDE_INT) 0)
416 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
56636818
JL
417 return FALSE;
418
e2c671ba
RE
419 /* Fast return for 0 and powers of 2 */
420 if ((i & (i - 1)) == 0)
421 return TRUE;
422
cce8749e
CH
423 do
424 {
abaa26e5 425 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
f3bb6135 426 return TRUE;
abaa26e5
RE
427 mask =
428 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
429 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
ed4c4348 430 } while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
cce8749e 431
f3bb6135
RE
432 return FALSE;
433}
cce8749e 434
e2c671ba
RE
435/* Return true if I is a valid constant for the operation CODE. */
436int
437const_ok_for_op (i, code, mode)
438 HOST_WIDE_INT i;
439 enum rtx_code code;
440 enum machine_mode mode;
441{
442 if (const_ok_for_arm (i))
443 return 1;
444
445 switch (code)
446 {
447 case PLUS:
448 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
449
450 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
451 case XOR:
452 case IOR:
453 return 0;
454
455 case AND:
456 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
457
458 default:
459 abort ();
460 }
461}
462
463/* Emit a sequence of insns to handle a large constant.
464 CODE is the code of the operation required, it can be any of SET, PLUS,
465 IOR, AND, XOR, MINUS;
466 MODE is the mode in which the operation is being performed;
467 VAL is the integer to operate on;
468 SOURCE is the other operand (a register, or a null-pointer for SET);
469 SUBTARGETS means it is safe to create scratch registers if that will
2b835d68
RE
470 either produce a simpler sequence, or we will want to cse the values.
471 Return value is the number of insns emitted. */
e2c671ba
RE
472
473int
474arm_split_constant (code, mode, val, target, source, subtargets)
475 enum rtx_code code;
476 enum machine_mode mode;
477 HOST_WIDE_INT val;
478 rtx target;
479 rtx source;
480 int subtargets;
2b835d68
RE
481{
482 if (subtargets || code == SET
483 || (GET_CODE (target) == REG && GET_CODE (source) == REG
484 && REGNO (target) != REGNO (source)))
485 {
2b835d68
RE
486 if (arm_gen_constant (code, mode, val, target, source, 1, 0)
487 > arm_constant_limit + (code != SET))
488 {
489 if (code == SET)
490 {
491 /* Currently SET is the only monadic value for CODE, all
492 the rest are diadic. */
493 emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
494 return 1;
495 }
496 else
497 {
498 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
499
500 emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
501 /* For MINUS, the value is subtracted from, since we never
502 have subtraction of a constant. */
503 if (code == MINUS)
504 emit_insn (gen_rtx (SET, VOIDmode, target,
505 gen_rtx (code, mode, temp, source)));
506 else
507 emit_insn (gen_rtx (SET, VOIDmode, target,
508 gen_rtx (code, mode, source, temp)));
509 return 2;
510 }
511 }
512 }
513
514 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
515}
516
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation (the function then only counts the insns that would
   be needed, which arm_split_constant uses as a cost estimate).  */
int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      /* x | ~0 == ~0: just load the all-ones value.  */
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      /* x | 0 == x: a plain copy (or nothing at all).  */
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      /* x & 0 == 0.  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
	  return 1;
	}
      /* x & ~0 == x.  */
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      /* x ^ 0 == x.  */
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      /* x ^ ~0 == ~x.  */
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (NOT, mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (NEG, mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (MINUS, mode, GEN_INT (val), source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx (SET, VOIDmode, target,
			    (source ? gen_rtx (code, mode, source,
					       GEN_INT (val))
			     : GEN_INT (val))));
      return 1;
    }


  /* Calculate a few attributes that may be useful for specific
     optimizations: the lengths of the runs of clear/set bits at the
     top and bottom of the 32-bit value.  */

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good, way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx (SET, VOIDmode, new_src,
				      GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx (SET, VOIDmode, new_src,
				      GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      /* low half ORed with itself shifted left by i reproduces
		 the whole value: build low half, then OR in the shift.  */
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && ! const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx (SET, VOIDmode, target,
					gen_rtx (IOR, mode,
						 gen_rtx (ASHIFT, mode, source,
							  GEN_INT (i)),
						 source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      /* Same trick with the high half and a right shift.  */
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && ! const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx (SET, VOIDmode, target,
					gen_rtx (IOR, mode,
						 gen_rtx (LSHIFTRT, mode,
							  source, GEN_INT (i)),
						 source)));
		  return insns + 1;
		}
	    }
	}
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0 */
	  || (reload_completed && ! reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx (SET, VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx (SET, VOIDmode, target,
				      gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      /* IOR of a constant that is all ones from some bit up: shift the
	 low (non-constant) bits off the top and back, filling with the
	 inverted pattern.  */
      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode,
					   gen_rtx (ASHIFT, mode, source,
						    shift))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode,
					   gen_rtx (LSHIFTRT, mode, sub,
						    shift))));
	    }
	  return 2;
	}

      /* Mirror case: constant is all ones from some bit down.  */
      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode,
					   gen_rtx (LSHIFTRT, mode, source,
						    shift))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode,
					   gen_rtx (ASHIFT, mode, sub,
						    shift))));
	    }
	  return 2;
	}

      /* x | c == ~(~x & ~c): three insns when ~c is a valid immediate.  */
      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (AND, mode, source,
					   GEN_INT (temp1))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  /* Mask with the top clear_sign_bit_copies bits set; shifting
	     up and back down clears exactly those bits.  */
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  /* Mirror case: clear the low clear_zero_bit_copies bits by
	     shifting down and back up.  */
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  /* If working on the complement (or negation, for PLUS) requires fewer
     set bits, synthesize that instead; otherwise forget the invert/negate
     options so the emission loop below uses the value directly.  */
  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (! (remainder & (3 << i)))
	  {
	    while ((i < 32) && ! (remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    /* Peel off an 8-bit chunk ending just below bit i (wrapping
	       past bit 0 if necessary) -- exactly what one ARM immediate
	       can encode.  */
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src;

		if (code == SET)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (subtargets
						 ? gen_reg_rtx (mode)
						 : target),
				      GEN_INT (can_invert ? ~temp1 : temp1)));
		else if (code == MINUS)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (subtargets
						 ? gen_reg_rtx (mode)
						 : target),
				      gen_rtx (code, mode, GEN_INT (temp1),
					       source)));
		else
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (remainder
						 ? (subtargets
						    ? gen_reg_rtx (mode)
						    : target)
						 : target),
				      gen_rtx (code, mode, source,
					       GEN_INT (can_invert ? ~temp1
							: (can_negate
							   ? -temp1
							   : temp1)))));
		source = new_src;
	      }

	    /* After the first chunk a SET (or MINUS) degenerates into
	       adding the remaining chunks.  */
	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      } while (remainder);
  }
  return insns;
}
1059
bd9c7e23
RE
1060/* Canonicalize a comparison so that we are more likely to recognize it.
1061 This can be done for a few constant compares, where we can make the
1062 immediate value easier to load. */
1063enum rtx_code
1064arm_canonicalize_comparison (code, op1)
1065 enum rtx_code code;
1066 rtx *op1;
1067{
ad076f4e 1068 unsigned HOST_WIDE_INT i = INTVAL (*op1);
bd9c7e23
RE
1069
1070 switch (code)
1071 {
1072 case EQ:
1073 case NE:
1074 return code;
1075
1076 case GT:
1077 case LE:
ad076f4e
RE
1078 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
1079 - 1)
bd9c7e23
RE
1080 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1081 {
1082 *op1 = GEN_INT (i+1);
1083 return code == GT ? GE : LT;
1084 }
1085 break;
1086
1087 case GE:
1088 case LT:
ad076f4e 1089 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
bd9c7e23
RE
1090 && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
1091 {
1092 *op1 = GEN_INT (i-1);
1093 return code == GE ? GT : LE;
1094 }
1095 break;
1096
1097 case GTU:
1098 case LEU:
ad076f4e 1099 if (i != ~((unsigned HOST_WIDE_INT) 0)
bd9c7e23
RE
1100 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1101 {
1102 *op1 = GEN_INT (i + 1);
1103 return code == GTU ? GEU : LTU;
1104 }
1105 break;
1106
1107 case GEU:
1108 case LTU:
1109 if (i != 0
1110 && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
1111 {
1112 *op1 = GEN_INT (i - 1);
1113 return code == GEU ? GTU : LEU;
1114 }
1115 break;
1116
1117 default:
1118 abort ();
1119 }
1120
1121 return code;
1122}
1123
1124
2b835d68
RE
1125/* Handle aggregates that are not laid out in a BLKmode element.
1126 This is a sub-element of RETURN_IN_MEMORY. */
1127int
1128arm_return_in_memory (type)
1129 tree type;
1130{
1131 if (TREE_CODE (type) == RECORD_TYPE)
1132 {
1133 tree field;
1134
1135 /* For a struct, we can return in a register if every element was a
1136 bit-field. */
1137 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1138 if (TREE_CODE (field) != FIELD_DECL
1139 || ! DECL_BIT_FIELD_TYPE (field))
1140 return 1;
1141
1142 return 0;
1143 }
1144 else if (TREE_CODE (type) == UNION_TYPE)
1145 {
1146 tree field;
1147
1148 /* Unions can be returned in registers if every element is
1149 integral, or can be returned in an integer register. */
1150 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1151 {
1152 if (TREE_CODE (field) != FIELD_DECL
1153 || (AGGREGATE_TYPE_P (TREE_TYPE (field))
1154 && RETURN_IN_MEMORY (TREE_TYPE (field)))
1155 || FLOAT_TYPE_P (TREE_TYPE (field)))
1156 return 1;
1157 }
1158 return 0;
1159 }
1160 /* XXX Not sure what should be done for other aggregates, so put them in
1161 memory. */
1162 return 1;
1163}
1164
32de079a
RE
1165int
1166legitimate_pic_operand_p (x)
1167 rtx x;
1168{
1169 if (CONSTANT_P (x) && flag_pic
1170 && (GET_CODE (x) == SYMBOL_REF
1171 || (GET_CODE (x) == CONST
1172 && GET_CODE (XEXP (x, 0)) == PLUS
1173 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1174 return 0;
1175
1176 return 1;
1177}
1178
1179rtx
1180legitimize_pic_address (orig, mode, reg)
1181 rtx orig;
1182 enum machine_mode mode;
1183 rtx reg;
1184{
1185 if (GET_CODE (orig) == SYMBOL_REF)
1186 {
1187 rtx pic_ref, address;
1188 rtx insn;
1189 int subregs = 0;
1190
1191 if (reg == 0)
1192 {
1193 if (reload_in_progress || reload_completed)
1194 abort ();
1195 else
1196 reg = gen_reg_rtx (Pmode);
1197
1198 subregs = 1;
1199 }
1200
1201#ifdef AOF_ASSEMBLER
1202 /* The AOF assembler can generate relocations for these directly, and
1203 understands that the PIC register has to be added into the offset.
1204 */
1205 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
1206#else
1207 if (subregs)
1208 address = gen_reg_rtx (Pmode);
1209 else
1210 address = reg;
1211
1212 emit_insn (gen_pic_load_addr (address, orig));
1213
1214 pic_ref = gen_rtx (MEM, Pmode,
1215 gen_rtx (PLUS, Pmode, pic_offset_table_rtx, address));
1216 RTX_UNCHANGING_P (pic_ref) = 1;
1217 insn = emit_move_insn (reg, pic_ref);
1218#endif
1219 current_function_uses_pic_offset_table = 1;
1220 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1221 by loop. */
1222 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, orig,
1223 REG_NOTES (insn));
1224 return reg;
1225 }
1226 else if (GET_CODE (orig) == CONST)
1227 {
1228 rtx base, offset;
1229
1230 if (GET_CODE (XEXP (orig, 0)) == PLUS
1231 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
1232 return orig;
1233
1234 if (reg == 0)
1235 {
1236 if (reload_in_progress || reload_completed)
1237 abort ();
1238 else
1239 reg = gen_reg_rtx (Pmode);
1240 }
1241
1242 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1243 {
1244 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
1245 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1246 base == reg ? 0 : reg);
1247 }
1248 else
1249 abort ();
1250
1251 if (GET_CODE (offset) == CONST_INT)
1252 {
1253 /* The base register doesn't really matter, we only want to
1254 test the index for the appropriate mode. */
1255 GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
1256
1257 if (! reload_in_progress && ! reload_completed)
1258 offset = force_reg (Pmode, offset);
1259 else
1260 abort ();
1261
1262 win:
1263 if (GET_CODE (offset) == CONST_INT)
1264 return plus_constant_for_output (base, INTVAL (offset));
1265 }
1266
1267 if (GET_MODE_SIZE (mode) > 4
1268 && (GET_MODE_CLASS (mode) == MODE_INT
1269 || TARGET_SOFT_FLOAT))
1270 {
1271 emit_insn (gen_addsi3 (reg, base, offset));
1272 return reg;
1273 }
1274
1275 return gen_rtx (PLUS, Pmode, base, offset);
1276 }
1277 else if (GET_CODE (orig) == LABEL_REF)
1278 current_function_uses_pic_offset_table = 1;
1279
1280 return orig;
1281}
1282
1283static rtx pic_rtx;
1284
1285int
1286is_pic(x)
1287 rtx x;
1288{
1289 if (x == pic_rtx)
1290 return 1;
1291 return 0;
1292}
1293
1294void
1295arm_finalize_pic ()
1296{
1297#ifndef AOF_ASSEMBLER
1298 rtx l1, pic_tmp, pic_tmp2, seq;
1299 rtx global_offset_table;
1300
1301 if (current_function_uses_pic_offset_table == 0)
1302 return;
1303
1304 if (! flag_pic)
1305 abort ();
1306
1307 start_sequence ();
1308 l1 = gen_label_rtx ();
1309
1310 global_offset_table = gen_rtx (SYMBOL_REF, Pmode, "_GLOBAL_OFFSET_TABLE_");
956d6950
JL
1311 /* The PC contains 'dot'+8, but the label L1 is on the next
1312 instruction, so the offset is only 'dot'+4. */
32de079a
RE
1313 pic_tmp = gen_rtx (CONST, VOIDmode,
1314 gen_rtx (PLUS, Pmode,
1315 gen_rtx (LABEL_REF, VOIDmode, l1),
956d6950 1316 GEN_INT (4)));
32de079a
RE
1317 pic_tmp2 = gen_rtx (CONST, VOIDmode,
1318 gen_rtx (PLUS, Pmode,
1319 global_offset_table,
1320 pc_rtx));
1321
1322 pic_rtx = gen_rtx (CONST, Pmode,
1323 gen_rtx (MINUS, Pmode, pic_tmp2, pic_tmp));
1324
1325 emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
1326 emit_jump_insn (gen_pic_add_dot_plus_eight(l1, pic_offset_table_rtx));
1327 emit_label (l1);
1328
1329 seq = gen_sequence ();
1330 end_sequence ();
1331 emit_insn_after (seq, get_insns ());
1332
1333 /* Need to emit this whether or not we obey regdecls,
1334 since setjmp/longjmp can cause life info to screw up. */
1335 emit_insn (gen_rtx (USE, VOIDmode, pic_offset_table_rtx));
1336#endif /* AOF_ASSEMBLER */
1337}
1338
e2c671ba
RE
/* Nonzero if X is a REG, or a SUBREG of a REG.  */
#define REG_OR_SUBREG_REG(X) \
  (GET_CODE (X) == REG \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

/* The underlying REG of X; X must satisfy REG_OR_SUBREG_REG.  */
#define REG_OR_SUBREG_RTX(X) \
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

/* Nonzero if X is one of the frame-related hard register rtxes.  */
#define ARM_FRAME_RTX(X) \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
   || (X) == arg_pointer_rtx)
1349
1350int
1351arm_rtx_costs (x, code, outer_code)
1352 rtx x;
1353 enum rtx_code code, outer_code;
1354{
1355 enum machine_mode mode = GET_MODE (x);
1356 enum rtx_code subcode;
1357 int extra_cost;
1358
1359 switch (code)
1360 {
1361 case MEM:
1362 /* Memory costs quite a lot for the first word, but subsequent words
1363 load at the equivalent of a single insn each. */
1364 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
1365 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
1366
1367 case DIV:
1368 case MOD:
1369 return 100;
1370
1371 case ROTATE:
1372 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
1373 return 4;
1374 /* Fall through */
1375 case ROTATERT:
1376 if (mode != SImode)
1377 return 8;
1378 /* Fall through */
1379 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
1380 if (mode == DImode)
1381 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
1382 + ((GET_CODE (XEXP (x, 0)) == REG
1383 || (GET_CODE (XEXP (x, 0)) == SUBREG
1384 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1385 ? 0 : 8));
1386 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
1387 || (GET_CODE (XEXP (x, 0)) == SUBREG
1388 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1389 ? 0 : 4)
1390 + ((GET_CODE (XEXP (x, 1)) == REG
1391 || (GET_CODE (XEXP (x, 1)) == SUBREG
1392 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
1393 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
1394 ? 0 : 4));
1395
1396 case MINUS:
1397 if (mode == DImode)
1398 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
1399 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1400 || (GET_CODE (XEXP (x, 0)) == CONST_INT
1401 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
1402 ? 0 : 8));
1403
1404 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1405 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1406 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1407 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1408 ? 0 : 8)
1409 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1410 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
1411 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
1412 ? 0 : 8));
1413
1414 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
1415 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
1416 && REG_OR_SUBREG_REG (XEXP (x, 1))))
1417 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
1418 || subcode == ASHIFTRT || subcode == LSHIFTRT
1419 || subcode == ROTATE || subcode == ROTATERT
1420 || (subcode == MULT
1421 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
1422 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
1423 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
1424 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
1425 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
1426 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
1427 && REG_OR_SUBREG_REG (XEXP (x, 0))))
1428 return 1;
1429 /* Fall through */
1430
1431 case PLUS:
1432 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1433 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1434 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1435 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1436 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1437 ? 0 : 8));
1438
1439 /* Fall through */
1440 case AND: case XOR: case IOR:
1441 extra_cost = 0;
1442
1443 /* Normally the frame registers will be spilt into reg+const during
1444 reload, so it is a bad idea to combine them with other instructions,
1445 since then they might not be moved outside of loops. As a compromise
1446 we allow integration with ops that have a constant as their second
1447 operand. */
1448 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
1449 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
1450 && GET_CODE (XEXP (x, 1)) != CONST_INT)
1451 || (REG_OR_SUBREG_REG (XEXP (x, 0))
1452 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
1453 extra_cost = 4;
1454
1455 if (mode == DImode)
1456 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1457 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1458 || (GET_CODE (XEXP (x, 1)) == CONST_INT
1459 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
1460 ? 0 : 8));
1461
1462 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
1463 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
1464 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1465 || (GET_CODE (XEXP (x, 1)) == CONST_INT
1466 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
1467 ? 0 : 4));
1468
1469 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
1470 return (1 + extra_cost
1471 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
1472 || subcode == LSHIFTRT || subcode == ASHIFTRT
1473 || subcode == ROTATE || subcode == ROTATERT
1474 || (subcode == MULT
1475 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1476 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
ad076f4e 1477 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
e2c671ba
RE
1478 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
1479 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
ad076f4e 1480 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
e2c671ba
RE
1481 ? 0 : 4));
1482
1483 return 8;
1484
1485 case MULT:
b111229a
RE
1486 /* There is no point basing this on the tuning, since it is always the
1487 fast variant if it exists at all */
2b835d68
RE
1488 if (arm_fast_multiply && mode == DImode
1489 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
1490 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
1491 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
1492 return 8;
1493
e2c671ba
RE
1494 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1495 || mode == DImode)
1496 return 30;
1497
1498 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1499 {
2b835d68
RE
1500 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
1501 & (unsigned HOST_WIDE_INT) 0xffffffff);
e2c671ba
RE
1502 int add_cost = const_ok_for_arm (i) ? 4 : 8;
1503 int j;
b111229a
RE
1504 /* Tune as appropriate */
1505 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2b835d68
RE
1506
1507 for (j = 0; i && j < 32; j += booth_unit_size)
e2c671ba 1508 {
2b835d68 1509 i >>= booth_unit_size;
e2c671ba
RE
1510 add_cost += 2;
1511 }
1512
1513 return add_cost;
1514 }
1515
b111229a 1516 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2b835d68 1517 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
e2c671ba
RE
1518 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
1519
56636818
JL
1520 case TRUNCATE:
1521 if (arm_fast_multiply && mode == SImode
1522 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
1523 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
1524 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1525 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
1526 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
1527 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
1528 return 8;
1529 return 99;
1530
e2c671ba
RE
1531 case NEG:
1532 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1533 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
1534 /* Fall through */
1535 case NOT:
1536 if (mode == DImode)
1537 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1538
1539 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1540
1541 case IF_THEN_ELSE:
1542 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
1543 return 14;
1544 return 2;
1545
1546 case COMPARE:
1547 return 1;
1548
1549 case ABS:
1550 return 4 + (mode == DImode ? 4 : 0);
1551
1552 case SIGN_EXTEND:
1553 if (GET_MODE (XEXP (x, 0)) == QImode)
1554 return (4 + (mode == DImode ? 4 : 0)
1555 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1556 /* Fall through */
1557 case ZERO_EXTEND:
1558 switch (GET_MODE (XEXP (x, 0)))
1559 {
1560 case QImode:
1561 return (1 + (mode == DImode ? 4 : 0)
1562 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1563
1564 case HImode:
1565 return (4 + (mode == DImode ? 4 : 0)
1566 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1567
1568 case SImode:
1569 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
ad076f4e
RE
1570
1571 default:
1572 break;
e2c671ba
RE
1573 }
1574 abort ();
1575
1576 default:
1577 return 99;
1578 }
1579}
32de079a
RE
1580
1581int
1582arm_adjust_cost (insn, link, dep, cost)
1583 rtx insn;
1584 rtx link;
1585 rtx dep;
1586 int cost;
1587{
1588 rtx i_pat, d_pat;
1589
1590 if ((i_pat = single_set (insn)) != NULL
1591 && GET_CODE (SET_SRC (i_pat)) == MEM
1592 && (d_pat = single_set (dep)) != NULL
1593 && GET_CODE (SET_DEST (d_pat)) == MEM)
1594 {
1595 /* This is a load after a store, there is no conflict if the load reads
1596 from a cached area. Assume that loads from the stack, and from the
1597 constant pool are cached, and that others will miss. This is a
1598 hack. */
1599
1600/* debug_rtx (insn);
1601 debug_rtx (dep);
1602 debug_rtx (link);
1603 fprintf (stderr, "costs %d\n", cost); */
1604
1605 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
1606 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1607 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1608 || reg_mentioned_p (hard_frame_pointer_rtx,
1609 XEXP (SET_SRC (i_pat), 0)))
1610 {
1611/* fprintf (stderr, "***** Now 1\n"); */
1612 return 1;
1613 }
1614 }
1615
1616 return cost;
1617}
1618
ff9940b0
RE
1619/* This code has been fixed for cross compilation. */
1620
1621static int fpa_consts_inited = 0;
1622
1623char *strings_fpa[8] = {
2b835d68
RE
1624 "0", "1", "2", "3",
1625 "4", "5", "0.5", "10"
1626};
ff9940b0
RE
1627
1628static REAL_VALUE_TYPE values_fpa[8];
1629
1630static void
1631init_fpa_table ()
1632{
1633 int i;
1634 REAL_VALUE_TYPE r;
1635
1636 for (i = 0; i < 8; i++)
1637 {
1638 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
1639 values_fpa[i] = r;
1640 }
f3bb6135 1641
ff9940b0
RE
1642 fpa_consts_inited = 1;
1643}
1644
cce8749e
CH
1645/* Return TRUE if rtx X is a valid immediate FPU constant. */
1646
1647int
1648const_double_rtx_ok_for_fpu (x)
1649 rtx x;
1650{
ff9940b0
RE
1651 REAL_VALUE_TYPE r;
1652 int i;
1653
1654 if (!fpa_consts_inited)
1655 init_fpa_table ();
1656
1657 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1658 if (REAL_VALUE_MINUS_ZERO (r))
1659 return 0;
f3bb6135 1660
ff9940b0
RE
1661 for (i = 0; i < 8; i++)
1662 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1663 return 1;
f3bb6135 1664
ff9940b0 1665 return 0;
f3bb6135 1666}
ff9940b0
RE
1667
1668/* Return TRUE if rtx X is a valid immediate FPU constant. */
1669
1670int
1671neg_const_double_rtx_ok_for_fpu (x)
1672 rtx x;
1673{
1674 REAL_VALUE_TYPE r;
1675 int i;
1676
1677 if (!fpa_consts_inited)
1678 init_fpa_table ();
1679
1680 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1681 r = REAL_VALUE_NEGATE (r);
1682 if (REAL_VALUE_MINUS_ZERO (r))
1683 return 0;
f3bb6135 1684
ff9940b0
RE
1685 for (i = 0; i < 8; i++)
1686 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1687 return 1;
f3bb6135 1688
ff9940b0 1689 return 0;
f3bb6135 1690}
cce8749e
CH
1691\f
1692/* Predicates for `match_operand' and `match_operator'. */
1693
ff9940b0 1694/* s_register_operand is the same as register_operand, but it doesn't accept
56a38cec
DE
1695 (SUBREG (MEM)...).
1696
1697 This function exists because at the time it was put in it led to better
1698 code. SUBREG(MEM) always needs a reload in the places where
1699 s_register_operand is used, and this seemed to lead to excessive
1700 reloading. */
ff9940b0
RE
1701
1702int
1703s_register_operand (op, mode)
1704 register rtx op;
1705 enum machine_mode mode;
1706{
1707 if (GET_MODE (op) != mode && mode != VOIDmode)
1708 return 0;
1709
1710 if (GET_CODE (op) == SUBREG)
f3bb6135 1711 op = SUBREG_REG (op);
ff9940b0
RE
1712
1713 /* We don't consider registers whose class is NO_REGS
1714 to be a register operand. */
1715 return (GET_CODE (op) == REG
1716 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1717 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1718}
1719
e2c671ba
RE
1720/* Only accept reg, subreg(reg), const_int. */
1721
1722int
1723reg_or_int_operand (op, mode)
1724 register rtx op;
1725 enum machine_mode mode;
1726{
1727 if (GET_CODE (op) == CONST_INT)
1728 return 1;
1729
1730 if (GET_MODE (op) != mode && mode != VOIDmode)
1731 return 0;
1732
1733 if (GET_CODE (op) == SUBREG)
1734 op = SUBREG_REG (op);
1735
1736 /* We don't consider registers whose class is NO_REGS
1737 to be a register operand. */
1738 return (GET_CODE (op) == REG
1739 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1740 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1741}
1742
ff9940b0
RE
1743/* Return 1 if OP is an item in memory, given that we are in reload. */
1744
1745int
1746reload_memory_operand (op, mode)
1747 rtx op;
1748 enum machine_mode mode;
1749{
1750 int regno = true_regnum (op);
1751
1752 return (! CONSTANT_P (op)
1753 && (regno == -1
1754 || (GET_CODE (op) == REG
1755 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1756}
1757
4d818c85
RE
1758/* Return 1 if OP is a valid memory address, but not valid for a signed byte
1759 memory access (architecture V4) */
1760int
1761bad_signed_byte_operand (op, mode)
1762 rtx op;
1763 enum machine_mode mode;
1764{
1765 if (! memory_operand (op, mode) || GET_CODE (op) != MEM)
1766 return 0;
1767
1768 op = XEXP (op, 0);
1769
1770 /* A sum of anything more complex than reg + reg or reg + const is bad */
1771 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
1772 && ! s_register_operand (XEXP (op, 0), VOIDmode))
1773 return 1;
1774
1775 /* Big constants are also bad */
1776 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
1777 && (INTVAL (XEXP (op, 1)) > 0xff
1778 || -INTVAL (XEXP (op, 1)) > 0xff))
1779 return 1;
1780
1781 /* Everything else is good, or can will automatically be made so. */
1782 return 0;
1783}
1784
cce8749e
CH
1785/* Return TRUE for valid operands for the rhs of an ARM instruction. */
1786
1787int
1788arm_rhs_operand (op, mode)
1789 rtx op;
1790 enum machine_mode mode;
1791{
ff9940b0 1792 return (s_register_operand (op, mode)
cce8749e 1793 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
f3bb6135 1794}
cce8749e 1795
ff9940b0
RE
1796/* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
1797 */
1798
1799int
1800arm_rhsm_operand (op, mode)
1801 rtx op;
1802 enum machine_mode mode;
1803{
1804 return (s_register_operand (op, mode)
1805 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
1806 || memory_operand (op, mode));
f3bb6135 1807}
ff9940b0
RE
1808
1809/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
1810 constant that is valid when negated. */
1811
1812int
1813arm_add_operand (op, mode)
1814 rtx op;
1815 enum machine_mode mode;
1816{
1817 return (s_register_operand (op, mode)
1818 || (GET_CODE (op) == CONST_INT
1819 && (const_ok_for_arm (INTVAL (op))
1820 || const_ok_for_arm (-INTVAL (op)))));
f3bb6135 1821}
ff9940b0
RE
1822
1823int
1824arm_not_operand (op, mode)
1825 rtx op;
1826 enum machine_mode mode;
1827{
1828 return (s_register_operand (op, mode)
1829 || (GET_CODE (op) == CONST_INT
1830 && (const_ok_for_arm (INTVAL (op))
1831 || const_ok_for_arm (~INTVAL (op)))));
f3bb6135 1832}
ff9940b0 1833
5165176d
RE
1834/* Return TRUE if the operand is a memory reference which contains an
1835 offsettable address. */
1836int
1837offsettable_memory_operand (op, mode)
1838 register rtx op;
1839 enum machine_mode mode;
1840{
1841 if (mode == VOIDmode)
1842 mode = GET_MODE (op);
1843
1844 return (mode == GET_MODE (op)
1845 && GET_CODE (op) == MEM
1846 && offsettable_address_p (reload_completed | reload_in_progress,
1847 mode, XEXP (op, 0)));
1848}
1849
1850/* Return TRUE if the operand is a memory reference which is, or can be
1851 made word aligned by adjusting the offset. */
1852int
1853alignable_memory_operand (op, mode)
1854 register rtx op;
1855 enum machine_mode mode;
1856{
1857 rtx reg;
1858
1859 if (mode == VOIDmode)
1860 mode = GET_MODE (op);
1861
1862 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
1863 return 0;
1864
1865 op = XEXP (op, 0);
1866
1867 return ((GET_CODE (reg = op) == REG
1868 || (GET_CODE (op) == SUBREG
1869 && GET_CODE (reg = SUBREG_REG (op)) == REG)
1870 || (GET_CODE (op) == PLUS
1871 && GET_CODE (XEXP (op, 1)) == CONST_INT
1872 && (GET_CODE (reg = XEXP (op, 0)) == REG
1873 || (GET_CODE (XEXP (op, 0)) == SUBREG
1874 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
1875 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
1876}
1877
b111229a
RE
1878/* Similar to s_register_operand, but does not allow hard integer
1879 registers. */
1880int
1881f_register_operand (op, mode)
1882 register rtx op;
1883 enum machine_mode mode;
1884{
1885 if (GET_MODE (op) != mode && mode != VOIDmode)
1886 return 0;
1887
1888 if (GET_CODE (op) == SUBREG)
1889 op = SUBREG_REG (op);
1890
1891 /* We don't consider registers whose class is NO_REGS
1892 to be a register operand. */
1893 return (GET_CODE (op) == REG
1894 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1895 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
1896}
1897
cce8749e
CH
1898/* Return TRUE for valid operands for the rhs of an FPU instruction. */
1899
1900int
1901fpu_rhs_operand (op, mode)
1902 rtx op;
1903 enum machine_mode mode;
1904{
ff9940b0 1905 if (s_register_operand (op, mode))
f3bb6135 1906 return TRUE;
cce8749e
CH
1907 else if (GET_CODE (op) == CONST_DOUBLE)
1908 return (const_double_rtx_ok_for_fpu (op));
f3bb6135
RE
1909
1910 return FALSE;
1911}
cce8749e 1912
ff9940b0
RE
1913int
1914fpu_add_operand (op, mode)
1915 rtx op;
1916 enum machine_mode mode;
1917{
1918 if (s_register_operand (op, mode))
f3bb6135 1919 return TRUE;
ff9940b0 1920 else if (GET_CODE (op) == CONST_DOUBLE)
f3bb6135
RE
1921 return (const_double_rtx_ok_for_fpu (op)
1922 || neg_const_double_rtx_ok_for_fpu (op));
1923
1924 return FALSE;
ff9940b0
RE
1925}
1926
cce8749e
CH
1927/* Return nonzero if OP is a constant power of two. */
1928
1929int
1930power_of_two_operand (op, mode)
1931 rtx op;
1932 enum machine_mode mode;
1933{
1934 if (GET_CODE (op) == CONST_INT)
1935 {
f3bb6135
RE
1936 HOST_WIDE_INT value = INTVAL(op);
1937 return value != 0 && (value & (value - 1)) == 0;
cce8749e 1938 }
f3bb6135
RE
1939 return FALSE;
1940}
cce8749e
CH
1941
1942/* Return TRUE for a valid operand of a DImode operation.
ff9940b0
RE
1943 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1944 Note that this disallows MEM(REG+REG), but allows
1945 MEM(PRE/POST_INC/DEC(REG)). */
cce8749e
CH
1946
1947int
1948di_operand (op, mode)
1949 rtx op;
1950 enum machine_mode mode;
1951{
ff9940b0 1952 if (s_register_operand (op, mode))
f3bb6135 1953 return TRUE;
cce8749e
CH
1954
1955 switch (GET_CODE (op))
1956 {
1957 case CONST_DOUBLE:
1958 case CONST_INT:
f3bb6135
RE
1959 return TRUE;
1960
cce8749e 1961 case MEM:
f3bb6135
RE
1962 return memory_address_p (DImode, XEXP (op, 0));
1963
cce8749e 1964 default:
f3bb6135 1965 return FALSE;
cce8749e 1966 }
f3bb6135 1967}
cce8749e 1968
f3139301
DE
1969/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
1970 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1971 Note that this disallows MEM(REG+REG), but allows
1972 MEM(PRE/POST_INC/DEC(REG)). */
1973
1974int
1975soft_df_operand (op, mode)
1976 rtx op;
1977 enum machine_mode mode;
1978{
1979 if (s_register_operand (op, mode))
1980 return TRUE;
1981
1982 switch (GET_CODE (op))
1983 {
1984 case CONST_DOUBLE:
1985 return TRUE;
1986
1987 case MEM:
1988 return memory_address_p (DFmode, XEXP (op, 0));
1989
1990 default:
1991 return FALSE;
1992 }
1993}
1994
cce8749e
CH
1995/* Return TRUE for valid index operands. */
1996
1997int
1998index_operand (op, mode)
1999 rtx op;
2000 enum machine_mode mode;
2001{
ff9940b0
RE
2002 return (s_register_operand(op, mode)
2003 || (immediate_operand (op, mode)
2004 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
f3bb6135 2005}
cce8749e 2006
ff9940b0
RE
2007/* Return TRUE for valid shifts by a constant. This also accepts any
2008 power of two on the (somewhat overly relaxed) assumption that the
2009 shift operator in this case was a mult. */
2010
2011int
2012const_shift_operand (op, mode)
2013 rtx op;
2014 enum machine_mode mode;
2015{
2016 return (power_of_two_operand (op, mode)
2017 || (immediate_operand (op, mode)
2018 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
f3bb6135 2019}
ff9940b0 2020
cce8749e
CH
2021/* Return TRUE for arithmetic operators which can be combined with a multiply
2022 (shift). */
2023
2024int
2025shiftable_operator (x, mode)
2026 rtx x;
2027 enum machine_mode mode;
2028{
2029 if (GET_MODE (x) != mode)
2030 return FALSE;
2031 else
2032 {
2033 enum rtx_code code = GET_CODE (x);
2034
2035 return (code == PLUS || code == MINUS
2036 || code == IOR || code == XOR || code == AND);
2037 }
f3bb6135 2038}
cce8749e
CH
2039
2040/* Return TRUE for shift operators. */
2041
2042int
2043shift_operator (x, mode)
2044 rtx x;
2045 enum machine_mode mode;
2046{
2047 if (GET_MODE (x) != mode)
2048 return FALSE;
2049 else
2050 {
2051 enum rtx_code code = GET_CODE (x);
2052
ff9940b0
RE
2053 if (code == MULT)
2054 return power_of_two_operand (XEXP (x, 1));
f3bb6135 2055
e2c671ba
RE
2056 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2057 || code == ROTATERT);
cce8749e 2058 }
f3bb6135 2059}
ff9940b0
RE
2060
2061int equality_operator (x, mode)
f3bb6135
RE
2062 rtx x;
2063 enum machine_mode mode;
ff9940b0 2064{
f3bb6135 2065 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
ff9940b0
RE
2066}
2067
2068/* Return TRUE for SMIN SMAX UMIN UMAX operators. */
2069
2070int
2071minmax_operator (x, mode)
2072 rtx x;
2073 enum machine_mode mode;
2074{
2075 enum rtx_code code = GET_CODE (x);
2076
2077 if (GET_MODE (x) != mode)
2078 return FALSE;
f3bb6135 2079
ff9940b0 2080 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
f3bb6135 2081}
ff9940b0
RE
2082
2083/* return TRUE if x is EQ or NE */
2084
2085/* Return TRUE if this is the condition code register, if we aren't given
2086 a mode, accept any class CCmode register */
2087
2088int
2089cc_register (x, mode)
f3bb6135
RE
2090 rtx x;
2091 enum machine_mode mode;
ff9940b0
RE
2092{
2093 if (mode == VOIDmode)
2094 {
2095 mode = GET_MODE (x);
2096 if (GET_MODE_CLASS (mode) != MODE_CC)
2097 return FALSE;
2098 }
f3bb6135 2099
ff9940b0
RE
2100 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2101 return TRUE;
f3bb6135 2102
ff9940b0
RE
2103 return FALSE;
2104}
5bbe2d40
RE
2105
2106/* Return TRUE if this is the condition code register, if we aren't given
84ed5e79
RE
2107 a mode, accept any class CCmode register which indicates a dominance
2108 expression. */
5bbe2d40
RE
2109
2110int
84ed5e79 2111dominant_cc_register (x, mode)
5bbe2d40
RE
2112 rtx x;
2113 enum machine_mode mode;
2114{
2115 if (mode == VOIDmode)
2116 {
2117 mode = GET_MODE (x);
84ed5e79 2118 if (GET_MODE_CLASS (mode) != MODE_CC)
5bbe2d40
RE
2119 return FALSE;
2120 }
2121
84ed5e79
RE
2122 if (mode != CC_DNEmode && mode != CC_DEQmode
2123 && mode != CC_DLEmode && mode != CC_DLTmode
2124 && mode != CC_DGEmode && mode != CC_DGTmode
2125 && mode != CC_DLEUmode && mode != CC_DLTUmode
2126 && mode != CC_DGEUmode && mode != CC_DGTUmode)
2127 return FALSE;
2128
5bbe2d40
RE
2129 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2130 return TRUE;
2131
2132 return FALSE;
2133}
2134
2b835d68
RE
2135/* Return TRUE if X references a SYMBOL_REF. */
2136int
2137symbol_mentioned_p (x)
2138 rtx x;
2139{
2140 register char *fmt;
2141 register int i;
2142
2143 if (GET_CODE (x) == SYMBOL_REF)
2144 return 1;
2145
2146 fmt = GET_RTX_FORMAT (GET_CODE (x));
2147 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2148 {
2149 if (fmt[i] == 'E')
2150 {
2151 register int j;
2152
2153 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2154 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2155 return 1;
2156 }
2157 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2158 return 1;
2159 }
2160
2161 return 0;
2162}
2163
2164/* Return TRUE if X references a LABEL_REF. */
2165int
2166label_mentioned_p (x)
2167 rtx x;
2168{
2169 register char *fmt;
2170 register int i;
2171
2172 if (GET_CODE (x) == LABEL_REF)
2173 return 1;
2174
2175 fmt = GET_RTX_FORMAT (GET_CODE (x));
2176 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2177 {
2178 if (fmt[i] == 'E')
2179 {
2180 register int j;
2181
2182 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2183 if (label_mentioned_p (XVECEXP (x, i, j)))
2184 return 1;
2185 }
2186 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2187 return 1;
2188 }
2189
2190 return 0;
2191}
2192
ff9940b0
RE
2193enum rtx_code
2194minmax_code (x)
f3bb6135 2195 rtx x;
ff9940b0
RE
2196{
2197 enum rtx_code code = GET_CODE (x);
2198
2199 if (code == SMAX)
2200 return GE;
f3bb6135 2201 else if (code == SMIN)
ff9940b0 2202 return LE;
f3bb6135 2203 else if (code == UMIN)
ff9940b0 2204 return LEU;
f3bb6135 2205 else if (code == UMAX)
ff9940b0 2206 return GEU;
f3bb6135 2207
ff9940b0
RE
2208 abort ();
2209}
2210
2211/* Return 1 if memory locations are adjacent */
2212
f3bb6135 2213int
ff9940b0
RE
2214adjacent_mem_locations (a, b)
2215 rtx a, b;
2216{
2217 int val0 = 0, val1 = 0;
2218 int reg0, reg1;
2219
2220 if ((GET_CODE (XEXP (a, 0)) == REG
2221 || (GET_CODE (XEXP (a, 0)) == PLUS
2222 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2223 && (GET_CODE (XEXP (b, 0)) == REG
2224 || (GET_CODE (XEXP (b, 0)) == PLUS
2225 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
2226 {
2227 if (GET_CODE (XEXP (a, 0)) == PLUS)
2228 {
2229 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2230 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2231 }
2232 else
2233 reg0 = REGNO (XEXP (a, 0));
2234 if (GET_CODE (XEXP (b, 0)) == PLUS)
2235 {
2236 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2237 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2238 }
2239 else
2240 reg1 = REGNO (XEXP (b, 0));
2241 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2242 }
2243 return 0;
2244}
2245
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.

   The PARALLEL must consist of an optional write-back SET of the base
   register (base = base + (count-2)*4, paired with a trailing CLOBBER
   of the same register), followed by SETs loading consecutive SImode
   registers from consecutive word offsets off a common source address.
   MODE is unused; it is present for the predicate calling convention.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  /* I indexes the first ordinary load; BASE is 1 when a write-back
     element occupies slot 0.  */
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	  != REGNO (SET_DEST (elt)))
	return 0;

      /* Drop the trailing CLOBBER from the elements still to scan.  */
      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  /* First load fixes the destination register run and the base address;
     note its address is taken as-is (offset 0 is implied below).  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      /* Each further element must load dest_regno + offset/4 from
	 (plus src_addr (const_int 4*k)), all in SImode.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i - base
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
2314
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.

   Mirror image of load_multiple_operation: an optional write-back SET
   of the base register (with matching trailing CLOBBER), then SETs
   storing consecutive SImode registers to consecutive word offsets off
   a common destination address.  MODE is unused.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  /* I indexes the first ordinary store; BASE is 1 when a write-back
     element occupies slot 0.  */
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	  != REGNO (SET_DEST (elt)))
	return 0;

      /* Drop the trailing CLOBBER from the elements still to scan.  */
      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  /* First store fixes the source register run and the base address.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      /* Each further element must store src_regno + offset/4 to
	 (plus dest_addr (const_int 4*k)), all in SImode.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i - base
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
e2c671ba 2383
84ed5e79
RE
/* Examine OPERANDS (NOPS destination registers in operands[0..nops-1],
   NOPS memory sources in operands[nops..2*nops-1]) and decide whether
   they can be combined into one LDM instruction.

   Returns 0 if not possible, otherwise an addressing-mode code:
     1 = ldmia, 2 = ldmib, 3 = ldmda, 4 = ldmdb,
     5 = ldmia after adjusting the base by the offset (only when that
	 adjustment fits in a single add/sub immediate).
   On success (and when BASE is non-null), REGS receives the registers
   sorted into ascending order, *BASE the base register number, and
   *LOAD_OFFSET the lowest memory offset.  */

int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx *operands;
     int nops;
     int *regs;
     int *base;
     HOST_WIDE_INT *load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Accept (reg), (subreg (reg)), or (plus base const_int) where
	 base is a reg or subreg-of-reg.  Note REG and OFFSET are set
	 by embedded assignments inside this condition.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO(reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, or if it overwrites the
	     base register but isn't the last insn in the list, then
	     we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort step: order[i] gets the index of the next
	 register above order[i-1]'s.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
2526
2527char *
2528emit_ldm_seq (operands, nops)
2529 rtx *operands;
2530 int nops;
2531{
2532 int regs[4];
2533 int base_reg;
2534 HOST_WIDE_INT offset;
2535 char buf[100];
2536 int i;
2537
2538 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2539 {
2540 case 1:
2541 strcpy (buf, "ldm%?ia\t");
2542 break;
2543
2544 case 2:
2545 strcpy (buf, "ldm%?ib\t");
2546 break;
2547
2548 case 3:
2549 strcpy (buf, "ldm%?da\t");
2550 break;
2551
2552 case 4:
2553 strcpy (buf, "ldm%?db\t");
2554 break;
2555
2556 case 5:
2557 if (offset >= 0)
2558 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2559 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2560 (long) offset);
2561 else
2562 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2563 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2564 (long) -offset);
2565 output_asm_insn (buf, operands);
2566 base_reg = regs[0];
2567 strcpy (buf, "ldm%?ia\t");
2568 break;
2569
2570 default:
2571 abort ();
2572 }
2573
2574 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2575 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2576
2577 for (i = 1; i < nops; i++)
2578 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2579 reg_names[regs[i]]);
2580
2581 strcat (buf, "}\t%@ phole ldm");
2582
2583 output_asm_insn (buf, operands);
2584 return "";
2585}
2586
/* Examine OPERANDS (NOPS source registers in operands[0..nops-1], NOPS
   memory destinations in operands[nops..2*nops-1]) and decide whether
   they can be combined into one STM instruction.

   Returns 0 if not possible, otherwise:
     1 = stmia, 2 = stmib, 3 = stmda, 4 = stmdb.
   Unlike load_multiple_sequence there is no base-adjust fallback,
   since the scratch register trick is not available for stores.
   On success (when BASE is non-null), REGS receives the registers in
   ascending order, *BASE the base register, *LOAD_OFFSET the lowest
   offset.  */

int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx *operands;
     int nops;
     int *regs;
     int *base;
     HOST_WIDE_INT *load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg(operands[nops + i]);

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      /* Accept (reg), (subreg (reg)), or (plus base const_int); REG
	 and OFFSET are set by embedded assignments in the test.  */
      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO(reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, then we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      /* Selection sort step: order[i] gets the index of the next
	 register above order[i-1]'s.  */
      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}
2723
2724char *
2725emit_stm_seq (operands, nops)
2726 rtx *operands;
2727 int nops;
2728{
2729 int regs[4];
2730 int base_reg;
2731 HOST_WIDE_INT offset;
2732 char buf[100];
2733 int i;
2734
2735 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2736 {
2737 case 1:
2738 strcpy (buf, "stm%?ia\t");
2739 break;
2740
2741 case 2:
2742 strcpy (buf, "stm%?ib\t");
2743 break;
2744
2745 case 3:
2746 strcpy (buf, "stm%?da\t");
2747 break;
2748
2749 case 4:
2750 strcpy (buf, "stm%?db\t");
2751 break;
2752
2753 default:
2754 abort ();
2755 }
2756
2757 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2758 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2759
2760 for (i = 1; i < nops; i++)
2761 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2762 reg_names[regs[i]]);
2763
2764 strcat (buf, "}\t%@ phole stm");
2765
2766 output_asm_insn (buf, operands);
2767 return "";
2768}
2769
e2c671ba
RE
2770int
2771multi_register_push (op, mode)
0a81f500
RE
2772 rtx op;
2773 enum machine_mode mode;
e2c671ba
RE
2774{
2775 if (GET_CODE (op) != PARALLEL
2776 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2777 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2778 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2779 return 0;
2780
2781 return 1;
2782}
2783
ff9940b0 2784\f
f3bb6135
RE
2785/* Routines for use with attributes */
2786
31fdb4d5
DE
2787/* Return nonzero if ATTR is a valid attribute for DECL.
2788 ATTRIBUTES are any existing attributes and ARGS are the arguments
2789 supplied with ATTR.
2790
2791 Supported attributes:
2792
2793 naked: don't output any prologue or epilogue code, the user is assumed
2794 to do the right thing. */
2795
2796int
2797arm_valid_machine_decl_attribute (decl, attributes, attr, args)
2798 tree decl;
2799 tree attributes;
2800 tree attr;
2801 tree args;
2802{
2803 if (args != NULL_TREE)
2804 return 0;
2805
2806 if (is_attribute_p ("naked", attr))
2807 return TREE_CODE (decl) == FUNCTION_DECL;
2808 return 0;
2809}
2810
2811/* Return non-zero if FUNC is a naked function. */
2812
2813static int
2814arm_naked_function_p (func)
2815 tree func;
2816{
2817 tree a;
2818
2819 if (TREE_CODE (func) != FUNCTION_DECL)
2820 abort ();
2821
2822 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2823 return a != NULL_TREE;
2824}
f3bb6135 2825\f
ff9940b0
RE
2826/* Routines for use in generating RTL */
2827
/* Build (but do not emit) the PARALLEL rtx for a load-multiple of COUNT
   SImode registers starting at BASE_REGNO, from consecutive words at
   address FROM.  UP selects ascending (+4) or descending (-4) strides.
   If WRITE_BACK, the parallel also updates FROM past the block and
   clobbers it.  UNCHANGING_P / IN_STRUCT_P are copied onto each MEM.
   The result matches the shape checked by load_multiple_operation.  */
rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
		       in_struct_p)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* Write-back needs two extra vector slots: the base update at slot 0
     and the trailing CLOBBER.  */
  result = gen_rtx (PARALLEL, VOIDmode,
		    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx (SET, GET_MODE (from), from,
		   plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  /* One SET per register: reg[base_regno + j] = mem[from + j*4*sign].  */
  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx (MEM, SImode, plus_constant (from, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;

      XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode,
					gen_rtx (REG, SImode, base_regno + j),
					mem);
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);

  return result;
}
2871
/* Build (but do not emit) the PARALLEL rtx for a store-multiple of
   COUNT SImode registers starting at BASE_REGNO, to consecutive words
   at address TO.  Mirror of arm_gen_load_multiple; the result matches
   the shape checked by store_multiple_operation.  */
rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
			in_struct_p)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* Write-back needs two extra vector slots: the base update at slot 0
     and the trailing CLOBBER.  */
  result = gen_rtx (PARALLEL, VOIDmode,
		    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx (SET, GET_MODE (to), to,
		   plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  /* One SET per register: mem[to + j*4*sign] = reg[base_regno + j].  */
  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;

      XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode, mem,
					gen_rtx (REG, SImode, base_regno + j));
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);

  return result;
}
2914
880e2516
RE
/* Expand a block copy (movstrqi): operands[0] = dest MEM, operands[1] =
   src MEM, operands[2] = byte count, operands[3] = alignment.
   Returns 1 and emits the copy sequence, or 0 when the expander cannot
   handle it (non-constant size/alignment, more than 64 bytes, or
   alignment not a multiple of 4) so the caller can fall back.

   Strategy: bulk words move through hard registers r0-r3 in groups of
   up to 4 via load/store-multiple; up to 3 trailing bytes are shifted
   out of a partial word one byte at a time.  */
int
arm_gen_movstrqi (operands)
     rtx *operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  /* Preserve the memory attributes of the original MEMs so they can be
     copied onto every MEM generated below.  */
  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  /* Words to load (rounded up), whole words to store, residual bytes.  */
  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  /* The partial final word (if any) will land in hard register
     (in_words_to_go - 1) & 3 after the multiple loads below.  */
  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      /* Load up to 4 words into r0..; write back the address only when
	 more loads will follow.  */
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  src_unchanging_p, src_in_struct_p));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
					  FALSE, src_unchanging_p,
					  src_in_struct_p));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dst_unchanging_p,
					       dst_in_struct_p));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE,
					       (last_bytes == 0
						? FALSE : TRUE),
					       dst_unchanging_p,
					       dst_in_struct_p));
	  else
	    {
	      /* A single remaining word: plain store, and bump DST by
		 hand if trailing bytes still need it.  */
	      mem = gen_rtx (MEM, SImode, dst);
	      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	      emit_move_insn (mem, gen_rtx (REG, SImode, 0));
	      if (last_bytes != 0)
		emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      /* Exactly one aligned word left: copy it through a pseudo and
	 advance both final addresses.  */
      mem = gen_rtx (MEM, SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx (MEM, SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check */
	abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      /* Load the word containing the trailing bytes into a pseudo.  */
      mem = gen_rtx (MEM, SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
	abort ();

      /* The bytes we want are in the top end of the word */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      /* Big-endian: store bytes from the high address downwards,
	 shifting the word right 8 bits each time.  */
      while (last_bytes)
	{
	  mem = gen_rtx (MEM, QImode, plus_constant (dst, last_bytes - 1));
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}

    }
  else
    {
      /* Little-endian: store the low byte, advance DST, shift right.  */
      while (last_bytes)
	{
	  if (part_bytes_reg == NULL)
	    abort ();

	  mem = gen_rtx (MEM, QImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (dst, dst, const1_rtx));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }

  return 1;
}
3073
5165176d
RE
/* Generate a memory reference for a half word, such that it will be loaded
   into the top 16 bits of the word.  We can assume that the address is
   known to be alignable and of the form reg, or plus (reg, const).

   Returns either the SImode MEM of the containing word, or that MEM
   wrapped in (rotate ... 16) when the halfword sits in the low half,
   or NULL when TARGET_SHORT_BY_BYTES forbids the unaligned access.  */
rtx
gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  /* Split the address into base register and constant offset.  */
  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail.  */
  if (TARGET_SHORT_BY_BYTES
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  /* Word-aligned MEM covering the halfword (offset rounded down).  */
  base = gen_rtx (MEM, SImode, plus_constant (base, offset & ~2));

  /* If the halfword already occupies the top 16 bits for this
     endianness, the plain word load suffices; otherwise rotate by 16.  */
  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  return gen_rtx (ROTATE, SImode, base, GEN_INT (16));
}
3102
84ed5e79
RE
/* Choose the CC mode for a conditional-compare of two comparisons X
   and Y under IF_THEN_ELSE (see arm_select_cc_mode).  COND_OR nonzero
   means either condition may be true (OR combination); zero means both
   must hold (AND).  Returns one of the CC_Dxx dominance modes, or
   plain CCmode when the combination cannot be expressed.  */
static enum machine_mode
select_dominance_cc_mode (op, x, y, cond_or)
     enum rtx_code op;
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
	  != CCmode))
    return CCmode;

  /* An OR combination is handled by reversing the first condition and
     treating it as an AND of the reverse.  */
  if (cond_or)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this.  */
  if (cond1 != cond2
      && ! comparison_dominates_p (cond1, cond2)
      && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  /* COND1 now dominates COND2; pick the dominance mode from the pair.  */
  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || ! cond_or)
	return CC_DEQmode;

      switch (cond2)
	{
	case LE: return CC_DLEmode;
	case LEU: return CC_DLEUmode;
	case GE: return CC_DGEmode;
	case GEU: return CC_DGEUmode;
	default: break;
	}

      break;

    case LT:
      if (cond2 == LT || ! cond_or)
	return CC_DLTmode;
      if (cond2 == LE)
	return CC_DLEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || ! cond_or)
	return CC_DGTmode;
      if (cond2 == GE)
	return CC_DGEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || ! cond_or)
	return CC_DLTUmode;
      if (cond2 == LEU)
	return CC_DLEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || ! cond_or)
	return CC_DGTUmode;
      if (cond2 == GEU)
	return CC_DGEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same.  */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}
3215
/* Return the CC mode that a comparison OP between X and Y should use,
   so that only condition codes actually set correctly by the insn are
   relied upon.  Falls back to plain CCmode when no special mode
   applies.  */
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* An operation that sets the condition codes as a side-effect, the
     V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1),
				     INTVAL (XEXP (x, 2)));

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  /* Unsigned overflow test of an addition involving Y: only the carry
     flag is meaningful.  */
  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
3288
ff9940b0
RE
/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for register 0 in the proper mode.  FP means this is a
   floating point compare: I don't think that it is needed on the arm.  */

rtx
gen_compare_reg (code, x, y, fp)
     enum rtx_code code;
     rtx x, y;
     int fp;			/* Unused on ARM; kept for interface parity.  */
{
  /* Pick the CC mode that SELECT_CC_MODE deems appropriate for this
     comparison, and build the condition-code register (hard reg 24) in
     that mode.  */
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg = gen_rtx (REG, mode, 24);

  /* Emit (set cc_reg (compare x y)); callers then branch/condexec on
     cc_reg.  */
  emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
		      gen_rtx (COMPARE, mode, x, y)));

  return cc_reg;
}
3307
0a81f500
RE
/* Reload helper: load a HImode value from memory as two QImode loads
   plus a shift/OR merge, since the ARM of this era has no halfword
   load instruction.  OPERANDS[0] is the destination register,
   OPERANDS[1] the HImode memory source, OPERANDS[2] a QImode scratch.  */
void
arm_reload_in_hi (operands)
     rtx *operands;
{
  rtx base = find_replacement (&XEXP (operands[1], 0));

  /* Load the first byte into the scratch register.  */
  emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      /* Materialize the address in the destination register so that
	 plus_constant below produces an offsettable address.  */
      rtx base_plus = gen_rtx (REG, SImode, REGNO (operands[0]));

      emit_insn (gen_rtx (SET, VOIDmode, base_plus, base));
      base = base_plus;
    }

  /* Load the second byte into the low part of the destination.  */
  emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
				   gen_rtx (MEM, QImode,
					    plus_constant (base, 1))));
  /* Merge the two bytes; which byte is shifted depends on endianness.  */
  if (BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
						operands[0], 0),
			gen_rtx (IOR, SImode,
				 gen_rtx (ASHIFT, SImode,
					  gen_rtx (SUBREG, SImode,
						   operands[0], 0),
					  GEN_INT (8)),
				 operands[2])));
  else
    emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
						operands[0], 0),
			gen_rtx (IOR, SImode,
				 gen_rtx (ASHIFT, SImode,
					  operands[2],
					  GEN_INT (8)),
				 gen_rtx (SUBREG, SImode, operands[0], 0))));
}
3346
/* Reload helper: store a HImode value to memory as two QImode stores,
   using OPERANDS[2] as a scratch for the high byte.  OPERANDS[0] is the
   HImode memory destination, OPERANDS[1] the source register.  */
void
arm_reload_out_hi (operands)
     rtx *operands;
{
  rtx base = find_replacement (&XEXP (operands[0], 0));

  /* Byte order of the two stores depends on target endianness.  */
  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
			    gen_rtx (SUBREG, QImode, operands[1], 0)));
      emit_insn (gen_lshrsi3 (operands[2],
			      gen_rtx (SUBREG, SImode, operands[1], 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
			    gen_rtx (SUBREG, QImode, operands[2], 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
			    gen_rtx (SUBREG, QImode, operands[1], 0)));
      emit_insn (gen_lshrsi3 (operands[2],
			      gen_rtx (SUBREG, SImode, operands[1], 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
			    gen_rtx (SUBREG, QImode, operands[2], 0)));
    }
}
2b835d68
RE
3374\f
3375/* Routines for manipulation of the constant pool. */
3376/* This is unashamedly hacked from the version in sh.c, since the problem is
3377 extremely similar. */
3378
3379/* Arm instructions cannot load a large constant into a register,
3380 constants have to come from a pc relative load. The reference of a pc
3381 relative load instruction must be less than 1k infront of the instruction.
3382 This means that we often have to dump a constant inside a function, and
3383 generate code to branch around it.
3384
3385 It is important to minimize this, since the branches will slow things
3386 down and make things bigger.
3387
3388 Worst case code looks like:
3389
3390 ldr rn, L1
3391 b L2
3392 align
3393 L1: .long value
3394 L2:
3395 ..
3396
3397 ldr rn, L3
3398 b L4
3399 align
3400 L3: .long value
3401 L4:
3402 ..
3403
3404 We fix this by performing a scan before scheduling, which notices which
3405 instructions need to have their operands fetched from the constant table
3406 and builds the table.
3407
3408
3409 The algorithm is:
3410
3411 scan, find an instruction which needs a pcrel move. Look forward, find th
3412 last barrier which is within MAX_COUNT bytes of the requirement.
3413 If there isn't one, make one. Process all the instructions between
3414 the find and the barrier.
3415
3416 In the above example, we can tell that L3 is within 1k of L1, so
3417 the first move can be shrunk from the 2 insn+constant sequence into
3418 just 1 insn, and the constant moved to L3 to make:
3419
3420 ldr rn, L1
3421 ..
3422 ldr rn, L3
3423 b L4
3424 align
3425 L1: .long value
3426 L3: .long value
3427 L4:
3428
3429 Then the second move becomes the target for the shortening process.
3430
3431 */
3432
/* One entry in the constant (mini)pool: the constant, its mode, and the
   offset of the first byte AFTER it relative to pool_vector_label.  */
typedef struct
{
  rtx value;			/* Value in table */
  HOST_WIDE_INT next_offset;	/* Offset of the byte following this entry */
  enum machine_mode mode;	/* Mode of value */
} pool_node;

/* The maximum number of constants that can fit into one pool, since
   the pc relative range is 0...1020 bytes and constants are at least 4
   bytes long */

#define MAX_POOL_SIZE (1020/4)
static pool_node pool_vector[MAX_POOL_SIZE];	/* Pending pool entries */
static int pool_size;				/* Entries currently queued */
static rtx pool_vector_label;			/* Label at start of pool */
3448
/* Add a constant to the pool and return its byte offset from the start
   of the pool (pool_vector_label).  Identical constants are shared.  */
static HOST_WIDE_INT
add_constant (x, mode)
     rtx x;
     enum machine_mode mode;
{
  int i;
  HOST_WIDE_INT offset;

  /* If X is itself a reference to the constant pool, pool the underlying
     constant instead of the reference.  */
  if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    x = get_pool_constant (XEXP (x, 0));
#ifndef AOF_ASSEMBLER
  /* Unwrap the PIC unspec (code 3) to pool its operand.  */
  else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
    x = XVECEXP (x, 0, 0);
#endif

#ifdef AOF_ASSEMBLER
  /* PIC Symbol references need to be converted into offsets into the
     based area.  */
  if (flag_pic && GET_CODE (x) == SYMBOL_REF)
    x = aof_pic_entry (x);
#endif /* AOF_ASSEMBLER */

  /* First see if we've already got it */
  for (i = 0; i < pool_size; i++)
    {
      if (GET_CODE (x) == pool_vector[i].value->code
	  && mode == pool_vector[i].mode)
	{
	  /* Labels compare by their unique number, not structurally.  */
	  if (GET_CODE (x) == CODE_LABEL)
	    {
	      if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
		continue;
	    }
	  if (rtx_equal_p (x, pool_vector[i].value))
	    return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
	}
    }

  /* Need a new one.  next_offset is cumulative: size of this entry plus
     the end offset of the previous entry.  The pool label is created
     lazily on the first insertion.  */
  pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
  offset = 0;
  if (pool_size == 0)
    pool_vector_label = gen_label_rtx ();
  else
    pool_vector[pool_size].next_offset
      += (offset = pool_vector[pool_size - 1].next_offset);

  pool_vector[pool_size].value = x;
  pool_vector[pool_size].mode = mode;
  pool_size++;
  return offset;
}
3503
/* Output the literal table: emit the queued pool entries after SCAN,
   bracketed by alignment, the pool label, and a barrier; then reset the
   pool.  */
static void
dump_table (scan)
     rtx scan;			/* Insn after which the table is placed.  */
{
  int i;

  scan = emit_label_after (gen_label_rtx (), scan);
  scan = emit_insn_after (gen_align_4 (), scan);
  scan = emit_label_after (pool_vector_label, scan);

  for (i = 0; i < pool_size; i++)
    {
      pool_node *p = pool_vector + i;

      /* Only 4- and 8-byte constants are ever pooled.  */
      switch (GET_MODE_SIZE (p->mode))
	{
	case 4:
	  scan = emit_insn_after (gen_consttable_4 (p->value), scan);
	  break;

	case 8:
	  scan = emit_insn_after (gen_consttable_8 (p->value), scan);
	  break;

	default:
	  abort ();
	  break;
	}
    }

  scan = emit_insn_after (gen_consttable_end (), scan);
  scan = emit_barrier_after (scan);
  pool_size = 0;		/* Pool is flushed; start afresh.  */
}
3539
/* Non zero if the src operand needs to be fixed up (i.e. replaced with a
   pc-relative load from the constant pool).  DESTREG is the hard register
   number of the destination; registers 16-23 are the FPU registers, so
   destreg < 16 means an integer destination.  */
static int
fixit (src, mode, destreg)
     rtx src;
     enum machine_mode mode;
     int destreg;
{
  if (CONSTANT_P (src))
    {
      /* Integer constants are fine if they (or their complement) fit in
	 an immediate field.  */
      if (GET_CODE (src) == CONST_INT)
	return (! const_ok_for_arm (INTVAL (src))
		&& ! const_ok_for_arm (~INTVAL (src)));
      /* FP constants must go to an FPU reg and be a (possibly negated)
	 FPU immediate; anything else needs pooling.  */
      if (GET_CODE (src) == CONST_DOUBLE)
	return (GET_MODE (src) == VOIDmode
		|| destreg < 16
		|| (! const_double_rtx_ok_for_fpu (src)
		    && ! neg_const_double_rtx_ok_for_fpu (src)));
      return symbol_mentioned_p (src);
    }
#ifndef AOF_ASSEMBLER
  /* PIC unspec (code 3) always needs fixing up.  */
  else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
    return 1;
#endif
  else
    return (mode == SImode && GET_CODE (src) == MEM
	    && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
	    && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
}
3568
3569/* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
3570static rtx
3571find_barrier (from, max_count)
3572 rtx from;
3573 int max_count;
3574{
3575 int count = 0;
3576 rtx found_barrier = 0;
e5e809f4 3577 rtx last = from;
2b835d68
RE
3578
3579 while (from && count < max_count)
3580 {
3581 if (GET_CODE (from) == BARRIER)
a2538ff7 3582 return from;
2b835d68
RE
3583
3584 /* Count the length of this insn */
3585 if (GET_CODE (from) == INSN
3586 && GET_CODE (PATTERN (from)) == SET
3587 && CONSTANT_P (SET_SRC (PATTERN (from)))
3588 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
d499463f 3589 count += 8;
2b835d68
RE
3590 else
3591 count += get_attr_length (from);
3592
e5e809f4 3593 last = from;
2b835d68
RE
3594 from = NEXT_INSN (from);
3595 }
3596
3597 if (!found_barrier)
3598 {
3599 /* We didn't find a barrier in time to
3600 dump our stuff, so we'll make one */
3601 rtx label = gen_label_rtx ();
3602
3603 if (from)
e5e809f4 3604 from = PREV_INSN (last);
2b835d68
RE
3605 else
3606 from = get_last_insn ();
3607
3608 /* Walk back to be just before any jump */
3609 while (GET_CODE (from) == JUMP_INSN
3610 || GET_CODE (from) == NOTE
3611 || GET_CODE (from) == CODE_LABEL)
3612 from = PREV_INSN (from);
3613
3614 from = emit_jump_insn_after (gen_jump (label), from);
3615 JUMP_LABEL (from) = label;
3616 found_barrier = emit_barrier_after (from);
3617 emit_label_after (label, found_barrier);
3618 return found_barrier;
3619 }
3620
3621 return found_barrier;
3622}
3623
/* Non zero if the insn is a move instruction which needs to be fixed
   (i.e. its source must be fetched from the constant pool).  */
static int
broken_move (insn)
     rtx insn;
{
  if (!INSN_DELETED_P (insn)
      && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      rtx src = SET_SRC (pat);
      rtx dst = SET_DEST (pat);
      int destreg;
      enum machine_mode mode = GET_MODE (dst);

      /* Jumps expressed as sets of pc are not data moves.  */
      if (dst == pc_rtx)
	return 0;

      /* The destination must be a (possibly subreg'd) hard register for
	 fixit's FPU-register test to make sense.  */
      if (GET_CODE (dst) == REG)
	destreg = REGNO (dst);
      else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
	destreg = REGNO (SUBREG_REG (dst));
      else
	return 0;

      return fixit (src, mode, destreg);
    }
  return 0;
}
3653
/* Machine-dependent reorg pass: scan the insn chain starting at FIRST,
   find moves whose constant source cannot be encoded as an immediate
   (broken_move), queue those constants into a minipool, rewrite each move
   as a pc-relative load from the pool, and dump the pool at a barrier
   within range.  */
void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  int count_size;

#if 0
  /* The ldr instruction can work with up to a 4k offset, and most constants
     will be loaded with one of these instructions; however, the adr
     instruction and the ldf instructions only work with a 1k offset.  This
     code needs to be rewritten to use the 4k offset when possible, and to
     adjust when a 1k offset is needed.  For now we just use a 1k offset
     from the start.  */
  count_size = 4000;

  /* Floating point operands can't work further than 1024 bytes from the
     PC, so to make things simple we restrict all loads for such functions.
     */
  if (TARGET_HARD_FLOAT)
    {
      int regno;

      for (regno = 16; regno < 24; regno++)
	if (regs_ever_live[regno])
	  {
	    count_size = 1000;
	    break;
	  }
    }
#else
  count_size = 1000;
#endif /* 0 */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (broken_move (insn))
	{
	  /* This is a broken move instruction, scan ahead looking for
	     a barrier to stick the constant table behind */
	  rtx scan;
	  rtx barrier = find_barrier (insn, count_size);

	  /* Now find all the moves between the points and modify them */
	  for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
	    {
	      if (broken_move (scan))
		{
		  /* This is a broken move instruction, add it to the pool */
		  rtx pat = PATTERN (scan);
		  rtx src = SET_SRC (pat);
		  rtx dst = SET_DEST (pat);
		  enum machine_mode mode = GET_MODE (dst);
		  HOST_WIDE_INT offset;
		  rtx newinsn = scan;
		  rtx newsrc;
		  rtx addr;
		  int scratch;

		  /* If this is an HImode constant load, convert it into
		     an SImode constant load.  Since the register is always
		     32 bits this is safe.  We have to do this, since the
		     load pc-relative instruction only does a 32-bit load. */
		  if (mode == HImode)
		    {
		      mode = SImode;
		      if (GET_CODE (dst) != REG)
			abort ();
		      PUT_MODE (dst, SImode);
		    }

		  offset = add_constant (src, mode);
		  addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
						 pool_vector_label),
					offset);

		  /* For wide moves to integer regs we need to split the
		     address calculation off into a separate insn, so that
		     the load can then be done with a load-multiple.  This is
		     safe, since we have already noted the length of such
		     insns to be 8, and we are immediately over-writing the
		     scratch we have grabbed with the final result.  */
		  if (GET_MODE_SIZE (mode) > 4
		      && (scratch = REGNO (dst)) < 16)
		    {
		      rtx reg = gen_rtx (REG, SImode, scratch);
		      newinsn = emit_insn_after (gen_movaddr (reg, addr),
						 newinsn);
		      addr = reg;
		    }

		  newsrc = gen_rtx (MEM, mode, addr);

		  /* Build a jump insn wrapper around the move instead
		     of an ordinary insn, because we want to have room for
		     the target label rtx in fld[7], which an ordinary
		     insn doesn't have. */
		  newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
							   dst, newsrc),
						  newinsn);
		  JUMP_LABEL (newinsn) = pool_vector_label;

		  /* But it's still an ordinary insn */
		  PUT_CODE (newinsn, INSN);

		  /* Kill old insn */
		  delete_insn (scan);
		  scan = newinsn;
		}
	    }
	  /* Flush the pool at the barrier and resume the outer scan
	     there (scan == barrier when the inner loop exits).  */
	  dump_table (barrier);
	  insn = scan;
	}
    }
}
3769
cce8749e
CH
3770\f
3771/* Routines to output assembly language. */
3772
/* If the rtx is the correct value then return the string of the number.
   In this way we can ensure that valid double constants are generated even
   when cross compiling.  Aborts if X is not one of the eight FPA
   immediate constants.  */
char *
fp_immediate_constant (x)
     rtx x;			/* A CONST_DOUBLE.  */
{
  REAL_VALUE_TYPE r;
  int i;

  /* The FPA constant tables are built lazily.  */
  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return strings_fpa[i];

  /* Callers are expected to check validity first.  */
  abort ();
}
3793
9997d19d
RE
/* As for fp_immediate_constant, but value is passed directly, not in rtx.
   Aborts if *R is not one of the eight FPA immediate constants.  */
static char *
fp_const_from_val (r)
     REAL_VALUE_TYPE *r;
{
  int i;

  /* The FPA constant tables are built lazily.  */
  if (! fpa_consts_inited)
    init_fpa_table ();

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
      return strings_fpa[i];

  /* Callers are expected to check validity first.  */
  abort ();
}
ff9940b0 3810
cce8749e
CH
3811/* Output the operands of a LDM/STM instruction to STREAM.
3812 MASK is the ARM register set mask of which only bits 0-15 are important.
3813 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
3814 must follow the register list. */
3815
3816void
3817print_multi_reg (stream, instr, mask, hat)
3818 FILE *stream;
3819 char *instr;
3820 int mask, hat;
3821{
3822 int i;
3823 int not_first = FALSE;
3824
1d5473cb 3825 fputc ('\t', stream);
f3139301 3826 fprintf (stream, instr, REGISTER_PREFIX);
1d5473cb 3827 fputs (", {", stream);
cce8749e
CH
3828 for (i = 0; i < 16; i++)
3829 if (mask & (1 << i))
3830 {
3831 if (not_first)
3832 fprintf (stream, ", ");
f3139301 3833 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
cce8749e
CH
3834 not_first = TRUE;
3835 }
f3bb6135 3836
cce8749e 3837 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 3838}
cce8749e
CH
3839
/* Output a 'call' insn.  OPERANDS[0] is the register holding the call
   target.  Emits a mov lr, pc followed by mov pc, target; the mov into
   lr captures the return address.  */

char *
output_call (operands)
     rtx *operands;
{
  /* Handle calls to lr using ip (which may be clobbered in subr anyway). */

  if (REGNO (operands[0]) == 14)
    {
      /* The target lives in lr, which the call sequence clobbers; copy
	 it to ip (r12) first.  */
      operands[0] = gen_rtx (REG, SImode, 12);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }
  output_asm_insn ("mov%?\t%|lr, %|pc", operands);
  output_asm_insn ("mov%?\t%|pc, %0", operands);
  return "";
}
cce8749e 3857
ff9940b0
RE
/* Recursively rewrite every reference to lr (reg 14) inside *X with ip
   (reg 12), in place.  Returns nonzero if any replacement was made.  */
static int
eliminate_lr2ip (x)
     rtx *x;
{
  int something_changed = 0;
  rtx x0 = *x;
  int code = GET_CODE (x0);
  register int i, j;
  register char *fmt;

  switch (code)
    {
    case REG:
      if (REGNO (x0) == 14)
	{
	  *x = gen_rtx (REG, SImode, 12);
	  return 1;
	}
      return 0;
    default:
      /* Scan through the sub-elements and change any references there.
	 'e' is a single rtx operand, 'E' a vector of rtxes.  */
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (fmt[i] == 'e')
	  something_changed |= eliminate_lr2ip (&XEXP (x0, i));
	else if (fmt[i] == 'E')
	  for (j = 0; j < XVECLEN (x0, i); j++)
	    something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
      return something_changed;
    }
}
3889
/* Output a 'call' insn that is a reference in memory.  OPERANDS[0] is
   the memory operand holding the call target.  */

char *
output_call_mem (operands)
     rtx *operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
  /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
     The address is copied before lr is overwritten with the return
     address below.  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  output_asm_insn ("mov%?\t%|lr, %|pc", operands);
  output_asm_insn ("ldr%?\t%|pc, %0", operands);
  return "";
}
ff9940b0
RE
3906
3907
/* Output a move from arm registers to an fpu registers.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first registers of an arm register pair.
   Goes via the stack: push the three ARM words, pop them as one
   extended-precision FPU load.  */

char *
output_mov_long_double_fpu_from_arm (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[3];

  /* ip (r12) cannot anchor a three-register run.  */
  if (arm_reg0 == 12)
    abort();

  ops[0] = gen_rtx (REG, SImode, arm_reg0);
  ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
  ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);

  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
  output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
  return "";
}
ff9940b0
RE
3930
/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first registers of an arm register pair.
   OPERANDS[1] is an fpu register.
   Goes via the stack: store the extended-precision value, pop it as
   three ARM words.  */

char *
output_mov_long_double_arm_from_fpu (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[3];

  /* ip (r12) cannot anchor a three-register run.  */
  if (arm_reg0 == 12)
    abort();

  ops[0] = gen_rtx (REG, SImode, arm_reg0);
  ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
  ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);

  output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
  return "";
}
ff9940b0
RE
3953
3954/* Output a move from arm registers to arm registers of a long double
3955 OPERANDS[0] is the destination.
3956 OPERANDS[1] is the source. */
3957char *
3958output_mov_long_double_arm_from_arm (operands)
f3bb6135 3959 rtx *operands;
ff9940b0
RE
3960{
3961 /* We have to be careful here because the two might overlap */
3962 int dest_start = REGNO (operands[0]);
3963 int src_start = REGNO (operands[1]);
3964 rtx ops[2];
3965 int i;
3966
3967 if (dest_start < src_start)
3968 {
3969 for (i = 0; i < 3; i++)
3970 {
3971 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3972 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 3973 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
3974 }
3975 }
3976 else
3977 {
3978 for (i = 2; i >= 0; i--)
3979 {
3980 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3981 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 3982 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
3983 }
3984 }
f3bb6135 3985
ff9940b0
RE
3986 return "";
3987}
3988
3989
cce8749e
CH
/* Output a move from arm registers to an fpu registers.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first registers of an arm register pair.
   Goes via the stack: push the two ARM words, pop them as one
   double-precision FPU load.  */

char *
output_mov_double_fpu_from_arm (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[2];

  /* ip (r12) cannot anchor a register pair.  */
  if (arm_reg0 == 12)
    abort();
  ops[0] = gen_rtx (REG, SImode, arm_reg0);
  ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
  output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
  return "";
}
cce8749e
CH
4009
/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first registers of an arm register pair.
   OPERANDS[1] is an fpu register.
   Goes via the stack: store the double, pop it as two ARM words.  */

char *
output_mov_double_arm_from_fpu (operands)
     rtx *operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[2];

  /* ip (r12) cannot anchor a register pair.  */
  if (arm_reg0 == 12)
    abort();

  ops[0] = gen_rtx (REG, SImode, arm_reg0);
  ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
  output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
  return "";
}
cce8749e
CH
4030
/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

char *
output_move_double (operands)
     rtx *operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      /* otherops[0] names the second (high) register of the pair.  */
      otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  if (reg1 == 12)
	    abort();

	  /* Ensure the second source is not overwritten */
	  if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
	    output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
	  else
	    output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
	}
      else if (code1 == CONST_DOUBLE)
	{
	  if (GET_MODE (operands[1]) == DFmode)
	    {
	      /* Convert the double to its two-word target image and load
		 each word as an immediate.  */
	      long l[2];
	      union real_extract u;

	      bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
		     sizeof (u));
	      REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
	      otherops[1] = GEN_INT(l[1]);
	      operands[1] = GEN_INT(l[0]);
	    }
	  else if (GET_MODE (operands[1]) != VOIDmode)
	    abort ();
	  else if (WORDS_BIG_ENDIAN)
	    {
	      /* VOIDmode CONST_DOUBLE: a 64-bit integer constant.  */
	      otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	    }
	  else
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	    }
	  output_mov_immediate (operands);
	  output_mov_immediate (otherops);
	}
      else if (code1 == CONST_INT)
	{
#if HOST_BITS_PER_WIDE_INT > 32
	  /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
	     what the upper word is.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	      operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	    }
	  else
	    {
	      otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	      operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	    }
#else
	  /* Sign extend the intval into the high-order word */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = operands[1];
	      operands[1] = (INTVAL (operands[1]) < 0
			     ? constm1_rtx : const0_rtx);
	    }
	  else
	    otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
	  output_mov_immediate (otherops);
	  output_mov_immediate (operands);
	}
      else if (code1 == MEM)
	{
	  /* Choose a load-multiple addressing form matching the MEM's
	     address shape.  */
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;

	    case PRE_INC:
	      abort (); /* Should never happen now */
	      break;

	    case PRE_DEC:
	      output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	      break;

	    case POST_INC:
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;

	    case POST_DEC:
	      abort (); /* Should never happen now */
	      break;

	    case LABEL_REF:
	    case CONST:
	      output_asm_insn ("adr%?\t%0, %1", operands);
	      output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	      break;

	    default:
	      if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
		{
		  /* Compute the address into the destination's first reg,
		     then load-multiple from it.  Small offsets map
		     directly onto ldm addressing modes.  */
		  otherops[0] = operands[0];
		  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
		  otherops[2] = XEXP (XEXP (operands[1], 0), 1);
		  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		    {
		      if (GET_CODE (otherops[2]) == CONST_INT)
			{
			  switch (INTVAL (otherops[2]))
			    {
			    case -8:
			      output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			      return "";
			    case -4:
			      output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			      return "";
			    case 4:
			      output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			      return "";
			    }
			  if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			    output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
			  else
			    output_asm_insn ("add%?\t%0, %1, %2", otherops);
			}
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("sub%?\t%0, %1, %2", otherops);
		  return "ldm%?ia\t%0, %M0";
		}
	      else
		{
		  otherops[1] = adj_offsettable_operand (operands[1], 4);
		  /* Take care of overlapping base/data reg.  */
		  if (reg_mentioned_p (operands[0], operands[1]))
		    {
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		    }
		  else
		    {
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		    }
		}
	    }
	}
      else
	abort();  /* Constraints should prevent this */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == 12)
	abort();

      /* Mirror of the load case: choose a store-multiple form matching
	 the address shape.  */
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  abort (); /* Should never happen now */
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  abort (); /* Should never happen now */
	  break;

	case PLUS:
	  if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
	    {
	      switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  /* Fall through */

	default:
	  otherops[0] = adj_offsettable_operand (operands[0], 4);
	  otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else
    abort();  /* Constraints should prevent this */

  return "";
}
cce8749e
CH
4262
4263
/* Output an arbitrary MOV reg, #n.
   OPERANDS[0] is a register.  OPERANDS[1] is a const_int.
   Chooses the shortest of: single MOV, single MVN, or a
   MOV/MVN-plus-ORR/BIC sequence built by output_multi_immediate.  */

char *
output_mov_immediate (operands)
     rtx *operands;
{
  HOST_WIDE_INT n = INTVAL (operands[1]);
  int n_ones = 0;
  int i;

  /* Try to use one MOV */
  if (const_ok_for_arm (n))
    {
      output_asm_insn ("mov%?\t%0, %1", operands);
      return "";
    }

  /* Try to use one MVN */
  if (const_ok_for_arm (~n))
    {
      operands[1] = GEN_INT (~n);
      output_asm_insn ("mvn%?\t%0, %1", operands);
      return "";
    }

  /* If all else fails, make it out of ORRs or BICs as appropriate. */

  for (i=0; i < 32; i++)
    if (n & 1 << i)
      n_ones++;

  if (n_ones > 16)  /* Shorter to use MVN with BIC in this case. */
    output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
			   ~n);
  else
    output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
			   n);

  return "";
}
cce8749e
CH
4305
4306
/* Output an ADD r, s, #n where n may be too big for one instruction.  If
   adding zero to one register, output nothing.
   OPERANDS[0] is the destination, OPERANDS[1] the source register,
   OPERANDS[2] the const_int to add.  */

char *
output_add_immediate (operands)
     rtx *operands;
{
  HOST_WIDE_INT n = INTVAL (operands[2]);

  if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
    {
      /* Negative constants become a SUB sequence of -n.  */
      if (n < 0)
	output_multi_immediate (operands,
				"sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
				-n);
      else
	output_multi_immediate (operands,
				"add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
				n);
    }

  return "";
}
cce8749e 4330
cce8749e
CH
/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.
   The constant is emitted 8 bits at a time, each chunk being an ARM
   immediate (8 bits rotated to an even position).  */

static char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx *operands;
     char *instr1, *instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* Only the low 32 bits are meaningful on the target.  */
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands); /* Quick and easy output */
    }
  else
    {
      int i;
      char *instr = instr1;

      /* Note that n is never zero here (which would give no output) */
      for (i = 0; i < 32; i += 2)
	{
	  /* ARM immediates are an 8-bit value at an even rotation, so
	     scan in steps of 2; once a chunk starts, skip past its
	     8 bits (the loop increment supplies the final +2).  */
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      instr = instr2;
	      i += 6;
	    }
	}
    }
  return "";
}
cce8749e
CH
4373
4374
4375/* Return the appropriate ARM instruction for the operation code.
4376 The returned result should not be overwritten. OP is the rtx of the
4377 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
4378 was shifted. */
4379
4380char *
4381arithmetic_instr (op, shift_first_arg)
4382 rtx op;
f3bb6135 4383 int shift_first_arg;
cce8749e 4384{
9997d19d 4385 switch (GET_CODE (op))
cce8749e
CH
4386 {
4387 case PLUS:
f3bb6135
RE
4388 return "add";
4389
cce8749e 4390 case MINUS:
f3bb6135
RE
4391 return shift_first_arg ? "rsb" : "sub";
4392
cce8749e 4393 case IOR:
f3bb6135
RE
4394 return "orr";
4395
cce8749e 4396 case XOR:
f3bb6135
RE
4397 return "eor";
4398
cce8749e 4399 case AND:
f3bb6135
RE
4400 return "and";
4401
cce8749e 4402 default:
f3bb6135 4403 abort ();
cce8749e 4404 }
f3bb6135 4405}
cce8749e
CH
4406
4407
4408/* Ensure valid constant shifts and return the appropriate shift mnemonic
4409 for the operation code. The returned result should not be overwritten.
4410 OP is the rtx code of the shift.
9997d19d
RE
4411 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
4412 shift. */
cce8749e 4413
9997d19d
RE
4414static char *
4415shift_op (op, amountp)
4416 rtx op;
4417 HOST_WIDE_INT *amountp;
cce8749e 4418{
cce8749e 4419 char *mnem;
e2c671ba 4420 enum rtx_code code = GET_CODE (op);
cce8749e 4421
9997d19d
RE
4422 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
4423 *amountp = -1;
4424 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
4425 *amountp = INTVAL (XEXP (op, 1));
4426 else
4427 abort ();
4428
e2c671ba 4429 switch (code)
cce8749e
CH
4430 {
4431 case ASHIFT:
4432 mnem = "asl";
4433 break;
f3bb6135 4434
cce8749e
CH
4435 case ASHIFTRT:
4436 mnem = "asr";
cce8749e 4437 break;
f3bb6135 4438
cce8749e
CH
4439 case LSHIFTRT:
4440 mnem = "lsr";
cce8749e 4441 break;
f3bb6135 4442
9997d19d
RE
4443 case ROTATERT:
4444 mnem = "ror";
9997d19d
RE
4445 break;
4446
ff9940b0 4447 case MULT:
e2c671ba
RE
4448 /* We never have to worry about the amount being other than a
4449 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
4450 if (*amountp != -1)
4451 *amountp = int_log2 (*amountp);
4452 else
4453 abort ();
f3bb6135
RE
4454 return "asl";
4455
cce8749e 4456 default:
f3bb6135 4457 abort ();
cce8749e
CH
4458 }
4459
e2c671ba
RE
4460 if (*amountp != -1)
4461 {
4462 /* This is not 100% correct, but follows from the desire to merge
4463 multiplication by a power of 2 with the recognizer for a
4464 shift. >=32 is not a valid shift for "asl", so we must try and
4465 output a shift that produces the correct arithmetical result.
ddd5a7c1 4466 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
4467 is not set correctly if we set the flags; but we never use the
4468 carry bit from such an operation, so we can ignore that. */
4469 if (code == ROTATERT)
4470 *amountp &= 31; /* Rotate is just modulo 32 */
4471 else if (*amountp != (*amountp & 31))
4472 {
4473 if (code == ASHIFT)
4474 mnem = "lsr";
4475 *amountp = 32;
4476 }
4477
4478 /* Shifts of 0 are no-ops. */
4479 if (*amountp == 0)
4480 return NULL;
4481 }
4482
9997d19d
RE
4483 return mnem;
4484}
cce8749e
CH
4485
4486
4487/* Obtain the shift from the POWER of two. */
4488
18af7313 4489static HOST_WIDE_INT
cce8749e 4490int_log2 (power)
f3bb6135 4491 HOST_WIDE_INT power;
cce8749e 4492{
f3bb6135 4493 HOST_WIDE_INT shift = 0;
cce8749e 4494
2b835d68 4495 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
4496 {
4497 if (shift > 31)
f3bb6135 4498 abort ();
cce8749e
CH
4499 shift++;
4500 }
f3bb6135
RE
4501
4502 return shift;
4503}
cce8749e 4504
cce8749e
CH
/* Emit the LEN bytes at P as one or more .ascii directives on STREAM,
   keeping track of line lengths.  This is because /bin/as is horribly
   restrictive: lines are kept short, and quote, backslash and
   non-printing characters are escaped.  */

void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     unsigned char *p;
     int len;
{
  int pos;
  int line_len = 1000;		/* Forces a directive on the first byte.  */
  int on_line = 0;		/* Characters emitted on current line.  */

  for (pos = 0; pos < len; pos++)
    {
      register int ch = p[pos];

      /* Start a fresh .ascii directive once the line grows long.  */
      if (line_len > 50)
	{
	  if (on_line)
	    fputs ("\"\n", stream);
	  fputs ("\t.ascii\t\"", stream);
	  line_len = 0;
	  on_line = 0;
	}

      /* Quote and backslash need a backslash escape.  */
      if (ch == '\"' || ch == '\\')
	{
	  putc ('\\', stream);
	  line_len++;
	}

      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable ASCII is emitted directly.  */
	  putc (ch, stream);
	  line_len++;
	}
      else
	{
	  /* Everything else becomes a three-digit octal escape.  */
	  fprintf (stream, "\\%03o", ch);
	  line_len += 4;
	}

      on_line++;
    }

  fputs ("\"\n", stream);
}
cce8749e 4553\f
ff9940b0
RE
4554
4555/* Try to determine whether a pattern really clobbers the link register.
4556 This information is useful when peepholing, so that lr need not be pushed
0e84b556
RK
4557 if we combine a call followed by a return.
4558 NOTE: This code does not check for side-effect expressions in a SET_SRC:
4559 such a check should not be needed because these only update an existing
4560 value within a register; the register must still be set elsewhere within
4561 the function. */
ff9940b0
RE
4562
4563static int
4564pattern_really_clobbers_lr (x)
f3bb6135 4565 rtx x;
ff9940b0
RE
4566{
4567 int i;
4568
4569 switch (GET_CODE (x))
4570 {
4571 case SET:
4572 switch (GET_CODE (SET_DEST (x)))
4573 {
4574 case REG:
4575 return REGNO (SET_DEST (x)) == 14;
f3bb6135 4576
ff9940b0
RE
4577 case SUBREG:
4578 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
4579 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
f3bb6135 4580
0e84b556
RK
4581 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
4582 return 0;
ff9940b0 4583 abort ();
f3bb6135 4584
ff9940b0
RE
4585 default:
4586 return 0;
4587 }
f3bb6135 4588
ff9940b0
RE
4589 case PARALLEL:
4590 for (i = 0; i < XVECLEN (x, 0); i++)
4591 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
4592 return 1;
4593 return 0;
f3bb6135 4594
ff9940b0
RE
4595 case CLOBBER:
4596 switch (GET_CODE (XEXP (x, 0)))
4597 {
4598 case REG:
4599 return REGNO (XEXP (x, 0)) == 14;
f3bb6135 4600
ff9940b0
RE
4601 case SUBREG:
4602 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
4603 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
4604 abort ();
f3bb6135 4605
ff9940b0
RE
4606 default:
4607 return 0;
4608 }
f3bb6135 4609
ff9940b0
RE
4610 case UNSPEC:
4611 return 1;
f3bb6135 4612
ff9940b0
RE
4613 default:
4614 return 0;
4615 }
4616}
4617
4618static int
4619function_really_clobbers_lr (first)
f3bb6135 4620 rtx first;
ff9940b0
RE
4621{
4622 rtx insn, next;
4623
4624 for (insn = first; insn; insn = next_nonnote_insn (insn))
4625 {
4626 switch (GET_CODE (insn))
4627 {
4628 case BARRIER:
4629 case NOTE:
4630 case CODE_LABEL:
4631 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
4632 case INLINE_HEADER:
4633 break;
f3bb6135 4634
ff9940b0
RE
4635 case INSN:
4636 if (pattern_really_clobbers_lr (PATTERN (insn)))
4637 return 1;
4638 break;
f3bb6135 4639
ff9940b0
RE
4640 case CALL_INSN:
4641 /* Don't yet know how to handle those calls that are not to a
4642 SYMBOL_REF */
4643 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4644 abort ();
f3bb6135 4645
ff9940b0
RE
4646 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
4647 {
4648 case CALL:
4649 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
4650 != SYMBOL_REF)
4651 return 1;
4652 break;
f3bb6135 4653
ff9940b0
RE
4654 case SET:
4655 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
4656 0, 0)), 0), 0))
4657 != SYMBOL_REF)
4658 return 1;
4659 break;
f3bb6135 4660
ff9940b0
RE
4661 default: /* Don't recognize it, be safe */
4662 return 1;
4663 }
f3bb6135 4664
ff9940b0
RE
4665 /* A call can be made (by peepholing) not to clobber lr iff it is
4666 followed by a return. There may, however, be a use insn iff
4667 we are returning the result of the call.
4668 If we run off the end of the insn chain, then that means the
4669 call was at the end of the function. Unfortunately we don't
4670 have a return insn for the peephole to recognize, so we
4671 must reject this. (Can this be fixed by adding our own insn?) */
4672 if ((next = next_nonnote_insn (insn)) == NULL)
4673 return 1;
f3bb6135 4674
32de079a
RE
4675 /* No need to worry about lr if the call never returns */
4676 if (GET_CODE (next) == BARRIER)
4677 break;
4678
ff9940b0
RE
4679 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
4680 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4681 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
4682 == REGNO (XEXP (PATTERN (next), 0))))
4683 if ((next = next_nonnote_insn (next)) == NULL)
4684 return 1;
f3bb6135 4685
ff9940b0
RE
4686 if (GET_CODE (next) == JUMP_INSN
4687 && GET_CODE (PATTERN (next)) == RETURN)
4688 break;
4689 return 1;
f3bb6135 4690
ff9940b0
RE
4691 default:
4692 abort ();
4693 }
4694 }
f3bb6135 4695
ff9940b0
RE
4696 /* We have reached the end of the chain so lr was _not_ clobbered */
4697 return 0;
4698}
4699
4700char *
84ed5e79 4701output_return_instruction (operand, really_return, reverse)
f3bb6135
RE
4702 rtx operand;
4703 int really_return;
84ed5e79 4704 int reverse;
ff9940b0
RE
4705{
4706 char instr[100];
4707 int reg, live_regs = 0;
e2c671ba
RE
4708 int volatile_func = (optimize > 0
4709 && TREE_THIS_VOLATILE (current_function_decl));
4710
4711 return_used_this_function = 1;
ff9940b0 4712
e2c671ba
RE
4713 if (volatile_func)
4714 {
4715 rtx ops[2];
4716 /* If this function was declared non-returning, and we have found a tail
4717 call, then we have to trust that the called function won't return. */
4718 if (! really_return)
4719 return "";
4720
4721 /* Otherwise, trap an attempted return by aborting. */
4722 ops[0] = operand;
4723 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
2b835d68 4724 assemble_external_libcall (ops[1]);
84ed5e79 4725 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
e2c671ba
RE
4726 return "";
4727 }
4728
f3bb6135 4729 if (current_function_calls_alloca && ! really_return)
ff9940b0
RE
4730 abort();
4731
f3bb6135
RE
4732 for (reg = 0; reg <= 10; reg++)
4733 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0
RE
4734 live_regs++;
4735
f3bb6135 4736 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
ff9940b0
RE
4737 live_regs++;
4738
4739 if (frame_pointer_needed)
4740 live_regs += 4;
4741
4742 if (live_regs)
4743 {
f3bb6135 4744 if (lr_save_eliminated || ! regs_ever_live[14])
ff9940b0 4745 live_regs++;
f3bb6135 4746
ff9940b0 4747 if (frame_pointer_needed)
84ed5e79
RE
4748 strcpy (instr,
4749 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
ff9940b0 4750 else
84ed5e79
RE
4751 strcpy (instr,
4752 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
4753
4754 for (reg = 0; reg <= 10; reg++)
4755 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0 4756 {
1d5473cb 4757 strcat (instr, "%|");
ff9940b0
RE
4758 strcat (instr, reg_names[reg]);
4759 if (--live_regs)
4760 strcat (instr, ", ");
4761 }
f3bb6135 4762
ff9940b0
RE
4763 if (frame_pointer_needed)
4764 {
1d5473cb 4765 strcat (instr, "%|");
ff9940b0
RE
4766 strcat (instr, reg_names[11]);
4767 strcat (instr, ", ");
1d5473cb 4768 strcat (instr, "%|");
ff9940b0
RE
4769 strcat (instr, reg_names[13]);
4770 strcat (instr, ", ");
1d5473cb 4771 strcat (instr, "%|");
ff9940b0
RE
4772 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4773 }
4774 else
1d5473cb
RE
4775 {
4776 strcat (instr, "%|");
4777 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4778 }
2b835d68 4779 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 4780 output_asm_insn (instr, &operand);
ff9940b0
RE
4781 }
4782 else if (really_return)
4783 {
b111229a
RE
4784 if (TARGET_THUMB_INTERWORK)
4785 sprintf (instr, "bx%%?%%%s\t%%|lr", reverse ? "D" : "d");
4786 else
4787 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
4788 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
f3bb6135 4789 output_asm_insn (instr, &operand);
ff9940b0 4790 }
f3bb6135 4791
ff9940b0
RE
4792 return "";
4793}
4794
e82ea128
DE
4795/* Return nonzero if optimizing and the current function is volatile.
4796 Such functions never return, and many memory cycles can be saved
4797 by not storing register values that will never be needed again.
4798 This optimization was added to speed up context switching in a
4799 kernel application. */
a0b2ce4c 4800
e2c671ba
RE
4801int
4802arm_volatile_func ()
4803{
4804 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4805}
4806
ff9940b0
RE
4807/* The amount of stack adjustment that happens here, in output_return and in
4808 output_epilogue must be exactly the same as was calculated during reload,
4809 or things will point to the wrong place. The only time we can safely
4810 ignore this constraint is when a function has no arguments on the stack,
4811 no stack frame requirement and no live registers execpt for `lr'. If we
4812 can guarantee that by making all function calls into tail calls and that
4813 lr is not clobbered in any other way, then there is no need to push lr
4814 onto the stack. */
4815
cce8749e 4816void
f3bb6135 4817output_func_prologue (f, frame_size)
cce8749e
CH
4818 FILE *f;
4819 int frame_size;
4820{
f3bb6135 4821 int reg, live_regs_mask = 0;
e2c671ba
RE
4822 int volatile_func = (optimize > 0
4823 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 4824
cce8749e
CH
4825 /* Nonzero if we must stuff some register arguments onto the stack as if
4826 they were passed there. */
4827 int store_arg_regs = 0;
4828
abaa26e5
RE
4829 if (arm_ccfsm_state || arm_target_insn)
4830 abort (); /* Sanity check */
31fdb4d5
DE
4831
4832 if (arm_naked_function_p (current_function_decl))
4833 return;
4834
ff9940b0
RE
4835 return_used_this_function = 0;
4836 lr_save_eliminated = 0;
4837
f3139301
DE
4838 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
4839 ASM_COMMENT_START, current_function_args_size,
1d5473cb 4840 current_function_pretend_args_size, frame_size);
f3139301
DE
4841 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
4842 ASM_COMMENT_START, frame_pointer_needed,
1d5473cb 4843 current_function_anonymous_args);
cce8749e 4844
e2c671ba 4845 if (volatile_func)
f3139301 4846 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
e2c671ba 4847
cce8749e
CH
4848 if (current_function_anonymous_args && current_function_pretend_args_size)
4849 store_arg_regs = 1;
4850
f3bb6135
RE
4851 for (reg = 0; reg <= 10; reg++)
4852 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e
CH
4853 live_regs_mask |= (1 << reg);
4854
ff9940b0 4855 if (frame_pointer_needed)
e2c671ba 4856 live_regs_mask |= 0xD800;
cce8749e 4857 else if (regs_ever_live[14])
ff9940b0
RE
4858 {
4859 if (! current_function_args_size
f3bb6135 4860 && ! function_really_clobbers_lr (get_insns ()))
e2c671ba 4861 lr_save_eliminated = 1;
ff9940b0
RE
4862 else
4863 live_regs_mask |= 0x4000;
4864 }
cce8749e 4865
cce8749e
CH
4866 if (live_regs_mask)
4867 {
ff9940b0
RE
4868 /* if a di mode load/store multiple is used, and the base register
4869 is r3, then r4 can become an ever live register without lr
4870 doing so, in this case we need to push lr as well, or we
4871 will fail to get a proper return. */
4872
4873 live_regs_mask |= 0x4000;
4874 lr_save_eliminated = 0;
f3bb6135 4875
cce8749e
CH
4876 }
4877
e2c671ba 4878 if (lr_save_eliminated)
f3139301
DE
4879 fprintf (f,"\t%s I don't think this function clobbers lr\n",
4880 ASM_COMMENT_START);
32de079a
RE
4881
4882#ifdef AOF_ASSEMBLER
4883 if (flag_pic)
4884 fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
4885 reg_names[PIC_OFFSET_TABLE_REGNUM]);
4886#endif
f3bb6135 4887}
cce8749e
CH
4888
4889
4890void
f3bb6135 4891output_func_epilogue (f, frame_size)
cce8749e
CH
4892 FILE *f;
4893 int frame_size;
4894{
b111229a
RE
4895 int reg, live_regs_mask = 0;
4896 /* If we need this then it will always be at least this much */
4897 int floats_offset = 12;
cce8749e 4898 rtx operands[3];
e2c671ba
RE
4899 int volatile_func = (optimize > 0
4900 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 4901
ff9940b0 4902 if (use_return_insn() && return_used_this_function)
cce8749e 4903 {
56636818
JL
4904 if ((frame_size + current_function_outgoing_args_size) != 0
4905 && !(frame_pointer_needed || TARGET_APCS))
4906 abort ();
f3bb6135 4907 goto epilogue_done;
cce8749e 4908 }
cce8749e 4909
31fdb4d5
DE
4910 /* Naked functions don't have epilogues. */
4911 if (arm_naked_function_p (current_function_decl))
4912 goto epilogue_done;
4913
e2c671ba
RE
4914 /* A volatile function should never return. Call abort. */
4915 if (volatile_func)
4916 {
4917 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
2b835d68 4918 assemble_external_libcall (op);
e2c671ba 4919 output_asm_insn ("bl\t%a0", &op);
e2c671ba
RE
4920 goto epilogue_done;
4921 }
4922
f3bb6135
RE
4923 for (reg = 0; reg <= 10; reg++)
4924 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 4925 {
ff9940b0
RE
4926 live_regs_mask |= (1 << reg);
4927 floats_offset += 4;
cce8749e
CH
4928 }
4929
ff9940b0 4930 if (frame_pointer_needed)
cce8749e 4931 {
b111229a
RE
4932 if (arm_fpu_arch == FP_SOFT2)
4933 {
4934 for (reg = 23; reg > 15; reg--)
4935 if (regs_ever_live[reg] && ! call_used_regs[reg])
4936 {
4937 floats_offset += 12;
4938 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
4939 reg_names[reg], REGISTER_PREFIX, floats_offset);
4940 }
4941 }
4942 else
4943 {
4944 int start_reg = 23;
4945
4946 for (reg = 23; reg > 15; reg--)
4947 {
4948 if (regs_ever_live[reg] && ! call_used_regs[reg])
4949 {
4950 floats_offset += 12;
4951 /* We can't unstack more than four registers at once */
4952 if (start_reg - reg == 3)
4953 {
4954 fprintf (f, "\tlfm\t%s%s, 4, [%sfp, #-%d]\n",
4955 REGISTER_PREFIX, reg_names[reg],
4956 REGISTER_PREFIX, floats_offset);
4957 start_reg = reg - 1;
4958 }
4959 }
4960 else
4961 {
4962 if (reg != start_reg)
4963 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4964 REGISTER_PREFIX, reg_names[reg + 1],
4965 start_reg - reg, REGISTER_PREFIX, floats_offset);
ff9940b0 4966
b111229a
RE
4967 start_reg = reg - 1;
4968 }
4969 }
4970
4971 /* Just in case the last register checked also needs unstacking. */
4972 if (reg != start_reg)
4973 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4974 REGISTER_PREFIX, reg_names[reg + 1],
4975 start_reg - reg, REGISTER_PREFIX, floats_offset);
4976 }
4977
4978 if (TARGET_THUMB_INTERWORK)
4979 {
4980 live_regs_mask |= 0x6800;
4981 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask, FALSE);
4982 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
4983 }
4984 else
4985 {
4986 live_regs_mask |= 0xA800;
4987 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
4988 TARGET_APCS_32 ? FALSE : TRUE);
4989 }
cce8749e
CH
4990 }
4991 else
4992 {
d2288d8d 4993 /* Restore stack pointer if necessary. */
56636818 4994 if (frame_size + current_function_outgoing_args_size != 0)
d2288d8d
TG
4995 {
4996 operands[0] = operands[1] = stack_pointer_rtx;
56636818
JL
4997 operands[2] = GEN_INT (frame_size
4998 + current_function_outgoing_args_size);
d2288d8d
TG
4999 output_add_immediate (operands);
5000 }
5001
b111229a
RE
5002 if (arm_fpu_arch == FP_SOFT2)
5003 {
5004 for (reg = 16; reg < 24; reg++)
5005 if (regs_ever_live[reg] && ! call_used_regs[reg])
5006 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
5007 reg_names[reg], REGISTER_PREFIX);
5008 }
5009 else
5010 {
5011 int start_reg = 16;
5012
5013 for (reg = 16; reg < 24; reg++)
5014 {
5015 if (regs_ever_live[reg] && ! call_used_regs[reg])
5016 {
5017 if (reg - start_reg == 3)
5018 {
5019 fprintf (f, "\tlfmfd\t%s%s, 4, [%ssp]!\n",
5020 REGISTER_PREFIX, reg_names[start_reg],
5021 REGISTER_PREFIX);
5022 start_reg = reg + 1;
5023 }
5024 }
5025 else
5026 {
5027 if (reg != start_reg)
5028 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5029 REGISTER_PREFIX, reg_names[start_reg],
5030 reg - start_reg, REGISTER_PREFIX);
5031
5032 start_reg = reg + 1;
5033 }
5034 }
5035
5036 /* Just in case the last register checked also needs unstacking. */
5037 if (reg != start_reg)
5038 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
5039 REGISTER_PREFIX, reg_names[start_reg],
5040 reg - start_reg, REGISTER_PREFIX);
5041 }
5042
cce8749e
CH
5043 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
5044 {
b111229a
RE
5045 if (TARGET_THUMB_INTERWORK)
5046 {
5047 if (! lr_save_eliminated)
5048 print_multi_reg(f, "ldmfd\t%ssp!", live_regs_mask | 0x4000,
5049 FALSE);
5050
5051 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5052 }
5053 else if (lr_save_eliminated)
32de079a
RE
5054 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5055 : "\tmovs\t%spc, %slr\n"),
5056 REGISTER_PREFIX, REGISTER_PREFIX, f);
5057 else
5058 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
5059 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
5060 }
5061 else
5062 {
ff9940b0 5063 if (live_regs_mask || regs_ever_live[14])
cce8749e 5064 {
32de079a
RE
5065 /* Restore the integer regs, and the return address into lr */
5066 if (! lr_save_eliminated)
5067 live_regs_mask |= 0x4000;
5068
5069 if (live_regs_mask != 0)
32de079a 5070 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
cce8749e 5071 }
b111229a 5072
cce8749e
CH
5073 if (current_function_pretend_args_size)
5074 {
32de079a 5075 /* Unwind the pre-pushed regs */
cce8749e 5076 operands[0] = operands[1] = stack_pointer_rtx;
3a598fbe 5077 operands[2] = GEN_INT (current_function_pretend_args_size);
cce8749e
CH
5078 output_add_immediate (operands);
5079 }
32de079a 5080 /* And finally, go home */
b111229a
RE
5081 if (TARGET_THUMB_INTERWORK)
5082 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5083 else
5084 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5085 : "\tmovs\t%spc, %slr\n"),
5086 REGISTER_PREFIX, REGISTER_PREFIX, f);
cce8749e
CH
5087 }
5088 }
f3bb6135 5089
32de079a 5090epilogue_done:
f3bb6135 5091
cce8749e 5092 current_function_anonymous_args = 0;
f3bb6135 5093}
e2c671ba
RE
5094
5095static void
5096emit_multi_reg_push (mask)
5097 int mask;
5098{
5099 int num_regs = 0;
5100 int i, j;
5101 rtx par;
5102
5103 for (i = 0; i < 16; i++)
5104 if (mask & (1 << i))
5105 num_regs++;
5106
5107 if (num_regs == 0 || num_regs > 16)
5108 abort ();
5109
5110 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
5111
5112 for (i = 0; i < 16; i++)
5113 {
5114 if (mask & (1 << i))
5115 {
5116 XVECEXP (par, 0, 0)
5117 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
5118 gen_rtx (PRE_DEC, BLKmode,
5119 stack_pointer_rtx)),
5120 gen_rtx (UNSPEC, BLKmode,
5121 gen_rtvec (1, gen_rtx (REG, SImode, i)),
5122 2));
5123 break;
5124 }
5125 }
5126
5127 for (j = 1, i++; j < num_regs; i++)
5128 {
5129 if (mask & (1 << i))
5130 {
5131 XVECEXP (par, 0, j)
5132 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
5133 j++;
5134 }
5135 }
b111229a
RE
5136
5137 emit_insn (par);
5138}
5139
5140static void
5141emit_sfm (base_reg, count)
5142 int base_reg;
5143 int count;
5144{
5145 rtx par;
5146 int i;
5147
5148 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (count));
5149
5150 XVECEXP (par, 0, 0) = gen_rtx (SET, VOIDmode,
5151 gen_rtx (MEM, BLKmode,
5152 gen_rtx (PRE_DEC, BLKmode,
5153 stack_pointer_rtx)),
5154 gen_rtx (UNSPEC, BLKmode,
5155 gen_rtvec (1, gen_rtx (REG, XFmode,
5156 base_reg++)),
5157 2));
5158 for (i = 1; i < count; i++)
5159 XVECEXP (par, 0, i) = gen_rtx (USE, VOIDmode,
5160 gen_rtx (REG, XFmode, base_reg++));
5161
e2c671ba
RE
5162 emit_insn (par);
5163}
5164
5165void
5166arm_expand_prologue ()
5167{
5168 int reg;
56636818
JL
5169 rtx amount = GEN_INT (-(get_frame_size ()
5170 + current_function_outgoing_args_size));
e2c671ba
RE
5171 int live_regs_mask = 0;
5172 int store_arg_regs = 0;
5173 int volatile_func = (optimize > 0
5174 && TREE_THIS_VOLATILE (current_function_decl));
5175
31fdb4d5
DE
5176 /* Naked functions don't have prologues. */
5177 if (arm_naked_function_p (current_function_decl))
5178 return;
5179
e2c671ba
RE
5180 if (current_function_anonymous_args && current_function_pretend_args_size)
5181 store_arg_regs = 1;
5182
5183 if (! volatile_func)
5184 for (reg = 0; reg <= 10; reg++)
5185 if (regs_ever_live[reg] && ! call_used_regs[reg])
5186 live_regs_mask |= 1 << reg;
5187
5188 if (! volatile_func && regs_ever_live[14])
5189 live_regs_mask |= 0x4000;
5190
5191 if (frame_pointer_needed)
5192 {
5193 live_regs_mask |= 0xD800;
5194 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
5195 stack_pointer_rtx));
5196 }
5197
5198 if (current_function_pretend_args_size)
5199 {
5200 if (store_arg_regs)
5201 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
5202 & 0xf);
5203 else
5204 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5205 GEN_INT (-current_function_pretend_args_size)));
5206 }
5207
5208 if (live_regs_mask)
5209 {
5210 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 5211 we won't get a proper return. */
e2c671ba
RE
5212 live_regs_mask |= 0x4000;
5213 emit_multi_reg_push (live_regs_mask);
5214 }
5215
5216 /* For now the integer regs are still pushed in output_func_epilogue (). */
5217
5218 if (! volatile_func)
b111229a
RE
5219 {
5220 if (arm_fpu_arch == FP_SOFT2)
5221 {
5222 for (reg = 23; reg > 15; reg--)
5223 if (regs_ever_live[reg] && ! call_used_regs[reg])
5224 emit_insn (gen_rtx (SET, VOIDmode,
5225 gen_rtx (MEM, XFmode,
5226 gen_rtx (PRE_DEC, XFmode,
5227 stack_pointer_rtx)),
5228 gen_rtx (REG, XFmode, reg)));
5229 }
5230 else
5231 {
5232 int start_reg = 23;
5233
5234 for (reg = 23; reg > 15; reg--)
5235 {
5236 if (regs_ever_live[reg] && ! call_used_regs[reg])
5237 {
5238 if (start_reg - reg == 3)
5239 {
5240 emit_sfm (reg, 4);
5241 start_reg = reg - 1;
5242 }
5243 }
5244 else
5245 {
5246 if (start_reg != reg)
5247 emit_sfm (reg + 1, start_reg - reg);
5248 start_reg = reg - 1;
5249 }
5250 }
5251
5252 if (start_reg != reg)
5253 emit_sfm (reg + 1, start_reg - reg);
5254 }
5255 }
e2c671ba
RE
5256
5257 if (frame_pointer_needed)
5258 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
5259 (GEN_INT
5260 (-(4 + current_function_pretend_args_size)))));
5261
5262 if (amount != const0_rtx)
5263 {
5264 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
5265 emit_insn (gen_rtx (CLOBBER, VOIDmode,
5266 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
5267 }
5268
5269 /* If we are profiling, make sure no instructions are scheduled before
5270 the call to mcount. */
5271 if (profile_flag || profile_block_flag)
5272 emit_insn (gen_blockage ());
5273}
5274
cce8749e 5275\f
9997d19d
RE
5276/* If CODE is 'd', then the X is a condition operand and the instruction
5277 should only be executed if the condition is true.
ddd5a7c1 5278 if CODE is 'D', then the X is a condition operand and the instruction
9997d19d
RE
5279 should only be executed if the condition is false: however, if the mode
5280 of the comparison is CCFPEmode, then always execute the instruction -- we
5281 do this because in these circumstances !GE does not necessarily imply LT;
5282 in these cases the instruction pattern will take care to make sure that
5283 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 5284 doing this instruction unconditionally.
9997d19d
RE
5285 If CODE is 'N' then X is a floating point operand that must be negated
5286 before output.
5287 If CODE is 'B' then output a bitwise inverted value of X (a const int).
5288 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
5289
5290void
5291arm_print_operand (stream, x, code)
5292 FILE *stream;
5293 rtx x;
5294 int code;
5295{
5296 switch (code)
5297 {
5298 case '@':
f3139301 5299 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
5300 return;
5301
5302 case '|':
f3139301 5303 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
5304 return;
5305
5306 case '?':
5307 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
5308 fputs (arm_condition_codes[arm_current_cc], stream);
5309 return;
5310
5311 case 'N':
5312 {
5313 REAL_VALUE_TYPE r;
5314 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5315 r = REAL_VALUE_NEGATE (r);
5316 fprintf (stream, "%s", fp_const_from_val (&r));
5317 }
5318 return;
5319
5320 case 'B':
5321 if (GET_CODE (x) == CONST_INT)
5322 fprintf (stream,
5323#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
5324 "%d",
5325#else
5326 "%ld",
5327#endif
5328 ARM_SIGN_EXTEND (~ INTVAL (x)));
5329 else
5330 {
5331 putc ('~', stream);
5332 output_addr_const (stream, x);
5333 }
5334 return;
5335
5336 case 'i':
5337 fprintf (stream, "%s", arithmetic_instr (x, 1));
5338 return;
5339
5340 case 'I':
5341 fprintf (stream, "%s", arithmetic_instr (x, 0));
5342 return;
5343
5344 case 'S':
5345 {
5346 HOST_WIDE_INT val;
e2c671ba 5347 char *shift = shift_op (x, &val);
9997d19d 5348
e2c671ba
RE
5349 if (shift)
5350 {
5351 fprintf (stream, ", %s ", shift_op (x, &val));
5352 if (val == -1)
5353 arm_print_operand (stream, XEXP (x, 1), 0);
5354 else
5355 fprintf (stream,
9997d19d 5356#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
e2c671ba 5357 "#%d",
9997d19d 5358#else
e2c671ba 5359 "#%ld",
9997d19d 5360#endif
e2c671ba
RE
5361 val);
5362 }
9997d19d
RE
5363 }
5364 return;
5365
c1c2bc04
RE
5366 case 'Q':
5367 if (REGNO (x) > 15)
5368 abort ();
5369 fputs (REGISTER_PREFIX, stream);
5370 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
5371 return;
5372
9997d19d
RE
5373 case 'R':
5374 if (REGNO (x) > 15)
5375 abort ();
f3139301 5376 fputs (REGISTER_PREFIX, stream);
c1c2bc04 5377 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
9997d19d
RE
5378 return;
5379
5380 case 'm':
f3139301 5381 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
5382 if (GET_CODE (XEXP (x, 0)) == REG)
5383 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
5384 else
5385 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
5386 return;
5387
5388 case 'M':
f3139301
DE
5389 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
5390 REGISTER_PREFIX, reg_names[REGNO (x) - 1
1d5473cb
RE
5391 + ((GET_MODE_SIZE (GET_MODE (x))
5392 + GET_MODE_SIZE (SImode) - 1)
5393 / GET_MODE_SIZE (SImode))]);
9997d19d
RE
5394 return;
5395
5396 case 'd':
5397 if (x)
5398 fputs (arm_condition_codes[get_arm_condition_code (x)],
5399 stream);
5400 return;
5401
5402 case 'D':
84ed5e79 5403 if (x)
9997d19d
RE
5404 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
5405 (get_arm_condition_code (x))],
5406 stream);
5407 return;
5408
5409 default:
5410 if (x == 0)
5411 abort ();
5412
5413 if (GET_CODE (x) == REG)
1d5473cb 5414 {
f3139301 5415 fputs (REGISTER_PREFIX, stream);
1d5473cb
RE
5416 fputs (reg_names[REGNO (x)], stream);
5417 }
9997d19d
RE
5418 else if (GET_CODE (x) == MEM)
5419 {
5420 output_memory_reference_mode = GET_MODE (x);
5421 output_address (XEXP (x, 0));
5422 }
5423 else if (GET_CODE (x) == CONST_DOUBLE)
5424 fprintf (stream, "#%s", fp_immediate_constant (x));
5425 else if (GET_CODE (x) == NEG)
5426 abort (); /* This should never happen now. */
5427 else
5428 {
5429 fputc ('#', stream);
5430 output_addr_const (stream, x);
5431 }
5432 }
5433}
5434
cce8749e
CH
5435\f
/* A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decrease execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE.  */

/* The states of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: make ASM_OUTPUT_OPCODE not output this instruction
   2: make ASM_OUTPUT_OPCODE not output this instruction
   3: make instructions conditional
   4: make instructions conditional

   State transitions (state->state by whom under condition):
   0 -> 1 final_prescan_insn if the `target' is a label
   0 -> 2 final_prescan_insn if the `target' is an unconditional branch
   1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
   2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
   3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
          (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
   4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
          (the target insn is arm_target_insn).

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   XXX In case the `target' is an unconditional branch, this conditionalising
   of the instructions always reduces code size, but not always execution
   time.  But then, I want to reduce the code size to somewhere near what
   /bin/cc produces.  */
5466
cce8749e
CH
5467/* Returns the index of the ARM condition code string in
5468 `arm_condition_codes'. COMPARISON should be an rtx like
5469 `(eq (...) (...))'. */
5470
84ed5e79 5471static enum arm_cond_code
cce8749e
CH
5472get_arm_condition_code (comparison)
5473 rtx comparison;
5474{
5165176d 5475 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
84ed5e79
RE
5476 register int code;
5477 register enum rtx_code comp_code = GET_CODE (comparison);
5165176d
RE
5478
5479 if (GET_MODE_CLASS (mode) != MODE_CC)
84ed5e79 5480 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165176d
RE
5481 XEXP (comparison, 1));
5482
5483 switch (mode)
cce8749e 5484 {
84ed5e79
RE
5485 case CC_DNEmode: code = ARM_NE; goto dominance;
5486 case CC_DEQmode: code = ARM_EQ; goto dominance;
5487 case CC_DGEmode: code = ARM_GE; goto dominance;
5488 case CC_DGTmode: code = ARM_GT; goto dominance;
5489 case CC_DLEmode: code = ARM_LE; goto dominance;
5490 case CC_DLTmode: code = ARM_LT; goto dominance;
5491 case CC_DGEUmode: code = ARM_CS; goto dominance;
5492 case CC_DGTUmode: code = ARM_HI; goto dominance;
5493 case CC_DLEUmode: code = ARM_LS; goto dominance;
5494 case CC_DLTUmode: code = ARM_CC;
5495
5496 dominance:
5497 if (comp_code != EQ && comp_code != NE)
5498 abort ();
5499
5500 if (comp_code == EQ)
5501 return ARM_INVERSE_CONDITION_CODE (code);
5502 return code;
5503
5165176d 5504 case CC_NOOVmode:
84ed5e79 5505 switch (comp_code)
5165176d 5506 {
84ed5e79
RE
5507 case NE: return ARM_NE;
5508 case EQ: return ARM_EQ;
5509 case GE: return ARM_PL;
5510 case LT: return ARM_MI;
5165176d
RE
5511 default: abort ();
5512 }
5513
5514 case CC_Zmode:
5515 case CCFPmode:
84ed5e79 5516 switch (comp_code)
5165176d 5517 {
84ed5e79
RE
5518 case NE: return ARM_NE;
5519 case EQ: return ARM_EQ;
5165176d
RE
5520 default: abort ();
5521 }
5522
5523 case CCFPEmode:
84ed5e79
RE
5524 switch (comp_code)
5525 {
5526 case GE: return ARM_GE;
5527 case GT: return ARM_GT;
5528 case LE: return ARM_LS;
5529 case LT: return ARM_MI;
5530 default: abort ();
5531 }
5532
5533 case CC_SWPmode:
5534 switch (comp_code)
5535 {
5536 case NE: return ARM_NE;
5537 case EQ: return ARM_EQ;
5538 case GE: return ARM_LE;
5539 case GT: return ARM_LT;
5540 case LE: return ARM_GE;
5541 case LT: return ARM_GT;
5542 case GEU: return ARM_LS;
5543 case GTU: return ARM_CC;
5544 case LEU: return ARM_CS;
5545 case LTU: return ARM_HI;
5546 default: abort ();
5547 }
5548
bd9c7e23
RE
5549 case CC_Cmode:
5550 switch (comp_code)
5551 {
5552 case LTU: return ARM_CS;
5553 case GEU: return ARM_CC;
5554 default: abort ();
5555 }
5556
5165176d 5557 case CCmode:
84ed5e79 5558 switch (comp_code)
5165176d 5559 {
84ed5e79
RE
5560 case NE: return ARM_NE;
5561 case EQ: return ARM_EQ;
5562 case GE: return ARM_GE;
5563 case GT: return ARM_GT;
5564 case LE: return ARM_LE;
5565 case LT: return ARM_LT;
5566 case GEU: return ARM_CS;
5567 case GTU: return ARM_HI;
5568 case LEU: return ARM_LS;
5569 case LTU: return ARM_CC;
5165176d
RE
5570 default: abort ();
5571 }
5572
cce8749e
CH
5573 default: abort ();
5574 }
84ed5e79
RE
5575
5576 abort ();
f3bb6135 5577}
cce8749e
CH
5578
5579
5580void
5581final_prescan_insn (insn, opvec, noperands)
5582 rtx insn;
5583 rtx *opvec;
5584 int noperands;
5585{
5586 /* BODY will hold the body of INSN. */
5587 register rtx body = PATTERN (insn);
5588
5589 /* This will be 1 if trying to repeat the trick, and things need to be
5590 reversed if it appears to fail. */
5591 int reverse = 0;
5592
ff9940b0
RE
5593 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
5594 taken are clobbered, even if the rtl suggests otherwise. It also
5595 means that we have to grub around within the jump expression to find
5596 out what the conditions are when the jump isn't taken. */
5597 int jump_clobbers = 0;
5598
5599 /* If we start with a return insn, we only succeed if we find another one. */
5600 int seeking_return = 0;
5601
cce8749e
CH
5602 /* START_INSN will hold the insn from where we start looking. This is the
5603 first insn after the following code_label if REVERSE is true. */
5604 rtx start_insn = insn;
5605
5606 /* If in state 4, check if the target branch is reached, in order to
5607 change back to state 0. */
5608 if (arm_ccfsm_state == 4)
5609 {
5610 if (insn == arm_target_insn)
abaa26e5
RE
5611 {
5612 arm_target_insn = NULL;
cce8749e 5613 arm_ccfsm_state = 0;
abaa26e5 5614 }
cce8749e
CH
5615 return;
5616 }
5617
5618 /* If in state 3, it is possible to repeat the trick, if this insn is an
5619 unconditional branch to a label, and immediately following this branch
5620 is the previous target label which is only used once, and the label this
5621 branch jumps to is not too far off. */
5622 if (arm_ccfsm_state == 3)
5623 {
5624 if (simplejump_p (insn))
5625 {
5626 start_insn = next_nonnote_insn (start_insn);
5627 if (GET_CODE (start_insn) == BARRIER)
5628 {
5629 /* XXX Isn't this always a barrier? */
5630 start_insn = next_nonnote_insn (start_insn);
5631 }
5632 if (GET_CODE (start_insn) == CODE_LABEL
5633 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5634 && LABEL_NUSES (start_insn) == 1)
5635 reverse = TRUE;
5636 else
5637 return;
5638 }
ff9940b0
RE
5639 else if (GET_CODE (body) == RETURN)
5640 {
5641 start_insn = next_nonnote_insn (start_insn);
5642 if (GET_CODE (start_insn) == BARRIER)
5643 start_insn = next_nonnote_insn (start_insn);
5644 if (GET_CODE (start_insn) == CODE_LABEL
5645 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5646 && LABEL_NUSES (start_insn) == 1)
5647 {
5648 reverse = TRUE;
5649 seeking_return = 1;
5650 }
5651 else
5652 return;
5653 }
cce8749e
CH
5654 else
5655 return;
5656 }
5657
5658 if (arm_ccfsm_state != 0 && !reverse)
5659 abort ();
5660 if (GET_CODE (insn) != JUMP_INSN)
5661 return;
5662
ddd5a7c1 5663 /* This jump might be paralleled with a clobber of the condition codes
ff9940b0
RE
5664 the jump should always come first */
5665 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
5666 body = XVECEXP (body, 0, 0);
5667
5668#if 0
5669 /* If this is a conditional return then we don't want to know */
5670 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5671 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
5672 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
5673 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
5674 return;
5675#endif
5676
cce8749e
CH
5677 if (reverse
5678 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5679 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
5680 {
bd9c7e23
RE
5681 int insns_skipped;
5682 int fail = FALSE, succeed = FALSE;
cce8749e
CH
5683 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
5684 int then_not_else = TRUE;
ff9940b0 5685 rtx this_insn = start_insn, label = 0;
cce8749e 5686
ff9940b0 5687 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40
RE
5688 {
5689 /* The code below is wrong for these, and I haven't time to
5690 fix it now. So we just do the safe thing and return. This
5691 whole function needs re-writing anyway. */
5692 jump_clobbers = 1;
5693 return;
5694 }
ff9940b0 5695
cce8749e
CH
5696 /* Register the insn jumped to. */
5697 if (reverse)
ff9940b0
RE
5698 {
5699 if (!seeking_return)
5700 label = XEXP (SET_SRC (body), 0);
5701 }
cce8749e
CH
5702 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
5703 label = XEXP (XEXP (SET_SRC (body), 1), 0);
5704 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
5705 {
5706 label = XEXP (XEXP (SET_SRC (body), 2), 0);
5707 then_not_else = FALSE;
5708 }
ff9940b0
RE
5709 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
5710 seeking_return = 1;
5711 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
5712 {
5713 seeking_return = 1;
5714 then_not_else = FALSE;
5715 }
cce8749e
CH
5716 else
5717 abort ();
5718
5719 /* See how many insns this branch skips, and what kind of insns. If all
5720 insns are okay, and the label or unconditional branch to the same
5721 label is not too far away, succeed. */
5722 for (insns_skipped = 0;
bd9c7e23 5723 !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
cce8749e
CH
5724 {
5725 rtx scanbody;
5726
5727 this_insn = next_nonnote_insn (this_insn);
5728 if (!this_insn)
5729 break;
5730
cce8749e
CH
5731 switch (GET_CODE (this_insn))
5732 {
5733 case CODE_LABEL:
5734 /* Succeed if it is the target label, otherwise fail since
5735 control falls in from somewhere else. */
5736 if (this_insn == label)
5737 {
ff9940b0
RE
5738 if (jump_clobbers)
5739 {
5740 arm_ccfsm_state = 2;
5741 this_insn = next_nonnote_insn (this_insn);
5742 }
5743 else
5744 arm_ccfsm_state = 1;
cce8749e
CH
5745 succeed = TRUE;
5746 }
5747 else
5748 fail = TRUE;
5749 break;
5750
ff9940b0 5751 case BARRIER:
cce8749e 5752 /* Succeed if the following insn is the target label.
ff9940b0
RE
5753 Otherwise fail.
5754 If return insns are used then the last insn in a function
5755 will be a barrier. */
cce8749e 5756 this_insn = next_nonnote_insn (this_insn);
ff9940b0 5757 if (this_insn && this_insn == label)
cce8749e 5758 {
ff9940b0
RE
5759 if (jump_clobbers)
5760 {
5761 arm_ccfsm_state = 2;
5762 this_insn = next_nonnote_insn (this_insn);
5763 }
5764 else
5765 arm_ccfsm_state = 1;
cce8749e
CH
5766 succeed = TRUE;
5767 }
5768 else
5769 fail = TRUE;
5770 break;
5771
ff9940b0 5772 case CALL_INSN:
2b835d68
RE
5773 /* If using 32-bit addresses the cc is not preserved over
5774 calls */
5775 if (TARGET_APCS_32)
bd9c7e23
RE
5776 {
5777 /* Succeed if the following insn is the target label,
5778 or if the following two insns are a barrier and
5779 the target label. */
5780 this_insn = next_nonnote_insn (this_insn);
5781 if (this_insn && GET_CODE (this_insn) == BARRIER)
5782 this_insn = next_nonnote_insn (this_insn);
5783
5784 if (this_insn && this_insn == label
5785 && insns_skipped < MAX_INSNS_SKIPPED)
5786 {
5787 if (jump_clobbers)
5788 {
5789 arm_ccfsm_state = 2;
5790 this_insn = next_nonnote_insn (this_insn);
5791 }
5792 else
5793 arm_ccfsm_state = 1;
5794 succeed = TRUE;
5795 }
5796 else
5797 fail = TRUE;
5798 }
ff9940b0 5799 break;
2b835d68 5800
cce8749e
CH
5801 case JUMP_INSN:
5802 /* If this is an unconditional branch to the same label, succeed.
5803 If it is to another label, do nothing. If it is conditional,
5804 fail. */
ed4c4348 5805 /* XXX Probably, the tests for SET and the PC are unnecessary. */
cce8749e 5806
ed4c4348 5807 scanbody = PATTERN (this_insn);
ff9940b0
RE
5808 if (GET_CODE (scanbody) == SET
5809 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
5810 {
5811 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
5812 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
5813 {
5814 arm_ccfsm_state = 2;
5815 succeed = TRUE;
5816 }
5817 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
5818 fail = TRUE;
5819 }
ff9940b0
RE
5820 else if (GET_CODE (scanbody) == RETURN
5821 && seeking_return)
5822 {
5823 arm_ccfsm_state = 2;
5824 succeed = TRUE;
5825 }
5826 else if (GET_CODE (scanbody) == PARALLEL)
5827 {
5828 switch (get_attr_conds (this_insn))
5829 {
5830 case CONDS_NOCOND:
5831 break;
5832 default:
5833 fail = TRUE;
5834 break;
5835 }
5836 }
cce8749e
CH
5837 break;
5838
5839 case INSN:
ff9940b0
RE
5840 /* Instructions using or affecting the condition codes make it
5841 fail. */
ed4c4348 5842 scanbody = PATTERN (this_insn);
ff9940b0
RE
5843 if ((GET_CODE (scanbody) == SET
5844 || GET_CODE (scanbody) == PARALLEL)
5845 && get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
5846 fail = TRUE;
5847 break;
5848
5849 default:
5850 break;
5851 }
5852 }
5853 if (succeed)
5854 {
ff9940b0 5855 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 5856 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
5857 else if (seeking_return || arm_ccfsm_state == 2)
5858 {
5859 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
5860 {
5861 this_insn = next_nonnote_insn (this_insn);
5862 if (this_insn && (GET_CODE (this_insn) == BARRIER
5863 || GET_CODE (this_insn) == CODE_LABEL))
5864 abort ();
5865 }
5866 if (!this_insn)
5867 {
5868 /* Oh, dear! we ran off the end.. give up */
5869 recog (PATTERN (insn), insn, NULL_PTR);
5870 arm_ccfsm_state = 0;
abaa26e5 5871 arm_target_insn = NULL;
ff9940b0
RE
5872 return;
5873 }
5874 arm_target_insn = this_insn;
5875 }
cce8749e
CH
5876 else
5877 abort ();
ff9940b0
RE
5878 if (jump_clobbers)
5879 {
5880 if (reverse)
5881 abort ();
5882 arm_current_cc =
5883 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
5884 0), 0), 1));
5885 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
5886 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5887 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
5888 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5889 }
5890 else
5891 {
5892 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
5893 what it was. */
5894 if (!reverse)
5895 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
5896 0));
5897 }
cce8749e 5898
cce8749e
CH
5899 if (reverse || then_not_else)
5900 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5901 }
ff9940b0
RE
5902 /* restore recog_operand (getting the attributes of other insns can
5903 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 5904 across this call; since the insn has been recognized already we
ff9940b0
RE
5905 call recog direct). */
5906 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 5907 }
f3bb6135 5908}
cce8749e 5909
2b835d68
RE
5910#ifdef AOF_ASSEMBLER
5911/* Special functions only needed when producing AOF syntax assembler. */
5912
32de079a
RE
5913rtx aof_pic_label = NULL_RTX;
5914struct pic_chain
5915{
5916 struct pic_chain *next;
5917 char *symname;
5918};
5919
5920static struct pic_chain *aof_pic_chain = NULL;
5921
5922rtx
5923aof_pic_entry (x)
5924 rtx x;
5925{
5926 struct pic_chain **chainp;
5927 int offset;
5928
5929 if (aof_pic_label == NULL_RTX)
5930 {
5931 /* This needs to persist throughout the compilation. */
5932 end_temporary_allocation ();
5933 aof_pic_label = gen_rtx (SYMBOL_REF, Pmode, "x$adcons");
5934 resume_temporary_allocation ();
5935 }
5936
5937 for (offset = 0, chainp = &aof_pic_chain; *chainp;
5938 offset += 4, chainp = &(*chainp)->next)
5939 if ((*chainp)->symname == XSTR (x, 0))
5940 return plus_constant (aof_pic_label, offset);
5941
5942 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
5943 (*chainp)->next = NULL;
5944 (*chainp)->symname = XSTR (x, 0);
5945 return plus_constant (aof_pic_label, offset);
5946}
5947
5948void
5949aof_dump_pic_table (f)
5950 FILE *f;
5951{
5952 struct pic_chain *chain;
5953
5954 if (aof_pic_chain == NULL)
5955 return;
5956
5957 fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
5958 reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
5959 reg_names[PIC_OFFSET_TABLE_REGNUM]);
5960 fputs ("|x$adcons|\n", f);
5961
5962 for (chain = aof_pic_chain; chain; chain = chain->next)
5963 {
5964 fputs ("\tDCD\t", f);
5965 assemble_name (f, chain->symname);
5966 fputs ("\n", f);
5967 }
5968}
5969
2b835d68
RE
5970int arm_text_section_count = 1;
5971
5972char *
84ed5e79 5973aof_text_section ()
2b835d68
RE
5974{
5975 static char buf[100];
2b835d68
RE
5976 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
5977 arm_text_section_count++);
5978 if (flag_pic)
5979 strcat (buf, ", PIC, REENTRANT");
5980 return buf;
5981}
5982
/* Counter used to give each data AREA a unique name.  */
static int arm_data_section_count = 1;

/* Return the directive opening a fresh, uniquely numbered data AREA.
   The result lives in a static buffer overwritten on each call.  */
char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}
5992
5993/* The AOF assembler is religiously strict about declarations of
5994 imported and exported symbols, so that it is impossible to declare
956d6950 5995 a function as imported near the beginning of the file, and then to
2b835d68
RE
5996 export it later on. It is, however, possible to delay the decision
5997 until all the functions in the file have been compiled. To get
5998 around this, we maintain a list of the imports and exports, and
5999 delete from it any that are subsequently defined. At the end of
6000 compilation we spit the remainder of the list out before the END
6001 directive. */
6002
6003struct import
6004{
6005 struct import *next;
6006 char *name;
6007};
6008
6009static struct import *imports_list = NULL;
6010
6011void
6012aof_add_import (name)
6013 char *name;
6014{
6015 struct import *new;
6016
6017 for (new = imports_list; new; new = new->next)
6018 if (new->name == name)
6019 return;
6020
6021 new = (struct import *) xmalloc (sizeof (struct import));
6022 new->next = imports_list;
6023 imports_list = new;
6024 new->name = name;
6025}
6026
6027void
6028aof_delete_import (name)
6029 char *name;
6030{
6031 struct import **old;
6032
6033 for (old = &imports_list; *old; old = & (*old)->next)
6034 {
6035 if ((*old)->name == name)
6036 {
6037 *old = (*old)->next;
6038 return;
6039 }
6040 }
6041}
6042
6043int arm_main_function = 0;
6044
6045void
6046aof_dump_imports (f)
6047 FILE *f;
6048{
6049 /* The AOF assembler needs this to cause the startup code to be extracted
6050 from the library. Brining in __main causes the whole thing to work
6051 automagically. */
6052 if (arm_main_function)
6053 {
6054 text_section ();
6055 fputs ("\tIMPORT __main\n", f);
6056 fputs ("\tDCD __main\n", f);
6057 }
6058
6059 /* Now dump the remaining imports. */
6060 while (imports_list)
6061 {
6062 fprintf (f, "\tIMPORT\t");
6063 assemble_name (f, imports_list->name);
6064 fputc ('\n', f);
6065 imports_list = imports_list->next;
6066 }
6067}
6068#endif /* AOF_ASSEMBLER */
This page took 2.413163 seconds and 5 git commands to generate.