/* Output routines for GCC for ARM/RISCiX.
   Copyright (C) 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include <stdio.h>
#include <string.h>
#include "assert.h"
#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "expr.h"

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED 5

/* Some function declarations.  */
extern FILE *asm_out_file;
extern char *output_multi_immediate ();
extern void arm_increase_location ();

HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static int get_prologue_size PROTO ((void));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
                                    HOST_WIDE_INT, rtx, rtx, int, int));

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */

rtx arm_compare_op0, arm_compare_op1;
int arm_compare_fp;

/* What type of cpu are we compiling for?  */
enum processor_type arm_cpu;

/* What type of floating point are we compiling for?  */
enum floating_point_type arm_fpu;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

char *target_cpu_name = ARM_CPU_NAME;
char *target_fpe_name = NULL;

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* A hash table is used to store text segment labels and their associated
   offset from the start of the text segment.  */
struct label_offset
{
  char *name;
  int offset;
  struct label_offset *cdr;
};

#define LABEL_HASH_SIZE 257

static struct label_offset *offset_table[LABEL_HASH_SIZE];

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */

static int return_used_this_function;

static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
int arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define ARM_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)

/* Initialization code.  */

#define FL_CO_PROC    0x01   /* Has external co-processor bus */
#define FL_FAST_MULT  0x02   /* Fast multiply */
#define FL_MODE26     0x04   /* 26-bit mode support */
#define FL_MODE32     0x08   /* 32-bit mode support */
#define FL_ARCH4      0x10   /* Architecture rel 4 */
#define FL_THUMB      0x20   /* Thumb aware */

struct processors
{
  char *name;
  enum processor_type type;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_procs[] =
{
  {"arm2",     PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm250",   PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm3",     PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm6",     PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm60",    PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm600",   PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm610",   PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
  {"arm620",   PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7",     PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm70",    PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7d",    PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7di",   PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7dm",   PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                | FL_MODE26)},
  {"arm7dmi",  PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                | FL_MODE26)},
  {"arm700",   PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm700i",  PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm710",   PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm710c",  PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7500",  PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7tdmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                | FL_ARCH4 | FL_THUMB)},
  {NULL, 0, 0}
};

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  int arm_thumb_aware = 0;

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_6)
    {
      warning ("Option '-m6' deprecated.  Use: '-mapcs-32' or -mcpu-<proc>");
      target_flags |= ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM6;
    }

  if (TARGET_3)
    {
      warning ("Option '-m3' deprecated.  Use: '-mapcs-26' or -mcpu-<proc>");
      target_flags &= ~ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM2;
    }

  if ((TARGET_3 || TARGET_6) && target_cpu_name != NULL)
    fatal ("Incompatible mix of old and new options.  -m%d and -mcpu-%s",
           TARGET_3 ? 3 : 6, target_cpu_name);

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  if (flag_pic)
    warning ("Position independent code not supported.  Ignored");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  arm_cpu = TARGET_6 ? PROCESSOR_ARM6 : PROCESSOR_ARM2;
  arm_fpu = FP_HARD;

  if (target_cpu_name != NULL)
    {
      char *c = target_cpu_name;
      struct processors *proc;

      /* Match against the supported types.  */
      for (proc = all_procs; proc->name != NULL; proc++)
        {
          if (strcmp (proc->name, c) == 0)
            break;
        }

      if (proc->name)
        {
          arm_cpu = proc->type;

          /* Default value for floating point code... if no co-processor
             bus, then schedule for emulated floating point.  Otherwise,
             assume the user has an FPA, unless overridden with -mfpe-...  */
          if ((proc->flags & FL_CO_PROC) == 0)
            arm_fpu = FP_SOFT3;
          else
            arm_fpu = FP_HARD;
          arm_fast_multiply = (proc->flags & FL_FAST_MULT) != 0;
          arm_arch4 = (proc->flags & FL_ARCH4) != 0;
          arm_thumb_aware = (proc->flags & FL_THUMB) != 0;
          /* Processors with a load delay slot can load constants faster
             from the pool than it takes to construct them, so reduce the
             complexity of the constant that we will try to generate
             inline.  */
        }
      else
        fatal ("Unrecognized cpu type: %s", target_cpu_name);
    }

  if (target_fpe_name)
    {
      if (strcmp (target_fpe_name, "2") == 0)
        arm_fpu = FP_SOFT2;
      else if (strcmp (target_fpe_name, "3") == 0)
        arm_fpu = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fpe_name);
    }

  if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
    {
      warning ("This processor variant does not support Thumb interworking");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
}
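
/* Illustrative summary (derived from the all_procs table above, not part of
   the original source): "-mcpu-arm7tdmi" selects PROCESSOR_ARM7 with the
   fast multiplier, architecture 4 and Thumb awareness, and keeps FP_HARD
   because the chip has a co-processor bus; "-mcpu-arm7500" has no
   co-processor bus, so its default FPU becomes FP_SOFT3 unless -mfpe-2 or
   -mfpe-3 overrides it.  */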

/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn ()
{
  int regno;

  if (!reload_completed || current_function_pretend_args_size
      || current_function_anonymous_args
      || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 20; regno < 24; regno++)
    if (regs_ever_live[regno])
      return 0;

  return 1;
}

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~0xFF;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask = ((mask << 2)
              | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff) >> (32 - 2))
              | ~((unsigned HOST_WIDE_INT) 0xffffffff));
    } while (mask != ~0xFF);

  return FALSE;
}
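
/* A worked example of the test above: an ARM data-processing immediate is
   an 8-bit value rotated right by an even amount, so 0x3FC (0xFF rotated
   right by 30) is accepted, while 0x1FE would need an odd rotation and is
   rejected; the mask is rotated two bits at a time so that every even
   rotation is tried.  */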

/* Return true if I is a valid constant for the operation CODE.  */
int
const_ok_for_op (i, code, mode)
     HOST_WIDE_INT i;
     enum rtx_code code;
     enum machine_mode mode;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:   /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required; it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      rtx temp;

      if (arm_gen_constant (code, mode, val, target, source, 1, 0)
          > arm_constant_limit + (code != SET))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE; all
                 the rest are dyadic.  */
              emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx (SET, VOIDmode, target,
                                    gen_rtx (code, mode, temp, source)));
              else
                emit_insn (gen_rtx (SET, VOIDmode, target,
                                    gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
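
/* For instance, with the default arm_constant_limit of 3, a SET of a value
   that arm_gen_constant estimates at four or more insns is emitted here as
   a single (set (reg) (const_int ...)), so that it can later be loaded from
   the constant pool rather than built up with a long mov/orr sequence.  */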

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_add = 0;
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  rtx new_src;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (NOT, mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (NEG, mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (MINUS, mode, GEN_INT (val), source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn, get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx (SET, VOIDmode, target,
                            (source ? gen_rtx (code, mode, source,
                                               GEN_INT (val))
                             : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx (SET, VOIDmode, new_src,
                                      GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits;
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx (SET, VOIDmode, new_src,
                                      GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases; to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  insns = arm_gen_constant (code, mode, temp2,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx (SET, VOIDmode, target,
                                        gen_rtx (IOR, mode,
                                                 gen_rtx (ASHIFT, mode, source,
                                                          GEN_INT (i)),
                                                 source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  insns = arm_gen_constant (code, mode, temp1,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx (SET, VOIDmode, target,
                                        gen_rtx (IOR, mode,
                                                 gen_rtx (LSHIFTRT, mode,
                                                          source, GEN_INT (i)),
                                                 source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the inverse of the constant can be loaded
         in a single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx (SET, VOIDmode, sub,
                                      GEN_INT (ARM_SIGN_EXTEND (~ val))));
                  emit_insn (gen_rtx (SET, VOIDmode, target,
                                      gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (ASHIFT, mode, source,
                                                    shift))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (LSHIFTRT, mode, sub,
                                                    shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (LSHIFTRT, mode, source,
                                                    shift))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (ASHIFT, mode, sub,
                                                    shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (AND, mode, source,
                                           GEN_INT (temp1))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insns' worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          new_source, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_sign_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_ashlsi3 (new_source, source, shift));
              emit_insn (gen_lshrsi3 (target, new_source, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          new_source, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_zero_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_lshrsi3 (new_source, source, shift));
              emit_insn (gen_ashlsi3 (target, new_source, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that is aligned on
     a 2-bit boundary; we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (code == SET)
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (subtargets
                                                 ? gen_reg_rtx (mode)
                                                 : target),
                                      GEN_INT (can_invert ? ~temp1 : temp1)));
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (subtargets
                                                 ? gen_reg_rtx (mode)
                                                 : target),
                                      gen_rtx (code, mode, GEN_INT (temp1),
                                               source)));
                code = PLUS;
              }
            else
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (remainder
                                                 ? (subtargets
                                                    ? gen_reg_rtx (mode)
                                                    : target)
                                                 : target),
                                      gen_rtx (code, mode, source,
                                               GEN_INT (can_invert ? ~temp1
                                                        : (can_negate
                                                           ? -temp1
                                                           : temp1)))));
              }

            insns++;
            source = new_src;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }
  return insns;
}
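
/* As an illustration of the chunking loop above: a SET of the constant
   0xFFF (not a valid immediate) is emitted as two insns, roughly
   "mov rT, #0xFF0" followed by "add rD, rT, #0xF"; the high-order chunk is
   emitted first so that the final, smaller constant has the best chance of
   combining with a following addressing insn.  */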

/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
         bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        if (TREE_CODE (field) != FIELD_DECL
            || ! DECL_BIT_FIELD_TYPE (field))
          return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL
              || (AGGREGATE_TYPE_P (TREE_TYPE (field))
                  && RETURN_IN_MEMORY (TREE_TYPE (field)))
              || FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;
        }
      return 0;
    }
  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}
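
/* For example, a struct whose only members are bit-fields, such as
   "struct { unsigned a : 16; unsigned b : 16; }", is returned in a
   register, whereas "struct { short a; short b; }" is returned in memory
   even though it is the same size.  */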

#define REG_OR_SUBREG_REG(X) \
  (GET_CODE (X) == REG \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X) \
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X) \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
   || (X) == arg_pointer_rtx)

int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
                     && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                     && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                         || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
                   ? 0 : 4));

      return 8;

    case MULT:
      if (arm_fast_multiply && mode == DImode
          && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
          && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
        return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          || mode == DImode)
        return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
                                      & (unsigned HOST_WIDE_INT) 0xffffffff);
          int add_cost = const_ok_for_arm (i) ? 4 : 8;
          int j;
          int booth_unit_size = (arm_fast_multiply ? 8 : 2);

          for (j = 0; i && j < 32; j += booth_unit_size)
            {
              i >>= booth_unit_size;
              add_cost += 2;
            }

          return add_cost;
        }

      return ((arm_fast_multiply ? 8 : 30)
              + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
              + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
        return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
        return (4 + (mode == DImode ? 4 : 0)
                + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          return (1 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case HImode:
          return (4 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case SImode:
          return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
        }
      abort ();

    default:
      return 99;
    }
}
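
/* The MULT case above models the ARM's Booth multiplier: for example, with
   arm_fast_multiply clear, multiplying by the constant 21 (binary 10101)
   starts from a base cost of 4 (21 is a valid immediate) and adds 2 for
   each 2-bit step needed to consume the constant, giving 4 + 2*3 = 10.  */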

/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

char *strings_fpa[8] = {
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}

/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
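
/* Thus CONST_DOUBLEs for 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and 10.0 pass
   const_double_rtx_ok_for_fpu, while (for example) -1.0 only passes the
   negated test above; fpu_add_operand below accepts the latter,
   presumably because an add of -1.0 can instead be output as a subtract
   of 1.0.  */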

/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int regno = true_regnum (op);

  return (! CONSTANT_P (op)
          && (regno == -1
              || (GET_CODE (op) == REG
                  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or
   a load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
          || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
            || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}

/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (immediate_operand (op, mode)
              && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
          || (immediate_operand (op, mode)
              && (INTVAL (op) < 32 && INTVAL (op) > 0)));
}

/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
              || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
        return power_of_two_operand (XEXP (x, 1));

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
              || code == ROTATERT);
    }
}

/* Return TRUE if x is EQ or NE.  */

int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any mode in class CC_MODE that is reversible.  */

int
reversible_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC
          && GET_CODE (x) == REG && REGNO (x) == 24)
        abort ();
      if (GET_MODE_CLASS (mode) != MODE_CC
          || (! flag_fast_math && ! REVERSIBLE_CC_MODE (mode)))
        return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}

/* Return TRUE if X references a SYMBOL_REF.  */
int
symbol_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}

/* Return TRUE if X references a LABEL_REF.  */
int
label_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (label_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}

enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}

/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
          || (GET_CODE (XEXP (b, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
        {
          reg0 = REGNO (XEXP (XEXP (a, 0), 0));
          val0 = INTVAL (XEXP (XEXP (a, 0), 1));
        }
      else
        reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
        {
          reg1 = REGNO (XEXP (XEXP (b, 0), 0));
          val1 = INTVAL (XEXP (XEXP (b, 0), 1));
        }
      else
        reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
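
/* For example, (mem (reg 4)) and (mem (plus (reg 4) (const_int 4))) are
   adjacent (in either order), while (mem (reg 4)) and
   (mem (plus (reg 5) (const_int 4))) are not, since the base registers
   differ.  */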

/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
          || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
          || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
          || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
             != REGNO (SET_DEST (elt)))
        return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != SImode
          || REGNO (SET_DEST (elt)) != dest_regno + i - base
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != SImode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}
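
/* A PARALLEL that this predicate accepts (with write-back) looks roughly
   like:
     (parallel [(set (reg r) (plus (reg r) (const_int 8)))
                (set (reg d)   (mem (reg r)))
                (set (reg d+1) (mem (plus (reg r) (const_int 4))))
                (clobber (reg r))])
   i.e. consecutive destination registers loaded from consecutive words,
   so that it can be output as a single load-multiple (ldmia)
   instruction.  */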

/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
          || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
          || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
          || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
             != REGNO (SET_DEST (elt)))
        return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != SImode
          || REGNO (SET_SRC (elt)) != src_regno + i - base
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != SImode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}

int
multi_register_push (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != PARALLEL
      || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
      || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
      || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
    return 0;

  return 1;
}

/* Routines for use with attributes.  */

int
const_pool_offset (symbol)
     rtx symbol;
{
  return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
}

/* Routines for use in generating RTL.  */

rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;

  result = gen_rtx (PARALLEL, VOIDmode,
                    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx (SET, GET_MODE (from), from,
                   plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      XVECEXP (result, 0, i)
        = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
                   gen_rtx (MEM, SImode,
                            plus_constant (from, j * 4 * sign)));
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);

  return result;
}
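
/* Typical use (see arm_gen_movstrqi below):
     emit_insn (arm_gen_load_multiple (0, 3, src, TRUE, TRUE));
   builds a PARALLEL that loads r0-r2 from the address in SRC, ascending,
   and writes the incremented address (src + 12) back into SRC.  */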

rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;

  result = gen_rtx (PARALLEL, VOIDmode,
                    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx (SET, GET_MODE (to), to,
                   plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      XVECEXP (result, 0, i)
        = gen_rtx (SET, VOIDmode,
                   gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
                   gen_rtx (REG, SImode, base_regno + j));
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);

  return result;
}

int
arm_gen_movstrqi (operands)
     rtx *operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i, r;
  rtx src, dst;
  rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  extern int optimize;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);
  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i += 4)
    {
      emit_insn (arm_gen_load_multiple (0, (in_words_to_go > 4
                                            ? 4 : in_words_to_go),
                                        src, TRUE, TRUE));
      if (out_words_to_go)
        {
          if (out_words_to_go != 1)
            emit_insn (arm_gen_store_multiple (0, (out_words_to_go > 4
                                                   ? 4 : out_words_to_go),
                                               dst, TRUE, TRUE));
          else
            {
              emit_move_insn (gen_rtx (MEM, SImode, dst),
                              gen_rtx (REG, SImode, 0));
              emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
            }
        }

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
      emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)  /* Sanity check */
        abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
        abort ();

      part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
      emit_insn (gen_addsi3 (src, src, GEN_INT (4)));
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
        abort ();

      /* The bytes we want are in the top end of the word.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
                              GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
        {
          emit_move_insn (gen_rtx (MEM, QImode,
                                   plus_constant (dst, last_bytes - 1)),
                          gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
          if (--last_bytes)
            {
              tmp = gen_reg_rtx (SImode);
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
              part_bytes_reg = tmp;
            }
        }

    }
  else
    {
      while (last_bytes)
        {
          if (part_bytes_reg == NULL)
            abort ();

          emit_move_insn (gen_rtx (MEM, QImode, dst),
                          gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
          emit_insn (gen_addsi3 (dst, dst, const1_rtx));
          if (--last_bytes)
            {
              rtx tmp = gen_reg_rtx (SImode);
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
              part_bytes_reg = tmp;
            }
        }
    }

  return 1;
}
2112
ff9940b0
RE
2113/* X and Y are two things to compare using CODE. Emit the compare insn and
 2114 return the rtx for the condition code register in the proper mode. FP means
 2115 this is a floating point compare; I don't think that it is needed on the arm. */
2116
2117rtx
2118gen_compare_reg (code, x, y, fp)
2119 enum rtx_code code;
2120 rtx x, y;
2121{
2122 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
2123 rtx cc_reg = gen_rtx (REG, mode, 24);
2124
2125 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
2126 gen_rtx (COMPARE, mode, x, y)));
2127
2128 return cc_reg;
2129}
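
/* For illustration: a call like gen_compare_reg (EQ, x, y, 0) emits roughly

       (set (reg 24) (compare x y))

   with the register's mode chosen by SELECT_CC_MODE, and hands back that
   register-24 rtx for use by the following branch.  */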
2130
0a81f500
RE
2131void
2132arm_reload_in_hi (operands)
2133 rtx *operands;
2134{
2135 rtx base = find_replacement (&XEXP (operands[1], 0));
2136
2137 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
2138 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
2139 gen_rtx (MEM, QImode,
2140 plus_constant (base, 1))));
2141 if (BYTES_BIG_ENDIAN)
2142 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
2143 operands[0], 0),
2144 gen_rtx (IOR, SImode,
2145 gen_rtx (ASHIFT, SImode,
2146 gen_rtx (SUBREG, SImode,
2147 operands[0], 0),
2148 GEN_INT (8)),
2149 operands[2])));
2150 else
2151 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
2152 operands[0], 0),
2153 gen_rtx (IOR, SImode,
2154 gen_rtx (ASHIFT, SImode,
2155 operands[2],
2156 GEN_INT (8)),
2157 gen_rtx (SUBREG, SImode, operands[0], 0))));
2158}
2159
f3bb6135 2160void
af48348a 2161arm_reload_out_hi (operands)
f3bb6135 2162 rtx *operands;
af48348a
RK
2163{
2164 rtx base = find_replacement (&XEXP (operands[0], 0));
2165
b5cc037f
RE
2166 if (BYTES_BIG_ENDIAN)
2167 {
2168 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
2169 gen_rtx (SUBREG, QImode, operands[1], 0)));
2170 emit_insn (gen_lshrsi3 (operands[2],
2171 gen_rtx (SUBREG, SImode, operands[1], 0),
2172 GEN_INT (8)));
2173 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
2174 gen_rtx (SUBREG, QImode, operands[2], 0)));
2175 }
2176 else
2177 {
2178 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
2179 gen_rtx (SUBREG, QImode, operands[1], 0)));
2180 emit_insn (gen_lshrsi3 (operands[2],
2181 gen_rtx (SUBREG, SImode, operands[1], 0),
2182 GEN_INT (8)));
2183 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
2184 gen_rtx (SUBREG, QImode, operands[2], 0)));
2185 }
af48348a 2186}
ff9940b0
RE
2187\f
2188/* Check to see if a branch is forwards or backwards. Return TRUE if it
2189 is backwards. */
2190
2191int
2192arm_backwards_branch (from, to)
f3bb6135 2193 int from, to;
ff9940b0 2194{
f3bb6135 2195 return insn_addresses[to] <= insn_addresses[from];
ff9940b0
RE
2196}
2197
2198/* Check to see if a branch is within the distance that can be done using
2199 an arithmetic expression. */
2200int
2201short_branch (from, to)
f3bb6135 2202 int from, to;
ff9940b0 2203{
5472d00b 2204 int delta = insn_addresses[from] + 8 - insn_addresses[to];
ff9940b0 2205
5472d00b 2206 return abs (delta) < 980; /* A small margin for safety */
ff9940b0
RE
2207}
2208
2209/* Check to see that the insn isn't the target of the conditionalizing
2210 code */
2211int
2212arm_insn_not_targeted (insn)
f3bb6135 2213 rtx insn;
ff9940b0
RE
2214{
2215 return insn != arm_target_insn;
2216}
2217
2b835d68
RE
2218\f
2219/* Routines for manipulation of the constant pool. */
2220/* This is unashamedly hacked from the version in sh.c, since the problem is
2221 extremely similar. */
2222
 2223/* Arm instructions cannot load a large constant into a register;
 2224 constants have to come from a pc relative load. The reference of a pc
 2225 relative load instruction must be less than 1k in front of the instruction.
2226 This means that we often have to dump a constant inside a function, and
2227 generate code to branch around it.
2228
2229 It is important to minimize this, since the branches will slow things
2230 down and make things bigger.
2231
2232 Worst case code looks like:
2233
2234 ldr rn, L1
2235 b L2
2236 align
2237 L1: .long value
2238 L2:
2239 ..
2240
2241 ldr rn, L3
2242 b L4
2243 align
2244 L3: .long value
2245 L4:
2246 ..
2247
2248 We fix this by performing a scan before scheduling, which notices which
2249 instructions need to have their operands fetched from the constant table
2250 and builds the table.
2251
2252
2253 The algorithm is:
2254
 2255 Scan, find an instruction which needs a pcrel move. Look forward, find the
2256 last barrier which is within MAX_COUNT bytes of the requirement.
2257 If there isn't one, make one. Process all the instructions between
2258 the find and the barrier.
2259
2260 In the above example, we can tell that L3 is within 1k of L1, so
2261 the first move can be shrunk from the 2 insn+constant sequence into
2262 just 1 insn, and the constant moved to L3 to make:
2263
2264 ldr rn, L1
2265 ..
2266 ldr rn, L3
2267 b L4
2268 align
2269 L1: .long value
2270 L3: .long value
2271 L4:
2272
2273 Then the second move becomes the target for the shortening process.
2274
2275 */
2276
2277typedef struct
2278{
2279 rtx value; /* Value in table */
2280 HOST_WIDE_INT next_offset;
2281 enum machine_mode mode; /* Mode of value */
2282} pool_node;
2283
2284/* The maximum number of constants that can fit into one pool, since
2285 the pc relative range is 0...1020 bytes and constants are at least 4
2286 bytes long */
2287
2288#define MAX_POOL_SIZE (1020/4)
2289static pool_node pool_vector[MAX_POOL_SIZE];
2290static int pool_size;
2291static rtx pool_vector_label;
2292
2293/* Add a constant to the pool and return its label. */
2294static HOST_WIDE_INT
2295add_constant (x, mode)
2296 rtx x;
2297 enum machine_mode mode;
2298{
2299 int i;
2300 rtx lab;
2301 HOST_WIDE_INT offset;
2302
2303 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
2304 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2305 x = get_pool_constant (XEXP (x, 0));
2306#ifndef AOF_ASSEMBLER
2307 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
2308 x = XVECEXP (x, 0, 0);
2309#endif
2310
2311 /* First see if we've already got it */
2312 for (i = 0; i < pool_size; i++)
2313 {
2314 if (GET_CODE (x) == pool_vector[i].value->code
2315 && mode == pool_vector[i].mode)
2316 {
2317 if (GET_CODE (x) == CODE_LABEL)
2318 {
2319 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
2320 continue;
2321 }
2322 if (rtx_equal_p (x, pool_vector[i].value))
2323 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
2324 }
2325 }
2326
2327 /* Need a new one */
2328 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
2329 offset = 0;
2330 if (pool_size == 0)
2331 pool_vector_label = gen_label_rtx ();
2332 else
2333 pool_vector[pool_size].next_offset
2334 += (offset = pool_vector[pool_size - 1].next_offset);
2335
2336 pool_vector[pool_size].value = x;
2337 pool_vector[pool_size].mode = mode;
2338 pool_size++;
2339 return offset;
2340}
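
/* The offsets returned here accumulate by mode size: starting from an empty
   pool, the first SImode constant gets offset 0, a second distinct SImode
   constant gets offset 4, and a DFmode constant added after them gets offset
   8 (its next_offset then being 16).  */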
2341
2342/* Output the literal table */
2343static void
2344dump_table (scan)
2345 rtx scan;
2346{
2347 int i;
2348
2349 scan = emit_label_after (gen_label_rtx (), scan);
2350 scan = emit_insn_after (gen_align_4 (), scan);
2351 scan = emit_label_after (pool_vector_label, scan);
2352
2353 for (i = 0; i < pool_size; i++)
2354 {
2355 pool_node *p = pool_vector + i;
2356
2357 switch (GET_MODE_SIZE (p->mode))
2358 {
2359 case 4:
2360 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
2361 break;
2362
2363 case 8:
2364 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
2365 break;
2366
2367 default:
2368 abort ();
2369 break;
2370 }
2371 }
2372
2373 scan = emit_insn_after (gen_consttable_end (), scan);
2374 scan = emit_barrier_after (scan);
2375 pool_size = 0;
2376}
2377
 2378/* Nonzero if the src operand needs to be fixed up. */
2379static int
2380fixit (src, mode, destreg)
2381 rtx src;
2382 enum machine_mode mode;
2383 int destreg;
2384{
2385 if (CONSTANT_P (src))
2386 {
2387 if (GET_CODE (src) == CONST_INT)
2388 return (! const_ok_for_arm (INTVAL (src))
2389 && ! const_ok_for_arm (~INTVAL (src)));
2390 if (GET_CODE (src) == CONST_DOUBLE)
2391 return (GET_MODE (src) == VOIDmode
2392 || destreg < 16
2393 || (! const_double_rtx_ok_for_fpu (src)
2394 && ! neg_const_double_rtx_ok_for_fpu (src)));
2395 return symbol_mentioned_p (src);
2396 }
2397#ifndef AOF_ASSEMBLER
2398 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
2399 return 1;
2400#endif
2401 else
2402 return (mode == SImode && GET_CODE (src) == MEM
2403 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
2404 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
2405}
2406
2407/* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
2408static rtx
2409find_barrier (from, max_count)
2410 rtx from;
2411 int max_count;
2412{
2413 int count = 0;
2414 rtx found_barrier = 0;
2415
2416 while (from && count < max_count)
2417 {
2418 if (GET_CODE (from) == BARRIER)
2419 found_barrier = from;
2420
2421 /* Count the length of this insn */
2422 if (GET_CODE (from) == INSN
2423 && GET_CODE (PATTERN (from)) == SET
2424 && CONSTANT_P (SET_SRC (PATTERN (from)))
2425 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
2426 {
2427 rtx src = SET_SRC (PATTERN (from));
2428 count += 2;
2429 }
2430 else
2431 count += get_attr_length (from);
2432
2433 from = NEXT_INSN (from);
2434 }
2435
2436 if (!found_barrier)
2437 {
2438 /* We didn't find a barrier in time to
2439 dump our stuff, so we'll make one */
2440 rtx label = gen_label_rtx ();
2441
2442 if (from)
2443 from = PREV_INSN (from);
2444 else
2445 from = get_last_insn ();
2446
2447 /* Walk back to be just before any jump */
2448 while (GET_CODE (from) == JUMP_INSN
2449 || GET_CODE (from) == NOTE
2450 || GET_CODE (from) == CODE_LABEL)
2451 from = PREV_INSN (from);
2452
2453 from = emit_jump_insn_after (gen_jump (label), from);
2454 JUMP_LABEL (from) = label;
2455 found_barrier = emit_barrier_after (from);
2456 emit_label_after (label, found_barrier);
2457 return found_barrier;
2458 }
2459
2460 return found_barrier;
2461}
2462
 2463/* Nonzero if the insn is a move instruction which needs to be fixed. */
2464static int
2465broken_move (insn)
2466 rtx insn;
2467{
2468 if (!INSN_DELETED_P (insn)
2469 && GET_CODE (insn) == INSN
2470 && GET_CODE (PATTERN (insn)) == SET)
2471 {
2472 rtx pat = PATTERN (insn);
2473 rtx src = SET_SRC (pat);
2474 rtx dst = SET_DEST (pat);
2475 int destreg;
2476 enum machine_mode mode = GET_MODE (dst);
2477 if (dst == pc_rtx)
2478 return 0;
2479
2480 if (GET_CODE (dst) == REG)
2481 destreg = REGNO (dst);
2482 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
2483 destreg = REGNO (SUBREG_REG (dst));
2484
2485 return fixit (src, mode, destreg);
2486 }
2487 return 0;
2488}
2489
2490void
2491arm_reorg (first)
2492 rtx first;
2493{
2494 rtx insn;
2495 int count_size;
2496 int regno;
2497
2498#if 0
2499 /* The ldr instruction can work with up to a 4k offset, and most constants
2500 will be loaded with one of these instructions; however, the adr
2501 instruction and the ldf instructions only work with a 1k offset. This
2502 code needs to be rewritten to use the 4k offset when possible, and to
2503 adjust when a 1k offset is needed. For now we just use a 1k offset
2504 from the start. */
2505 count_size = 4000;
2506
2507 /* Floating point operands can't work further than 1024 bytes from the
2508 PC, so to make things simple we restrict all loads for such functions.
2509 */
2510 if (TARGET_HARD_FLOAT)
2511 for (regno = 16; regno < 24; regno++)
2512 if (regs_ever_live[regno])
2513 {
2514 count_size = 1000;
2515 break;
2516 }
2517#else
2518 count_size = 1000;
2519#endif /* 0 */
2520
2521 for (insn = first; insn; insn = NEXT_INSN (insn))
2522 {
2523 if (broken_move (insn))
2524 {
2525 /* This is a broken move instruction, scan ahead looking for
2526 a barrier to stick the constant table behind */
2527 rtx scan;
2528 rtx barrier = find_barrier (insn, count_size);
2529
2530 /* Now find all the moves between the points and modify them */
2531 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
2532 {
2533 if (broken_move (scan))
2534 {
2535 /* This is a broken move instruction, add it to the pool */
2536 rtx pat = PATTERN (scan);
2537 rtx src = SET_SRC (pat);
2538 rtx dst = SET_DEST (pat);
2539 enum machine_mode mode = GET_MODE (dst);
2540 HOST_WIDE_INT offset;
2541 rtx newinsn = scan;
2542 rtx newsrc;
2543 rtx addr;
2544 int scratch;
2545
2546 /* If this is an HImode constant load, convert it into
2547 an SImode constant load. Since the register is always
2548 32 bits this is safe. We have to do this, since the
2549 load pc-relative instruction only does a 32-bit load. */
2550 if (mode == HImode)
2551 {
2552 mode = SImode;
2553 if (GET_CODE (dst) != REG)
2554 abort ();
2555 PUT_MODE (dst, SImode);
2556 }
2557
2558 offset = add_constant (src, mode);
2559 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
2560 pool_vector_label),
2561 offset);
2562
2563 /* For wide moves to integer regs we need to split the
2564 address calculation off into a separate insn, so that
2565 the load can then be done with a load-multiple. This is
2566 safe, since we have already noted the length of such
2567 insns to be 8, and we are immediately over-writing the
2568 scratch we have grabbed with the final result. */
2569 if (GET_MODE_SIZE (mode) > 4
2570 && (scratch = REGNO (dst)) < 16)
2571 {
2572 rtx reg = gen_rtx (REG, SImode, scratch);
2573 newinsn = emit_insn_after (gen_movaddr (reg, addr),
2574 newinsn);
2575 addr = reg;
2576 }
2577
2578 newsrc = gen_rtx (MEM, mode, addr);
2579
2580 /* Build a jump insn wrapper around the move instead
2581 of an ordinary insn, because we want to have room for
2582 the target label rtx in fld[7], which an ordinary
2583 insn doesn't have. */
2584 newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
2585 dst, newsrc),
2586 newinsn);
2587 JUMP_LABEL (newinsn) = pool_vector_label;
2588
2589 /* But it's still an ordinary insn */
2590 PUT_CODE (newinsn, INSN);
2591
2592 /* Kill old insn */
2593 delete_insn (scan);
2594 scan = newinsn;
2595 }
2596 }
2597 dump_table (barrier);
2598 insn = scan;
2599 }
2600 }
2601}
2602
cce8749e
CH
2603\f
2604/* Routines to output assembly language. */
2605
f3bb6135 2606/* If the rtx is the correct value then return the string of the number.
ff9940b0
RE
2607 In this way we can ensure that valid double constants are generated even
2608 when cross compiling. */
2609char *
2610fp_immediate_constant (x)
b5cc037f 2611 rtx x;
ff9940b0
RE
2612{
2613 REAL_VALUE_TYPE r;
2614 int i;
2615
2616 if (!fpa_consts_inited)
2617 init_fpa_table ();
2618
2619 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2620 for (i = 0; i < 8; i++)
2621 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2622 return strings_fpa[i];
f3bb6135 2623
ff9940b0
RE
2624 abort ();
2625}
2626
9997d19d
RE
2627/* As for fp_immediate_constant, but value is passed directly, not in rtx. */
2628static char *
2629fp_const_from_val (r)
2630 REAL_VALUE_TYPE *r;
2631{
2632 int i;
2633
2634 if (! fpa_consts_inited)
2635 init_fpa_table ();
2636
2637 for (i = 0; i < 8; i++)
2638 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
2639 return strings_fpa[i];
2640
2641 abort ();
2642}
ff9940b0 2643
cce8749e
CH
2644/* Output the operands of a LDM/STM instruction to STREAM.
2645 MASK is the ARM register set mask of which only bits 0-15 are important.
 2646 INSTR is the (possibly suffixed) load or store instruction, including its
 2647 base register. HAT is nonzero if a hat must follow the register list. */
2648
2649void
2650print_multi_reg (stream, instr, mask, hat)
2651 FILE *stream;
2652 char *instr;
2653 int mask, hat;
2654{
2655 int i;
2656 int not_first = FALSE;
2657
1d5473cb 2658 fputc ('\t', stream);
f3139301 2659 fprintf (stream, instr, REGISTER_PREFIX);
1d5473cb 2660 fputs (", {", stream);
cce8749e
CH
2661 for (i = 0; i < 16; i++)
2662 if (mask & (1 << i))
2663 {
2664 if (not_first)
2665 fprintf (stream, ", ");
f3139301 2666 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
cce8749e
CH
2667 not_first = TRUE;
2668 }
f3bb6135 2669
cce8749e 2670 fprintf (stream, "}%s\n", hat ? "^" : "");
f3bb6135 2671}
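
/* For example (assuming an empty REGISTER_PREFIX and the usual register
   names), print_multi_reg (f, "ldmfd\t%ssp!", 0x4010, FALSE) writes

       ldmfd   sp!, {r4, lr}

   to the stream, while a nonzero HAT would append "^" after the closing
   brace.  */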
cce8749e
CH
2672
2673/* Output a 'call' insn. */
2674
2675char *
2676output_call (operands)
f3bb6135 2677 rtx *operands;
cce8749e 2678{
cce8749e
CH
2679 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
2680
2681 if (REGNO (operands[0]) == 14)
2682 {
2683 operands[0] = gen_rtx (REG, SImode, 12);
1d5473cb 2684 output_asm_insn ("mov%?\t%0, %|lr", operands);
cce8749e 2685 }
1d5473cb
RE
2686 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
2687 output_asm_insn ("mov%?\t%|pc, %0", operands);
f3bb6135
RE
2688 return "";
2689}
cce8749e 2690
ff9940b0
RE
2691static int
2692eliminate_lr2ip (x)
f3bb6135 2693 rtx *x;
ff9940b0
RE
2694{
2695 int something_changed = 0;
2696 rtx x0 = *x;
2697 int code = GET_CODE (x0);
2698 register int i, j;
2699 register char *fmt;
2700
2701 switch (code)
2702 {
2703 case REG:
2704 if (REGNO (x0) == 14)
2705 {
2706 *x = gen_rtx (REG, SImode, 12);
2707 return 1;
2708 }
2709 return 0;
2710 default:
2711 /* Scan through the sub-elements and change any references there */
2712 fmt = GET_RTX_FORMAT (code);
2713 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2714 if (fmt[i] == 'e')
2715 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
2716 else if (fmt[i] == 'E')
2717 for (j = 0; j < XVECLEN (x0, i); j++)
2718 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
2719 return something_changed;
2720 }
2721}
2722
2723/* Output a 'call' insn that is a reference in memory. */
2724
2725char *
2726output_call_mem (operands)
f3bb6135 2727 rtx *operands;
ff9940b0
RE
2728{
2729 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
2730 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
2731 */
2732 if (eliminate_lr2ip (&operands[0]))
1d5473cb 2733 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
f3bb6135 2734
1d5473cb
RE
2735 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
2736 output_asm_insn ("ldr%?\t%|pc, %0", operands);
f3bb6135
RE
2737 return "";
2738}
ff9940b0
RE
2739
2740
 2741/* Output a move from arm registers to an fpu register.
 2742 OPERANDS[0] is an fpu register.
 2743 OPERANDS[1] is the first register of an arm register pair. */
2744
2745char *
2746output_mov_long_double_fpu_from_arm (operands)
f3bb6135 2747 rtx *operands;
ff9940b0
RE
2748{
2749 int arm_reg0 = REGNO (operands[1]);
2750 rtx ops[3];
2751
2752 if (arm_reg0 == 12)
2753 abort();
f3bb6135 2754
ff9940b0
RE
2755 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2756 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2757 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
2758
1d5473cb
RE
2759 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
2760 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
f3bb6135
RE
2761 return "";
2762}
ff9940b0
RE
2763
2764/* Output a move from an fpu register to arm registers.
 2765 OPERANDS[0] is the first register of an arm register pair.
2766 OPERANDS[1] is an fpu register. */
2767
2768char *
2769output_mov_long_double_arm_from_fpu (operands)
f3bb6135 2770 rtx *operands;
ff9940b0
RE
2771{
2772 int arm_reg0 = REGNO (operands[0]);
2773 rtx ops[3];
2774
2775 if (arm_reg0 == 12)
2776 abort();
f3bb6135 2777
ff9940b0
RE
2778 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2779 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2780 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
2781
1d5473cb
RE
2782 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
2783 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
f3bb6135
RE
2784 return "";
2785}
ff9940b0
RE
2786
 2787/* Output a move of a long double from arm registers to arm registers.
2788 OPERANDS[0] is the destination.
2789 OPERANDS[1] is the source. */
2790char *
2791output_mov_long_double_arm_from_arm (operands)
f3bb6135 2792 rtx *operands;
ff9940b0
RE
2793{
2794 /* We have to be careful here because the two might overlap */
2795 int dest_start = REGNO (operands[0]);
2796 int src_start = REGNO (operands[1]);
2797 rtx ops[2];
2798 int i;
2799
2800 if (dest_start < src_start)
2801 {
2802 for (i = 0; i < 3; i++)
2803 {
2804 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2805 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 2806 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
2807 }
2808 }
2809 else
2810 {
2811 for (i = 2; i >= 0; i--)
2812 {
2813 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2814 ops[1] = gen_rtx (REG, SImode, src_start + i);
9997d19d 2815 output_asm_insn ("mov%?\t%0, %1", ops);
ff9940b0
RE
2816 }
2817 }
f3bb6135 2818
ff9940b0
RE
2819 return "";
2820}
2821
2822
cce8749e
CH
 2823/* Output a move from arm registers to an fpu register.
 2824 OPERANDS[0] is an fpu register.
 2825 OPERANDS[1] is the first register of an arm register pair. */
2826
2827char *
2828output_mov_double_fpu_from_arm (operands)
f3bb6135 2829 rtx *operands;
cce8749e
CH
2830{
2831 int arm_reg0 = REGNO (operands[1]);
2832 rtx ops[2];
2833
2834 if (arm_reg0 == 12)
2835 abort();
2836 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2837 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1d5473cb
RE
2838 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
2839 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
f3bb6135
RE
2840 return "";
2841}
cce8749e
CH
2842
2843/* Output a move from an fpu register to arm registers.
 2844 OPERANDS[0] is the first register of an arm register pair.
2845 OPERANDS[1] is an fpu register. */
2846
2847char *
2848output_mov_double_arm_from_fpu (operands)
f3bb6135 2849 rtx *operands;
cce8749e
CH
2850{
2851 int arm_reg0 = REGNO (operands[0]);
2852 rtx ops[2];
2853
2854 if (arm_reg0 == 12)
2855 abort();
f3bb6135 2856
cce8749e
CH
2857 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2858 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1d5473cb
RE
2859 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
2860 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
f3bb6135
RE
2861 return "";
2862}
cce8749e
CH
2863
2864/* Output a move between double words.
2865 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
2866 or MEM<-REG and all MEMs must be offsettable addresses. */
2867
2868char *
2869output_move_double (operands)
f3bb6135 2870 rtx *operands;
cce8749e
CH
2871{
2872 enum rtx_code code0 = GET_CODE (operands[0]);
2873 enum rtx_code code1 = GET_CODE (operands[1]);
2874 rtx otherops[2];
2875
2876 if (code0 == REG)
2877 {
2878 int reg0 = REGNO (operands[0]);
2879
2880 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
2881 if (code1 == REG)
2882 {
2883 int reg1 = REGNO (operands[1]);
2884 if (reg1 == 12)
2885 abort();
f3bb6135 2886
cce8749e
CH
2887 otherops[1] = gen_rtx (REG, SImode, 1 + reg1);
2888
2889 /* Ensure the second source is not overwritten */
2890 if (reg0 == 1 + reg1)
2891 {
9997d19d
RE
2892 output_asm_insn("mov%?\t%0, %1", otherops);
2893 output_asm_insn("mov%?\t%0, %1", operands);
cce8749e
CH
2894 }
2895 else
2896 {
9997d19d
RE
2897 output_asm_insn("mov%?\t%0, %1", operands);
2898 output_asm_insn("mov%?\t%0, %1", otherops);
cce8749e
CH
2899 }
2900 }
2901 else if (code1 == CONST_DOUBLE)
2902 {
226a5051
RE
2903 if (GET_MODE (operands[1]) == DFmode)
2904 {
2905 long l[2];
2906 union real_extract u;
2907
2908 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
2909 sizeof (u));
2910 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
2911 otherops[1] = GEN_INT(l[1]);
2912 operands[1] = GEN_INT(l[0]);
2913 }
2914 else
2915 {
2916 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
2917 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
2918 }
ff9940b0
RE
2919 output_mov_immediate (operands, FALSE, "");
2920 output_mov_immediate (otherops, FALSE, "");
cce8749e
CH
2921 }
2922 else if (code1 == CONST_INT)
2923 {
2924 otherops[1] = const0_rtx;
ff9940b0
RE
2925 /* sign extend the intval into the high-order word */
2926 /* Note: output_mov_immediate may clobber operands[1], so we
2927 put this out first */
2928 if (INTVAL (operands[1]) < 0)
9997d19d 2929 output_asm_insn ("mvn%?\t%0, %1", otherops);
ff9940b0 2930 else
9997d19d 2931 output_asm_insn ("mov%?\t%0, %1", otherops);
ff9940b0 2932 output_mov_immediate (operands, FALSE, "");
cce8749e
CH
2933 }
2934 else if (code1 == MEM)
2935 {
ff9940b0 2936 switch (GET_CODE (XEXP (operands[1], 0)))
cce8749e 2937 {
ff9940b0 2938 case REG:
9997d19d 2939 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
ff9940b0 2940 break;
2b835d68 2941
ff9940b0 2942 case PRE_INC:
2b835d68 2943 abort (); /* Should never happen now */
ff9940b0 2944 break;
2b835d68 2945
ff9940b0 2946 case PRE_DEC:
2b835d68 2947 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
ff9940b0 2948 break;
2b835d68 2949
ff9940b0 2950 case POST_INC:
9997d19d 2951 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
ff9940b0 2952 break;
2b835d68 2953
ff9940b0 2954 case POST_DEC:
2b835d68 2955 abort (); /* Should never happen now */
ff9940b0 2956 break;
2b835d68
RE
2957
2958 case LABEL_REF:
2959 case CONST:
2960 output_asm_insn ("adr%?\t%0, %1", operands);
2961 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
2962 break;
2963
ff9940b0 2964 default:
2b835d68 2965 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
cce8749e 2966 {
2b835d68
RE
2967 otherops[0] = operands[0];
2968 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
2969 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
2970 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
2971 {
2972 if (GET_CODE (otherops[2]) == CONST_INT)
2973 {
2974 switch (INTVAL (otherops[2]))
2975 {
2976 case -8:
2977 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
2978 return "";
2979 case -4:
2980 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
2981 return "";
2982 case 4:
2983 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
2984 return "";
2985 }
2986 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
2987 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
2988 else
2989 output_asm_insn ("add%?\t%0, %1, %2", otherops);
2990 }
2991 else
2992 output_asm_insn ("add%?\t%0, %1, %2", otherops);
2993 }
2994 else
2995 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
2996 return "ldm%?ia\t%0, %M0";
2997 }
2998 else
2999 {
3000 otherops[1] = adj_offsettable_operand (operands[1], 4);
3001 /* Take care of overlapping base/data reg. */
3002 if (reg_mentioned_p (operands[0], operands[1]))
3003 {
3004 output_asm_insn ("ldr%?\t%0, %1", otherops);
3005 output_asm_insn ("ldr%?\t%0, %1", operands);
3006 }
3007 else
3008 {
3009 output_asm_insn ("ldr%?\t%0, %1", operands);
3010 output_asm_insn ("ldr%?\t%0, %1", otherops);
3011 }
cce8749e
CH
3012 }
3013 }
3014 }
2b835d68
RE
3015 else
3016 abort(); /* Constraints should prevent this */
cce8749e
CH
3017 }
3018 else if (code0 == MEM && code1 == REG)
3019 {
3020 if (REGNO (operands[1]) == 12)
3021 abort();
2b835d68 3022
ff9940b0
RE
3023 switch (GET_CODE (XEXP (operands[0], 0)))
3024 {
3025 case REG:
9997d19d 3026 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
ff9940b0 3027 break;
2b835d68 3028
ff9940b0 3029 case PRE_INC:
2b835d68 3030 abort (); /* Should never happen now */
ff9940b0 3031 break;
2b835d68 3032
ff9940b0 3033 case PRE_DEC:
2b835d68 3034 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
ff9940b0 3035 break;
2b835d68 3036
ff9940b0 3037 case POST_INC:
9997d19d 3038 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
ff9940b0 3039 break;
2b835d68 3040
ff9940b0 3041 case POST_DEC:
2b835d68 3042 abort (); /* Should never happen now */
ff9940b0 3043 break;
2b835d68
RE
3044
3045 case PLUS:
3046 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
3047 {
3048 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
3049 {
3050 case -8:
3051 output_asm_insn ("stm%?db\t%m0, %M1", operands);
3052 return "";
3053
3054 case -4:
3055 output_asm_insn ("stm%?da\t%m0, %M1", operands);
3056 return "";
3057
3058 case 4:
3059 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
3060 return "";
3061 }
3062 }
3063 /* Fall through */
3064
ff9940b0 3065 default:
cce8749e
CH
3066 otherops[0] = adj_offsettable_operand (operands[0], 4);
3067 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
9997d19d
RE
3068 output_asm_insn ("str%?\t%1, %0", operands);
3069 output_asm_insn ("str%?\t%1, %0", otherops);
cce8749e
CH
3070 }
3071 }
2b835d68
RE
3072 else
3073 abort(); /* Constraints should prevent this */
cce8749e 3074
9997d19d
RE
3075 return "";
3076}
cce8749e
CH
3077
3078
3079/* Output an arbitrary MOV reg, #n.
3080 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
3081
3082char *
3083output_mov_immediate (operands)
f3bb6135 3084 rtx *operands;
cce8749e 3085{
f3bb6135 3086 HOST_WIDE_INT n = INTVAL (operands[1]);
cce8749e
CH
3087 int n_ones = 0;
3088 int i;
3089
3090 /* Try to use one MOV */
cce8749e 3091 if (const_ok_for_arm (n))
f3bb6135 3092 {
9997d19d 3093 output_asm_insn ("mov%?\t%0, %1", operands);
f3bb6135
RE
3094 return "";
3095 }
cce8749e
CH
3096
3097 /* Try to use one MVN */
f3bb6135 3098 if (const_ok_for_arm (~n))
cce8749e 3099 {
f3bb6135 3100 operands[1] = GEN_INT (~n);
9997d19d 3101 output_asm_insn ("mvn%?\t%0, %1", operands);
f3bb6135 3102 return "";
cce8749e
CH
3103 }
3104
3105 /* If all else fails, make it out of ORRs or BICs as appropriate. */
3106
3107 for (i=0; i < 32; i++)
3108 if (n & 1 << i)
3109 n_ones++;
3110
3111 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
9997d19d
RE
3112 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
3113 ~n);
cce8749e 3114 else
9997d19d
RE
3115 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
3116 n);
f3bb6135
RE
3117
3118 return "";
3119}
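
/* A worked example (illustrative only): 0x0000F00F is not a valid ARM
   immediate, nor is its bitwise inverse, and it has only 8 set bits, so the
   ORR path above emits roughly

       mov     rN, #15
       orr     rN, rN, #61440          @ 0xf000

   whereas a constant with more than 16 set bits is built from MVN followed
   by BICs of the inverted value.  */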
cce8749e
CH
3120
3121
3122/* Output an ADD r, s, #n where n may be too big for one instruction. If
3123 adding zero to one register, output nothing. */
3124
3125char *
3126output_add_immediate (operands)
f3bb6135 3127 rtx *operands;
cce8749e 3128{
f3bb6135 3129 HOST_WIDE_INT n = INTVAL (operands[2]);
cce8749e
CH
3130
3131 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
3132 {
3133 if (n < 0)
3134 output_multi_immediate (operands,
9997d19d
RE
3135 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
3136 -n);
cce8749e
CH
3137 else
3138 output_multi_immediate (operands,
9997d19d
RE
3139 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
3140 n);
cce8749e 3141 }
f3bb6135
RE
3142
3143 return "";
3144}
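
/* For instance (a sketch): with OPERANDS[0] and OPERANDS[1] both the stack
   pointer and OPERANDS[2] equal to 257, the output is roughly

       add     sp, sp, #1
       add     sp, sp, #256

   and a negative constant uses the "sub" templates with -n instead.  */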
cce8749e 3145
cce8749e
CH
3146/* Output a multiple immediate operation.
3147 OPERANDS is the vector of operands referred to in the output patterns.
3148 INSTR1 is the output pattern to use for the first constant.
3149 INSTR2 is the output pattern to use for subsequent constants.
3150 IMMED_OP is the index of the constant slot in OPERANDS.
3151 N is the constant value. */
3152
3153char *
3154output_multi_immediate (operands, instr1, instr2, immed_op, n)
f3bb6135 3155 rtx *operands;
cce8749e 3156 char *instr1, *instr2;
f3bb6135
RE
3157 int immed_op;
3158 HOST_WIDE_INT n;
cce8749e 3159{
f3bb6135
RE
3160#if HOST_BITS_PER_WIDE_INT > 32
3161 n &= 0xffffffff;
3162#endif
3163
cce8749e
CH
3164 if (n == 0)
3165 {
3166 operands[immed_op] = const0_rtx;
f3bb6135 3167 output_asm_insn (instr1, operands); /* Quick and easy output */
cce8749e
CH
3168 }
3169 else
3170 {
3171 int i;
3172 char *instr = instr1;
3173
3174 /* Note that n is never zero here (which would give no output) */
cce8749e
CH
3175 for (i = 0; i < 32; i += 2)
3176 {
3177 if (n & (3 << i))
3178 {
f3bb6135
RE
3179 operands[immed_op] = GEN_INT (n & (255 << i));
3180 output_asm_insn (instr, operands);
cce8749e
CH
3181 instr = instr2;
3182 i += 6;
3183 }
3184 }
3185 }
f3bb6135 3186 return "";
9997d19d 3187}
cce8749e
CH
3188
3189
3190/* Return the appropriate ARM instruction for the operation code.
3191 The returned result should not be overwritten. OP is the rtx of the
3192 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
3193 was shifted. */
3194
3195char *
3196arithmetic_instr (op, shift_first_arg)
3197 rtx op;
f3bb6135 3198 int shift_first_arg;
cce8749e 3199{
9997d19d 3200 switch (GET_CODE (op))
cce8749e
CH
3201 {
3202 case PLUS:
f3bb6135
RE
3203 return "add";
3204
cce8749e 3205 case MINUS:
f3bb6135
RE
3206 return shift_first_arg ? "rsb" : "sub";
3207
cce8749e 3208 case IOR:
f3bb6135
RE
3209 return "orr";
3210
cce8749e 3211 case XOR:
f3bb6135
RE
3212 return "eor";
3213
cce8749e 3214 case AND:
f3bb6135
RE
3215 return "and";
3216
cce8749e 3217 default:
f3bb6135 3218 abort ();
cce8749e 3219 }
f3bb6135 3220}
cce8749e
CH
3221
3222
3223/* Ensure valid constant shifts and return the appropriate shift mnemonic
3224 for the operation code. The returned result should not be overwritten.
3225 OP is the rtx code of the shift.
9997d19d
RE
 3226 On exit, *AMOUNTP will be -1 if the shift is by a register, or the constant
 3227 amount if the shift is by a constant. */
cce8749e 3228
9997d19d
RE
3229static char *
3230shift_op (op, amountp)
3231 rtx op;
3232 HOST_WIDE_INT *amountp;
cce8749e 3233{
cce8749e 3234 char *mnem;
e2c671ba 3235 enum rtx_code code = GET_CODE (op);
cce8749e 3236
9997d19d
RE
3237 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
3238 *amountp = -1;
3239 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
3240 *amountp = INTVAL (XEXP (op, 1));
3241 else
3242 abort ();
3243
e2c671ba 3244 switch (code)
cce8749e
CH
3245 {
3246 case ASHIFT:
3247 mnem = "asl";
3248 break;
f3bb6135 3249
cce8749e
CH
3250 case ASHIFTRT:
3251 mnem = "asr";
cce8749e 3252 break;
f3bb6135 3253
cce8749e
CH
3254 case LSHIFTRT:
3255 mnem = "lsr";
cce8749e 3256 break;
f3bb6135 3257
9997d19d
RE
3258 case ROTATERT:
3259 mnem = "ror";
9997d19d
RE
3260 break;
3261
ff9940b0 3262 case MULT:
e2c671ba
RE
3263 /* We never have to worry about the amount being other than a
3264 power of 2, since this case can never be reloaded from a reg. */
9997d19d
RE
3265 if (*amountp != -1)
3266 *amountp = int_log2 (*amountp);
3267 else
3268 abort ();
f3bb6135
RE
3269 return "asl";
3270
cce8749e 3271 default:
f3bb6135 3272 abort ();
cce8749e
CH
3273 }
3274
e2c671ba
RE
3275 if (*amountp != -1)
3276 {
3277 /* This is not 100% correct, but follows from the desire to merge
3278 multiplication by a power of 2 with the recognizer for a
3279 shift. >=32 is not a valid shift for "asl", so we must try and
3280 output a shift that produces the correct arithmetical result.
ddd5a7c1 3281 Using lsr #32 is identical except for the fact that the carry bit
e2c671ba
RE
3282 is not set correctly if we set the flags; but we never use the
3283 carry bit from such an operation, so we can ignore that. */
3284 if (code == ROTATERT)
3285 *amountp &= 31; /* Rotate is just modulo 32 */
3286 else if (*amountp != (*amountp & 31))
3287 {
3288 if (code == ASHIFT)
3289 mnem = "lsr";
3290 *amountp = 32;
3291 }
3292
3293 /* Shifts of 0 are no-ops. */
3294 if (*amountp == 0)
3295 return NULL;
3296 }
3297
9997d19d
RE
3298 return mnem;
3299}
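
/* Examples of the mapping (illustrative): a (mult ... (const_int 8)) operand
   comes back as "asl" with *AMOUNTP set to 3; a register-controlled shift
   comes back with *AMOUNTP == -1 so that the register itself gets printed;
   and an ASHIFT by 32 or more is turned into "lsr" with *AMOUNTP forced to
   32, as described above.  */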
cce8749e
CH
3300
3301
3302/* Obtain the shift from the POWER of two. */
3303
f3bb6135 3304HOST_WIDE_INT
cce8749e 3305int_log2 (power)
f3bb6135 3306 HOST_WIDE_INT power;
cce8749e 3307{
f3bb6135 3308 HOST_WIDE_INT shift = 0;
cce8749e 3309
2b835d68 3310 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
cce8749e
CH
3311 {
3312 if (shift > 31)
f3bb6135 3313 abort ();
cce8749e
CH
3314 shift++;
3315 }
f3bb6135
RE
3316
3317 return shift;
3318}
cce8749e 3319
cce8749e
CH
3320/* Output a .ascii pseudo-op, keeping track of lengths. This is because
3321 /bin/as is horribly restrictive. */
3322
3323void
3324output_ascii_pseudo_op (stream, p, len)
3325 FILE *stream;
f1b3f515 3326 unsigned char *p;
cce8749e
CH
3327 int len;
3328{
3329 int i;
3330 int len_so_far = 1000;
3331 int chars_so_far = 0;
3332
3333 for (i = 0; i < len; i++)
3334 {
3335 register int c = p[i];
3336
3337 if (len_so_far > 50)
3338 {
3339 if (chars_so_far)
3340 fputs ("\"\n", stream);
3341 fputs ("\t.ascii\t\"", stream);
3342 len_so_far = 0;
3343 arm_increase_location (chars_so_far);
3344 chars_so_far = 0;
3345 }
3346
3347 if (c == '\"' || c == '\\')
3348 {
3349 putc('\\', stream);
3350 len_so_far++;
3351 }
f3bb6135 3352
cce8749e
CH
3353 if (c >= ' ' && c < 0177)
3354 {
3355 putc (c, stream);
3356 len_so_far++;
3357 }
3358 else
3359 {
3360 fprintf (stream, "\\%03o", c);
3361 len_so_far +=4;
3362 }
f3bb6135 3363
cce8749e
CH
3364 chars_so_far++;
3365 }
f3bb6135 3366
cce8749e
CH
3367 fputs ("\"\n", stream);
3368 arm_increase_location (chars_so_far);
f3bb6135 3369}
cce8749e 3370\f
ff9940b0
RE
3371
3372/* Try to determine whether a pattern really clobbers the link register.
3373 This information is useful when peepholing, so that lr need not be pushed
0e84b556
RK
3374 if we combine a call followed by a return.
3375 NOTE: This code does not check for side-effect expressions in a SET_SRC:
3376 such a check should not be needed because these only update an existing
3377 value within a register; the register must still be set elsewhere within
3378 the function. */
ff9940b0
RE
3379
3380static int
3381pattern_really_clobbers_lr (x)
f3bb6135 3382 rtx x;
ff9940b0
RE
3383{
3384 int i;
3385
3386 switch (GET_CODE (x))
3387 {
3388 case SET:
3389 switch (GET_CODE (SET_DEST (x)))
3390 {
3391 case REG:
3392 return REGNO (SET_DEST (x)) == 14;
f3bb6135 3393
ff9940b0
RE
3394 case SUBREG:
3395 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
3396 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
f3bb6135 3397
0e84b556
RK
3398 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
3399 return 0;
ff9940b0 3400 abort ();
f3bb6135 3401
ff9940b0
RE
3402 default:
3403 return 0;
3404 }
f3bb6135 3405
ff9940b0
RE
3406 case PARALLEL:
3407 for (i = 0; i < XVECLEN (x, 0); i++)
3408 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
3409 return 1;
3410 return 0;
f3bb6135 3411
ff9940b0
RE
3412 case CLOBBER:
3413 switch (GET_CODE (XEXP (x, 0)))
3414 {
3415 case REG:
3416 return REGNO (XEXP (x, 0)) == 14;
f3bb6135 3417
ff9940b0
RE
3418 case SUBREG:
3419 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3420 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
3421 abort ();
f3bb6135 3422
ff9940b0
RE
3423 default:
3424 return 0;
3425 }
f3bb6135 3426
ff9940b0
RE
3427 case UNSPEC:
3428 return 1;
f3bb6135 3429
ff9940b0
RE
3430 default:
3431 return 0;
3432 }
3433}
3434
3435static int
3436function_really_clobbers_lr (first)
f3bb6135 3437 rtx first;
ff9940b0
RE
3438{
3439 rtx insn, next;
3440
3441 for (insn = first; insn; insn = next_nonnote_insn (insn))
3442 {
3443 switch (GET_CODE (insn))
3444 {
3445 case BARRIER:
3446 case NOTE:
3447 case CODE_LABEL:
3448 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
3449 case INLINE_HEADER:
3450 break;
f3bb6135 3451
ff9940b0
RE
3452 case INSN:
3453 if (pattern_really_clobbers_lr (PATTERN (insn)))
3454 return 1;
3455 break;
f3bb6135 3456
ff9940b0
RE
3457 case CALL_INSN:
3458 /* Don't yet know how to handle those calls that are not to a
3459 SYMBOL_REF */
3460 if (GET_CODE (PATTERN (insn)) != PARALLEL)
3461 abort ();
f3bb6135 3462
ff9940b0
RE
3463 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
3464 {
3465 case CALL:
3466 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
3467 != SYMBOL_REF)
3468 return 1;
3469 break;
f3bb6135 3470
ff9940b0
RE
3471 case SET:
3472 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
3473 0, 0)), 0), 0))
3474 != SYMBOL_REF)
3475 return 1;
3476 break;
f3bb6135 3477
ff9940b0
RE
3478 default: /* Don't recognize it, be safe */
3479 return 1;
3480 }
f3bb6135 3481
ff9940b0
RE
3482 /* A call can be made (by peepholing) not to clobber lr iff it is
3483 followed by a return. There may, however, be a use insn iff
3484 we are returning the result of the call.
3485 If we run off the end of the insn chain, then that means the
3486 call was at the end of the function. Unfortunately we don't
3487 have a return insn for the peephole to recognize, so we
3488 must reject this. (Can this be fixed by adding our own insn?) */
3489 if ((next = next_nonnote_insn (insn)) == NULL)
3490 return 1;
f3bb6135 3491
ff9940b0
RE
3492 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
3493 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3494 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
3495 == REGNO (XEXP (PATTERN (next), 0))))
3496 if ((next = next_nonnote_insn (next)) == NULL)
3497 return 1;
f3bb6135 3498
ff9940b0
RE
3499 if (GET_CODE (next) == JUMP_INSN
3500 && GET_CODE (PATTERN (next)) == RETURN)
3501 break;
3502 return 1;
f3bb6135 3503
ff9940b0
RE
3504 default:
3505 abort ();
3506 }
3507 }
f3bb6135 3508
ff9940b0
RE
3509 /* We have reached the end of the chain so lr was _not_ clobbered */
3510 return 0;
3511}
3512
3513char *
3514output_return_instruction (operand, really_return)
f3bb6135
RE
3515 rtx operand;
3516 int really_return;
ff9940b0
RE
3517{
3518 char instr[100];
3519 int reg, live_regs = 0;
e2c671ba
RE
3520 int volatile_func = (optimize > 0
3521 && TREE_THIS_VOLATILE (current_function_decl));
3522
3523 return_used_this_function = 1;
ff9940b0 3524
e2c671ba
RE
3525 if (volatile_func)
3526 {
3527 rtx ops[2];
3528 /* If this function was declared non-returning, and we have found a tail
3529 call, then we have to trust that the called function won't return. */
3530 if (! really_return)
3531 return "";
3532
3533 /* Otherwise, trap an attempted return by aborting. */
3534 ops[0] = operand;
3535 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
2b835d68 3536 assemble_external_libcall (ops[1]);
e2c671ba
RE
3537 output_asm_insn ("bl%d0\t%a1", ops);
3538 return "";
3539 }
3540
f3bb6135 3541 if (current_function_calls_alloca && ! really_return)
ff9940b0
RE
3542 abort();
3543
f3bb6135
RE
3544 for (reg = 0; reg <= 10; reg++)
3545 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0
RE
3546 live_regs++;
3547
f3bb6135 3548 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
ff9940b0
RE
3549 live_regs++;
3550
3551 if (frame_pointer_needed)
3552 live_regs += 4;
3553
3554 if (live_regs)
3555 {
f3bb6135 3556 if (lr_save_eliminated || ! regs_ever_live[14])
ff9940b0 3557 live_regs++;
f3bb6135 3558
ff9940b0 3559 if (frame_pointer_needed)
1d5473cb 3560 strcpy (instr, "ldm%?%d0ea\t%|fp, {");
ff9940b0 3561 else
1d5473cb 3562 strcpy (instr, "ldm%?%d0fd\t%|sp!, {");
f3bb6135
RE
3563
3564 for (reg = 0; reg <= 10; reg++)
3565 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0 3566 {
1d5473cb 3567 strcat (instr, "%|");
ff9940b0
RE
3568 strcat (instr, reg_names[reg]);
3569 if (--live_regs)
3570 strcat (instr, ", ");
3571 }
f3bb6135 3572
ff9940b0
RE
3573 if (frame_pointer_needed)
3574 {
1d5473cb 3575 strcat (instr, "%|");
ff9940b0
RE
3576 strcat (instr, reg_names[11]);
3577 strcat (instr, ", ");
1d5473cb 3578 strcat (instr, "%|");
ff9940b0
RE
3579 strcat (instr, reg_names[13]);
3580 strcat (instr, ", ");
1d5473cb 3581 strcat (instr, "%|");
ff9940b0
RE
3582 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
3583 }
3584 else
1d5473cb
RE
3585 {
3586 strcat (instr, "%|");
3587 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
3588 }
2b835d68 3589 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
f3bb6135 3590 output_asm_insn (instr, &operand);
ff9940b0
RE
3591 }
3592 else if (really_return)
3593 {
2b835d68
RE
3594 strcpy (instr, (TARGET_APCS_32
3595 ? "mov%?%d0\t%|pc, %|lr" : "mov%?%d0s\t%|pc, %|lr"));
f3bb6135 3596 output_asm_insn (instr, &operand);
ff9940b0 3597 }
f3bb6135 3598
ff9940b0
RE
3599 return "";
3600}
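
/* As an illustration (assuming the default register names, TARGET_APCS_32,
   and an unconditional return): a function that saves r4-r6 and lr with no
   frame pointer gets the single instruction

       ldmfd   sp!, {r4, r5, r6, pc}

   restoring the saved registers and returning in one go.  */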
3601
e82ea128
DE
3602/* Return nonzero if optimizing and the current function is volatile.
3603 Such functions never return, and many memory cycles can be saved
3604 by not storing register values that will never be needed again.
3605 This optimization was added to speed up context switching in a
3606 kernel application. */
a0b2ce4c 3607
e2c671ba
RE
3608int
3609arm_volatile_func ()
3610{
3611 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
3612}
3613
f3bb6135
RE
3614/* Return the size of the prologue. It's not too bad if we slightly
3615 over-estimate. */
3616
3617static int
3618get_prologue_size ()
3619{
e2c671ba 3620 return profile_flag ? 12 : 0;
f3bb6135
RE
3621}
3622
ff9940b0
RE
3623/* The amount of stack adjustment that happens here, in output_return and in
3624 output_epilogue must be exactly the same as was calculated during reload,
3625 or things will point to the wrong place. The only time we can safely
3626 ignore this constraint is when a function has no arguments on the stack,
 3627 no stack frame requirement and no live registers except for `lr'. If we
3628 can guarantee that by making all function calls into tail calls and that
3629 lr is not clobbered in any other way, then there is no need to push lr
3630 onto the stack. */
3631
cce8749e 3632void
f3bb6135 3633output_func_prologue (f, frame_size)
cce8749e
CH
3634 FILE *f;
3635 int frame_size;
3636{
f3bb6135 3637 int reg, live_regs_mask = 0;
cce8749e 3638 rtx operands[3];
e2c671ba
RE
3639 int volatile_func = (optimize > 0
3640 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 3641
cce8749e
CH
3642 /* Nonzero if we must stuff some register arguments onto the stack as if
3643 they were passed there. */
3644 int store_arg_regs = 0;
3645
abaa26e5
RE
3646 if (arm_ccfsm_state || arm_target_insn)
3647 abort (); /* Sanity check */
3648
ff9940b0
RE
3649 return_used_this_function = 0;
3650 lr_save_eliminated = 0;
3651
f3139301
DE
3652 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
3653 ASM_COMMENT_START, current_function_args_size,
1d5473cb 3654 current_function_pretend_args_size, frame_size);
f3139301
DE
3655 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
3656 ASM_COMMENT_START, frame_pointer_needed,
1d5473cb 3657 current_function_anonymous_args);
cce8749e 3658
e2c671ba 3659 if (volatile_func)
f3139301 3660 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
e2c671ba 3661
cce8749e
CH
3662 if (current_function_anonymous_args && current_function_pretend_args_size)
3663 store_arg_regs = 1;
3664
f3bb6135
RE
3665 for (reg = 0; reg <= 10; reg++)
3666 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e
CH
3667 live_regs_mask |= (1 << reg);
3668
ff9940b0 3669 if (frame_pointer_needed)
e2c671ba 3670 live_regs_mask |= 0xD800;
cce8749e 3671 else if (regs_ever_live[14])
ff9940b0
RE
3672 {
3673 if (! current_function_args_size
f3bb6135 3674 && ! function_really_clobbers_lr (get_insns ()))
e2c671ba 3675 lr_save_eliminated = 1;
ff9940b0
RE
3676 else
3677 live_regs_mask |= 0x4000;
3678 }
cce8749e 3679
cce8749e
CH
3680 if (live_regs_mask)
3681 {
ff9940b0
RE
 3682 /* If a DImode load/store multiple is used and the base register is r3,
 3683 then r4 can become an ever-live register without lr doing so; in this
 3684 case we need to push lr as well, or we will fail to get a proper
 3685 return. */
3686
3687 live_regs_mask |= 0x4000;
3688 lr_save_eliminated = 0;
f3bb6135 3689
cce8749e
CH
3690 }
3691
e2c671ba 3692 if (lr_save_eliminated)
f3139301
DE
3693 fprintf (f,"\t%s I don't think this function clobbers lr\n",
3694 ASM_COMMENT_START);
f3bb6135 3695}
cce8749e
CH
3696
3697
3698void
f3bb6135 3699output_func_epilogue (f, frame_size)
cce8749e
CH
3700 FILE *f;
3701 int frame_size;
3702{
abaa26e5 3703 int reg, live_regs_mask = 0, code_size = 0;
ff9940b0
RE
 3704 /* If we need this, then it will always be at least this much */
3705 int floats_offset = 24;
cce8749e 3706 rtx operands[3];
e2c671ba
RE
3707 int volatile_func = (optimize > 0
3708 && TREE_THIS_VOLATILE (current_function_decl));
cce8749e 3709
ff9940b0 3710 if (use_return_insn() && return_used_this_function)
cce8749e 3711 {
ff9940b0
RE
3712 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
3713 {
3714 abort ();
3715 }
f3bb6135 3716 goto epilogue_done;
cce8749e 3717 }
cce8749e 3718
e2c671ba
RE
3719 /* A volatile function should never return. Call abort. */
3720 if (volatile_func)
3721 {
3722 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
2b835d68 3723 assemble_external_libcall (op);
e2c671ba
RE
3724 output_asm_insn ("bl\t%a0", &op);
3725 code_size = 4;
3726 goto epilogue_done;
3727 }
3728
f3bb6135
RE
3729 for (reg = 0; reg <= 10; reg++)
3730 if (regs_ever_live[reg] && ! call_used_regs[reg])
cce8749e 3731 {
ff9940b0
RE
3732 live_regs_mask |= (1 << reg);
3733 floats_offset += 4;
cce8749e
CH
3734 }
3735
ff9940b0 3736 if (frame_pointer_needed)
cce8749e 3737 {
f3bb6135
RE
3738 for (reg = 23; reg > 15; reg--)
3739 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0 3740 {
f3139301
DE
3741 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
3742 reg_names[reg], REGISTER_PREFIX, floats_offset);
ff9940b0
RE
3743 floats_offset += 12;
3744 code_size += 4;
3745 }
3746
3747 live_regs_mask |= 0xA800;
1d5473cb 3748 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
2b835d68 3749 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
3750 code_size += 4;
3751 }
3752 else
3753 {
d2288d8d
TG
3754 /* Restore stack pointer if necessary. */
3755 if (frame_size)
3756 {
3757 operands[0] = operands[1] = stack_pointer_rtx;
3758 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
3759 output_add_immediate (operands);
3760 }
3761
f3bb6135
RE
3762 for (reg = 16; reg < 24; reg++)
3763 if (regs_ever_live[reg] && ! call_used_regs[reg])
ff9940b0 3764 {
f3139301
DE
3765 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
3766 reg_names[reg], REGISTER_PREFIX);
ff9940b0
RE
3767 code_size += 4;
3768 }
cce8749e
CH
3769 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
3770 {
1d5473cb 3771 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
2b835d68 3772 TARGET_APCS_32 ? FALSE : TRUE);
cce8749e
CH
3773 code_size += 4;
3774 }
3775 else
3776 {
ff9940b0 3777 if (live_regs_mask || regs_ever_live[14])
cce8749e 3778 {
ff9940b0 3779 live_regs_mask |= 0x4000;
1d5473cb 3780 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
cce8749e
CH
3781 code_size += 4;
3782 }
3783 if (current_function_pretend_args_size)
3784 {
3785 operands[0] = operands[1] = stack_pointer_rtx;
3786 operands[2] = gen_rtx (CONST_INT, VOIDmode,
3787 current_function_pretend_args_size);
3788 output_add_immediate (operands);
3789 }
2b835d68
RE
3790 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
3791 : "\tmovs\t%spc, %slr\n"),
f3139301 3792 REGISTER_PREFIX, REGISTER_PREFIX);
cce8749e
CH
3793 code_size += 4;
3794 }
3795 }
f3bb6135
RE
3796
3797 epilogue_done:
3798
3799 /* insn_addresses isn't allocated when not optimizing */
3800
3801 if (optimize > 0)
3802 arm_increase_location (code_size
3803 + insn_addresses[INSN_UID (get_last_insn ())]
3804 + get_prologue_size ());
3805
cce8749e 3806 current_function_anonymous_args = 0;
f3bb6135 3807}
e2c671ba
RE
3808
3809static void
3810emit_multi_reg_push (mask)
3811 int mask;
3812{
3813 int num_regs = 0;
3814 int i, j;
3815 rtx par;
3816
3817 for (i = 0; i < 16; i++)
3818 if (mask & (1 << i))
3819 num_regs++;
3820
3821 if (num_regs == 0 || num_regs > 16)
3822 abort ();
3823
3824 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
3825
3826 for (i = 0; i < 16; i++)
3827 {
3828 if (mask & (1 << i))
3829 {
3830 XVECEXP (par, 0, 0)
3831 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
3832 gen_rtx (PRE_DEC, BLKmode,
3833 stack_pointer_rtx)),
3834 gen_rtx (UNSPEC, BLKmode,
3835 gen_rtvec (1, gen_rtx (REG, SImode, i)),
3836 2));
3837 break;
3838 }
3839 }
3840
3841 for (j = 1, i++; j < num_regs; i++)
3842 {
3843 if (mask & (1 << i))
3844 {
3845 XVECEXP (par, 0, j)
3846 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
3847 j++;
3848 }
3849 }
3850 emit_insn (par);
3851}
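
/* A sketch of what gets built here: for MASK == 0x4010 (r4 and lr) the
   PARALLEL is roughly

       (parallel [(set (mem:BLK (pre_dec:BLK sp))
                       (unspec:BLK [(reg:SI 4)] 2))
                  (use (reg:SI 14))])

   which is exactly the shape that multi_register_push () above accepts.  */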
3852
3853void
3854arm_expand_prologue ()
3855{
3856 int reg;
3857 rtx amount = GEN_INT (- get_frame_size ());
3858 rtx push_insn;
3859 int num_regs;
3860 int live_regs_mask = 0;
3861 int store_arg_regs = 0;
3862 int volatile_func = (optimize > 0
3863 && TREE_THIS_VOLATILE (current_function_decl));
3864
3865 if (current_function_anonymous_args && current_function_pretend_args_size)
3866 store_arg_regs = 1;
3867
3868 if (! volatile_func)
3869 for (reg = 0; reg <= 10; reg++)
3870 if (regs_ever_live[reg] && ! call_used_regs[reg])
3871 live_regs_mask |= 1 << reg;
3872
3873 if (! volatile_func && regs_ever_live[14])
3874 live_regs_mask |= 0x4000;
3875
3876 if (frame_pointer_needed)
3877 {
3878 live_regs_mask |= 0xD800;
3879 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
3880 stack_pointer_rtx));
3881 }
3882
3883 if (current_function_pretend_args_size)
3884 {
3885 if (store_arg_regs)
3886 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
3887 & 0xf);
3888 else
3889 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
3890 GEN_INT (-current_function_pretend_args_size)));
3891 }
3892
3893 if (live_regs_mask)
3894 {
3895 /* If we have to push any regs, then we must push lr as well, or
ddd5a7c1 3896 we won't get a proper return. */
e2c671ba
RE
3897 live_regs_mask |= 0x4000;
3898 emit_multi_reg_push (live_regs_mask);
3899 }
3900
3901 /* For now the integer regs are still pushed in output_func_epilogue (). */
3902
3903 if (! volatile_func)
3904 for (reg = 23; reg > 15; reg--)
3905 if (regs_ever_live[reg] && ! call_used_regs[reg])
3906 emit_insn (gen_rtx (SET, VOIDmode,
3907 gen_rtx (MEM, XFmode,
3908 gen_rtx (PRE_DEC, XFmode,
3909 stack_pointer_rtx)),
3910 gen_rtx (REG, XFmode, reg)));
3911
3912 if (frame_pointer_needed)
3913 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
3914 (GEN_INT
3915 (-(4 + current_function_pretend_args_size)))));
3916
3917 if (amount != const0_rtx)
3918 {
3919 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
3920 emit_insn (gen_rtx (CLOBBER, VOIDmode,
3921 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
3922 }
3923
3924 /* If we are profiling, make sure no instructions are scheduled before
3925 the call to mcount. */
3926 if (profile_flag || profile_block_flag)
3927 emit_insn (gen_blockage ());
3928}
3929
cce8749e 3930\f
9997d19d
RE
3931/* If CODE is 'd', then X is a condition operand and the instruction
3932 should only be executed if the condition is true.
ddd5a7c1 3933 If CODE is 'D', then X is a condition operand and the instruction
9997d19d
RE
3934 should only be executed if the condition is false: however, if the mode
3935 of the comparison is CCFPEmode, then always execute the instruction -- we
3936 do this because in these circumstances !GE does not necessarily imply LT;
3937 in these cases the instruction pattern will take care to make sure that
3938 an instruction containing %d will follow, thereby undoing the effects of
ddd5a7c1 3939 doing this instruction unconditionally.
9997d19d
RE
3940 If CODE is 'N' then X is a floating point operand that must be negated
3941 before output.
3942 If CODE is 'B' then output a bitwise inverted value of X (a const int).
3943 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
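/* A worked example (illustrative only, assuming the usual ARM register
   names and an empty REGISTER_PREFIX): printing (reg:DI 4) with code 'M'
   gives "{r4-r5}", while printing (const_int 5) with code 'B' gives "-6",
   the bitwise inverse of 5.  */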
3944
3945void
3946arm_print_operand (stream, x, code)
3947 FILE *stream;
3948 rtx x;
3949 int code;
3950{
3951 switch (code)
3952 {
3953 case '@':
f3139301 3954 fputs (ASM_COMMENT_START, stream);
9997d19d
RE
3955 return;
3956
3957 case '|':
f3139301 3958 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
3959 return;
3960
3961 case '?':
3962 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
3963 fputs (arm_condition_codes[arm_current_cc], stream);
3964 return;
3965
3966 case 'N':
3967 {
3968 REAL_VALUE_TYPE r;
3969 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3970 r = REAL_VALUE_NEGATE (r);
3971 fprintf (stream, "%s", fp_const_from_val (&r));
3972 }
3973 return;
3974
3975 case 'B':
3976 if (GET_CODE (x) == CONST_INT)
3977 fprintf (stream,
3978#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
3979 "%d",
3980#else
3981 "%ld",
3982#endif
3983 ARM_SIGN_EXTEND (~ INTVAL (x)));
3984 else
3985 {
3986 putc ('~', stream);
3987 output_addr_const (stream, x);
3988 }
3989 return;
3990
3991 case 'i':
3992 fprintf (stream, "%s", arithmetic_instr (x, 1));
3993 return;
3994
3995 case 'I':
3996 fprintf (stream, "%s", arithmetic_instr (x, 0));
3997 return;
3998
3999 case 'S':
4000 {
4001 HOST_WIDE_INT val;
e2c671ba 4002 char *shift = shift_op (x, &val);
9997d19d 4003
e2c671ba
RE
4004 if (shift)
4005 {
4006 fprintf (stream, ", %s ", shift_op (x, &val));
4007 if (val == -1)
4008 arm_print_operand (stream, XEXP (x, 1), 0);
4009 else
4010 fprintf (stream,
9997d19d 4011#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
e2c671ba 4012 "#%d",
9997d19d 4013#else
e2c671ba 4014 "#%ld",
9997d19d 4015#endif
e2c671ba
RE
4016 val);
4017 }
9997d19d
RE
4018 }
4019 return;
4020
4021 case 'R':
4022 if (REGNO (x) > 15)
4023 abort ();
f3139301 4024 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
4025 fputs (reg_names[REGNO (x) + 1], stream);
4026 return;
4027
4028 case 'm':
f3139301 4029 fputs (REGISTER_PREFIX, stream);
9997d19d
RE
4030 if (GET_CODE (XEXP (x, 0)) == REG)
4031 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
4032 else
4033 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
4034 return;
4035
4036 case 'M':
f3139301
DE
4037 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
4038 REGISTER_PREFIX, reg_names[REGNO (x) - 1
1d5473cb
RE
4039 + ((GET_MODE_SIZE (GET_MODE (x))
4040 + GET_MODE_SIZE (SImode) - 1)
4041 / GET_MODE_SIZE (SImode))]);
9997d19d
RE
4042 return;
4043
4044 case 'd':
4045 if (x)
4046 fputs (arm_condition_codes[get_arm_condition_code (x)],
4047 stream);
4048 return;
4049
4050 case 'D':
4051 if (x && (flag_fast_math
4052 || GET_CODE (x) == EQ || GET_CODE (x) == NE
4053 || (GET_MODE (XEXP (x, 0)) != CCFPEmode
4054 && (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))
4055 != MODE_FLOAT))))
4056 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
4057 (get_arm_condition_code (x))],
4058 stream);
4059 return;
4060
4061 default:
4062 if (x == 0)
4063 abort ();
4064
4065 if (GET_CODE (x) == REG)
1d5473cb 4066 {
f3139301 4067 fputs (REGISTER_PREFIX, stream);
1d5473cb
RE
4068 fputs (reg_names[REGNO (x)], stream);
4069 }
9997d19d
RE
4070 else if (GET_CODE (x) == MEM)
4071 {
4072 output_memory_reference_mode = GET_MODE (x);
4073 output_address (XEXP (x, 0));
4074 }
4075 else if (GET_CODE (x) == CONST_DOUBLE)
4076 fprintf (stream, "#%s", fp_immediate_constant (x));
4077 else if (GET_CODE (x) == NEG)
4078 abort (); /* This should never happen now. */
4079 else
4080 {
4081 fputc ('#', stream);
4082 output_addr_const (stream, x);
4083 }
4084 }
4085}
4086
cce8749e
CH
4087/* Increase the `arm_text_location' by AMOUNT if we're in the text
4088 segment. */
4089
4090void
4091arm_increase_location (amount)
4092 int amount;
4093{
4094 if (in_text_section ())
4095 arm_text_location += amount;
f3bb6135 4096}
cce8749e
CH
4097
4098
4099/* Output a label definition. If this label is within the .text segment, it
4100 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
4101 GCC may remember names not starting with a `*' for a long time, but such
4102 names are a minority anyway, so we just make a copy. Do not store the leading `*'
4103 if the name starts with one. */
4104
4105void
4106arm_asm_output_label (stream, name)
4107 FILE *stream;
4108 char *name;
4109{
4110 char *real_name, *s;
4111 struct label_offset *cur;
4112 int hash = 0;
4113
2b835d68 4114 ARM_OUTPUT_LABEL (stream, name);
cce8749e
CH
4115 if (! in_text_section ())
4116 return;
4117
4118 if (name[0] == '*')
4119 {
4120 real_name = xmalloc (1 + strlen (&name[1]));
4121 strcpy (real_name, &name[1]);
4122 }
4123 else
4124 {
4125 real_name = xmalloc (2 + strlen (name));
f3139301 4126 strcpy (real_name, USER_LABEL_PREFIX);
cce8749e
CH
4127 strcat (real_name, name);
4128 }
4129 for (s = real_name; *s; s++)
4130 hash += *s;
f3bb6135 4131
cce8749e 4132 hash = hash % LABEL_HASH_SIZE;
838928c2 4133 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
cce8749e
CH
4134 cur->name = real_name;
4135 cur->offset = arm_text_location;
4136 cur->cdr = offset_table[hash];
4137 offset_table[hash] = cur;
f3bb6135 4138}
cce8749e 4139
cce8749e
CH
4140/* Output code resembling an .lcomm directive. /bin/as doesn't have this
4141 directive, hence this hack, which works by reserving some `.space' in the
4142 bss segment directly.
4143
4144 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
4145 define STATIC COMMON space but merely STATIC BSS space. */
4146
4147void
4148output_lcomm_directive (stream, name, size, rounded)
4149 FILE *stream;
4150 char *name;
4151 int size, rounded;
4152{
f3139301 4153 fprintf (stream, "\n\t.bss\t%s .lcomm\n", ASM_COMMENT_START);
cce8749e
CH
4154 assemble_name (stream, name);
4155 fprintf (stream, ":\t.space\t%d\n", rounded);
4156 if (in_text_section ())
4157 fputs ("\n\t.text\n", stream);
4158 else
4159 fputs ("\n\t.data\n", stream);
f3bb6135 4160}
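/* An illustrative call (the name "counter" and an ASM_COMMENT_START of "@"
   are assumptions, and assemble_name may add a user label prefix):

     output_lcomm_directive (stream, "counter", 4, 4);

   from within the text section would emit roughly

	.bss	@ .lcomm
   counter:	.space	4

	.text
*/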
cce8749e
CH
4161\f
4162/* A finite state machine takes care of noticing whether or not instructions
4163 can be conditionally executed, and thus decrease execution time and code
4164 size by deleting branch instructions. The fsm is controlled by
4165 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
4166
4167/* The states of the fsm controlling condition codes are:
4168 0: normal, do nothing special
4169 1: make ASM_OUTPUT_OPCODE not output this instruction
4170 2: make ASM_OUTPUT_OPCODE not output this instruction
4171 3: make instructions conditional
4172 4: make instructions conditional
4173
4174 State transitions (state->state by whom under condition):
4175 0 -> 1 final_prescan_insn if the `target' is a label
4176 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
4177 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
4178 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
4179 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
4180 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
4181 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
4182 (the target insn is arm_target_insn).
4183
ff9940b0
RE
4184 If the jump clobbers the conditions then we use states 2 and 4.
4185
4186 A similar thing can be done with conditional return insns.
4187
cce8749e
CH
4188 XXX In case the `target' is an unconditional branch, this conditionalising
4189 of the instructions always reduces code size, but not always execution
4190 time. But then, I want to reduce the code size to somewhere near what
4191 /bin/cc produces. */
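/* As an illustration of the transformation (editorial sketch; the register
   choices are hypothetical), a fragment such as "if (x == 0) y++;" would
   normally be output as

	cmp	r0, #0
	bne	.L1
	add	r1, r1, #1
   .L1:

   The fsm notices that the branch skips a single conditionalisable insn
   and arranges for

	cmp	r0, #0
	addeq	r1, r1, #1

   to be output instead, saving both the branch and the label.  */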
4192
cce8749e
CH
4193/* Returns the index of the ARM condition code string in
4194 `arm_condition_codes'. COMPARISON should be an rtx like
4195 `(eq (...) (...))'. */
4196
4197int
4198get_arm_condition_code (comparison)
4199 rtx comparison;
4200{
4201 switch (GET_CODE (comparison))
4202 {
4203 case NE: return (1);
4204 case EQ: return (0);
4205 case GE: return (10);
4206 case GT: return (12);
4207 case LE: return (13);
4208 case LT: return (11);
4209 case GEU: return (2);
4210 case GTU: return (8);
4211 case LEU: return (9);
4212 case LTU: return (3);
4213 default: abort ();
4214 }
4215 /*NOTREACHED*/
4216 return (42);
f3bb6135 4217}
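/* The indices above select entries of arm_condition_codes, which is
   expected to follow the architectural ordering of the ARM condition
   field (eq, ne, cs, cc, mi, pl, vs, vc, hi, ls, ge, lt, gt, le, ...);
   so, for instance, a GE comparison returns 10 and hence selects the
   "ge" suffix.  */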
cce8749e
CH
4218
4219
4220void
4221final_prescan_insn (insn, opvec, noperands)
4222 rtx insn;
4223 rtx *opvec;
4224 int noperands;
4225{
4226 /* BODY will hold the body of INSN. */
4227 register rtx body = PATTERN (insn);
4228
4229 /* This will be 1 if trying to repeat the trick, and things need to be
4230 reversed if it appears to fail. */
4231 int reverse = 0;
4232
ff9940b0
RE
4233 /* JUMP_CLOBBERS will be nonzero if the condition codes are clobbered when
4234 the branch is taken, even if the rtl suggests otherwise.  It also
4235 means that we have to grub around within the jump expression to find
4236 out what the conditions are when the jump isn't taken. */
4237 int jump_clobbers = 0;
4238
4239 /* If we start with a return insn, we only succeed if we find another one. */
4240 int seeking_return = 0;
4241
cce8749e
CH
4242 /* START_INSN will hold the insn from where we start looking. This is the
4243 first insn after the following code_label if REVERSE is true. */
4244 rtx start_insn = insn;
4245
4246 /* If in state 4, check if the target branch is reached, in order to
4247 change back to state 0. */
4248 if (arm_ccfsm_state == 4)
4249 {
4250 if (insn == arm_target_insn)
abaa26e5
RE
4251 {
4252 arm_target_insn = NULL;
cce8749e 4253 arm_ccfsm_state = 0;
abaa26e5 4254 }
cce8749e
CH
4255 return;
4256 }
4257
4258 /* If in state 3, it is possible to repeat the trick, if this insn is an
4259 unconditional branch to a label, and immediately following this branch
4260 is the previous target label which is only used once, and the label this
4261 branch jumps to is not too far off. */
4262 if (arm_ccfsm_state == 3)
4263 {
4264 if (simplejump_p (insn))
4265 {
4266 start_insn = next_nonnote_insn (start_insn);
4267 if (GET_CODE (start_insn) == BARRIER)
4268 {
4269 /* XXX Isn't this always a barrier? */
4270 start_insn = next_nonnote_insn (start_insn);
4271 }
4272 if (GET_CODE (start_insn) == CODE_LABEL
4273 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
4274 && LABEL_NUSES (start_insn) == 1)
4275 reverse = TRUE;
4276 else
4277 return;
4278 }
ff9940b0
RE
4279 else if (GET_CODE (body) == RETURN)
4280 {
4281 start_insn = next_nonnote_insn (start_insn);
4282 if (GET_CODE (start_insn) == BARRIER)
4283 start_insn = next_nonnote_insn (start_insn);
4284 if (GET_CODE (start_insn) == CODE_LABEL
4285 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
4286 && LABEL_NUSES (start_insn) == 1)
4287 {
4288 reverse = TRUE;
4289 seeking_return = 1;
4290 }
4291 else
4292 return;
4293 }
cce8749e
CH
4294 else
4295 return;
4296 }
4297
4298 if (arm_ccfsm_state != 0 && !reverse)
4299 abort ();
4300 if (GET_CODE (insn) != JUMP_INSN)
4301 return;
4302
ddd5a7c1 4303 /* This jump might be paralleled with a clobber of the condition codes;
ff9940b0
RE
4304 the jump should always come first.  */
4305 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4306 body = XVECEXP (body, 0, 0);
4307
4308#if 0
4309 /* If this is a conditional return then we don't want to know */
4310 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
4311 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
4312 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
4313 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
4314 return;
4315#endif
4316
cce8749e
CH
4317 if (reverse
4318 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
4319 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
4320 {
4321 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
4322 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
4323 int then_not_else = TRUE;
ff9940b0 4324 rtx this_insn = start_insn, label = 0;
cce8749e 4325
ff9940b0 4326 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5bbe2d40
RE
4327 {
4328 /* The code below is wrong for these, and I haven't time to
4329 fix it now. So we just do the safe thing and return. This
4330 whole function needs re-writing anyway. */
4331 jump_clobbers = 1;
4332 return;
4333 }
ff9940b0 4334
cce8749e
CH
4335 /* Register the insn jumped to. */
4336 if (reverse)
ff9940b0
RE
4337 {
4338 if (!seeking_return)
4339 label = XEXP (SET_SRC (body), 0);
4340 }
cce8749e
CH
4341 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
4342 label = XEXP (XEXP (SET_SRC (body), 1), 0);
4343 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
4344 {
4345 label = XEXP (XEXP (SET_SRC (body), 2), 0);
4346 then_not_else = FALSE;
4347 }
ff9940b0
RE
4348 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
4349 seeking_return = 1;
4350 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
4351 {
4352 seeking_return = 1;
4353 then_not_else = FALSE;
4354 }
cce8749e
CH
4355 else
4356 abort ();
4357
4358 /* See how many insns this branch skips, and what kind of insns. If all
4359 insns are okay, and the label or unconditional branch to the same
4360 label is not too far away, succeed. */
4361 for (insns_skipped = 0;
4362 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
4363 insns_skipped++)
4364 {
4365 rtx scanbody;
4366
4367 this_insn = next_nonnote_insn (this_insn);
4368 if (!this_insn)
4369 break;
4370
4371 scanbody = PATTERN (this_insn);
4372
4373 switch (GET_CODE (this_insn))
4374 {
4375 case CODE_LABEL:
4376 /* Succeed if it is the target label, otherwise fail since
4377 control falls in from somewhere else. */
4378 if (this_insn == label)
4379 {
ff9940b0
RE
4380 if (jump_clobbers)
4381 {
4382 arm_ccfsm_state = 2;
4383 this_insn = next_nonnote_insn (this_insn);
4384 }
4385 else
4386 arm_ccfsm_state = 1;
cce8749e
CH
4387 succeed = TRUE;
4388 }
4389 else
4390 fail = TRUE;
4391 break;
4392
ff9940b0 4393 case BARRIER:
cce8749e 4394 /* Succeed if the following insn is the target label.
ff9940b0
RE
4395 Otherwise fail.
4396 If return insns are used then the last insn in a function
4397 will be a barrier. */
cce8749e 4398 this_insn = next_nonnote_insn (this_insn);
ff9940b0 4399 if (this_insn && this_insn == label)
cce8749e 4400 {
ff9940b0
RE
4401 if (jump_clobbers)
4402 {
4403 arm_ccfsm_state = 2;
4404 this_insn = next_nonnote_insn (this_insn);
4405 }
4406 else
4407 arm_ccfsm_state = 1;
cce8749e
CH
4408 succeed = TRUE;
4409 }
4410 else
4411 fail = TRUE;
4412 break;
4413
ff9940b0 4414 case CALL_INSN:
2b835d68
RE
4415 /* If using 32-bit addresses the condition codes are not preserved over
4416 calls.  */
4417 if (TARGET_APCS_32)
ff9940b0
RE
4418 fail = TRUE;
4419 break;
2b835d68 4420
cce8749e
CH
4421 case JUMP_INSN:
4422 /* If this is an unconditional branch to the same label, succeed.
4423 If it is to another label, do nothing. If it is conditional,
4424 fail. */
4425 /* XXX Probably, the test for the SET and the PC are unnecessary. */
4426
ff9940b0
RE
4427 if (GET_CODE (scanbody) == SET
4428 && GET_CODE (SET_DEST (scanbody)) == PC)
cce8749e
CH
4429 {
4430 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
4431 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
4432 {
4433 arm_ccfsm_state = 2;
4434 succeed = TRUE;
4435 }
4436 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
4437 fail = TRUE;
4438 }
ff9940b0
RE
4439 else if (GET_CODE (scanbody) == RETURN
4440 && seeking_return)
4441 {
4442 arm_ccfsm_state = 2;
4443 succeed = TRUE;
4444 }
4445 else if (GET_CODE (scanbody) == PARALLEL)
4446 {
4447 switch (get_attr_conds (this_insn))
4448 {
4449 case CONDS_NOCOND:
4450 break;
4451 default:
4452 fail = TRUE;
4453 break;
4454 }
4455 }
cce8749e
CH
4456 break;
4457
4458 case INSN:
ff9940b0
RE
4459 /* Instructions using or affecting the condition codes make it
4460 fail. */
4461 if ((GET_CODE (scanbody) == SET
4462 || GET_CODE (scanbody) == PARALLEL)
4463 && get_attr_conds (this_insn) != CONDS_NOCOND)
cce8749e
CH
4464 fail = TRUE;
4465 break;
4466
4467 default:
4468 break;
4469 }
4470 }
4471 if (succeed)
4472 {
ff9940b0 4473 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
cce8749e 4474 arm_target_label = CODE_LABEL_NUMBER (label);
ff9940b0
RE
4475 else if (seeking_return || arm_ccfsm_state == 2)
4476 {
4477 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
4478 {
4479 this_insn = next_nonnote_insn (this_insn);
4480 if (this_insn && (GET_CODE (this_insn) == BARRIER
4481 || GET_CODE (this_insn) == CODE_LABEL))
4482 abort ();
4483 }
4484 if (!this_insn)
4485 {
4486 /* Oh, dear!  We ran off the end; give up.  */
4487 recog (PATTERN (insn), insn, NULL_PTR);
4488 arm_ccfsm_state = 0;
abaa26e5 4489 arm_target_insn = NULL;
ff9940b0
RE
4490 return;
4491 }
4492 arm_target_insn = this_insn;
4493 }
cce8749e
CH
4494 else
4495 abort ();
ff9940b0
RE
4496 if (jump_clobbers)
4497 {
4498 if (reverse)
4499 abort ();
4500 arm_current_cc =
4501 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
4502 0), 0), 1));
4503 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
4504 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
4505 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
4506 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
4507 }
4508 else
4509 {
4510 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
4511 what it was. */
4512 if (!reverse)
4513 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
4514 0));
4515 }
cce8749e 4516
cce8749e
CH
4517 if (reverse || then_not_else)
4518 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
4519 }
ff9940b0
RE
4520 /* Restore recog_operand (getting the attributes of other insns can
4521 destroy this array, but final.c assumes that it remains intact
ddd5a7c1 4522 across this call; since the insn has been recognized already we
ff9940b0
RE
4523 call recog directly). */
4524 recog (PATTERN (insn), insn, NULL_PTR);
cce8749e 4525 }
f3bb6135 4526}
cce8749e 4527
2b835d68
RE
4528#ifdef AOF_ASSEMBLER
4529/* Special functions only needed when producing AOF syntax assembler. */
4530
4531int arm_text_section_count = 1;
4532
4533char *
4534aof_text_section (in_readonly)
4535 int in_readonly;
4536{
4537 static char buf[100];
4538 if (in_readonly)
4539 return "";
4540 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
4541 arm_text_section_count++);
4542 if (flag_pic)
4543 strcat (buf, ", PIC, REENTRANT");
4544 return buf;
4545}
4546
4547static int arm_data_section_count = 1;
4548
4549char *
4550aof_data_section ()
4551{
4552 static char buf[100];
4553 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
4554 return buf;
4555}
4556
4557/* The AOF assembler is religiously strict about declarations of
4558 imported and exported symbols, so that it is impossible to declare
4559 a function as imported near the beginning of the file, and then to
4560 export it later on. It is, however, possible to delay the decision
4561 until all the functions in the file have been compiled. To get
4562 around this, we maintain a list of the imports and exports, and
4563 delete from it any that are subsequently defined. At the end of
4564 compilation we spit the remainder of the list out before the END
4565 directive. */
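/* A sketch of the intended call sequence (illustrative; the actual calls
   are made from the AOF target macros rather than from this file):

     aof_add_import (name);            -- NAME referenced, not yet defined
     aof_delete_import (name);         -- NAME turned out to be defined here
     aof_dump_imports (asm_out_file);  -- at the end, emit the leftovers
*/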
4566
4567struct import
4568{
4569 struct import *next;
4570 char *name;
4571};
4572
4573static struct import *imports_list = NULL;
4574
4575void
4576aof_add_import (name)
4577 char *name;
4578{
4579 struct import *new;
4580
4581 for (new = imports_list; new; new = new->next)
4582 if (new->name == name)
4583 return;
4584
4585 new = (struct import *) xmalloc (sizeof (struct import));
4586 new->next = imports_list;
4587 imports_list = new;
4588 new->name = name;
4589}
4590
4591void
4592aof_delete_import (name)
4593 char *name;
4594{
4595 struct import **old;
4596
4597 for (old = &imports_list; *old; old = & (*old)->next)
4598 {
4599 if ((*old)->name == name)
4600 {
4601 *old = (*old)->next;
4602 return;
4603 }
4604 }
4605}
4606
4607int arm_main_function = 0;
4608
4609void
4610aof_dump_imports (f)
4611 FILE *f;
4612{
4613 /* The AOF assembler needs this to cause the startup code to be extracted
4614 from the library. Bringing in __main causes the whole thing to work
4615 automagically. */
4616 if (arm_main_function)
4617 {
4618 text_section ();
4619 fputs ("\tIMPORT __main\n", f);
4620 fputs ("\tDCD __main\n", f);
4621 }
4622
4623 /* Now dump the remaining imports. */
4624 while (imports_list)
4625 {
4626 fprintf (f, "\tIMPORT\t");
4627 assemble_name (f, imports_list->name);
4628 fputc ('\n', f);
4629 imports_list = imports_list->next;
4630 }
4631}
4632#endif /* AOF_ASSEMBLER */