/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
01198c2f 40#include "toplev.h"
23b2ce53 41#include "rtl.h"
a25c7971 42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
23b2ce53
RS
44#include "flags.h"
45#include "function.h"
46#include "expr.h"
47#include "regs.h"
aff48bca 48#include "hard-reg-set.h"
c13e8210 49#include "hashtab.h"
23b2ce53 50#include "insn-config.h"
e9a25f70 51#include "recog.h"
23b2ce53 52#include "real.h"
0dfa1860 53#include "bitmap.h"
a05924f9 54#include "basic-block.h"
87ff9c8e 55#include "ggc.h"
e1772ac0 56#include "debug.h"
d23c55c2 57#include "langhooks.h"
ca695ac9 58
1d445e9e
ILT
59/* Commonly used modes. */
60
0f41302f
MS
61enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
62enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
9ec36da5 63enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
0f41302f 64enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
1d445e9e 65
23b2ce53
RS
66
67/* This is *not* reset after each function. It gives each CODE_LABEL
68 in the entire compilation a unique label number. */
69
044b4de3 70static GTY(()) int label_num = 1;
23b2ce53 71
23b2ce53
RS
72/* Highest label number in current function.
73 Zero means use the value of label_num instead.
74 This is nonzero only when belatedly compiling an inline function. */
75
76static int last_label_num;
77
f9bed9d3 78/* Value label_num had when set_new_last_label_num was called.
23b2ce53
RS
79 If label_num has not changed since then, last_label_num is valid. */
80
81static int base_label_num;
82
83/* Nonzero means do not generate NOTEs for source line numbers. */
84
85static int no_line_numbers;
86
87/* Commonly used rtx's, so that we only need space for one copy.
88 These are initialized once for the entire compilation.
5692c7bc
ZW
89 All of these are unique; no other rtx-object will be equal to any
90 of these. */
23b2ce53 91
5da077de 92rtx global_rtl[GR_MAX];
23b2ce53 93
6cde4876
JL
94/* Commonly used RTL for hard registers. These objects are not necessarily
95 unique, so we allocate them separately from global_rtl. They are
96 initialized once per compilation unit, then copied into regno_reg_rtx
97 at the beginning of each function. */
98static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
99
23b2ce53
RS
100/* We record floating-point CONST_DOUBLEs in each floating-point mode for
101 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
102 record a copy of const[012]_rtx. */
103
104rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
105
68d75312
JC
106rtx const_true_rtx;
107
23b2ce53
RS
108REAL_VALUE_TYPE dconst0;
109REAL_VALUE_TYPE dconst1;
110REAL_VALUE_TYPE dconst2;
f7657db9
KG
111REAL_VALUE_TYPE dconst3;
112REAL_VALUE_TYPE dconst10;
23b2ce53 113REAL_VALUE_TYPE dconstm1;
03f2ea93
RS
114REAL_VALUE_TYPE dconstm2;
115REAL_VALUE_TYPE dconsthalf;
f7657db9 116REAL_VALUE_TYPE dconstthird;
ab01a87c
KG
117REAL_VALUE_TYPE dconstpi;
118REAL_VALUE_TYPE dconste;
23b2ce53
RS
119
120/* All references to the following fixed hard registers go through
121 these unique rtl objects. On machines where the frame-pointer and
122 arg-pointer are the same register, they use the same unique object.
123
124 After register allocation, other rtl objects which used to be pseudo-regs
125 may be clobbered to refer to the frame-pointer register.
126 But references that were originally to the frame-pointer can be
127 distinguished from the others because they contain frame_pointer_rtx.
128
ac6f08b0
DE
129 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
130 tricky: until register elimination has taken place hard_frame_pointer_rtx
750c9258 131 should be used if it is being set, and frame_pointer_rtx otherwise. After
ac6f08b0
DE
132 register elimination hard_frame_pointer_rtx should always be used.
133 On machines where the two registers are same (most) then these are the
134 same.
135
23b2ce53
RS
136 In an inline procedure, the stack and frame pointer rtxs may not be
137 used for anything else. */
23b2ce53
RS
138rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
139rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
140rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
141
a4417a86
JW
142/* This is used to implement __builtin_return_address for some machines.
143 See for instance the MIPS port. */
144rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
145
23b2ce53
RS
146/* We make one copy of (const_int C) where C is in
147 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
148 to save space during the compilation and simplify comparisons of
149 integers. */
150
5da077de 151rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
23b2ce53 152
c13e8210
MM
153/* A hash table storing CONST_INTs whose absolute value is greater
154 than MAX_SAVED_CONST_INT. */
155
e2500fed
GK
156static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
157 htab_t const_int_htab;
c13e8210 158
173b24b9 159/* A hash table storing memory attribute structures. */
e2500fed
GK
160static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
161 htab_t mem_attrs_htab;
173b24b9 162
a560d4d4
JH
163/* A hash table storing register attribute structures. */
164static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
165 htab_t reg_attrs_htab;
166
5692c7bc 167/* A hash table storing all CONST_DOUBLEs. */
e2500fed
GK
168static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
169 htab_t const_double_htab;
5692c7bc 170
01d939e8
BS
171#define first_insn (cfun->emit->x_first_insn)
172#define last_insn (cfun->emit->x_last_insn)
173#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
fd3acbb3 174#define last_location (cfun->emit->x_last_location)
01d939e8 175#define first_label_num (cfun->emit->x_first_label_num)
23b2ce53 176
502b8322
AJ
177static rtx make_jump_insn_raw (rtx);
178static rtx make_call_insn_raw (rtx);
179static rtx find_line_note (rtx);
180static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
502b8322
AJ
181static void unshare_all_decls (tree);
182static void reset_used_decls (tree);
183static void mark_label_nuses (rtx);
184static hashval_t const_int_htab_hash (const void *);
185static int const_int_htab_eq (const void *, const void *);
186static hashval_t const_double_htab_hash (const void *);
187static int const_double_htab_eq (const void *, const void *);
188static rtx lookup_const_double (rtx);
189static hashval_t mem_attrs_htab_hash (const void *);
190static int mem_attrs_htab_eq (const void *, const void *);
191static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
192 enum machine_mode);
193static hashval_t reg_attrs_htab_hash (const void *);
194static int reg_attrs_htab_eq (const void *, const void *);
195static reg_attrs *get_reg_attrs (tree, int);
196static tree component_ref_for_mem_expr (tree);
197static rtx gen_const_vector_0 (enum machine_mode);
198static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
32b32b16 199static void copy_rtx_if_shared_1 (rtx *orig);
c13e8210 200
6b24c259
JH
201/* Probability of the conditional branch currently proceeded by try_split.
202 Set to -1 otherwise. */
203int split_branch_probability = -1;
ca695ac9 204\f
c13e8210
MM
205/* Returns a hash code for X (which is a really a CONST_INT). */
206
207static hashval_t
502b8322 208const_int_htab_hash (const void *x)
c13e8210 209{
bcda12f4 210 return (hashval_t) INTVAL ((rtx) x);
c13e8210
MM
211}
212
cc2902df 213/* Returns nonzero if the value represented by X (which is really a
c13e8210
MM
214 CONST_INT) is the same as that given by Y (which is really a
215 HOST_WIDE_INT *). */
216
217static int
502b8322 218const_int_htab_eq (const void *x, const void *y)
c13e8210 219{
5692c7bc
ZW
220 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
221}
222
223/* Returns a hash code for X (which is really a CONST_DOUBLE). */
224static hashval_t
502b8322 225const_double_htab_hash (const void *x)
5692c7bc 226{
5692c7bc 227 rtx value = (rtx) x;
46b33600 228 hashval_t h;
5692c7bc 229
46b33600
RH
230 if (GET_MODE (value) == VOIDmode)
231 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
232 else
fe352c29 233 {
15c812e3 234 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
fe352c29
DJ
235 /* MODE is used in the comparison, so it should be in the hash. */
236 h ^= GET_MODE (value);
237 }
5692c7bc
ZW
238 return h;
239}
240
cc2902df 241/* Returns nonzero if the value represented by X (really a ...)
5692c7bc
ZW
242 is the same as that represented by Y (really a ...) */
243static int
502b8322 244const_double_htab_eq (const void *x, const void *y)
5692c7bc
ZW
245{
246 rtx a = (rtx)x, b = (rtx)y;
5692c7bc
ZW
247
248 if (GET_MODE (a) != GET_MODE (b))
249 return 0;
8580f7a0
RH
250 if (GET_MODE (a) == VOIDmode)
251 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
252 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
253 else
254 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
255 CONST_DOUBLE_REAL_VALUE (b));
c13e8210
MM
256}
257
173b24b9
RK
258/* Returns a hash code for X (which is a really a mem_attrs *). */
259
260static hashval_t
502b8322 261mem_attrs_htab_hash (const void *x)
173b24b9
RK
262{
263 mem_attrs *p = (mem_attrs *) x;
264
265 return (p->alias ^ (p->align * 1000)
266 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
267 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
998d7deb 268 ^ (size_t) p->expr);
173b24b9
RK
269}
270
cc2902df 271/* Returns nonzero if the value represented by X (which is really a
173b24b9
RK
272 mem_attrs *) is the same as that given by Y (which is also really a
273 mem_attrs *). */
c13e8210
MM
274
275static int
502b8322 276mem_attrs_htab_eq (const void *x, const void *y)
c13e8210 277{
173b24b9
RK
278 mem_attrs *p = (mem_attrs *) x;
279 mem_attrs *q = (mem_attrs *) y;
280
998d7deb 281 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
173b24b9 282 && p->size == q->size && p->align == q->align);
c13e8210
MM
283}
284
173b24b9 285/* Allocate a new mem_attrs structure and insert it into the hash table if
10b76d73
RK
286 one identical to it is not already in the table. We are doing this for
287 MEM of mode MODE. */
173b24b9
RK
288
289static mem_attrs *
502b8322
AJ
290get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
291 unsigned int align, enum machine_mode mode)
173b24b9
RK
292{
293 mem_attrs attrs;
294 void **slot;
295
bb056a77
OH
296 /* If everything is the default, we can just return zero.
297 This must match what the corresponding MEM_* macros return when the
298 field is not present. */
998d7deb 299 if (alias == 0 && expr == 0 && offset == 0
10b76d73
RK
300 && (size == 0
301 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
bb056a77
OH
302 && (STRICT_ALIGNMENT && mode != BLKmode
303 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
10b76d73
RK
304 return 0;
305
173b24b9 306 attrs.alias = alias;
998d7deb 307 attrs.expr = expr;
173b24b9
RK
308 attrs.offset = offset;
309 attrs.size = size;
310 attrs.align = align;
311
312 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
313 if (*slot == 0)
314 {
315 *slot = ggc_alloc (sizeof (mem_attrs));
316 memcpy (*slot, &attrs, sizeof (mem_attrs));
317 }
318
319 return *slot;
c13e8210
MM
320}
321
a560d4d4
JH
322/* Returns a hash code for X (which is a really a reg_attrs *). */
323
324static hashval_t
502b8322 325reg_attrs_htab_hash (const void *x)
a560d4d4
JH
326{
327 reg_attrs *p = (reg_attrs *) x;
328
329 return ((p->offset * 1000) ^ (long) p->decl);
330}
331
6356f892 332/* Returns nonzero if the value represented by X (which is really a
a560d4d4
JH
333 reg_attrs *) is the same as that given by Y (which is also really a
334 reg_attrs *). */
335
336static int
502b8322 337reg_attrs_htab_eq (const void *x, const void *y)
a560d4d4
JH
338{
339 reg_attrs *p = (reg_attrs *) x;
340 reg_attrs *q = (reg_attrs *) y;
341
342 return (p->decl == q->decl && p->offset == q->offset);
343}
344/* Allocate a new reg_attrs structure and insert it into the hash table if
345 one identical to it is not already in the table. We are doing this for
346 MEM of mode MODE. */
347
348static reg_attrs *
502b8322 349get_reg_attrs (tree decl, int offset)
a560d4d4
JH
350{
351 reg_attrs attrs;
352 void **slot;
353
354 /* If everything is the default, we can just return zero. */
355 if (decl == 0 && offset == 0)
356 return 0;
357
358 attrs.decl = decl;
359 attrs.offset = offset;
360
361 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
362 if (*slot == 0)
363 {
364 *slot = ggc_alloc (sizeof (reg_attrs));
365 memcpy (*slot, &attrs, sizeof (reg_attrs));
366 }
367
368 return *slot;
369}
370
08394eef
BS
371/* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
372 don't attempt to share with the various global pieces of rtl (such as
373 frame_pointer_rtx). */
374
375rtx
502b8322 376gen_raw_REG (enum machine_mode mode, int regno)
08394eef
BS
377{
378 rtx x = gen_rtx_raw_REG (mode, regno);
379 ORIGINAL_REGNO (x) = regno;
380 return x;
381}
382
c5c76735
JL
383/* There are some RTL codes that require special attention; the generation
384 functions do the raw handling. If you add to this list, modify
385 special_rtx in gengenrtl.c as well. */
386
3b80f6ca 387rtx
502b8322 388gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
3b80f6ca 389{
c13e8210
MM
390 void **slot;
391
3b80f6ca 392 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
5da077de 393 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
3b80f6ca
RH
394
395#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
396 if (const_true_rtx && arg == STORE_FLAG_VALUE)
397 return const_true_rtx;
398#endif
399
c13e8210 400 /* Look up the CONST_INT in the hash table. */
e38992e8
RK
401 slot = htab_find_slot_with_hash (const_int_htab, &arg,
402 (hashval_t) arg, INSERT);
29105cea 403 if (*slot == 0)
1f8f4a0b 404 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
c13e8210
MM
405
406 return (rtx) *slot;
3b80f6ca
RH
407}
408
2496c7bd 409rtx
502b8322 410gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
2496c7bd
LB
411{
412 return GEN_INT (trunc_int_for_mode (c, mode));
413}
414
5692c7bc
ZW
415/* CONST_DOUBLEs might be created from pairs of integers, or from
416 REAL_VALUE_TYPEs. Also, their length is known only at run time,
417 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
418
419/* Determine whether REAL, a CONST_DOUBLE, already exists in the
420 hash table. If so, return its counterpart; otherwise add it
421 to the hash table and return it. */
422static rtx
502b8322 423lookup_const_double (rtx real)
5692c7bc
ZW
424{
425 void **slot = htab_find_slot (const_double_htab, real, INSERT);
426 if (*slot == 0)
427 *slot = real;
428
429 return (rtx) *slot;
430}
29105cea 431
5692c7bc
ZW
432/* Return a CONST_DOUBLE rtx for a floating-point value specified by
433 VALUE in mode MODE. */
0133b7d9 434rtx
502b8322 435const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
0133b7d9 436{
5692c7bc
ZW
437 rtx real = rtx_alloc (CONST_DOUBLE);
438 PUT_MODE (real, mode);
439
440 memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));
441
442 return lookup_const_double (real);
443}
444
445/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
446 of ints: I0 is the low-order word and I1 is the high-order word.
447 Do not use this routine for non-integer modes; convert to
448 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
449
450rtx
502b8322 451immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
5692c7bc
ZW
452{
453 rtx value;
454 unsigned int i;
455
456 if (mode != VOIDmode)
457 {
458 int width;
459 if (GET_MODE_CLASS (mode) != MODE_INT
cb2a532e
AH
460 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
461 /* We can get a 0 for an error mark. */
462 && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
463 && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
5692c7bc
ZW
464 abort ();
465
466 /* We clear out all bits that don't belong in MODE, unless they and
467 our sign bit are all one. So we get either a reasonable negative
468 value or a reasonable unsigned value for this mode. */
469 width = GET_MODE_BITSIZE (mode);
470 if (width < HOST_BITS_PER_WIDE_INT
471 && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
472 != ((HOST_WIDE_INT) (-1) << (width - 1))))
473 i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
474 else if (width == HOST_BITS_PER_WIDE_INT
475 && ! (i1 == ~0 && i0 < 0))
476 i1 = 0;
477 else if (width > 2 * HOST_BITS_PER_WIDE_INT)
478 /* We cannot represent this value as a constant. */
479 abort ();
480
481 /* If this would be an entire word for the target, but is not for
482 the host, then sign-extend on the host so that the number will
483 look the same way on the host that it would on the target.
484
485 For example, when building a 64 bit alpha hosted 32 bit sparc
486 targeted compiler, then we want the 32 bit unsigned value -1 to be
487 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
488 The latter confuses the sparc backend. */
489
490 if (width < HOST_BITS_PER_WIDE_INT
491 && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
492 i0 |= ((HOST_WIDE_INT) (-1) << width);
2454beaf 493
5692c7bc
ZW
494 /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
495 CONST_INT.
2454beaf 496
5692c7bc
ZW
497 ??? Strictly speaking, this is wrong if we create a CONST_INT for
498 a large unsigned constant with the size of MODE being
499 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
500 in a wider mode. In that case we will mis-interpret it as a
501 negative number.
2454beaf 502
5692c7bc
ZW
503 Unfortunately, the only alternative is to make a CONST_DOUBLE for
504 any constant in any mode if it is an unsigned constant larger
505 than the maximum signed integer in an int on the host. However,
506 doing this will break everyone that always expects to see a
507 CONST_INT for SImode and smaller.
508
509 We have always been making CONST_INTs in this case, so nothing
510 new is being broken. */
511
512 if (width <= HOST_BITS_PER_WIDE_INT)
513 i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
514 }
515
516 /* If this integer fits in one word, return a CONST_INT. */
517 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
518 return GEN_INT (i0);
519
520 /* We use VOIDmode for integers. */
521 value = rtx_alloc (CONST_DOUBLE);
522 PUT_MODE (value, VOIDmode);
523
524 CONST_DOUBLE_LOW (value) = i0;
525 CONST_DOUBLE_HIGH (value) = i1;
526
527 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
528 XWINT (value, i) = 0;
529
530 return lookup_const_double (value);
0133b7d9
RH
531}
532
3b80f6ca 533rtx
502b8322 534gen_rtx_REG (enum machine_mode mode, unsigned int regno)
3b80f6ca
RH
535{
536 /* In case the MD file explicitly references the frame pointer, have
537 all such references point to the same frame pointer. This is
538 used during frame pointer elimination to distinguish the explicit
539 references to these registers from pseudos that happened to be
540 assigned to them.
541
542 If we have eliminated the frame pointer or arg pointer, we will
543 be using it as a normal register, for example as a spill
544 register. In such cases, we might be accessing it in a mode that
545 is not Pmode and therefore cannot use the pre-allocated rtx.
546
547 Also don't do this when we are making new REGs in reload, since
548 we don't want to get confused with the real pointers. */
549
550 if (mode == Pmode && !reload_in_progress)
551 {
e10c79fe
LB
552 if (regno == FRAME_POINTER_REGNUM
553 && (!reload_completed || frame_pointer_needed))
3b80f6ca
RH
554 return frame_pointer_rtx;
555#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
e10c79fe
LB
556 if (regno == HARD_FRAME_POINTER_REGNUM
557 && (!reload_completed || frame_pointer_needed))
3b80f6ca
RH
558 return hard_frame_pointer_rtx;
559#endif
560#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
bcb33994 561 if (regno == ARG_POINTER_REGNUM)
3b80f6ca
RH
562 return arg_pointer_rtx;
563#endif
564#ifdef RETURN_ADDRESS_POINTER_REGNUM
bcb33994 565 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
3b80f6ca
RH
566 return return_address_pointer_rtx;
567#endif
fc555370 568 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
2d67bd7b 569 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
68252e27 570 return pic_offset_table_rtx;
bcb33994 571 if (regno == STACK_POINTER_REGNUM)
3b80f6ca
RH
572 return stack_pointer_rtx;
573 }
574
006a94b0 575#if 0
6cde4876 576 /* If the per-function register table has been set up, try to re-use
006a94b0
JL
577 an existing entry in that table to avoid useless generation of RTL.
578
579 This code is disabled for now until we can fix the various backends
580 which depend on having non-shared hard registers in some cases. Long
581 term we want to re-enable this code as it can significantly cut down
e10c79fe
LB
582 on the amount of useless RTL that gets generated.
583
584 We'll also need to fix some code that runs after reload that wants to
585 set ORIGINAL_REGNO. */
586
6cde4876
JL
587 if (cfun
588 && cfun->emit
589 && regno_reg_rtx
590 && regno < FIRST_PSEUDO_REGISTER
591 && reg_raw_mode[regno] == mode)
592 return regno_reg_rtx[regno];
006a94b0 593#endif
6cde4876 594
08394eef 595 return gen_raw_REG (mode, regno);
3b80f6ca
RH
596}
597
41472af8 598rtx
502b8322 599gen_rtx_MEM (enum machine_mode mode, rtx addr)
41472af8
MM
600{
601 rtx rt = gen_rtx_raw_MEM (mode, addr);
602
603 /* This field is not cleared by the mere allocation of the rtx, so
604 we clear it here. */
173b24b9 605 MEM_ATTRS (rt) = 0;
41472af8
MM
606
607 return rt;
608}
ddef6bc7
JJ
609
610rtx
502b8322 611gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
ddef6bc7
JJ
612{
613 /* This is the most common failure type.
614 Catch it early so we can see who does it. */
615 if ((offset % GET_MODE_SIZE (mode)) != 0)
616 abort ();
617
618 /* This check isn't usable right now because combine will
619 throw arbitrary crap like a CALL into a SUBREG in
620 gen_lowpart_for_combine so we must just eat it. */
621#if 0
622 /* Check for this too. */
623 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
624 abort ();
625#endif
5692c7bc 626 return gen_rtx_raw_SUBREG (mode, reg, offset);
ddef6bc7
JJ
627}
628
173b24b9
RK
629/* Generate a SUBREG representing the least-significant part of REG if MODE
630 is smaller than mode of REG, otherwise paradoxical SUBREG. */
631
ddef6bc7 632rtx
502b8322 633gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
ddef6bc7
JJ
634{
635 enum machine_mode inmode;
ddef6bc7
JJ
636
637 inmode = GET_MODE (reg);
638 if (inmode == VOIDmode)
639 inmode = mode;
e0e08ac2
JH
640 return gen_rtx_SUBREG (mode, reg,
641 subreg_lowpart_offset (mode, inmode));
ddef6bc7 642}
c5c76735 643\f
23b2ce53
RS
644/* gen_rtvec (n, [rt1, ..., rtn])
645**
646** This routine creates an rtvec and stores within it the
647** pointers to rtx's which are its arguments.
648*/
649
650/*VARARGS1*/
651rtvec
e34d07f2 652gen_rtvec (int n, ...)
23b2ce53 653{
6268b922 654 int i, save_n;
23b2ce53 655 rtx *vector;
e34d07f2 656 va_list p;
23b2ce53 657
e34d07f2 658 va_start (p, n);
23b2ce53
RS
659
660 if (n == 0)
661 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
662
703ad42b 663 vector = alloca (n * sizeof (rtx));
4f90e4a0 664
23b2ce53
RS
665 for (i = 0; i < n; i++)
666 vector[i] = va_arg (p, rtx);
6268b922
KG
667
668 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
669 save_n = n;
e34d07f2 670 va_end (p);
23b2ce53 671
6268b922 672 return gen_rtvec_v (save_n, vector);
23b2ce53
RS
673}
674
675rtvec
502b8322 676gen_rtvec_v (int n, rtx *argp)
23b2ce53 677{
b3694847
SS
678 int i;
679 rtvec rt_val;
23b2ce53
RS
680
681 if (n == 0)
682 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
683
684 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
685
686 for (i = 0; i < n; i++)
8f985ec4 687 rt_val->elem[i] = *argp++;
23b2ce53
RS
688
689 return rt_val;
690}
691\f
692/* Generate a REG rtx for a new pseudo register of mode MODE.
693 This pseudo is assigned the next sequential register number. */
694
695rtx
502b8322 696gen_reg_rtx (enum machine_mode mode)
23b2ce53 697{
01d939e8 698 struct function *f = cfun;
b3694847 699 rtx val;
23b2ce53 700
f1db3576
JL
701 /* Don't let anything called after initial flow analysis create new
702 registers. */
703 if (no_new_pseudos)
23b2ce53
RS
704 abort ();
705
1b3d8f8a
GK
706 if (generating_concat_p
707 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
708 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
fc84e8a8
RS
709 {
710 /* For complex modes, don't make a single pseudo.
711 Instead, make a CONCAT of two pseudos.
712 This allows noncontiguous allocation of the real and imaginary parts,
713 which makes much better code. Besides, allocating DCmode
714 pseudos overstrains reload on some machines like the 386. */
715 rtx realpart, imagpart;
27e58a70 716 enum machine_mode partmode = GET_MODE_INNER (mode);
fc84e8a8
RS
717
718 realpart = gen_reg_rtx (partmode);
719 imagpart = gen_reg_rtx (partmode);
3b80f6ca 720 return gen_rtx_CONCAT (mode, realpart, imagpart);
fc84e8a8
RS
721 }
722
a560d4d4 723 /* Make sure regno_pointer_align, and regno_reg_rtx are large
0d4903b8 724 enough to have an element for this pseudo reg number. */
23b2ce53 725
3502dc9c 726 if (reg_rtx_no == f->emit->regno_pointer_align_length)
23b2ce53 727 {
3502dc9c 728 int old_size = f->emit->regno_pointer_align_length;
e2ecd91c 729 char *new;
0d4903b8 730 rtx *new1;
0d4903b8 731
e2500fed 732 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
49ad7cfa 733 memset (new + old_size, 0, old_size);
f9e158c3 734 f->emit->regno_pointer_align = (unsigned char *) new;
49ad7cfa 735
703ad42b
KG
736 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
737 old_size * 2 * sizeof (rtx));
49ad7cfa 738 memset (new1 + old_size, 0, old_size * sizeof (rtx));
23b2ce53
RS
739 regno_reg_rtx = new1;
740
3502dc9c 741 f->emit->regno_pointer_align_length = old_size * 2;
23b2ce53
RS
742 }
743
08394eef 744 val = gen_raw_REG (mode, reg_rtx_no);
23b2ce53
RS
745 regno_reg_rtx[reg_rtx_no++] = val;
746 return val;
747}
748
e314a036
JZ
749/* Generate a register with same attributes as REG, but offsetted by OFFSET.
750 Do the big endian correction if needed. */
a560d4d4
JH
751
752rtx
502b8322 753gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
a560d4d4
JH
754{
755 rtx new = gen_rtx_REG (mode, regno);
e314a036
JZ
756 tree decl;
757 HOST_WIDE_INT var_size;
758
759 /* PR middle-end/14084
760 The problem appears when a variable is stored in a larger register
761 and later it is used in the original mode or some mode in between
762 or some part of variable is accessed.
763
764 On little endian machines there is no problem because
765 the REG_OFFSET of the start of the variable is the same when
766 accessed in any mode (it is 0).
767
768 However, this is not true on big endian machines.
769 The offset of the start of the variable is different when accessed
770 in different modes.
771 When we are taking a part of the REG we have to change the OFFSET
772 from offset WRT size of mode of REG to offset WRT size of variable.
773
774 If we would not do the big endian correction the resulting REG_OFFSET
775 would be larger than the size of the DECL.
776
777 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
778
779 REG.mode MODE DECL size old offset new offset description
780 DI SI 4 4 0 int32 in SImode
781 DI SI 1 4 0 char in SImode
782 DI QI 1 7 0 char in QImode
783 DI QI 4 5 1 1st element in QImode
784 of char[4]
785 DI HI 4 6 2 1st element in HImode
786 of int16[2]
787
788 If the size of DECL is equal or greater than the size of REG
789 we can't do this correction because the register holds the
790 whole variable or a part of the variable and thus the REG_OFFSET
791 is already correct. */
792
793 decl = REG_EXPR (reg);
794 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
795 && decl != NULL
796 && offset > 0
797 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
798 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
799 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
800 {
801 int offset_le;
802
803 /* Convert machine endian to little endian WRT size of mode of REG. */
804 if (WORDS_BIG_ENDIAN)
805 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
806 / UNITS_PER_WORD) * UNITS_PER_WORD;
807 else
808 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
809
810 if (BYTES_BIG_ENDIAN)
811 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
812 % UNITS_PER_WORD);
813 else
814 offset_le += offset % UNITS_PER_WORD;
815
816 if (offset_le >= var_size)
817 {
818 /* MODE is wider than the variable so the new reg will cover
819 the whole variable so the resulting OFFSET should be 0. */
820 offset = 0;
821 }
822 else
823 {
824 /* Convert little endian to machine endian WRT size of variable. */
825 if (WORDS_BIG_ENDIAN)
826 offset = ((var_size - 1 - offset_le)
827 / UNITS_PER_WORD) * UNITS_PER_WORD;
828 else
829 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
830
831 if (BYTES_BIG_ENDIAN)
832 offset += ((var_size - 1 - offset_le)
833 % UNITS_PER_WORD);
834 else
835 offset += offset_le % UNITS_PER_WORD;
836 }
837 }
838
a560d4d4 839 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
502b8322 840 REG_OFFSET (reg) + offset);
a560d4d4
JH
841 return new;
842}
843
844/* Set the decl for MEM to DECL. */
845
846void
502b8322 847set_reg_attrs_from_mem (rtx reg, rtx mem)
a560d4d4
JH
848{
849 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
850 REG_ATTRS (reg)
851 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
852}
853
9d18e06b
JZ
854/* Set the register attributes for registers contained in PARM_RTX.
855 Use needed values from memory attributes of MEM. */
856
857void
502b8322 858set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
9d18e06b 859{
f8cfc6aa 860 if (REG_P (parm_rtx))
9d18e06b
JZ
861 set_reg_attrs_from_mem (parm_rtx, mem);
862 else if (GET_CODE (parm_rtx) == PARALLEL)
863 {
864 /* Check for a NULL entry in the first slot, used to indicate that the
865 parameter goes both on the stack and in registers. */
866 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
867 for (; i < XVECLEN (parm_rtx, 0); i++)
868 {
869 rtx x = XVECEXP (parm_rtx, 0, i);
f8cfc6aa 870 if (REG_P (XEXP (x, 0)))
9d18e06b
JZ
871 REG_ATTRS (XEXP (x, 0))
872 = get_reg_attrs (MEM_EXPR (mem),
873 INTVAL (XEXP (x, 1)));
874 }
875 }
876}
877
a560d4d4
JH
878/* Assign the RTX X to declaration T. */
879void
502b8322 880set_decl_rtl (tree t, rtx x)
a560d4d4
JH
881{
882 DECL_CHECK (t)->decl.rtl = x;
883
fbe6ec81
JZ
884 if (!x)
885 return;
886 /* For register, we maintain the reverse information too. */
f8cfc6aa 887 if (REG_P (x))
fbe6ec81
JZ
888 REG_ATTRS (x) = get_reg_attrs (t, 0);
889 else if (GET_CODE (x) == SUBREG)
890 REG_ATTRS (SUBREG_REG (x))
891 = get_reg_attrs (t, -SUBREG_BYTE (x));
892 if (GET_CODE (x) == CONCAT)
893 {
894 if (REG_P (XEXP (x, 0)))
895 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
896 if (REG_P (XEXP (x, 1)))
897 REG_ATTRS (XEXP (x, 1))
898 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
899 }
900 if (GET_CODE (x) == PARALLEL)
901 {
902 int i;
903 for (i = 0; i < XVECLEN (x, 0); i++)
904 {
905 rtx y = XVECEXP (x, 0, i);
906 if (REG_P (XEXP (y, 0)))
907 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
908 }
909 }
910}
911
912/* Assign the RTX X to parameter declaration T. */
913void
914set_decl_incoming_rtl (tree t, rtx x)
915{
916 DECL_INCOMING_RTL (t) = x;
917
a560d4d4
JH
918 if (!x)
919 return;
4d6922ee 920 /* For register, we maintain the reverse information too. */
f8cfc6aa 921 if (REG_P (x))
a560d4d4
JH
922 REG_ATTRS (x) = get_reg_attrs (t, 0);
923 else if (GET_CODE (x) == SUBREG)
924 REG_ATTRS (SUBREG_REG (x))
925 = get_reg_attrs (t, -SUBREG_BYTE (x));
926 if (GET_CODE (x) == CONCAT)
927 {
928 if (REG_P (XEXP (x, 0)))
929 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
930 if (REG_P (XEXP (x, 1)))
931 REG_ATTRS (XEXP (x, 1))
932 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
933 }
934 if (GET_CODE (x) == PARALLEL)
935 {
d4afac5b
JZ
936 int i, start;
937
938 /* Check for a NULL entry, used to indicate that the parameter goes
939 both on the stack and in registers. */
940 if (XEXP (XVECEXP (x, 0, 0), 0))
941 start = 0;
942 else
943 start = 1;
944
945 for (i = start; i < XVECLEN (x, 0); i++)
a560d4d4
JH
946 {
947 rtx y = XVECEXP (x, 0, i);
948 if (REG_P (XEXP (y, 0)))
949 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
950 }
951 }
952}
953
754fdcca
RK
954/* Identify REG (which may be a CONCAT) as a user register. */
955
956void
502b8322 957mark_user_reg (rtx reg)
754fdcca
RK
958{
959 if (GET_CODE (reg) == CONCAT)
960 {
961 REG_USERVAR_P (XEXP (reg, 0)) = 1;
962 REG_USERVAR_P (XEXP (reg, 1)) = 1;
963 }
f8cfc6aa 964 else if (REG_P (reg))
754fdcca
RK
965 REG_USERVAR_P (reg) = 1;
966 else
967 abort ();
968}
969
86fe05e0
RK
970/* Identify REG as a probable pointer register and show its alignment
971 as ALIGN, if nonzero. */
23b2ce53
RS
972
973void
502b8322 974mark_reg_pointer (rtx reg, int align)
23b2ce53 975{
3502dc9c 976 if (! REG_POINTER (reg))
00995e78 977 {
3502dc9c 978 REG_POINTER (reg) = 1;
86fe05e0 979
00995e78
RE
980 if (align)
981 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
982 }
983 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
6614fd40 984 /* We can no-longer be sure just how aligned this pointer is. */
86fe05e0 985 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
23b2ce53
RS
986}
987
/* Return 1 plus largest pseudo reg number used in the current function.
   reg_rtx_no is the next register number to be handed out, so it is one
   past the largest number currently in use.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}
995
/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  /* NOTE(review): last_label_num/base_label_num appear to cache the label
     count from an earlier point; when no labels have been created since
     (label_num == base_label_num) the cached value is returned.  The
     maintenance of these globals is elsewhere in this file — confirm
     before relying on the exact invariant.  */
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}
1005
/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
6de9cd9a
DN
1013
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  /* Widen the recorded range downward to cover X's label number.  */
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
23b2ce53 1024\f
ddef6bc7
JJ
/* Return the final regno of X, which is a SUBREG of a hard
   register.  If CHECK_MODE is nonzero, also verify that the inner
   register's mode is OK for its hard register number.  Aborts on any
   ill-formed subreg rather than returning an error value.  */
int
subreg_hard_regno (rtx x, int check_mode)
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || !REG_P (reg))
    abort ();
  base_regno = REGNO (reg);
  /* Only hard registers are handled here.  */
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
				      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}
1058
23b2ce53
RS
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT && msize <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  /* A VOIDmode or BLKmode inner mode means we could not deduce a usable
     mode for X at all.  */
  if (innermode == VOIDmode || innermode == BLKmode)
    abort ();

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    /* For these rtx codes simplify_gen_subreg knows how to extract the
       low part directly.  */
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
1132\f
b1d673be
RS
/* Return the constant real or imaginary part (which has mode MODE)
   of a complex value X.  The IMAGPART_P argument determines whether
   the real or complex component should be returned.  This function
   returns NULL_RTX if the component isn't a constant.  */

static rtx
gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
{
  tree decl, part;

  /* Only a MEM whose address is a SYMBOL_REF can be traced back to a
     tree-level constant.  */
  if (MEM_P (x)
      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
    {
      decl = SYMBOL_REF_DECL (XEXP (x, 0));
      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
	{
	  part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  /* Expand only simple numeric component constants.  */
	  if (TREE_CODE (part) == REAL_CST
	      || TREE_CODE (part) == INTEGER_CST)
	    return expand_expr (part, NULL_RTX, mode, 0);
	}
    }
  return NULL_RTX;
}
1157
280194b0
RS
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 0);
  if (part != NULL_RTX)
    return part;

  /* On WORDS_BIG_ENDIAN targets a narrow real part of a complex value in
     a hard register cannot be addressed as a lowpart/highpart; this is
     an internal error.  (internal_error does not return.)  */
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}
1182
/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (enum machine_mode mode, rtx x)
{
  rtx part;

  /* Handle complex constants.  */
  part = gen_complex_constant_part (mode, x, 1);
  if (part != NULL_RTX)
    return part;

  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  /* Mirror image of gen_realpart: a narrow imaginary part of a complex
     value in a hard register cannot be extracted on little-endian word
     order.  (internal_error does not return.)  */
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}
1207\f
/* Return the high-order part (which has mode MODE) of X, emitting any
   code needed to make the result a valid operand.  Aborts if the part
   cannot be formed.  */
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && MEM_P (result))
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}
5222e470 1233
26d249eb 1234/* Like gen_highpart, but accept mode of EXP operand in case EXP can
5222e470
JH
1235 be VOIDmode constant. */
1236rtx
502b8322 1237gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
5222e470
JH
1238{
1239 if (GET_MODE (exp) != VOIDmode)
1240 {
1241 if (GET_MODE (exp) != innermode)
1242 abort ();
1243 return gen_highpart (outermode, exp);
1244 }
1245 return simplify_gen_subreg (outermode, exp, innermode,
1246 subreg_highpart_offset (outermode, innermode));
1247}
68252e27 1248
e0e08ac2
JH
1249/* Return offset in bytes to get OUTERMODE low part
1250 of the value in mode INNERMODE stored in memory in target format. */
8698cce3 1251
e0e08ac2 1252unsigned int
502b8322 1253subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
e0e08ac2
JH
1254{
1255 unsigned int offset = 0;
1256 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
8698cce3 1257
e0e08ac2 1258 if (difference > 0)
ccba022b 1259 {
e0e08ac2
JH
1260 if (WORDS_BIG_ENDIAN)
1261 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1262 if (BYTES_BIG_ENDIAN)
1263 offset += difference % UNITS_PER_WORD;
ccba022b 1264 }
ddef6bc7 1265
e0e08ac2 1266 return offset;
ccba022b 1267}
eea50aa0 1268
e0e08ac2
JH
1269/* Return offset in bytes to get OUTERMODE high part
1270 of the value in mode INNERMODE stored in memory in target format. */
1271unsigned int
502b8322 1272subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
eea50aa0
JH
1273{
1274 unsigned int offset = 0;
1275 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1276
e0e08ac2 1277 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
68252e27 1278 abort ();
e0e08ac2 1279
eea50aa0
JH
1280 if (difference > 0)
1281 {
e0e08ac2 1282 if (! WORDS_BIG_ENDIAN)
eea50aa0 1283 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
e0e08ac2 1284 if (! BYTES_BIG_ENDIAN)
eea50aa0
JH
1285 offset += difference % UNITS_PER_WORD;
1286 }
1287
e0e08ac2 1288 return offset;
eea50aa0 1289}
ccba022b 1290
23b2ce53
RS
1291/* Return 1 iff X, assumed to be a SUBREG,
1292 refers to the least significant part of its containing reg.
1293 If X is not a SUBREG, always return 1 (it is its own low part!). */
1294
1295int
502b8322 1296subreg_lowpart_p (rtx x)
23b2ce53
RS
1297{
1298 if (GET_CODE (x) != SUBREG)
1299 return 1;
a3a03040
RK
1300 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1301 return 0;
23b2ce53 1302
e0e08ac2
JH
1303 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1304 == SUBREG_BYTE (x));
23b2ce53
RS
1305}
1306\f
ddef6bc7
JJ
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  /* NOTE(review): when the strict address check succeeds after
	     reload, control deliberately falls through to the
	     simplify_gen_subreg call below rather than returning NEW
	     here — confirm this is the intended behavior.  */
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
1371
23b2ce53
RS
1372/* Similar to `operand_subword', but never return 0. If we can't extract
1373 the required subword, put OP into a register and try again. If that fails,
750c9258 1374 abort. We always validate the address in this case.
23b2ce53
RS
1375
1376 MODE is the mode of OP, in case it is CONST_INT. */
1377
1378rtx
502b8322 1379operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
23b2ce53 1380{
ddef6bc7 1381 rtx result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1382
1383 if (result)
1384 return result;
1385
1386 if (mode != BLKmode && mode != VOIDmode)
77e6b0eb
JC
1387 {
1388 /* If this is a register which can not be accessed by words, copy it
1389 to a pseudo register. */
f8cfc6aa 1390 if (REG_P (op))
77e6b0eb
JC
1391 op = copy_to_reg (op);
1392 else
1393 op = force_reg (mode, op);
1394 }
23b2ce53 1395
ddef6bc7 1396 result = operand_subword (op, offset, 1, mode);
23b2ce53
RS
1397 if (result == 0)
1398 abort ();
1399
1400 return result;
1401}
1402\f
1403/* Given a compare instruction, swap the operands.
1404 A test instruction is changed into a compare of 0 against the operand. */
1405
1406void
502b8322 1407reverse_comparison (rtx insn)
23b2ce53
RS
1408{
1409 rtx body = PATTERN (insn);
1410 rtx comp;
1411
1412 if (GET_CODE (body) == SET)
1413 comp = SET_SRC (body);
1414 else
1415 comp = SET_SRC (XVECEXP (body, 0, 0));
1416
1417 if (GET_CODE (comp) == COMPARE)
1418 {
1419 rtx op0 = XEXP (comp, 0);
1420 rtx op1 = XEXP (comp, 1);
1421 XEXP (comp, 0) = op1;
1422 XEXP (comp, 1) = op0;
1423 }
1424 else
1425 {
c5c76735
JL
1426 rtx new = gen_rtx_COMPARE (VOIDmode,
1427 CONST0_RTX (GET_MODE (comp)), comp);
23b2ce53
RS
1428 if (GET_CODE (body) == SET)
1429 SET_SRC (body) = new;
1430 else
1431 SET_SRC (XVECEXP (body, 0, 0)) = new;
1432 }
1433}
1434\f
998d7deb
RH
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  /* Recurse through nested COMPONENT_REFs so the whole access path is
     normalized.  */
  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR)
	inner = TREE_OPERAND (inner, 0);

      /* A non-decl base is "something variable": represent it as NULL.  */
      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner, TREE_OPERAND (ref, 1),
		  NULL_TREE);
}
173b24b9 1466
2b3493c8
AK
1467/* Returns 1 if both MEM_EXPR can be considered equal
1468 and 0 otherwise. */
1469
1470int
1471mem_expr_equal_p (tree expr1, tree expr2)
1472{
1473 if (expr1 == expr2)
1474 return 1;
1475
1476 if (! expr1 || ! expr2)
1477 return 0;
1478
1479 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1480 return 0;
1481
1482 if (TREE_CODE (expr1) == COMPONENT_REF)
1483 return
1484 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1485 TREE_OPERAND (expr2, 0))
1486 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1487 TREE_OPERAND (expr2, 1));
1488
1489 if (TREE_CODE (expr1) == INDIRECT_REF)
1490 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1491 TREE_OPERAND (expr2, 0));
1492
1493 /* Decls with different pointers can't be equal. */
1494 if (DECL_P (expr1))
1495 return 0;
1496
1497 abort(); /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1498 have been resolved here. */
1499}
1500
173b24b9
RK
/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.

   Computes the alias set, MEM_EXPR, offset, size and alignment from T,
   then installs them all with a single get_mem_attrs call.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
    abort ();

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  /* Flags derivable from the type alone.  */
  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  RTX_UNCHANGING_P (ref)
    |= ((lang_hooks.honor_readonly
	 && (TYPE_READONLY (type) || (t != type && TREE_READONLY (t))))
	|| (! TYPE_P (t) && TREE_CONSTANT (t)));
  MEM_POINTER (ref) = POINTER_TYPE_P (type);
  MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (t);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      maybe_set_unchanging (ref, t);
      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* If this expression can't be addressed (e.g., it contains a reference
	 to a non-addressable field), show we don't change its alias set.  */
      if (! can_address_p (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  /* Accumulate the constant byte offset of possibly-nested array
	     references into OFF_TREE while stripping the ARRAY_REFs.  */
	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound));

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR, convert (sizetype,
								     index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  /* The lowest set bit of the offset bounds the
		     alignment implied by the offset itself.  */
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t2);
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	  else if (flag_argument_noalias > 1
		   && TREE_CODE (t2) == INDIRECT_REF
		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
	    {
	      expr = t2;
	      offset = NULL;
	    }
	}

      /* If this is a Fortran indirect argument reference, record the
	 parameter decl.  */
      else if (flag_argument_noalias > 1
	       && TREE_CODE (t) == INDIRECT_REF
	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
	{
	  expr = t;
	  offset = NULL;
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
1720
/* Set the memory attributes of REF from expression or type T, with no
   outstanding bit position.  OBJECTP is nonzero if we are making a new
   object of this type.  Convenience wrapper around
   set_mem_attributes_minus_bitpos.  */
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
1726
a560d4d4
JH
/* Set the memory attributes of MEM from the register REG: copy REG's
   expression and offset into MEM's attributes, keeping MEM's existing
   alias set, size, alignment and mode.  */

void
set_mem_attrs_from_reg (rtx mem, rtx reg)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
		     GEN_INT (REG_OFFSET (reg)),
		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}
1737
173b24b9
RK
1738/* Set the alias set of MEM to SET. */
1739
1740void
502b8322 1741set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
173b24b9 1742{
68252e27 1743#ifdef ENABLE_CHECKING
173b24b9
RK
1744 /* If the new and old alias sets don't conflict, something is wrong. */
1745 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1746 abort ();
173b24b9
RK
1747#endif
1748
998d7deb 1749 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
10b76d73
RK
1750 MEM_SIZE (mem), MEM_ALIGN (mem),
1751 GET_MODE (mem));
173b24b9 1752}
738cc472 1753
d022d93e 1754/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1755
1756void
502b8322 1757set_mem_align (rtx mem, unsigned int align)
738cc472 1758{
998d7deb 1759 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
10b76d73
RK
1760 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1761 GET_MODE (mem));
738cc472 1762}
1285011e 1763
998d7deb 1764/* Set the expr for MEM to EXPR. */
1285011e
RK
1765
1766void
502b8322 1767set_mem_expr (rtx mem, tree expr)
1285011e
RK
1768{
1769 MEM_ATTRS (mem)
998d7deb 1770 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1285011e
RK
1771 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1772}
998d7deb
RH
1773
1774/* Set the offset of MEM to OFFSET. */
1775
1776void
502b8322 1777set_mem_offset (rtx mem, rtx offset)
998d7deb
RH
1778{
1779 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1780 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1781 GET_MODE (mem));
35aff10b
AM
1782}
1783
1784/* Set the size of MEM to SIZE. */
1785
1786void
502b8322 1787set_mem_size (rtx mem, rtx size)
35aff10b
AM
1788{
1789 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1790 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1791 GET_MODE (mem));
998d7deb 1792}
173b24b9 1793\f
738cc472
RK
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.

   The original MEMREF is returned unchanged when neither mode nor address
   actually differ; otherwise a fresh MEM is built and MEMREF's attributes
   are copied onto it.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  rtx new;

  if (!MEM_P (memref))
    abort ();
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);

  /* Fast path: nothing changes and, if validation was requested, the
     address is already acceptable in MODE.  */
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_p (mode, addr)))
    return memref;

  if (validate)
    {
      /* During/after reload we may not create new pseudos, so the
	 address must already be valid as-is.  */
      if (reload_in_progress || reload_completed)
	{
	  if (! memory_address_p (mode, addr))
	    abort ();
	}
      else
	addr = memory_address (mode, addr);
    }

  /* memory_address may have returned an rtx equal (but not identical)
     to the original; avoid building a redundant MEM in that case.  */
  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new, memref);
  return new;
}
792760b9 1833
738cc472
RK
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  The size
   and alignment attributes are reset to what the new mode implies; the expr
   and offset attributes are cleared.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new);
  unsigned int align;

  /* Derive size/alignment purely from the (possibly new) mode; BLKmode
     gives unknown size and byte alignment.  */
  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    {
      /* ...unless the attributes would still change; then we must not
	 clobber the shared MEMREF and make a private copy first.  */
      if (MEM_ATTRS (memref) == 0
	  || (MEM_EXPR (memref) == NULL
	      && MEM_OFFSET (memref) == NULL
	      && MEM_SIZE (memref) == size
	      && MEM_ALIGN (memref) == align))
	return new;

      new = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new, memref);
    }

  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);

  return new;
}
792760b9 1866
738cc472
RK
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and caller is responsible for adjusting MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_p (mode, addr)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new = change_address_1 (memref, mode, addr, validate);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
				   memoffset, size, memalign, GET_MODE (new));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new;
}
1935
630036c6
JJ
1936/* Return a memory reference like MEMREF, but with its mode changed
1937 to MODE and its address changed to ADDR, which is assumed to be
1938 MEMREF offseted by OFFSET bytes. If VALIDATE is
1939 nonzero, the memory address is forced to be valid. */
1940
1941rtx
502b8322
AJ
1942adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1943 HOST_WIDE_INT offset, int validate)
630036c6
JJ
1944{
1945 memref = change_address_1 (memref, VOIDmode, addr, validate);
1946 return adjust_address_1 (memref, mode, offset, validate, 0);
1947}
1948
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new, addr = XEXP (memref, 0);

  new = simplify_gen_binary (PLUS, Pmode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_p (GET_MODE (memref), new)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new);
  new = change_address_1 (memref, VOIDmode, new, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    return new;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
		     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
		     GET_MODE (new));
  return new;
}
68252e27 1990
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}
738cc472 2004
/* Likewise, but the reference is not required to be valid
   (VALIDATE == 0: the caller takes responsibility for the address).  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.

   The tricky part is keeping the MEM_EXPR/MEM_OFFSET attributes honest:
   a widened access may overrun the recorded object, in which case we walk
   outward through enclosing COMPONENT_REFs until we find an object large
   enough, or give up and drop the expr (and the alias set) entirely.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new);
  rtx memoffset = MEM_OFFSET (new);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    return new;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (expr, 1);
	  tree offset = component_ref_field_offset (expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && INTVAL (memoffset) >= 0)
	    break;

	  /* A variable field offset can't be folded into MEMOFFSET;
	     give up.  */
	  if (! host_integerp (offset, 1))
	    {
	      expr = NULL_TREE;
	      break;
	    }

	  /* Step out to the containing object, translating the offset
	     from field-relative to container-relative.  */
	  expr = TREE_OPERAND (expr, 0);
	  memoffset
	    = (GEN_INT (INTVAL (memoffset)
			+ tree_low_cst (offset, 1)
			+ (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			   / BITS_PER_UNIT)));
	}
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
	       && DECL_SIZE_UNIT (expr)
	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
	       && (! memoffset || INTVAL (memoffset) >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  expr = NULL_TREE;
	  break;
	}
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
				   MEM_ALIGN (new), mode);

  return new;
}
23b2ce53
RS
2095\f
/* Return a newly created CODE_LABEL rtx with a unique label number.
   The number comes from the file-scope counter label_num, which is
   post-incremented on every call.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}
2104\f
2105/* For procedure integration. */
2106
23b2ce53 2107/* Install new pointers to the first and last insns in the chain.
86fe05e0 2108 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2109 Used for an inline-procedure after copying the insn chain. */
2110
2111void
502b8322 2112set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2113{
86fe05e0
RK
2114 rtx insn;
2115
23b2ce53
RS
2116 first_insn = first;
2117 last_insn = last;
86fe05e0
RK
2118 cur_insn_uid = 0;
2119
2120 for (insn = first; insn; insn = NEXT_INSN (insn))
2121 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2122
2123 cur_insn_uid++;
23b2ce53
RS
2124}
2125
49ad7cfa
BS
2126/* Set the last label number found in the current function.
2127 This is used when belatedly compiling an inline function. */
23b2ce53
RS
2128
2129void
502b8322 2130set_new_last_label_num (int last)
23b2ce53 2131{
49ad7cfa
BS
2132 base_label_num = label_num;
2133 last_label_num = last;
23b2ce53 2134}
49ad7cfa 2135\f
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.
   P itself is unused here; only the label-number cache is reset.  */

void
restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
{
  last_label_num = 0;
}
2144\f
750c9258 2145/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2146 structure. This routine should only be called once. */
23b2ce53 2147
fd743bc1
PB
2148static void
2149unshare_all_rtl_1 (tree fndecl, rtx insn)
23b2ce53 2150{
d1b81779 2151 tree decl;
23b2ce53 2152
d1b81779
GK
2153 /* Make sure that virtual parameters are not shared. */
2154 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
19e7881c 2155 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
d1b81779 2156
5c6df058
AO
2157 /* Make sure that virtual stack slots are not shared. */
2158 unshare_all_decls (DECL_INITIAL (fndecl));
2159
d1b81779 2160 /* Unshare just about everything else. */
2c07f13b 2161 unshare_all_rtl_in_chain (insn);
750c9258 2162
23b2ce53
RS
2163 /* Make sure the addresses of stack slots found outside the insn chain
2164 (such as, in DECL_RTL of a variable) are not shared
2165 with the insn chain.
2166
2167 This special care is necessary when the stack slot MEM does not
2168 actually appear in the insn chain. If it does appear, its address
2169 is unshared from all else at that point. */
242b0ce6 2170 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2171}
2172
750c9258 2173/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2174 structure, again. This is a fairly expensive thing to do so it
2175 should be done sparingly. */
2176
2177void
502b8322 2178unshare_all_rtl_again (rtx insn)
d1b81779
GK
2179{
2180 rtx p;
624c87aa
RE
2181 tree decl;
2182
d1b81779 2183 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2184 if (INSN_P (p))
d1b81779
GK
2185 {
2186 reset_used_flags (PATTERN (p));
2187 reset_used_flags (REG_NOTES (p));
2188 reset_used_flags (LOG_LINKS (p));
2189 }
624c87aa 2190
2d4aecb3
AO
2191 /* Make sure that virtual stack slots are not shared. */
2192 reset_used_decls (DECL_INITIAL (cfun->decl));
2193
624c87aa
RE
2194 /* Make sure that virtual parameters are not shared. */
2195 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2196 reset_used_flags (DECL_RTL (decl));
2197
2198 reset_used_flags (stack_slot_list);
2199
fd743bc1
PB
2200 unshare_all_rtl_1 (cfun->decl, insn);
2201}
2202
/* Unshare all invalid shared rtl in the current function's entire
   insn chain.  Convenience wrapper around unshare_all_rtl_1.  */

void
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (current_function_decl, get_insns ());
}
2208
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   Recursively does the same for subexpressions.  INSN is the insn containing
   ORIG, used only for diagnostics.  Aborts on invalid sharing.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      return;
      /* CLOBBERs of hard registers may also be shared; other CLOBBERs
	 fall through to the sharing check below.  */
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      error ("Invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("Shared rtx");
      debug_rtx (x);
      abort ();
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
2314
ba228239 2315/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
2316 sharing in between the subexpressions. */
2317
2318void
2319verify_rtl_sharing (void)
2320{
2321 rtx p;
2322
2323 for (p = get_insns (); p; p = NEXT_INSN (p))
2324 if (INSN_P (p))
2325 {
2326 reset_used_flags (PATTERN (p));
2327 reset_used_flags (REG_NOTES (p));
2328 reset_used_flags (LOG_LINKS (p));
2329 }
2330
2331 for (p = get_insns (); p; p = NEXT_INSN (p))
2332 if (INSN_P (p))
2333 {
2334 verify_rtx_sharing (PATTERN (p), p);
2335 verify_rtx_sharing (REG_NOTES (p), p);
2336 verify_rtx_sharing (LOG_LINKS (p), p);
2337 }
2338}
2339
d1b81779
GK
2340/* Go through all the RTL insn bodies and copy any invalid shared structure.
2341 Assumes the mark bits are cleared at entry. */
2342
2c07f13b
JH
2343void
2344unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2345{
2346 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2347 if (INSN_P (insn))
d1b81779
GK
2348 {
2349 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2350 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2351 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2352 }
2353}
2354
5c6df058
AO
2355/* Go through all virtual stack slots of a function and copy any
2356 shared structure. */
2357static void
502b8322 2358unshare_all_decls (tree blk)
5c6df058
AO
2359{
2360 tree t;
2361
2362 /* Copy shared decls. */
2363 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2364 if (DECL_RTL_SET_P (t))
2365 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
5c6df058
AO
2366
2367 /* Now process sub-blocks. */
2368 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2369 unshare_all_decls (t);
2370}
2371
2d4aecb3 2372/* Go through all virtual stack slots of a function and mark them as
30f7a378 2373 not shared. */
2d4aecb3 2374static void
502b8322 2375reset_used_decls (tree blk)
2d4aecb3
AO
2376{
2377 tree t;
2378
2379 /* Mark decls. */
2380 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2381 if (DECL_RTL_SET_P (t))
2382 reset_used_flags (DECL_RTL (t));
2d4aecb3
AO
2383
2384 /* Now process sub-blocks. */
2385 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2386 reset_used_decls (t);
2387}
2388
/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
   placed in the result directly, rather than being copied.  MAY_SHARE is
   either a MEM or an EXPR_LIST of MEMs.  */

rtx
copy_most_rtx (rtx orig, rtx may_share)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  /* Objects the caller explicitly allows to be shared are returned
     as-is, whether MAY_SHARE is the object itself or a list holding it.  */
  if (orig == may_share
      || (GET_CODE (may_share) == EXPR_LIST
	  && in_expr_list_p (may_share, orig)))
    return orig;

  code = GET_CODE (orig);

  /* These codes are always freely sharable; no copy needed.  */
  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, GET_MODE (orig));
  /* Carry over the rtx flag bits individually.  */
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
  RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
  RTX_FLAG (copy, return_val) = RTX_FLAG (orig, return_val);

  /* Copy each operand according to its format code, recursing on
     subexpressions ('e') and vectors ('E'/'V').  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (copy, i) = XEXP (orig, i);
	  if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
	    XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
	  break;

	case 'u':
	  XEXP (copy, i) = XEXP (orig, i);
	  break;

	case 'E':
	case 'V':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_most_rtx (XVECEXP (orig, i, j), may_share);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'n':
	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	case 's':
	case 'S':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	default:
	  abort ();
	}
    }
  return copy;
}
2488
23b2ce53 2489/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2490 Recursively does the same for subexpressions. Uses
2491 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2492
2493rtx
502b8322 2494copy_rtx_if_shared (rtx orig)
23b2ce53 2495{
32b32b16
AP
2496 copy_rtx_if_shared_1 (&orig);
2497 return orig;
2498}
2499
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.

   The final 'e'/vector operand is handled iteratively (via the `repeat'
   label) rather than recursively, bounding stack depth on long chains.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* CLOBBERs of hard registers may be shared; others fall through
	 to the copy logic below.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
	 a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	return;
      break;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      rtx copy;

      copy = rtx_alloc (code);
      memcpy (copy, x, RTX_SIZE (code));
      x = copy;
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Defer the most recent 'e' operand; it becomes the iterative
	     tail call after the loop.  */
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      /* Process the deferred last operand iteratively.  */
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
2629
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.

   The last 'e' operand is handled iteratively (via the `repeat' label)
   to bound recursion depth on long operand chains.  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Tail-call elimination: iterate on the final operand.  */
	  if (i == length-1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  reset_used_flags (XEXP (x, i));
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    reset_used_flags (XVECEXP (x, i, j));
	  break;
	}
    }
}
2c07f13b
JH
2702
2703/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2704 to look for shared sub-parts. */
2705
2706void
2707set_used_flags (rtx x)
2708{
2709 int i, j;
2710 enum rtx_code code;
2711 const char *format_ptr;
2712
2713 if (x == 0)
2714 return;
2715
2716 code = GET_CODE (x);
2717
2718 /* These types may be freely shared so we needn't do any resetting
2719 for them. */
2720
2721 switch (code)
2722 {
2723 case REG:
2724 case QUEUED:
2725 case CONST_INT:
2726 case CONST_DOUBLE:
2727 case CONST_VECTOR:
2728 case SYMBOL_REF:
2729 case CODE_LABEL:
2730 case PC:
2731 case CC0:
2732 return;
2733
2734 case INSN:
2735 case JUMP_INSN:
2736 case CALL_INSN:
2737 case NOTE:
2738 case LABEL_REF:
2739 case BARRIER:
2740 /* The chain of insns is not being copied. */
2741 return;
2742
2743 default:
2744 break;
2745 }
2746
2747 RTX_FLAG (x, used) = 1;
2748
2749 format_ptr = GET_RTX_FORMAT (code);
2750 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2751 {
2752 switch (*format_ptr++)
2753 {
2754 case 'e':
2755 set_used_flags (XEXP (x, i));
2756 break;
2757
2758 case 'E':
2759 for (j = 0; j < XVECLEN (x, i); j++)
2760 set_used_flags (XVECEXP (x, i, j));
2761 break;
2762 }
2763 }
2764}
23b2ce53
RS
2765\f
2766/* Copy X if necessary so that it won't be altered by changes in OTHER.
2767 Return X or the rtx for the pseudo reg the value of X was copied into.
2768 OTHER must be valid as a SET_DEST. */
2769
2770rtx
502b8322 2771make_safe_from (rtx x, rtx other)
23b2ce53
RS
2772{
2773 while (1)
2774 switch (GET_CODE (other))
2775 {
2776 case SUBREG:
2777 other = SUBREG_REG (other);
2778 break;
2779 case STRICT_LOW_PART:
2780 case SIGN_EXTEND:
2781 case ZERO_EXTEND:
2782 other = XEXP (other, 0);
2783 break;
2784 default:
2785 goto done;
2786 }
2787 done:
3c0cb5de 2788 if ((MEM_P (other)
23b2ce53 2789 && ! CONSTANT_P (x)
f8cfc6aa 2790 && !REG_P (x)
23b2ce53 2791 && GET_CODE (x) != SUBREG)
f8cfc6aa 2792 || (REG_P (other)
23b2ce53
RS
2793 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2794 || reg_mentioned_p (other, x))))
2795 {
2796 rtx temp = gen_reg_rtx (GET_MODE (x));
2797 emit_move_insn (temp, x);
2798 return temp;
2799 }
2800 return x;
2801}
2802\f
2803/* Emission of insns (adding them to the doubly-linked list). */
2804
/* Return the first insn of the current sequence or current function
   (the head of the first_insn/last_insn chain).  */

rtx
get_insns (void)
{
  return first_insn;
}
2812
3dec4024
JH
2813/* Specify a new insn as the first in the chain. */
2814
2815void
502b8322 2816set_first_insn (rtx insn)
3dec4024
JH
2817{
2818 if (PREV_INSN (insn) != 0)
2819 abort ();
2820 first_insn = insn;
2821}
2822
/* Return the last insn emitted in current sequence or current function
   (the tail of the first_insn/last_insn chain).  */

rtx
get_last_insn (void)
{
  return last_insn;
}
2830
2831/* Specify a new insn as the last in the chain. */
2832
2833void
502b8322 2834set_last_insn (rtx insn)
23b2ce53
RS
2835{
2836 if (NEXT_INSN (insn) != 0)
2837 abort ();
2838 last_insn = insn;
2839}
2840
2841/* Return the last insn emitted, even if it is in a sequence now pushed. */
2842
2843rtx
502b8322 2844get_last_insn_anywhere (void)
23b2ce53
RS
2845{
2846 struct sequence_stack *stack;
2847 if (last_insn)
2848 return last_insn;
49ad7cfa 2849 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2850 if (stack->last != 0)
2851 return stack->last;
2852 return 0;
2853}
2854
2a496e8b
JDA
2855/* Return the first nonnote insn emitted in current sequence or current
2856 function. This routine looks inside SEQUENCEs. */
2857
2858rtx
502b8322 2859get_first_nonnote_insn (void)
2a496e8b
JDA
2860{
2861 rtx insn = first_insn;
2862
2863 while (insn)
2864 {
2865 insn = next_insn (insn);
2866 if (insn == 0 || GET_CODE (insn) != NOTE)
2867 break;
2868 }
2869
2870 return insn;
2871}
2872
2873/* Return the last nonnote insn emitted in current sequence or current
2874 function. This routine looks inside SEQUENCEs. */
2875
2876rtx
502b8322 2877get_last_nonnote_insn (void)
2a496e8b
JDA
2878{
2879 rtx insn = last_insn;
2880
2881 while (insn)
2882 {
2883 insn = previous_insn (insn);
2884 if (insn == 0 || GET_CODE (insn) != NOTE)
2885 break;
2886 }
2887
2888 return insn;
2889}
2890
23b2ce53
RS
/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  /* CUR_INSN_UID is the next uid to be handed out, so every existing
     insn's uid is strictly smaller.  */
  return cur_insn_uid;
}
aeeeda03 2898
673b5311
MM
/* Renumber instructions so that no instruction UIDs are wasted.
   If STREAM is non-null, log each renumbering to it.  */

void
renumber_insns (FILE *stream)
{
  rtx insn;

  /* If we're not supposed to renumber instructions, don't.  */
  if (!flag_renumber_insns)
    return;

  /* If there aren't that many instructions, then it's not really
     worth renumbering them.  */
  if (flag_renumber_insns == 1 && get_max_uid () < 25000)
    return;

  /* Restart uid assignment from 1 and hand out consecutive uids in
     chain order.  */
  cur_insn_uid = 1;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (stream)
	fprintf (stream, "Renumbering insn %d to %d\n",
		 INSN_UID (insn), cur_insn_uid);
      INSN_UID (insn) = cur_insn_uid++;
    }
}
23b2ce53
RS
2925\f
2926/* Return the next insn. If it is a SEQUENCE, return the first insn
2927 of the sequence. */
2928
2929rtx
502b8322 2930next_insn (rtx insn)
23b2ce53
RS
2931{
2932 if (insn)
2933 {
2934 insn = NEXT_INSN (insn);
2935 if (insn && GET_CODE (insn) == INSN
2936 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2937 insn = XVECEXP (PATTERN (insn), 0, 0);
2938 }
2939
2940 return insn;
2941}
2942
2943/* Return the previous insn. If it is a SEQUENCE, return the last insn
2944 of the sequence. */
2945
2946rtx
502b8322 2947previous_insn (rtx insn)
23b2ce53
RS
2948{
2949 if (insn)
2950 {
2951 insn = PREV_INSN (insn);
2952 if (insn && GET_CODE (insn) == INSN
2953 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2954 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2955 }
2956
2957 return insn;
2958}
2959
2960/* Return the next insn after INSN that is not a NOTE. This routine does not
2961 look inside SEQUENCEs. */
2962
2963rtx
502b8322 2964next_nonnote_insn (rtx insn)
23b2ce53
RS
2965{
2966 while (insn)
2967 {
2968 insn = NEXT_INSN (insn);
2969 if (insn == 0 || GET_CODE (insn) != NOTE)
2970 break;
2971 }
2972
2973 return insn;
2974}
2975
2976/* Return the previous insn before INSN that is not a NOTE. This routine does
2977 not look inside SEQUENCEs. */
2978
2979rtx
502b8322 2980prev_nonnote_insn (rtx insn)
23b2ce53
RS
2981{
2982 while (insn)
2983 {
2984 insn = PREV_INSN (insn);
2985 if (insn == 0 || GET_CODE (insn) != NOTE)
2986 break;
2987 }
2988
2989 return insn;
2990}
2991
2992/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2993 or 0, if there is none. This routine does not look inside
0f41302f 2994 SEQUENCEs. */
23b2ce53
RS
2995
2996rtx
502b8322 2997next_real_insn (rtx insn)
23b2ce53
RS
2998{
2999 while (insn)
3000 {
3001 insn = NEXT_INSN (insn);
bb8a619e 3002 if (insn == 0 || INSN_P (insn))
23b2ce53
RS
3003 break;
3004 }
3005
3006 return insn;
3007}
3008
3009/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3010 or 0, if there is none. This routine does not look inside
3011 SEQUENCEs. */
3012
3013rtx
502b8322 3014prev_real_insn (rtx insn)
23b2ce53
RS
3015{
3016 while (insn)
3017 {
3018 insn = PREV_INSN (insn);
bb8a619e 3019 if (insn == 0 || INSN_P (insn))
23b2ce53
RS
3020 break;
3021 }
3022
3023 return insn;
3024}
3025
ee960939
OH
3026/* Return the last CALL_INSN in the current list, or 0 if there is none.
3027 This routine does not look inside SEQUENCEs. */
3028
3029rtx
502b8322 3030last_call_insn (void)
ee960939
OH
3031{
3032 rtx insn;
3033
3034 for (insn = get_last_insn ();
3035 insn && GET_CODE (insn) != CALL_INSN;
3036 insn = PREV_INSN (insn))
3037 ;
3038
3039 return insn;
3040}
3041
23b2ce53
RS
/* Nonzero if INSN "really does something": any CALL_INSN or JUMP_INSN
   qualifies, and a plain INSN qualifies unless, after reload, its
   pattern is a bare USE or CLOBBER (which generate no code).  Until
   reload has completed this matches INSN_P for real insns.  */

int
active_insn_p (rtx insn)
{
  return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
	  || (GET_CODE (insn) == INSN
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}
3055
23b2ce53 3056rtx
502b8322 3057next_active_insn (rtx insn)
23b2ce53
RS
3058{
3059 while (insn)
3060 {
3061 insn = NEXT_INSN (insn);
69732dcb 3062 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
3063 break;
3064 }
3065
3066 return insn;
3067}
3068
3069/* Find the last insn before INSN that really does something. This routine
3070 does not look inside SEQUENCEs. Until reload has completed, this is the
3071 same as prev_real_insn. */
3072
3073rtx
502b8322 3074prev_active_insn (rtx insn)
23b2ce53
RS
3075{
3076 while (insn)
3077 {
3078 insn = PREV_INSN (insn);
69732dcb 3079 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
3080 break;
3081 }
3082
3083 return insn;
3084}
3085
3086/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3087
3088rtx
502b8322 3089next_label (rtx insn)
23b2ce53
RS
3090{
3091 while (insn)
3092 {
3093 insn = NEXT_INSN (insn);
3094 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3095 break;
3096 }
3097
3098 return insn;
3099}
3100
3101/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3102
3103rtx
502b8322 3104prev_label (rtx insn)
23b2ce53
RS
3105{
3106 while (insn)
3107 {
3108 insn = PREV_INSN (insn);
3109 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3110 break;
3111 }
3112
3113 return insn;
3114}
6c2511d3
RS
3115
/* Return the last label to mark the same position as LABEL.  Return null
   if LABEL itself is null.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  /* Scan forward over notes/barriers/labels (anything that is not a
     real insn), remembering the most recent label seen; all such
     labels mark the same code position.  */
  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
23b2ce53
RS
3130\f
3131#ifdef HAVE_cc0
c572e5ba
JVA
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.
   NOTE(review): assumes a non-note insn follows INSN (the CC0 user);
   next_nonnote_insn returning NULL would be dereferenced — callers
   must guarantee the user exists.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  /* If the user is a SEQUENCE (filled delay slot), the real user is
     the first insn inside it.  */
  if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  /* Cross-link the pair: the user records its setter, the setter
     records its user.  */
  REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
					REG_NOTES (user));
  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
}
3147
23b2ce53
RS
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  /* An explicit note (set by link_cc0_insns) takes precedence.  */
  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  /* Look inside a filled delay-slot SEQUENCE.  */
  if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Only accept the candidate if it actually mentions cc0.  */
  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}
3174
/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  Aborts if the previous non-note insn
   does not in fact set cc0.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  /* An explicit note (set by link_cc0_insns) takes precedence.  */
  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  /* The setter must be immediately adjacent; anything else is an
     internal consistency error.  */
  if (! sets_cc0_p (PATTERN (insn)))
    abort ();

  return insn;
}
3192#endif
e5bef2e4
HB
3193
/* Increment the label uses for all labels present in rtx.
   Recursively walks X; every LABEL_REF to a CODE_LABEL bumps that
   label's LABEL_NUSES count so the label is not deleted as unused.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  /* Recurse over all rtx ('e') and rtx-vector ('E') operands of X,
     per the format string for this rtx code.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}
3217
23b2ce53
RS
3218\f
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx tem;
  rtx note, seq;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* Export TRIAL's branch probability to the splitter via the global
     split_branch_probability, then reset it afterwards.  */
  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && GET_CODE (after) == BARRIER)
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* Mark labels referenced by the new jumps, and transfer the branch
     probability when exactly one jump was produced.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN)
	{
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      if (njumps != 1)
		abort ();
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_BR_PROB,
				     GEN_INT (probability),
				     REG_NOTES (insn));
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (GET_CODE (trial) == CALL_INSN)
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN)
	  {
	    /* Append TRIAL's usage list to the end of the new call's
	       existing list.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  /* EH region notes go on every insn that can throw: calls,
	     and (with -fnon-call-exceptions) trapping insns.  */
	  insn = insn_last;
	  while (insn != NULL_RTX)
	    {
	      if (GET_CODE (insn) == CALL_INSN
		  || (flag_non_call_exceptions
		      && may_trap_p (PATTERN (insn))))
		REG_NOTES (insn)
		  = gen_rtx_EXPR_LIST (REG_EH_REGION,
				       XEXP (note, 0),
				       REG_NOTES (insn));
	      insn = PREV_INSN (insn);
	    }
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_ALWAYS_RETURN:
	  /* Call-related notes are copied to every new CALL_INSN.  */
	  insn = insn_last;
	  while (insn != NULL_RTX)
	    {
	      if (GET_CODE (insn) == CALL_INSN)
		REG_NOTES (insn)
		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
				       XEXP (note, 0),
				       REG_NOTES (insn));
	      insn = PREV_INSN (insn);
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  /* Non-local-goto notes are copied to every new JUMP_INSN.  */
	  insn = insn_last;
	  while (insn != NULL_RTX)
	    {
	      if (GET_CODE (insn) == JUMP_INSN)
		REG_NOTES (insn)
		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
				       XEXP (note, 0),
				       REG_NOTES (insn));
	      insn = PREV_INSN (insn);
	    }
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (GET_CODE (trial) == INSN)
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  if (GET_CODE (insn) == INSN)
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  /* Splice the new sequence in after TRIAL, carrying over TRIAL's
     source locator, then delete TRIAL itself.  */
  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}
3401\f
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  The insn is NOT added to any
   chain; callers do that via add_insn and friends.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* Not yet recognized.  */
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = 0;
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  /* Catch the common mistake of emitting a jump pattern through
     emit_insn instead of emit_jump_insn.  */
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
3434
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.
   JUMP_LABEL starts out null; it is filled in by mark_jump_label.  */

static rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* Not yet recognized.  */
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = 0;
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
aff507f4 3455
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.
   CALL_INSN_FUNCTION_USAGE starts out empty; see add_function_usage_to.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;	/* Not yet recognized.  */
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = 0;
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
23b2ce53
RS
3476\f
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  /* An empty chain: INSN becomes the head as well as the tail.  */
  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}
3494
a0ae8e8d
RK
3495/* Add INSN into the doubly-linked list after insn AFTER. This and
3496 the next should be the only functions called to insert an insn once
ba213285 3497 delay slots have been filled since only they know how to update a
a0ae8e8d 3498 SEQUENCE. */
23b2ce53
RS
3499
3500void
502b8322 3501add_insn_after (rtx insn, rtx after)
23b2ce53
RS
3502{
3503 rtx next = NEXT_INSN (after);
3c030e88 3504 basic_block bb;
23b2ce53 3505
6782074d 3506 if (optimize && INSN_DELETED_P (after))
ba213285
RK
3507 abort ();
3508
23b2ce53
RS
3509 NEXT_INSN (insn) = next;
3510 PREV_INSN (insn) = after;
3511
3512 if (next)
3513 {
3514 PREV_INSN (next) = insn;
3515 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3516 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3517 }
3518 else if (last_insn == after)
3519 last_insn = insn;
3520 else
3521 {
49ad7cfa 3522 struct sequence_stack *stack = seq_stack;
23b2ce53
RS
3523 /* Scan all pending sequences too. */
3524 for (; stack; stack = stack->next)
3525 if (after == stack->last)
fef0509b
RK
3526 {
3527 stack->last = insn;
3528 break;
3529 }
a0ae8e8d
RK
3530
3531 if (stack == 0)
3532 abort ();
23b2ce53
RS
3533 }
3534
ba4f7968
JH
3535 if (GET_CODE (after) != BARRIER
3536 && GET_CODE (insn) != BARRIER
3c030e88
JH
3537 && (bb = BLOCK_FOR_INSN (after)))
3538 {
3539 set_block_for_insn (insn, bb);
38c1593d 3540 if (INSN_P (insn))
68252e27 3541 bb->flags |= BB_DIRTY;
3c030e88 3542 /* Should not happen as first in the BB is always
a1f300c0 3543 either NOTE or LABEL. */
a813c111 3544 if (BB_END (bb) == after
3c030e88
JH
3545 /* Avoid clobbering of structure when creating new BB. */
3546 && GET_CODE (insn) != BARRIER
3547 && (GET_CODE (insn) != NOTE
3548 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
a813c111 3549 BB_END (bb) = insn;
3c030e88
JH
3550 }
3551
23b2ce53
RS
3552 NEXT_INSN (after) = insn;
3553 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3554 {
3555 rtx sequence = PATTERN (after);
3556 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3557 }
3558}
3559
a0ae8e8d
RK
3560/* Add INSN into the doubly-linked list before insn BEFORE. This and
3561 the previous should be the only functions called to insert an insn once
ba213285 3562 delay slots have been filled since only they know how to update a
a0ae8e8d
RK
3563 SEQUENCE. */
3564
3565void
502b8322 3566add_insn_before (rtx insn, rtx before)
a0ae8e8d
RK
3567{
3568 rtx prev = PREV_INSN (before);
3c030e88 3569 basic_block bb;
a0ae8e8d 3570
6782074d 3571 if (optimize && INSN_DELETED_P (before))
ba213285
RK
3572 abort ();
3573
a0ae8e8d
RK
3574 PREV_INSN (insn) = prev;
3575 NEXT_INSN (insn) = before;
3576
3577 if (prev)
3578 {
3579 NEXT_INSN (prev) = insn;
3580 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3581 {
3582 rtx sequence = PATTERN (prev);
3583 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3584 }
3585 }
3586 else if (first_insn == before)
3587 first_insn = insn;
3588 else
3589 {
49ad7cfa 3590 struct sequence_stack *stack = seq_stack;
a0ae8e8d
RK
3591 /* Scan all pending sequences too. */
3592 for (; stack; stack = stack->next)
3593 if (before == stack->first)
fef0509b
RK
3594 {
3595 stack->first = insn;
3596 break;
3597 }
a0ae8e8d
RK
3598
3599 if (stack == 0)
3600 abort ();
3601 }
3602
ba4f7968
JH
3603 if (GET_CODE (before) != BARRIER
3604 && GET_CODE (insn) != BARRIER
3c030e88
JH
3605 && (bb = BLOCK_FOR_INSN (before)))
3606 {
3607 set_block_for_insn (insn, bb);
38c1593d 3608 if (INSN_P (insn))
68252e27 3609 bb->flags |= BB_DIRTY;
3c030e88 3610 /* Should not happen as first in the BB is always
a1f300c0 3611 either NOTE or LABEl. */
a813c111 3612 if (BB_HEAD (bb) == insn
3c030e88
JH
3613 /* Avoid clobbering of structure when creating new BB. */
3614 && GET_CODE (insn) != BARRIER
3615 && (GET_CODE (insn) != NOTE
3616 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3617 abort ();
3618 }
3619
a0ae8e8d
RK
3620 PREV_INSN (before) = insn;
3621 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3622 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3623}
3624
89e99eea
DB
3625/* Remove an insn from its doubly-linked list. This function knows how
3626 to handle sequences. */
3627void
502b8322 3628remove_insn (rtx insn)
89e99eea
DB
3629{
3630 rtx next = NEXT_INSN (insn);
3631 rtx prev = PREV_INSN (insn);
53c17031
JH
3632 basic_block bb;
3633
89e99eea
DB
3634 if (prev)
3635 {
3636 NEXT_INSN (prev) = next;
3637 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3638 {
3639 rtx sequence = PATTERN (prev);
3640 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3641 }
3642 }
3643 else if (first_insn == insn)
3644 first_insn = next;
3645 else
3646 {
49ad7cfa 3647 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3648 /* Scan all pending sequences too. */
3649 for (; stack; stack = stack->next)
3650 if (insn == stack->first)
3651 {
3652 stack->first = next;
3653 break;
3654 }
3655
3656 if (stack == 0)
3657 abort ();
3658 }
3659
3660 if (next)
3661 {
3662 PREV_INSN (next) = prev;
3663 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3664 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3665 }
3666 else if (last_insn == insn)
3667 last_insn = prev;
3668 else
3669 {
49ad7cfa 3670 struct sequence_stack *stack = seq_stack;
89e99eea
DB
3671 /* Scan all pending sequences too. */
3672 for (; stack; stack = stack->next)
3673 if (insn == stack->last)
3674 {
3675 stack->last = prev;
3676 break;
3677 }
3678
3679 if (stack == 0)
3680 abort ();
3681 }
ba4f7968 3682 if (GET_CODE (insn) != BARRIER
53c17031
JH
3683 && (bb = BLOCK_FOR_INSN (insn)))
3684 {
38c1593d 3685 if (INSN_P (insn))
68252e27 3686 bb->flags |= BB_DIRTY;
a813c111 3687 if (BB_HEAD (bb) == insn)
53c17031 3688 {
3bf1e984
RK
3689 /* Never ever delete the basic block note without deleting whole
3690 basic block. */
53c17031
JH
3691 if (GET_CODE (insn) == NOTE)
3692 abort ();
a813c111 3693 BB_HEAD (bb) = next;
53c17031 3694 }
a813c111
SB
3695 if (BB_END (bb) == insn)
3696 BB_END (bb) = prev;
53c17031 3697 }
89e99eea
DB
3698}
3699
ee960939
OH
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.
   CALL_INSN must actually be a CALL_INSN; aborts otherwise.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing EXPR_LIST.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
3723
23b2ce53
RS
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  Passing 0 empties the chain.
   The insns are only unlinked, not freed.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}
3736
5dab5552
MS
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}
3770
3c030e88
JH
/* Same as function above, but take care to update BB boundaries.
   Marks affected blocks dirty and re-homes the moved insns in the
   destination block.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (GET_CODE (after) != BARRIER
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      bb->flags |= BB_DIRTY;

      /* If the insns came from a different block, fix that block's
	 end pointer if it pointed at TO.  */
      if (GET_CODE (from) != BARRIER
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  bb2->flags |= BB_DIRTY;
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      /* Every moved insn now belongs to the destination block.  */
      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	set_block_for_insn (x, bb);
    }
}
3801
23b2ce53
RS
/* Return the line note insn preceding INSN (or INSN itself if it is
   one).  Returns 0 when line numbers are not being recorded or no
   line note precedes INSN.  */

static rtx
find_line_note (rtx insn)
{
  if (no_line_numbers)
    return 0;

  /* Line-number notes have NOTE_LINE_NUMBER >= 0; administrative
     notes use negative codes.  */
  for (; insn; insn = PREV_INSN (insn))
    if (GET_CODE (insn) == NOTE
	&& NOTE_LINE_NUMBER (insn) >= 0)
      break;

  return insn;
}
3817
64b59a80 3818/* Remove unnecessary notes from the instruction stream. */
aeeeda03
MM
3819
3820void
502b8322 3821remove_unnecessary_notes (void)
aeeeda03 3822{
542d73ae
RH
3823 rtx block_stack = NULL_RTX;
3824 rtx eh_stack = NULL_RTX;
aeeeda03
MM
3825 rtx insn;
3826 rtx next;
542d73ae 3827 rtx tmp;
aeeeda03 3828
116eebd6
MM
3829 /* We must not remove the first instruction in the function because
3830 the compiler depends on the first instruction being a note. */
aeeeda03
MM
3831 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3832 {
3833 /* Remember what's next. */
3834 next = NEXT_INSN (insn);
3835
3836 /* We're only interested in notes. */
3837 if (GET_CODE (insn) != NOTE)
3838 continue;
3839
542d73ae 3840 switch (NOTE_LINE_NUMBER (insn))
18c038b9 3841 {
542d73ae 3842 case NOTE_INSN_DELETED:
e803a64b 3843 case NOTE_INSN_LOOP_END_TOP_COND:
542d73ae
RH
3844 remove_insn (insn);
3845 break;
3846
3847 case NOTE_INSN_EH_REGION_BEG:
3848 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3849 break;
3850
3851 case NOTE_INSN_EH_REGION_END:
3852 /* Too many end notes. */
3853 if (eh_stack == NULL_RTX)
3854 abort ();
3855 /* Mismatched nesting. */
3856 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3857 abort ();
3858 tmp = eh_stack;
3859 eh_stack = XEXP (eh_stack, 1);
3860 free_INSN_LIST_node (tmp);
3861 break;
3862
3863 case NOTE_INSN_BLOCK_BEG:
3864 /* By now, all notes indicating lexical blocks should have
3865 NOTE_BLOCK filled in. */
3866 if (NOTE_BLOCK (insn) == NULL_TREE)
3867 abort ();
3868 block_stack = alloc_INSN_LIST (insn, block_stack);
3869 break;
3870
3871 case NOTE_INSN_BLOCK_END:
3872 /* Too many end notes. */
3873 if (block_stack == NULL_RTX)
3874 abort ();
3875 /* Mismatched nesting. */
3876 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3877 abort ();
3878 tmp = block_stack;
3879 block_stack = XEXP (block_stack, 1);
3880 free_INSN_LIST_node (tmp);
3881
18c038b9
MM
3882 /* Scan back to see if there are any non-note instructions
3883 between INSN and the beginning of this block. If not,
3884 then there is no PC range in the generated code that will
3885 actually be in this block, so there's no point in
3886 remembering the existence of the block. */
68252e27 3887 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
18c038b9
MM
3888 {
3889 /* This block contains a real instruction. Note that we
3890 don't include labels; if the only thing in the block
3891 is a label, then there are still no PC values that
3892 lie within the block. */
542d73ae 3893 if (INSN_P (tmp))
18c038b9
MM
3894 break;
3895
3896 /* We're only interested in NOTEs. */
542d73ae 3897 if (GET_CODE (tmp) != NOTE)
18c038b9
MM
3898 continue;
3899
542d73ae 3900 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
18c038b9 3901 {
e1772ac0
NB
3902 /* We just verified that this BLOCK matches us with
3903 the block_stack check above. Never delete the
3904 BLOCK for the outermost scope of the function; we
3905 can refer to names from that scope even if the
3906 block notes are messed up. */
3907 if (! is_body_block (NOTE_BLOCK (insn))
3908 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
deb5e280 3909 {
542d73ae 3910 remove_insn (tmp);
deb5e280
JM
3911 remove_insn (insn);
3912 }
18c038b9
MM
3913 break;
3914 }
542d73ae 3915 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
18c038b9
MM
3916 /* There's a nested block. We need to leave the
3917 current block in place since otherwise the debugger
3918 wouldn't be able to show symbols from our block in
3919 the nested block. */
3920 break;
3921 }
3922 }
aeeeda03 3923 }
542d73ae
RH
3924
3925 /* Too many begin notes. */
3926 if (block_stack || eh_stack)
3927 abort ();
aeeeda03
MM
3928}
3929
23b2ce53 3930\f
2f937369
DM
3931/* Emit insn(s) of given code and pattern
3932 at a specified place within the doubly-linked list.
23b2ce53 3933
2f937369
DM
3934 All of the emit_foo global entry points accept an object
3935 X which is either an insn list or a PATTERN of a single
3936 instruction.
23b2ce53 3937
2f937369
DM
3938 There are thus a few canonical ways to generate code and
3939 emit it at a specific place in the instruction stream. For
3940 example, consider the instruction named SPOT and the fact that
3941 we would like to emit some instructions before SPOT. We might
3942 do it like this:
23b2ce53 3943
2f937369
DM
3944 start_sequence ();
3945 ... emit the new instructions ...
3946 insns_head = get_insns ();
3947 end_sequence ();
23b2ce53 3948
2f937369 3949 emit_insn_before (insns_head, SPOT);
23b2ce53 3950
2f937369
DM
3951 It used to be common to generate SEQUENCE rtl instead, but that
3952 is a relic of the past which no longer occurs. The reason is that
3953 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
3954 generated would almost certainly die right after it was created. */
23b2ce53 3955
2f937369 3956/* Make X be output before the instruction BEFORE. */
23b2ce53
RS
3957
3958rtx
502b8322 3959emit_insn_before (rtx x, rtx before)
23b2ce53 3960{
2f937369 3961 rtx last = before;
b3694847 3962 rtx insn;
23b2ce53 3963
2f937369
DM
3964#ifdef ENABLE_RTL_CHECKING
3965 if (before == NULL_RTX)
3966 abort ();
3967#endif
3968
3969 if (x == NULL_RTX)
3970 return last;
3971
3972 switch (GET_CODE (x))
23b2ce53 3973 {
2f937369
DM
3974 case INSN:
3975 case JUMP_INSN:
3976 case CALL_INSN:
3977 case CODE_LABEL:
3978 case BARRIER:
3979 case NOTE:
3980 insn = x;
3981 while (insn)
3982 {
3983 rtx next = NEXT_INSN (insn);
3984 add_insn_before (insn, before);
3985 last = insn;
3986 insn = next;
3987 }
3988 break;
3989
3990#ifdef ENABLE_RTL_CHECKING
3991 case SEQUENCE:
3992 abort ();
3993 break;
3994#endif
3995
3996 default:
3997 last = make_insn_raw (x);
3998 add_insn_before (last, before);
3999 break;
23b2ce53
RS
4000 }
4001
2f937369 4002 return last;
23b2ce53
RS
4003}
4004
2f937369 4005/* Make an instruction with body X and code JUMP_INSN
23b2ce53
RS
4006 and output it before the instruction BEFORE. */
4007
4008rtx
502b8322 4009emit_jump_insn_before (rtx x, rtx before)
23b2ce53 4010{
d950dee3 4011 rtx insn, last = NULL_RTX;
aff507f4 4012
2f937369
DM
4013#ifdef ENABLE_RTL_CHECKING
4014 if (before == NULL_RTX)
4015 abort ();
4016#endif
4017
4018 switch (GET_CODE (x))
aff507f4 4019 {
2f937369
DM
4020 case INSN:
4021 case JUMP_INSN:
4022 case CALL_INSN:
4023 case CODE_LABEL:
4024 case BARRIER:
4025 case NOTE:
4026 insn = x;
4027 while (insn)
4028 {
4029 rtx next = NEXT_INSN (insn);
4030 add_insn_before (insn, before);
4031 last = insn;
4032 insn = next;
4033 }
4034 break;
4035
4036#ifdef ENABLE_RTL_CHECKING
4037 case SEQUENCE:
4038 abort ();
4039 break;
4040#endif
4041
4042 default:
4043 last = make_jump_insn_raw (x);
4044 add_insn_before (last, before);
4045 break;
aff507f4
RK
4046 }
4047
2f937369 4048 return last;
23b2ce53
RS
4049}
4050
2f937369 4051/* Make an instruction with body X and code CALL_INSN
969d70ca
JH
4052 and output it before the instruction BEFORE. */
4053
4054rtx
502b8322 4055emit_call_insn_before (rtx x, rtx before)
969d70ca 4056{
d950dee3 4057 rtx last = NULL_RTX, insn;
969d70ca 4058
2f937369
DM
4059#ifdef ENABLE_RTL_CHECKING
4060 if (before == NULL_RTX)
4061 abort ();
4062#endif
4063
4064 switch (GET_CODE (x))
969d70ca 4065 {
2f937369
DM
4066 case INSN:
4067 case JUMP_INSN:
4068 case CALL_INSN:
4069 case CODE_LABEL:
4070 case BARRIER:
4071 case NOTE:
4072 insn = x;
4073 while (insn)
4074 {
4075 rtx next = NEXT_INSN (insn);
4076 add_insn_before (insn, before);
4077 last = insn;
4078 insn = next;
4079 }
4080 break;
4081
4082#ifdef ENABLE_RTL_CHECKING
4083 case SEQUENCE:
4084 abort ();
4085 break;
4086#endif
4087
4088 default:
4089 last = make_call_insn_raw (x);
4090 add_insn_before (last, before);
4091 break;
969d70ca
JH
4092 }
4093
2f937369 4094 return last;
969d70ca
JH
4095}
4096
23b2ce53 4097/* Make an insn of code BARRIER
e881bb1b 4098 and output it before the insn BEFORE. */
23b2ce53
RS
4099
4100rtx
502b8322 4101emit_barrier_before (rtx before)
23b2ce53 4102{
b3694847 4103 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4104
4105 INSN_UID (insn) = cur_insn_uid++;
4106
a0ae8e8d 4107 add_insn_before (insn, before);
23b2ce53
RS
4108 return insn;
4109}
4110
e881bb1b
RH
4111/* Emit the label LABEL before the insn BEFORE. */
4112
4113rtx
502b8322 4114emit_label_before (rtx label, rtx before)
e881bb1b
RH
4115{
4116 /* This can be called twice for the same label as a result of the
4117 confusion that follows a syntax error! So make it harmless. */
4118 if (INSN_UID (label) == 0)
4119 {
4120 INSN_UID (label) = cur_insn_uid++;
4121 add_insn_before (label, before);
4122 }
4123
4124 return label;
4125}
4126
23b2ce53
RS
4127/* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4128
4129rtx
502b8322 4130emit_note_before (int subtype, rtx before)
23b2ce53 4131{
b3694847 4132 rtx note = rtx_alloc (NOTE);
23b2ce53 4133 INSN_UID (note) = cur_insn_uid++;
6773e15f 4134#ifndef USE_MAPPED_LOCATION
23b2ce53 4135 NOTE_SOURCE_FILE (note) = 0;
6773e15f 4136#endif
23b2ce53 4137 NOTE_LINE_NUMBER (note) = subtype;
ba4f7968 4138 BLOCK_FOR_INSN (note) = NULL;
23b2ce53 4139
a0ae8e8d 4140 add_insn_before (note, before);
23b2ce53
RS
4141 return note;
4142}
4143\f
2f937369
DM
4144/* Helper for emit_insn_after, handles lists of instructions
4145 efficiently. */
23b2ce53 4146
static rtx emit_insn_after_1 (rtx, rtx);

/* Splice the insn list headed by FIRST into the chain immediately
   after AFTER, updating basic-block bookkeeping as needed.  Returns
   the last insn of the spliced list.  */

static rtx
emit_insn_after_1 (rtx first, rtx after)
{
  rtx last;
  rtx after_after;
  basic_block bb;

  /* If AFTER lives in a basic block, the new insns join that block:
     mark it dirty and record the block on each non-barrier insn.  */
  if (GET_CODE (after) != BARRIER
      && (bb = BLOCK_FOR_INSN (after)))
    {
      bb->flags |= BB_DIRTY;
      /* The loop handles every insn but the final one ...  */
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (GET_CODE (last) != BARRIER)
	  set_block_for_insn (last, bb);
      /* ... and this handles the final insn, where LAST now rests.  */
      if (GET_CODE (last) != BARRIER)
	set_block_for_insn (last, bb);
      /* Extend the block if we appended at its end.  */
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    /* No block bookkeeping: just walk to the end of the list.  */
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  /* Link the list in between AFTER and its old successor.  */
  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  /* Keep the global chain tail up to date.  */
  if (after == last_insn)
    last_insn = last;
  return last;
}
4184
4185/* Make X be output after the insn AFTER. */
4186
4187rtx
502b8322 4188emit_insn_after (rtx x, rtx after)
2f937369
DM
4189{
4190 rtx last = after;
4191
4192#ifdef ENABLE_RTL_CHECKING
4193 if (after == NULL_RTX)
4194 abort ();
4195#endif
4196
4197 if (x == NULL_RTX)
4198 return last;
4199
4200 switch (GET_CODE (x))
23b2ce53 4201 {
2f937369
DM
4202 case INSN:
4203 case JUMP_INSN:
4204 case CALL_INSN:
4205 case CODE_LABEL:
4206 case BARRIER:
4207 case NOTE:
4208 last = emit_insn_after_1 (x, after);
4209 break;
4210
4211#ifdef ENABLE_RTL_CHECKING
4212 case SEQUENCE:
4213 abort ();
4214 break;
4215#endif
4216
4217 default:
4218 last = make_insn_raw (x);
4219 add_insn_after (last, after);
4220 break;
23b2ce53
RS
4221 }
4222
2f937369 4223 return last;
23b2ce53
RS
4224}
4225
255680cf
RK
4226/* Similar to emit_insn_after, except that line notes are to be inserted so
4227 as to act as if this insn were at FROM. */
4228
4229void
502b8322 4230emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
255680cf
RK
4231{
4232 rtx from_line = find_line_note (from);
4233 rtx after_line = find_line_note (after);
2f937369 4234 rtx insn = emit_insn_after (x, after);
255680cf
RK
4235
4236 if (from_line)
5f2fc772 4237 emit_note_copy_after (from_line, after);
255680cf
RK
4238
4239 if (after_line)
5f2fc772 4240 emit_note_copy_after (after_line, insn);
255680cf
RK
4241}
4242
2f937369 4243/* Make an insn of code JUMP_INSN with body X
23b2ce53
RS
4244 and output it after the insn AFTER. */
4245
4246rtx
502b8322 4247emit_jump_insn_after (rtx x, rtx after)
23b2ce53 4248{
2f937369 4249 rtx last;
23b2ce53 4250
2f937369
DM
4251#ifdef ENABLE_RTL_CHECKING
4252 if (after == NULL_RTX)
4253 abort ();
4254#endif
4255
4256 switch (GET_CODE (x))
23b2ce53 4257 {
2f937369
DM
4258 case INSN:
4259 case JUMP_INSN:
4260 case CALL_INSN:
4261 case CODE_LABEL:
4262 case BARRIER:
4263 case NOTE:
4264 last = emit_insn_after_1 (x, after);
4265 break;
4266
4267#ifdef ENABLE_RTL_CHECKING
4268 case SEQUENCE:
4269 abort ();
4270 break;
4271#endif
4272
4273 default:
4274 last = make_jump_insn_raw (x);
4275 add_insn_after (last, after);
4276 break;
23b2ce53
RS
4277 }
4278
2f937369
DM
4279 return last;
4280}
4281
4282/* Make an instruction with body X and code CALL_INSN
4283 and output it after the instruction AFTER. */
4284
4285rtx
502b8322 4286emit_call_insn_after (rtx x, rtx after)
2f937369
DM
4287{
4288 rtx last;
4289
4290#ifdef ENABLE_RTL_CHECKING
4291 if (after == NULL_RTX)
4292 abort ();
4293#endif
4294
4295 switch (GET_CODE (x))
4296 {
4297 case INSN:
4298 case JUMP_INSN:
4299 case CALL_INSN:
4300 case CODE_LABEL:
4301 case BARRIER:
4302 case NOTE:
4303 last = emit_insn_after_1 (x, after);
4304 break;
4305
4306#ifdef ENABLE_RTL_CHECKING
4307 case SEQUENCE:
4308 abort ();
4309 break;
4310#endif
4311
4312 default:
4313 last = make_call_insn_raw (x);
4314 add_insn_after (last, after);
4315 break;
4316 }
4317
4318 return last;
23b2ce53
RS
4319}
4320
4321/* Make an insn of code BARRIER
4322 and output it after the insn AFTER. */
4323
4324rtx
502b8322 4325emit_barrier_after (rtx after)
23b2ce53 4326{
b3694847 4327 rtx insn = rtx_alloc (BARRIER);
23b2ce53
RS
4328
4329 INSN_UID (insn) = cur_insn_uid++;
4330
4331 add_insn_after (insn, after);
4332 return insn;
4333}
4334
4335/* Emit the label LABEL after the insn AFTER. */
4336
4337rtx
502b8322 4338emit_label_after (rtx label, rtx after)
23b2ce53
RS
4339{
4340 /* This can be called twice for the same label
4341 as a result of the confusion that follows a syntax error!
4342 So make it harmless. */
4343 if (INSN_UID (label) == 0)
4344 {
4345 INSN_UID (label) = cur_insn_uid++;
4346 add_insn_after (label, after);
4347 }
4348
4349 return label;
4350}
4351
4352/* Emit a note of subtype SUBTYPE after the insn AFTER. */
4353
4354rtx
502b8322 4355emit_note_after (int subtype, rtx after)
23b2ce53 4356{
b3694847 4357 rtx note = rtx_alloc (NOTE);
23b2ce53 4358 INSN_UID (note) = cur_insn_uid++;
6773e15f 4359#ifndef USE_MAPPED_LOCATION
23b2ce53 4360 NOTE_SOURCE_FILE (note) = 0;
6773e15f 4361#endif
23b2ce53 4362 NOTE_LINE_NUMBER (note) = subtype;
ba4f7968 4363 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4364 add_insn_after (note, after);
4365 return note;
4366}
4367
5f2fc772 4368/* Emit a copy of note ORIG after the insn AFTER. */
23b2ce53
RS
4369
4370rtx
5f2fc772 4371emit_note_copy_after (rtx orig, rtx after)
23b2ce53 4372{
b3694847 4373 rtx note;
23b2ce53 4374
5f2fc772 4375 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
23b2ce53
RS
4376 {
4377 cur_insn_uid++;
4378 return 0;
4379 }
4380
68252e27 4381 note = rtx_alloc (NOTE);
23b2ce53 4382 INSN_UID (note) = cur_insn_uid++;
5f2fc772
NS
4383 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4384 NOTE_DATA (note) = NOTE_DATA (orig);
ba4f7968 4385 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4386 add_insn_after (note, after);
4387 return note;
4388}
4389\f
0435312e 4390/* Like emit_insn_after, but set INSN_LOCATOR according to SCOPE. */
0d682900 4391rtx
502b8322 4392emit_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900
JH
4393{
4394 rtx last = emit_insn_after (pattern, after);
0d682900 4395
dd3adcf8
DJ
4396 if (pattern == NULL_RTX)
4397 return last;
4398
2f937369
DM
4399 after = NEXT_INSN (after);
4400 while (1)
4401 {
d11cea13 4402 if (active_insn_p (after))
0435312e 4403 INSN_LOCATOR (after) = loc;
2f937369
DM
4404 if (after == last)
4405 break;
4406 after = NEXT_INSN (after);
4407 }
0d682900
JH
4408 return last;
4409}
4410
0435312e 4411/* Like emit_jump_insn_after, but set INSN_LOCATOR according to SCOPE. */
0d682900 4412rtx
502b8322 4413emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900
JH
4414{
4415 rtx last = emit_jump_insn_after (pattern, after);
2f937369 4416
dd3adcf8
DJ
4417 if (pattern == NULL_RTX)
4418 return last;
4419
2f937369
DM
4420 after = NEXT_INSN (after);
4421 while (1)
4422 {
d11cea13 4423 if (active_insn_p (after))
0435312e 4424 INSN_LOCATOR (after) = loc;
2f937369
DM
4425 if (after == last)
4426 break;
4427 after = NEXT_INSN (after);
4428 }
0d682900
JH
4429 return last;
4430}
4431
0435312e 4432/* Like emit_call_insn_after, but set INSN_LOCATOR according to SCOPE. */
0d682900 4433rtx
502b8322 4434emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
0d682900
JH
4435{
4436 rtx last = emit_call_insn_after (pattern, after);
2f937369 4437
dd3adcf8
DJ
4438 if (pattern == NULL_RTX)
4439 return last;
4440
2f937369
DM
4441 after = NEXT_INSN (after);
4442 while (1)
4443 {
d11cea13 4444 if (active_insn_p (after))
0435312e 4445 INSN_LOCATOR (after) = loc;
2f937369
DM
4446 if (after == last)
4447 break;
4448 after = NEXT_INSN (after);
4449 }
0d682900
JH
4450 return last;
4451}
4452
0435312e 4453/* Like emit_insn_before, but set INSN_LOCATOR according to SCOPE. */
0d682900 4454rtx
502b8322 4455emit_insn_before_setloc (rtx pattern, rtx before, int loc)
0d682900
JH
4456{
4457 rtx first = PREV_INSN (before);
4458 rtx last = emit_insn_before (pattern, before);
4459
dd3adcf8
DJ
4460 if (pattern == NULL_RTX)
4461 return last;
4462
2f937369
DM
4463 first = NEXT_INSN (first);
4464 while (1)
4465 {
d11cea13 4466 if (active_insn_p (first))
0435312e 4467 INSN_LOCATOR (first) = loc;
2f937369
DM
4468 if (first == last)
4469 break;
4470 first = NEXT_INSN (first);
4471 }
0d682900
JH
4472 return last;
4473}
4474\f
2f937369
DM
4475/* Take X and emit it at the end of the doubly-linked
4476 INSN list.
23b2ce53
RS
4477
4478 Returns the last insn emitted. */
4479
4480rtx
502b8322 4481emit_insn (rtx x)
23b2ce53 4482{
2f937369
DM
4483 rtx last = last_insn;
4484 rtx insn;
23b2ce53 4485
2f937369
DM
4486 if (x == NULL_RTX)
4487 return last;
23b2ce53 4488
2f937369
DM
4489 switch (GET_CODE (x))
4490 {
4491 case INSN:
4492 case JUMP_INSN:
4493 case CALL_INSN:
4494 case CODE_LABEL:
4495 case BARRIER:
4496 case NOTE:
4497 insn = x;
4498 while (insn)
23b2ce53 4499 {
2f937369 4500 rtx next = NEXT_INSN (insn);
23b2ce53 4501 add_insn (insn);
2f937369
DM
4502 last = insn;
4503 insn = next;
23b2ce53 4504 }
2f937369 4505 break;
23b2ce53 4506
2f937369
DM
4507#ifdef ENABLE_RTL_CHECKING
4508 case SEQUENCE:
4509 abort ();
4510 break;
4511#endif
23b2ce53 4512
2f937369
DM
4513 default:
4514 last = make_insn_raw (x);
4515 add_insn (last);
4516 break;
23b2ce53
RS
4517 }
4518
4519 return last;
4520}
4521
2f937369
DM
4522/* Make an insn of code JUMP_INSN with pattern X
4523 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4524
4525rtx
502b8322 4526emit_jump_insn (rtx x)
23b2ce53 4527{
d950dee3 4528 rtx last = NULL_RTX, insn;
23b2ce53 4529
2f937369 4530 switch (GET_CODE (x))
23b2ce53 4531 {
2f937369
DM
4532 case INSN:
4533 case JUMP_INSN:
4534 case CALL_INSN:
4535 case CODE_LABEL:
4536 case BARRIER:
4537 case NOTE:
4538 insn = x;
4539 while (insn)
4540 {
4541 rtx next = NEXT_INSN (insn);
4542 add_insn (insn);
4543 last = insn;
4544 insn = next;
4545 }
4546 break;
e0a5c5eb 4547
2f937369
DM
4548#ifdef ENABLE_RTL_CHECKING
4549 case SEQUENCE:
4550 abort ();
4551 break;
4552#endif
e0a5c5eb 4553
2f937369
DM
4554 default:
4555 last = make_jump_insn_raw (x);
4556 add_insn (last);
4557 break;
3c030e88 4558 }
e0a5c5eb
RS
4559
4560 return last;
4561}
4562
2f937369 4563/* Make an insn of code CALL_INSN with pattern X
23b2ce53
RS
4564 and add it to the end of the doubly-linked list. */
4565
4566rtx
502b8322 4567emit_call_insn (rtx x)
23b2ce53 4568{
2f937369
DM
4569 rtx insn;
4570
4571 switch (GET_CODE (x))
23b2ce53 4572 {
2f937369
DM
4573 case INSN:
4574 case JUMP_INSN:
4575 case CALL_INSN:
4576 case CODE_LABEL:
4577 case BARRIER:
4578 case NOTE:
4579 insn = emit_insn (x);
4580 break;
23b2ce53 4581
2f937369
DM
4582#ifdef ENABLE_RTL_CHECKING
4583 case SEQUENCE:
4584 abort ();
4585 break;
4586#endif
23b2ce53 4587
2f937369
DM
4588 default:
4589 insn = make_call_insn_raw (x);
23b2ce53 4590 add_insn (insn);
2f937369 4591 break;
23b2ce53 4592 }
2f937369
DM
4593
4594 return insn;
23b2ce53
RS
4595}
4596
4597/* Add the label LABEL to the end of the doubly-linked list. */
4598
4599rtx
502b8322 4600emit_label (rtx label)
23b2ce53
RS
4601{
4602 /* This can be called twice for the same label
4603 as a result of the confusion that follows a syntax error!
4604 So make it harmless. */
4605 if (INSN_UID (label) == 0)
4606 {
4607 INSN_UID (label) = cur_insn_uid++;
4608 add_insn (label);
4609 }
4610 return label;
4611}
4612
4613/* Make an insn of code BARRIER
4614 and add it to the end of the doubly-linked list. */
4615
4616rtx
502b8322 4617emit_barrier (void)
23b2ce53 4618{
b3694847 4619 rtx barrier = rtx_alloc (BARRIER);
23b2ce53
RS
4620 INSN_UID (barrier) = cur_insn_uid++;
4621 add_insn (barrier);
4622 return barrier;
4623}
4624
0cea056b
NS
4625/* Make line numbering NOTE insn for LOCATION add it to the end
4626 of the doubly-linked list, but only if line-numbers are desired for
4627 debugging info and it doesn't match the previous one. */
23b2ce53
RS
4628
rtx
emit_line_note (location_t location)
{
  rtx note;

  set_file_and_line_for_stmt (location);

  /* Skip the note entirely if the location is unchanged since the
     last one emitted.  */
#ifdef USE_MAPPED_LOCATION
  if (location == last_location)
    return NULL_RTX;
#else
  if (location.file && last_location.file
      && !strcmp (location.file, last_location.file)
      && location.line == last_location.line)
    return NULL_RTX;
#endif
  last_location = location;

  /* With line numbers disabled, emit nothing but still consume a uid
     so insn numbering stays consistent.  */
  if (no_line_numbers)
    {
      cur_insn_uid++;
      return NULL_RTX;
    }

#ifdef USE_MAPPED_LOCATION
  note = emit_note ((int) location);
#else
  note = emit_note (location.line);
  NOTE_SOURCE_FILE (note) = location.file;
#endif

  return note;
}
4662
4663/* Emit a copy of note ORIG. */
502b8322 4664
5f2fc772
NS
4665rtx
4666emit_note_copy (rtx orig)
4667{
4668 rtx note;
4669
4670 if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4671 {
4672 cur_insn_uid++;
4673 return NULL_RTX;
4674 }
4675
4676 note = rtx_alloc (NOTE);
4677
4678 INSN_UID (note) = cur_insn_uid++;
4679 NOTE_DATA (note) = NOTE_DATA (orig);
4680 NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4681 BLOCK_FOR_INSN (note) = NULL;
4682 add_insn (note);
4683
2e040219 4684 return note;
23b2ce53
RS
4685}
4686
2e040219
NS
4687/* Make an insn of code NOTE or type NOTE_NO
4688 and add it to the end of the doubly-linked list. */
23b2ce53
RS
4689
4690rtx
502b8322 4691emit_note (int note_no)
23b2ce53 4692{
b3694847 4693 rtx note;
23b2ce53 4694
23b2ce53
RS
4695 note = rtx_alloc (NOTE);
4696 INSN_UID (note) = cur_insn_uid++;
2e040219 4697 NOTE_LINE_NUMBER (note) = note_no;
dd107e66 4698 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
ba4f7968 4699 BLOCK_FOR_INSN (note) = NULL;
23b2ce53
RS
4700 add_insn (note);
4701 return note;
4702}
4703
23b2ce53 4704/* Cause next statement to emit a line note even if the line number
0cea056b 4705 has not changed. */
23b2ce53
RS
4706
void
force_next_line_note (void)
{
  /* Invalidate the cached location so that the next emit_line_note
     call emits a note even if the location has not changed.  */
#ifdef USE_MAPPED_LOCATION
  last_location = -1;
#else
  last_location.line = -1;
#endif
}
87b47c85
AM
4716
4717/* Place a note of KIND on insn INSN with DATUM as the datum. If a
30f7a378 4718 note of this type already exists, remove it first. */
87b47c85 4719
/* Attach a note of kind KIND with datum DATUM to INSN, replacing any
   existing note of that kind.  Returns the note, or NULL_RTX when the
   note is refused (see the REG_EQUAL/REG_EQUIV cases below).  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
	 has multiple sets (some callers assume single_set
	 means the insn only has one set, when in fact it
	 means the insn only has one * useful * set). */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	{
	  /* A pre-existing note on such an insn is a bug.  */
	  if (note)
	    abort ();
	  return NULL_RTX;
	}

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs. */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;
      break;

    default:
      break;
    }

  /* Replace the datum of an existing note in place.  */
  if (note)
    {
      XEXP (note, 0) = datum;
      return note;
    }

  /* Otherwise prepend a fresh note to the insn's note list.  */
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
  return REG_NOTES (insn);
}
23b2ce53
RS
4759\f
4760/* Return an indication of which type of insn should have X as a body.
4761 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4762
4763enum rtx_code
502b8322 4764classify_insn (rtx x)
23b2ce53
RS
4765{
4766 if (GET_CODE (x) == CODE_LABEL)
4767 return CODE_LABEL;
4768 if (GET_CODE (x) == CALL)
4769 return CALL_INSN;
4770 if (GET_CODE (x) == RETURN)
4771 return JUMP_INSN;
4772 if (GET_CODE (x) == SET)
4773 {
4774 if (SET_DEST (x) == pc_rtx)
4775 return JUMP_INSN;
4776 else if (GET_CODE (SET_SRC (x)) == CALL)
4777 return CALL_INSN;
4778 else
4779 return INSN;
4780 }
4781 if (GET_CODE (x) == PARALLEL)
4782 {
b3694847 4783 int j;
23b2ce53
RS
4784 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4785 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4786 return CALL_INSN;
4787 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4788 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4789 return JUMP_INSN;
4790 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4791 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4792 return CALL_INSN;
4793 }
4794 return INSN;
4795}
4796
4797/* Emit the rtl pattern X as an appropriate kind of insn.
4798 If X is a label, it is simply added into the insn chain. */
4799
4800rtx
502b8322 4801emit (rtx x)
23b2ce53
RS
4802{
4803 enum rtx_code code = classify_insn (x);
4804
4805 if (code == CODE_LABEL)
4806 return emit_label (x);
4807 else if (code == INSN)
4808 return emit_insn (x);
4809 else if (code == JUMP_INSN)
4810 {
b3694847 4811 rtx insn = emit_jump_insn (x);
7f1c097d 4812 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
23b2ce53
RS
4813 return emit_barrier ();
4814 return insn;
4815 }
4816 else if (code == CALL_INSN)
4817 return emit_call_insn (x);
4818 else
4819 abort ();
4820}
4821\f
e2500fed 4822/* Space for free sequence stack entries. */
1431042e 4823static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
e2500fed 4824
5c7a310f
MM
4825/* Begin emitting insns to a sequence which can be packaged in an
4826 RTL_EXPR. If this sequence will contain something that might cause
4827 the compiler to pop arguments to function calls (because those
4828 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4829 details), use do_pending_stack_adjust before calling this function.
4830 That will ensure that the deferred pops are not accidentally
4eb00163 4831 emitted in the middle of this sequence. */
23b2ce53
RS
4832
4833void
502b8322 4834start_sequence (void)
23b2ce53
RS
4835{
4836 struct sequence_stack *tem;
4837
e2500fed
GK
4838 if (free_sequence_stack != NULL)
4839 {
4840 tem = free_sequence_stack;
4841 free_sequence_stack = tem->next;
4842 }
4843 else
703ad42b 4844 tem = ggc_alloc (sizeof (struct sequence_stack));
23b2ce53 4845
49ad7cfa 4846 tem->next = seq_stack;
23b2ce53
RS
4847 tem->first = first_insn;
4848 tem->last = last_insn;
591ccf92 4849 tem->sequence_rtl_expr = seq_rtl_expr;
23b2ce53 4850
49ad7cfa 4851 seq_stack = tem;
23b2ce53
RS
4852
4853 first_insn = 0;
4854 last_insn = 0;
4855}
4856
591ccf92
MM
4857/* Similarly, but indicate that this sequence will be placed in T, an
4858 RTL_EXPR. See the documentation for start_sequence for more
4859 information about how to use this function. */
4860
4861void
502b8322 4862start_sequence_for_rtl_expr (tree t)
591ccf92
MM
4863{
4864 start_sequence ();
4865
4866 seq_rtl_expr = t;
4867}
4868
5c7a310f
MM
4869/* Set up the insn chain starting with FIRST as the current sequence,
4870 saving the previously current one. See the documentation for
4871 start_sequence for more information about how to use this function. */
23b2ce53
RS
4872
4873void
502b8322 4874push_to_sequence (rtx first)
23b2ce53
RS
4875{
4876 rtx last;
4877
4878 start_sequence ();
4879
4880 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4881
4882 first_insn = first;
4883 last_insn = last;
4884}
4885
c14f7160
ML
4886/* Set up the insn chain from a chain stort in FIRST to LAST. */
4887
4888void
502b8322 4889push_to_full_sequence (rtx first, rtx last)
c14f7160
ML
4890{
4891 start_sequence ();
4892 first_insn = first;
4893 last_insn = last;
4894 /* We really should have the end of the insn chain here. */
4895 if (last && NEXT_INSN (last))
4896 abort ();
4897}
4898
f15ae3a1
TW
4899/* Set up the outer-level insn chain
4900 as the current sequence, saving the previously current one. */
4901
4902void
502b8322 4903push_topmost_sequence (void)
f15ae3a1 4904{
aefdd5ab 4905 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
4906
4907 start_sequence ();
4908
49ad7cfa 4909 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4910 top = stack;
4911
4912 first_insn = top->first;
4913 last_insn = top->last;
591ccf92 4914 seq_rtl_expr = top->sequence_rtl_expr;
f15ae3a1
TW
4915}
4916
4917/* After emitting to the outer-level insn chain, update the outer-level
4918 insn chain, and restore the previous saved state. */
4919
4920void
502b8322 4921pop_topmost_sequence (void)
f15ae3a1 4922{
aefdd5ab 4923 struct sequence_stack *stack, *top = NULL;
f15ae3a1 4924
49ad7cfa 4925 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4926 top = stack;
4927
4928 top->first = first_insn;
4929 top->last = last_insn;
591ccf92 4930 /* ??? Why don't we save seq_rtl_expr here? */
f15ae3a1
TW
4931
4932 end_sequence ();
4933}
4934
23b2ce53
RS
4935/* After emitting to a sequence, restore previous saved state.
4936
5c7a310f 4937 To get the contents of the sequence just made, you must call
2f937369 4938 `get_insns' *before* calling here.
5c7a310f
MM
4939
4940 If the compiler might have deferred popping arguments while
4941 generating this sequence, and this sequence will not be immediately
4942 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 4943 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
4944 pops are inserted into this sequence, and not into some random
4945 location in the instruction stream. See INHIBIT_DEFER_POP for more
4946 information about deferred popping of arguments. */
23b2ce53
RS
4947
4948void
502b8322 4949end_sequence (void)
23b2ce53 4950{
49ad7cfa 4951 struct sequence_stack *tem = seq_stack;
23b2ce53
RS
4952
4953 first_insn = tem->first;
4954 last_insn = tem->last;
591ccf92 4955 seq_rtl_expr = tem->sequence_rtl_expr;
49ad7cfa 4956 seq_stack = tem->next;
23b2ce53 4957
e2500fed
GK
4958 memset (tem, 0, sizeof (*tem));
4959 tem->next = free_sequence_stack;
4960 free_sequence_stack = tem;
23b2ce53
RS
4961}
4962
4963/* Return 1 if currently emitting into a sequence. */
4964
4965int
502b8322 4966in_sequence_p (void)
23b2ce53 4967{
49ad7cfa 4968 return seq_stack != 0;
23b2ce53 4969}
23b2ce53 4970\f
59ec66dc
MM
4971/* Put the various virtual registers into REGNO_REG_RTX. */
4972
4973void
502b8322 4974init_virtual_regs (struct emit_status *es)
59ec66dc 4975{
49ad7cfa
BS
4976 rtx *ptr = es->x_regno_reg_rtx;
4977 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4978 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4979 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4980 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4981 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4982}
4983
da43a810
BS
4984\f
4985/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4986static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4987static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4988static int copy_insn_n_scratches;
4989
4990/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4991 copied an ASM_OPERANDS.
4992 In that case, it is the original input-operand vector. */
4993static rtvec orig_asm_operands_vector;
4994
4995/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4996 copied an ASM_OPERANDS.
4997 In that case, it is the copied input-operand vector. */
4998static rtvec copy_asm_operands_vector;
4999
5000/* Likewise for the constraints vector. */
5001static rtvec orig_asm_constraints_vector;
5002static rtvec copy_asm_constraints_vector;
5003
5004/* Recursively create a new copy of an rtx for copy_insn.
5005 This function differs from copy_rtx in that it handles SCRATCHes and
5006 ASM_OPERANDs properly.
5007 Normally, this function is not used directly; use copy_insn as front end.
5008 However, you could first copy an insn pattern with copy_insn and then use
5009 this function afterwards to properly copy any REG_NOTEs containing
5010 SCRATCHes. */
5011
rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    /* These codes are shareable as-is: return the original.  */
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ADDRESSOF:
      return orig;
    case CLOBBER:
      /* A clobber of a hard register is shareable.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
	return orig;
      break;

    case SCRATCH:
      /* Reuse the copy already made for this SCRATCH, so that all
	 references within one insn share a single copy.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF. If it contains
	 a LABEL_REF, it isn't sharable. */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
	return orig;
      break;

      /* A MEM with a constant address is not sharable. The problem is that
	 the constant address may need to be reloaded. If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded. */

    default:
      break;
    }

  copy = rtx_alloc (code);

  /* Copy the various flags, and other information. We assume that
     all fields need copying, and then clear the fields that should
     not be copied. That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag. */
  memcpy (copy, orig, RTX_HDR_SIZE);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL. */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Copy each operand, recursing into sub-rtxes and vectors.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      copy->u.fld[i] = orig->u.fld[i];
      switch (*format_ptr++)
	{
	case 'e':
	  if (XEXP (orig, i) != NULL)
	    XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	  break;

	case 'E':
	case 'V':
	  /* Share the operand/constraint vectors of a previously
	     copied ASM_OPERANDS, so all its copies stay in sync.  */
	  if (XVEC (orig, i) == orig_asm_constraints_vector)
	    XVEC (copy, i) = copy_asm_constraints_vector;
	  else if (XVEC (orig, i) == orig_asm_operands_vector)
	    XVEC (copy, i) = copy_asm_operands_vector;
	  else if (XVEC (orig, i) != NULL)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	    }
	  break;

	case 't':
	case 'w':
	case 'i':
	case 's':
	case 'S':
	case 'u':
	case '0':
	  /* These are left unchanged.  (Already copied by the
	     whole-field assignment above.)  */
	  break;

	default:
	  abort ();
	}
    }

  /* Record a newly copied SCRATCH so later references share it.  */
  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      if (i >= MAX_RECOG_OPERANDS)
	abort ();
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  /* Record the vectors of a copied ASM_OPERANDS for sharing (see the
     'E'/'V' case above).  */
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
5143
5144/* Create a new copy of an rtx.
5145 This function differs from copy_rtx in that it handles SCRATCHes and
5146 ASM_OPERANDs properly.
5147 INSN doesn't really have to be a full INSN; it could be just the
5148 pattern. */
5149rtx
502b8322 5150copy_insn (rtx insn)
da43a810
BS
5151{
5152 copy_insn_n_scratches = 0;
5153 orig_asm_operands_vector = 0;
5154 orig_asm_constraints_vector = 0;
5155 copy_asm_operands_vector = 0;
5156 copy_asm_constraints_vector = 0;
5157 return copy_insn_1 (insn);
5158}
59ec66dc 5159
23b2ce53
RS
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  struct function *f = cfun;

  /* Give the current function a fresh emit status and reset the
     per-function insn chain and numbering state.  */
  f->emit = ggc_alloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  seq_rtl_expr = NULL;
  /* Insn UIDs start at 1; UID 0 is never assigned.  */
  cur_insn_uid = 1;
  /* Pseudo register numbers begin just past the fixed virtual regs.  */
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  last_label_num = 0;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  /* Initial table capacity: every fixed register plus some headroom
     for the first pseudos -- NOTE(review): presumably these tables are
     grown elsewhere as more pseudos appear; confirm against the
     regno-table growing code.  */
  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
			 * sizeof (unsigned char));

  regno_reg_rtx
    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  static_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  /* Stack- and argument-related registers are aligned to the target's
     stack boundary; the CFA is only known to be word-aligned.  */
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  /* Target hook for any extra per-function expander initialization.  */
  INIT_EXPANDERS;
#endif
}
5228
ff88fe10 5229/* Generate the constant 0. */
69ef87e2
AH
5230
5231static rtx
502b8322 5232gen_const_vector_0 (enum machine_mode mode)
69ef87e2
AH
5233{
5234 rtx tem;
5235 rtvec v;
5236 int units, i;
5237 enum machine_mode inner;
5238
5239 units = GET_MODE_NUNITS (mode);
5240 inner = GET_MODE_INNER (mode);
5241
5242 v = rtvec_alloc (units);
5243
5244 /* We need to call this function after we to set CONST0_RTX first. */
5245 if (!CONST0_RTX (inner))
5246 abort ();
5247
5248 for (i = 0; i < units; ++i)
5249 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5250
a06e3c40 5251 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
69ef87e2
AH
5252 return tem;
5253}
5254
a06e3c40
R
5255/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
5256 all elements are zero. */
5257rtx
502b8322 5258gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
a06e3c40
R
5259{
5260 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5261 int i;
5262
5263 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5264 if (RTVEC_ELT (v, i) != inner_zero)
5265 return gen_rtx_raw_CONST_VECTOR (mode, v);
5266 return CONST0_RTX (mode);
5267}
5268
/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_once ();

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes: the narrowest integer modes whose
     bitsize matches BITS_PER_UNIT and BITS_PER_WORD respectively, and
     the float mode matching DOUBLE_TYPE_SIZE.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  /* ptr_mode is the integer mode of pointer width in Pmode's class.  */
  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  /* These two may already have been set up -- NOTE(review): presumably
     by INIT_EXPANDERS on some targets; confirm.  */
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
					  HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  /* Share a cached CONST_INT for const_true_rtx when the target's
     STORE_FLAG_VALUE falls in the cached range.  */
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  /* Set up the small shared REAL_VALUE_TYPE constants.  */
  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);

  /* 0.5 is 1.0 with the exponent lowered by one.  */
  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);

  /* Initialize mathematical constants for constant folding builtins.
     These constants need to be given to at least 160 bits precision.  */
  real_from_string (&dconstpi,
		    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
		    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  /* Fill const_tiny_rtx with the shared 0, 1 and 2 constants for every
     integer, partial-integer and float mode.  */
  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Zero vectors for the vector modes; requires CONST0_RTX of the
     element modes, which the loops above just created.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);

  /* Condition-code modes share const0_rtx as their zero.  */
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  /* Static chain registers: the REGNUM macros give register numbers;
     the STATIC_CHAIN macros (below) may override with arbitrary rtx.  */
#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
a11759a3
JR
5465\f
5466/* Query and clear/ restore no_line_numbers. This is used by the
5467 switch / case handling in stmt.c to give proper line numbers in
5468 warnings about unreachable code. */
5469
5470int
502b8322 5471force_line_numbers (void)
a11759a3
JR
5472{
5473 int old = no_line_numbers;
5474
5475 no_line_numbers = 0;
5476 if (old)
5477 force_next_line_note ();
5478 return old;
5479}
5480
/* Restore no_line_numbers to OLD_VALUE, as previously returned by
   force_line_numbers.  */

void
restore_line_number_status (int old_value)
{
  no_line_numbers = old_value;
}
969d70ca
JH
5486
/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall region notes so they point into the copy.
   Returns the new insn.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  /* Copy the pattern and emit it with the emitter matching the insn
     class; only ordinary, jump and call insns are handled.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      /* Calls carry extra state beyond the pattern: the function-usage
	 list and the sibling/const-or-pure flags.  */
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      abort ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
	else
	  REG_NOTES (new)
	    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
					      XEXP (link, 0),
					      REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  If the copy carries a REG_RETVAL note,
     walk backwards to the insn carrying the matching REG_LIBCALL note
     and repoint both notes at the copied insns -- NOTE(review): this
     assumes the whole libcall region was copied before this call;
     confirm with callers.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
	p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
e2500fed 5553
1431042e 5554static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
3e89ed8d
JH
5555rtx
5556gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5557{
5558 if (hard_reg_clobbers[mode][regno])
5559 return hard_reg_clobbers[mode][regno];
5560 else
5561 return (hard_reg_clobbers[mode][regno] =
5562 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
5563}
5564
e2500fed 5565#include "gt-emit-rtl.h"
This page took 2.71951 seconds and 5 git commands to generate.