/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are same (most) then these are the
   same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
				 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
	       unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
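
/* Because CONST_INTs are shared as above, two CONST_INTs of equal value
   are pointer-identical; for example, GEN_INT (0) == const0_rtx, and in
   general CONST_INTs may be compared with == rather than rtx_equal_p.  */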

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
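
/* For example, gen_int_mode (255, QImode) yields constm1_rtx, since
   truncating 255 to QImode's 8 bits and sign-extending gives -1.  */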

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value
	of the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists
	only of copies of the sign bit, and the signs of i0 and i1 are the
	same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
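
/* For example, on a host where HOST_WIDE_INT is 64 bits,
   immed_double_const (5, 0, DImode) reduces to gen_int_mode (5, DImode)
   by case 1 above, while a TImode value whose high word is not merely
   sign-bit copies becomes a VOIDmode CONST_DOUBLE by case 3.  */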

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
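
/* For example, before reload, gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM)
   returns the shared frame_pointer_rtx rather than a fresh REG, so that
   explicit frame-pointer references stay recognizable during register
   elimination.  */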

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
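
/* gen_const_mem is typically used for references such as constant-pool
   loads; the resulting MEM has MEM_READONLY_P and MEM_NOTRAP_P set, so
   the optimizers may treat it as invariant and freely hoist it.  */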

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!current_function_calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
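
/* Some illustrative cases: (subreg:SI (reg:DI) 2) fails the alignment
   check above (2 is not a multiple of SImode's 4-byte size), and
   (subreg:SI (reg:DI) 8) fails the range check (the offset falls outside
   the 8-byte inner object).  On a 32-bit little-endian target, offsets 0
   and 4 are the valid SImode subwords of a DImode pseudo.  */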

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
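
/* For example, building the vector for a two-element PARALLEL:

     rtvec v = gen_rtvec (2, set0, set1);
     rtx par = gen_rtx_PARALLEL (VOIDmode, v);

   where set0 and set1 are previously constructed SET rtxs.  */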

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  gcc_assert (!no_new_pseudos);

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
			  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
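
/* Note the CONCAT case above: while generating_concat_p is set,
   gen_reg_rtx (DCmode), for example, does not allocate a single DCmode
   pseudo but returns (concat:DC (reg:DF) (reg:DF)), one fresh pseudo
   per component.  */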

/* Generate a register with same attributes as REG, but offset by OFFSET.
   Do the big endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between
     or some part of variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we would not do the big endian correction the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
                                                        of char[4]
     DI        HI    4          6           2           1st element in HImode
                                                        of int16[2]

     If the size of DECL is equal or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
	  && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
	offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
	offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
	offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
		      % UNITS_PER_WORD);
      else
	offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
	{
	  /* MODE is wider than the variable so the new reg will cover
	     the whole variable so the resulting OFFSET should be 0.  */
	  offset = 0;
	}
      else
	{
	  /* Convert little endian to machine endian WRT size of variable.  */
	  if (WORDS_BIG_ENDIAN)
	    offset = ((var_size - 1 - offset_le)
		      / UNITS_PER_WORD) * UNITS_PER_WORD;
	  else
	    offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

	  if (BYTES_BIG_ENDIAN)
	    offset += ((var_size - 1 - offset_le)
		       % UNITS_PER_WORD);
	  else
	    offset += offset_le % UNITS_PER_WORD;
	}
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from the memory reference MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;

  if (!x)
    return;
  /* For a register, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to parameter declaration T.  */
void
set_decl_incoming_rtl (tree t, rtx x)
{
  DECL_INCOMING_RTL (t) = x;

  if (!x)
    return;
  /* For a register, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
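
/* For example, for a DImode pseudo P, gen_lowpart_common (SImode, P)
   yields (subreg:SI P 0) on a little-endian target (offset 4 on big
   endian), while for a CONST_INT the value is simply truncated by
   simplify_gen_subreg.  */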

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand, in case
   EXP is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
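
/* For example, subreg_lowpart_offset (SImode, DImode) is 0 on a
   little-endian target and 4 on a big-endian one: the least significant
   word sits at the highest address in big-endian storage order.  */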

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
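
/* For example, on a 32-bit target, operand_subword (op, 1, 1, DImode)
   extracts the second word of a DImode operand: the high-order half if
   !WORDS_BIG_ENDIAN, the low-order half otherwise.  */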

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR)
	inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
		   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if the two MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (tree expr1, tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1),	/* field decl */
			   TREE_OPERAND (expr2, 1));

  if (INDIRECT_REF_P (expr1))
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			     TREE_OPERAND (expr2, 0));

  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
     have been resolved here.  */
  gcc_assert (DECL_P (expr1));

  /* Decls with different pointers can't be equal.  */
  return 0;
}

/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In that case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
	if (integer_zerop (TREE_OPERAND (t, 1)))
	  /* We don't know anything about the alignment.  */
	  align = BITS_PER_UNIT;
	else
	  align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
	     || TREE_CODE (t) == NON_LVALUE_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
	 examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == REALPART_EXPR
	     || TREE_CODE (base) == IMAGPART_EXPR
	     || TREE_CODE (base) == BIT_FIELD_REF)
	base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
	{
	  if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
	    MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
	  else
	    MEM_NOTRAP_P (ref) = 1;
	}
      else
	MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	{
	  tree base_type = TREE_TYPE (base);
	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
		      || DECL_ARTIFICIAL (base));
	  MEM_READONLY_P (ref) = 1;
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = component_ref_for_mem_expr (t);
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = component_ref_for_mem_expr (t2);
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	  else if (flag_argument_noalias > 1
		   && (INDIRECT_REF_P (t2))
		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
	    {
	      expr = t2;
	      offset = NULL;
	    }
	}

      /* If this is a Fortran indirect argument reference, record the
	 parameter decl.  */
      else if (flag_argument_noalias > 1
	       && (INDIRECT_REF_P (t))
	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
	{
	  expr = t;
	  offset = NULL;
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1666 {
 1667	      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1668 we're overlapping. */
1669 offset = NULL;
1670 expr = NULL;
1671 }
1672
8ac61af7 1673 /* Now set the attributes we computed above. */
10b76d73 1674 MEM_ATTRS (ref)
998d7deb 1675 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
8ac61af7
RK
1676
1677 /* If this is already known to be a scalar or aggregate, we are done. */
1678 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
738cc472
RK
1679 return;
1680
8ac61af7
RK
1681 /* If it is a reference into an aggregate, this is part of an aggregate.
1682 Otherwise we don't know. */
173b24b9
RK
1683 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1684 || TREE_CODE (t) == ARRAY_RANGE_REF
1685 || TREE_CODE (t) == BIT_FIELD_REF)
1686 MEM_IN_STRUCT_P (ref) = 1;
1687}
1688
6f1087be 1689void
502b8322 1690set_mem_attributes (rtx ref, tree t, int objectp)
6f1087be
RH
1691{
1692 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1693}
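
/* A minimal usage sketch (hypothetical names, for illustration only):
   a caller that has just expanded the tree reference EXP into a MEM
   would typically do

       rtx mem = gen_rtx_MEM (mode, addr);
       set_mem_attributes (mem, exp, 0);

   so that the alias set, alignment, size and expression implied by EXP
   become visible to later RTL passes.  */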
1694
a560d4d4
JH
 1695/* Set the attributes of MEM from those recorded for the register REG.  */
1696
1697void
502b8322 1698set_mem_attrs_from_reg (rtx mem, rtx reg)
a560d4d4
JH
1699{
1700 MEM_ATTRS (mem)
1701 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1702 GEN_INT (REG_OFFSET (reg)),
1703 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1704}
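
/* A plausible caller, sketched only: when a pseudo that carried
   REG_EXPR/REG_OFFSET information is replaced by a stack slot, doing

       set_mem_attrs_from_reg (slot_mem, reg);

   keeps that expression and offset attached to the new memory
   reference.  SLOT_MEM is an illustrative name.  */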
1705
173b24b9
RK
1706/* Set the alias set of MEM to SET. */
1707
1708void
502b8322 1709set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
173b24b9 1710{
68252e27 1711#ifdef ENABLE_CHECKING
173b24b9 1712 /* If the new and old alias sets don't conflict, something is wrong. */
5b0264cb 1713 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
173b24b9
RK
1714#endif
1715
998d7deb 1716 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
10b76d73
RK
1717 MEM_SIZE (mem), MEM_ALIGN (mem),
1718 GET_MODE (mem));
173b24b9 1719}
738cc472 1720
d022d93e 1721/* Set the alignment of MEM to ALIGN bits. */
738cc472
RK
1722
1723void
502b8322 1724set_mem_align (rtx mem, unsigned int align)
738cc472 1725{
998d7deb 1726 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
10b76d73
RK
1727 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1728 GET_MODE (mem));
738cc472 1729}
1285011e 1730
998d7deb 1731/* Set the expr for MEM to EXPR. */
1285011e
RK
1732
1733void
502b8322 1734set_mem_expr (rtx mem, tree expr)
1285011e
RK
1735{
1736 MEM_ATTRS (mem)
998d7deb 1737 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1285011e
RK
1738 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1739}
998d7deb
RH
1740
1741/* Set the offset of MEM to OFFSET. */
1742
1743void
502b8322 1744set_mem_offset (rtx mem, rtx offset)
998d7deb
RH
1745{
1746 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1747 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1748 GET_MODE (mem));
35aff10b
AM
1749}
1750
1751/* Set the size of MEM to SIZE. */
1752
1753void
502b8322 1754set_mem_size (rtx mem, rtx size)
35aff10b
AM
1755{
1756 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1757 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1758 GET_MODE (mem));
998d7deb 1759}
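
/* Note on the set_mem_* accessors above: MEM_ATTRS structures are
   shared, so none of them modifies an attribute block in place; each
   rebuilds the whole tuple through get_mem_attrs, which is expected to
   hash-cons the result.  Updating two fields, e.g.

       set_mem_align (mem, 32);
       set_mem_size (mem, GEN_INT (4));

   therefore performs two separate lookups.  */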
173b24b9 1760\f
738cc472
RK
1761/* Return a memory reference like MEMREF, but with its mode changed to MODE
1762 and its address changed to ADDR. (VOIDmode means don't change the mode.
1763 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1764 returned memory location is required to be valid. The memory
1765 attributes are not changed. */
23b2ce53 1766
738cc472 1767static rtx
502b8322 1768change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
23b2ce53
RS
1769{
1770 rtx new;
1771
5b0264cb 1772 gcc_assert (MEM_P (memref));
23b2ce53
RS
1773 if (mode == VOIDmode)
1774 mode = GET_MODE (memref);
1775 if (addr == 0)
1776 addr = XEXP (memref, 0);
a74ff877
JH
1777 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1778 && (!validate || memory_address_p (mode, addr)))
1779 return memref;
23b2ce53 1780
f1ec5147 1781 if (validate)
23b2ce53 1782 {
f1ec5147 1783 if (reload_in_progress || reload_completed)
5b0264cb 1784 gcc_assert (memory_address_p (mode, addr));
f1ec5147
RK
1785 else
1786 addr = memory_address (mode, addr);
23b2ce53 1787 }
750c9258 1788
9b04c6a8
RK
1789 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1790 return memref;
1791
3b80f6ca 1792 new = gen_rtx_MEM (mode, addr);
c6df88cb 1793 MEM_COPY_ATTRIBUTES (new, memref);
23b2ce53
RS
1794 return new;
1795}
792760b9 1796
738cc472
RK
1797/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1798 way we are changing MEMREF, so we only preserve the alias set. */
f4ef873c
RK
1799
1800rtx
502b8322 1801change_address (rtx memref, enum machine_mode mode, rtx addr)
f4ef873c 1802{
4e44c1ef 1803 rtx new = change_address_1 (memref, mode, addr, 1), size;
738cc472 1804 enum machine_mode mmode = GET_MODE (new);
4e44c1ef
JJ
1805 unsigned int align;
1806
1807 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1808 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
c2f7bcc3 1809
fdb1c7b3
JH
1810 /* If there are no changes, just return the original memory reference. */
1811 if (new == memref)
4e44c1ef
JJ
1812 {
1813 if (MEM_ATTRS (memref) == 0
1814 || (MEM_EXPR (memref) == NULL
1815 && MEM_OFFSET (memref) == NULL
1816 && MEM_SIZE (memref) == size
1817 && MEM_ALIGN (memref) == align))
1818 return new;
1819
64fc7c00 1820 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
4e44c1ef
JJ
1821 MEM_COPY_ATTRIBUTES (new, memref);
1822 }
fdb1c7b3 1823
738cc472 1824 MEM_ATTRS (new)
4e44c1ef 1825 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
823e3574 1826
738cc472 1827 return new;
f4ef873c 1828}
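
/* Illustrative fragment, not from a real caller: forcing the address
   of MEM into a register while keeping the same mode could be written

       rtx new_mem = change_address (mem, VOIDmode, force_reg (Pmode, addr));

   Only the alias set survives; the expression and offset are dropped,
   and the size and alignment are recomputed from the mode, because the
   caller has not said how the reference changed.  */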
792760b9 1829
738cc472
RK
1830/* Return a memory reference like MEMREF, but with its mode changed
1831 to MODE and its address offset by OFFSET bytes. If VALIDATE is
630036c6
JJ
1832 nonzero, the memory address is forced to be valid.
1833 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
 1834   and the caller is responsible for adjusting the MEMREF base register.  */
f1ec5147
RK
1835
1836rtx
502b8322
AJ
1837adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1838 int validate, int adjust)
f1ec5147 1839{
823e3574 1840 rtx addr = XEXP (memref, 0);
738cc472
RK
1841 rtx new;
1842 rtx memoffset = MEM_OFFSET (memref);
10b76d73 1843 rtx size = 0;
738cc472 1844 unsigned int memalign = MEM_ALIGN (memref);
823e3574 1845
fdb1c7b3
JH
1846 /* If there are no changes, just return the original memory reference. */
1847 if (mode == GET_MODE (memref) && !offset
1848 && (!validate || memory_address_p (mode, addr)))
1849 return memref;
1850
d14419e4 1851 /* ??? Prefer to create garbage instead of creating shared rtl.
cc2902df 1852 This may happen even if offset is nonzero -- consider
d14419e4
RH
1853 (plus (plus reg reg) const_int) -- so do this always. */
1854 addr = copy_rtx (addr);
1855
4a78c787
RH
1856 if (adjust)
1857 {
1858 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1859 object, we can merge it into the LO_SUM. */
1860 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1861 && offset >= 0
1862 && (unsigned HOST_WIDE_INT) offset
1863 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1864 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1865 plus_constant (XEXP (addr, 1), offset));
1866 else
1867 addr = plus_constant (addr, offset);
1868 }
823e3574 1869
738cc472
RK
1870 new = change_address_1 (memref, mode, addr, validate);
1871
1872 /* Compute the new values of the memory attributes due to this adjustment.
1873 We add the offsets and update the alignment. */
1874 if (memoffset)
1875 memoffset = GEN_INT (offset + INTVAL (memoffset));
1876
03bf2c23
RK
1877 /* Compute the new alignment by taking the MIN of the alignment and the
1878 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
 1879     is zero.  */
1880 if (offset != 0)
3bf1e984
RK
1881 memalign
1882 = MIN (memalign,
1883 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
738cc472 1884
10b76d73 1885 /* We can compute the size in a number of ways. */
a06ef755
RK
1886 if (GET_MODE (new) != BLKmode)
1887 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
10b76d73
RK
1888 else if (MEM_SIZE (memref))
1889 size = plus_constant (MEM_SIZE (memref), -offset);
1890
998d7deb 1891 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
10b76d73 1892 memoffset, size, memalign, GET_MODE (new));
738cc472
RK
1893
1894 /* At some point, we should validate that this offset is within the object,
1895 if all the appropriate values are known. */
1896 return new;
f1ec5147
RK
1897}
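
/* Sketch of a typical use: accessing the second word of a DImode MEM
   as SImode normally goes through the adjust_address macro in expr.h,
   which wraps this function with VALIDATE and ADJUST nonzero:

       rtx hi = adjust_address (mem, SImode, 4);

   This offsets the address by 4 bytes, narrows the mode, and updates
   MEM_OFFSET, MEM_SIZE and MEM_ALIGN as described above.  */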
1898
630036c6
JJ
1899/* Return a memory reference like MEMREF, but with its mode changed
1900 to MODE and its address changed to ADDR, which is assumed to be
 1901   MEMREF offset by OFFSET bytes.  If VALIDATE is
1902 nonzero, the memory address is forced to be valid. */
1903
1904rtx
502b8322
AJ
1905adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1906 HOST_WIDE_INT offset, int validate)
630036c6
JJ
1907{
1908 memref = change_address_1 (memref, VOIDmode, addr, validate);
1909 return adjust_address_1 (memref, mode, offset, validate, 0);
1910}
1911
8ac61af7
RK
1912/* Return a memory reference like MEMREF, but whose address is changed by
1913 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1914 known to be in OFFSET (possibly 1). */
0d4903b8
RK
1915
1916rtx
502b8322 1917offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
0d4903b8 1918{
e3c8ea67
RH
1919 rtx new, addr = XEXP (memref, 0);
1920
1921 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1922
68252e27 1923 /* At this point we don't know _why_ the address is invalid. It
4d6922ee 1924 could have secondary memory references, multiplies or anything.
e3c8ea67
RH
1925
1926 However, if we did go and rearrange things, we can wind up not
1927 being able to recognize the magic around pic_offset_table_rtx.
1928 This stuff is fragile, and is yet another example of why it is
1929 bad to expose PIC machinery too early. */
1930 if (! memory_address_p (GET_MODE (memref), new)
1931 && GET_CODE (addr) == PLUS
1932 && XEXP (addr, 0) == pic_offset_table_rtx)
1933 {
1934 addr = force_reg (GET_MODE (addr), addr);
1935 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1936 }
1937
f6041ed8 1938 update_temp_slot_address (XEXP (memref, 0), new);
e3c8ea67 1939 new = change_address_1 (memref, VOIDmode, new, 1);
0d4903b8 1940
fdb1c7b3
JH
1941 /* If there are no changes, just return the original memory reference. */
1942 if (new == memref)
1943 return new;
1944
0d4903b8
RK
1945 /* Update the alignment to reflect the offset. Reset the offset, which
1946 we don't know. */
2cc2d4bb
RK
1947 MEM_ATTRS (new)
1948 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
9ceca302 1949 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2cc2d4bb 1950 GET_MODE (new));
0d4903b8
RK
1951 return new;
1952}
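
/* Hypothetical example: forming a variable-index element address where
   only a power-of-two factor of the index is known,

       rtx elt = offset_address (mem, index_rtx, 4);

   keeps MIN (MEM_ALIGN (mem), 32) bits of alignment and clears the
   offset, since the exact displacement is unknown at compile time.  */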
68252e27 1953
792760b9
RK
1954/* Return a memory reference like MEMREF, but with its address changed to
1955 ADDR. The caller is asserting that the actual piece of memory pointed
1956 to is the same, just the form of the address is being changed, such as
1957 by putting something into a register. */
1958
1959rtx
502b8322 1960replace_equiv_address (rtx memref, rtx addr)
792760b9 1961{
738cc472
RK
1962 /* change_address_1 copies the memory attribute structure without change
1963 and that's exactly what we want here. */
40c0668b 1964 update_temp_slot_address (XEXP (memref, 0), addr);
738cc472 1965 return change_address_1 (memref, VOIDmode, addr, 1);
792760b9 1966}
738cc472 1967
f1ec5147
RK
1968/* Likewise, but the reference is not required to be valid. */
1969
1970rtx
502b8322 1971replace_equiv_address_nv (rtx memref, rtx addr)
f1ec5147 1972{
f1ec5147
RK
1973 return change_address_1 (memref, VOIDmode, addr, 0);
1974}
e7dfe4bb
RH
1975
1976/* Return a memory reference like MEMREF, but with its mode widened to
1977 MODE and offset by OFFSET. This would be used by targets that e.g.
1978 cannot issue QImode memory operations and have to use SImode memory
1979 operations plus masking logic. */
1980
1981rtx
502b8322 1982widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
e7dfe4bb
RH
1983{
1984 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1985 tree expr = MEM_EXPR (new);
1986 rtx memoffset = MEM_OFFSET (new);
1987 unsigned int size = GET_MODE_SIZE (mode);
1988
fdb1c7b3
JH
1989 /* If there are no changes, just return the original memory reference. */
1990 if (new == memref)
1991 return new;
1992
e7dfe4bb
RH
1993 /* If we don't know what offset we were at within the expression, then
1994 we can't know if we've overstepped the bounds. */
fa1591cb 1995 if (! memoffset)
e7dfe4bb
RH
1996 expr = NULL_TREE;
1997
1998 while (expr)
1999 {
2000 if (TREE_CODE (expr) == COMPONENT_REF)
2001 {
2002 tree field = TREE_OPERAND (expr, 1);
44de5aeb 2003 tree offset = component_ref_field_offset (expr);
e7dfe4bb
RH
2004
2005 if (! DECL_SIZE_UNIT (field))
2006 {
2007 expr = NULL_TREE;
2008 break;
2009 }
2010
2011 /* Is the field at least as large as the access? If so, ok,
2012 otherwise strip back to the containing structure. */
03667700
RK
2013 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2014 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
e7dfe4bb
RH
2015 && INTVAL (memoffset) >= 0)
2016 break;
2017
44de5aeb 2018 if (! host_integerp (offset, 1))
e7dfe4bb
RH
2019 {
2020 expr = NULL_TREE;
2021 break;
2022 }
2023
2024 expr = TREE_OPERAND (expr, 0);
44de5aeb
RK
2025 memoffset
2026 = (GEN_INT (INTVAL (memoffset)
2027 + tree_low_cst (offset, 1)
2028 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2029 / BITS_PER_UNIT)));
e7dfe4bb
RH
2030 }
2031 /* Similarly for the decl. */
2032 else if (DECL_P (expr)
2033 && DECL_SIZE_UNIT (expr)
45f79783 2034 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
e7dfe4bb
RH
2035 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2036 && (! memoffset || INTVAL (memoffset) >= 0))
2037 break;
2038 else
2039 {
2040 /* The widened memory access overflows the expression, which means
2041 that it could alias another expression. Zap it. */
2042 expr = NULL_TREE;
2043 break;
2044 }
2045 }
2046
2047 if (! expr)
2048 memoffset = NULL_RTX;
2049
2050 /* The widened memory may alias other stuff, so zap the alias set. */
2051 /* ??? Maybe use get_alias_set on any remaining expression. */
2052
2053 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2054 MEM_ALIGN (new), mode);
2055
2056 return new;
2057}
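
/* Sketch under the assumptions stated above: a target without QImode
   loads might widen a byte reference and mask afterwards,

       rtx wide = widen_memory_access (mem, SImode, 0);

   relying on this function to clear the alias set, and to zap the
   expression and offset whenever the wider access could overstep the
   underlying object.  */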
23b2ce53
RS
2058\f
2059/* Return a newly created CODE_LABEL rtx with a unique label number. */
2060
2061rtx
502b8322 2062gen_label_rtx (void)
23b2ce53 2063{
0dc36574 2064 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
502b8322 2065 NULL, label_num++, NULL);
23b2ce53
RS
2066}
2067\f
2068/* For procedure integration. */
2069
23b2ce53 2070/* Install new pointers to the first and last insns in the chain.
86fe05e0 2071 Also, set cur_insn_uid to one higher than the last in use.
23b2ce53
RS
2072 Used for an inline-procedure after copying the insn chain. */
2073
2074void
502b8322 2075set_new_first_and_last_insn (rtx first, rtx last)
23b2ce53 2076{
86fe05e0
RK
2077 rtx insn;
2078
23b2ce53
RS
2079 first_insn = first;
2080 last_insn = last;
86fe05e0
RK
2081 cur_insn_uid = 0;
2082
2083 for (insn = first; insn; insn = NEXT_INSN (insn))
2084 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2085
2086 cur_insn_uid++;
23b2ce53 2087}
23b2ce53 2088\f
750c9258 2089/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779 2090 structure. This routine should only be called once. */
23b2ce53 2091
fd743bc1
PB
2092static void
2093unshare_all_rtl_1 (tree fndecl, rtx insn)
23b2ce53 2094{
d1b81779 2095 tree decl;
23b2ce53 2096
d1b81779
GK
2097 /* Make sure that virtual parameters are not shared. */
2098 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
19e7881c 2099 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
d1b81779 2100
5c6df058
AO
2101 /* Make sure that virtual stack slots are not shared. */
2102 unshare_all_decls (DECL_INITIAL (fndecl));
2103
d1b81779 2104 /* Unshare just about everything else. */
2c07f13b 2105 unshare_all_rtl_in_chain (insn);
750c9258 2106
23b2ce53
RS
2107 /* Make sure the addresses of stack slots found outside the insn chain
2108 (such as, in DECL_RTL of a variable) are not shared
2109 with the insn chain.
2110
2111 This special care is necessary when the stack slot MEM does not
2112 actually appear in the insn chain. If it does appear, its address
2113 is unshared from all else at that point. */
242b0ce6 2114 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
23b2ce53
RS
2115}
2116
750c9258 2117/* Go through all the RTL insn bodies and copy any invalid shared
d1b81779
GK
2118 structure, again. This is a fairly expensive thing to do so it
2119 should be done sparingly. */
2120
2121void
502b8322 2122unshare_all_rtl_again (rtx insn)
d1b81779
GK
2123{
2124 rtx p;
624c87aa
RE
2125 tree decl;
2126
d1b81779 2127 for (p = insn; p; p = NEXT_INSN (p))
2c3c49de 2128 if (INSN_P (p))
d1b81779
GK
2129 {
2130 reset_used_flags (PATTERN (p));
2131 reset_used_flags (REG_NOTES (p));
2132 reset_used_flags (LOG_LINKS (p));
2133 }
624c87aa 2134
2d4aecb3
AO
2135 /* Make sure that virtual stack slots are not shared. */
2136 reset_used_decls (DECL_INITIAL (cfun->decl));
2137
624c87aa
RE
2138 /* Make sure that virtual parameters are not shared. */
2139 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2140 reset_used_flags (DECL_RTL (decl));
2141
2142 reset_used_flags (stack_slot_list);
2143
fd743bc1
PB
2144 unshare_all_rtl_1 (cfun->decl, insn);
2145}
2146
c2924966 2147unsigned int
fd743bc1
PB
2148unshare_all_rtl (void)
2149{
2150 unshare_all_rtl_1 (current_function_decl, get_insns ());
c2924966 2151 return 0;
d1b81779
GK
2152}
2153
ef330312
PB
2154struct tree_opt_pass pass_unshare_all_rtl =
2155{
defb77dc 2156 "unshare", /* name */
ef330312
PB
2157 NULL, /* gate */
2158 unshare_all_rtl, /* execute */
2159 NULL, /* sub */
2160 NULL, /* next */
2161 0, /* static_pass_number */
2162 0, /* tv_id */
2163 0, /* properties_required */
2164 0, /* properties_provided */
2165 0, /* properties_destroyed */
2166 0, /* todo_flags_start */
defb77dc 2167 TODO_dump_func, /* todo_flags_finish */
ef330312
PB
2168 0 /* letter */
2169};
2170
2171
2c07f13b
JH
 2172/* Check that ORIG is not marked when it should not be and mark ORIG as in use.
 2173   Recursively does the same for subexpressions.  */
2174
2175static void
2176verify_rtx_sharing (rtx orig, rtx insn)
2177{
2178 rtx x = orig;
2179 int i;
2180 enum rtx_code code;
2181 const char *format_ptr;
2182
2183 if (x == 0)
2184 return;
2185
2186 code = GET_CODE (x);
2187
2188 /* These types may be freely shared. */
2189
2190 switch (code)
2191 {
2192 case REG:
2c07f13b
JH
2193 case CONST_INT:
2194 case CONST_DOUBLE:
2195 case CONST_VECTOR:
2196 case SYMBOL_REF:
2197 case LABEL_REF:
2198 case CODE_LABEL:
2199 case PC:
2200 case CC0:
2201 case SCRATCH:
2c07f13b 2202 return;
3e89ed8d
JH
 2203      /* SCRATCH must be shared because each one represents a distinct value.  */
2204 case CLOBBER:
2205 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2206 return;
2207 break;
2c07f13b
JH
2208
2209 case CONST:
2210 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2211 a LABEL_REF, it isn't sharable. */
2212 if (GET_CODE (XEXP (x, 0)) == PLUS
2213 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2214 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2215 return;
2216 break;
2217
2218 case MEM:
2219 /* A MEM is allowed to be shared if its address is constant. */
2220 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2221 || reload_completed || reload_in_progress)
2222 return;
2223
2224 break;
2225
2226 default:
2227 break;
2228 }
2229
2230 /* This rtx may not be shared. If it has already been seen,
2231 replace it with a copy of itself. */
1a2caa7a 2232#ifdef ENABLE_CHECKING
2c07f13b
JH
2233 if (RTX_FLAG (x, used))
2234 {
ab532386 2235 error ("invalid rtl sharing found in the insn");
2c07f13b 2236 debug_rtx (insn);
ab532386 2237 error ("shared rtx");
2c07f13b 2238 debug_rtx (x);
ab532386 2239 internal_error ("internal consistency failure");
2c07f13b 2240 }
1a2caa7a
NS
2241#endif
2242 gcc_assert (!RTX_FLAG (x, used));
2243
2c07f13b
JH
2244 RTX_FLAG (x, used) = 1;
2245
6614fd40 2246 /* Now scan the subexpressions recursively. */
2c07f13b
JH
2247
2248 format_ptr = GET_RTX_FORMAT (code);
2249
2250 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2251 {
2252 switch (*format_ptr++)
2253 {
2254 case 'e':
2255 verify_rtx_sharing (XEXP (x, i), insn);
2256 break;
2257
2258 case 'E':
2259 if (XVEC (x, i) != NULL)
2260 {
2261 int j;
2262 int len = XVECLEN (x, i);
2263
2264 for (j = 0; j < len; j++)
2265 {
1a2caa7a
NS
2266 /* We allow sharing of ASM_OPERANDS inside single
2267 instruction. */
2c07f13b 2268 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
1a2caa7a
NS
2269 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2270 == ASM_OPERANDS))
2c07f13b
JH
2271 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2272 else
2273 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2274 }
2275 }
2276 break;
2277 }
2278 }
2279 return;
2280}
2281
ba228239 2282/* Go through all the RTL insn bodies and check that there is no unexpected
2c07f13b
JH
 2283   sharing between the subexpressions.  */
2284
2285void
2286verify_rtl_sharing (void)
2287{
2288 rtx p;
2289
2290 for (p = get_insns (); p; p = NEXT_INSN (p))
2291 if (INSN_P (p))
2292 {
2293 reset_used_flags (PATTERN (p));
2294 reset_used_flags (REG_NOTES (p));
2295 reset_used_flags (LOG_LINKS (p));
2296 }
2297
2298 for (p = get_insns (); p; p = NEXT_INSN (p))
2299 if (INSN_P (p))
2300 {
2301 verify_rtx_sharing (PATTERN (p), p);
2302 verify_rtx_sharing (REG_NOTES (p), p);
2303 verify_rtx_sharing (LOG_LINKS (p), p);
2304 }
2305}
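
/* The two loops above are deliberate: the first clears every used flag
   so that stale marks from an earlier walk cannot cause false
   positives, and only then does the second mark and check sharing.  */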
2306
d1b81779
GK
2307/* Go through all the RTL insn bodies and copy any invalid shared structure.
2308 Assumes the mark bits are cleared at entry. */
2309
2c07f13b
JH
2310void
2311unshare_all_rtl_in_chain (rtx insn)
d1b81779
GK
2312{
2313 for (; insn; insn = NEXT_INSN (insn))
2c3c49de 2314 if (INSN_P (insn))
d1b81779
GK
2315 {
2316 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2317 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2318 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2319 }
2320}
2321
5c6df058
AO
2322/* Go through all virtual stack slots of a function and copy any
2323 shared structure. */
2324static void
502b8322 2325unshare_all_decls (tree blk)
5c6df058
AO
2326{
2327 tree t;
2328
2329 /* Copy shared decls. */
2330 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2331 if (DECL_RTL_SET_P (t))
2332 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
5c6df058
AO
2333
2334 /* Now process sub-blocks. */
2335 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2336 unshare_all_decls (t);
2337}
2338
2d4aecb3 2339/* Go through all virtual stack slots of a function and mark them as
30f7a378 2340 not shared. */
2d4aecb3 2341static void
502b8322 2342reset_used_decls (tree blk)
2d4aecb3
AO
2343{
2344 tree t;
2345
2346 /* Mark decls. */
2347 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
19e7881c
MM
2348 if (DECL_RTL_SET_P (t))
2349 reset_used_flags (DECL_RTL (t));
2d4aecb3
AO
2350
2351 /* Now process sub-blocks. */
2352 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2353 reset_used_decls (t);
2354}
2355
23b2ce53 2356/* Mark ORIG as in use, and return a copy of it if it was already in use.
ff954f39
AP
2357 Recursively does the same for subexpressions. Uses
2358 copy_rtx_if_shared_1 to reduce stack space. */
23b2ce53
RS
2359
2360rtx
502b8322 2361copy_rtx_if_shared (rtx orig)
23b2ce53 2362{
32b32b16
AP
2363 copy_rtx_if_shared_1 (&orig);
2364 return orig;
2365}
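
/* Protocol sketch (the calls are real, the fragment is illustrative):
   the used bits must be clear before the walk, so a caller pairs the
   two routines:

       reset_used_flags (PATTERN (insn));
       PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   unshare_all_rtl_again above is the canonical driver for this.  */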
2366
ff954f39
AP
2367/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2368 use. Recursively does the same for subexpressions. */
2369
32b32b16
AP
2370static void
2371copy_rtx_if_shared_1 (rtx *orig1)
2372{
2373 rtx x;
b3694847
SS
2374 int i;
2375 enum rtx_code code;
32b32b16 2376 rtx *last_ptr;
b3694847 2377 const char *format_ptr;
23b2ce53 2378 int copied = 0;
32b32b16
AP
2379 int length;
2380
2381 /* Repeat is used to turn tail-recursion into iteration. */
2382repeat:
2383 x = *orig1;
23b2ce53
RS
2384
2385 if (x == 0)
32b32b16 2386 return;
23b2ce53
RS
2387
2388 code = GET_CODE (x);
2389
2390 /* These types may be freely shared. */
2391
2392 switch (code)
2393 {
2394 case REG:
23b2ce53
RS
2395 case CONST_INT:
2396 case CONST_DOUBLE:
69ef87e2 2397 case CONST_VECTOR:
23b2ce53 2398 case SYMBOL_REF:
2c07f13b 2399 case LABEL_REF:
23b2ce53
RS
2400 case CODE_LABEL:
2401 case PC:
2402 case CC0:
2403 case SCRATCH:
0f41302f 2404      /* SCRATCH must be shared because each one represents a distinct value.  */
32b32b16 2405 return;
3e89ed8d
JH
2406 case CLOBBER:
2407 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2408 return;
2409 break;
23b2ce53 2410
b851ea09
RK
2411 case CONST:
2412 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2413 a LABEL_REF, it isn't sharable. */
2414 if (GET_CODE (XEXP (x, 0)) == PLUS
2415 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2416 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
32b32b16 2417 return;
b851ea09
RK
2418 break;
2419
23b2ce53
RS
2420 case INSN:
2421 case JUMP_INSN:
2422 case CALL_INSN:
2423 case NOTE:
23b2ce53
RS
2424 case BARRIER:
2425 /* The chain of insns is not being copied. */
32b32b16 2426 return;
23b2ce53 2427
e9a25f70
JL
2428 default:
2429 break;
23b2ce53
RS
2430 }
2431
2432 /* This rtx may not be shared. If it has already been seen,
2433 replace it with a copy of itself. */
2434
2adc7f12 2435 if (RTX_FLAG (x, used))
23b2ce53 2436 {
aacd3885 2437 x = shallow_copy_rtx (x);
23b2ce53
RS
2438 copied = 1;
2439 }
2adc7f12 2440 RTX_FLAG (x, used) = 1;
23b2ce53
RS
2441
2442 /* Now scan the subexpressions recursively.
2443 We can store any replaced subexpressions directly into X
2444 since we know X is not shared! Any vectors in X
2445 must be copied if X was copied. */
2446
2447 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2448 length = GET_RTX_LENGTH (code);
2449 last_ptr = NULL;
2450
2451 for (i = 0; i < length; i++)
23b2ce53
RS
2452 {
2453 switch (*format_ptr++)
2454 {
2455 case 'e':
32b32b16
AP
2456 if (last_ptr)
2457 copy_rtx_if_shared_1 (last_ptr);
2458 last_ptr = &XEXP (x, i);
23b2ce53
RS
2459 break;
2460
2461 case 'E':
2462 if (XVEC (x, i) != NULL)
2463 {
b3694847 2464 int j;
f0722107 2465 int len = XVECLEN (x, i);
32b32b16 2466
6614fd40
KH
2467 /* Copy the vector iff I copied the rtx and the length
2468 is nonzero. */
f0722107 2469 if (copied && len > 0)
8f985ec4 2470 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
32b32b16 2471
5d3cc252 2472	      /* Call recursively on everything inside the vector.  */
f0722107 2473 for (j = 0; j < len; j++)
32b32b16
AP
2474 {
2475 if (last_ptr)
2476 copy_rtx_if_shared_1 (last_ptr);
2477 last_ptr = &XVECEXP (x, i, j);
2478 }
23b2ce53
RS
2479 }
2480 break;
2481 }
2482 }
32b32b16
AP
2483 *orig1 = x;
2484 if (last_ptr)
2485 {
2486 orig1 = last_ptr;
2487 goto repeat;
2488 }
2489 return;
23b2ce53
RS
2490}
2491
2492/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2493 to look for shared sub-parts. */
2494
2495void
502b8322 2496reset_used_flags (rtx x)
23b2ce53 2497{
b3694847
SS
2498 int i, j;
2499 enum rtx_code code;
2500 const char *format_ptr;
32b32b16 2501 int length;
23b2ce53 2502
32b32b16
AP
2503 /* Repeat is used to turn tail-recursion into iteration. */
2504repeat:
23b2ce53
RS
2505 if (x == 0)
2506 return;
2507
2508 code = GET_CODE (x);
2509
9faa82d8 2510 /* These types may be freely shared so we needn't do any resetting
23b2ce53
RS
2511 for them. */
2512
2513 switch (code)
2514 {
2515 case REG:
23b2ce53
RS
2516 case CONST_INT:
2517 case CONST_DOUBLE:
69ef87e2 2518 case CONST_VECTOR:
23b2ce53
RS
2519 case SYMBOL_REF:
2520 case CODE_LABEL:
2521 case PC:
2522 case CC0:
2523 return;
2524
2525 case INSN:
2526 case JUMP_INSN:
2527 case CALL_INSN:
2528 case NOTE:
2529 case LABEL_REF:
2530 case BARRIER:
2531 /* The chain of insns is not being copied. */
2532 return;
750c9258 2533
e9a25f70
JL
2534 default:
2535 break;
23b2ce53
RS
2536 }
2537
2adc7f12 2538 RTX_FLAG (x, used) = 0;
23b2ce53
RS
2539
2540 format_ptr = GET_RTX_FORMAT (code);
32b32b16
AP
2541 length = GET_RTX_LENGTH (code);
2542
2543 for (i = 0; i < length; i++)
23b2ce53
RS
2544 {
2545 switch (*format_ptr++)
2546 {
2547 case 'e':
32b32b16
AP
2548 if (i == length-1)
2549 {
2550 x = XEXP (x, i);
2551 goto repeat;
2552 }
23b2ce53
RS
2553 reset_used_flags (XEXP (x, i));
2554 break;
2555
2556 case 'E':
2557 for (j = 0; j < XVECLEN (x, i); j++)
2558 reset_used_flags (XVECEXP (x, i, j));
2559 break;
2560 }
2561 }
2562}
2c07f13b
JH
2563
2564/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2565 to look for shared sub-parts. */
2566
2567void
2568set_used_flags (rtx x)
2569{
2570 int i, j;
2571 enum rtx_code code;
2572 const char *format_ptr;
2573
2574 if (x == 0)
2575 return;
2576
2577 code = GET_CODE (x);
2578
2579 /* These types may be freely shared so we needn't do any resetting
2580 for them. */
2581
2582 switch (code)
2583 {
2584 case REG:
2c07f13b
JH
2585 case CONST_INT:
2586 case CONST_DOUBLE:
2587 case CONST_VECTOR:
2588 case SYMBOL_REF:
2589 case CODE_LABEL:
2590 case PC:
2591 case CC0:
2592 return;
2593
2594 case INSN:
2595 case JUMP_INSN:
2596 case CALL_INSN:
2597 case NOTE:
2598 case LABEL_REF:
2599 case BARRIER:
2600 /* The chain of insns is not being copied. */
2601 return;
2602
2603 default:
2604 break;
2605 }
2606
2607 RTX_FLAG (x, used) = 1;
2608
2609 format_ptr = GET_RTX_FORMAT (code);
2610 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2611 {
2612 switch (*format_ptr++)
2613 {
2614 case 'e':
2615 set_used_flags (XEXP (x, i));
2616 break;
2617
2618 case 'E':
2619 for (j = 0; j < XVECLEN (x, i); j++)
2620 set_used_flags (XVECEXP (x, i, j));
2621 break;
2622 }
2623 }
2624}
23b2ce53
RS
2625\f
2626/* Copy X if necessary so that it won't be altered by changes in OTHER.
2627 Return X or the rtx for the pseudo reg the value of X was copied into.
2628 OTHER must be valid as a SET_DEST. */
2629
2630rtx
502b8322 2631make_safe_from (rtx x, rtx other)
23b2ce53
RS
2632{
2633 while (1)
2634 switch (GET_CODE (other))
2635 {
2636 case SUBREG:
2637 other = SUBREG_REG (other);
2638 break;
2639 case STRICT_LOW_PART:
2640 case SIGN_EXTEND:
2641 case ZERO_EXTEND:
2642 other = XEXP (other, 0);
2643 break;
2644 default:
2645 goto done;
2646 }
2647 done:
3c0cb5de 2648 if ((MEM_P (other)
23b2ce53 2649 && ! CONSTANT_P (x)
f8cfc6aa 2650 && !REG_P (x)
23b2ce53 2651 && GET_CODE (x) != SUBREG)
f8cfc6aa 2652 || (REG_P (other)
23b2ce53
RS
2653 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2654 || reg_mentioned_p (other, x))))
2655 {
2656 rtx temp = gen_reg_rtx (GET_MODE (x));
2657 emit_move_insn (temp, x);
2658 return temp;
2659 }
2660 return x;
2661}
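
/* Illustrative use (hypothetical): before emitting code that stores
   into OTHER while the source X may still be needed, a caller could
   write

       x = make_safe_from (x, other);

   which forces X into a fresh pseudo whenever a store into OTHER could
   clobber it.  */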
2662\f
2663/* Emission of insns (adding them to the doubly-linked list). */
2664
2665/* Return the first insn of the current sequence or current function. */
2666
2667rtx
502b8322 2668get_insns (void)
23b2ce53
RS
2669{
2670 return first_insn;
2671}
2672
3dec4024
JH
2673/* Specify a new insn as the first in the chain. */
2674
2675void
502b8322 2676set_first_insn (rtx insn)
3dec4024 2677{
5b0264cb 2678 gcc_assert (!PREV_INSN (insn));
3dec4024
JH
2679 first_insn = insn;
2680}
2681
23b2ce53
RS
2682/* Return the last insn emitted in current sequence or current function. */
2683
2684rtx
502b8322 2685get_last_insn (void)
23b2ce53
RS
2686{
2687 return last_insn;
2688}
2689
2690/* Specify a new insn as the last in the chain. */
2691
2692void
502b8322 2693set_last_insn (rtx insn)
23b2ce53 2694{
5b0264cb 2695 gcc_assert (!NEXT_INSN (insn));
23b2ce53
RS
2696 last_insn = insn;
2697}
2698
2699/* Return the last insn emitted, even if it is in a sequence now pushed. */
2700
2701rtx
502b8322 2702get_last_insn_anywhere (void)
23b2ce53
RS
2703{
2704 struct sequence_stack *stack;
2705 if (last_insn)
2706 return last_insn;
49ad7cfa 2707 for (stack = seq_stack; stack; stack = stack->next)
23b2ce53
RS
2708 if (stack->last != 0)
2709 return stack->last;
2710 return 0;
2711}
2712
2a496e8b
JDA
2713/* Return the first nonnote insn emitted in current sequence or current
2714 function. This routine looks inside SEQUENCEs. */
2715
2716rtx
502b8322 2717get_first_nonnote_insn (void)
2a496e8b 2718{
91373fe8
JDA
2719 rtx insn = first_insn;
2720
2721 if (insn)
2722 {
2723 if (NOTE_P (insn))
2724 for (insn = next_insn (insn);
2725 insn && NOTE_P (insn);
2726 insn = next_insn (insn))
2727 continue;
2728 else
2729 {
2ca202e7 2730 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
2731 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2732 insn = XVECEXP (PATTERN (insn), 0, 0);
2733 }
2734 }
2a496e8b
JDA
2735
2736 return insn;
2737}
2738
2739/* Return the last nonnote insn emitted in current sequence or current
2740 function. This routine looks inside SEQUENCEs. */
2741
2742rtx
502b8322 2743get_last_nonnote_insn (void)
2a496e8b 2744{
91373fe8
JDA
2745 rtx insn = last_insn;
2746
2747 if (insn)
2748 {
2749 if (NOTE_P (insn))
2750 for (insn = previous_insn (insn);
2751 insn && NOTE_P (insn);
2752 insn = previous_insn (insn))
2753 continue;
2754 else
2755 {
2ca202e7 2756 if (NONJUMP_INSN_P (insn)
91373fe8
JDA
2757 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2758 insn = XVECEXP (PATTERN (insn), 0,
2759 XVECLEN (PATTERN (insn), 0) - 1);
2760 }
2761 }
2a496e8b
JDA
2762
2763 return insn;
2764}
2765
23b2ce53
RS
2766/* Return a number larger than any instruction's uid in this function. */
2767
2768int
502b8322 2769get_max_uid (void)
23b2ce53
RS
2770{
2771 return cur_insn_uid;
2772}
aeeeda03 2773
673b5311
MM
2774/* Renumber instructions so that no instruction UIDs are wasted. */
2775
aeeeda03 2776void
10d22567 2777renumber_insns (void)
aeeeda03
MM
2778{
2779 rtx insn;
aeeeda03 2780
673b5311
MM
2781 /* If we're not supposed to renumber instructions, don't. */
2782 if (!flag_renumber_insns)
2783 return;
2784
aeeeda03
MM
2785 /* If there aren't that many instructions, then it's not really
2786 worth renumbering them. */
673b5311 2787 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
aeeeda03
MM
2788 return;
2789
2790 cur_insn_uid = 1;
2791
2792 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
673b5311 2793 {
10d22567
ZD
2794 if (dump_file)
2795 fprintf (dump_file, "Renumbering insn %d to %d\n",
673b5311
MM
2796 INSN_UID (insn), cur_insn_uid);
2797 INSN_UID (insn) = cur_insn_uid++;
2798 }
aeeeda03 2799}
23b2ce53
RS
2800\f
2801/* Return the next insn. If it is a SEQUENCE, return the first insn
2802 of the sequence. */
2803
2804rtx
502b8322 2805next_insn (rtx insn)
23b2ce53
RS
2806{
2807 if (insn)
2808 {
2809 insn = NEXT_INSN (insn);
4b4bf941 2810 if (insn && NONJUMP_INSN_P (insn)
23b2ce53
RS
2811 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2812 insn = XVECEXP (PATTERN (insn), 0, 0);
2813 }
2814
2815 return insn;
2816}
2817
2818/* Return the previous insn. If it is a SEQUENCE, return the last insn
2819 of the sequence. */
2820
2821rtx
502b8322 2822previous_insn (rtx insn)
23b2ce53
RS
2823{
2824 if (insn)
2825 {
2826 insn = PREV_INSN (insn);
4b4bf941 2827 if (insn && NONJUMP_INSN_P (insn)
23b2ce53
RS
2828 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2829 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2830 }
2831
2832 return insn;
2833}
2834
2835/* Return the next insn after INSN that is not a NOTE. This routine does not
2836 look inside SEQUENCEs. */
2837
2838rtx
502b8322 2839next_nonnote_insn (rtx insn)
23b2ce53
RS
2840{
2841 while (insn)
2842 {
2843 insn = NEXT_INSN (insn);
4b4bf941 2844 if (insn == 0 || !NOTE_P (insn))
23b2ce53
RS
2845 break;
2846 }
2847
2848 return insn;
2849}
2850
2851/* Return the previous insn before INSN that is not a NOTE. This routine does
2852 not look inside SEQUENCEs. */
2853
2854rtx
502b8322 2855prev_nonnote_insn (rtx insn)
23b2ce53
RS
2856{
2857 while (insn)
2858 {
2859 insn = PREV_INSN (insn);
4b4bf941 2860 if (insn == 0 || !NOTE_P (insn))
23b2ce53
RS
2861 break;
2862 }
2863
2864 return insn;
2865}
2866
2867/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2868 or 0, if there is none. This routine does not look inside
0f41302f 2869 SEQUENCEs. */
23b2ce53
RS
2870
2871rtx
502b8322 2872next_real_insn (rtx insn)
23b2ce53
RS
2873{
2874 while (insn)
2875 {
2876 insn = NEXT_INSN (insn);
bb8a619e 2877 if (insn == 0 || INSN_P (insn))
23b2ce53
RS
2878 break;
2879 }
2880
2881 return insn;
2882}
2883
2884/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2885 or 0, if there is none. This routine does not look inside
2886 SEQUENCEs. */
2887
2888rtx
502b8322 2889prev_real_insn (rtx insn)
23b2ce53
RS
2890{
2891 while (insn)
2892 {
2893 insn = PREV_INSN (insn);
bb8a619e 2894 if (insn == 0 || INSN_P (insn))
23b2ce53
RS
2895 break;
2896 }
2897
2898 return insn;
2899}
2900
ee960939
OH
2901/* Return the last CALL_INSN in the current list, or 0 if there is none.
2902 This routine does not look inside SEQUENCEs. */
2903
2904rtx
502b8322 2905last_call_insn (void)
ee960939
OH
2906{
2907 rtx insn;
2908
2909 for (insn = get_last_insn ();
4b4bf941 2910 insn && !CALL_P (insn);
ee960939
OH
2911 insn = PREV_INSN (insn))
2912 ;
2913
2914 return insn;
2915}
2916
23b2ce53
RS
2917/* Find the next insn after INSN that really does something. This routine
2918 does not look inside SEQUENCEs. Until reload has completed, this is the
2919 same as next_real_insn. */
2920
69732dcb 2921int
502b8322 2922active_insn_p (rtx insn)
69732dcb 2923{
4b4bf941
JQ
2924 return (CALL_P (insn) || JUMP_P (insn)
2925 || (NONJUMP_INSN_P (insn)
23b8ba81
RH
2926 && (! reload_completed
2927 || (GET_CODE (PATTERN (insn)) != USE
2928 && GET_CODE (PATTERN (insn)) != CLOBBER))));
69732dcb
RH
2929}
2930
23b2ce53 2931rtx
502b8322 2932next_active_insn (rtx insn)
23b2ce53
RS
2933{
2934 while (insn)
2935 {
2936 insn = NEXT_INSN (insn);
69732dcb 2937 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2938 break;
2939 }
2940
2941 return insn;
2942}
2943
2944/* Find the last insn before INSN that really does something. This routine
2945 does not look inside SEQUENCEs. Until reload has completed, this is the
2946 same as prev_real_insn. */
2947
2948rtx
502b8322 2949prev_active_insn (rtx insn)
23b2ce53
RS
2950{
2951 while (insn)
2952 {
2953 insn = PREV_INSN (insn);
69732dcb 2954 if (insn == 0 || active_insn_p (insn))
23b2ce53
RS
2955 break;
2956 }
2957
2958 return insn;
2959}
2960
2961/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2962
2963rtx
502b8322 2964next_label (rtx insn)
23b2ce53
RS
2965{
2966 while (insn)
2967 {
2968 insn = NEXT_INSN (insn);
4b4bf941 2969 if (insn == 0 || LABEL_P (insn))
23b2ce53
RS
2970 break;
2971 }
2972
2973 return insn;
2974}
2975
2976/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2977
2978rtx
502b8322 2979prev_label (rtx insn)
23b2ce53
RS
2980{
2981 while (insn)
2982 {
2983 insn = PREV_INSN (insn);
4b4bf941 2984 if (insn == 0 || LABEL_P (insn))
23b2ce53
RS
2985 break;
2986 }
2987
2988 return insn;
2989}
6c2511d3
RS
2990
2991/* Return the last label to mark the same position as LABEL. Return null
2992 if LABEL itself is null. */
2993
2994rtx
2995skip_consecutive_labels (rtx label)
2996{
2997 rtx insn;
2998
2999 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3000 if (LABEL_P (insn))
3001 label = insn;
3002
3003 return label;
3004}
23b2ce53
RS
3005\f
3006#ifdef HAVE_cc0
c572e5ba
JVA
3007/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3008 and REG_CC_USER notes so we can find it. */
3009
3010void
502b8322 3011link_cc0_insns (rtx insn)
c572e5ba
JVA
3012{
3013 rtx user = next_nonnote_insn (insn);
3014
4b4bf941 3015 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
c572e5ba
JVA
3016 user = XVECEXP (PATTERN (user), 0, 0);
3017
c5c76735
JL
3018 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3019 REG_NOTES (user));
3b80f6ca 3020 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
c572e5ba
JVA
3021}
3022
23b2ce53
RS
3023/* Return the next insn that uses CC0 after INSN, which is assumed to
3024 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3025 applied to the result of this function should yield INSN).
3026
3027 Normally, this is simply the next insn. However, if a REG_CC_USER note
3028 is present, it contains the insn that uses CC0.
3029
3030 Return 0 if we can't find the insn. */
3031
3032rtx
502b8322 3033next_cc0_user (rtx insn)
23b2ce53 3034{
906c4e36 3035 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
23b2ce53
RS
3036
3037 if (note)
3038 return XEXP (note, 0);
3039
3040 insn = next_nonnote_insn (insn);
4b4bf941 3041 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
23b2ce53
RS
3042 insn = XVECEXP (PATTERN (insn), 0, 0);
3043
2c3c49de 3044 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
23b2ce53
RS
3045 return insn;
3046
3047 return 0;
3048}
3049
3050/* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3051 note, it is the previous insn. */
3052
3053rtx
502b8322 3054prev_cc0_setter (rtx insn)
23b2ce53 3055{
906c4e36 3056 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
23b2ce53
RS
3057
3058 if (note)
3059 return XEXP (note, 0);
3060
3061 insn = prev_nonnote_insn (insn);
5b0264cb 3062 gcc_assert (sets_cc0_p (PATTERN (insn)));
23b2ce53
RS
3063
3064 return insn;
3065}
3066#endif
e5bef2e4
HB
3067
3068/* Increment the label uses for all labels present in rtx. */
3069
3070static void
502b8322 3071mark_label_nuses (rtx x)
e5bef2e4 3072{
b3694847
SS
3073 enum rtx_code code;
3074 int i, j;
3075 const char *fmt;
e5bef2e4
HB
3076
3077 code = GET_CODE (x);
7537fc90 3078 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
e5bef2e4
HB
3079 LABEL_NUSES (XEXP (x, 0))++;
3080
3081 fmt = GET_RTX_FORMAT (code);
3082 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3083 {
3084 if (fmt[i] == 'e')
0fb7aeda 3085 mark_label_nuses (XEXP (x, i));
e5bef2e4 3086 else if (fmt[i] == 'E')
0fb7aeda 3087 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
e5bef2e4
HB
3088 mark_label_nuses (XVECEXP (x, i, j));
3089 }
3090}
3091
23b2ce53
RS
3092\f
3093/* Try splitting insns that can be split for better scheduling.
3094 PAT is the pattern which might split.
3095 TRIAL is the insn providing PAT.
cc2902df 3096 LAST is nonzero if we should return the last insn of the sequence produced.
23b2ce53
RS
3097
3098 If this routine succeeds in splitting, it returns the first or last
11147ebe 3099 replacement insn depending on the value of LAST. Otherwise, it
23b2ce53
RS
3100 returns TRIAL. If the insn to be returned can be split, it will be. */
3101
3102rtx
502b8322 3103try_split (rtx pat, rtx trial, int last)
23b2ce53
RS
3104{
3105 rtx before = PREV_INSN (trial);
3106 rtx after = NEXT_INSN (trial);
23b2ce53
RS
3107 int has_barrier = 0;
3108 rtx tem;
6b24c259
JH
3109 rtx note, seq;
3110 int probability;
599aedd9
RH
3111 rtx insn_last, insn;
3112 int njumps = 0;
6b24c259
JH
3113
3114 if (any_condjump_p (trial)
3115 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3116 split_branch_probability = INTVAL (XEXP (note, 0));
3117 probability = split_branch_probability;
3118
3119 seq = split_insns (pat, trial);
3120
3121 split_branch_probability = -1;
23b2ce53
RS
3122
3123 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3124 We may need to handle this specially. */
4b4bf941 3125 if (after && BARRIER_P (after))
23b2ce53
RS
3126 {
3127 has_barrier = 1;
3128 after = NEXT_INSN (after);
3129 }
3130
599aedd9
RH
3131 if (!seq)
3132 return trial;
3133
3134 /* Avoid infinite loop if any insn of the result matches
3135 the original pattern. */
3136 insn_last = seq;
3137 while (1)
23b2ce53 3138 {
599aedd9
RH
3139 if (INSN_P (insn_last)
3140 && rtx_equal_p (PATTERN (insn_last), pat))
3141 return trial;
3142 if (!NEXT_INSN (insn_last))
3143 break;
3144 insn_last = NEXT_INSN (insn_last);
3145 }
750c9258 3146
599aedd9
RH
3147 /* Mark labels. */
3148 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3149 {
4b4bf941 3150 if (JUMP_P (insn))
599aedd9
RH
3151 {
3152 mark_jump_label (PATTERN (insn), insn, 0);
3153 njumps++;
3154 if (probability != -1
3155 && any_condjump_p (insn)
3156 && !find_reg_note (insn, REG_BR_PROB, 0))
2f937369 3157 {
599aedd9
RH
3158 /* We can preserve the REG_BR_PROB notes only if exactly
3159 one jump is created, otherwise the machine description
3160 is responsible for this step using
3161 split_branch_probability variable. */
5b0264cb 3162 gcc_assert (njumps == 1);
599aedd9
RH
3163 REG_NOTES (insn)
3164 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3165 GEN_INT (probability),
3166 REG_NOTES (insn));
2f937369 3167 }
599aedd9
RH
3168 }
3169 }
3170
3171 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3172 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
4b4bf941 3173 if (CALL_P (trial))
599aedd9
RH
3174 {
3175 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
4b4bf941 3176 if (CALL_P (insn))
599aedd9 3177 {
f6a1f3f6
RH
3178 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3179 while (*p)
3180 p = &XEXP (*p, 1);
3181 *p = CALL_INSN_FUNCTION_USAGE (trial);
599aedd9
RH
3182 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3183 }
3184 }
4b5e8abe 3185
599aedd9
RH
3186 /* Copy notes, particularly those related to the CFG. */
3187 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3188 {
3189 switch (REG_NOTE_KIND (note))
3190 {
3191 case REG_EH_REGION:
2f937369
DM
3192 insn = insn_last;
3193 while (insn != NULL_RTX)
3194 {
4b4bf941 3195 if (CALL_P (insn)
d3a583b1 3196 || (flag_non_call_exceptions && INSN_P (insn)
599aedd9
RH
3197 && may_trap_p (PATTERN (insn))))
3198 REG_NOTES (insn)
3199 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3200 XEXP (note, 0),
3201 REG_NOTES (insn));
2f937369
DM
3202 insn = PREV_INSN (insn);
3203 }
599aedd9 3204 break;
216183ce 3205
599aedd9
RH
3206 case REG_NORETURN:
3207 case REG_SETJMP:
599aedd9
RH
3208 insn = insn_last;
3209 while (insn != NULL_RTX)
216183ce 3210 {
4b4bf941 3211 if (CALL_P (insn))
599aedd9
RH
3212 REG_NOTES (insn)
3213 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3214 XEXP (note, 0),
3215 REG_NOTES (insn));
3216 insn = PREV_INSN (insn);
216183ce 3217 }
599aedd9 3218 break;
d6e95df8 3219
599aedd9
RH
3220 case REG_NON_LOCAL_GOTO:
3221 insn = insn_last;
3222 while (insn != NULL_RTX)
2f937369 3223 {
4b4bf941 3224 if (JUMP_P (insn))
599aedd9
RH
3225 REG_NOTES (insn)
3226 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3227 XEXP (note, 0),
3228 REG_NOTES (insn));
3229 insn = PREV_INSN (insn);
2f937369 3230 }
599aedd9 3231 break;
e5bef2e4 3232
599aedd9
RH
3233 default:
3234 break;
23b2ce53 3235 }
599aedd9
RH
3236 }
3237
3238 /* If there are LABELS inside the split insns increment the
3239 usage count so we don't delete the label. */
4b4bf941 3240 if (NONJUMP_INSN_P (trial))
599aedd9
RH
3241 {
3242 insn = insn_last;
3243 while (insn != NULL_RTX)
23b2ce53 3244 {
4b4bf941 3245 if (NONJUMP_INSN_P (insn))
599aedd9 3246 mark_label_nuses (PATTERN (insn));
23b2ce53 3247
599aedd9
RH
3248 insn = PREV_INSN (insn);
3249 }
23b2ce53
RS
3250 }
3251
0435312e 3252 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
599aedd9
RH
3253
3254 delete_insn (trial);
3255 if (has_barrier)
3256 emit_barrier_after (tem);
3257
3258 /* Recursively call try_split for each new insn created; by the
3259 time control returns here that insn will be fully split, so
3260 set LAST and continue from the insn after the one returned.
3261 We can't use next_active_insn here since AFTER may be a note.
 3262     Ignore deleted insns, which can occur if not optimizing.  */
3263 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3264 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3265 tem = try_split (PATTERN (tem), tem, 1);
3266
3267 /* Return either the first or the last insn, depending on which was
3268 requested. */
3269 return last
3270 ? (after ? PREV_INSN (after) : last_insn)
3271 : NEXT_INSN (before);
23b2ce53
RS
3272}
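
/* A hedged sketch of a typical call, where TRIAL names an existing
   insn:

       rtx first = try_split (PATTERN (trial), trial, 0);

   This returns TRIAL itself when the machine description produces no
   split, and otherwise the first insn of the replacement sequence.  */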
3273\f
3274/* Make and return an INSN rtx, initializing all its slots.
4b1f5e8c 3275   Store PATTERN in the pattern slot.  */
23b2ce53
RS
3276
3277rtx
502b8322 3278make_insn_raw (rtx pattern)
23b2ce53 3279{
b3694847 3280 rtx insn;
23b2ce53 3281
1f8f4a0b 3282 insn = rtx_alloc (INSN);
23b2ce53 3283
43127294 3284 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3285 PATTERN (insn) = pattern;
3286 INSN_CODE (insn) = -1;
1632afca
RS
3287 LOG_LINKS (insn) = NULL;
3288 REG_NOTES (insn) = NULL;
0435312e 3289 INSN_LOCATOR (insn) = 0;
ba4f7968 3290 BLOCK_FOR_INSN (insn) = NULL;
23b2ce53 3291
47984720
NC
3292#ifdef ENABLE_RTL_CHECKING
3293 if (insn
2c3c49de 3294 && INSN_P (insn)
47984720
NC
3295 && (returnjump_p (insn)
3296 || (GET_CODE (insn) == SET
3297 && SET_DEST (insn) == pc_rtx)))
3298 {
d4ee4d25 3299 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
47984720
NC
3300 debug_rtx (insn);
3301 }
3302#endif
750c9258 3303
23b2ce53
RS
3304 return insn;
3305}
3306
2f937369 3307/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
23b2ce53 3308
38109dab 3309rtx
502b8322 3310make_jump_insn_raw (rtx pattern)
23b2ce53 3311{
b3694847 3312 rtx insn;
23b2ce53 3313
4b1f5e8c 3314 insn = rtx_alloc (JUMP_INSN);
1632afca 3315 INSN_UID (insn) = cur_insn_uid++;
23b2ce53
RS
3316
3317 PATTERN (insn) = pattern;
3318 INSN_CODE (insn) = -1;
1632afca
RS
3319 LOG_LINKS (insn) = NULL;
3320 REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = 0;
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  LOG_LINKS (insn) = NULL;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = 0;
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}

/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after)
{
  rtx next = NEXT_INSN (after);
  basic_block bb;

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        bb->flags |= BB_DIRTY;
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && (!NOTE_P (insn)
              || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_before (rtx insn, rtx before)
{
  rtx prev = PREV_INSN (before);
  basic_block bb;

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (before)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (before)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        bb->flags |= BB_DIRTY;
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || (NOTE_P (insn)
                      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}

/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
        bb->flags |= BB_DIRTY;
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
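
/* A usage sketch added for illustration; it is not part of the original
   source.  To record that a call uses a particular register, a caller
   (REG here stands for some register rtx it already holds) can append a
   one-element fusage list:

     rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                     gen_rtx_USE (VOIDmode, reg),
                                     NULL_RTX);
     add_function_usage_to (call_insn, fusage);

   This is the idiom used by the call expanders, which build up
   CALL_INSN_FUNCTION_USAGE one EXPR_LIST node at a time.  */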

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      bb->flags |= BB_DIRTY;

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          bb2->flags |= BB_DIRTY;
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          set_block_for_insn (x, bb);
    }
}

/* Return the line note insn preceding INSN.  */

static rtx
find_line_note (rtx insn)
{
  if (no_line_numbers)
    return 0;

  for (; insn; insn = PREV_INSN (insn))
    if (NOTE_P (insn)
        && NOTE_LINE_NUMBER (insn) >= 0)
      break;

  return insn;
}

/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (int subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
#ifndef USE_MAPPED_LOCATION
  NOTE_SOURCE_FILE (note) = 0;
#endif
  NOTE_LINE_NUMBER (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;

  add_insn_before (note, before);
  return note;
}

/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx emit_insn_after_1 (rtx, rtx);

static rtx
emit_insn_after_1 (rtx first, rtx after)
{
  rtx last;
  rtx after_after;
  basic_block bb;

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      bb->flags |= BB_DIRTY;
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          set_block_for_insn (last, bb);
      if (!BARRIER_P (last))
        set_block_for_insn (last, bb);
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;
  return last;
}

/* Make X be output after the insn AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after)
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}

/* Similar to emit_insn_after, except that line notes are to be inserted so
   as to act as if this insn were at FROM.  */

void
emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
{
  rtx from_line = find_line_note (from);
  rtx after_line = find_line_note (after);
  rtx insn = emit_insn_after (x, after);

  if (from_line)
    emit_note_copy_after (from_line, after);

  if (after_line)
    emit_note_copy_after (after_line, insn);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (int subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
#ifndef USE_MAPPED_LOCATION
  NOTE_SOURCE_FILE (note) = 0;
#endif
  NOTE_LINE_NUMBER (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  add_insn_after (note, after);
  return note;
}

/* Emit a copy of note ORIG after the insn AFTER.  */

rtx
emit_note_copy_after (rtx orig, rtx after)
{
  rtx note;

  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
    {
      cur_insn_uid++;
      return 0;
    }

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
  NOTE_DATA (note) = NOTE_DATA (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn_after (note, after);
  return note;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_insn_after_noloc (pattern, after);
}
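
/* A usage sketch added for illustration; it is not part of the original
   source.  To give newly emitted code the source location of an existing
   insn, pass that insn's locator to the _setloc variant explicitly
   (ANCHOR and PAT are placeholders):

     rtx new_insn = emit_insn_after_setloc (pat, anchor,
                                            INSN_LOCATOR (anchor));

   emit_insn_after above does exactly this whenever AFTER is a real insn;
   the *_noloc variants leave INSN_LOCATOR at 0.  */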

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX || !loc)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_insn_before_noloc (pattern, before);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Make a line-numbering NOTE insn for LOCATION and add it to the end
   of the doubly-linked list, but only if line-numbers are desired for
   debugging info and it doesn't match the previous one.  */

rtx
emit_line_note (location_t location)
{
  rtx note;

#ifdef USE_MAPPED_LOCATION
  if (location == last_location)
    return NULL_RTX;
#else
  if (location.file && last_location.file
      && !strcmp (location.file, last_location.file)
      && location.line == last_location.line)
    return NULL_RTX;
#endif
  last_location = location;

  if (no_line_numbers)
    {
      cur_insn_uid++;
      return NULL_RTX;
    }

#ifdef USE_MAPPED_LOCATION
  note = emit_note ((int) location);
#else
  note = emit_note (location.line);
  NOTE_SOURCE_FILE (note) = location.file;
#endif

  return note;
}
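
/* A usage sketch added for illustration; it is not part of the original
   source.  The expander typically emits a line note for the current
   statement like so, letting the checks above suppress duplicates:

     emit_line_note (input_location);

   Here input_location is assumed to be the global source location
   maintained by the front ends.  */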

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
    {
      cur_insn_uid++;
      return NULL_RTX;
    }

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (int note_no)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_LINE_NUMBER (note) = note_no;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}
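
/* A usage sketch added for illustration; it is not part of the original
   source.  Marker notes are emitted by passing the note kind directly,
   e.g. to record that an insn was logically removed without unlinking
   it from the chain:

     rtx note = emit_note (NOTE_INSN_DELETED);

   NOTE_LINE_NUMBER then holds the (negative) note kind rather than a
   source line.  */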

/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
#ifdef USE_MAPPED_LOCATION
  last_location = -1;
#else
  last_location.line = -1;
#endif
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    {
      XEXP (note, 0) = datum;
      return note;
    }

  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
  return REG_NOTES (insn);
}
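
/* A usage sketch added for illustration; it is not part of the original
   source.  A pass that knows the value computed by INSN can record it
   for later simplification, replacing any stale note in one call (SRC
   is a placeholder for the known value):

     set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));

   Using this instead of adding a second note keeps at most one note of
   each kind per insn.  */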

/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}
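
/* Examples added for illustration; they are not part of the original
   source.  A pattern such as

     (set (pc) (label_ref 23))

   classifies as JUMP_INSN because its SET_DEST is pc_rtx, while

     (set (reg:SI 60) (call (mem:QI (symbol_ref "foo")) (const_int 0)))

   classifies as CALL_INSN because its SET_SRC is a CALL.  */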

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    default:
      gcc_unreachable ();
    }
}

/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc (sizeof (struct sequence_stack));

  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  first_insn = first;
  last_insn = last;
}
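
/* A usage sketch added for illustration; it is not part of the original
   source.  Appending to a chain that was built earlier (SAVED is a
   placeholder for an insn list from a previous start_sequence/get_insns/
   end_sequence round):

     push_to_sequence (saved);
     emit_insn (pat);        ... appended to the end of SAVED's chain ...
     saved = get_insns ();
     end_sequence ();
*/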
4710
f15ae3a1
TW
4711/* Set up the outer-level insn chain
4712 as the current sequence, saving the previously current one. */
4713
4714void
502b8322 4715push_topmost_sequence (void)
f15ae3a1 4716{
aefdd5ab 4717 struct sequence_stack *stack, *top = NULL;
f15ae3a1
TW
4718
4719 start_sequence ();
4720
49ad7cfa 4721 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4722 top = stack;
4723
4724 first_insn = top->first;
4725 last_insn = top->last;
4726}
4727
4728/* After emitting to the outer-level insn chain, update the outer-level
4729 insn chain, and restore the previous saved state. */
4730
4731void
502b8322 4732pop_topmost_sequence (void)
f15ae3a1 4733{
aefdd5ab 4734 struct sequence_stack *stack, *top = NULL;
f15ae3a1 4735
49ad7cfa 4736 for (stack = seq_stack; stack; stack = stack->next)
f15ae3a1
TW
4737 top = stack;
4738
4739 top->first = first_insn;
4740 top->last = last_insn;
4741
4742 end_sequence ();
4743}
4744
23b2ce53
RS
4745/* After emitting to a sequence, restore previous saved state.
4746
5c7a310f 4747 To get the contents of the sequence just made, you must call
2f937369 4748 `get_insns' *before* calling here.
5c7a310f
MM
4749
4750 If the compiler might have deferred popping arguments while
4751 generating this sequence, and this sequence will not be immediately
4752 inserted into the instruction stream, use do_pending_stack_adjust
2f937369 4753 before calling get_insns. That will ensure that the deferred
5c7a310f
MM
4754 pops are inserted into this sequence, and not into some random
4755 location in the instruction stream. See INHIBIT_DEFER_POP for more
4756 information about deferred popping of arguments. */
23b2ce53
RS
4757
4758void
502b8322 4759end_sequence (void)
23b2ce53 4760{
49ad7cfa 4761 struct sequence_stack *tem = seq_stack;
23b2ce53
RS
4762
4763 first_insn = tem->first;
4764 last_insn = tem->last;
49ad7cfa 4765 seq_stack = tem->next;
23b2ce53 4766
e2500fed
GK
4767 memset (tem, 0, sizeof (*tem));
4768 tem->next = free_sequence_stack;
4769 free_sequence_stack = tem;
23b2ce53
RS
4770}
4771
4772/* Return 1 if currently emitting into a sequence. */
4773
4774int
502b8322 4775in_sequence_p (void)
23b2ce53 4776{
49ad7cfa 4777 return seq_stack != 0;
23b2ce53 4778}
23b2ce53 4779\f
59ec66dc
MM
4780/* Put the various virtual registers into REGNO_REG_RTX. */
4781
2bbdec73 4782static void
502b8322 4783init_virtual_regs (struct emit_status *es)
59ec66dc 4784{
49ad7cfa
BS
4785 rtx *ptr = es->x_regno_reg_rtx;
4786 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4787 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4788 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4789 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4790 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4791}
4792
da43a810
BS
4793\f
4794/* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4795static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4796static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4797static int copy_insn_n_scratches;
4798
4799/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4800 copied an ASM_OPERANDS.
4801 In that case, it is the original input-operand vector. */
4802static rtvec orig_asm_operands_vector;
4803
4804/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4805 copied an ASM_OPERANDS.
4806 In that case, it is the copied input-operand vector. */
4807static rtvec copy_asm_operands_vector;
4808
4809/* Likewise for the constraints vector. */
4810static rtvec orig_asm_constraints_vector;
4811static rtvec copy_asm_constraints_vector;
4812
4813/* Recursively create a new copy of an rtx for copy_insn.
4814 This function differs from copy_rtx in that it handles SCRATCHes and
4815 ASM_OPERANDs properly.
4816 Normally, this function is not used directly; use copy_insn as front end.
4817 However, you could first copy an insn pattern with copy_insn and then use
4818 this function afterwards to properly copy any REG_NOTEs containing
4819 SCRATCHes. */
4820
4821rtx
502b8322 4822copy_insn_1 (rtx orig)
da43a810 4823{
b3694847
SS
4824 rtx copy;
4825 int i, j;
4826 RTX_CODE code;
4827 const char *format_ptr;
da43a810
BS
4828
4829 code = GET_CODE (orig);
4830
4831 switch (code)
4832 {
4833 case REG:
da43a810
BS
4834 case CONST_INT:
4835 case CONST_DOUBLE:
69ef87e2 4836 case CONST_VECTOR:
da43a810
BS
4837 case SYMBOL_REF:
4838 case CODE_LABEL:
4839 case PC:
4840 case CC0:
da43a810 4841 return orig;
3e89ed8d
JH
4842 case CLOBBER:
4843 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4844 return orig;
4845 break;
da43a810
BS
4846
4847 case SCRATCH:
4848 for (i = 0; i < copy_insn_n_scratches; i++)
4849 if (copy_insn_scratch_in[i] == orig)
4850 return copy_insn_scratch_out[i];
4851 break;
4852
4853 case CONST:
4854 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4855 a LABEL_REF, it isn't sharable. */
4856 if (GET_CODE (XEXP (orig, 0)) == PLUS
4857 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4858 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4859 return orig;
4860 break;
750c9258 4861
da43a810
BS
4862 /* A MEM with a constant address is not sharable. The problem is that
4863 the constant address may need to be reloaded. If the mem is shared,
4864 then reloading one copy of this mem will cause all copies to appear
4865 to have been reloaded. */
4866
4867 default:
4868 break;
4869 }
4870
aacd3885
RS
4871 /* Copy the various flags, fields, and other information. We assume
4872 that all fields need copying, and then clear the fields that should
da43a810
BS
4873 not be copied. That is the sensible default behavior, and forces
4874 us to explicitly document why we are *not* copying a flag. */
aacd3885 4875 copy = shallow_copy_rtx (orig);
da43a810
BS
4876
4877 /* We do not copy the USED flag, which is used as a mark bit during
4878 walks over the RTL. */
2adc7f12 4879 RTX_FLAG (copy, used) = 0;
da43a810
BS
4880
4881 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
ec8e098d 4882 if (INSN_P (orig))
da43a810 4883 {
2adc7f12
JJ
4884 RTX_FLAG (copy, jump) = 0;
4885 RTX_FLAG (copy, call) = 0;
4886 RTX_FLAG (copy, frame_related) = 0;
da43a810 4887 }
750c9258 4888
da43a810
BS
4889 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4890
4891 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
aacd3885
RS
4892 switch (*format_ptr++)
4893 {
4894 case 'e':
4895 if (XEXP (orig, i) != NULL)
4896 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4897 break;
da43a810 4898
aacd3885
RS
4899 case 'E':
4900 case 'V':
4901 if (XVEC (orig, i) == orig_asm_constraints_vector)
4902 XVEC (copy, i) = copy_asm_constraints_vector;
4903 else if (XVEC (orig, i) == orig_asm_operands_vector)
4904 XVEC (copy, i) = copy_asm_operands_vector;
4905 else if (XVEC (orig, i) != NULL)
4906 {
4907 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4908 for (j = 0; j < XVECLEN (copy, i); j++)
4909 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4910 }
4911 break;
da43a810 4912
aacd3885
RS
4913 case 't':
4914 case 'w':
4915 case 'i':
4916 case 's':
4917 case 'S':
4918 case 'u':
4919 case '0':
4920 /* These are left unchanged. */
4921 break;
da43a810 4922
aacd3885
RS
4923 default:
4924 gcc_unreachable ();
4925 }
da43a810
BS
4926
4927 if (code == SCRATCH)
4928 {
4929 i = copy_insn_n_scratches++;
5b0264cb 4930 gcc_assert (i < MAX_RECOG_OPERANDS);
da43a810
BS
4931 copy_insn_scratch_in[i] = orig;
4932 copy_insn_scratch_out[i] = copy;
4933 }
4934 else if (code == ASM_OPERANDS)
4935 {
6462bb43
AO
4936 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4937 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4938 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4939 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
da43a810
BS
4940 }
4941
4942 return copy;
4943}
4944
4945/* Create a new copy of an rtx.
4946 This function differs from copy_rtx in that it handles SCRATCHes and
4947 ASM_OPERANDs properly.
4948 INSN doesn't really have to be a full INSN; it could be just the
4949 pattern. */
4950rtx
502b8322 4951copy_insn (rtx insn)
da43a810
BS
4952{
4953 copy_insn_n_scratches = 0;
4954 orig_asm_operands_vector = 0;
4955 orig_asm_constraints_vector = 0;
4956 copy_asm_operands_vector = 0;
4957 copy_asm_constraints_vector = 0;
4958 return copy_insn_1 (insn);
4959}
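
/* A usage sketch added for illustration; it is not part of the original
   source.  Duplicating an insn's pattern before emitting the copy
   elsewhere keeps SCRATCHes shared within the copy but distinct from
   the original:

     rtx new_pat = copy_insn (PATTERN (insn));
     emit_insn_after (new_pat, after);

   emit_copy_of_insn_after below wraps exactly this idiom and also
   copies the notes.  */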

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  struct function *f = cfun;

  f->emit = ggc_alloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
                         * sizeof (unsigned char));

  regno_reg_rtx
    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          static_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
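
/* A usage sketch added for illustration; it is not part of the original
   source.  Building an all-zero constant through this entry point
   collapses to the shared CONST0_RTX object instead of allocating a
   fresh vector (V4SImode is assumed to be available on the target):

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     x = gen_rtx_CONST_VECTOR (V4SImode, v);   ... == CONST0_RTX (V4SImode)
*/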

/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_once ();

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
                                          HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);

  /* Initialize mathematical constants for constant folding builtins.
     These constants need to be given to at least 160 bits precision.  */
  real_from_string (&dconstpi,
    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
        if (GET_CODE (link) == EXPR_LIST)
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
        else
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
        p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
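
/* A usage sketch added for illustration; it is not part of the original
   source.  Basic-block duplication code can clone a whole run of insns
   with this, e.g.

     for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
          insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         after = emit_copy_of_insn_after (insn, after);

   each copy getting a fresh UID while preserving the pattern, notes,
   and locator.  */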

static GTY((deletable)) rtx hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
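
/* A usage sketch added for illustration; it is not part of the original
   source.  A backend that needs a clobber of a fixed hard register in
   many patterns can share one rtx; on a hypothetical target whose flags
   register is hard register 17:

     rtx clob = gen_hard_reg_clobber (CCmode, 17);

   Repeated calls with the same mode and register return the same cached
   object, so such clobbers can be compared with pointer equality.  */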

#include "gt-emit-rtl.h"