/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same rtx.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash	PARAMS ((const void *));
static int const_int_htab_eq		PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash	PARAMS ((const void *));
static int mem_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static hashval_t reg_attrs_htab_hash	PARAMS ((const void *));
static int reg_attrs_htab_eq		PARAMS ((const void *,
						 const void *));
static reg_attrs *get_reg_attrs		PARAMS ((tree, int));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx) x, b = (rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (align == BITS_PER_UNIT
	  || (STRICT_ALIGNMENT
	      && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
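
/* Illustrative sketch (not from the original source): because identical
   attribute sets share one hashed structure, and because small CONST_INTs
   are themselves shared, two MEMs with the same alias set, offset, size
   and alignment end up pointing at the same mem_attrs.  The values below
   are hypothetical.  */
#if 0
static void
example_mem_attrs_sharing ()
{
  mem_attrs *a = get_mem_attrs (1, NULL_TREE, GEN_INT (4), GEN_INT (8),
				32, BLKmode);
  mem_attrs *b = get_mem_attrs (1, NULL_TREE, GEN_INT (4), GEN_INT (8),
				32, BLKmode);
  /* Identical attributes hash to the same slot, so A == B here; fully
     default attributes would have returned 0 instead.  */
  if (a != b)
    abort ();
}
#endif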

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (x)
     const void *x;
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (decl, offset)
     tree decl;
     int offset;
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
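
/* Worked example (not from the original source): gen_int_mode first
   truncates and sign-extends C to MODE, so in QImode the unsigned byte
   0xff and the signed value -1 both map to the shared (const_int -1):

     gen_int_mode (0xff, QImode)  ==  GEN_INT (-1)

   whereas a bare GEN_INT (0xff) would produce (const_int 255), which is
   not the canonical QImode form.  */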

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
	  /* We can get a 0 for an error mark.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
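
/* Worked example (not from the original source): on a host with
   HOST_BITS_PER_WIDE_INT == 32, the DImode value 2^32 + 5 needs both
   words, so

     immed_double_const (5, 1, DImode)

   yields a VOIDmode CONST_DOUBLE with low word 5 and high word 1, while
   immed_double_const (5, 0, DImode) fits in one word and simply returns
   GEN_INT (5).  */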

rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
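
/* Worked example (not from the original source): for a DImode pseudo on
   a 32-bit target, gen_lowpart_SUBREG (SImode, reg) yields

     (subreg:SI (reg:DI N) 0)   if !WORDS_BIG_ENDIAN and !BYTES_BIG_ENDIAN
     (subreg:SI (reg:DI N) 4)   if both are set,

   since subreg_lowpart_offset picks the byte offset of the low-order
   word in target memory order.  */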
\f
/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.  The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.  The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.  For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**	    ...would be generated by the following C code:
**
**	      gen_rtx (PLUS, QImode,
**		  gen_rtx (MEM, QImode,
**		      gen_rtx (REG, SImode, 1)),
**		  gen_rtx (MEM, QImode,
**		      gen_rtx (PLUS, SImode,
**			  gen_rtx (REG, SImode, 2),
**			  gen_rtx (REG, SImode, 3))))
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer?  */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':		/* A bitmap?  */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':		/* A tree?  */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...  */

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...  */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
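
/* Illustrative sketch (not from the original source): gen_rtvec is the
   usual way to fill the vector operand of an 'E'-format rtx such as a
   PARALLEL.  The helper below is hypothetical.  */
#if 0
static rtx
example_parallel (dst0, src0, dst1, src1)
     rtx dst0, src0, dst1, src1;
{
  /* Build (parallel [(set dst0 src0) (set dst1 src1)]).  */
  return gen_rtx_PARALLEL (VOIDmode,
			   gen_rtvec (2,
				      gen_rtx_SET (VOIDmode, dst0, src0),
				      gen_rtx_SET (VOIDmode, dst1, src1)));
}
#endif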
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
				  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
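
/* Illustrative sketch (not from the original source): a scalar mode
   yields a fresh pseudo numbered upward from FIRST_PSEUDO_REGISTER; a
   complex mode (when generating_concat_p is set) yields a CONCAT of two
   independent part-mode pseudos instead.  */
#if 0
static void
example_gen_reg_rtx ()
{
  rtx r = gen_reg_rtx (SImode);	 /* (reg:SI N), N >= FIRST_PSEUDO_REGISTER */
  rtx c = gen_reg_rtx (DCmode);	 /* (concat:DC (reg:DF N+1) (reg:DF N+2)) */
}
#endif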

/* Generate a register with the same attributes as REG, but with
   OFFSET added to the known offset within the underlying decl.  */

rtx
gen_rtx_REG_offset (reg, mode, regno, offset)
     rtx reg;
     enum machine_mode mode;
     unsigned int regno;
     int offset;
{
  rtx new = gen_rtx_REG (mode, regno);
  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the REG attributes for REG from the memory attributes of MEM.  */

void
set_reg_attrs_from_mem (reg, mem)
     rtx reg;
     rtx mem;
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (parm_rtx, mem)
     rtx parm_rtx;
     rtx mem;
{
  if (GET_CODE (parm_rtx) == REG)
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (GET_CODE (XEXP (x, 0)) == REG)
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (t, x)
     tree t;
     rtx x;
{
  DECL_CHECK (t)->decl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (GET_CODE (x) == REG)
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
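
/* Illustrative sketch (not from the original source): once set_decl_rtl
   has associated a pseudo with a decl, the reverse mapping is visible
   through REG_EXPR and REG_OFFSET.  */
#if 0
static void
example_set_decl_rtl (decl)
     tree decl;
{
  rtx r = gen_reg_rtx (SImode);
  set_decl_rtl (decl, r);
  /* The REG now carries its decl and a zero offset.  */
  if (REG_EXPR (r) != decl || REG_OFFSET (r) != 0)
    abort ();
}
#endif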

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}
\f
/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();

  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	   && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32 bits and double-precision
     floats are always 64 bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
	 order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}
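
/* Worked example (not from the original source): taking the QImode low
   part of the constant 0x1ff keeps the low 8 bits and sign-extends, so

     gen_lowpart_common (QImode, GEN_INT (0x1ff))

   returns (const_int -1), while the same call on (reg:SI N) returns
   (subreg:QI (reg:SI N) 0) on a little-endian target (byte offset 3 on
   a big-endian one).  */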
\f
/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}
\f
/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;

      /* The following exposes the use of "x" to CSE.  */
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && ! no_new_pseudos)
	return gen_lowpart (mode, force_reg (GET_MODE (x), x));

      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
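
/* Worked example (not from the original source): with 4-byte words and
   a DImode inner value (difference = 8 - 4 = 4), the SImode parts sit at

			lowpart offset	highpart offset
     little-endian	      0		       4
     big-endian		      4		       0

   which is exactly what the two functions above compute.  */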

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
\f

/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT.  A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
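
/* Illustrative sketch (not from the original source): extracting the
   high-order word of a DImode operand.  The helper is hypothetical.  */
#if 0
static rtx
example_high_word (op)
     rtx op;
{
  /* Word 1 is the high-order word when !WORDS_BIG_ENDIAN: for a MEM
     this forms a word_mode MEM at byte offset UNITS_PER_WORD, and for
     a pseudo a (subreg (reg:DI N)) at the same byte offset.  */
  return operand_subword (op, 1, 1, DImode);
}
#endif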

/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}
\f
/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}
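
/* Worked example (not from the original source): reverse_comparison
   rewrites

     (set (reg:CC flags) (compare:CC (reg:SI a) (reg:SI b)))
   into
     (set (reg:CC flags) (compare:CC (reg:SI b) (reg:SI a)))

   and wraps a plain test such as (set (reg:CC flags) (reg:SI a)) into
   (set (reg:CC flags) (compare (const_int 0) (reg:SI a))).  */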
\f
/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (ref)
     tree ref;
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      tree placeholder_ptr = 0;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
	     || TREE_CODE (inner) == NON_LVALUE_EXPR
	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	     || TREE_CODE (inner) == SAVE_EXPR
	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	  inner = find_placeholder (inner, &placeholder_ptr);
	else
	  inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
	inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build (COMPONENT_REF, TREE_TYPE (ref), inner,
		  TREE_OPERAND (ref, 1));
}
1841
1842 /* Given REF, a MEM, and T, either the type of REF or the expression
1843 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1844 if we are making a new object of this type. BITPOS is the bit offset
1845 outstanding on T, to be applied later. */
1846
1847 void
1848 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1849 rtx ref;
1850 tree t;
1851 int objectp;
1852 HOST_WIDE_INT bitpos;
1853 {
1854 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1855 tree expr = MEM_EXPR (ref);
1856 rtx offset = MEM_OFFSET (ref);
1857 rtx size = MEM_SIZE (ref);
1858 unsigned int align = MEM_ALIGN (ref);
1859 HOST_WIDE_INT apply_bitpos = 0;
1860 tree type;
1861
1862 /* It can happen that type_for_mode was given a mode for which there
1863 is no language-level type; in that case it returns NULL, which we
1864 can see here. */
1865 if (t == NULL_TREE)
1866 return;
1867
1868 type = TYPE_P (t) ? t : TREE_TYPE (t);
1869
1870 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1871 wrong answer, as it assumes that DECL_RTL already has the right alias
1872 info. Callers should not set DECL_RTL until after the call to
1873 set_mem_attributes. */
1874 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1875 abort ();
1876
1877 /* Get the alias set from the expression or type (perhaps using a
1878 front-end routine) and use it. */
1879 alias = get_alias_set (t);
1880
1881 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1882 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1883 RTX_UNCHANGING_P (ref)
1884 |= ((lang_hooks.honor_readonly
1885 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1886 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1887
1888 /* If we are making an object of this type, or if this is a DECL, we know
1889 that it is a scalar if the type is not an aggregate. */
1890 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1891 MEM_SCALAR_P (ref) = 1;
1892
1893 /* We can set the alignment from the type if we are making an object,
1894 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1895 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1896 align = MAX (align, TYPE_ALIGN (type));
1897
1898 /* If the size is known, we can set that. */
1899 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1900 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1901
1902 /* If T is not a type, we may be able to deduce some more information about
1903 the expression. */
1904 if (! TYPE_P (t))
1905 {
1906 maybe_set_unchanging (ref, t);
1907 if (TREE_THIS_VOLATILE (t))
1908 MEM_VOLATILE_P (ref) = 1;
1909
1910 /* Now remove any conversions: they don't change what the underlying
1911 object is. Likewise for SAVE_EXPR. */
1912 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1913 || TREE_CODE (t) == NON_LVALUE_EXPR
1914 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1915 || TREE_CODE (t) == SAVE_EXPR)
1916 t = TREE_OPERAND (t, 0);
1917
1918 /* If this expression can't be addressed (e.g., it contains a reference
1919 to a non-addressable field), show we don't change its alias set. */
1920 if (! can_address_p (t))
1921 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1922
1923 /* If this is a decl, set the attributes of the MEM from it. */
1924 if (DECL_P (t))
1925 {
1926 expr = t;
1927 offset = const0_rtx;
1928 apply_bitpos = bitpos;
1929 size = (DECL_SIZE_UNIT (t)
1930 && host_integerp (DECL_SIZE_UNIT (t), 1)
1931 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1932 align = DECL_ALIGN (t);
1933 }
1934
1935 /* If this is a constant, we know the alignment. */
1936 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1937 {
1938 align = TYPE_ALIGN (type);
1939 #ifdef CONSTANT_ALIGNMENT
1940 align = CONSTANT_ALIGNMENT (t, align);
1941 #endif
1942 }
1943
1944 /* If this is a field reference and not a bit-field, record it. */
1945 /* ??? There is some information that can be gleaned from bit-fields,
1946 such as the word offset in the structure that might be modified.
1947 But skip it for now. */
1948 else if (TREE_CODE (t) == COMPONENT_REF
1949 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1950 {
1951 expr = component_ref_for_mem_expr (t);
1952 offset = const0_rtx;
1953 apply_bitpos = bitpos;
1954 /* ??? Any reason the field size would be different than
1955 the size we got from the type? */
1956 }
1957
1958 /* If this is an array reference, look for an outer field reference. */
1959 else if (TREE_CODE (t) == ARRAY_REF)
1960 {
1961 tree off_tree = size_zero_node;
1962
1963 do
1964 {
1965 tree index = TREE_OPERAND (t, 1);
1966 tree array = TREE_OPERAND (t, 0);
1967 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1968 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1969 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1970
1971 /* We assume all arrays have sizes that are a multiple of a byte.
1972 First subtract the lower bound, if any, in the type of the
1973 index, then convert to sizetype and multiply by the size of the
1974 array element. */
1975 if (low_bound != 0 && ! integer_zerop (low_bound))
1976 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1977 index, low_bound));
1978
1979 /* If the index has a self-referential type, pass it to a
1980 WITH_RECORD_EXPR; if the component size is self-referential,
1981 pass our component to one. */
1982 if (! TREE_CONSTANT (index)
1983 && contains_placeholder_p (index))
1984 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t);
1985 if (! TREE_CONSTANT (unit_size)
1986 && contains_placeholder_p (unit_size))
1987 unit_size = build (WITH_RECORD_EXPR, sizetype,
1988 unit_size, array);
1989
1990 off_tree
1991 = fold (build (PLUS_EXPR, sizetype,
1992 fold (build (MULT_EXPR, sizetype,
1993 index,
1994 unit_size)),
1995 off_tree));
1996 t = TREE_OPERAND (t, 0);
1997 }
1998 while (TREE_CODE (t) == ARRAY_REF);
1999
2000 if (DECL_P (t))
2001 {
2002 expr = t;
2003 offset = NULL;
2004 if (host_integerp (off_tree, 1))
2005 {
2006 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
2007 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
2008 align = DECL_ALIGN (t);
2009 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
2010 align = aoff;
2011 offset = GEN_INT (ioff);
2012 apply_bitpos = bitpos;
2013 }
2014 }
2015 else if (TREE_CODE (t) == COMPONENT_REF)
2016 {
2017 expr = component_ref_for_mem_expr (t);
2018 if (host_integerp (off_tree, 1))
2019 {
2020 offset = GEN_INT (tree_low_cst (off_tree, 1));
2021 apply_bitpos = bitpos;
2022 }
2023 /* ??? Any reason the field size would be different than
2024 the size we got from the type? */
2025 }
2026 else if (flag_argument_noalias > 1
2027 && TREE_CODE (t) == INDIRECT_REF
2028 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2029 {
2030 expr = t;
2031 offset = NULL;
2032 }
2033 }
2034
2035 /* If this is a Fortran indirect argument reference, record the
2036 parameter decl. */
2037 else if (flag_argument_noalias > 1
2038 && TREE_CODE (t) == INDIRECT_REF
2039 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
2040 {
2041 expr = t;
2042 offset = NULL;
2043 }
2044 }
2045
2046 /* If we modified OFFSET based on T, then subtract the outstanding
2047 bit position offset. Similarly, increase the size of the accessed
2048 object to contain the negative offset. */
2049 if (apply_bitpos)
2050 {
2051 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
2052 if (size)
2053 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
2054 }
2055
2056 /* Now set the attributes we computed above. */
2057 MEM_ATTRS (ref)
2058 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
2059
2060 /* If this is already known to be a scalar or aggregate, we are done. */
2061 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
2062 return;
2063
2064 /* If it is a reference into an aggregate, this is part of an aggregate.
2065 Otherwise we don't know. */
2066 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
2067 || TREE_CODE (t) == ARRAY_RANGE_REF
2068 || TREE_CODE (t) == BIT_FIELD_REF)
2069 MEM_IN_STRUCT_P (ref) = 1;
2070 }
2071
2072 void
2073 set_mem_attributes (ref, t, objectp)
2074 rtx ref;
2075 tree t;
2076 int objectp;
2077 {
2078 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
2079 }
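
/* Example (editor's sketch): a caller that has just created RTL for a
   declaration would typically do

     set_mem_attributes (x, decl, 1);

   passing OBJECTP == 1 because X is a new object of DECL's type, which
   lets the type's alignment and size be applied to the MEM.  */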
2080
2081 /* Set the decl for MEM to DECL. */
2082
2083 void
2084 set_mem_attrs_from_reg (mem, reg)
2085 rtx mem;
2086 rtx reg;
2087 {
2088 MEM_ATTRS (mem)
2089 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
2090 GEN_INT (REG_OFFSET (reg)),
2091 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2092 }
2093
2094 /* Set the alias set of MEM to SET. */
2095
2096 void
2097 set_mem_alias_set (mem, set)
2098 rtx mem;
2099 HOST_WIDE_INT set;
2100 {
2101 #ifdef ENABLE_CHECKING
2102 /* If the new and old alias sets don't conflict, something is wrong. */
2103 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
2104 abort ();
2105 #endif
2106
2107 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
2108 MEM_SIZE (mem), MEM_ALIGN (mem),
2109 GET_MODE (mem));
2110 }
2111
2112 /* Set the alignment of MEM to ALIGN bits. */
2113
2114 void
2115 set_mem_align (mem, align)
2116 rtx mem;
2117 unsigned int align;
2118 {
2119 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2120 MEM_OFFSET (mem), MEM_SIZE (mem), align,
2121 GET_MODE (mem));
2122 }
2123
2124 /* Set the expr for MEM to EXPR. */
2125
2126 void
2127 set_mem_expr (mem, expr)
2128 rtx mem;
2129 tree expr;
2130 {
2131 MEM_ATTRS (mem)
2132 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
2133 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
2134 }
2135
2136 /* Set the offset of MEM to OFFSET. */
2137
2138 void
2139 set_mem_offset (mem, offset)
2140 rtx mem, offset;
2141 {
2142 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2143 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
2144 GET_MODE (mem));
2145 }
2146
2147 /* Set the size of MEM to SIZE. */
2148
2149 void
2150 set_mem_size (mem, size)
2151 rtx mem, size;
2152 {
2153 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
2154 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
2155 GET_MODE (mem));
2156 }
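
/* Example (editor's sketch): each setter above rebuilds MEM_ATTRS via
   get_mem_attrs, changing one attribute and preserving the rest:

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, 32);            -- alignment, in bits
     set_mem_size (mem, GEN_INT (4));    -- size in bytes, as an rtx
     set_mem_alias_set (mem, new_alias_set ());

   ADDR and the particular values are illustrative only.  */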
2157 \f
2158 /* Return a memory reference like MEMREF, but with its mode changed to MODE
2159 and its address changed to ADDR. (VOIDmode means don't change the mode.
2160 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
2161 returned memory location is required to be valid. The memory
2162 attributes are not changed. */
2163
2164 static rtx
2165 change_address_1 (memref, mode, addr, validate)
2166 rtx memref;
2167 enum machine_mode mode;
2168 rtx addr;
2169 int validate;
2170 {
2171 rtx new;
2172
2173 if (GET_CODE (memref) != MEM)
2174 abort ();
2175 if (mode == VOIDmode)
2176 mode = GET_MODE (memref);
2177 if (addr == 0)
2178 addr = XEXP (memref, 0);
2179
2180 if (validate)
2181 {
2182 if (reload_in_progress || reload_completed)
2183 {
2184 if (! memory_address_p (mode, addr))
2185 abort ();
2186 }
2187 else
2188 addr = memory_address (mode, addr);
2189 }
2190
2191 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2192 return memref;
2193
2194 new = gen_rtx_MEM (mode, addr);
2195 MEM_COPY_ATTRIBUTES (new, memref);
2196 return new;
2197 }
2198
2199 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2200 way we are changing MEMREF, so we only preserve the alias set. */
2201
2202 rtx
2203 change_address (memref, mode, addr)
2204 rtx memref;
2205 enum machine_mode mode;
2206 rtx addr;
2207 {
2208 rtx new = change_address_1 (memref, mode, addr, 1);
2209 enum machine_mode mmode = GET_MODE (new);
2210
2211 MEM_ATTRS (new)
2212 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2213 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2214 (mmode == BLKmode ? BITS_PER_UNIT
2215 : GET_MODE_ALIGNMENT (mmode)),
2216 mmode);
2217
2218 return new;
2219 }
2220
2221 /* Return a memory reference like MEMREF, but with its mode changed
2222 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2223 nonzero, the memory address is forced to be valid.
2224 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS and the
2225 caller is responsible for adjusting the MEMREF base register. */
2226
2227 rtx
2228 adjust_address_1 (memref, mode, offset, validate, adjust)
2229 rtx memref;
2230 enum machine_mode mode;
2231 HOST_WIDE_INT offset;
2232 int validate, adjust;
2233 {
2234 rtx addr = XEXP (memref, 0);
2235 rtx new;
2236 rtx memoffset = MEM_OFFSET (memref);
2237 rtx size = 0;
2238 unsigned int memalign = MEM_ALIGN (memref);
2239
2240 /* ??? Prefer to create garbage instead of creating shared rtl.
2241 This may happen even if offset is nonzero -- consider
2242 (plus (plus reg reg) const_int) -- so do this always. */
2243 addr = copy_rtx (addr);
2244
2245 if (adjust)
2246 {
2247 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2248 object, we can merge it into the LO_SUM. */
2249 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2250 && offset >= 0
2251 && (unsigned HOST_WIDE_INT) offset
2252 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2253 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2254 plus_constant (XEXP (addr, 1), offset));
2255 else
2256 addr = plus_constant (addr, offset);
2257 }
2258
2259 new = change_address_1 (memref, mode, addr, validate);
2260
2261 /* Compute the new values of the memory attributes due to this adjustment.
2262 We add the offsets and update the alignment. */
2263 if (memoffset)
2264 memoffset = GEN_INT (offset + INTVAL (memoffset));
2265
2266 /* Compute the new alignment by taking the MIN of the alignment and the
2267 lowest-order set bit in OFFSET (e.g. OFFSET == 12 gives 4-byte, i.e.
2268 32-bit, alignment, since 12 & -12 == 4); don't change it if OFFSET is zero. */
2269 if (offset != 0)
2270 memalign
2271 = MIN (memalign,
2272 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2273
2274 /* We can compute the size in a number of ways. */
2275 if (GET_MODE (new) != BLKmode)
2276 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2277 else if (MEM_SIZE (memref))
2278 size = plus_constant (MEM_SIZE (memref), -offset);
2279
2280 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2281 memoffset, size, memalign, GET_MODE (new));
2282
2283 /* At some point, we should validate that this offset is within the object,
2284 if all the appropriate values are known. */
2285 return new;
2286 }
2287
2288 /* Return a memory reference like MEMREF, but with its mode changed
2289 to MODE and its address changed to ADDR, which is assumed to be
2290 MEMREF's address offset by OFFSET bytes. If VALIDATE is
2291 nonzero, the memory address is forced to be valid. */
2292
2293 rtx
2294 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2295 rtx memref;
2296 enum machine_mode mode;
2297 rtx addr;
2298 HOST_WIDE_INT offset;
2299 int validate;
2300 {
2301 memref = change_address_1 (memref, VOIDmode, addr, validate);
2302 return adjust_address_1 (memref, mode, offset, validate, 0);
2303 }
2304
2305 /* Return a memory reference like MEMREF, but whose address is changed by
2306 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2307 known to be in OFFSET (possibly 1). */
2308
2309 rtx
2310 offset_address (memref, offset, pow2)
2311 rtx memref;
2312 rtx offset;
2313 HOST_WIDE_INT pow2;
2314 {
2315 rtx new, addr = XEXP (memref, 0);
2316
2317 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2318
2319 /* At this point we don't know _why_ the address is invalid. It
2320 could have secondary memory references, multiplies or anything.
2321
2322 However, if we did go and rearrange things, we can wind up not
2323 being able to recognize the magic around pic_offset_table_rtx.
2324 This stuff is fragile, and is yet another example of why it is
2325 bad to expose PIC machinery too early. */
2326 if (! memory_address_p (GET_MODE (memref), new)
2327 && GET_CODE (addr) == PLUS
2328 && XEXP (addr, 0) == pic_offset_table_rtx)
2329 {
2330 addr = force_reg (GET_MODE (addr), addr);
2331 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2332 }
2333
2334 update_temp_slot_address (XEXP (memref, 0), new);
2335 new = change_address_1 (memref, VOIDmode, new, 1);
2336
2337 /* Update the alignment to reflect the offset. Reset the offset, which
2338 we don't know. */
2339 MEM_ATTRS (new)
2340 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2341 MIN (MEM_ALIGN (memref),
2342 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2343 GET_MODE (new));
2344 return new;
2345 }
2346
2347 /* Return a memory reference like MEMREF, but with its address changed to
2348 ADDR. The caller is asserting that the actual piece of memory pointed
2349 to is the same, just the form of the address is being changed, such as
2350 by putting something into a register. */
2351
2352 rtx
2353 replace_equiv_address (memref, addr)
2354 rtx memref;
2355 rtx addr;
2356 {
2357 /* change_address_1 copies the memory attribute structure without change
2358 and that's exactly what we want here. */
2359 update_temp_slot_address (XEXP (memref, 0), addr);
2360 return change_address_1 (memref, VOIDmode, addr, 1);
2361 }
2362
2363 /* Likewise, but the reference is not required to be valid. */
2364
2365 rtx
2366 replace_equiv_address_nv (memref, addr)
2367 rtx memref;
2368 rtx addr;
2369 {
2370 return change_address_1 (memref, VOIDmode, addr, 0);
2371 }
2372
2373 /* Return a memory reference like MEMREF, but with its mode widened to
2374 MODE and offset by OFFSET. This would be used by targets that e.g.
2375 cannot issue QImode memory operations and have to use SImode memory
2376 operations plus masking logic. */
2377
2378 rtx
2379 widen_memory_access (memref, mode, offset)
2380 rtx memref;
2381 enum machine_mode mode;
2382 HOST_WIDE_INT offset;
2383 {
2384 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2385 tree expr = MEM_EXPR (new);
2386 rtx memoffset = MEM_OFFSET (new);
2387 unsigned int size = GET_MODE_SIZE (mode);
2388
2389 /* If we don't know what offset we were at within the expression, then
2390 we can't know if we've overstepped the bounds. */
2391 if (! memoffset)
2392 expr = NULL_TREE;
2393
2394 while (expr)
2395 {
2396 if (TREE_CODE (expr) == COMPONENT_REF)
2397 {
2398 tree field = TREE_OPERAND (expr, 1);
2399
2400 if (! DECL_SIZE_UNIT (field))
2401 {
2402 expr = NULL_TREE;
2403 break;
2404 }
2405
2406 /* Is the field at least as large as the access? If so, ok,
2407 otherwise strip back to the containing structure. */
2408 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2409 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2410 && INTVAL (memoffset) >= 0)
2411 break;
2412
2413 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2414 {
2415 expr = NULL_TREE;
2416 break;
2417 }
2418
2419 expr = TREE_OPERAND (expr, 0);
2420 memoffset = (GEN_INT (INTVAL (memoffset)
2421 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2422 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2423 / BITS_PER_UNIT)));
2424 }
2425 /* Similarly for the decl. */
2426 else if (DECL_P (expr)
2427 && DECL_SIZE_UNIT (expr)
2428 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2429 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2430 && (! memoffset || INTVAL (memoffset) >= 0))
2431 break;
2432 else
2433 {
2434 /* The widened memory access overflows the expression, which means
2435 that it could alias another expression. Zap it. */
2436 expr = NULL_TREE;
2437 break;
2438 }
2439 }
2440
2441 if (! expr)
2442 memoffset = NULL_RTX;
2443
2444 /* The widened memory may alias other stuff, so zap the alias set. */
2445 /* ??? Maybe use get_alias_set on any remaining expression. */
2446
2447 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2448 MEM_ALIGN (new), mode);
2449
2450 return new;
2451 }
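
/* Example (editor's sketch): a target that can only perform SImode
   loads might expand a QImode read as

     rtx wide = widen_memory_access (memref, SImode, 0);

   followed by an SImode load from WIDE plus explicit masking.  The
   alias set of the result is zapped above because the wider access may
   overlap neighboring objects.  */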
2452 \f
2453 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2454
2455 rtx
2456 gen_label_rtx ()
2457 {
2458 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2459 NULL, label_num++, NULL);
2460 }
2461 \f
2462 /* For procedure integration. */
2463
2464 /* Install new pointers to the first and last insns in the chain.
2465 Also, set cur_insn_uid to one higher than the last in use.
2466 Used for an inline-procedure after copying the insn chain. */
2467
2468 void
2469 set_new_first_and_last_insn (first, last)
2470 rtx first, last;
2471 {
2472 rtx insn;
2473
2474 first_insn = first;
2475 last_insn = last;
2476 cur_insn_uid = 0;
2477
2478 for (insn = first; insn; insn = NEXT_INSN (insn))
2479 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2480
2481 cur_insn_uid++;
2482 }
2483
2484 /* Set the range of label numbers found in the current function.
2485 This is used when belatedly compiling an inline function. */
2486
2487 void
2488 set_new_first_and_last_label_num (first, last)
2489 int first, last;
2490 {
2491 base_label_num = label_num;
2492 first_label_num = first;
2493 last_label_num = last;
2494 }
2495
2496 /* Set the last label number found in the current function.
2497 This is used when belatedly compiling an inline function. */
2498
2499 void
2500 set_new_last_label_num (last)
2501 int last;
2502 {
2503 base_label_num = label_num;
2504 last_label_num = last;
2505 }
2506 \f
2507 /* Restore all variables describing the current status from the structure *P.
2508 This is used after a nested function. */
2509
2510 void
2511 restore_emit_status (p)
2512 struct function *p ATTRIBUTE_UNUSED;
2513 {
2514 last_label_num = 0;
2515 }
2516 \f
2517 /* Go through all the RTL insn bodies and copy any invalid shared
2518 structure. This routine should only be called once. */
2519
2520 void
2521 unshare_all_rtl (fndecl, insn)
2522 tree fndecl;
2523 rtx insn;
2524 {
2525 tree decl;
2526
2527 /* Make sure that virtual parameters are not shared. */
2528 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2529 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2530
2531 /* Make sure that virtual stack slots are not shared. */
2532 unshare_all_decls (DECL_INITIAL (fndecl));
2533
2534 /* Unshare just about everything else. */
2535 unshare_all_rtl_1 (insn);
2536
2537 /* Make sure the addresses of stack slots found outside the insn chain
2538 (such as, in DECL_RTL of a variable) are not shared
2539 with the insn chain.
2540
2541 This special care is necessary when the stack slot MEM does not
2542 actually appear in the insn chain. If it does appear, its address
2543 is unshared from all else at that point. */
2544 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2545 }
2546
2547 /* Go through all the RTL insn bodies and copy any invalid shared
2548 structure, again. This is a fairly expensive thing to do so it
2549 should be done sparingly. */
2550
2551 void
2552 unshare_all_rtl_again (insn)
2553 rtx insn;
2554 {
2555 rtx p;
2556 tree decl;
2557
2558 for (p = insn; p; p = NEXT_INSN (p))
2559 if (INSN_P (p))
2560 {
2561 reset_used_flags (PATTERN (p));
2562 reset_used_flags (REG_NOTES (p));
2563 reset_used_flags (LOG_LINKS (p));
2564 }
2565
2566 /* Make sure that virtual stack slots are not shared. */
2567 reset_used_decls (DECL_INITIAL (cfun->decl));
2568
2569 /* Make sure that virtual parameters are not shared. */
2570 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2571 reset_used_flags (DECL_RTL (decl));
2572
2573 reset_used_flags (stack_slot_list);
2574
2575 unshare_all_rtl (cfun->decl, insn);
2576 }
2577
2578 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2579 Assumes the mark bits are cleared at entry. */
2580
2581 static void
2582 unshare_all_rtl_1 (insn)
2583 rtx insn;
2584 {
2585 for (; insn; insn = NEXT_INSN (insn))
2586 if (INSN_P (insn))
2587 {
2588 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2589 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2590 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2591 }
2592 }
2593
2594 /* Go through all virtual stack slots of a function and copy any
2595 shared structure. */
2596 static void
2597 unshare_all_decls (blk)
2598 tree blk;
2599 {
2600 tree t;
2601
2602 /* Copy shared decls. */
2603 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2604 if (DECL_RTL_SET_P (t))
2605 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2606
2607 /* Now process sub-blocks. */
2608 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2609 unshare_all_decls (t);
2610 }
2611
2612 /* Go through all virtual stack slots of a function and mark them as
2613 not shared. */
2614 static void
2615 reset_used_decls (blk)
2616 tree blk;
2617 {
2618 tree t;
2619
2620 /* Mark decls. */
2621 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2622 if (DECL_RTL_SET_P (t))
2623 reset_used_flags (DECL_RTL (t));
2624
2625 /* Now process sub-blocks. */
2626 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2627 reset_used_decls (t);
2628 }
2629
2630 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2631 placed in the result directly, rather than being copied. MAY_SHARE is
2632 either a MEM or an EXPR_LIST of MEMs. */
2633
2634 rtx
2635 copy_most_rtx (orig, may_share)
2636 rtx orig;
2637 rtx may_share;
2638 {
2639 rtx copy;
2640 int i, j;
2641 RTX_CODE code;
2642 const char *format_ptr;
2643
2644 if (orig == may_share
2645 || (GET_CODE (may_share) == EXPR_LIST
2646 && in_expr_list_p (may_share, orig)))
2647 return orig;
2648
2649 code = GET_CODE (orig);
2650
2651 switch (code)
2652 {
2653 case REG:
2654 case QUEUED:
2655 case CONST_INT:
2656 case CONST_DOUBLE:
2657 case CONST_VECTOR:
2658 case SYMBOL_REF:
2659 case CODE_LABEL:
2660 case PC:
2661 case CC0:
2662 return orig;
2663 default:
2664 break;
2665 }
2666
2667 copy = rtx_alloc (code);
2668 PUT_MODE (copy, GET_MODE (orig));
2669 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2670 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2671 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2672 RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2673 RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2674
2675 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2676
2677 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2678 {
2679 switch (*format_ptr++)
2680 {
2681 case 'e':
2682 XEXP (copy, i) = XEXP (orig, i);
2683 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2684 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2685 break;
2686
2687 case 'u':
2688 XEXP (copy, i) = XEXP (orig, i);
2689 break;
2690
2691 case 'E':
2692 case 'V':
2693 XVEC (copy, i) = XVEC (orig, i);
2694 if (XVEC (orig, i) != NULL)
2695 {
2696 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2697 for (j = 0; j < XVECLEN (copy, i); j++)
2698 XVECEXP (copy, i, j)
2699 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2700 }
2701 break;
2702
2703 case 'w':
2704 XWINT (copy, i) = XWINT (orig, i);
2705 break;
2706
2707 case 'n':
2708 case 'i':
2709 XINT (copy, i) = XINT (orig, i);
2710 break;
2711
2712 case 't':
2713 XTREE (copy, i) = XTREE (orig, i);
2714 break;
2715
2716 case 's':
2717 case 'S':
2718 XSTR (copy, i) = XSTR (orig, i);
2719 break;
2720
2721 case '0':
2722 /* Copy this through the wide int field; that's safest. */
2723 X0WINT (copy, i) = X0WINT (orig, i);
2724 break;
2725
2726 default:
2727 abort ();
2728 }
2729 }
2730 return copy;
2731 }
2732
2733 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2734 Recursively does the same for subexpressions. */
2735
2736 rtx
2737 copy_rtx_if_shared (orig)
2738 rtx orig;
2739 {
2740 rtx x = orig;
2741 int i;
2742 enum rtx_code code;
2743 const char *format_ptr;
2744 int copied = 0;
2745
2746 if (x == 0)
2747 return 0;
2748
2749 code = GET_CODE (x);
2750
2751 /* These types may be freely shared. */
2752
2753 switch (code)
2754 {
2755 case REG:
2756 case QUEUED:
2757 case CONST_INT:
2758 case CONST_DOUBLE:
2759 case CONST_VECTOR:
2760 case SYMBOL_REF:
2761 case CODE_LABEL:
2762 case PC:
2763 case CC0:
2764 case SCRATCH:
2765 /* SCRATCHes must be shared because they represent distinct values. */
2766 return x;
2767
2768 case CONST:
2769 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2770 a LABEL_REF, it isn't sharable. */
2771 if (GET_CODE (XEXP (x, 0)) == PLUS
2772 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2773 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2774 return x;
2775 break;
2776
2777 case INSN:
2778 case JUMP_INSN:
2779 case CALL_INSN:
2780 case NOTE:
2781 case BARRIER:
2782 /* The chain of insns is not being copied. */
2783 return x;
2784
2785 case MEM:
2786 /* A MEM is allowed to be shared if its address is constant.
2787
2788 We used to allow sharing of MEMs which referenced
2789 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2790 that can lose. instantiate_virtual_regs will not unshare
2791 the MEMs, and combine may change the structure of the address
2792 because it looks safe and profitable in one context, but
2793 in some other context it creates unrecognizable RTL. */
2794 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2795 return x;
2796
2797 break;
2798
2799 default:
2800 break;
2801 }
2802
2803 /* This rtx may not be shared. If it has already been seen,
2804 replace it with a copy of itself. */
2805
2806 if (RTX_FLAG (x, used))
2807 {
2808 rtx copy;
2809
2810 copy = rtx_alloc (code);
2811 memcpy (copy, x,
2812 (sizeof (*copy) - sizeof (copy->fld)
2813 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2814 x = copy;
2815 copied = 1;
2816 }
2817 RTX_FLAG (x, used) = 1;
2818
2819 /* Now scan the subexpressions recursively.
2820 We can store any replaced subexpressions directly into X
2821 since we know X is not shared! Any vectors in X
2822 must be copied if X was copied. */
2823
2824 format_ptr = GET_RTX_FORMAT (code);
2825
2826 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2827 {
2828 switch (*format_ptr++)
2829 {
2830 case 'e':
2831 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2832 break;
2833
2834 case 'E':
2835 if (XVEC (x, i) != NULL)
2836 {
2837 int j;
2838 int len = XVECLEN (x, i);
2839
2840 if (copied && len > 0)
2841 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2842 for (j = 0; j < len; j++)
2843 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2844 }
2845 break;
2846 }
2847 }
2848 return x;
2849 }
2850
2851 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2852 to look for shared sub-parts. */
2853
2854 void
2855 reset_used_flags (x)
2856 rtx x;
2857 {
2858 int i, j;
2859 enum rtx_code code;
2860 const char *format_ptr;
2861
2862 if (x == 0)
2863 return;
2864
2865 code = GET_CODE (x);
2866
2867 /* These types may be freely shared so we needn't do any resetting
2868 for them. */
2869
2870 switch (code)
2871 {
2872 case REG:
2873 case QUEUED:
2874 case CONST_INT:
2875 case CONST_DOUBLE:
2876 case CONST_VECTOR:
2877 case SYMBOL_REF:
2878 case CODE_LABEL:
2879 case PC:
2880 case CC0:
2881 return;
2882
2883 case INSN:
2884 case JUMP_INSN:
2885 case CALL_INSN:
2886 case NOTE:
2887 case LABEL_REF:
2888 case BARRIER:
2889 /* The chain of insns is not being copied. */
2890 return;
2891
2892 default:
2893 break;
2894 }
2895
2896 RTX_FLAG (x, used) = 0;
2897
2898 format_ptr = GET_RTX_FORMAT (code);
2899 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2900 {
2901 switch (*format_ptr++)
2902 {
2903 case 'e':
2904 reset_used_flags (XEXP (x, i));
2905 break;
2906
2907 case 'E':
2908 for (j = 0; j < XVECLEN (x, i); j++)
2909 reset_used_flags (XVECEXP (x, i, j));
2910 break;
2911 }
2912 }
2913 }
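
/* Example (editor's sketch): the usual unsharing idiom pairs the two
   routines above -- first clear the mark bits, then walk and copy
   anything encountered twice:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   unshare_all_rtl_again above applies exactly this pattern to every
   insn's pattern, notes and links.  */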
2914 \f
2915 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2916 Return X or the rtx for the pseudo reg the value of X was copied into.
2917 OTHER must be valid as a SET_DEST. */
2918
2919 rtx
2920 make_safe_from (x, other)
2921 rtx x, other;
2922 {
2923 while (1)
2924 switch (GET_CODE (other))
2925 {
2926 case SUBREG:
2927 other = SUBREG_REG (other);
2928 break;
2929 case STRICT_LOW_PART:
2930 case SIGN_EXTEND:
2931 case ZERO_EXTEND:
2932 other = XEXP (other, 0);
2933 break;
2934 default:
2935 goto done;
2936 }
2937 done:
2938 if ((GET_CODE (other) == MEM
2939 && ! CONSTANT_P (x)
2940 && GET_CODE (x) != REG
2941 && GET_CODE (x) != SUBREG)
2942 || (GET_CODE (other) == REG
2943 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2944 || reg_mentioned_p (other, x))))
2945 {
2946 rtx temp = gen_reg_rtx (GET_MODE (x));
2947 emit_move_insn (temp, x);
2948 return temp;
2949 }
2950 return x;
2951 }
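
/* Example (editor's sketch): before emitting an expansion that stores
   to TARGET early, a caller can protect a source operand that TARGET
   might clobber:

     op1 = make_safe_from (op1, target);

   Afterwards OP1 is either unchanged or a fresh pseudo holding a copy,
   so an early store to TARGET cannot corrupt it.  */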
2952 \f
2953 /* Emission of insns (adding them to the doubly-linked list). */
2954
2955 /* Return the first insn of the current sequence or current function. */
2956
2957 rtx
2958 get_insns ()
2959 {
2960 return first_insn;
2961 }
2962
2963 /* Specify a new insn as the first in the chain. */
2964
2965 void
2966 set_first_insn (insn)
2967 rtx insn;
2968 {
2969 if (PREV_INSN (insn) != 0)
2970 abort ();
2971 first_insn = insn;
2972 }
2973
2974 /* Return the last insn emitted in current sequence or current function. */
2975
2976 rtx
2977 get_last_insn ()
2978 {
2979 return last_insn;
2980 }
2981
2982 /* Specify a new insn as the last in the chain. */
2983
2984 void
2985 set_last_insn (insn)
2986 rtx insn;
2987 {
2988 if (NEXT_INSN (insn) != 0)
2989 abort ();
2990 last_insn = insn;
2991 }
2992
2993 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2994
2995 rtx
2996 get_last_insn_anywhere ()
2997 {
2998 struct sequence_stack *stack;
2999 if (last_insn)
3000 return last_insn;
3001 for (stack = seq_stack; stack; stack = stack->next)
3002 if (stack->last != 0)
3003 return stack->last;
3004 return 0;
3005 }
3006
3007 /* Return the first nonnote insn emitted in current sequence or current
3008 function. This routine looks inside SEQUENCEs. */
3009
3010 rtx
3011 get_first_nonnote_insn ()
3012 {
3013 rtx insn = first_insn;
3014
3015 while (insn)
3016 {
3017 insn = next_insn (insn);
3018 if (insn == 0 || GET_CODE (insn) != NOTE)
3019 break;
3020 }
3021
3022 return insn;
3023 }
3024
3025 /* Return the last nonnote insn emitted in current sequence or current
3026 function. This routine looks inside SEQUENCEs. */
3027
3028 rtx
3029 get_last_nonnote_insn ()
3030 {
3031 rtx insn = last_insn;
3032
3033 while (insn)
3034 {
3035 insn = previous_insn (insn);
3036 if (insn == 0 || GET_CODE (insn) != NOTE)
3037 break;
3038 }
3039
3040 return insn;
3041 }
3042
3043 /* Return a number larger than any instruction's uid in this function. */
3044
3045 int
3046 get_max_uid ()
3047 {
3048 return cur_insn_uid;
3049 }
3050
3051 /* Renumber instructions so that no instruction UIDs are wasted. */
3052
3053 void
3054 renumber_insns (stream)
3055 FILE *stream;
3056 {
3057 rtx insn;
3058
3059 /* If we're not supposed to renumber instructions, don't. */
3060 if (!flag_renumber_insns)
3061 return;
3062
3063 /* If there aren't that many instructions, then it's not really
3064 worth renumbering them. */
3065 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
3066 return;
3067
3068 cur_insn_uid = 1;
3069
3070 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3071 {
3072 if (stream)
3073 fprintf (stream, "Renumbering insn %d to %d\n",
3074 INSN_UID (insn), cur_insn_uid);
3075 INSN_UID (insn) = cur_insn_uid++;
3076 }
3077 }
3078 \f
3079 /* Return the next insn. If it is a SEQUENCE, return the first insn
3080 of the sequence. */
3081
3082 rtx
3083 next_insn (insn)
3084 rtx insn;
3085 {
3086 if (insn)
3087 {
3088 insn = NEXT_INSN (insn);
3089 if (insn && GET_CODE (insn) == INSN
3090 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3091 insn = XVECEXP (PATTERN (insn), 0, 0);
3092 }
3093
3094 return insn;
3095 }
3096
3097 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3098 of the sequence. */
3099
3100 rtx
3101 previous_insn (insn)
3102 rtx insn;
3103 {
3104 if (insn)
3105 {
3106 insn = PREV_INSN (insn);
3107 if (insn && GET_CODE (insn) == INSN
3108 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3109 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3110 }
3111
3112 return insn;
3113 }
3114
3115 /* Return the next insn after INSN that is not a NOTE. This routine does not
3116 look inside SEQUENCEs. */
3117
3118 rtx
3119 next_nonnote_insn (insn)
3120 rtx insn;
3121 {
3122 while (insn)
3123 {
3124 insn = NEXT_INSN (insn);
3125 if (insn == 0 || GET_CODE (insn) != NOTE)
3126 break;
3127 }
3128
3129 return insn;
3130 }
3131
3132 /* Return the previous insn before INSN that is not a NOTE. This routine does
3133 not look inside SEQUENCEs. */
3134
3135 rtx
3136 prev_nonnote_insn (insn)
3137 rtx insn;
3138 {
3139 while (insn)
3140 {
3141 insn = PREV_INSN (insn);
3142 if (insn == 0 || GET_CODE (insn) != NOTE)
3143 break;
3144 }
3145
3146 return insn;
3147 }
3148
3149 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3150 or 0, if there is none. This routine does not look inside
3151 SEQUENCEs. */
3152
3153 rtx
3154 next_real_insn (insn)
3155 rtx insn;
3156 {
3157 while (insn)
3158 {
3159 insn = NEXT_INSN (insn);
3160 if (insn == 0 || GET_CODE (insn) == INSN
3161 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3162 break;
3163 }
3164
3165 return insn;
3166 }
3167
3168 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3169 or 0, if there is none. This routine does not look inside
3170 SEQUENCEs. */
3171
3172 rtx
3173 prev_real_insn (insn)
3174 rtx insn;
3175 {
3176 while (insn)
3177 {
3178 insn = PREV_INSN (insn);
3179 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3180 || GET_CODE (insn) == JUMP_INSN)
3181 break;
3182 }
3183
3184 return insn;
3185 }
3186
3187 /* Return nonzero if INSN really does something: it is a CALL_INSN or a
3188 JUMP_INSN, or it is an INSN whose pattern, once reload has completed,
3189 is neither a USE nor a CLOBBER. */
3190
3191 int
3192 active_insn_p (insn)
3193 rtx insn;
3194 {
3195 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3196 || (GET_CODE (insn) == INSN
3197 && (! reload_completed
3198 || (GET_CODE (PATTERN (insn)) != USE
3199 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3200 }
3201
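/* Find the next active insn after INSN; return 0 if there is none.
   This routine does not look inside SEQUENCEs.  Until reload has
   completed, this is the same as next_real_insn.  */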
3202 rtx
3203 next_active_insn (insn)
3204 rtx insn;
3205 {
3206 while (insn)
3207 {
3208 insn = NEXT_INSN (insn);
3209 if (insn == 0 || active_insn_p (insn))
3210 break;
3211 }
3212
3213 return insn;
3214 }
3215
3216 /* Find the last insn before INSN that really does something. This routine
3217 does not look inside SEQUENCEs. Until reload has completed, this is the
3218 same as prev_real_insn. */
3219
3220 rtx
3221 prev_active_insn (insn)
3222 rtx insn;
3223 {
3224 while (insn)
3225 {
3226 insn = PREV_INSN (insn);
3227 if (insn == 0 || active_insn_p (insn))
3228 break;
3229 }
3230
3231 return insn;
3232 }
3233
3234 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3235
3236 rtx
3237 next_label (insn)
3238 rtx insn;
3239 {
3240 while (insn)
3241 {
3242 insn = NEXT_INSN (insn);
3243 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3244 break;
3245 }
3246
3247 return insn;
3248 }
3249
3250 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3251
3252 rtx
3253 prev_label (insn)
3254 rtx insn;
3255 {
3256 while (insn)
3257 {
3258 insn = PREV_INSN (insn);
3259 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3260 break;
3261 }
3262
3263 return insn;
3264 }
3265 \f
3266 #ifdef HAVE_cc0
3267 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3268 and REG_CC_USER notes so we can find it. */
3269
3270 void
3271 link_cc0_insns (insn)
3272 rtx insn;
3273 {
3274 rtx user = next_nonnote_insn (insn);
3275
3276 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3277 user = XVECEXP (PATTERN (user), 0, 0);
3278
3279 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3280 REG_NOTES (user));
3281 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3282 }
3283
3284 /* Return the next insn that uses CC0 after INSN, which is assumed to
3285 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3286 applied to the result of this function should yield INSN).
3287
3288 Normally, this is simply the next insn. However, if a REG_CC_USER note
3289 is present, it contains the insn that uses CC0.
3290
3291 Return 0 if we can't find the insn. */
3292
3293 rtx
3294 next_cc0_user (insn)
3295 rtx insn;
3296 {
3297 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3298
3299 if (note)
3300 return XEXP (note, 0);
3301
3302 insn = next_nonnote_insn (insn);
3303 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3304 insn = XVECEXP (PATTERN (insn), 0, 0);
3305
3306 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3307 return insn;
3308
3309 return 0;
3310 }
3311
3312 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3313 note, it is the previous insn. */
3314
3315 rtx
3316 prev_cc0_setter (insn)
3317 rtx insn;
3318 {
3319 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3320
3321 if (note)
3322 return XEXP (note, 0);
3323
3324 insn = prev_nonnote_insn (insn);
3325 if (! sets_cc0_p (PATTERN (insn)))
3326 abort ();
3327
3328 return insn;
3329 }
3330 #endif
3331
3332 /* Increment the label uses for all labels present in rtx X. */
3333
3334 static void
3335 mark_label_nuses (x)
3336 rtx x;
3337 {
3338 enum rtx_code code;
3339 int i, j;
3340 const char *fmt;
3341
3342 code = GET_CODE (x);
3343 if (code == LABEL_REF)
3344 LABEL_NUSES (XEXP (x, 0))++;
3345
3346 fmt = GET_RTX_FORMAT (code);
3347 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3348 {
3349 if (fmt[i] == 'e')
3350 mark_label_nuses (XEXP (x, i));
3351 else if (fmt[i] == 'E')
3352 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3353 mark_label_nuses (XVECEXP (x, i, j));
3354 }
3355 }
3356
3357 \f
3358 /* Try splitting insns that can be split for better scheduling.
3359 PAT is the pattern which might split.
3360 TRIAL is the insn providing PAT.
3361 LAST is nonzero if we should return the last insn of the sequence produced.
3362
3363 If this routine succeeds in splitting, it returns the first or last
3364 replacement insn depending on the value of LAST. Otherwise, it
3365 returns TRIAL. If the insn to be returned can be split, it will be. */
3366
3367 rtx
3368 try_split (pat, trial, last)
3369 rtx pat, trial;
3370 int last;
3371 {
3372 rtx before = PREV_INSN (trial);
3373 rtx after = NEXT_INSN (trial);
3374 int has_barrier = 0;
3375 rtx tem;
3376 rtx note, seq;
3377 int probability;
3378 rtx insn_last, insn;
3379 int njumps = 0;
3380
3381 if (any_condjump_p (trial)
3382 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3383 split_branch_probability = INTVAL (XEXP (note, 0));
3384 probability = split_branch_probability;
3385
3386 seq = split_insns (pat, trial);
3387
3388 split_branch_probability = -1;
3389
3390 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3391 We may need to handle this specially. */
3392 if (after && GET_CODE (after) == BARRIER)
3393 {
3394 has_barrier = 1;
3395 after = NEXT_INSN (after);
3396 }
3397
3398 if (!seq)
3399 return trial;
3400
3401 /* Avoid infinite loop if any insn of the result matches
3402 the original pattern. */
3403 insn_last = seq;
3404 while (1)
3405 {
3406 if (INSN_P (insn_last)
3407 && rtx_equal_p (PATTERN (insn_last), pat))
3408 return trial;
3409 if (!NEXT_INSN (insn_last))
3410 break;
3411 insn_last = NEXT_INSN (insn_last);
3412 }
3413
3414 /* Mark labels. */
3415 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3416 {
3417 if (GET_CODE (insn) == JUMP_INSN)
3418 {
3419 mark_jump_label (PATTERN (insn), insn, 0);
3420 njumps++;
3421 if (probability != -1
3422 && any_condjump_p (insn)
3423 && !find_reg_note (insn, REG_BR_PROB, 0))
3424 {
3425 /* We can preserve the REG_BR_PROB notes only if exactly
3426 one jump is created, otherwise the machine description
3427 is responsible for this step using the
3428 split_branch_probability variable. */
3429 if (njumps != 1)
3430 abort ();
3431 REG_NOTES (insn)
3432 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3433 GEN_INT (probability),
3434 REG_NOTES (insn));
3435 }
3436 }
3437 }
3438
3439 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3440 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3441 if (GET_CODE (trial) == CALL_INSN)
3442 {
3443 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3444 if (GET_CODE (insn) == CALL_INSN)
3445 {
3446 CALL_INSN_FUNCTION_USAGE (insn)
3447 = CALL_INSN_FUNCTION_USAGE (trial);
3448 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3449 }
3450 }
3451
3452 /* Copy notes, particularly those related to the CFG. */
3453 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3454 {
3455 switch (REG_NOTE_KIND (note))
3456 {
3457 case REG_EH_REGION:
3458 insn = insn_last;
3459 while (insn != NULL_RTX)
3460 {
3461 if (GET_CODE (insn) == CALL_INSN
3462 || (flag_non_call_exceptions
3463 && may_trap_p (PATTERN (insn))))
3464 REG_NOTES (insn)
3465 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3466 XEXP (note, 0),
3467 REG_NOTES (insn));
3468 insn = PREV_INSN (insn);
3469 }
3470 break;
3471
3472 case REG_NORETURN:
3473 case REG_SETJMP:
3474 case REG_ALWAYS_RETURN:
3475 insn = insn_last;
3476 while (insn != NULL_RTX)
3477 {
3478 if (GET_CODE (insn) == CALL_INSN)
3479 REG_NOTES (insn)
3480 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3481 XEXP (note, 0),
3482 REG_NOTES (insn));
3483 insn = PREV_INSN (insn);
3484 }
3485 break;
3486
3487 case REG_NON_LOCAL_GOTO:
3488 insn = insn_last;
3489 while (insn != NULL_RTX)
3490 {
3491 if (GET_CODE (insn) == JUMP_INSN)
3492 REG_NOTES (insn)
3493 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3494 XEXP (note, 0),
3495 REG_NOTES (insn));
3496 insn = PREV_INSN (insn);
3497 }
3498 break;
3499
3500 default:
3501 break;
3502 }
3503 }
3504
3505 /* If there are LABELS inside the split insns, increment the
3506 usage count so we don't delete the label. */
3507 if (GET_CODE (trial) == INSN)
3508 {
3509 insn = insn_last;
3510 while (insn != NULL_RTX)
3511 {
3512 if (GET_CODE (insn) == INSN)
3513 mark_label_nuses (PATTERN (insn));
3514
3515 insn = PREV_INSN (insn);
3516 }
3517 }
3518
3519 tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3520
3521 delete_insn (trial);
3522 if (has_barrier)
3523 emit_barrier_after (tem);
3524
3525 /* Recursively call try_split for each new insn created; by the
3526 time control returns here that insn will be fully split, so
3527 set LAST and continue from the insn after the one returned.
3528 We can't use next_active_insn here since AFTER may be a note.
3529 Ignore deleted insns, which can occur if not optimizing. */
3530 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3531 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3532 tem = try_split (PATTERN (tem), tem, 1);
3533
3534 /* Return either the first or the last insn, depending on which was
3535 requested. */
3536 return last
3537 ? (after ? PREV_INSN (after) : last_insn)
3538 : NEXT_INSN (before);
3539 }
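
/* Example (editor's sketch): a pass that wants every insn split could
   walk the chain like this:

     rtx insn;
     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
         insn = try_split (PATTERN (insn), insn, 1);

   Passing LAST == 1 returns the final insn of any replacement sequence,
   so the loop resumes after the newly emitted insns.  */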
3540 \f
3541 /* Make and return an INSN rtx, initializing all its slots.
3542 Store PATTERN in the pattern slot. */
3543
3544 rtx
3545 make_insn_raw (pattern)
3546 rtx pattern;
3547 {
3548 rtx insn;
3549
3550 insn = rtx_alloc (INSN);
3551
3552 INSN_UID (insn) = cur_insn_uid++;
3553 PATTERN (insn) = pattern;
3554 INSN_CODE (insn) = -1;
3555 LOG_LINKS (insn) = NULL;
3556 REG_NOTES (insn) = NULL;
3557 INSN_SCOPE (insn) = NULL;
3558 BLOCK_FOR_INSN (insn) = NULL;
3559
3560 #ifdef ENABLE_RTL_CHECKING
3561 if (insn
3562 && INSN_P (insn)
3563 && (returnjump_p (insn)
3564 || (GET_CODE (PATTERN (insn)) == SET
3565 && SET_DEST (PATTERN (insn)) == pc_rtx)))
3566 {
3567 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3568 debug_rtx (insn);
3569 }
3570 #endif
3571
3572 return insn;
3573 }
3574
3575 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3576
3577 static rtx
3578 make_jump_insn_raw (pattern)
3579 rtx pattern;
3580 {
3581 rtx insn;
3582
3583 insn = rtx_alloc (JUMP_INSN);
3584 INSN_UID (insn) = cur_insn_uid++;
3585
3586 PATTERN (insn) = pattern;
3587 INSN_CODE (insn) = -1;
3588 LOG_LINKS (insn) = NULL;
3589 REG_NOTES (insn) = NULL;
3590 JUMP_LABEL (insn) = NULL;
3591 INSN_SCOPE (insn) = NULL;
3592 BLOCK_FOR_INSN (insn) = NULL;
3593
3594 return insn;
3595 }
3596
3597 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3598
3599 static rtx
3600 make_call_insn_raw (pattern)
3601 rtx pattern;
3602 {
3603 rtx insn;
3604
3605 insn = rtx_alloc (CALL_INSN);
3606 INSN_UID (insn) = cur_insn_uid++;
3607
3608 PATTERN (insn) = pattern;
3609 INSN_CODE (insn) = -1;
3610 LOG_LINKS (insn) = NULL;
3611 REG_NOTES (insn) = NULL;
3612 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3613 INSN_SCOPE (insn) = NULL;
3614 BLOCK_FOR_INSN (insn) = NULL;
3615
3616 return insn;
3617 }
3618 \f
3619 /* Add INSN to the end of the doubly-linked list.
3620 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3621
3622 void
3623 add_insn (insn)
3624 rtx insn;
3625 {
3626 PREV_INSN (insn) = last_insn;
3627 NEXT_INSN (insn) = 0;
3628
3629 if (NULL != last_insn)
3630 NEXT_INSN (last_insn) = insn;
3631
3632 if (NULL == first_insn)
3633 first_insn = insn;
3634
3635 last_insn = insn;
3636 }
3637
3638 /* Add INSN into the doubly-linked list after insn AFTER. This and
3639 the next should be the only functions called to insert an insn once
3640 delay slots have been filled since only they know how to update a
3641 SEQUENCE. */
3642
3643 void
3644 add_insn_after (insn, after)
3645 rtx insn, after;
3646 {
3647 rtx next = NEXT_INSN (after);
3648 basic_block bb;
3649
3650 if (optimize && INSN_DELETED_P (after))
3651 abort ();
3652
3653 NEXT_INSN (insn) = next;
3654 PREV_INSN (insn) = after;
3655
3656 if (next)
3657 {
3658 PREV_INSN (next) = insn;
3659 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3660 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3661 }
3662 else if (last_insn == after)
3663 last_insn = insn;
3664 else
3665 {
3666 struct sequence_stack *stack = seq_stack;
3667 /* Scan all pending sequences too. */
3668 for (; stack; stack = stack->next)
3669 if (after == stack->last)
3670 {
3671 stack->last = insn;
3672 break;
3673 }
3674
3675 if (stack == 0)
3676 abort ();
3677 }
3678
3679 if (GET_CODE (after) != BARRIER
3680 && GET_CODE (insn) != BARRIER
3681 && (bb = BLOCK_FOR_INSN (after)))
3682 {
3683 set_block_for_insn (insn, bb);
3684 if (INSN_P (insn))
3685 bb->flags |= BB_DIRTY;
3686 /* This should not happen, since the first insn in the BB is
3687 always either a NOTE or a LABEL. */
3688 if (bb->end == after
3689 /* Avoid clobbering of structure when creating new BB. */
3690 && GET_CODE (insn) != BARRIER
3691 && (GET_CODE (insn) != NOTE
3692 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3693 bb->end = insn;
3694 }
3695
3696 NEXT_INSN (after) = insn;
3697 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3698 {
3699 rtx sequence = PATTERN (after);
3700 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3701 }
3702 }
3703
3704 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3705 the previous should be the only functions called to insert an insn once
3706 delay slots have been filled since only they know how to update a
3707 SEQUENCE. */
3708
3709 void
3710 add_insn_before (insn, before)
3711 rtx insn, before;
3712 {
3713 rtx prev = PREV_INSN (before);
3714 basic_block bb;
3715
3716 if (optimize && INSN_DELETED_P (before))
3717 abort ();
3718
3719 PREV_INSN (insn) = prev;
3720 NEXT_INSN (insn) = before;
3721
3722 if (prev)
3723 {
3724 NEXT_INSN (prev) = insn;
3725 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3726 {
3727 rtx sequence = PATTERN (prev);
3728 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3729 }
3730 }
3731 else if (first_insn == before)
3732 first_insn = insn;
3733 else
3734 {
3735 struct sequence_stack *stack = seq_stack;
3736 /* Scan all pending sequences too. */
3737 for (; stack; stack = stack->next)
3738 if (before == stack->first)
3739 {
3740 stack->first = insn;
3741 break;
3742 }
3743
3744 if (stack == 0)
3745 abort ();
3746 }
3747
3748 if (GET_CODE (before) != BARRIER
3749 && GET_CODE (insn) != BARRIER
3750 && (bb = BLOCK_FOR_INSN (before)))
3751 {
3752 set_block_for_insn (insn, bb);
3753 if (INSN_P (insn))
3754 bb->flags |= BB_DIRTY;
3755 /* This should not happen, since the first insn in the BB is
3756 always either a NOTE or a LABEL. */
3757 if (bb->head == insn
3758 /* Avoid clobbering of structure when creating new BB. */
3759 && GET_CODE (insn) != BARRIER
3760 && (GET_CODE (insn) != NOTE
3761 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3762 abort ();
3763 }
3764
3765 PREV_INSN (before) = insn;
3766 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3767 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3768 }
3769
3770 /* Remove an insn from its doubly-linked list. This function knows how
3771 to handle sequences. */
3772 void
3773 remove_insn (insn)
3774 rtx insn;
3775 {
3776 rtx next = NEXT_INSN (insn);
3777 rtx prev = PREV_INSN (insn);
3778 basic_block bb;
3779
3780 if (prev)
3781 {
3782 NEXT_INSN (prev) = next;
3783 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3784 {
3785 rtx sequence = PATTERN (prev);
3786 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3787 }
3788 }
3789 else if (first_insn == insn)
3790 first_insn = next;
3791 else
3792 {
3793 struct sequence_stack *stack = seq_stack;
3794 /* Scan all pending sequences too. */
3795 for (; stack; stack = stack->next)
3796 if (insn == stack->first)
3797 {
3798 stack->first = next;
3799 break;
3800 }
3801
3802 if (stack == 0)
3803 abort ();
3804 }
3805
3806 if (next)
3807 {
3808 PREV_INSN (next) = prev;
3809 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3810 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3811 }
3812 else if (last_insn == insn)
3813 last_insn = prev;
3814 else
3815 {
3816 struct sequence_stack *stack = seq_stack;
3817 /* Scan all pending sequences too. */
3818 for (; stack; stack = stack->next)
3819 if (insn == stack->last)
3820 {
3821 stack->last = prev;
3822 break;
3823 }
3824
3825 if (stack == 0)
3826 abort ();
3827 }
3828 if (GET_CODE (insn) != BARRIER
3829 && (bb = BLOCK_FOR_INSN (insn)))
3830 {
3831 if (INSN_P (insn))
3832 bb->flags |= BB_DIRTY;
3833 if (bb->head == insn)
3834 {
3835 /* Never ever delete the basic block note without deleting the
3836 whole basic block. */
3837 if (GET_CODE (insn) == NOTE)
3838 abort ();
3839 bb->head = next;
3840 }
3841 if (bb->end == insn)
3842 bb->end = prev;
3843 }
3844 }
3845
3846 /* Delete all insns made since FROM.
3847 FROM becomes the new last instruction. */
3848
3849 void
3850 delete_insns_since (from)
3851 rtx from;
3852 {
3853 if (from == 0)
3854 first_insn = 0;
3855 else
3856 NEXT_INSN (from) = 0;
3857 last_insn = from;
3858 }
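
/* Example (editor's sketch): delete_insns_since supports speculative
   expansion -- remember the last insn, try emitting something, and roll
   the chain back on failure:

     rtx last = get_last_insn ();
     ... emit insns that may prove unusable ...
     if (failed)
       delete_insns_since (last);

   FAILED here is a stand-in for whatever condition the caller tests.  */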
3859
3860 /* This function is deprecated; please use sequences instead.
3861
3862 Move a consecutive bunch of insns to a different place in the chain.
3863 The insns to be moved are those between FROM and TO.
3864 They are moved to a new position after the insn AFTER.
3865 AFTER must not be FROM or TO or any insn in between.
3866
3867 This function does not know about SEQUENCEs and hence should not be
3868 called after delay-slot filling has been done. */
3869
3870 void
3871 reorder_insns_nobb (from, to, after)
3872 rtx from, to, after;
3873 {
3874 /* Splice this bunch out of where it is now. */
3875 if (PREV_INSN (from))
3876 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3877 if (NEXT_INSN (to))
3878 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3879 if (last_insn == to)
3880 last_insn = PREV_INSN (from);
3881 if (first_insn == from)
3882 first_insn = NEXT_INSN (to);
3883
3884 /* Make the new neighbors point to it and it to them. */
3885 if (NEXT_INSN (after))
3886 PREV_INSN (NEXT_INSN (after)) = to;
3887
3888 NEXT_INSN (to) = NEXT_INSN (after);
3889 PREV_INSN (from) = after;
3890 NEXT_INSN (after) = from;
3891 if (after == last_insn)
3892 last_insn = to;
3893 }
3894
3895 /* Same as function above, but take care to update BB boundaries. */
3896 void
3897 reorder_insns (from, to, after)
3898 rtx from, to, after;
3899 {
3900 rtx prev = PREV_INSN (from);
3901 basic_block bb, bb2;
3902
3903 reorder_insns_nobb (from, to, after);
3904
3905 if (GET_CODE (after) != BARRIER
3906 && (bb = BLOCK_FOR_INSN (after)))
3907 {
3908 rtx x;
3909 bb->flags |= BB_DIRTY;
3910
3911 if (GET_CODE (from) != BARRIER
3912 && (bb2 = BLOCK_FOR_INSN (from)))
3913 {
3914 if (bb2->end == to)
3915 bb2->end = prev;
3916 bb2->flags |= BB_DIRTY;
3917 }
3918
3919 if (bb->end == after)
3920 bb->end = to;
3921
3922 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3923 set_block_for_insn (x, bb);
3924 }
3925 }
3926
3927 /* Return the line note insn at or preceding INSN. */
3928
3929 static rtx
3930 find_line_note (insn)
3931 rtx insn;
3932 {
3933 if (no_line_numbers)
3934 return 0;
3935
3936 for (; insn; insn = PREV_INSN (insn))
3937 if (GET_CODE (insn) == NOTE
3938 && NOTE_LINE_NUMBER (insn) >= 0)
3939 break;
3940
3941 return insn;
3942 }
3943
3944 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3945 of the moved insns when debugging. This may insert a note between AFTER
3946 and FROM, and another one after TO. */
3947
3948 void
3949 reorder_insns_with_line_notes (from, to, after)
3950 rtx from, to, after;
3951 {
3952 rtx from_line = find_line_note (from);
3953 rtx after_line = find_line_note (after);
3954
3955 reorder_insns (from, to, after);
3956
3957 if (from_line == after_line)
3958 return;
3959
3960 if (from_line)
3961 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3962 NOTE_LINE_NUMBER (from_line),
3963 after);
3964 if (after_line)
3965 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3966 NOTE_LINE_NUMBER (after_line),
3967 to);
3968 }
3969
3970 /* Remove unnecessary notes from the instruction stream. */
3971
3972 void
3973 remove_unnecessary_notes ()
3974 {
3975 rtx block_stack = NULL_RTX;
3976 rtx eh_stack = NULL_RTX;
3977 rtx insn;
3978 rtx next;
3979 rtx tmp;
3980
3981 /* We must not remove the first instruction in the function because
3982 the compiler depends on the first instruction being a note. */
3983 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3984 {
3985 /* Remember what's next. */
3986 next = NEXT_INSN (insn);
3987
3988 /* We're only interested in notes. */
3989 if (GET_CODE (insn) != NOTE)
3990 continue;
3991
3992 switch (NOTE_LINE_NUMBER (insn))
3993 {
3994 case NOTE_INSN_DELETED:
3995 case NOTE_INSN_LOOP_END_TOP_COND:
3996 remove_insn (insn);
3997 break;
3998
3999 case NOTE_INSN_EH_REGION_BEG:
4000 eh_stack = alloc_INSN_LIST (insn, eh_stack);
4001 break;
4002
4003 case NOTE_INSN_EH_REGION_END:
4004 /* Too many end notes. */
4005 if (eh_stack == NULL_RTX)
4006 abort ();
4007 /* Mismatched nesting. */
4008 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
4009 abort ();
4010 tmp = eh_stack;
4011 eh_stack = XEXP (eh_stack, 1);
4012 free_INSN_LIST_node (tmp);
4013 break;
4014
4015 case NOTE_INSN_BLOCK_BEG:
4016 /* By now, all notes indicating lexical blocks should have
4017 NOTE_BLOCK filled in. */
4018 if (NOTE_BLOCK (insn) == NULL_TREE)
4019 abort ();
4020 block_stack = alloc_INSN_LIST (insn, block_stack);
4021 break;
4022
4023 case NOTE_INSN_BLOCK_END:
4024 /* Too many end notes. */
4025 if (block_stack == NULL_RTX)
4026 abort ();
4027 /* Mismatched nesting. */
4028 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
4029 abort ();
4030 tmp = block_stack;
4031 block_stack = XEXP (block_stack, 1);
4032 free_INSN_LIST_node (tmp);
4033
4034 /* Scan back to see if there are any non-note instructions
4035 between INSN and the beginning of this block. If not,
4036 then there is no PC range in the generated code that will
4037 actually be in this block, so there's no point in
4038 remembering the existence of the block. */
4039 for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
4040 {
4041 /* This block contains a real instruction. Note that we
4042 don't include labels; if the only thing in the block
4043 is a label, then there are still no PC values that
4044 lie within the block. */
4045 if (INSN_P (tmp))
4046 break;
4047
4048 /* We're only interested in NOTEs. */
4049 if (GET_CODE (tmp) != NOTE)
4050 continue;
4051
4052 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
4053 {
4054 /* We just verified that this BLOCK matches us with
4055 the block_stack check above. Never delete the
4056 BLOCK for the outermost scope of the function; we
4057 can refer to names from that scope even if the
4058 block notes are messed up. */
4059 if (! is_body_block (NOTE_BLOCK (insn))
4060 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
4061 {
4062 remove_insn (tmp);
4063 remove_insn (insn);
4064 }
4065 break;
4066 }
4067 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
4068 /* There's a nested block. We need to leave the
4069 current block in place since otherwise the debugger
4070 wouldn't be able to show symbols from our block in
4071 the nested block. */
4072 break;
4073 }
4074 }
4075 }
4076
4077 /* Too many begin notes. */
4078 if (block_stack || eh_stack)
4079 abort ();
4080 }
4081
4082 \f
4083 /* Emit insn(s) of given code and pattern
4084 at a specified place within the doubly-linked list.
4085
4086 All of the emit_foo global entry points accept an object
4087 X which is either an insn list or a PATTERN of a single
4088 instruction.
4089
4090 There are thus a few canonical ways to generate code and
4091 emit it at a specific place in the instruction stream. For
4092 example, consider the instruction named SPOT and the fact that
4093 we would like to emit some instructions before SPOT. We might
4094 do it like this:
4095
4096 start_sequence ();
4097 ... emit the new instructions ...
4098 insns_head = get_insns ();
4099 end_sequence ();
4100
4101 emit_insn_before (insns_head, SPOT);
4102
4103 It used to be common to generate SEQUENCE rtl instead, but that
4104 is a relic of the past and no longer occurs. The reason is that
4105 SEQUENCE rtl badly fragments RTL memory: the SEQUENCE generated
4106 would almost certainly die right after it was created. */
4107
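/* A concrete instance of the pattern above (illustrative sketch; DEST,
   SRC and SPOT are placeholders, not names from this file):

     start_sequence ();
     emit_move_insn (dest, src);
     insns_head = get_insns ();
     end_sequence ();
     emit_insn_before (insns_head, spot);  */
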
4108 /* Make X be output before the instruction BEFORE. */
4109
4110 rtx
4111 emit_insn_before (x, before)
4112 rtx x, before;
4113 {
4114 rtx last = before;
4115 rtx insn;
4116
4117 #ifdef ENABLE_RTL_CHECKING
4118 if (before == NULL_RTX)
4119 abort ();
4120 #endif
4121
4122 if (x == NULL_RTX)
4123 return last;
4124
4125 switch (GET_CODE (x))
4126 {
4127 case INSN:
4128 case JUMP_INSN:
4129 case CALL_INSN:
4130 case CODE_LABEL:
4131 case BARRIER:
4132 case NOTE:
4133 insn = x;
4134 while (insn)
4135 {
4136 rtx next = NEXT_INSN (insn);
4137 add_insn_before (insn, before);
4138 last = insn;
4139 insn = next;
4140 }
4141 break;
4142
4143 #ifdef ENABLE_RTL_CHECKING
4144 case SEQUENCE:
4145 abort ();
4146 break;
4147 #endif
4148
4149 default:
4150 last = make_insn_raw (x);
4151 add_insn_before (last, before);
4152 break;
4153 }
4154
4155 return last;
4156 }
4157
4158 /* Make an instruction with body X and code JUMP_INSN
4159 and output it before the instruction BEFORE. */
4160
4161 rtx
4162 emit_jump_insn_before (x, before)
4163 rtx x, before;
4164 {
4165 rtx insn, last = NULL_RTX;
4166
4167 #ifdef ENABLE_RTL_CHECKING
4168 if (before == NULL_RTX)
4169 abort ();
4170 #endif
4171
4172 switch (GET_CODE (x))
4173 {
4174 case INSN:
4175 case JUMP_INSN:
4176 case CALL_INSN:
4177 case CODE_LABEL:
4178 case BARRIER:
4179 case NOTE:
4180 insn = x;
4181 while (insn)
4182 {
4183 rtx next = NEXT_INSN (insn);
4184 add_insn_before (insn, before);
4185 last = insn;
4186 insn = next;
4187 }
4188 break;
4189
4190 #ifdef ENABLE_RTL_CHECKING
4191 case SEQUENCE:
4192 abort ();
4193 break;
4194 #endif
4195
4196 default:
4197 last = make_jump_insn_raw (x);
4198 add_insn_before (last, before);
4199 break;
4200 }
4201
4202 return last;
4203 }
4204
4205 /* Make an instruction with body X and code CALL_INSN
4206 and output it before the instruction BEFORE. */
4207
4208 rtx
4209 emit_call_insn_before (x, before)
4210 rtx x, before;
4211 {
4212 rtx last = NULL_RTX, insn;
4213
4214 #ifdef ENABLE_RTL_CHECKING
4215 if (before == NULL_RTX)
4216 abort ();
4217 #endif
4218
4219 switch (GET_CODE (x))
4220 {
4221 case INSN:
4222 case JUMP_INSN:
4223 case CALL_INSN:
4224 case CODE_LABEL:
4225 case BARRIER:
4226 case NOTE:
4227 insn = x;
4228 while (insn)
4229 {
4230 rtx next = NEXT_INSN (insn);
4231 add_insn_before (insn, before);
4232 last = insn;
4233 insn = next;
4234 }
4235 break;
4236
4237 #ifdef ENABLE_RTL_CHECKING
4238 case SEQUENCE:
4239 abort ();
4240 break;
4241 #endif
4242
4243 default:
4244 last = make_call_insn_raw (x);
4245 add_insn_before (last, before);
4246 break;
4247 }
4248
4249 return last;
4250 }
4251
4252 /* Make an insn of code BARRIER
4253 and output it before the insn BEFORE. */
4254
4255 rtx
4256 emit_barrier_before (before)
4257 rtx before;
4258 {
4259 rtx insn = rtx_alloc (BARRIER);
4260
4261 INSN_UID (insn) = cur_insn_uid++;
4262
4263 add_insn_before (insn, before);
4264 return insn;
4265 }
4266
4267 /* Emit the label LABEL before the insn BEFORE. */
4268
4269 rtx
4270 emit_label_before (label, before)
4271 rtx label, before;
4272 {
4273 /* This can be called twice for the same label as a result of the
4274 confusion that follows a syntax error! So make it harmless. */
4275 if (INSN_UID (label) == 0)
4276 {
4277 INSN_UID (label) = cur_insn_uid++;
4278 add_insn_before (label, before);
4279 }
4280
4281 return label;
4282 }
4283
4284 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4285
4286 rtx
4287 emit_note_before (subtype, before)
4288 int subtype;
4289 rtx before;
4290 {
4291 rtx note = rtx_alloc (NOTE);
4292 INSN_UID (note) = cur_insn_uid++;
4293 NOTE_SOURCE_FILE (note) = 0;
4294 NOTE_LINE_NUMBER (note) = subtype;
4295 BLOCK_FOR_INSN (note) = NULL;
4296
4297 add_insn_before (note, before);
4298 return note;
4299 }
4300 \f
4301 /* Helper for emit_insn_after, handles lists of instructions
4302 efficiently. */
4303
4304 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4305
4306 static rtx
4307 emit_insn_after_1 (first, after)
4308 rtx first, after;
4309 {
4310 rtx last;
4311 rtx after_after;
4312 basic_block bb;
4313
4314 if (GET_CODE (after) != BARRIER
4315 && (bb = BLOCK_FOR_INSN (after)))
4316 {
4317 bb->flags |= BB_DIRTY;
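      /* Give each insn in FIRST..LAST the new block.  The loop stops
	 with LAST at the final insn, which the test after the loop
	 handles.  */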
4318 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4319 if (GET_CODE (last) != BARRIER)
4320 set_block_for_insn (last, bb);
4321 if (GET_CODE (last) != BARRIER)
4322 set_block_for_insn (last, bb);
4323 if (bb->end == after)
4324 bb->end = last;
4325 }
4326 else
4327 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4328 continue;
4329
4330 after_after = NEXT_INSN (after);
4331
4332 NEXT_INSN (after) = first;
4333 PREV_INSN (first) = after;
4334 NEXT_INSN (last) = after_after;
4335 if (after_after)
4336 PREV_INSN (after_after) = last;
4337
4338 if (after == last_insn)
4339 last_insn = last;
4340 return last;
4341 }
4342
4343 /* Make X be output after the insn AFTER. */
4344
4345 rtx
4346 emit_insn_after (x, after)
4347 rtx x, after;
4348 {
4349 rtx last = after;
4350
4351 #ifdef ENABLE_RTL_CHECKING
4352 if (after == NULL_RTX)
4353 abort ();
4354 #endif
4355
4356 if (x == NULL_RTX)
4357 return last;
4358
4359 switch (GET_CODE (x))
4360 {
4361 case INSN:
4362 case JUMP_INSN:
4363 case CALL_INSN:
4364 case CODE_LABEL:
4365 case BARRIER:
4366 case NOTE:
4367 last = emit_insn_after_1 (x, after);
4368 break;
4369
4370 #ifdef ENABLE_RTL_CHECKING
4371 case SEQUENCE:
4372 abort ();
4373 break;
4374 #endif
4375
4376 default:
4377 last = make_insn_raw (x);
4378 add_insn_after (last, after);
4379 break;
4380 }
4381
4382 return last;
4383 }
4384
4385 /* Similar to emit_insn_after, except that line notes are inserted so
4386 that the new insns act as if they had been emitted at FROM. */
4387
4388 void
4389 emit_insn_after_with_line_notes (x, after, from)
4390 rtx x, after, from;
4391 {
4392 rtx from_line = find_line_note (from);
4393 rtx after_line = find_line_note (after);
4394 rtx insn = emit_insn_after (x, after);
4395
4396 if (from_line)
4397 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4398 NOTE_LINE_NUMBER (from_line),
4399 after);
4400
4401 if (after_line)
4402 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4403 NOTE_LINE_NUMBER (after_line),
4404 insn);
4405 }
4406
4407 /* Make an insn of code JUMP_INSN with body X
4408 and output it after the insn AFTER. */
4409
4410 rtx
4411 emit_jump_insn_after (x, after)
4412 rtx x, after;
4413 {
4414 rtx last;
4415
4416 #ifdef ENABLE_RTL_CHECKING
4417 if (after == NULL_RTX)
4418 abort ();
4419 #endif
4420
4421 switch (GET_CODE (x))
4422 {
4423 case INSN:
4424 case JUMP_INSN:
4425 case CALL_INSN:
4426 case CODE_LABEL:
4427 case BARRIER:
4428 case NOTE:
4429 last = emit_insn_after_1 (x, after);
4430 break;
4431
4432 #ifdef ENABLE_RTL_CHECKING
4433 case SEQUENCE:
4434 abort ();
4435 break;
4436 #endif
4437
4438 default:
4439 last = make_jump_insn_raw (x);
4440 add_insn_after (last, after);
4441 break;
4442 }
4443
4444 return last;
4445 }
4446
4447 /* Make an instruction with body X and code CALL_INSN
4448 and output it after the instruction AFTER. */
4449
4450 rtx
4451 emit_call_insn_after (x, after)
4452 rtx x, after;
4453 {
4454 rtx last;
4455
4456 #ifdef ENABLE_RTL_CHECKING
4457 if (after == NULL_RTX)
4458 abort ();
4459 #endif
4460
4461 switch (GET_CODE (x))
4462 {
4463 case INSN:
4464 case JUMP_INSN:
4465 case CALL_INSN:
4466 case CODE_LABEL:
4467 case BARRIER:
4468 case NOTE:
4469 last = emit_insn_after_1 (x, after);
4470 break;
4471
4472 #ifdef ENABLE_RTL_CHECKING
4473 case SEQUENCE:
4474 abort ();
4475 break;
4476 #endif
4477
4478 default:
4479 last = make_call_insn_raw (x);
4480 add_insn_after (last, after);
4481 break;
4482 }
4483
4484 return last;
4485 }
4486
4487 /* Make an insn of code BARRIER
4488 and output it after the insn AFTER. */
4489
4490 rtx
4491 emit_barrier_after (after)
4492 rtx after;
4493 {
4494 rtx insn = rtx_alloc (BARRIER);
4495
4496 INSN_UID (insn) = cur_insn_uid++;
4497
4498 add_insn_after (insn, after);
4499 return insn;
4500 }
4501
4502 /* Emit the label LABEL after the insn AFTER. */
4503
4504 rtx
4505 emit_label_after (label, after)
4506 rtx label, after;
4507 {
4508 /* This can be called twice for the same label
4509 as a result of the confusion that follows a syntax error!
4510 So make it harmless. */
4511 if (INSN_UID (label) == 0)
4512 {
4513 INSN_UID (label) = cur_insn_uid++;
4514 add_insn_after (label, after);
4515 }
4516
4517 return label;
4518 }
4519
4520 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4521
4522 rtx
4523 emit_note_after (subtype, after)
4524 int subtype;
4525 rtx after;
4526 {
4527 rtx note = rtx_alloc (NOTE);
4528 INSN_UID (note) = cur_insn_uid++;
4529 NOTE_SOURCE_FILE (note) = 0;
4530 NOTE_LINE_NUMBER (note) = subtype;
4531 BLOCK_FOR_INSN (note) = NULL;
4532 add_insn_after (note, after);
4533 return note;
4534 }
4535
4536 /* Emit a line note for FILE and LINE after the insn AFTER. */
4537
4538 rtx
4539 emit_line_note_after (file, line, after)
4540 const char *file;
4541 int line;
4542 rtx after;
4543 {
4544 rtx note;
4545
4546 if (no_line_numbers && line > 0)
4547 {
4548 cur_insn_uid++;
4549 return 0;
4550 }
4551
4552 note = rtx_alloc (NOTE);
4553 INSN_UID (note) = cur_insn_uid++;
4554 NOTE_SOURCE_FILE (note) = file;
4555 NOTE_LINE_NUMBER (note) = line;
4556 BLOCK_FOR_INSN (note) = NULL;
4557 add_insn_after (note, after);
4558 return note;
4559 }
4560 \f
4561 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE. */
4562 rtx
4563 emit_insn_after_scope (pattern, after, scope)
4564 rtx pattern, after;
4565 tree scope;
4566 {
4567 rtx last = emit_insn_after (pattern, after);
4568
4569 after = NEXT_INSN (after);
4570 while (1)
4571 {
4572 if (active_insn_p (after))
4573 INSN_SCOPE (after) = scope;
4574 if (after == last)
4575 break;
4576 after = NEXT_INSN (after);
4577 }
4578 return last;
4579 }
4580
4581 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE. */
4582 rtx
4583 emit_jump_insn_after_scope (pattern, after, scope)
4584 rtx pattern, after;
4585 tree scope;
4586 {
4587 rtx last = emit_jump_insn_after (pattern, after);
4588
4589 after = NEXT_INSN (after);
4590 while (1)
4591 {
4592 if (active_insn_p (after))
4593 INSN_SCOPE (after) = scope;
4594 if (after == last)
4595 break;
4596 after = NEXT_INSN (after);
4597 }
4598 return last;
4599 }
4600
4601 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE. */
4602 rtx
4603 emit_call_insn_after_scope (pattern, after, scope)
4604 rtx pattern, after;
4605 tree scope;
4606 {
4607 rtx last = emit_call_insn_after (pattern, after);
4608
4609 after = NEXT_INSN (after);
4610 while (1)
4611 {
4612 if (active_insn_p (after))
4613 INSN_SCOPE (after) = scope;
4614 if (after == last)
4615 break;
4616 after = NEXT_INSN (after);
4617 }
4618 return last;
4619 }
4620
4621 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE. */
4622 rtx
4623 emit_insn_before_scope (pattern, before, scope)
4624 rtx pattern, before;
4625 tree scope;
4626 {
4627 rtx first = PREV_INSN (before);
4628 rtx last = emit_insn_before (pattern, before);
4629
4630 first = NEXT_INSN (first);
4631 while (1)
4632 {
4633 if (active_insn_p (first))
4634 INSN_SCOPE (first) = scope;
4635 if (first == last)
4636 break;
4637 first = NEXT_INSN (first);
4638 }
4639 return last;
4640 }
4641 \f
4642 /* Take X and emit it at the end of the doubly-linked
4643 INSN list.
4644
4645 Returns the last insn emitted. */
4646
4647 rtx
4648 emit_insn (x)
4649 rtx x;
4650 {
4651 rtx last = last_insn;
4652 rtx insn;
4653
4654 if (x == NULL_RTX)
4655 return last;
4656
4657 switch (GET_CODE (x))
4658 {
4659 case INSN:
4660 case JUMP_INSN:
4661 case CALL_INSN:
4662 case CODE_LABEL:
4663 case BARRIER:
4664 case NOTE:
4665 insn = x;
4666 while (insn)
4667 {
4668 rtx next = NEXT_INSN (insn);
4669 add_insn (insn);
4670 last = insn;
4671 insn = next;
4672 }
4673 break;
4674
4675 #ifdef ENABLE_RTL_CHECKING
4676 case SEQUENCE:
4677 abort ();
4678 break;
4679 #endif
4680
4681 default:
4682 last = make_insn_raw (x);
4683 add_insn (last);
4684 break;
4685 }
4686
4687 return last;
4688 }
4689
4690 /* Make an insn of code JUMP_INSN with pattern X
4691 and add it to the end of the doubly-linked list. */
4692
4693 rtx
4694 emit_jump_insn (x)
4695 rtx x;
4696 {
4697 rtx last = NULL_RTX, insn;
4698
4699 switch (GET_CODE (x))
4700 {
4701 case INSN:
4702 case JUMP_INSN:
4703 case CALL_INSN:
4704 case CODE_LABEL:
4705 case BARRIER:
4706 case NOTE:
4707 insn = x;
4708 while (insn)
4709 {
4710 rtx next = NEXT_INSN (insn);
4711 add_insn (insn);
4712 last = insn;
4713 insn = next;
4714 }
4715 break;
4716
4717 #ifdef ENABLE_RTL_CHECKING
4718 case SEQUENCE:
4719 abort ();
4720 break;
4721 #endif
4722
4723 default:
4724 last = make_jump_insn_raw (x);
4725 add_insn (last);
4726 break;
4727 }
4728
4729 return last;
4730 }
4731
4732 /* Make an insn of code CALL_INSN with pattern X
4733 and add it to the end of the doubly-linked list. */
4734
4735 rtx
4736 emit_call_insn (x)
4737 rtx x;
4738 {
4739 rtx insn;
4740
4741 switch (GET_CODE (x))
4742 {
4743 case INSN:
4744 case JUMP_INSN:
4745 case CALL_INSN:
4746 case CODE_LABEL:
4747 case BARRIER:
4748 case NOTE:
4749 insn = emit_insn (x);
4750 break;
4751
4752 #ifdef ENABLE_RTL_CHECKING
4753 case SEQUENCE:
4754 abort ();
4755 break;
4756 #endif
4757
4758 default:
4759 insn = make_call_insn_raw (x);
4760 add_insn (insn);
4761 break;
4762 }
4763
4764 return insn;
4765 }
4766
4767 /* Add the label LABEL to the end of the doubly-linked list. */
4768
4769 rtx
4770 emit_label (label)
4771 rtx label;
4772 {
4773 /* This can be called twice for the same label
4774 as a result of the confusion that follows a syntax error!
4775 So make it harmless. */
4776 if (INSN_UID (label) == 0)
4777 {
4778 INSN_UID (label) = cur_insn_uid++;
4779 add_insn (label);
4780 }
4781 return label;
4782 }
4783
4784 /* Make an insn of code BARRIER
4785 and add it to the end of the doubly-linked list. */
4786
4787 rtx
4788 emit_barrier ()
4789 {
4790 rtx barrier = rtx_alloc (BARRIER);
4791 INSN_UID (barrier) = cur_insn_uid++;
4792 add_insn (barrier);
4793 return barrier;
4794 }
4795
4796 /* Make an insn of code NOTE
4797 with data-fields specified by FILE and LINE
4798 and add it to the end of the doubly-linked list,
4799 but only if line-numbers are desired for debugging info. */
4800
4801 rtx
4802 emit_line_note (file, line)
4803 const char *file;
4804 int line;
4805 {
4806 set_file_and_line_for_stmt (file, line);
4807
4808 #if 0
4809 if (no_line_numbers)
4810 return 0;
4811 #endif
4812
4813 return emit_note (file, line);
4814 }
4815
4816 /* Make an insn of code NOTE
4817 with data-fields specified by FILE and LINE
4818 and add it to the end of the doubly-linked list.
4819 If it is a line-number NOTE, omit it if it matches the previous one. */
4820
4821 rtx
4822 emit_note (file, line)
4823 const char *file;
4824 int line;
4825 {
4826 rtx note;
4827
4828 if (line > 0)
4829 {
4830 if (file && last_filename && !strcmp (file, last_filename)
4831 && line == last_linenum)
4832 return 0;
4833 last_filename = file;
4834 last_linenum = line;
4835 }
4836
4837 if (no_line_numbers && line > 0)
4838 {
4839 cur_insn_uid++;
4840 return 0;
4841 }
4842
4843 note = rtx_alloc (NOTE);
4844 INSN_UID (note) = cur_insn_uid++;
4845 NOTE_SOURCE_FILE (note) = file;
4846 NOTE_LINE_NUMBER (note) = line;
4847 BLOCK_FOR_INSN (note) = NULL;
4848 add_insn (note);
4849 return note;
4850 }
4851
4852 /* Emit a line NOTE, and don't omit it even if LINE matches the previous note. */
4853
4854 rtx
4855 emit_line_note_force (file, line)
4856 const char *file;
4857 int line;
4858 {
4859 last_linenum = -1;
4860 return emit_line_note (file, line);
4861 }
4862
4863 /* Cause next statement to emit a line note even if the line number
4864 has not changed. This is used at the beginning of a function. */
4865
4866 void
4867 force_next_line_note ()
4868 {
4869 last_linenum = -1;
4870 }
4871
4872 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4873 note of this kind already exists, its datum is replaced. */
4874
4875 rtx
4876 set_unique_reg_note (insn, kind, datum)
4877 rtx insn;
4878 enum reg_note kind;
4879 rtx datum;
4880 {
4881 rtx note = find_reg_note (insn, kind, NULL_RTX);
4882
4883 switch (kind)
4884 {
4885 case REG_EQUAL:
4886 case REG_EQUIV:
4887 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4888 has multiple sets (some callers assume single_set
4889 means the insn has only one set, when in fact it
4890 means the insn has only one *useful* set). */
4891 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4892 {
4893 if (note)
4894 abort ();
4895 return NULL_RTX;
4896 }
4897
4898 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4899 It serves no useful purpose and breaks eliminate_regs. */
4900 if (GET_CODE (datum) == ASM_OPERANDS)
4901 return NULL_RTX;
4902 break;
4903
4904 default:
4905 break;
4906 }
4907
4908 if (note)
4909 {
4910 XEXP (note, 0) = datum;
4911 return note;
4912 }
4913
4914 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4915 return REG_NOTES (insn);
4916 }
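
/* Usage sketch (illustrative, not code from this file): record that
   INSN's result is known to equal a constant, replacing any stale
   note left by an earlier transformation:

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */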
4917 \f
4918 /* Return an indication of which type of insn should have X as a body.
4919 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4920
4921 enum rtx_code
4922 classify_insn (x)
4923 rtx x;
4924 {
4925 if (GET_CODE (x) == CODE_LABEL)
4926 return CODE_LABEL;
4927 if (GET_CODE (x) == CALL)
4928 return CALL_INSN;
4929 if (GET_CODE (x) == RETURN)
4930 return JUMP_INSN;
4931 if (GET_CODE (x) == SET)
4932 {
4933 if (SET_DEST (x) == pc_rtx)
4934 return JUMP_INSN;
4935 else if (GET_CODE (SET_SRC (x)) == CALL)
4936 return CALL_INSN;
4937 else
4938 return INSN;
4939 }
4940 if (GET_CODE (x) == PARALLEL)
4941 {
4942 int j;
4943 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4944 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4945 return CALL_INSN;
4946 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4947 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4948 return JUMP_INSN;
4949 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4950 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4951 return CALL_INSN;
4952 }
4953 return INSN;
4954 }
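
/* Examples of the classification above (illustrative):

     (set (pc) (label_ref L))              => JUMP_INSN
     (set (reg r) (call (mem f) n))        => CALL_INSN
     (set (reg r) (plus (reg a) (reg b)))  => INSN
     (parallel [(set (pc) ...) ...])       => JUMP_INSN  */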
4955
4956 /* Emit the rtl pattern X as an appropriate kind of insn.
4957 If X is a label, it is simply added into the insn chain. */
4958
4959 rtx
4960 emit (x)
4961 rtx x;
4962 {
4963 enum rtx_code code = classify_insn (x);
4964
4965 if (code == CODE_LABEL)
4966 return emit_label (x);
4967 else if (code == INSN)
4968 return emit_insn (x);
4969 else if (code == JUMP_INSN)
4970 {
4971 rtx insn = emit_jump_insn (x);
4972 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4973 return emit_barrier ();
4974 return insn;
4975 }
4976 else if (code == CALL_INSN)
4977 return emit_call_insn (x);
4978 else
4979 abort ();
4980 }
4981 \f
4982 /* Space for free sequence stack entries. */
4983 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4984
4985 /* Begin emitting insns to a sequence which can be packaged in an
4986 RTL_EXPR. If this sequence will contain something that might cause
4987 the compiler to pop arguments to function calls (because those
4988 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4989 details), use do_pending_stack_adjust before calling this function.
4990 That will ensure that the deferred pops are not accidentally
4991 emitted in the middle of this sequence. */
4992
4993 void
4994 start_sequence ()
4995 {
4996 struct sequence_stack *tem;
4997
4998 if (free_sequence_stack != NULL)
4999 {
5000 tem = free_sequence_stack;
5001 free_sequence_stack = tem->next;
5002 }
5003 else
5004 tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
5005
5006 tem->next = seq_stack;
5007 tem->first = first_insn;
5008 tem->last = last_insn;
5009 tem->sequence_rtl_expr = seq_rtl_expr;
5010
5011 seq_stack = tem;
5012
5013 first_insn = 0;
5014 last_insn = 0;
5015 }
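
/* Sketch of the pattern described above (illustrative, not code from
   this file): flush deferred stack pops before opening the sequence
   so they cannot be emitted inside it.

     do_pending_stack_adjust ();
     start_sequence ();
     ... emit insns, possibly containing calls ...
     seq = get_insns ();
     end_sequence ();  */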
5016
5017 /* Similarly, but indicate that this sequence will be placed in T, an
5018 RTL_EXPR. See the documentation for start_sequence for more
5019 information about how to use this function. */
5020
5021 void
5022 start_sequence_for_rtl_expr (t)
5023 tree t;
5024 {
5025 start_sequence ();
5026
5027 seq_rtl_expr = t;
5028 }
5029
5030 /* Set up the insn chain starting with FIRST as the current sequence,
5031 saving the previously current one. See the documentation for
5032 start_sequence for more information about how to use this function. */
5033
5034 void
5035 push_to_sequence (first)
5036 rtx first;
5037 {
5038 rtx last;
5039
5040 start_sequence ();
5041
5042 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5043
5044 first_insn = first;
5045 last_insn = last;
5046 }
5047
5048 /* Set up the insn chain from a chain starting in FIRST and ending in LAST. */
5049
5050 void
5051 push_to_full_sequence (first, last)
5052 rtx first, last;
5053 {
5054 start_sequence ();
5055 first_insn = first;
5056 last_insn = last;
5057 /* We really should have the end of the insn chain here. */
5058 if (last && NEXT_INSN (last))
5059 abort ();
5060 }
5061
5062 /* Set up the outer-level insn chain
5063 as the current sequence, saving the previously current one. */
5064
5065 void
5066 push_topmost_sequence ()
5067 {
5068 struct sequence_stack *stack, *top = NULL;
5069
5070 start_sequence ();
5071
5072 for (stack = seq_stack; stack; stack = stack->next)
5073 top = stack;
5074
5075 first_insn = top->first;
5076 last_insn = top->last;
5077 seq_rtl_expr = top->sequence_rtl_expr;
5078 }
5079
5080 /* After emitting to the outer-level insn chain, record the chain's
5081 new bounds and restore the previously saved state. */
5082
5083 void
5084 pop_topmost_sequence ()
5085 {
5086 struct sequence_stack *stack, *top = NULL;
5087
5088 for (stack = seq_stack; stack; stack = stack->next)
5089 top = stack;
5090
5091 top->first = first_insn;
5092 top->last = last_insn;
5093 /* ??? Why don't we save seq_rtl_expr here? */
5094
5095 end_sequence ();
5096 }
5097
5098 /* After emitting to a sequence, restore previous saved state.
5099
5100 To get the contents of the sequence just made, you must call
5101 `get_insns' *before* calling here.
5102
5103 If the compiler might have deferred popping arguments while
5104 generating this sequence, and this sequence will not be immediately
5105 inserted into the instruction stream, use do_pending_stack_adjust
5106 before calling get_insns. That will ensure that the deferred
5107 pops are inserted into this sequence, and not into some random
5108 location in the instruction stream. See INHIBIT_DEFER_POP for more
5109 information about deferred popping of arguments. */
5110
5111 void
5112 end_sequence ()
5113 {
5114 struct sequence_stack *tem = seq_stack;
5115
5116 first_insn = tem->first;
5117 last_insn = tem->last;
5118 seq_rtl_expr = tem->sequence_rtl_expr;
5119 seq_stack = tem->next;
5120
5121 memset (tem, 0, sizeof (*tem));
5122 tem->next = free_sequence_stack;
5123 free_sequence_stack = tem;
5124 }
5125
5126 /* This works like end_sequence, but records the sequence being ended
5127 in *FIRST and *LAST. */
5128
5129 void
5130 end_full_sequence (first, last)
5131 rtx *first, *last;
5132 {
5133 *first = first_insn;
5134 *last = last_insn;
5135 end_sequence ();
5136 }
5137
5138 /* Return 1 if currently emitting into a sequence. */
5139
5140 int
5141 in_sequence_p ()
5142 {
5143 return seq_stack != 0;
5144 }
5145 \f
5146 /* Put the various virtual registers into REGNO_REG_RTX. */
5147
5148 void
5149 init_virtual_regs (es)
5150 struct emit_status *es;
5151 {
5152 rtx *ptr = es->x_regno_reg_rtx;
5153 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5154 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5155 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5156 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5157 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5158 }
5159
5160 \f
5161 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5162 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5163 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5164 static int copy_insn_n_scratches;
5165
5166 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5167 copied an ASM_OPERANDS.
5168 In that case, it is the original input-operand vector. */
5169 static rtvec orig_asm_operands_vector;
5170
5171 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5172 copied an ASM_OPERANDS.
5173 In that case, it is the copied input-operand vector. */
5174 static rtvec copy_asm_operands_vector;
5175
5176 /* Likewise for the constraints vector. */
5177 static rtvec orig_asm_constraints_vector;
5178 static rtvec copy_asm_constraints_vector;
5179
5180 /* Recursively create a new copy of an rtx for copy_insn.
5181 This function differs from copy_rtx in that it handles SCRATCHes and
5182 ASM_OPERANDs properly.
5183 Normally, this function is not used directly; use copy_insn as front end.
5184 However, you could first copy an insn pattern with copy_insn and then use
5185 this function afterwards to properly copy any REG_NOTEs containing
5186 SCRATCHes. */
5187
5188 rtx
5189 copy_insn_1 (orig)
5190 rtx orig;
5191 {
5192 rtx copy;
5193 int i, j;
5194 RTX_CODE code;
5195 const char *format_ptr;
5196
5197 code = GET_CODE (orig);
5198
5199 switch (code)
5200 {
5201 case REG:
5202 case QUEUED:
5203 case CONST_INT:
5204 case CONST_DOUBLE:
5205 case CONST_VECTOR:
5206 case SYMBOL_REF:
5207 case CODE_LABEL:
5208 case PC:
5209 case CC0:
5210 case ADDRESSOF:
5211 return orig;
5212
5213 case SCRATCH:
5214 for (i = 0; i < copy_insn_n_scratches; i++)
5215 if (copy_insn_scratch_in[i] == orig)
5216 return copy_insn_scratch_out[i];
5217 break;
5218
5219 case CONST:
5220 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
5221 a LABEL_REF, it isn't sharable. */
5222 if (GET_CODE (XEXP (orig, 0)) == PLUS
5223 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5224 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5225 return orig;
5226 break;
5227
5228 /* A MEM with a constant address is not sharable. The problem is that
5229 the constant address may need to be reloaded. If the mem is shared,
5230 then reloading one copy of this mem will cause all copies to appear
5231 to have been reloaded. */
5232
5233 default:
5234 break;
5235 }
5236
5237 copy = rtx_alloc (code);
5238
5239 /* Copy the various flags, and other information. We assume that
5240 all fields need copying, and then clear the fields that should
5241 not be copied. That is the sensible default behavior, and forces
5242 us to explicitly document why we are *not* copying a flag. */
5243 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5244
5245 /* We do not copy the USED flag, which is used as a mark bit during
5246 walks over the RTL. */
5247 RTX_FLAG (copy, used) = 0;
5248
5249 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5250 if (GET_RTX_CLASS (code) == 'i')
5251 {
5252 RTX_FLAG (copy, jump) = 0;
5253 RTX_FLAG (copy, call) = 0;
5254 RTX_FLAG (copy, frame_related) = 0;
5255 }
5256
5257 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5258
5259 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5260 {
5261 copy->fld[i] = orig->fld[i];
5262 switch (*format_ptr++)
5263 {
5264 case 'e':
5265 if (XEXP (orig, i) != NULL)
5266 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5267 break;
5268
5269 case 'E':
5270 case 'V':
5271 if (XVEC (orig, i) == orig_asm_constraints_vector)
5272 XVEC (copy, i) = copy_asm_constraints_vector;
5273 else if (XVEC (orig, i) == orig_asm_operands_vector)
5274 XVEC (copy, i) = copy_asm_operands_vector;
5275 else if (XVEC (orig, i) != NULL)
5276 {
5277 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5278 for (j = 0; j < XVECLEN (copy, i); j++)
5279 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5280 }
5281 break;
5282
5283 case 't':
5284 case 'w':
5285 case 'i':
5286 case 's':
5287 case 'S':
5288 case 'u':
5289 case '0':
5290 /* These are left unchanged. */
5291 break;
5292
5293 default:
5294 abort ();
5295 }
5296 }
5297
5298 if (code == SCRATCH)
5299 {
5300 i = copy_insn_n_scratches++;
5301 if (i >= MAX_RECOG_OPERANDS)
5302 abort ();
5303 copy_insn_scratch_in[i] = orig;
5304 copy_insn_scratch_out[i] = copy;
5305 }
5306 else if (code == ASM_OPERANDS)
5307 {
5308 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5309 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5310 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5311 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5312 }
5313
5314 return copy;
5315 }
5316
5317 /* Create a new copy of an rtx.
5318 This function differs from copy_rtx in that it handles SCRATCHes and
5319 ASM_OPERANDs properly.
5320 INSN doesn't really have to be a full INSN; it could be just the
5321 pattern. */
5322 rtx
5323 copy_insn (insn)
5324 rtx insn;
5325 {
5326 copy_insn_n_scratches = 0;
5327 orig_asm_operands_vector = 0;
5328 orig_asm_constraints_vector = 0;
5329 copy_asm_operands_vector = 0;
5330 copy_asm_constraints_vector = 0;
5331 return copy_insn_1 (insn);
5332 }
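
/* Sketch of the two-step use mentioned above (illustrative, not code
   from this file): copy the pattern first so the SCRATCH and
   ASM_OPERANDS tables are primed, then run copy_insn_1 over the notes
   so shared SCRATCHes stay shared with the new pattern.

     new_pat = copy_insn (PATTERN (insn));
     new_notes = copy_insn_1 (REG_NOTES (insn));  */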
5333
5334 /* Initialize data structures and variables in this file
5335 before generating rtl for each function. */
5336
5337 void
5338 init_emit ()
5339 {
5340 struct function *f = cfun;
5341
5342 f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5343 first_insn = NULL;
5344 last_insn = NULL;
5345 seq_rtl_expr = NULL;
5346 cur_insn_uid = 1;
5347 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5348 last_linenum = 0;
5349 last_filename = 0;
5350 first_label_num = label_num;
5351 last_label_num = 0;
5352 seq_stack = NULL;
5353
5354 /* Init the tables that describe all the pseudo regs. */
5355
5356 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5357
5358 f->emit->regno_pointer_align
5359 = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5360 * sizeof (unsigned char));
5361
5362 regno_reg_rtx
5363 = (rtx *) ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5364
5365 /* Put copies of all the hard registers into regno_reg_rtx. */
5366 memcpy (regno_reg_rtx,
5367 static_regno_reg_rtx,
5368 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5369
5370 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5371 init_virtual_regs (f->emit);
5372
5373 /* Indicate that the virtual registers and stack locations are
5374 all pointers. */
5375 REG_POINTER (stack_pointer_rtx) = 1;
5376 REG_POINTER (frame_pointer_rtx) = 1;
5377 REG_POINTER (hard_frame_pointer_rtx) = 1;
5378 REG_POINTER (arg_pointer_rtx) = 1;
5379
5380 REG_POINTER (virtual_incoming_args_rtx) = 1;
5381 REG_POINTER (virtual_stack_vars_rtx) = 1;
5382 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5383 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5384 REG_POINTER (virtual_cfa_rtx) = 1;
5385
5386 #ifdef STACK_BOUNDARY
5387 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5388 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5389 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5390 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5391
5392 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5393 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5394 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5395 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5396 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5397 #endif
5398
5399 #ifdef INIT_EXPANDERS
5400 INIT_EXPANDERS;
5401 #endif
5402 }
5403
5404 /* Generate the constant 0. */
5405
5406 static rtx
5407 gen_const_vector_0 (mode)
5408 enum machine_mode mode;
5409 {
5410 rtx tem;
5411 rtvec v;
5412 int units, i;
5413 enum machine_mode inner;
5414
5415 units = GET_MODE_NUNITS (mode);
5416 inner = GET_MODE_INNER (mode);
5417
5418 v = rtvec_alloc (units);
5419
5420 /* CONST0_RTX for the element mode must already be initialized when we are called. */
5421 if (!CONST0_RTX (inner))
5422 abort ();
5423
5424 for (i = 0; i < units; ++i)
5425 RTVEC_ELT (v, i) = CONST0_RTX (inner);
5426
5427 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5428 return tem;
5429 }
5430
5431 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5432 zero vector when all elements are zero. */
5433 rtx
5434 gen_rtx_CONST_VECTOR (mode, v)
5435 enum machine_mode mode;
5436 rtvec v;
5437 {
5438 rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5439 int i;
5440
5441 for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5442 if (RTVEC_ELT (v, i) != inner_zero)
5443 return gen_rtx_raw_CONST_VECTOR (mode, v);
5444 return CONST0_RTX (mode);
5445 }
5446
5447 /* Create some permanent unique rtl objects shared between all functions.
5448 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5449
5450 void
5451 init_emit_once (line_numbers)
5452 int line_numbers;
5453 {
5454 int i;
5455 enum machine_mode mode;
5456 enum machine_mode double_mode;
5457
5458 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5459 tables. */
5460 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5461 const_int_htab_eq, NULL);
5462
5463 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5464 const_double_htab_eq, NULL);
5465
5466 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5467 mem_attrs_htab_eq, NULL);
5468 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5469 reg_attrs_htab_eq, NULL);
5470
5471 no_line_numbers = ! line_numbers;
5472
5473 /* Compute the word and byte modes. */
5474
5475 byte_mode = VOIDmode;
5476 word_mode = VOIDmode;
5477 double_mode = VOIDmode;
5478
5479 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5480 mode = GET_MODE_WIDER_MODE (mode))
5481 {
5482 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5483 && byte_mode == VOIDmode)
5484 byte_mode = mode;
5485
5486 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5487 && word_mode == VOIDmode)
5488 word_mode = mode;
5489 }
5490
5491 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5492 mode = GET_MODE_WIDER_MODE (mode))
5493 {
5494 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5495 && double_mode == VOIDmode)
5496 double_mode = mode;
5497 }
5498
5499 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5500
5501 /* Assign register numbers to the globally defined register rtx.
5502 This must be done at runtime because the register number field
5503 is in a union and some compilers can't initialize unions. */
5504
5505 pc_rtx = gen_rtx (PC, VOIDmode);
5506 cc0_rtx = gen_rtx (CC0, VOIDmode);
5507 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5508 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5509 if (hard_frame_pointer_rtx == 0)
5510 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5511 HARD_FRAME_POINTER_REGNUM);
5512 if (arg_pointer_rtx == 0)
5513 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5514 virtual_incoming_args_rtx =
5515 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5516 virtual_stack_vars_rtx =
5517 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5518 virtual_stack_dynamic_rtx =
5519 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5520 virtual_outgoing_args_rtx =
5521 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5522 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5523
5524 /* Initialize RTL for commonly used hard registers. These are
5525 copied into regno_reg_rtx as we begin to compile each function. */
5526 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5527 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5528
5529 #ifdef INIT_EXPANDERS
5530 /* This is to initialize {init|mark|free}_machine_status before the first
5531 call to push_function_context_to. This is needed by the Chill front
5532 end which calls push_function_context_to before the first call to
5533 init_function_start. */
5534 INIT_EXPANDERS;
5535 #endif
5536
5537 /* Create the unique rtx's for certain rtx codes and operand values. */
5538
5539 /* Don't use gen_rtx here since gen_rtx in this case
5540 tries to use these variables. */
5541 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5542 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5543 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5544
5545 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5546 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5547 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5548 else
5549 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5550
5551 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5552 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5553 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5554 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5555
5556 for (i = 0; i <= 2; i++)
5557 {
5558 REAL_VALUE_TYPE *r =
5559 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5560
5561 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5562 mode = GET_MODE_WIDER_MODE (mode))
5563 const_tiny_rtx[i][(int) mode] =
5564 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5565
5566 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5567
5568 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5569 mode = GET_MODE_WIDER_MODE (mode))
5570 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5571
5572 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5573 mode != VOIDmode;
5574 mode = GET_MODE_WIDER_MODE (mode))
5575 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5576 }
5577
5578 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5579 mode != VOIDmode;
5580 mode = GET_MODE_WIDER_MODE (mode))
5581 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5582
5583 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5584 mode != VOIDmode;
5585 mode = GET_MODE_WIDER_MODE (mode))
5586 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5587
5588 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5589 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5590 const_tiny_rtx[0][i] = const0_rtx;
5591
5592 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5593 if (STORE_FLAG_VALUE == 1)
5594 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5595
5596 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5597 return_address_pointer_rtx
5598 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5599 #endif
5600
5601 #ifdef STRUCT_VALUE
5602 struct_value_rtx = STRUCT_VALUE;
5603 #else
5604 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5605 #endif
5606
5607 #ifdef STRUCT_VALUE_INCOMING
5608 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5609 #else
5610 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5611 struct_value_incoming_rtx
5612 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5613 #else
5614 struct_value_incoming_rtx = struct_value_rtx;
5615 #endif
5616 #endif
5617
5618 #ifdef STATIC_CHAIN_REGNUM
5619 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5620
5621 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5622 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5623 static_chain_incoming_rtx
5624 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5625 else
5626 #endif
5627 static_chain_incoming_rtx = static_chain_rtx;
5628 #endif
5629
5630 #ifdef STATIC_CHAIN
5631 static_chain_rtx = STATIC_CHAIN;
5632
5633 #ifdef STATIC_CHAIN_INCOMING
5634 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5635 #else
5636 static_chain_incoming_rtx = static_chain_rtx;
5637 #endif
5638 #endif
5639
5640 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5641 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5642 }
5643 \f
5644 /* Query and clear/restore no_line_numbers. This is used by the
5645 switch / case handling in stmt.c to give proper line numbers in
5646 warnings about unreachable code. */
5647
5648 int
5649 force_line_numbers ()
5650 {
5651 int old = no_line_numbers;
5652
5653 no_line_numbers = 0;
5654 if (old)
5655 force_next_line_note ();
5656 return old;
5657 }
5658
5659 void
5660 restore_line_number_status (old_value)
5661 int old_value;
5662 {
5663 no_line_numbers = old_value;
5664 }
5665
5666 /* Produce an exact duplicate of insn INSN after AFTER.
5667 Take care to update libcall regions if present. */
5668
5669 rtx
5670 emit_copy_of_insn_after (insn, after)
5671 rtx insn, after;
5672 {
5673 rtx new;
5674 rtx note1, note2, link;
5675
5676 switch (GET_CODE (insn))
5677 {
5678 case INSN:
5679 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5680 break;
5681
5682 case JUMP_INSN:
5683 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5684 break;
5685
5686 case CALL_INSN:
5687 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5688 if (CALL_INSN_FUNCTION_USAGE (insn))
5689 CALL_INSN_FUNCTION_USAGE (new)
5690 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5691 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5692 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5693 break;
5694
5695 default:
5696 abort ();
5697 }
5698
5699 /* Update LABEL_NUSES. */
5700 mark_jump_label (PATTERN (new), new, 0);
5701
5702 INSN_SCOPE (new) = INSN_SCOPE (insn);
5703
5704 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5705 make them. */
5706 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5707 if (REG_NOTE_KIND (link) != REG_LABEL)
5708 {
5709 if (GET_CODE (link) == EXPR_LIST)
5710 REG_NOTES (new)
5711 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5712 XEXP (link, 0),
5713 REG_NOTES (new)));
5714 else
5715 REG_NOTES (new)
5716 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5717 XEXP (link, 0),
5718 REG_NOTES (new)));
5719 }
5720
5721 /* Fix the libcall sequences. */
5722 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5723 {
5724 rtx p = new;
5725 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5726 p = PREV_INSN (p);
5727 XEXP (note1, 0) = p;
5728 XEXP (note2, 0) = new;
5729 }
5730 INSN_CODE (new) = INSN_CODE (insn);
5731 return new;
5732 }
5733
5734 #include "gt-emit-rtl.h"