1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 88, 92-97, 1998, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* Middle-to-low level generation of rtx code and insns.
23
24 This file contains the functions `gen_rtx', `gen_reg_rtx'
25 and `gen_label_rtx' that are the usual ways of creating rtl
26 expressions for most purposes.
27
28 It also has the functions for creating insns and linking
29 them in the doubly-linked chain.
30
31 The patterns of the insns are created by machine-dependent
32 routines in insn-emit.c, which is generated automatically from
33 the machine description. These routines use `gen_rtx' to make
34 the individual rtx's of the pattern; what is machine dependent
35 is the kind of rtx's they make and what arguments they use. */
36
37 #include "config.h"
38 #include "system.h"
39 #include "toplev.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "function.h"
44 #include "expr.h"
45 #include "regs.h"
46 #include "hard-reg-set.h"
47 #include "insn-config.h"
48 #include "recog.h"
49 #include "real.h"
50 #include "obstack.h"
51 #include "bitmap.h"
52
53 /* Commonly used modes. */
54
55 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
56 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
57 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
58 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
59
60
61 /* This is *not* reset after each function. It gives each CODE_LABEL
62 in the entire compilation a unique label number. */
63
64 static int label_num = 1;
65
66 /* Highest label number in current function.
67 Zero means use the value of label_num instead.
68 This is nonzero only when belatedly compiling an inline function. */
69
70 static int last_label_num;
71
72 /* Value label_num had when set_new_first_and_last_label_number was called.
73 If label_num has not changed since then, last_label_num is valid. */
74
75 static int base_label_num;
76
77 /* Nonzero means do not generate NOTEs for source line numbers. */
78
79 static int no_line_numbers;
80
81 /* Commonly used rtx's, so that we only need space for one copy.
82 These are initialized once for the entire compilation.
83 All of these except perhaps the floating-point CONST_DOUBLEs
84 are unique; no other rtx-object will be equal to any of these. */
85
86 /* Avoid warnings by initializing the `fld' field. Since it's a union,
87 bypass problems with K&R compilers by only doing so when __GNUC__. */
88 #ifdef __GNUC__
89 #define FLDI , {{0}}
90 #else
91 #define FLDI
92 #endif
93
94 struct _global_rtl global_rtl =
95 {
96 {PC, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* pc_rtx */
97 {CC0, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* cc0_rtx */
98 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* stack_pointer_rtx */
99 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* frame_pointer_rtx */
100 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* hard_frame_pointer_rtx */
101 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* arg_pointer_rtx */
102 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* virtual_incoming_args_rtx */
103 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* virtual_stack_vars_rtx */
104 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* virtual_stack_dynamic_rtx */
105 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* virtual_outgoing_args_rtx */
106 {REG, VOIDmode, 0, 0, 0, 0, 0, 0, 0, 0 FLDI }, /* virtual_cfa_rtx */
107 };
108
109 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
110 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
111 record a copy of const[012]_rtx. */
112
113 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
114
115 rtx const_true_rtx;
116
117 REAL_VALUE_TYPE dconst0;
118 REAL_VALUE_TYPE dconst1;
119 REAL_VALUE_TYPE dconst2;
120 REAL_VALUE_TYPE dconstm1;
121
122 /* All references to the following fixed hard registers go through
123 these unique rtl objects. On machines where the frame-pointer and
124 arg-pointer are the same register, they use the same unique object.
125
126 After register allocation, other rtl objects which used to be pseudo-regs
127 may be clobbered to refer to the frame-pointer register.
128 But references that were originally to the frame-pointer can be
129 distinguished from the others because they contain frame_pointer_rtx.
130
131 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
132 tricky: until register elimination has taken place hard_frame_pointer_rtx
133 should be used if it is being set, and frame_pointer_rtx otherwise. After
134 register elimination hard_frame_pointer_rtx should always be used.
135 On machines where the two registers are the same (as on most machines),
136 frame_pointer_rtx and hard_frame_pointer_rtx are the same rtx.
137
138 In an inline procedure, the stack and frame pointer rtxs may not be
139 used for anything else. */
140 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
141 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
142 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
143 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
144 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
145
146 /* This is used to implement __builtin_return_address for some machines.
147 See for instance the MIPS port. */
148 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
149
150 /* We make one copy of (const_int C) where C is in
151 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
152 to save space during the compilation and simplify comparisons of
153 integers. */
154
155 struct rtx_def const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
156
157 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
158 shortly thrown away. We use two mechanisms to prevent this waste:
159
160 First, we keep a list of the expressions used to represent the sequence
161 stack in sequence_element_free_list.
162
163 Second, for sizes up to 5 elements, we keep a SEQUENCE and its associated
164 rtvec for use by gen_sequence. One entry for each size is sufficient
165 because most cases are calls to gen_sequence followed by immediately
166 emitting the SEQUENCE. Reuse is safe since emitting a sequence is
167 destructive on the insn in it anyway and hence can't be redone.
168
169 We do not bother to save these caches over nested function calls.
170 Instead, we just reinitialize them. */
171
172 #define SEQUENCE_RESULT_SIZE 5
173
174 static struct sequence_stack *sequence_element_free_list;
175 static rtx sequence_result[SEQUENCE_RESULT_SIZE];
176
177 /* During RTL generation, we also keep a list of free INSN rtl codes. */
178 static rtx free_insn;
179
180 #define first_insn (current_function->emit->x_first_insn)
181 #define last_insn (current_function->emit->x_last_insn)
182 #define cur_insn_uid (current_function->emit->x_cur_insn_uid)
183 #define last_linenum (current_function->emit->x_last_linenum)
184 #define last_filename (current_function->emit->x_last_filename)
185 #define first_label_num (current_function->emit->x_first_label_num)
186
187 static rtx make_jump_insn_raw PROTO((rtx));
188 static rtx make_call_insn_raw PROTO((rtx));
189 static rtx find_line_note PROTO((rtx));
190 \f
191 rtx
192 gen_rtx_CONST_INT (mode, arg)
193 enum machine_mode mode;
194 HOST_WIDE_INT arg;
195 {
196 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
197 return &const_int_rtx[arg + MAX_SAVED_CONST_INT];
198
199 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
200 if (const_true_rtx && arg == STORE_FLAG_VALUE)
201 return const_true_rtx;
202 #endif
203
204 return gen_rtx_raw_CONST_INT (mode, arg);
205 }
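/* Illustrative sketch of the caching behavior above: constants within
   [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come back as shared objects,
   so pointer comparison suffices for them, while anything larger is freshly
   allocated.  GEN_INT is the usual rtl.h wrapper that calls
   gen_rtx_CONST_INT with VOIDmode.

       rtx five = GEN_INT (5);        points at const_int_rtx[5 + MAX_SAVED_CONST_INT]
       rtx big  = GEN_INT (100000);   a freshly allocated CONST_INT         */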
206
207 /* CONST_DOUBLE needs special handling because its length is known
208 only at run time. */
209 rtx
210 gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2)
211 enum machine_mode mode;
212 rtx arg0;
213 HOST_WIDE_INT arg1, arg2;
214 {
215 rtx r = rtx_alloc (CONST_DOUBLE);
216 int i;
217
218 PUT_MODE (r, mode);
219 XEXP (r, 0) = arg0;
220 XEXP (r, 1) = NULL_RTX;
221 XWINT (r, 2) = arg1;
222 XWINT (r, 3) = arg2;
223
224 for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 3; --i)
225 XWINT (r, i) = 0;
226
227 return r;
228 }
229
230 rtx
231 gen_rtx_REG (mode, regno)
232 enum machine_mode mode;
233 int regno;
234 {
235 /* In case the MD file explicitly references the frame pointer, have
236 all such references point to the same frame pointer. This is
237 used during frame pointer elimination to distinguish the explicit
238 references to these registers from pseudos that happened to be
239 assigned to them.
240
241 If we have eliminated the frame pointer or arg pointer, we will
242 be using it as a normal register, for example as a spill
243 register. In such cases, we might be accessing it in a mode that
244 is not Pmode and therefore cannot use the pre-allocated rtx.
245
246 Also don't do this when we are making new REGs in reload, since
247 we don't want to get confused with the real pointers. */
248
249 if (mode == Pmode && !reload_in_progress)
250 {
251 if (regno == FRAME_POINTER_REGNUM)
252 return frame_pointer_rtx;
253 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
254 if (regno == HARD_FRAME_POINTER_REGNUM)
255 return hard_frame_pointer_rtx;
256 #endif
257 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
258 if (regno == ARG_POINTER_REGNUM)
259 return arg_pointer_rtx;
260 #endif
261 #ifdef RETURN_ADDRESS_POINTER_REGNUM
262 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
263 return return_address_pointer_rtx;
264 #endif
265 if (regno == STACK_POINTER_REGNUM)
266 return stack_pointer_rtx;
267 }
268
269 return gen_rtx_raw_REG (mode, regno);
270 }
271
272 rtx
273 gen_rtx_MEM (mode, addr)
274 enum machine_mode mode;
275 rtx addr;
276 {
277 rtx rt = gen_rtx_raw_MEM (mode, addr);
278
279 /* This field is not cleared by the mere allocation of the rtx, so
280 we clear it here. */
281 MEM_ALIAS_SET (rt) = 0;
282
283 return rt;
284 }
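/* Illustrative sketch: a SImode memory reference through a fresh pseudo
   address register (the names are only for the example) could be built as

       rtx addr = gen_reg_rtx (Pmode);
       rtx mem  = gen_rtx_MEM (SImode, addr);

   and arrives with MEM_ALIAS_SET already cleared, as done above.  */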
285
286 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
287 **
288 ** This routine generates an RTX of the size specified by
289 ** <code>, which is an RTX code. The RTX structure is initialized
290 ** from the arguments <element1> through <elementn>, which are
291 ** interpreted according to the specific RTX type's format. The
292 ** special machine mode associated with the rtx (if any) is specified
293 ** in <mode>.
294 **
295 ** gen_rtx can be invoked in a way which resembles the lisp-like
296 ** rtx it will generate. For example, the following rtx structure:
297 **
298 ** (plus:QI (mem:QI (reg:SI 1))
299 ** (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
300 **
301 ** ...would be generated by the following C code:
302 **
303 ** gen_rtx (PLUS, QImode,
304 ** gen_rtx (MEM, QImode,
305 ** gen_rtx (REG, SImode, 1)),
306 ** gen_rtx (MEM, QImode,
307 ** gen_rtx (PLUS, SImode,
308 ** gen_rtx (REG, SImode, 2),
309 ** gen_rtx (REG, SImode, 3)))),
310 */
311
312 /*VARARGS2*/
313 rtx
314 gen_rtx VPROTO((enum rtx_code code, enum machine_mode mode, ...))
315 {
316 #ifndef ANSI_PROTOTYPES
317 enum rtx_code code;
318 enum machine_mode mode;
319 #endif
320 va_list p;
321 register int i; /* Array indices... */
322 register const char *fmt; /* Current rtx's format... */
323 register rtx rt_val; /* RTX to return to caller... */
324
325 VA_START (p, mode);
326
327 #ifndef ANSI_PROTOTYPES
328 code = va_arg (p, enum rtx_code);
329 mode = va_arg (p, enum machine_mode);
330 #endif
331
332 switch (code)
333 {
334 case CONST_INT:
335 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
336 break;
337
338 case CONST_DOUBLE:
339 {
340 rtx arg0 = va_arg (p, rtx);
341 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
342 HOST_WIDE_INT arg2 = va_arg (p, HOST_WIDE_INT);
343 rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1, arg2);
344 }
345 break;
346
347 case REG:
348 rt_val = gen_rtx_REG (mode, va_arg (p, int));
349 break;
350
351 case MEM:
352 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
353 break;
354
355 default:
356 rt_val = rtx_alloc (code); /* Allocate the storage space. */
357 rt_val->mode = mode; /* Store the machine mode... */
358
359 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
360 for (i = 0; i < GET_RTX_LENGTH (code); i++)
361 {
362 switch (*fmt++)
363 {
364 case '0': /* Unused field. */
365 break;
366
367 case 'i': /* An integer? */
368 XINT (rt_val, i) = va_arg (p, int);
369 break;
370
371 case 'w': /* A wide integer? */
372 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
373 break;
374
375 case 's': /* A string? */
376 XSTR (rt_val, i) = va_arg (p, char *);
377 break;
378
379 case 'e': /* An expression? */
380 case 'u': /* An insn? Same except when printing. */
381 XEXP (rt_val, i) = va_arg (p, rtx);
382 break;
383
384 case 'E': /* An RTX vector? */
385 XVEC (rt_val, i) = va_arg (p, rtvec);
386 break;
387
388 case 'b': /* A bitmap? */
389 XBITMAP (rt_val, i) = va_arg (p, bitmap);
390 break;
391
392 case 't': /* A tree? */
393 XTREE (rt_val, i) = va_arg (p, tree);
394 break;
395
396 default:
397 abort ();
398 }
399 }
400 break;
401 }
402
403 va_end (p);
404 return rt_val;
405 }
406
407 /* gen_rtvec (n, [rt1, ..., rtn])
408 **
409 ** This routine creates an rtvec and stores within it the
410 ** pointers to rtx's which are its arguments.
411 */
412
413 /*VARARGS1*/
414 rtvec
415 gen_rtvec VPROTO((int n, ...))
416 {
417 #ifndef ANSI_PROTOTYPES
418 int n;
419 #endif
420 int i;
421 va_list p;
422 rtx *vector;
423
424 VA_START (p, n);
425
426 #ifndef ANSI_PROTOTYPES
427 n = va_arg (p, int);
428 #endif
429
430 if (n == 0)
431 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
432
433 vector = (rtx *) alloca (n * sizeof (rtx));
434
435 for (i = 0; i < n; i++)
436 vector[i] = va_arg (p, rtx);
437 va_end (p);
438
439 return gen_rtvec_v (n, vector);
440 }
441
442 rtvec
443 gen_rtvec_v (n, argp)
444 int n;
445 rtx *argp;
446 {
447 register int i;
448 register rtvec rt_val;
449
450 if (n == 0)
451 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
452
453 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
454
455 for (i = 0; i < n; i++)
456 rt_val->elem[i] = *argp++;
457
458 return rt_val;
459 }
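/* Illustrative sketch: the same two-element vector can be built either
   variadically or from an array (x and y stand for existing rtx's):

       rtvec v1 = gen_rtvec (2, x, y);

       rtx elts[2];
       elts[0] = x, elts[1] = y;
       rtvec v2 = gen_rtvec_v (2, elts);

   Both return NULL_RTVEC when asked for zero elements.  */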
460
461 \f
462 /* Generate a REG rtx for a new pseudo register of mode MODE.
463 This pseudo is assigned the next sequential register number. */
464
465 rtx
466 gen_reg_rtx (mode)
467 enum machine_mode mode;
468 {
469 struct function *f = current_function;
470 register rtx val;
471
472 /* Don't let anything called after initial flow analysis create new
473 registers. */
474 if (no_new_pseudos)
475 abort ();
476
477 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
478 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
479 {
480 /* For complex modes, don't make a single pseudo.
481 Instead, make a CONCAT of two pseudos.
482 This allows noncontiguous allocation of the real and imaginary parts,
483 which makes much better code. Besides, allocating DCmode
484 pseudos overstrains reload on some machines like the 386. */
485 rtx realpart, imagpart;
486 int size = GET_MODE_UNIT_SIZE (mode);
487 enum machine_mode partmode
488 = mode_for_size (size * BITS_PER_UNIT,
489 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
490 ? MODE_FLOAT : MODE_INT),
491 0);
492
493 realpart = gen_reg_rtx (partmode);
494 imagpart = gen_reg_rtx (partmode);
495 return gen_rtx_CONCAT (mode, realpart, imagpart);
496 }
497
498 /* Make sure regno_pointer_flag and regno_reg_rtx are large
499 enough to have an element for this pseudo reg number. */
500
501 if (reg_rtx_no == f->emit->regno_pointer_flag_length)
502 {
503 int old_size = f->emit->regno_pointer_flag_length;
504 rtx *new1;
505 char *new = (char *) savealloc (old_size * 2);
506 memcpy (new, f->emit->regno_pointer_flag, old_size);
507 memset (new + old_size, 0, old_size);
508 f->emit->regno_pointer_flag = new;
509
510 new = (char *) savealloc (old_size * 2);
511 memcpy (new, f->emit->regno_pointer_align, old_size);
512 memset (new + old_size, 0, old_size);
513 f->emit->regno_pointer_align = new;
514
515 new1 = (rtx *) savealloc (old_size * 2 * sizeof (rtx));
516 memcpy (new1, regno_reg_rtx, old_size * sizeof (rtx));
517 memset (new1 + old_size, 0, old_size * sizeof (rtx));
518 regno_reg_rtx = new1;
519
520 f->emit->regno_pointer_flag_length = old_size * 2;
521 }
522
523 val = gen_rtx_raw_REG (mode, reg_rtx_no);
524 regno_reg_rtx[reg_rtx_no++] = val;
525 return val;
526 }
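/* Illustrative sketch: a scalar pseudo is a plain REG, while a complex
   pseudo comes back as a CONCAT of two pseudos in the component mode:

       rtx tmp  = gen_reg_rtx (SImode);   (reg:SI n)
       rtx cplx = gen_reg_rtx (DCmode);   (concat:DC (reg:DF n+1) (reg:DF n+2))   */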
527
528 /* Identify REG (which may be a CONCAT) as a user register. */
529
530 void
531 mark_user_reg (reg)
532 rtx reg;
533 {
534 if (GET_CODE (reg) == CONCAT)
535 {
536 REG_USERVAR_P (XEXP (reg, 0)) = 1;
537 REG_USERVAR_P (XEXP (reg, 1)) = 1;
538 }
539 else if (GET_CODE (reg) == REG)
540 REG_USERVAR_P (reg) = 1;
541 else
542 abort ();
543 }
544
545 /* Identify REG as a probable pointer register and show its alignment
546 as ALIGN, if nonzero. */
547
548 void
549 mark_reg_pointer (reg, align)
550 rtx reg;
551 int align;
552 {
553 if (! REGNO_POINTER_FLAG (REGNO (reg)))
554 {
555 REGNO_POINTER_FLAG (REGNO (reg)) = 1;
556
557 if (align)
558 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
559 }
560 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
561 /* We can no longer be sure just how aligned this pointer is. */
562 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
563 }
564
565 /* Return 1 plus largest pseudo reg number used in the current function. */
566
567 int
568 max_reg_num ()
569 {
570 return reg_rtx_no;
571 }
572
573 /* Return 1 + the largest label number used so far in the current function. */
574
575 int
576 max_label_num ()
577 {
578 if (last_label_num && label_num == base_label_num)
579 return last_label_num;
580 return label_num;
581 }
582
583 /* Return first label number used in this function (if any were used). */
584
585 int
586 get_first_label_num ()
587 {
588 return first_label_num;
589 }
590 \f
591 /* Return a value representing some low-order bits of X, where the number
592 of low-order bits is given by MODE. Note that no conversion is done
593 between floating-point and fixed-point values; rather, the bit
594 representation is returned.
595
596 This function handles the cases in common between gen_lowpart, below,
597 and two variants in cse.c and combine.c. These are the cases that can
598 be safely handled at all points in the compilation.
599
600 If this is not a case we can handle, return 0. */
601
602 rtx
603 gen_lowpart_common (mode, x)
604 enum machine_mode mode;
605 register rtx x;
606 {
607 int word = 0;
608
609 if (GET_MODE (x) == mode)
610 return x;
611
612 /* MODE must occupy no more words than the mode of X. */
613 if (GET_MODE (x) != VOIDmode
614 && ((GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
615 > ((GET_MODE_SIZE (GET_MODE (x)) + (UNITS_PER_WORD - 1))
616 / UNITS_PER_WORD)))
617 return 0;
618
619 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
620 word = ((GET_MODE_SIZE (GET_MODE (x))
621 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
622 / UNITS_PER_WORD);
623
624 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
625 && (GET_MODE_CLASS (mode) == MODE_INT
626 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
627 {
628 /* If we are getting the low-order part of something that has been
629 sign- or zero-extended, we can either just use the object being
630 extended or make a narrower extension. If we want an even smaller
631 piece than the size of the object being extended, call ourselves
632 recursively.
633
634 This case is used mostly by combine and cse. */
635
636 if (GET_MODE (XEXP (x, 0)) == mode)
637 return XEXP (x, 0);
638 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
639 return gen_lowpart_common (mode, XEXP (x, 0));
640 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
641 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
642 }
643 else if (GET_CODE (x) == SUBREG
644 && (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
645 || GET_MODE_SIZE (mode) == GET_MODE_UNIT_SIZE (GET_MODE (x))))
646 return (GET_MODE (SUBREG_REG (x)) == mode && SUBREG_WORD (x) == 0
647 ? SUBREG_REG (x)
648 : gen_rtx_SUBREG (mode, SUBREG_REG (x), SUBREG_WORD (x) + word));
649 else if (GET_CODE (x) == REG)
650 {
651 /* Let the backend decide how many registers to skip. This is needed
652 in particular for Sparc64 where fp regs are smaller than a word. */
653 /* ??? Note that subregs are now ambiguous, in that those against
654 pseudos are sized by the Word Size, while those against hard
655 regs are sized by the underlying register size. Better would be
656 to always interpret the subreg offset parameter as bytes or bits. */
657
658 if (WORDS_BIG_ENDIAN && REGNO (x) < FIRST_PSEUDO_REGISTER)
659 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
660 - HARD_REGNO_NREGS (REGNO (x), mode));
661
662 /* If the register is not valid for MODE, return 0. If we don't
663 do this, there is no way to fix up the resulting REG later.
664 We do, however, do this if the current REG is not valid for its
665 own mode. The latter is a kludge, but it is required due to the
666 way that parameters are passed on some machines, most
667 notably Sparc. */
668 if (REGNO (x) < FIRST_PSEUDO_REGISTER
669 && ! HARD_REGNO_MODE_OK (REGNO (x) + word, mode)
670 && HARD_REGNO_MODE_OK (REGNO (x), GET_MODE (x)))
671 return 0;
672 else if (REGNO (x) < FIRST_PSEUDO_REGISTER
673 /* integrate.c can't handle parts of a return value register. */
674 && (! REG_FUNCTION_VALUE_P (x)
675 || ! rtx_equal_function_value_matters)
676 #ifdef CLASS_CANNOT_CHANGE_SIZE
677 && ! (GET_MODE_SIZE (mode) != GET_MODE_SIZE (GET_MODE (x))
678 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_INT
679 && GET_MODE_CLASS (GET_MODE (x)) != MODE_COMPLEX_FLOAT
680 && (TEST_HARD_REG_BIT
681 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
682 REGNO (x))))
683 #endif
684 /* We want to keep the stack, frame, and arg pointers
685 special. */
686 && x != frame_pointer_rtx
687 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
688 && x != arg_pointer_rtx
689 #endif
690 && x != stack_pointer_rtx)
691 return gen_rtx_REG (mode, REGNO (x) + word);
692 else
693 return gen_rtx_SUBREG (mode, x, word);
694 }
695 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
696 from the low-order part of the constant. */
697 else if ((GET_MODE_CLASS (mode) == MODE_INT
698 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
699 && GET_MODE (x) == VOIDmode
700 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
701 {
702 /* If MODE is twice the host word size, X is already the desired
703 representation. Otherwise, if MODE is wider than a word, we can't
704 do this. If MODE is exactly a word, return just one CONST_INT.
705 If MODE is smaller than a word, clear the bits that don't belong
706 in our mode, unless they and our sign bit are all one. So we get
707 either a reasonable negative value or a reasonable unsigned value
708 for this mode. */
709
710 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
711 return x;
712 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
713 return 0;
714 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
715 return (GET_CODE (x) == CONST_INT ? x
716 : GEN_INT (CONST_DOUBLE_LOW (x)));
717 else
718 {
719 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
720 int width = GET_MODE_BITSIZE (mode);
721 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
722 : CONST_DOUBLE_LOW (x));
723
724 /* Sign extend to HOST_WIDE_INT. */
725 val = val << (HOST_BITS_PER_WIDE_INT - width) >> (HOST_BITS_PER_WIDE_INT - width);
726
727 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
728 : GEN_INT (val));
729 }
730 }
731
732 /* If X is an integral constant but we want it in floating-point, it
733 must be the case that we have a union of an integer and a floating-point
734 value. If the machine parameters allow it, simulate that union here
735 and return the result. The two-word and single-word cases are
736 different. */
737
738 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
739 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
740 || flag_pretend_float)
741 && GET_MODE_CLASS (mode) == MODE_FLOAT
742 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
743 && GET_CODE (x) == CONST_INT
744 && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
745 #ifdef REAL_ARITHMETIC
746 {
747 REAL_VALUE_TYPE r;
748 HOST_WIDE_INT i;
749
750 i = INTVAL (x);
751 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
752 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
753 }
754 #else
755 {
756 union {HOST_WIDE_INT i; float d; } u;
757
758 u.i = INTVAL (x);
759 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
760 }
761 #endif
762 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
763 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
764 || flag_pretend_float)
765 && GET_MODE_CLASS (mode) == MODE_FLOAT
766 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
767 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
768 && GET_MODE (x) == VOIDmode
769 && (sizeof (double) * HOST_BITS_PER_CHAR
770 == 2 * HOST_BITS_PER_WIDE_INT))
771 #ifdef REAL_ARITHMETIC
772 {
773 REAL_VALUE_TYPE r;
774 HOST_WIDE_INT i[2];
775 HOST_WIDE_INT low, high;
776
777 if (GET_CODE (x) == CONST_INT)
778 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
779 else
780 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
781
782 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
783 target machine. */
784 if (WORDS_BIG_ENDIAN)
785 i[0] = high, i[1] = low;
786 else
787 i[0] = low, i[1] = high;
788
789 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
790 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
791 }
792 #else
793 {
794 union {HOST_WIDE_INT i[2]; double d; } u;
795 HOST_WIDE_INT low, high;
796
797 if (GET_CODE (x) == CONST_INT)
798 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
799 else
800 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
801
802 #ifdef HOST_WORDS_BIG_ENDIAN
803 u.i[0] = high, u.i[1] = low;
804 #else
805 u.i[0] = low, u.i[1] = high;
806 #endif
807
808 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
809 }
810 #endif
811
812 /* We need an extra case for machines where HOST_BITS_PER_WIDE_INT is the
813 same as sizeof (double) or when sizeof (float) is larger than the
814 size of a word on the target machine. */
815 #ifdef REAL_ARITHMETIC
816 else if (mode == SFmode && GET_CODE (x) == CONST_INT)
817 {
818 REAL_VALUE_TYPE r;
819 HOST_WIDE_INT i;
820
821 i = INTVAL (x);
822 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
823 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
824 }
825 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
826 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
827 || flag_pretend_float)
828 && GET_MODE_CLASS (mode) == MODE_FLOAT
829 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
830 && GET_CODE (x) == CONST_INT
831 && (sizeof (double) * HOST_BITS_PER_CHAR
832 == HOST_BITS_PER_WIDE_INT))
833 {
834 REAL_VALUE_TYPE r;
835 HOST_WIDE_INT i;
836
837 i = INTVAL (x);
838 r = REAL_VALUE_FROM_TARGET_DOUBLE (&i);
839 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
840 }
841 #endif
842
843 /* Similarly, if this is converting a floating-point value into a
844 single-word integer. Only do this if the host and target parameters are
845 compatible. */
846
847 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
848 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
849 || flag_pretend_float)
850 && (GET_MODE_CLASS (mode) == MODE_INT
851 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
852 && GET_CODE (x) == CONST_DOUBLE
853 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
854 && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
855 return operand_subword (x, word, 0, GET_MODE (x));
856
857 /* Similarly, if this is converting a floating-point value into a
858 two-word integer, we can do this one word at a time and make an
859 integer. Only do this if the host and target parameters are
860 compatible. */
861
862 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
863 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
864 || flag_pretend_float)
865 && (GET_MODE_CLASS (mode) == MODE_INT
866 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
867 && GET_CODE (x) == CONST_DOUBLE
868 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
869 && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
870 {
871 rtx lowpart
872 = operand_subword (x, word + WORDS_BIG_ENDIAN, 0, GET_MODE (x));
873 rtx highpart
874 = operand_subword (x, word + ! WORDS_BIG_ENDIAN, 0, GET_MODE (x));
875
876 if (lowpart && GET_CODE (lowpart) == CONST_INT
877 && highpart && GET_CODE (highpart) == CONST_INT)
878 return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
879 }
880
881 /* Otherwise, we can't do this. */
882 return 0;
883 }
884 \f
885 /* Return the real part (which has mode MODE) of a complex value X.
886 This always comes at the low address in memory. */
887
888 rtx
889 gen_realpart (mode, x)
890 enum machine_mode mode;
891 register rtx x;
892 {
893 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
894 return XEXP (x, 0);
895 else if (WORDS_BIG_ENDIAN
896 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
897 && REG_P (x)
898 && REGNO (x) < FIRST_PSEUDO_REGISTER)
899 fatal ("Unable to access real part of complex value in a hard register on this target");
900 else if (WORDS_BIG_ENDIAN)
901 return gen_highpart (mode, x);
902 else
903 return gen_lowpart (mode, x);
904 }
905
906 /* Return the imaginary part (which has mode MODE) of a complex value X.
907 This always comes at the high address in memory. */
908
909 rtx
910 gen_imagpart (mode, x)
911 enum machine_mode mode;
912 register rtx x;
913 {
914 if (GET_CODE (x) == CONCAT && GET_MODE (XEXP (x, 0)) == mode)
915 return XEXP (x, 1);
916 else if (WORDS_BIG_ENDIAN)
917 return gen_lowpart (mode, x);
918 else if (!WORDS_BIG_ENDIAN
919 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
920 && REG_P (x)
921 && REGNO (x) < FIRST_PSEUDO_REGISTER)
922 fatal ("Unable to access imaginary part of complex value in a hard register on this target");
923 else
924 return gen_highpart (mode, x);
925 }
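/* Illustrative sketch: for a complex value built by gen_reg_rtx above,
   which is a CONCAT of two component-mode pseudos, these accessors simply
   return its operands:

       rtx c  = gen_reg_rtx (DCmode);
       rtx re = gen_realpart (DFmode, c);   i.e. XEXP (c, 0)
       rtx im = gen_imagpart (DFmode, c);   i.e. XEXP (c, 1)               */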
926
927 /* Return 1 iff X, assumed to be a SUBREG,
928 refers to the real part of the complex value in its containing reg.
929 Complex values are always stored with the real part in the first word,
930 regardless of WORDS_BIG_ENDIAN. */
931
932 int
933 subreg_realpart_p (x)
934 rtx x;
935 {
936 if (GET_CODE (x) != SUBREG)
937 abort ();
938
939 return SUBREG_WORD (x) * UNITS_PER_WORD < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x)));
940 }
941 \f
942 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
943 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
944 least-significant part of X.
945 MODE specifies how big a part of X to return;
946 it usually should not be larger than a word.
947 If X is a MEM whose address is a QUEUED, the value may be so also. */
948
949 rtx
950 gen_lowpart (mode, x)
951 enum machine_mode mode;
952 register rtx x;
953 {
954 rtx result = gen_lowpart_common (mode, x);
955
956 if (result)
957 return result;
958 else if (GET_CODE (x) == REG)
959 {
960 /* Must be a hard reg that's not valid in MODE. */
961 result = gen_lowpart_common (mode, copy_to_reg (x));
962 if (result == 0)
963 abort ();
964 return result;
965 }
966 else if (GET_CODE (x) == MEM)
967 {
968 /* The only additional case we can do is MEM. */
969 register int offset = 0;
970 if (WORDS_BIG_ENDIAN)
971 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
972 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
973
974 if (BYTES_BIG_ENDIAN)
975 /* Adjust the address so that the address-after-the-data
976 is unchanged. */
977 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
978 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
979
980 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
981 }
982 else if (GET_CODE (x) == ADDRESSOF)
983 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
984 else
985 abort ();
986 }
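/* Illustrative sketch (assuming a 32-bit, little-endian target): the low
   SImode part of a DImode pseudo is a SUBREG, while the low part of a small
   constant is just the constant itself:

       rtx lo = gen_lowpart (SImode, gen_reg_rtx (DImode));   (subreg:SI (reg:DI n) 0)
       rtx c  = gen_lowpart (SImode, GEN_INT (5));            (const_int 5)   */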
987
988 /* Like `gen_lowpart', but refer to the most significant part.
989 This is used to access the imaginary part of a complex number. */
990
991 rtx
992 gen_highpart (mode, x)
993 enum machine_mode mode;
994 register rtx x;
995 {
996 /* This case loses if X is a subreg. To catch bugs early,
997 complain if an invalid MODE is used even in other cases. */
998 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
999 && GET_MODE_SIZE (mode) != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1000 abort ();
1001 if (GET_CODE (x) == CONST_DOUBLE
1002 #if !(TARGET_FLOAT_FORMAT != HOST_FLOAT_FORMAT || defined (REAL_IS_NOT_DOUBLE))
1003 && GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT
1004 #endif
1005 )
1006 return GEN_INT (CONST_DOUBLE_HIGH (x) & GET_MODE_MASK (mode));
1007 else if (GET_CODE (x) == CONST_INT)
1008 {
1009 if (HOST_BITS_PER_WIDE_INT <= BITS_PER_WORD)
1010 return const0_rtx;
1011 return GEN_INT (INTVAL (x) >> (HOST_BITS_PER_WIDE_INT - BITS_PER_WORD));
1012 }
1013 else if (GET_CODE (x) == MEM)
1014 {
1015 register int offset = 0;
1016 if (! WORDS_BIG_ENDIAN)
1017 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1018 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1019
1020 if (! BYTES_BIG_ENDIAN
1021 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
1022 offset -= (GET_MODE_SIZE (mode)
1023 - MIN (UNITS_PER_WORD,
1024 GET_MODE_SIZE (GET_MODE (x))));
1025
1026 return change_address (x, mode, plus_constant (XEXP (x, 0), offset));
1027 }
1028 else if (GET_CODE (x) == SUBREG)
1029 {
1030 /* The only time this should occur is when we are looking at a
1031 multi-word item with a SUBREG whose mode is the same as that of the
1032 item. It isn't clear what we would do if it wasn't. */
1033 if (SUBREG_WORD (x) != 0)
1034 abort ();
1035 return gen_highpart (mode, SUBREG_REG (x));
1036 }
1037 else if (GET_CODE (x) == REG)
1038 {
1039 int word;
1040
1041 /* Let the backend decide how many registers to skip. This is needed
1042 in particular for sparc64 where fp regs are smaller than a word. */
1043 /* ??? Note that subregs are now ambiguous, in that those against
1044 pseudos are sized by the word size, while those against hard
1045 regs are sized by the underlying register size. Better would be
1046 to always interpret the subreg offset parameter as bytes or bits. */
1047
1048 if (WORDS_BIG_ENDIAN)
1049 word = 0;
1050 else if (REGNO (x) < FIRST_PSEUDO_REGISTER)
1051 word = (HARD_REGNO_NREGS (REGNO (x), GET_MODE (x))
1052 - HARD_REGNO_NREGS (REGNO (x), mode));
1053 else
1054 word = ((GET_MODE_SIZE (GET_MODE (x))
1055 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
1056 / UNITS_PER_WORD);
1057
1058 if (REGNO (x) < FIRST_PSEUDO_REGISTER
1059 /* integrate.c can't handle parts of a return value register. */
1060 && (! REG_FUNCTION_VALUE_P (x)
1061 || ! rtx_equal_function_value_matters)
1062 /* We want to keep the stack, frame, and arg pointers special. */
1063 && x != frame_pointer_rtx
1064 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1065 && x != arg_pointer_rtx
1066 #endif
1067 && x != stack_pointer_rtx)
1068 return gen_rtx_REG (mode, REGNO (x) + word);
1069 else
1070 return gen_rtx_SUBREG (mode, x, word);
1071 }
1072 else
1073 abort ();
1074 }
1075
1076 /* Return 1 iff X, assumed to be a SUBREG,
1077 refers to the least significant part of its containing reg.
1078 If X is not a SUBREG, always return 1 (it is its own low part!). */
1079
1080 int
1081 subreg_lowpart_p (x)
1082 rtx x;
1083 {
1084 if (GET_CODE (x) != SUBREG)
1085 return 1;
1086 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1087 return 0;
1088
1089 if (WORDS_BIG_ENDIAN
1090 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD)
1091 return (SUBREG_WORD (x)
1092 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
1093 - MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD))
1094 / UNITS_PER_WORD));
1095
1096 return SUBREG_WORD (x) == 0;
1097 }
1098 \f
1099 /* Return subword I of operand OP.
1100 The word number, I, is interpreted as the word number starting at the
1101 low-order address. Word 0 is the low-order word if not WORDS_BIG_ENDIAN,
1102 otherwise it is the high-order word.
1103
1104 If we cannot extract the required word, we return zero. Otherwise, an
1105 rtx corresponding to the requested word will be returned.
1106
1107 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1108 reload has completed, a valid address will always be returned. After
1109 reload, if a valid address cannot be returned, we return zero.
1110
1111 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1112 it is the responsibility of the caller.
1113
1114 MODE is the mode of OP in case it is a CONST_INT. */
1115
1116 rtx
1117 operand_subword (op, i, validate_address, mode)
1118 rtx op;
1119 int i;
1120 int validate_address;
1121 enum machine_mode mode;
1122 {
1123 HOST_WIDE_INT val;
1124 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1125
1126 if (mode == VOIDmode)
1127 mode = GET_MODE (op);
1128
1129 if (mode == VOIDmode)
1130 abort ();
1131
1132 /* If OP is narrower than a word, fail. */
1133 if (mode != BLKmode
1134 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1135 return 0;
1136
1137 /* If we want a word outside OP, return zero. */
1138 if (mode != BLKmode
1139 && (i + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1140 return const0_rtx;
1141
1142 /* If OP is already an integer word, return it. */
1143 if (GET_MODE_CLASS (mode) == MODE_INT
1144 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1145 return op;
1146
1147 /* If OP is a REG or SUBREG, we can handle it very simply. */
1148 if (GET_CODE (op) == REG)
1149 {
1150 /* ??? There is a potential problem with this code. It does not
1151 properly handle extractions of a subword from a hard register
1152 that is larger than word_mode. Presumably the check for
1153 HARD_REGNO_MODE_OK catches most of these cases. */
1154
1155 /* If OP is a hard register, but OP + I is not a hard register,
1156 then extracting a subword is impossible.
1157
1158 For example, consider if OP is the last hard register and it is
1159 larger than word_mode. If we wanted word N (for N > 0) because a
1160 part of that hard register was known to contain a useful value,
1161 then OP + I would refer to a pseudo, not the hard register we
1162 actually wanted. */
1163 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1164 && REGNO (op) + i >= FIRST_PSEUDO_REGISTER)
1165 return 0;
1166
1167 /* If the register is not valid for MODE, return 0. Note we
1168 have to check both OP and OP + I since they may refer to
1169 different parts of the register file.
1170
1171 Consider if OP refers to the last 96-bit FP register and we want
1172 subword 3 because that subword is known to contain a value we
1173 needed. */
1174 if (REGNO (op) < FIRST_PSEUDO_REGISTER
1175 && (! HARD_REGNO_MODE_OK (REGNO (op), word_mode)
1176 || ! HARD_REGNO_MODE_OK (REGNO (op) + i, word_mode)))
1177 return 0;
1178 else if (REGNO (op) >= FIRST_PSEUDO_REGISTER
1179 || (REG_FUNCTION_VALUE_P (op)
1180 && rtx_equal_function_value_matters)
1181 /* We want to keep the stack, frame, and arg pointers
1182 special. */
1183 || op == frame_pointer_rtx
1184 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1185 || op == arg_pointer_rtx
1186 #endif
1187 || op == stack_pointer_rtx)
1188 return gen_rtx_SUBREG (word_mode, op, i);
1189 else
1190 return gen_rtx_REG (word_mode, REGNO (op) + i);
1191 }
1192 else if (GET_CODE (op) == SUBREG)
1193 return gen_rtx_SUBREG (word_mode, SUBREG_REG (op), i + SUBREG_WORD (op));
1194 else if (GET_CODE (op) == CONCAT)
1195 {
1196 int partwords = GET_MODE_UNIT_SIZE (GET_MODE (op)) / UNITS_PER_WORD;
1197 if (i < partwords)
1198 return operand_subword (XEXP (op, 0), i, validate_address, mode);
1199 return operand_subword (XEXP (op, 1), i - partwords,
1200 validate_address, mode);
1201 }
1202
1203 /* Form a new MEM at the requested address. */
1204 if (GET_CODE (op) == MEM)
1205 {
1206 rtx addr = plus_constant (XEXP (op, 0), i * UNITS_PER_WORD);
1207 rtx new;
1208
1209 if (validate_address)
1210 {
1211 if (reload_completed)
1212 {
1213 if (! strict_memory_address_p (word_mode, addr))
1214 return 0;
1215 }
1216 else
1217 addr = memory_address (word_mode, addr);
1218 }
1219
1220 new = gen_rtx_MEM (word_mode, addr);
1221
1222 MEM_COPY_ATTRIBUTES (new, op);
1223 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1224 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (op);
1225
1226 return new;
1227 }
1228
1229 /* The only remaining cases are when OP is a constant. If the host and
1230 target floating formats are the same, handling two-word floating
1231 constants is easy. Note that REAL_VALUE_TO_TARGET_{SINGLE,DOUBLE}
1232 are defined as returning one or two 32 bit values, respectively,
1233 and not values of BITS_PER_WORD bits. */
1234 #ifdef REAL_ARITHMETIC
1235 /* The output is some bits, the width of the target machine's word.
1236 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1237 host can't. */
1238 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1239 && GET_MODE_CLASS (mode) == MODE_FLOAT
1240 && GET_MODE_BITSIZE (mode) == 64
1241 && GET_CODE (op) == CONST_DOUBLE)
1242 {
1243 long k[2];
1244 REAL_VALUE_TYPE rv;
1245
1246 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1247 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1248
1249 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1250 which the words are written depends on the word endianness.
1251 ??? This is a potential portability problem and should
1252 be fixed at some point.
1253
1254 We must exercise caution with the sign bit. By definition there
1255 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1256 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1257 So we explicitly mask and sign-extend as necessary. */
1258 if (BITS_PER_WORD == 32)
1259 {
1260 val = k[i];
1261 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1262 return GEN_INT (val);
1263 }
1264 #if HOST_BITS_PER_WIDE_INT >= 64
1265 else if (BITS_PER_WORD >= 64 && i == 0)
1266 {
1267 val = k[! WORDS_BIG_ENDIAN];
1268 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1269 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1270 return GEN_INT (val);
1271 }
1272 #endif
1273 else if (BITS_PER_WORD == 16)
1274 {
1275 val = k[i >> 1];
1276 if ((i & 1) == !WORDS_BIG_ENDIAN)
1277 val >>= 16;
1278 val &= 0xffff;
1279 return GEN_INT (val);
1280 }
1281 else
1282 abort ();
1283 }
1284 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1285 && GET_MODE_CLASS (mode) == MODE_FLOAT
1286 && GET_MODE_BITSIZE (mode) > 64
1287 && GET_CODE (op) == CONST_DOUBLE)
1288 {
1289 long k[4];
1290 REAL_VALUE_TYPE rv;
1291
1292 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1293 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1294
1295 if (BITS_PER_WORD == 32)
1296 {
1297 val = k[i];
1298 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1299 return GEN_INT (val);
1300 }
1301 else
1302 abort ();
1303 }
1304 #else /* no REAL_ARITHMETIC */
1305 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1306 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1307 || flag_pretend_float)
1308 && GET_MODE_CLASS (mode) == MODE_FLOAT
1309 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
1310 && GET_CODE (op) == CONST_DOUBLE)
1311 {
1312 /* The constant is stored in the host's word-ordering,
1313 but we want to access it in the target's word-ordering. Some
1314 compilers don't like a conditional inside macro args, so we have two
1315 copies of the return. */
1316 #ifdef HOST_WORDS_BIG_ENDIAN
1317 return GEN_INT (i == WORDS_BIG_ENDIAN
1318 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1319 #else
1320 return GEN_INT (i != WORDS_BIG_ENDIAN
1321 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1322 #endif
1323 }
1324 #endif /* no REAL_ARITHMETIC */
1325
1326 /* Single word float is a little harder, since single- and double-word
1327 values often do not have the same high-order bits. We have already
1328 verified that we want the only defined word of the single-word value. */
1329 #ifdef REAL_ARITHMETIC
1330 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1331 && GET_MODE_BITSIZE (mode) == 32
1332 && GET_CODE (op) == CONST_DOUBLE)
1333 {
1334 long l;
1335 REAL_VALUE_TYPE rv;
1336
1337 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1338 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1339
1340 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1341 val = l;
1342 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1343
1344 if (BITS_PER_WORD == 16)
1345 {
1346 if ((i & 1) == !WORDS_BIG_ENDIAN)
1347 val >>= 16;
1348 val &= 0xffff;
1349 }
1350
1351 return GEN_INT (val);
1352 }
1353 #else
1354 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1355 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1356 || flag_pretend_float)
1357 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1358 && GET_MODE_CLASS (mode) == MODE_FLOAT
1359 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1360 && GET_CODE (op) == CONST_DOUBLE)
1361 {
1362 double d;
1363 union {float f; HOST_WIDE_INT i; } u;
1364
1365 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1366
1367 u.f = d;
1368 return GEN_INT (u.i);
1369 }
1370 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1371 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1372 || flag_pretend_float)
1373 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1374 && GET_MODE_CLASS (mode) == MODE_FLOAT
1375 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1376 && GET_CODE (op) == CONST_DOUBLE)
1377 {
1378 double d;
1379 union {double d; HOST_WIDE_INT i; } u;
1380
1381 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1382
1383 u.d = d;
1384 return GEN_INT (u.i);
1385 }
1386 #endif /* no REAL_ARITHMETIC */
1387
1388 /* The only remaining cases that we can handle are integers.
1389 Convert to proper endianness now since these cases need it.
1390 At this point, i == 0 means the low-order word.
1391
1392 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1393 in general. However, if OP is (const_int 0), we can just return
1394 it for any word. */
1395
1396 if (op == const0_rtx)
1397 return op;
1398
1399 if (GET_MODE_CLASS (mode) != MODE_INT
1400 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1401 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1402 return 0;
1403
1404 if (WORDS_BIG_ENDIAN)
1405 i = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - i;
1406
1407 /* Find out which word on the host machine this value is in and get
1408 it from the constant. */
1409 val = (i / size_ratio == 0
1410 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1411 : (GET_CODE (op) == CONST_INT
1412 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1413
1414 /* Get the value we want into the low bits of val. */
1415 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1416 val = ((val >> ((i % size_ratio) * BITS_PER_WORD)));
1417
1418 val = trunc_int_for_mode (val, word_mode);
1419
1420 return GEN_INT (val);
1421 }
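/* Illustrative sketch (32-bit target, so a DImode value spans two words):
   the two word_mode pieces of a DImode pseudo x are

       rtx lo = operand_subword (x, 0, 1, DImode);
       rtx hi = operand_subword (x, 1, 1, DImode);

   where word 0 is the low-order word unless WORDS_BIG_ENDIAN, per the
   comment above.  */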
1422
1423 /* Similar to `operand_subword', but never return 0. If we can't extract
1424 the required subword, put OP into a register and try again. If that fails,
1425 abort. We always validate the address in this case. It is not valid
1426 to call this function after reload; it is mostly meant for RTL
1427 generation.
1428
1429 MODE is the mode of OP, in case it is CONST_INT. */
1430
1431 rtx
1432 operand_subword_force (op, i, mode)
1433 rtx op;
1434 int i;
1435 enum machine_mode mode;
1436 {
1437 rtx result = operand_subword (op, i, 1, mode);
1438
1439 if (result)
1440 return result;
1441
1442 if (mode != BLKmode && mode != VOIDmode)
1443 {
1444 /* If this is a register which cannot be accessed by words, copy it
1445 to a pseudo register. */
1446 if (GET_CODE (op) == REG)
1447 op = copy_to_reg (op);
1448 else
1449 op = force_reg (mode, op);
1450 }
1451
1452 result = operand_subword (op, i, 1, mode);
1453 if (result == 0)
1454 abort ();
1455
1456 return result;
1457 }
1458 \f
1459 /* Given a compare instruction, swap the operands.
1460 A test instruction is changed into a compare of 0 against the operand. */
1461
1462 void
1463 reverse_comparison (insn)
1464 rtx insn;
1465 {
1466 rtx body = PATTERN (insn);
1467 rtx comp;
1468
1469 if (GET_CODE (body) == SET)
1470 comp = SET_SRC (body);
1471 else
1472 comp = SET_SRC (XVECEXP (body, 0, 0));
1473
1474 if (GET_CODE (comp) == COMPARE)
1475 {
1476 rtx op0 = XEXP (comp, 0);
1477 rtx op1 = XEXP (comp, 1);
1478 XEXP (comp, 0) = op1;
1479 XEXP (comp, 1) = op0;
1480 }
1481 else
1482 {
1483 rtx new = gen_rtx_COMPARE (VOIDmode, CONST0_RTX (GET_MODE (comp)), comp);
1484 if (GET_CODE (body) == SET)
1485 SET_SRC (body) = new;
1486 else
1487 SET_SRC (XVECEXP (body, 0, 0)) = new;
1488 }
1489 }
1490 \f
1491 /* Return a memory reference like MEMREF, but with its mode changed
1492 to MODE and its address changed to ADDR.
1493 (VOIDmode means don't change the mode.
1494 NULL for ADDR means don't change the address.) */
1495
1496 rtx
1497 change_address (memref, mode, addr)
1498 rtx memref;
1499 enum machine_mode mode;
1500 rtx addr;
1501 {
1502 rtx new;
1503
1504 if (GET_CODE (memref) != MEM)
1505 abort ();
1506 if (mode == VOIDmode)
1507 mode = GET_MODE (memref);
1508 if (addr == 0)
1509 addr = XEXP (memref, 0);
1510
1511 /* If reload is in progress or has completed, ADDR must be valid.
1512 Otherwise, we can call memory_address to make it valid. */
1513 if (reload_completed || reload_in_progress)
1514 {
1515 if (! memory_address_p (mode, addr))
1516 abort ();
1517 }
1518 else
1519 addr = memory_address (mode, addr);
1520
1521 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1522 return memref;
1523
1524 new = gen_rtx_MEM (mode, addr);
1525 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (memref);
1526 MEM_COPY_ATTRIBUTES (new, memref);
1527 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (memref);
1528 return new;
1529 }
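/* Illustrative sketch: reusing MEMREF's address while changing only the
   mode, e.g. viewing a QImode memory reference as SImode (NULL_RTX keeps
   the old address), might look like

       rtx wide = change_address (mem, SImode, NULL_RTX);

   Before reload this forces the address into a valid form if needed;
   afterwards an invalid address aborts, as above.  */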
1530 \f
1531 /* Return a newly created CODE_LABEL rtx with a unique label number. */
1532
1533 rtx
1534 gen_label_rtx ()
1535 {
1536 register rtx label;
1537
1538 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
1539 NULL_RTX, label_num++, NULL_PTR);
1540
1541 LABEL_NUSES (label) = 0;
1542 return label;
1543 }
1544 \f
1545 /* For procedure integration. */
1546
1547 /* Install new pointers to the first and last insns in the chain.
1548 Also, set cur_insn_uid to one higher than the last in use.
1549 Used for an inline-procedure after copying the insn chain. */
1550
1551 void
1552 set_new_first_and_last_insn (first, last)
1553 rtx first, last;
1554 {
1555 rtx insn;
1556
1557 first_insn = first;
1558 last_insn = last;
1559 cur_insn_uid = 0;
1560
1561 for (insn = first; insn; insn = NEXT_INSN (insn))
1562 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
1563
1564 cur_insn_uid++;
1565 }
1566
1567 /* Set the range of label numbers found in the current function.
1568 This is used when belatedly compiling an inline function. */
1569
1570 void
1571 set_new_first_and_last_label_num (first, last)
1572 int first, last;
1573 {
1574 base_label_num = label_num;
1575 first_label_num = first;
1576 last_label_num = last;
1577 }
1578
1579 /* Set the last label number found in the current function.
1580 This is used when belatedly compiling an inline function. */
1581
1582 void
1583 set_new_last_label_num (last)
1584 int last;
1585 {
1586 base_label_num = label_num;
1587 last_label_num = last;
1588 }
1589 \f
1590 /* Restore all variables describing the current status from the structure *P.
1591 This is used after a nested function. */
1592
1593 void
1594 restore_emit_status (p)
1595 struct function *p;
1596 {
1597 last_label_num = 0;
1598 clear_emit_caches ();
1599 }
1600 \f
1601 /* Go through all the RTL insn bodies and copy any invalid shared structure.
1602 It does not work to do this twice, because the mark bits set here
1603 are not cleared afterwards. */
1604
1605 void
1606 unshare_all_rtl (insn)
1607 register rtx insn;
1608 {
1609 for (; insn; insn = NEXT_INSN (insn))
1610 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1611 || GET_CODE (insn) == CALL_INSN)
1612 {
1613 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
1614 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
1615 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
1616 }
1617
1618 /* Make sure the addresses of stack slots found outside the insn chain
1619 (such as, in DECL_RTL of a variable) are not shared
1620 with the insn chain.
1621
1622 This special care is necessary when the stack slot MEM does not
1623 actually appear in the insn chain. If it does appear, its address
1624 is unshared from all else at that point. */
1625
1626 copy_rtx_if_shared (stack_slot_list);
1627 }
1628
1629 /* Mark ORIG as in use, and return a copy of it if it was already in use.
1630 Recursively does the same for subexpressions. */
1631
1632 rtx
1633 copy_rtx_if_shared (orig)
1634 rtx orig;
1635 {
1636 register rtx x = orig;
1637 register int i;
1638 register enum rtx_code code;
1639 register const char *format_ptr;
1640 int copied = 0;
1641
1642 if (x == 0)
1643 return 0;
1644
1645 code = GET_CODE (x);
1646
1647 /* These types may be freely shared. */
1648
1649 switch (code)
1650 {
1651 case REG:
1652 case QUEUED:
1653 case CONST_INT:
1654 case CONST_DOUBLE:
1655 case SYMBOL_REF:
1656 case CODE_LABEL:
1657 case PC:
1658 case CC0:
1659 case SCRATCH:
1660 /* SCRATCH must be shared because they represent distinct values. */
1661 return x;
1662
1663 case CONST:
1664 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
1665 a LABEL_REF, it isn't sharable. */
1666 if (GET_CODE (XEXP (x, 0)) == PLUS
1667 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1668 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1669 return x;
1670 break;
1671
1672 case INSN:
1673 case JUMP_INSN:
1674 case CALL_INSN:
1675 case NOTE:
1676 case BARRIER:
1677 /* The chain of insns is not being copied. */
1678 return x;
1679
1680 case MEM:
1681 /* A MEM is allowed to be shared if its address is constant
1682 or is a constant plus one of the special registers. */
1683 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1684 || XEXP (x, 0) == virtual_stack_vars_rtx
1685 || XEXP (x, 0) == virtual_incoming_args_rtx)
1686 return x;
1687
1688 if (GET_CODE (XEXP (x, 0)) == PLUS
1689 && (XEXP (XEXP (x, 0), 0) == virtual_stack_vars_rtx
1690 || XEXP (XEXP (x, 0), 0) == virtual_incoming_args_rtx)
1691 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
1692 {
1693 /* This MEM can appear in more than one place,
1694 but its address better not be shared with anything else. */
1695 if (! x->used)
1696 XEXP (x, 0) = copy_rtx_if_shared (XEXP (x, 0));
1697 x->used = 1;
1698 return x;
1699 }
1700 break;
1701
1702 default:
1703 break;
1704 }
1705
1706 /* This rtx may not be shared. If it has already been seen,
1707 replace it with a copy of itself. */
1708
1709 if (x->used)
1710 {
1711 register rtx copy;
1712
1713 copy = rtx_alloc (code);
1714 bcopy ((char *) x, (char *) copy,
1715 (sizeof (*copy) - sizeof (copy->fld)
1716 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
1717 x = copy;
1718 copied = 1;
1719 }
1720 x->used = 1;
1721
1722 /* Now scan the subexpressions recursively.
1723 We can store any replaced subexpressions directly into X
1724 since we know X is not shared! Any vectors in X
1725 must be copied if X was copied. */
1726
1727 format_ptr = GET_RTX_FORMAT (code);
1728
1729 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1730 {
1731 switch (*format_ptr++)
1732 {
1733 case 'e':
1734 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
1735 break;
1736
1737 case 'E':
1738 if (XVEC (x, i) != NULL)
1739 {
1740 register int j;
1741 int len = XVECLEN (x, i);
1742
1743 if (copied && len > 0)
1744 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
1745 for (j = 0; j < len; j++)
1746 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
1747 }
1748 break;
1749 }
1750 }
1751 return x;
1752 }
1753
1754 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
1755 to look for shared sub-parts. */
1756
1757 void
1758 reset_used_flags (x)
1759 rtx x;
1760 {
1761 register int i, j;
1762 register enum rtx_code code;
1763 register const char *format_ptr;
1764
1765 if (x == 0)
1766 return;
1767
1768 code = GET_CODE (x);
1769
1770 /* These types may be freely shared so we needn't do any resetting
1771 for them. */
1772
1773 switch (code)
1774 {
1775 case REG:
1776 case QUEUED:
1777 case CONST_INT:
1778 case CONST_DOUBLE:
1779 case SYMBOL_REF:
1780 case CODE_LABEL:
1781 case PC:
1782 case CC0:
1783 return;
1784
1785 case INSN:
1786 case JUMP_INSN:
1787 case CALL_INSN:
1788 case NOTE:
1789 case LABEL_REF:
1790 case BARRIER:
1791 /* The chain of insns is not being copied. */
1792 return;
1793
1794 default:
1795 break;
1796 }
1797
1798 x->used = 0;
1799
1800 format_ptr = GET_RTX_FORMAT (code);
1801 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1802 {
1803 switch (*format_ptr++)
1804 {
1805 case 'e':
1806 reset_used_flags (XEXP (x, i));
1807 break;
1808
1809 case 'E':
1810 for (j = 0; j < XVECLEN (x, i); j++)
1811 reset_used_flags (XVECEXP (x, i, j));
1812 break;
1813 }
1814 }
1815 }
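
/* Taken together, reset_used_flags and copy_rtx_if_shared implement the
   usual unsharing idiom.  A sketch of such a pass (illustrative only;
   the actual driver elsewhere in this file may differ in detail):

	rtx insn;
	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    reset_used_flags (PATTERN (insn));
	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */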
1816 \f
1817 /* Copy X if necessary so that it won't be altered by changes in OTHER.
1818 Return X or the rtx for the pseudo reg the value of X was copied into.
1819 OTHER must be valid as a SET_DEST. */
1820
1821 rtx
1822 make_safe_from (x, other)
1823 rtx x, other;
1824 {
1825 while (1)
1826 switch (GET_CODE (other))
1827 {
1828 case SUBREG:
1829 other = SUBREG_REG (other);
1830 break;
1831 case STRICT_LOW_PART:
1832 case SIGN_EXTEND:
1833 case ZERO_EXTEND:
1834 other = XEXP (other, 0);
1835 break;
1836 default:
1837 goto done;
1838 }
1839 done:
1840 if ((GET_CODE (other) == MEM
1841 && ! CONSTANT_P (x)
1842 && GET_CODE (x) != REG
1843 && GET_CODE (x) != SUBREG)
1844 || (GET_CODE (other) == REG
1845 && (REGNO (other) < FIRST_PSEUDO_REGISTER
1846 || reg_mentioned_p (other, x))))
1847 {
1848 rtx temp = gen_reg_rtx (GET_MODE (x));
1849 emit_move_insn (temp, x);
1850 return temp;
1851 }
1852 return x;
1853 }
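
/* For example, when expanding an operation that stores into OTHER before
   all uses of X have been emitted, a caller can protect X first (a sketch
   only; X and OTHER are whatever rtx the expander is working with):

	x = make_safe_from (x, other);
	... emit insns that set OTHER, then insns that read X ...  */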
1854 \f
1855 /* Emission of insns (adding them to the doubly-linked list). */
1856
1857 /* Return the first insn of the current sequence or current function. */
1858
1859 rtx
1860 get_insns ()
1861 {
1862 return first_insn;
1863 }
1864
1865 /* Return the last insn emitted in current sequence or current function. */
1866
1867 rtx
1868 get_last_insn ()
1869 {
1870 return last_insn;
1871 }
1872
1873 /* Specify a new insn as the last in the chain. */
1874
1875 void
1876 set_last_insn (insn)
1877 rtx insn;
1878 {
1879 if (NEXT_INSN (insn) != 0)
1880 abort ();
1881 last_insn = insn;
1882 }
1883
1884 /* Return the last insn emitted, even if it is in a sequence now pushed. */
1885
1886 rtx
1887 get_last_insn_anywhere ()
1888 {
1889 struct sequence_stack *stack;
1890 if (last_insn)
1891 return last_insn;
1892 for (stack = seq_stack; stack; stack = stack->next)
1893 if (stack->last != 0)
1894 return stack->last;
1895 return 0;
1896 }
1897
1898 /* Return a number larger than any instruction's uid in this function. */
1899
1900 int
1901 get_max_uid ()
1902 {
1903 return cur_insn_uid;
1904 }
1905 \f
1906 /* Return the next insn. If it is a SEQUENCE, return the first insn
1907 of the sequence. */
1908
1909 rtx
1910 next_insn (insn)
1911 rtx insn;
1912 {
1913 if (insn)
1914 {
1915 insn = NEXT_INSN (insn);
1916 if (insn && GET_CODE (insn) == INSN
1917 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1918 insn = XVECEXP (PATTERN (insn), 0, 0);
1919 }
1920
1921 return insn;
1922 }
1923
1924 /* Return the previous insn. If it is a SEQUENCE, return the last insn
1925 of the sequence. */
1926
1927 rtx
1928 previous_insn (insn)
1929 rtx insn;
1930 {
1931 if (insn)
1932 {
1933 insn = PREV_INSN (insn);
1934 if (insn && GET_CODE (insn) == INSN
1935 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1936 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
1937 }
1938
1939 return insn;
1940 }
1941
1942 /* Return the next insn after INSN that is not a NOTE. This routine does not
1943 look inside SEQUENCEs. */
1944
1945 rtx
1946 next_nonnote_insn (insn)
1947 rtx insn;
1948 {
1949 while (insn)
1950 {
1951 insn = NEXT_INSN (insn);
1952 if (insn == 0 || GET_CODE (insn) != NOTE)
1953 break;
1954 }
1955
1956 return insn;
1957 }
1958
1959 /* Return the previous insn before INSN that is not a NOTE. This routine does
1960 not look inside SEQUENCEs. */
1961
1962 rtx
1963 prev_nonnote_insn (insn)
1964 rtx insn;
1965 {
1966 while (insn)
1967 {
1968 insn = PREV_INSN (insn);
1969 if (insn == 0 || GET_CODE (insn) != NOTE)
1970 break;
1971 }
1972
1973 return insn;
1974 }
1975
1976 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
1977 or 0, if there is none. This routine does not look inside
1978 SEQUENCEs. */
1979
1980 rtx
1981 next_real_insn (insn)
1982 rtx insn;
1983 {
1984 while (insn)
1985 {
1986 insn = NEXT_INSN (insn);
1987 if (insn == 0 || GET_CODE (insn) == INSN
1988 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
1989 break;
1990 }
1991
1992 return insn;
1993 }
1994
1995 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
1996 or 0, if there is none. This routine does not look inside
1997 SEQUENCEs. */
1998
1999 rtx
2000 prev_real_insn (insn)
2001 rtx insn;
2002 {
2003 while (insn)
2004 {
2005 insn = PREV_INSN (insn);
2006 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2007 || GET_CODE (insn) == JUMP_INSN)
2008 break;
2009 }
2010
2011 return insn;
2012 }
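
/* For example, to test whether the instruction that will actually execute
   after INSN is a jump, skipping any intervening notes, labels or
   barriers, one might write (a sketch only):

	rtx next = next_real_insn (insn);
	if (next != 0 && GET_CODE (next) == JUMP_INSN)
	  ...  */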
2013
2014 /* Find the next insn after INSN that really does something. This routine
2015 does not look inside SEQUENCEs. Until reload has completed, this is the
2016 same as next_real_insn. */
2017
2018 rtx
2019 next_active_insn (insn)
2020 rtx insn;
2021 {
2022 while (insn)
2023 {
2024 insn = NEXT_INSN (insn);
2025 if (insn == 0
2026 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2027 || (GET_CODE (insn) == INSN
2028 && (! reload_completed
2029 || (GET_CODE (PATTERN (insn)) != USE
2030 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2031 break;
2032 }
2033
2034 return insn;
2035 }
2036
2037 /* Find the last insn before INSN that really does something. This routine
2038 does not look inside SEQUENCEs. Until reload has completed, this is the
2039 same as prev_real_insn. */
2040
2041 rtx
2042 prev_active_insn (insn)
2043 rtx insn;
2044 {
2045 while (insn)
2046 {
2047 insn = PREV_INSN (insn);
2048 if (insn == 0
2049 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2050 || (GET_CODE (insn) == INSN
2051 && (! reload_completed
2052 || (GET_CODE (PATTERN (insn)) != USE
2053 && GET_CODE (PATTERN (insn)) != CLOBBER))))
2054 break;
2055 }
2056
2057 return insn;
2058 }
2059
2060 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2061
2062 rtx
2063 next_label (insn)
2064 rtx insn;
2065 {
2066 while (insn)
2067 {
2068 insn = NEXT_INSN (insn);
2069 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2070 break;
2071 }
2072
2073 return insn;
2074 }
2075
2076 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2077
2078 rtx
2079 prev_label (insn)
2080 rtx insn;
2081 {
2082 while (insn)
2083 {
2084 insn = PREV_INSN (insn);
2085 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2086 break;
2087 }
2088
2089 return insn;
2090 }
2091 \f
2092 #ifdef HAVE_cc0
2093 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2094 and REG_CC_USER notes so we can find it. */
2095
2096 void
2097 link_cc0_insns (insn)
2098 rtx insn;
2099 {
2100 rtx user = next_nonnote_insn (insn);
2101
2102 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
2103 user = XVECEXP (PATTERN (user), 0, 0);
2104
2105 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn, REG_NOTES (user));
2106 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
2107 }
2108
2109 /* Return the next insn that uses CC0 after INSN, which is assumed to
2110 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
2111 applied to the result of this function should yield INSN).
2112
2113 Normally, this is simply the next insn. However, if a REG_CC_USER note
2114 is present, it contains the insn that uses CC0.
2115
2116 Return 0 if we can't find the insn. */
2117
2118 rtx
2119 next_cc0_user (insn)
2120 rtx insn;
2121 {
2122 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
2123
2124 if (note)
2125 return XEXP (note, 0);
2126
2127 insn = next_nonnote_insn (insn);
2128 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
2129 insn = XVECEXP (PATTERN (insn), 0, 0);
2130
2131 if (insn && GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2132 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
2133 return insn;
2134
2135 return 0;
2136 }
2137
2138 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
2139 note, it is the previous insn. */
2140
2141 rtx
2142 prev_cc0_setter (insn)
2143 rtx insn;
2144 {
2145 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2146
2147 if (note)
2148 return XEXP (note, 0);
2149
2150 insn = prev_nonnote_insn (insn);
2151 if (! sets_cc0_p (PATTERN (insn)))
2152 abort ();
2153
2154 return insn;
2155 }
2156 #endif
2157 \f
2158 /* Try splitting insns that can be split for better scheduling.
2159    PAT is the pattern which might be split.
2160 TRIAL is the insn providing PAT.
2161 LAST is non-zero if we should return the last insn of the sequence produced.
2162
2163 If this routine succeeds in splitting, it returns the first or last
2164 replacement insn depending on the value of LAST. Otherwise, it
2165 returns TRIAL. If the insn to be returned can be split, it will be. */
2166
2167 rtx
2168 try_split (pat, trial, last)
2169 rtx pat, trial;
2170 int last;
2171 {
2172 rtx before = PREV_INSN (trial);
2173 rtx after = NEXT_INSN (trial);
2174 rtx seq = split_insns (pat, trial);
2175 int has_barrier = 0;
2176 rtx tem;
2177
2178 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
2179 We may need to handle this specially. */
2180 if (after && GET_CODE (after) == BARRIER)
2181 {
2182 has_barrier = 1;
2183 after = NEXT_INSN (after);
2184 }
2185
2186 if (seq)
2187 {
2188 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
2189          The latter case will normally arise only when the split is being done
2190          so that the result, in turn, will be split again (SFmode on the 29k is an example). */
2191 if (GET_CODE (seq) == SEQUENCE)
2192 {
2193 /* If we are splitting a JUMP_INSN, look for the JUMP_INSN in
2194 SEQ and copy our JUMP_LABEL to it. If JUMP_LABEL is non-zero,
2195 increment the usage count so we don't delete the label. */
2196 int i;
2197
2198 if (GET_CODE (trial) == JUMP_INSN)
2199 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
2200 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
2201 {
2202 JUMP_LABEL (XVECEXP (seq, 0, i)) = JUMP_LABEL (trial);
2203
2204 if (JUMP_LABEL (trial))
2205 LABEL_NUSES (JUMP_LABEL (trial))++;
2206 }
2207
2208 tem = emit_insn_after (seq, before);
2209
2210 delete_insn (trial);
2211 if (has_barrier)
2212 emit_barrier_after (tem);
2213
2214 /* Recursively call try_split for each new insn created; by the
2215 time control returns here that insn will be fully split, so
2216 set LAST and continue from the insn after the one returned.
2217 We can't use next_active_insn here since AFTER may be a note.
2218             Ignore deleted insns, which can occur if not optimizing.  */
2219 for (tem = NEXT_INSN (before); tem != after;
2220 tem = NEXT_INSN (tem))
2221 if (! INSN_DELETED_P (tem)
2222 && GET_RTX_CLASS (GET_CODE (tem)) == 'i')
2223 tem = try_split (PATTERN (tem), tem, 1);
2224 }
2225 /* Avoid infinite loop if the result matches the original pattern. */
2226 else if (rtx_equal_p (seq, pat))
2227 return trial;
2228 else
2229 {
2230 PATTERN (trial) = seq;
2231 INSN_CODE (trial) = -1;
2232 try_split (seq, trial, last);
2233 }
2234
2235 /* Return either the first or the last insn, depending on which was
2236 requested. */
2237 return last ? prev_active_insn (after) : next_active_insn (before);
2238 }
2239
2240 return trial;
2241 }
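
/* A pass that wants every insn split as far as possible might drive
   try_split like this (a sketch only; real callers such as the delay
   slot scheduler add further conditions):

	rtx insn;
	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    insn = try_split (PATTERN (insn), insn, 1);  */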
2242 \f
2243 /* Make and return an INSN rtx, initializing all its slots.
2244 Store PATTERN in the pattern slots. */
2245
2246 rtx
2247 make_insn_raw (pattern)
2248 rtx pattern;
2249 {
2250 register rtx insn;
2251
2252 /* If in RTL generation phase, see if FREE_INSN can be used. */
2253 if (free_insn != 0 && rtx_equal_function_value_matters)
2254 {
2255 insn = free_insn;
2256 free_insn = NEXT_INSN (free_insn);
2257 PUT_CODE (insn, INSN);
2258 }
2259 else
2260 insn = rtx_alloc (INSN);
2261
2262 INSN_UID (insn) = cur_insn_uid++;
2263 PATTERN (insn) = pattern;
2264 INSN_CODE (insn) = -1;
2265 LOG_LINKS (insn) = NULL;
2266 REG_NOTES (insn) = NULL;
2267
2268 return insn;
2269 }
2270
2271 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
2272
2273 static rtx
2274 make_jump_insn_raw (pattern)
2275 rtx pattern;
2276 {
2277 register rtx insn;
2278
2279 insn = rtx_alloc (JUMP_INSN);
2280 INSN_UID (insn) = cur_insn_uid++;
2281
2282 PATTERN (insn) = pattern;
2283 INSN_CODE (insn) = -1;
2284 LOG_LINKS (insn) = NULL;
2285 REG_NOTES (insn) = NULL;
2286 JUMP_LABEL (insn) = NULL;
2287
2288 return insn;
2289 }
2290
2291 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
2292
2293 static rtx
2294 make_call_insn_raw (pattern)
2295 rtx pattern;
2296 {
2297 register rtx insn;
2298
2299 insn = rtx_alloc (CALL_INSN);
2300 INSN_UID (insn) = cur_insn_uid++;
2301
2302 PATTERN (insn) = pattern;
2303 INSN_CODE (insn) = -1;
2304 LOG_LINKS (insn) = NULL;
2305 REG_NOTES (insn) = NULL;
2306 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
2307
2308 return insn;
2309 }
2310 \f
2311 /* Add INSN to the end of the doubly-linked list.
2312 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
2313
2314 void
2315 add_insn (insn)
2316 register rtx insn;
2317 {
2318 PREV_INSN (insn) = last_insn;
2319 NEXT_INSN (insn) = 0;
2320
2321 if (NULL != last_insn)
2322 NEXT_INSN (last_insn) = insn;
2323
2324 if (NULL == first_insn)
2325 first_insn = insn;
2326
2327 last_insn = insn;
2328 }
2329
2330 /* Add INSN into the doubly-linked list after insn AFTER. This and
2331 the next should be the only functions called to insert an insn once
2332 delay slots have been filled since only they know how to update a
2333 SEQUENCE. */
2334
2335 void
2336 add_insn_after (insn, after)
2337 rtx insn, after;
2338 {
2339 rtx next = NEXT_INSN (after);
2340
2341 if (optimize && INSN_DELETED_P (after))
2342 abort ();
2343
2344 NEXT_INSN (insn) = next;
2345 PREV_INSN (insn) = after;
2346
2347 if (next)
2348 {
2349 PREV_INSN (next) = insn;
2350 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2351 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
2352 }
2353 else if (last_insn == after)
2354 last_insn = insn;
2355 else
2356 {
2357 struct sequence_stack *stack = seq_stack;
2358 /* Scan all pending sequences too. */
2359 for (; stack; stack = stack->next)
2360 if (after == stack->last)
2361 {
2362 stack->last = insn;
2363 break;
2364 }
2365
2366 if (stack == 0)
2367 abort ();
2368 }
2369
2370 NEXT_INSN (after) = insn;
2371 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
2372 {
2373 rtx sequence = PATTERN (after);
2374 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2375 }
2376 }
2377
2378 /* Add INSN into the doubly-linked list before insn BEFORE. This and
2379 the previous should be the only functions called to insert an insn once
2380 delay slots have been filled since only they know how to update a
2381 SEQUENCE. */
2382
2383 void
2384 add_insn_before (insn, before)
2385 rtx insn, before;
2386 {
2387 rtx prev = PREV_INSN (before);
2388
2389 if (optimize && INSN_DELETED_P (before))
2390 abort ();
2391
2392 PREV_INSN (insn) = prev;
2393 NEXT_INSN (insn) = before;
2394
2395 if (prev)
2396 {
2397 NEXT_INSN (prev) = insn;
2398 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2399 {
2400 rtx sequence = PATTERN (prev);
2401 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
2402 }
2403 }
2404 else if (first_insn == before)
2405 first_insn = insn;
2406 else
2407 {
2408 struct sequence_stack *stack = seq_stack;
2409 /* Scan all pending sequences too. */
2410 for (; stack; stack = stack->next)
2411 if (before == stack->first)
2412 {
2413 stack->first = insn;
2414 break;
2415 }
2416
2417 if (stack == 0)
2418 abort ();
2419 }
2420
2421 PREV_INSN (before) = insn;
2422 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
2423 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
2424 }
2425
2426 /* Remove an insn from its doubly-linked list. This function knows how
2427 to handle sequences. */
2428 void
2429 remove_insn (insn)
2430 rtx insn;
2431 {
2432 rtx next = NEXT_INSN (insn);
2433 rtx prev = PREV_INSN (insn);
2434 if (prev)
2435 {
2436 NEXT_INSN (prev) = next;
2437 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2438 {
2439 rtx sequence = PATTERN (prev);
2440 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
2441 }
2442 }
2443 else if (first_insn == insn)
2444 first_insn = next;
2445 else
2446 {
2447 struct sequence_stack *stack = seq_stack;
2448 /* Scan all pending sequences too. */
2449 for (; stack; stack = stack->next)
2450 if (insn == stack->first)
2451 {
2452 stack->first = next;
2453 break;
2454 }
2455
2456 if (stack == 0)
2457 abort ();
2458 }
2459
2460 if (next)
2461 {
2462 PREV_INSN (next) = prev;
2463 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2464 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2465 }
2466 else if (last_insn == insn)
2467 last_insn = prev;
2468 else
2469 {
2470 struct sequence_stack *stack = seq_stack;
2471 /* Scan all pending sequences too. */
2472 for (; stack; stack = stack->next)
2473 if (insn == stack->last)
2474 {
2475 stack->last = prev;
2476 break;
2477 }
2478
2479 if (stack == 0)
2480 abort ();
2481 }
2482 }
2483
2484 /* Delete all insns made since FROM.
2485 FROM becomes the new last instruction. */
2486
2487 void
2488 delete_insns_since (from)
2489 rtx from;
2490 {
2491 if (from == 0)
2492 first_insn = 0;
2493 else
2494 NEXT_INSN (from) = 0;
2495 last_insn = from;
2496 }
2497
2498 /* This function is deprecated; please use sequences instead.
2499
2500 Move a consecutive bunch of insns to a different place in the chain.
2501 The insns to be moved are those between FROM and TO.
2502 They are moved to a new position after the insn AFTER.
2503 AFTER must not be FROM or TO or any insn in between.
2504
2505 This function does not know about SEQUENCEs and hence should not be
2506 called after delay-slot filling has been done. */
2507
2508 void
2509 reorder_insns (from, to, after)
2510 rtx from, to, after;
2511 {
2512 /* Splice this bunch out of where it is now. */
2513 if (PREV_INSN (from))
2514 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
2515 if (NEXT_INSN (to))
2516 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
2517 if (last_insn == to)
2518 last_insn = PREV_INSN (from);
2519 if (first_insn == from)
2520 first_insn = NEXT_INSN (to);
2521
2522 /* Make the new neighbors point to it and it to them. */
2523 if (NEXT_INSN (after))
2524 PREV_INSN (NEXT_INSN (after)) = to;
2525
2526 NEXT_INSN (to) = NEXT_INSN (after);
2527 PREV_INSN (from) = after;
2528 NEXT_INSN (after) = from;
2529 if (after == last_insn)
2530 last_insn = to;
2531 }
2532
2533 /* Return the line note insn preceding INSN. */
2534
2535 static rtx
2536 find_line_note (insn)
2537 rtx insn;
2538 {
2539 if (no_line_numbers)
2540 return 0;
2541
2542 for (; insn; insn = PREV_INSN (insn))
2543 if (GET_CODE (insn) == NOTE
2544 && NOTE_LINE_NUMBER (insn) >= 0)
2545 break;
2546
2547 return insn;
2548 }
2549
2550 /* Like reorder_insns, but inserts line notes to preserve the line numbers
2551 of the moved insns when debugging. This may insert a note between AFTER
2552 and FROM, and another one after TO. */
2553
2554 void
2555 reorder_insns_with_line_notes (from, to, after)
2556 rtx from, to, after;
2557 {
2558 rtx from_line = find_line_note (from);
2559 rtx after_line = find_line_note (after);
2560
2561 reorder_insns (from, to, after);
2562
2563 if (from_line == after_line)
2564 return;
2565
2566 if (from_line)
2567 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2568 NOTE_LINE_NUMBER (from_line),
2569 after);
2570 if (after_line)
2571 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2572 NOTE_LINE_NUMBER (after_line),
2573 to);
2574 }
2575 \f
2576 /* Emit an insn of given code and pattern
2577 at a specified place within the doubly-linked list. */
2578
2579 /* Make an instruction with body PATTERN
2580 and output it before the instruction BEFORE. */
2581
2582 rtx
2583 emit_insn_before (pattern, before)
2584 register rtx pattern, before;
2585 {
2586 register rtx insn = before;
2587
2588 if (GET_CODE (pattern) == SEQUENCE)
2589 {
2590 register int i;
2591
2592 for (i = 0; i < XVECLEN (pattern, 0); i++)
2593 {
2594 insn = XVECEXP (pattern, 0, i);
2595 add_insn_before (insn, before);
2596 }
2597 if (XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2598 sequence_result[XVECLEN (pattern, 0)] = pattern;
2599 }
2600 else
2601 {
2602 insn = make_insn_raw (pattern);
2603 add_insn_before (insn, before);
2604 }
2605
2606 return insn;
2607 }
2608
2609 /* Make an instruction with body PATTERN and code JUMP_INSN
2610 and output it before the instruction BEFORE. */
2611
2612 rtx
2613 emit_jump_insn_before (pattern, before)
2614 register rtx pattern, before;
2615 {
2616 register rtx insn;
2617
2618 if (GET_CODE (pattern) == SEQUENCE)
2619 insn = emit_insn_before (pattern, before);
2620 else
2621 {
2622 insn = make_jump_insn_raw (pattern);
2623 add_insn_before (insn, before);
2624 }
2625
2626 return insn;
2627 }
2628
2629 /* Make an instruction with body PATTERN and code CALL_INSN
2630 and output it before the instruction BEFORE. */
2631
2632 rtx
2633 emit_call_insn_before (pattern, before)
2634 register rtx pattern, before;
2635 {
2636 register rtx insn;
2637
2638 if (GET_CODE (pattern) == SEQUENCE)
2639 insn = emit_insn_before (pattern, before);
2640 else
2641 {
2642 insn = make_call_insn_raw (pattern);
2643 add_insn_before (insn, before);
2644 PUT_CODE (insn, CALL_INSN);
2645 }
2646
2647 return insn;
2648 }
2649
2650 /* Make an insn of code BARRIER
2651 and output it before the insn BEFORE. */
2652
2653 rtx
2654 emit_barrier_before (before)
2655 register rtx before;
2656 {
2657 register rtx insn = rtx_alloc (BARRIER);
2658
2659 INSN_UID (insn) = cur_insn_uid++;
2660
2661 add_insn_before (insn, before);
2662 return insn;
2663 }
2664
2665 /* Emit the label LABEL before the insn BEFORE. */
2666
2667 rtx
2668 emit_label_before (label, before)
2669 rtx label, before;
2670 {
2671 /* This can be called twice for the same label as a result of the
2672 confusion that follows a syntax error! So make it harmless. */
2673 if (INSN_UID (label) == 0)
2674 {
2675 INSN_UID (label) = cur_insn_uid++;
2676 add_insn_before (label, before);
2677 }
2678
2679 return label;
2680 }
2681
2682 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
2683
2684 rtx
2685 emit_note_before (subtype, before)
2686 int subtype;
2687 rtx before;
2688 {
2689 register rtx note = rtx_alloc (NOTE);
2690 INSN_UID (note) = cur_insn_uid++;
2691 NOTE_SOURCE_FILE (note) = 0;
2692 NOTE_LINE_NUMBER (note) = subtype;
2693
2694 add_insn_before (note, before);
2695 return note;
2696 }
2697 \f
2698 /* Make an insn of code INSN with body PATTERN
2699 and output it after the insn AFTER. */
2700
2701 rtx
2702 emit_insn_after (pattern, after)
2703 register rtx pattern, after;
2704 {
2705 register rtx insn = after;
2706
2707 if (GET_CODE (pattern) == SEQUENCE)
2708 {
2709 register int i;
2710
2711 for (i = 0; i < XVECLEN (pattern, 0); i++)
2712 {
2713 insn = XVECEXP (pattern, 0, i);
2714 add_insn_after (insn, after);
2715 after = insn;
2716 }
2717 if (XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2718 sequence_result[XVECLEN (pattern, 0)] = pattern;
2719 }
2720 else
2721 {
2722 insn = make_insn_raw (pattern);
2723 add_insn_after (insn, after);
2724 }
2725
2726 return insn;
2727 }
2728
2729 /* Similar to emit_insn_after, except that line notes are to be inserted so
2730 as to act as if this insn were at FROM. */
2731
2732 void
2733 emit_insn_after_with_line_notes (pattern, after, from)
2734 rtx pattern, after, from;
2735 {
2736 rtx from_line = find_line_note (from);
2737 rtx after_line = find_line_note (after);
2738 rtx insn = emit_insn_after (pattern, after);
2739
2740 if (from_line)
2741 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
2742 NOTE_LINE_NUMBER (from_line),
2743 after);
2744
2745 if (after_line)
2746 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
2747 NOTE_LINE_NUMBER (after_line),
2748 insn);
2749 }
2750
2751 /* Make an insn of code JUMP_INSN with body PATTERN
2752 and output it after the insn AFTER. */
2753
2754 rtx
2755 emit_jump_insn_after (pattern, after)
2756 register rtx pattern, after;
2757 {
2758 register rtx insn;
2759
2760 if (GET_CODE (pattern) == SEQUENCE)
2761 insn = emit_insn_after (pattern, after);
2762 else
2763 {
2764 insn = make_jump_insn_raw (pattern);
2765 add_insn_after (insn, after);
2766 }
2767
2768 return insn;
2769 }
2770
2771 /* Make an insn of code BARRIER
2772 and output it after the insn AFTER. */
2773
2774 rtx
2775 emit_barrier_after (after)
2776 register rtx after;
2777 {
2778 register rtx insn = rtx_alloc (BARRIER);
2779
2780 INSN_UID (insn) = cur_insn_uid++;
2781
2782 add_insn_after (insn, after);
2783 return insn;
2784 }
2785
2786 /* Emit the label LABEL after the insn AFTER. */
2787
2788 rtx
2789 emit_label_after (label, after)
2790 rtx label, after;
2791 {
2792 /* This can be called twice for the same label
2793 as a result of the confusion that follows a syntax error!
2794 So make it harmless. */
2795 if (INSN_UID (label) == 0)
2796 {
2797 INSN_UID (label) = cur_insn_uid++;
2798 add_insn_after (label, after);
2799 }
2800
2801 return label;
2802 }
2803
2804 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
2805
2806 rtx
2807 emit_note_after (subtype, after)
2808 int subtype;
2809 rtx after;
2810 {
2811 register rtx note = rtx_alloc (NOTE);
2812 INSN_UID (note) = cur_insn_uid++;
2813 NOTE_SOURCE_FILE (note) = 0;
2814 NOTE_LINE_NUMBER (note) = subtype;
2815 add_insn_after (note, after);
2816 return note;
2817 }
2818
2819 /* Emit a line note for FILE and LINE after the insn AFTER. */
2820
2821 rtx
2822 emit_line_note_after (file, line, after)
2823 char *file;
2824 int line;
2825 rtx after;
2826 {
2827 register rtx note;
2828
2829 if (no_line_numbers && line > 0)
2830 {
2831 cur_insn_uid++;
2832 return 0;
2833 }
2834
2835 note = rtx_alloc (NOTE);
2836 INSN_UID (note) = cur_insn_uid++;
2837 NOTE_SOURCE_FILE (note) = file;
2838 NOTE_LINE_NUMBER (note) = line;
2839 add_insn_after (note, after);
2840 return note;
2841 }
2842 \f
2843 /* Make an insn of code INSN with pattern PATTERN
2844 and add it to the end of the doubly-linked list.
2845 If PATTERN is a SEQUENCE, take the elements of it
2846 and emit an insn for each element.
2847
2848 Returns the last insn emitted. */
2849
2850 rtx
2851 emit_insn (pattern)
2852 rtx pattern;
2853 {
2854 rtx insn = last_insn;
2855
2856 if (GET_CODE (pattern) == SEQUENCE)
2857 {
2858 register int i;
2859
2860 for (i = 0; i < XVECLEN (pattern, 0); i++)
2861 {
2862 insn = XVECEXP (pattern, 0, i);
2863 add_insn (insn);
2864 }
2865 if (XVECLEN (pattern, 0) < SEQUENCE_RESULT_SIZE)
2866 sequence_result[XVECLEN (pattern, 0)] = pattern;
2867 }
2868 else
2869 {
2870 insn = make_insn_raw (pattern);
2871 add_insn (insn);
2872 }
2873
2874 return insn;
2875 }
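
/* For example, appending a simple register-to-register copy to the
   current sequence or function (a sketch only; DEST and SRC stand for
   previously created pseudo registers):

	emit_insn (gen_rtx_SET (VOIDmode, dest, src));  */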
2876
2877 /* Emit the insns in a chain starting with INSN.
2878 Return the last insn emitted. */
2879
2880 rtx
2881 emit_insns (insn)
2882 rtx insn;
2883 {
2884 rtx last = 0;
2885
2886 while (insn)
2887 {
2888 rtx next = NEXT_INSN (insn);
2889 add_insn (insn);
2890 last = insn;
2891 insn = next;
2892 }
2893
2894 return last;
2895 }
2896
2897 /* Emit the insns in a chain starting with INSN and place them in front of
2898 the insn BEFORE. Return the last insn emitted. */
2899
2900 rtx
2901 emit_insns_before (insn, before)
2902 rtx insn;
2903 rtx before;
2904 {
2905 rtx last = 0;
2906
2907 while (insn)
2908 {
2909 rtx next = NEXT_INSN (insn);
2910 add_insn_before (insn, before);
2911 last = insn;
2912 insn = next;
2913 }
2914
2915 return last;
2916 }
2917
2918 /* Emit the insns in a chain starting with FIRST and place them in back of
2919 the insn AFTER. Return the last insn emitted. */
2920
2921 rtx
2922 emit_insns_after (first, after)
2923 register rtx first;
2924 register rtx after;
2925 {
2926 register rtx last;
2927 register rtx after_after;
2928
2929 if (!after)
2930 abort ();
2931
2932 if (!first)
2933 return first;
2934
2935 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
2936 continue;
2937
2938 after_after = NEXT_INSN (after);
2939
2940 NEXT_INSN (after) = first;
2941 PREV_INSN (first) = after;
2942 NEXT_INSN (last) = after_after;
2943 if (after_after)
2944 PREV_INSN (after_after) = last;
2945
2946 if (after == last_insn)
2947 last_insn = last;
2948 return last;
2949 }
2950
2951 /* Make an insn of code JUMP_INSN with pattern PATTERN
2952 and add it to the end of the doubly-linked list. */
2953
2954 rtx
2955 emit_jump_insn (pattern)
2956 rtx pattern;
2957 {
2958 if (GET_CODE (pattern) == SEQUENCE)
2959 return emit_insn (pattern);
2960 else
2961 {
2962 register rtx insn = make_jump_insn_raw (pattern);
2963 add_insn (insn);
2964 return insn;
2965 }
2966 }
2967
2968 /* Make an insn of code CALL_INSN with pattern PATTERN
2969 and add it to the end of the doubly-linked list. */
2970
2971 rtx
2972 emit_call_insn (pattern)
2973 rtx pattern;
2974 {
2975 if (GET_CODE (pattern) == SEQUENCE)
2976 return emit_insn (pattern);
2977 else
2978 {
2979 register rtx insn = make_call_insn_raw (pattern);
2980 add_insn (insn);
2981 PUT_CODE (insn, CALL_INSN);
2982 return insn;
2983 }
2984 }
2985
2986 /* Add the label LABEL to the end of the doubly-linked list. */
2987
2988 rtx
2989 emit_label (label)
2990 rtx label;
2991 {
2992 /* This can be called twice for the same label
2993 as a result of the confusion that follows a syntax error!
2994 So make it harmless. */
2995 if (INSN_UID (label) == 0)
2996 {
2997 INSN_UID (label) = cur_insn_uid++;
2998 add_insn (label);
2999 }
3000 return label;
3001 }
3002
3003 /* Make an insn of code BARRIER
3004 and add it to the end of the doubly-linked list. */
3005
3006 rtx
3007 emit_barrier ()
3008 {
3009 register rtx barrier = rtx_alloc (BARRIER);
3010 INSN_UID (barrier) = cur_insn_uid++;
3011 add_insn (barrier);
3012 return barrier;
3013 }
3014
3015 /* Make an insn of code NOTE
3016 with data-fields specified by FILE and LINE
3017 and add it to the end of the doubly-linked list,
3018 but only if line-numbers are desired for debugging info. */
3019
3020 rtx
3021 emit_line_note (file, line)
3022 char *file;
3023 int line;
3024 {
3025 set_file_and_line_for_stmt (file, line);
3026
3027 #if 0
3028 if (no_line_numbers)
3029 return 0;
3030 #endif
3031
3032 return emit_note (file, line);
3033 }
3034
3035 /* Make an insn of code NOTE
3036 with data-fields specified by FILE and LINE
3037 and add it to the end of the doubly-linked list.
3038 If it is a line-number NOTE, omit it if it matches the previous one. */
3039
3040 rtx
3041 emit_note (file, line)
3042 char *file;
3043 int line;
3044 {
3045 register rtx note;
3046
3047 if (line > 0)
3048 {
3049 if (file && last_filename && !strcmp (file, last_filename)
3050 && line == last_linenum)
3051 return 0;
3052 last_filename = file;
3053 last_linenum = line;
3054 }
3055
3056 if (no_line_numbers && line > 0)
3057 {
3058 cur_insn_uid++;
3059 return 0;
3060 }
3061
3062 note = rtx_alloc (NOTE);
3063 INSN_UID (note) = cur_insn_uid++;
3064 NOTE_SOURCE_FILE (note) = file;
3065 NOTE_LINE_NUMBER (note) = line;
3066 add_insn (note);
3067 return note;
3068 }
3069
3070 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
3071
3072 rtx
3073 emit_line_note_force (file, line)
3074 char *file;
3075 int line;
3076 {
3077 last_linenum = -1;
3078 return emit_line_note (file, line);
3079 }
3080
3081 /* Cause next statement to emit a line note even if the line number
3082 has not changed. This is used at the beginning of a function. */
3083
3084 void
3085 force_next_line_note ()
3086 {
3087 last_linenum = -1;
3088 }
3089
3090 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
3091 note of this type already exists, remove it first. */
3092
3093 void
3094 set_unique_reg_note (insn, kind, datum)
3095 rtx insn;
3096 enum reg_note kind;
3097 rtx datum;
3098 {
3099 rtx note = find_reg_note (insn, kind, NULL_RTX);
3100
3101 /* First remove the note if there already is one. */
3102 if (note)
3103 remove_note (insn, note);
3104
3105 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
3106 }
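
/* For instance, after expanding a complicated computation whose final
   result is known to equal some constant, a caller might record that
   fact on the last insn (a sketch only; LAST and VALUE are hypothetical):

	set_unique_reg_note (last, REG_EQUAL, GEN_INT (value));  */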
3107 \f
3108 /* Return an indication of which type of insn should have X as a body.
3109 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
3110
3111 enum rtx_code
3112 classify_insn (x)
3113 rtx x;
3114 {
3115 if (GET_CODE (x) == CODE_LABEL)
3116 return CODE_LABEL;
3117 if (GET_CODE (x) == CALL)
3118 return CALL_INSN;
3119 if (GET_CODE (x) == RETURN)
3120 return JUMP_INSN;
3121 if (GET_CODE (x) == SET)
3122 {
3123 if (SET_DEST (x) == pc_rtx)
3124 return JUMP_INSN;
3125 else if (GET_CODE (SET_SRC (x)) == CALL)
3126 return CALL_INSN;
3127 else
3128 return INSN;
3129 }
3130 if (GET_CODE (x) == PARALLEL)
3131 {
3132 register int j;
3133 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
3134 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
3135 return CALL_INSN;
3136 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3137 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
3138 return JUMP_INSN;
3139 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
3140 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
3141 return CALL_INSN;
3142 }
3143 return INSN;
3144 }
3145
3146 /* Emit the rtl pattern X as an appropriate kind of insn.
3147 If X is a label, it is simply added into the insn chain. */
3148
3149 rtx
3150 emit (x)
3151 rtx x;
3152 {
3153 enum rtx_code code = classify_insn (x);
3154
3155 if (code == CODE_LABEL)
3156 return emit_label (x);
3157 else if (code == INSN)
3158 return emit_insn (x);
3159 else if (code == JUMP_INSN)
3160 {
3161 register rtx insn = emit_jump_insn (x);
3162 if (simplejump_p (insn) || GET_CODE (x) == RETURN)
3163 return emit_barrier ();
3164 return insn;
3165 }
3166 else if (code == CALL_INSN)
3167 return emit_call_insn (x);
3168 else
3169 abort ();
3170 }
3171 \f
3172 /* Begin emitting insns to a sequence which can be packaged in an
3173 RTL_EXPR. If this sequence will contain something that might cause
3174 the compiler to pop arguments to function calls (because those
3175 pops have previously been deferred; see INHIBIT_DEFER_POP for more
3176 details), use do_pending_stack_adjust before calling this function.
3177 That will ensure that the deferred pops are not accidentally
3178    emitted in the middle of this sequence.  */
3179
3180 void
3181 start_sequence ()
3182 {
3183 struct sequence_stack *tem;
3184
3185 if (sequence_element_free_list)
3186 {
3187 /* Reuse a previously-saved struct sequence_stack. */
3188 tem = sequence_element_free_list;
3189 sequence_element_free_list = tem->next;
3190 }
3191 else
3192 tem = (struct sequence_stack *) permalloc (sizeof (struct sequence_stack));
3193
3194 tem->next = seq_stack;
3195 tem->first = first_insn;
3196 tem->last = last_insn;
3197 tem->sequence_rtl_expr = seq_rtl_expr;
3198
3199 seq_stack = tem;
3200
3201 first_insn = 0;
3202 last_insn = 0;
3203 }
3204
3205 /* Similarly, but indicate that this sequence will be placed in T, an
3206 RTL_EXPR. See the documentation for start_sequence for more
3207 information about how to use this function. */
3208
3209 void
3210 start_sequence_for_rtl_expr (t)
3211 tree t;
3212 {
3213 start_sequence ();
3214
3215 seq_rtl_expr = t;
3216 }
3217
3218 /* Set up the insn chain starting with FIRST as the current sequence,
3219 saving the previously current one. See the documentation for
3220 start_sequence for more information about how to use this function. */
3221
3222 void
3223 push_to_sequence (first)
3224 rtx first;
3225 {
3226 rtx last;
3227
3228 start_sequence ();
3229
3230 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
3231
3232 first_insn = first;
3233 last_insn = last;
3234 }
3235
3236 /* Set up the outer-level insn chain
3237 as the current sequence, saving the previously current one. */
3238
3239 void
3240 push_topmost_sequence ()
3241 {
3242 struct sequence_stack *stack, *top = NULL;
3243
3244 start_sequence ();
3245
3246 for (stack = seq_stack; stack; stack = stack->next)
3247 top = stack;
3248
3249 first_insn = top->first;
3250 last_insn = top->last;
3251 seq_rtl_expr = top->sequence_rtl_expr;
3252 }
3253
3254 /* After emitting to the outer-level insn chain, update the outer-level
3255 insn chain, and restore the previous saved state. */
3256
3257 void
3258 pop_topmost_sequence ()
3259 {
3260 struct sequence_stack *stack, *top = NULL;
3261
3262 for (stack = seq_stack; stack; stack = stack->next)
3263 top = stack;
3264
3265 top->first = first_insn;
3266 top->last = last_insn;
3267 /* ??? Why don't we save seq_rtl_expr here? */
3268
3269 end_sequence ();
3270 }
3271
3272 /* After emitting to a sequence, restore previous saved state.
3273
3274 To get the contents of the sequence just made, you must call
3275 `gen_sequence' *before* calling here.
3276
3277 If the compiler might have deferred popping arguments while
3278 generating this sequence, and this sequence will not be immediately
3279 inserted into the instruction stream, use do_pending_stack_adjust
3280 before calling gen_sequence. That will ensure that the deferred
3281 pops are inserted into this sequence, and not into some random
3282 location in the instruction stream. See INHIBIT_DEFER_POP for more
3283 information about deferred popping of arguments. */
3284
3285 void
3286 end_sequence ()
3287 {
3288 struct sequence_stack *tem = seq_stack;
3289
3290 first_insn = tem->first;
3291 last_insn = tem->last;
3292 seq_rtl_expr = tem->sequence_rtl_expr;
3293 seq_stack = tem->next;
3294
3295 tem->next = sequence_element_free_list;
3296 sequence_element_free_list = tem;
3297 }
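
/* The usual way these routines are combined is (a sketch only; TEMP, X
   and INSN are whatever the caller is working with):

	rtx seq;

	start_sequence ();
	emit_move_insn (temp, x);
	seq = gen_sequence ();
	end_sequence ();
	emit_insn_before (seq, insn);  */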
3298
3299 /* Return 1 if currently emitting into a sequence. */
3300
3301 int
3302 in_sequence_p ()
3303 {
3304 return seq_stack != 0;
3305 }
3306
3307 /* Generate a SEQUENCE rtx containing the insns already emitted
3308 to the current sequence.
3309
3310 This is how the gen_... function from a DEFINE_EXPAND
3311 constructs the SEQUENCE that it returns. */
3312
3313 rtx
3314 gen_sequence ()
3315 {
3316 rtx result;
3317 rtx tem;
3318 int i;
3319 int len;
3320
3321 /* Count the insns in the chain. */
3322 len = 0;
3323 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
3324 len++;
3325
3326 /* If only one insn, return its pattern rather than a SEQUENCE.
3327 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
3328 the case of an empty list.) */
3329 if (len == 1
3330 && ! RTX_FRAME_RELATED_P (first_insn)
3331 && (GET_CODE (first_insn) == INSN
3332 || GET_CODE (first_insn) == JUMP_INSN
3333 /* Don't discard the call usage field. */
3334 || (GET_CODE (first_insn) == CALL_INSN
3335 && CALL_INSN_FUNCTION_USAGE (first_insn) == NULL_RTX)))
3336 {
3337 NEXT_INSN (first_insn) = free_insn;
3338 free_insn = first_insn;
3339 return PATTERN (first_insn);
3340 }
3341
3342 /* Put them in a vector. See if we already have a SEQUENCE of the
3343 appropriate length around. */
3344 if (len < SEQUENCE_RESULT_SIZE && (result = sequence_result[len]) != 0)
3345 sequence_result[len] = 0;
3346 else
3347 {
3348 /* Ensure that this rtl goes in saveable_obstack, since we may
3349 cache it. */
3350 push_obstacks_nochange ();
3351 rtl_in_saveable_obstack ();
3352 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
3353 pop_obstacks ();
3354 }
3355
3356 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
3357 XVECEXP (result, 0, i) = tem;
3358
3359 return result;
3360 }
3361 \f
3362 /* Put the various virtual registers into REGNO_REG_RTX. */
3363
3364 void
3365 init_virtual_regs (es)
3366 struct emit_status *es;
3367 {
3368 rtx *ptr = es->x_regno_reg_rtx;
3369 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
3370 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
3371 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
3372 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
3373 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
3374 }
3375
3376 void
3377 clear_emit_caches ()
3378 {
3379 int i;
3380
3381 /* Clear the start_sequence/gen_sequence cache. */
3382 sequence_element_free_list = 0;
3383 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
3384 sequence_result[i] = 0;
3385 free_insn = 0;
3386 }
3387
3388 /* Initialize data structures and variables in this file
3389 before generating rtl for each function. */
3390
3391 void
3392 init_emit ()
3393 {
3394 struct function *f = current_function;
3395
3396 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
3397 first_insn = NULL;
3398 last_insn = NULL;
3399 seq_rtl_expr = NULL;
3400 cur_insn_uid = 1;
3401 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
3402 last_linenum = 0;
3403 last_filename = 0;
3404 first_label_num = label_num;
3405 last_label_num = 0;
3406 seq_stack = NULL;
3407
3408 clear_emit_caches ();
3409
3410 /* Init the tables that describe all the pseudo regs. */
3411
3412 f->emit->regno_pointer_flag_length = LAST_VIRTUAL_REGISTER + 101;
3413
3414 f->emit->regno_pointer_flag
3415 = (char *) savealloc (f->emit->regno_pointer_flag_length);
3416 bzero (f->emit->regno_pointer_flag, f->emit->regno_pointer_flag_length);
3417
3418 f->emit->regno_pointer_align
3419 = (char *) savealloc (f->emit->regno_pointer_flag_length);
3420 bzero (f->emit->regno_pointer_align, f->emit->regno_pointer_flag_length);
3421
3422 regno_reg_rtx
3423 = (rtx *) savealloc (f->emit->regno_pointer_flag_length * sizeof (rtx));
3424 bzero ((char *) regno_reg_rtx,
3425 f->emit->regno_pointer_flag_length * sizeof (rtx));
3426
3427 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
3428 init_virtual_regs (f->emit);
3429
3430 /* Indicate that the virtual registers and stack locations are
3431 all pointers. */
3432 REGNO_POINTER_FLAG (STACK_POINTER_REGNUM) = 1;
3433 REGNO_POINTER_FLAG (FRAME_POINTER_REGNUM) = 1;
3434 REGNO_POINTER_FLAG (HARD_FRAME_POINTER_REGNUM) = 1;
3435 REGNO_POINTER_FLAG (ARG_POINTER_REGNUM) = 1;
3436
3437 REGNO_POINTER_FLAG (VIRTUAL_INCOMING_ARGS_REGNUM) = 1;
3438 REGNO_POINTER_FLAG (VIRTUAL_STACK_VARS_REGNUM) = 1;
3439 REGNO_POINTER_FLAG (VIRTUAL_STACK_DYNAMIC_REGNUM) = 1;
3440 REGNO_POINTER_FLAG (VIRTUAL_OUTGOING_ARGS_REGNUM) = 1;
3441 REGNO_POINTER_FLAG (VIRTUAL_CFA_REGNUM) = 1;
3442
3443 #ifdef STACK_BOUNDARY
3444 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3445 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3446 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM)
3447 = STACK_BOUNDARY / BITS_PER_UNIT;
3448 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY / BITS_PER_UNIT;
3449
3450 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM)
3451 = STACK_BOUNDARY / BITS_PER_UNIT;
3452 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM)
3453 = STACK_BOUNDARY / BITS_PER_UNIT;
3454 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM)
3455 = STACK_BOUNDARY / BITS_PER_UNIT;
3456 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM)
3457 = STACK_BOUNDARY / BITS_PER_UNIT;
3458 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = UNITS_PER_WORD;
3459 #endif
3460
3461 #ifdef INIT_EXPANDERS
3462 INIT_EXPANDERS;
3463 #endif
3464 }
3465
3466 /* Create some permanent unique rtl objects shared between all functions.
3467 LINE_NUMBERS is nonzero if line numbers are to be generated. */
3468
3469 void
3470 init_emit_once (line_numbers)
3471 int line_numbers;
3472 {
3473 int i;
3474 enum machine_mode mode;
3475 enum machine_mode double_mode;
3476
3477 no_line_numbers = ! line_numbers;
3478
3479 /* Compute the word and byte modes. */
3480
3481 byte_mode = VOIDmode;
3482 word_mode = VOIDmode;
3483 double_mode = VOIDmode;
3484
3485 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3486 mode = GET_MODE_WIDER_MODE (mode))
3487 {
3488 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
3489 && byte_mode == VOIDmode)
3490 byte_mode = mode;
3491
3492 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
3493 && word_mode == VOIDmode)
3494 word_mode = mode;
3495 }
3496
3497 #ifndef DOUBLE_TYPE_SIZE
3498 #define DOUBLE_TYPE_SIZE (BITS_PER_WORD * 2)
3499 #endif
3500
3501 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3502 mode = GET_MODE_WIDER_MODE (mode))
3503 {
3504 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
3505 && double_mode == VOIDmode)
3506 double_mode = mode;
3507 }
3508
3509 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
3510
3511 /* Create the unique rtx's for certain rtx codes and operand values. */
3512
3513 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
3514 {
3515 PUT_CODE (&const_int_rtx[i + MAX_SAVED_CONST_INT], CONST_INT);
3516 PUT_MODE (&const_int_rtx[i + MAX_SAVED_CONST_INT], VOIDmode);
3517 INTVAL (&const_int_rtx[i + MAX_SAVED_CONST_INT]) = i;
3518 }
3519
3520 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
3521 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
3522 const_true_rtx = &const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
3523 else
3524 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
3525
3526 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
3527 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
3528 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
3529 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
3530
3531 for (i = 0; i <= 2; i++)
3532 {
3533 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
3534 mode = GET_MODE_WIDER_MODE (mode))
3535 {
3536 rtx tem = rtx_alloc (CONST_DOUBLE);
3537 union real_extract u;
3538
3539 bzero ((char *) &u, sizeof u); /* Zero any holes in a structure. */
3540 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
3541
3542 bcopy ((char *) &u, (char *) &CONST_DOUBLE_LOW (tem), sizeof u);
3543 CONST_DOUBLE_MEM (tem) = cc0_rtx;
3544 PUT_MODE (tem, mode);
3545
3546 const_tiny_rtx[i][(int) mode] = tem;
3547 }
3548
3549 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
3550
3551 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3552 mode = GET_MODE_WIDER_MODE (mode))
3553 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
3554
3555 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
3556 mode != VOIDmode;
3557 mode = GET_MODE_WIDER_MODE (mode))
3558 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
3559 }
3560
3561 for (mode = GET_CLASS_NARROWEST_MODE (MODE_CC); mode != VOIDmode;
3562 mode = GET_MODE_WIDER_MODE (mode))
3563 const_tiny_rtx[0][(int) mode] = const0_rtx;
3564
3565
3566 /* Assign register numbers to the globally defined register rtx.
3567 This must be done at runtime because the register number field
3568 is in a union and some compilers can't initialize unions. */
3569
3570 REGNO (stack_pointer_rtx) = STACK_POINTER_REGNUM;
3571 PUT_MODE (stack_pointer_rtx, Pmode);
3572 REGNO (frame_pointer_rtx) = FRAME_POINTER_REGNUM;
3573 PUT_MODE (frame_pointer_rtx, Pmode);
3574 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3575 REGNO (hard_frame_pointer_rtx) = HARD_FRAME_POINTER_REGNUM;
3576 PUT_MODE (hard_frame_pointer_rtx, Pmode);
3577 #endif
3578 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3579 REGNO (arg_pointer_rtx) = ARG_POINTER_REGNUM;
3580 PUT_MODE (arg_pointer_rtx, Pmode);
3581 #endif
3582
3583 REGNO (virtual_incoming_args_rtx) = VIRTUAL_INCOMING_ARGS_REGNUM;
3584 PUT_MODE (virtual_incoming_args_rtx, Pmode);
3585 REGNO (virtual_stack_vars_rtx) = VIRTUAL_STACK_VARS_REGNUM;
3586 PUT_MODE (virtual_stack_vars_rtx, Pmode);
3587 REGNO (virtual_stack_dynamic_rtx) = VIRTUAL_STACK_DYNAMIC_REGNUM;
3588 PUT_MODE (virtual_stack_dynamic_rtx, Pmode);
3589 REGNO (virtual_outgoing_args_rtx) = VIRTUAL_OUTGOING_ARGS_REGNUM;
3590 PUT_MODE (virtual_outgoing_args_rtx, Pmode);
3591 REGNO (virtual_cfa_rtx) = VIRTUAL_CFA_REGNUM;
3592 PUT_MODE (virtual_cfa_rtx, Pmode);
3593
3594 #ifdef RETURN_ADDRESS_POINTER_REGNUM
3595 return_address_pointer_rtx
3596 = gen_rtx_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
3597 #endif
3598
3599 #ifdef STRUCT_VALUE
3600 struct_value_rtx = STRUCT_VALUE;
3601 #else
3602 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
3603 #endif
3604
3605 #ifdef STRUCT_VALUE_INCOMING
3606 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
3607 #else
3608 #ifdef STRUCT_VALUE_INCOMING_REGNUM
3609 struct_value_incoming_rtx
3610 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
3611 #else
3612 struct_value_incoming_rtx = struct_value_rtx;
3613 #endif
3614 #endif
3615
3616 #ifdef STATIC_CHAIN_REGNUM
3617 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
3618
3619 #ifdef STATIC_CHAIN_INCOMING_REGNUM
3620 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
3621 static_chain_incoming_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
3622 else
3623 #endif
3624 static_chain_incoming_rtx = static_chain_rtx;
3625 #endif
3626
3627 #ifdef STATIC_CHAIN
3628 static_chain_rtx = STATIC_CHAIN;
3629
3630 #ifdef STATIC_CHAIN_INCOMING
3631 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
3632 #else
3633 static_chain_incoming_rtx = static_chain_rtx;
3634 #endif
3635 #endif
3636
3637 #ifdef PIC_OFFSET_TABLE_REGNUM
3638 pic_offset_table_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
3639 #endif
3640
3641 #ifdef INIT_EXPANDERS
3642 /* This is to initialize save_machine_status and restore_machine_status before
3643 the first call to push_function_context_to. This is needed by the Chill
3644    front end which calls push_function_context_to before the first call to
3645 init_function_start. */
3646 INIT_EXPANDERS;
3647 #endif
3648 }
3649 \f
3650 /* Query and clear/restore no_line_numbers.  This is used by the
3651 switch / case handling in stmt.c to give proper line numbers in
3652 warnings about unreachable code. */
3653
3654 int
3655 force_line_numbers ()
3656 {
3657 int old = no_line_numbers;
3658
3659 no_line_numbers = 0;
3660 if (old)
3661 force_next_line_note ();
3662 return old;
3663 }
3664
3665 void
3666 restore_line_number_status (old_value)
3667 int old_value;
3668 {
3669 no_line_numbers = old_value;
3670 }