/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93, 94, 95, 1996 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include <stdio.h>

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "bytecode.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();

/* Round VALUE up to the next integer that meets the alignment ALIGN
   (which must be a power of two).  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

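/* For example, CEIL_ROUND (13, 8) computes (13 + 7) & ~7, i.e. 20 & ~7,
   which is 16 -- the next multiple of 8 at or above 13; values that are
   already aligned are returned unchanged.  */
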
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
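
/* Under this default formula, a function taking two arguments may have
   up to 8 * (8 + 2) = 80 insns and still be considered for inlining;
   each additional parameter raises the limit by another 8 insns.  */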
\f
static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list PROTO((tree));
static tree copy_decl_tree PROTO((tree));
static void copy_decl_rtls PROTO((tree));
static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void save_constants_in_decl_trees PROTO((tree));
static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags PROTO((tree, int));
\f
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This check
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          ninsns++;

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels cannot be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot be inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
       && ! (GET_CODE (insn) == NOTE
             && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
        return "function with complex parameters cannot be inline";
    }

  return 0;
}
\f
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
\f
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);

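  /* Each current_function_* variable above is either 0 or 1, and each
     FUNCTION_FLAGS_* constant is a distinct single-bit mask, so the
     multiply-and-add above is simply a branch-free way of OR-ing
     together the flag bits that apply.  */
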
  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw the address
             of the rtl.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new;
          DECL_RTL (parms) = new;
        }

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers),
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment.  */

  return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
                                max_parm_reg, max_reg,
                                current_function_args_size,
                                current_function_pops_args,
                                stack_slot_list, forced_labels, function_flags,
                                current_function_outgoing_args_size,
                                arg_vector, (rtx) DECL_INITIAL (fndecl),
                                (rtvec) regno_reg_rtx, regno_pointer_flag,
                                regno_pointer_align);
}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Also scan all decls, and replace any constant pool references with the
         actual constant.  */
      save_constants_in_decl_trees (DECL_INITIAL (fndecl));

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain of insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
                                     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;

  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;
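  /* Biasing the pointer by min_labelno means label_map can be indexed
     directly by absolute label number (label_map[i] for i in
     [min_labelno, max_labelno)), even though only max_labelno -
     min_labelno entries were actually allocated.  */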

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
        = copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this twice: the first time copy
     the insn itself and its body; the second time, copy the REG_NOTES.
     This is because a REG_NOTE may have a forward pointer to another
     insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
            NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          else
            {
              NOTE_SOURCE_FILE (insn) = (char *) copy;
              NOTE_SOURCE_FILE (copy) = 0;
            }
          if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
              || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
            {
              /* We have to forward these both to match the new exception
                 region.  */
              NOTE_BLOCK_NUMBER (copy)
                = CODE_LABEL_NUMBER (label_map[NOTE_BLOCK_NUMBER (copy)]);
            }
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          copy = rtx_alloc (GET_CODE (insn));

          if (GET_CODE (insn) == CALL_INSN)
            CALL_INSN_FUNCTION_USAGE (copy)
              = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL_RTX;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
        = copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
        DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
                                max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          if (current_function_uses_const_pool)
            {
              /* Replace any constant pool references with the actual constant.
                 We will put the constant back if we need to write the
                 function out after all.  */
              save_constants (&PATTERN (insn));
              if (REG_NOTES (insn))
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }

  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
  save_constants_in_decl_trees (DECL_INITIAL (fndecl));

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
\f
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, mode, and has
   RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
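
/* For instance, a pool reference such as (mem:DF (symbol_ref ".LC0"))
   whose pool entry holds a DFmode constant becomes
   (const:DF (const_double ...)) with RTX_INTEGRATED_P set; if the MEM
   was narrower than the pool entry (say SImode), the CONST is further
   wrapped in (subreg:SI ... 0).  A bare pool SYMBOL_REF becomes an
   (address:DF ...) rtx instead.  (The ".LC0" label name here is only
   illustrative.)  */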
\f
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
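  /* This is the callback handed to note_stores above: REG is a location
     being assigned and X is the SET or CLOBBER rtx performing the
     assignment (unused here).  */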
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          rtx new
            = force_const_mem (GET_MODE (SUBREG_REG (x)),
                               copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;

    case ADDRESS:
      /* If this is not the special constant-pool ADDRESS made by
         save_constants, it is an error.  Otherwise return the constant
         pool address.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      return XEXP (force_const_mem (GET_MODE (x),
                                    copy_for_inline (XEXP (x, 0))), 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          x->volatil = orig->volatil;
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address, into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this MEM
         as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                         == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
         Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
                   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
         clear its TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
         (sizeof (*x) - sizeof (x->fld)
          + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;
\f
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
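
/* That is, FIXED_BASE_PLUS_P accepts addresses of the form
   (plus (reg) (const_int N)) where the register is one of the fixed
   virtual registers -- e.g. (plus (reg:SI VIRTUAL_STACK_VARS_REGNUM)
   (const_int 8)), a constant offset from a known frame base.  */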

\f
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);
          MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            /* The mode of LOC and ARG can differ if LOC was a variable
               that had its mode promoted via PROMOTE_MODE.  */
            arg_vals[i] = convert_modes (GET_MODE (loc),
                                         TYPE_MODE (TREE_TYPE (arg)),
                                         expand_expr (arg, NULL_RTX, mode,
                                                      EXPAND_SUM),
                                         TREE_UNSIGNED (TREE_TYPE (formal)));
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
        mark_reg_pointer (arg_vals[i],
                          (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
                           / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  map->label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  map->label_map -= min_labelno;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
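  /* (The trailing "+ 10" is the five-plus-five from the comment above:
     five entries for computing the virtual frame pointer and five for
     the return value.)  */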
1390
1391 map->const_equiv_map
1392 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
4c9a05bc
RK
1393 bzero ((char *) map->const_equiv_map,
1394 map->const_equiv_map_size * sizeof (rtx));
c66e0741
RK
1395
1396 map->const_age_map
1397 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
4c9a05bc
RK
1398 bzero ((char *) map->const_age_map,
1399 map->const_equiv_map_size * sizeof (unsigned));
175160e7
MT
1400 map->const_age = 0;
1401
1402 /* Record the current insn in case we have to set up pointers to frame
1403 and argument memory blocks. */
1404 map->insns_at_start = get_last_insn ();
1405
12307ca2
RK
1406 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1407 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1408
175160e7
MT
1409 /* Update the outgoing argument size to allow for those in the inlined
1410 function. */
1411 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1412 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1413
1414 /* If the inline function needs to make PIC references, that means
1415 that this function's PIC offset table must be used. */
1416 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1417 current_function_uses_pic_offset_table = 1;
1418
a6dd1cb6
RK
1419 /* If this function needs a context, set it up. */
1420 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1421 static_chain_value = lookup_static_chain (fndecl);
1422
1c1f2d29
JM
1423 if (GET_CODE (parm_insns) == NOTE
1424 && NOTE_LINE_NUMBER (parm_insns) > 0)
1425 {
1426 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1427 NOTE_LINE_NUMBER (parm_insns));
1428 if (note)
1429 RTX_INTEGRATED_P (note) = 1;
1430 }
1431
175160e7
MT
1432 /* Process each argument. For each, set up things so that the function's
1433 reference to the argument will refer to the argument being passed.
1434 We only replace REG with REG here. Any simplifications are done
1435 via const_equiv_map.
1436
1437 We make two passes: In the first, we deal with parameters that will
1438 be placed into registers, since we need to ensure that the allocated
1439 register number fits in const_equiv_map. Then we store all non-register
1440 parameters into their memory location. */
1441
fd28789a
RS
1442 /* Don't try to free temp stack slots here, because we may put one of the
1443 parameters into a temp stack slot. */
1444
175160e7
MT
1445 for (i = 0; i < nargs; i++)
1446 {
1447 rtx copy = arg_vals[i];
1448
1449 loc = RTVEC_ELT (arg_vector, i);
1450
1451 /* There are three cases, each handled separately. */
1452 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1453 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1454 {
1455 /* This must be an object passed by invisible reference (it could
1456 also be a variable-sized object, but we forbid inlining functions
1457 with variable-sized arguments). COPY is the address of the
1458 actual value (this computation will cause it to be copied). We
1459 map that address for the register, noting the actual address as
1460 an equivalent in case it can be substituted into the insns. */
1461
1462 if (GET_CODE (copy) != REG)
1463 {
1464 temp = copy_addr_to_reg (copy);
2b145ea8
RK
1465 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1466 && REGNO (temp) < map->const_equiv_map_size)
175160e7
MT
1467 {
1468 map->const_equiv_map[REGNO (temp)] = copy;
1469 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1470 }
1471 copy = temp;
1472 }
1473 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1474 }
1475 else if (GET_CODE (loc) == MEM)
1476 {
1477 /* This is the case of a parameter that lives in memory.
1478 It will live in the block we allocate in the called routine's
1479 frame that simulates the incoming argument area. Do nothing
1480 now; we will call store_expr later. */
1481 ;
1482 }
1483 else if (GET_CODE (loc) == REG)
1484 {
1485 /* This is the good case where the parameter is in a register.
1486 If it is read-only and our argument is a constant, set up the
2ad701ba
RS
1487 constant equivalence.
1488
1489 If LOC is REG_USERVAR_P, the usual case, COPY must also have
23d5d23d
RK
1490 that flag set if it is a register.
1491
1492 Also, don't allow hard registers here; they might not be valid
0f41302f 1493 when substituted into insns. */
2ad701ba
RS
1494
1495 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1496 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
23d5d23d
RK
1497 && ! REG_USERVAR_P (copy))
1498 || (GET_CODE (copy) == REG
1499 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
175160e7
MT
1500 {
1501 temp = copy_to_mode_reg (GET_MODE (loc), copy);
2ad701ba 1502 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
2b145ea8
RK
1503 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1504 && REGNO (temp) < map->const_equiv_map_size)
175160e7
MT
1505 {
1506 map->const_equiv_map[REGNO (temp)] = copy;
1507 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1508 }
1509 copy = temp;
1510 }
1511 map->reg_map[REGNO (loc)] = copy;
1512 }
bc2eeab2
RS
1513 else if (GET_CODE (loc) == CONCAT)
1514 {
1515 /* This is the good case where the parameter is in a
1516 pair of separate pseudos.
1517 If it is read-only and our argument is a constant, set up the
1518 constant equivalence.
1519
1520 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1521 that flag set if it is a register.
1522
1523 Also, don't allow hard registers here; they might not be valid
0f41302f 1524 when substituted into insns. */
bc2eeab2
RS
1525 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1526 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1527 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1528 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1529
1530 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1531 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1532 && ! REG_USERVAR_P (copyreal))
1533 || (GET_CODE (copyreal) == REG
1534 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1535 {
1536 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1537 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1538 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1539 && REGNO (temp) < map->const_equiv_map_size)
1540 {
1541 map->const_equiv_map[REGNO (temp)] = copyreal;
1542 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1543 }
1544 copyreal = temp;
1545 }
1546 map->reg_map[REGNO (locreal)] = copyreal;
1547
1548 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1549 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1550 && ! REG_USERVAR_P (copyimag))
1551 || (GET_CODE (copyimag) == REG
1552 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1553 {
1554 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1555 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1556 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1557 && REGNO (temp) < map->const_equiv_map_size)
1558 {
1559 map->const_equiv_map[REGNO (temp)] = copyimag;
1560 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1561 }
1562 copyimag = temp;
1563 }
1564 map->reg_map[REGNO (locimag)] = copyimag;
1565 }
1566 else
1567 abort ();
1568 }
1569
1570 /* Now do the parameters that will be placed in memory. */
1571
1572 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1573 formal; formal = TREE_CHAIN (formal), i++)
1574 {
1575 loc = RTVEC_ELT (arg_vector, i);
1576
1577 if (GET_CODE (loc) == MEM
1578 /* Exclude case handled above. */
1579 && ! (GET_CODE (XEXP (loc, 0)) == REG
1580 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1581 {
1582 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1583 DECL_SOURCE_LINE (formal));
1584 if (note)
1585 RTX_INTEGRATED_P (note) = 1;
1586
1587 /* Compute the address in the area we reserved and store the
1588 value there. */
1589 temp = copy_rtx_and_substitute (loc, map);
1590 subst_constants (&temp, NULL_RTX, map);
1591 apply_change_group ();
1592 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1593 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1594 store_expr (arg_trees[i], temp, 0);
1595 }
1596 }
1597
1598 /* Deal with the places that the function puts its result.
1599 We are driven by what is placed into DECL_RESULT.
1600
1601 Initially, we assume that we don't have anything special handling for
1602 REG_FUNCTION_RETURN_VALUE_P. */
1603
1604 map->inline_target = 0;
1605 loc = DECL_RTL (DECL_RESULT (fndecl));
1606 if (TYPE_MODE (type) == VOIDmode)
1607 /* There is no return value to worry about. */
1608 ;
1609 else if (GET_CODE (loc) == MEM)
1610 {
1611 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1612 abort ();
1613
1614 /* Pass the function the address in which to return a structure value.
1615 Note that a constructor can cause someone to call us with
1616 STRUCTURE_VALUE_ADDR, but the initialization takes place
1617 via the first parameter, rather than the struct return address.
1618
1619 We have two cases: If the address is a simple register indirect,
1620 use the mapping mechanism to point that register to our structure
1621 return address. Otherwise, store the structure return value into
1622 the place that it will be referenced from. */
1623
1624 if (GET_CODE (XEXP (loc, 0)) == REG)
1625 {
1626 temp = force_reg (Pmode,
1627 force_operand (structure_value_addr, NULL_RTX));
1628 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1629 if ((CONSTANT_P (structure_value_addr)
1630 || (GET_CODE (structure_value_addr) == PLUS
1631 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1632 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1633 && REGNO (temp) < map->const_equiv_map_size)
1634 {
1635 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1636 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1637 }
1638 }
1639 else
1640 {
1641 temp = copy_rtx_and_substitute (loc, map);
1642 subst_constants (&temp, NULL_RTX, map);
1643 apply_change_group ();
1644 emit_move_insn (temp, structure_value_addr);
1645 }
1646 }
1647 else if (ignore)
1648 /* We will ignore the result value, so don't look at its structure.
1649 Note that preparations for an aggregate return value
1650 do need to be made (above) even if it will be ignored. */
1651 ;
1652 else if (GET_CODE (loc) == REG)
1653 {
1654 /* The function returns an object in a register and we use the return
1655 value. Set up our target for remapping. */
1656
1657 /* Machine mode function was declared to return. */
1658 enum machine_mode departing_mode = TYPE_MODE (type);
1659 /* (Possibly wider) machine mode it actually computes
1660 (for the sake of callers that fail to declare it right). */
1661 enum machine_mode arriving_mode
1662 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1663 rtx reg_to_map;
1664
1665 /* Don't use MEMs as direct targets because on some machines
1666 substituting a MEM for a REG makes invalid insns.
1667 Let the combiner substitute the MEM if that is valid. */
1668 if (target == 0 || GET_CODE (target) != REG
1669 || GET_MODE (target) != departing_mode)
1670 target = gen_reg_rtx (departing_mode);
1671
1672 /* If function's value was promoted before return,
1673 avoid machine mode mismatch when we substitute INLINE_TARGET.
1674 But TARGET is what we will return to the caller. */
1675 if (arriving_mode != departing_mode)
1676 {
1677 /* Avoid creating a paradoxical subreg wider than
1678 BITS_PER_WORD, since that is illegal. */
1679 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1680 {
1681 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1682 GET_MODE_BITSIZE (arriving_mode)))
1683 /* Maybe could be handled by using convert_move () ? */
1684 abort ();
1685 reg_to_map = gen_reg_rtx (arriving_mode);
1686 target = gen_lowpart (departing_mode, reg_to_map);
1687 }
1688 else
1689 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1690 }
1691 else
1692 reg_to_map = target;
1693
1694 /* Usually, the result value is the machine's return register.
1695 Sometimes it may be a pseudo. Handle both cases. */
1696 if (REG_FUNCTION_VALUE_P (loc))
1697 map->inline_target = reg_to_map;
1698 else
1699 map->reg_map[REGNO (loc)] = reg_to_map;
1700 }
1701
1702 /* Make new label equivalences for the labels in the called function. */
1703 for (i = min_labelno; i < max_labelno; i++)
1704 map->label_map[i] = gen_label_rtx ();
1705
1706 /* Perform postincrements before actually calling the function. */
1707 emit_queue ();
1708
1709 /* Clean up stack so that variables might have smaller offsets. */
1710 do_pending_stack_adjust ();
1711
1712 /* Save a copy of the location of const_equiv_map for mark_stores, called
1713 via note_stores. */
1714 global_const_equiv_map = map->const_equiv_map;
1715 global_const_equiv_map_size = map->const_equiv_map_size;
1716
1717 /* If the called function does an alloca, save and restore the
1718 stack pointer around the call. This saves stack space, but
1719 also is required if this inline is being done between two
1720 pushes. */
1721 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1722 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1723
1724 /* Now copy the insns one by one. Do this in two passes, first the insns and
1725 then their REG_NOTES, just like save_for_inline. */
1726
1727 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1728
1729 for (insn = insns; insn; insn = NEXT_INSN (insn))
1730 {
1731 rtx copy, pattern, set;
1732
1733 map->orig_asm_operands_vector = 0;
1734
1735 switch (GET_CODE (insn))
1736 {
1737 case INSN:
1738 pattern = PATTERN (insn);
1739 set = single_set (insn);
1740 copy = 0;
1741 if (GET_CODE (pattern) == USE
1742 && GET_CODE (XEXP (pattern, 0)) == REG
1743 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1744 /* The (USE (REG n)) at return from the function should
1745 be ignored since we are changing (REG n) into
1746 inline_target. */
1747 break;
1748
1749 /* Ignore setting a function value that we don't want to use. */
1750 if (map->inline_target == 0
1751 && set != 0
1752 && GET_CODE (SET_DEST (set)) == REG
1753 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1754 {
1755 if (volatile_refs_p (SET_SRC (set)))
1756 {
1757 rtx new_set;
1758
1759 /* If we must not delete the source,
1760 load it into a new temporary. */
1761 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1762
1763 new_set = single_set (copy);
1764 if (new_set == 0)
1765 abort ();
1766
1767 SET_DEST (new_set)
1768 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1769 }
1770 /* If the source and destination are the same and it
1771 has a note on it, keep the insn. */
1772 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1773 && REG_NOTES (insn) != 0)
1774 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1775 else
1776 break;
1777 }
1778
1779 /* If this is setting the static chain rtx, omit it. */
1780 else if (static_chain_value != 0
1781 && set != 0
1782 && GET_CODE (SET_DEST (set)) == REG
1783 && rtx_equal_p (SET_DEST (set),
1784 static_chain_incoming_rtx))
1785 break;
1786
1787 /* If this is setting the static chain pseudo, set it from
1788 the value we want to give it instead. */
1789 else if (static_chain_value != 0
1790 && set != 0
1791 && rtx_equal_p (SET_SRC (set),
1792 static_chain_incoming_rtx))
1793 {
1794 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1795
1796 copy = emit_move_insn (newdest, static_chain_value);
1797 static_chain_value = 0;
1798 }
1799 else
1800 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1801 /* REG_NOTES will be copied later. */
1802
1803#ifdef HAVE_cc0
1804 /* If this insn is setting CC0, it may need to look at
1805 the insn that uses CC0 to see what type of insn it is.
1806 In that case, the call to recog via validate_change will
1807 fail. So don't substitute constants here. Instead,
1808 do it when we emit the following insn.
1809
1810 For example, see the pyr.md file. That machine has signed and
1811 unsigned compares. The compare patterns must check the
1812 following branch insn to see what kind of compare to
1813 emit.
1814
1815 If the previous insn set CC0, substitute constants on it as
1816 well. */
1817 if (sets_cc0_p (PATTERN (copy)) != 0)
1818 cc0_insn = copy;
1819 else
1820 {
1821 if (cc0_insn)
1822 try_constants (cc0_insn, map);
1823 cc0_insn = 0;
1824 try_constants (copy, map);
1825 }
1826#else
1827 try_constants (copy, map);
1828#endif
1829 break;
1830
1831 case JUMP_INSN:
1832 if (GET_CODE (PATTERN (insn)) == RETURN)
1833 {
1834 if (local_return_label == 0)
1835 local_return_label = gen_label_rtx ();
1836 pattern = gen_jump (local_return_label);
1837 }
1838 else
1839 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1840
1841 copy = emit_jump_insn (pattern);
1842
1843#ifdef HAVE_cc0
1844 if (cc0_insn)
1845 try_constants (cc0_insn, map);
1846 cc0_insn = 0;
1847#endif
1848 try_constants (copy, map);
1849
1850 /* If this used to be a conditional jump insn whose branch
1851 direction is now known, we must do something special. */
1852 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1853 {
1854#ifdef HAVE_cc0
1855 /* The previous insn set cc0 for us. So delete it. */
1856 delete_insn (PREV_INSN (copy));
1857#endif
1858
1859 /* If this is now a no-op, delete it. */
1860 if (map->last_pc_value == pc_rtx)
1861 {
1862 delete_insn (copy);
1863 copy = 0;
1864 }
1865 else
1866 /* Otherwise, this is an unconditional jump so we must put a
1867 BARRIER after it. We could do some dead code elimination
1868 here, but jump.c will do it just as well. */
1869 emit_barrier ();
1870 }
1871 break;
1872
1873 case CALL_INSN:
1874 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1875 copy = emit_call_insn (pattern);
1876
1877 /* Because the USAGE information potentially contains objects other
1878 than hard registers, we need to copy it. */
1879 CALL_INSN_FUNCTION_USAGE (copy) =
1880 copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1881
1882#ifdef HAVE_cc0
1883 if (cc0_insn)
1884 try_constants (cc0_insn, map);
1885 cc0_insn = 0;
1886#endif
1887 try_constants (copy, map);
1888
1889 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1890 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1891 map->const_equiv_map[i] = 0;
1892 break;
1893
1894 case CODE_LABEL:
1895 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1896 LABEL_NAME (copy) = LABEL_NAME (insn);
1897 map->const_age++;
1898 break;
1899
1900 case BARRIER:
1901 copy = emit_barrier ();
1902 break;
1903
1904 case NOTE:
1905 /* It is important to discard function-end and function-beg notes,
1906 so we have only one of each in the current function.
1907 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1908 deleted these in the copy used for continuing compilation,
1909 not the copy used for inlining). */
1910 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1911 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1912 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1913 {
1914 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1915 if (copy && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
1916 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
1917 {
1918 rtx label = map->label_map[NOTE_BLOCK_NUMBER (copy)];
1919
1920 /* We have to forward these both to match the new exception
1921 region. */
1922 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
1923 }
1924 }
1925 else
1926 copy = 0;
1927 break;
1928
1929 default:
1930 abort ();
1931 break;
1932 }
1933
1934 if (copy)
1935 RTX_INTEGRATED_P (copy) = 1;
1936
1937 map->insn_map[INSN_UID (insn)] = copy;
1938 }
1939
1940 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1941 from parameters can be substituted in. These are the only ones that
1942 are valid across the entire function. */
1943 map->const_age++;
1944 for (insn = insns; insn; insn = NEXT_INSN (insn))
1945 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1946 && map->insn_map[INSN_UID (insn)]
1947 && REG_NOTES (insn))
1948 {
1949 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1950 /* We must also do subst_constants, in case one of our parameters
1951 has const type and constant value. */
1952 subst_constants (&tem, NULL_RTX, map);
1953 apply_change_group ();
1954 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1955 }
1956
1957 if (local_return_label)
1958 emit_label (local_return_label);
1959
1960 /* Restore the stack pointer if we saved it above. */
1961 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1962 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1963
1964 /* Make copies of the decls of the symbols in the inline function, so that
1965 the copies of the variables get declared in the current function. Set
1966 up things so that lookup_static_chain knows to interpret registers
1967 in SAVE_EXPRs for TYPE_SIZEs as local. */
1968
1969 inline_function_decl = fndecl;
1970 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1971 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1972 inline_function_decl = 0;
1973
1974 /* End the scope containing the copied formal parameter variables
1975 and copied LABEL_DECLs. */
1976
1977 expand_end_bindings (getdecls (), 1, 1);
1978 block = poplevel (1, 1, 0);
1979 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1980 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1981 poplevel (0, 0, 0);
1982 emit_line_note (input_filename, lineno);
1983
1984 if (structure_value_addr)
1985 {
1986 target = gen_rtx (MEM, TYPE_MODE (type),
1987 memory_address (TYPE_MODE (type), structure_value_addr));
1988 MEM_IN_STRUCT_P (target) = 1;
1989 }
1990 return target;
1991}
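/* Editor's sketch (illustrative only, not part of GCC; all names below
   are hypothetical).  The insn-copying loop above uses a two-pass
   scheme: copy every insn first, recording old->new pairs in
   map->insn_map, then rewrite cross-insn references (REG_NOTES here)
   once every target copy exists.  A minimal self-contained analogue: */
#if 0
#include <stdlib.h>

struct insn { int uid; struct insn *ref; struct insn *next; };

/* MAP must be zeroed by the caller and have a slot for every uid.  */
static struct insn *
copy_chain (struct insn *head, struct insn **map, int map_size)
{
  struct insn *new_head = 0, **tail = &new_head, *p;

  /* Pass 1: duplicate each node and record the correspondence.  */
  for (p = head; p; p = p->next)
    {
      struct insn *c = malloc (sizeof *c);
      *c = *p;
      c->next = 0;
      if (p->uid < map_size)
        map[p->uid] = c;
      *tail = c;
      tail = &c->next;
    }

  /* Pass 2: retarget copied cross-references at the copies.  */
  for (p = new_head; p; p = p->next)
    if (p->ref && p->ref->uid < map_size)
      p->ref = map[p->ref->uid];

  return new_head;
}
#endif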
1992\f
1993/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1994 push all of those decls and give each one the corresponding home. */
1995
1996static void
1997integrate_parm_decls (args, map, arg_vector)
1998 tree args;
1999 struct inline_remap *map;
2000 rtvec arg_vector;
2001{
2002 register tree tail;
2003 register int i;
2004
2005 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2006 {
2007 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2008 TREE_TYPE (tail));
2009 rtx new_decl_rtl
2010 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2011
2012 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2013 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2014 here, but that's going to require some more work. */
2015 /* DECL_INCOMING_RTL (decl) = ?; */
2016 /* These args would always appear unused, if not for this. */
2017 TREE_USED (decl) = 1;
2018 /* Prevent warning for shadowing with these. */
2019 DECL_ABSTRACT_ORIGIN (decl) = tail;
2020 pushdecl (decl);
2021 /* Fully instantiate the address with the equivalent form so that the
2022 debugging information contains the actual register, instead of the
2023 virtual register. Do this by not passing an insn to
2024 subst_constants. */
2025 subst_constants (&new_decl_rtl, NULL_RTX, map);
2026 apply_change_group ();
2027 DECL_RTL (decl) = new_decl_rtl;
2028 }
2029}
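/* Editor's sketch (hypothetical, not GCC code): integrate_parm_decls
   advances an index in step with the TREE_CHAIN walk so the i-th
   formal is bound to the i-th slot of arg_vector.  The same
   parallel-iteration shape in miniature: */
#if 0
struct parm { const char *name; struct parm *next; };

static void
bind_parms_to_homes (struct parm *args, void **homes,
                     void (*bind) (struct parm *, void *))
{
  struct parm *tail;
  int i;

  /* homes[] is assumed to hold one entry per formal, in order.  */
  for (tail = args, i = 0; tail; tail = tail->next, i++)
    bind (tail, homes[i]);
}
#endif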
2030
2031/* Given a BLOCK node LET, push decls and levels so as to construct in the
2032 current function a tree of contexts isomorphic to the one that is given.
2033
2034 LEVEL indicates how far down into the BLOCK tree is the node we are
2035 currently traversing. It is always zero except for recursive calls.
2036
2037 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2038 registers used in the DECL_RTL field should be remapped. If it is zero,
2039 no mapping is necessary. */
2040
2041static void
2042 integrate_decl_tree (let, level, map)
2043 tree let;
2044 int level;
2045 struct inline_remap *map;
2046{
2047 tree t, node;
2048
2049 if (level > 0)
2050 pushlevel (0);
2051
2052 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2053 {
2054 tree d;
2055
2056 push_obstacks_nochange ();
2057 saveable_allocation ();
2058 d = copy_node (t);
2059 pop_obstacks ();
2060
2061 if (DECL_RTL (t) != 0)
2062 {
2063 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2064 /* Fully instantiate the address with the equivalent form so that the
2065 debugging information contains the actual register, instead of the
2066 virtual register. Do this by not passing an insn to
2067 subst_constants. */
2068 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2069 apply_change_group ();
2070 }
2071 /* These args would always appear unused, if not for this. */
2072 TREE_USED (d) = 1;
2073 /* Prevent warning for shadowing with these. */
2074 DECL_ABSTRACT_ORIGIN (d) = t;
2075
2076 if (DECL_LANG_SPECIFIC (d))
2077 copy_lang_decl (d);
2078
2079 pushdecl (d);
2080 }
2081
2082 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2083 integrate_decl_tree (t, level + 1, map);
2084
2085 if (level > 0)
2086 {
2087 node = poplevel (1, 0, 0);
2088 if (node)
2089 {
2090 TREE_USED (node) = TREE_USED (let);
2091 BLOCK_ABSTRACT_ORIGIN (node) = let;
2092 }
2093 }
2094 }
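/* Editor's sketch (illustrative only): the level > 0 guard above means
   the outermost BLOCK reuses the scope already opened by the caller,
   while each nested BLOCK opens and closes its own.  A toy analogue
   with hypothetical enter/leave scope primitives: */
#if 0
struct blk { struct blk *subblocks; struct blk *chain; };

static void
mirror_tree (struct blk *let, int level,
             void (*enter) (void), void (*leave) (void))
{
  struct blk *t;

  if (level > 0)
    enter ();                   /* open a fresh nested scope */

  for (t = let->subblocks; t; t = t->chain)
    mirror_tree (t, level + 1, enter, leave);

  if (level > 0)
    leave ();                   /* the root's scope belongs to the caller */
}
#endif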
2095
2096/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2097 through save_constants. */
2098
2099static void
2100save_constants_in_decl_trees (let)
2101 tree let;
2102{
2103 tree t;
2104
2105 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2106 if (DECL_RTL (t) != 0)
2107 save_constants (&DECL_RTL (t));
2108
2109 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2110 save_constants_in_decl_trees (t);
2111}
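/* Editor's sketch (not GCC code): save_constants_in_decl_trees is one
   instance of a generic pattern -- visit every DECL_RTL slot in a
   block tree and apply a single operation.  Generalized with a
   callback (all types hypothetical): */
#if 0
struct tdecl { void *rtl; struct tdecl *chain; };
struct tblock { struct tdecl *vars; struct tblock *subblocks; struct tblock *chain; };

static void
for_each_decl_rtl (struct tblock *let, void (*fn) (void **))
{
  struct tdecl *d;
  struct tblock *b;

  for (d = let->vars; d; d = d->chain)
    if (d->rtl != 0)
      fn (&d->rtl);             /* e.g. the save_constants step above */

  for (b = let->subblocks; b; b = b->chain)
    for_each_decl_rtl (b, fn);  /* recurse exactly as above */
}
#endif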
2112\f
2113/* Create a new copy of an rtx.
2114 Recursively copies the operands of the rtx,
2115 except for those few rtx codes that are sharable.
2116
2117 We always return an rtx that is similar to that incoming rtx, with the
2118 exception of possibly changing a REG to a SUBREG or vice versa. No
2119 rtl is ever emitted.
2120
2121 Handle constants that need to be placed in the constant pool by
2122 calling `force_const_mem'. */
2123
2124rtx
2125copy_rtx_and_substitute (orig, map)
2126 register rtx orig;
2127 struct inline_remap *map;
2128{
2129 register rtx copy, temp;
2130 register int i, j;
2131 register RTX_CODE code;
2132 register enum machine_mode mode;
2133 register char *format_ptr;
2134 int regno;
2135
2136 if (orig == 0)
2137 return 0;
2138
2139 code = GET_CODE (orig);
2140 mode = GET_MODE (orig);
2141
2142 switch (code)
2143 {
2144 case REG:
2145 /* If the stack pointer register shows up, it must be part of
2146 stack-adjustments (*not* because we eliminated the frame pointer!).
2147 Small hard registers are returned as-is. Pseudo-registers
2148 go through their `reg_map'. */
2149 regno = REGNO (orig);
2150 if (regno <= LAST_VIRTUAL_REGISTER)
2151 {
2152 /* Some hard registers are also mapped,
2153 but others are not translated. */
2154 if (map->reg_map[regno] != 0)
2155 return map->reg_map[regno];
2156
2157 /* If this is the virtual frame pointer, make space in current
2158 function's stack frame for the stack frame of the inline function.
2159
2160 Copy the address of this area into a pseudo. Map
2161 virtual_stack_vars_rtx to this pseudo and set up a constant
2162 equivalence for it to be the address. This will substitute the
2163 address into insns where it can be substituted and use the new
2164 pseudo where it can't. */
2165 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2166 {
2167 rtx loc, seq;
2168 int size = DECL_FRAME_SIZE (map->fndecl);
2169 int rounded;
2170
2171 start_sequence ();
2172 loc = assign_stack_temp (BLKmode, size, 1);
2173 loc = XEXP (loc, 0);
2174#ifdef FRAME_GROWS_DOWNWARD
2175 /* In this case, virtual_stack_vars_rtx points to one byte
2176 higher than the top of the frame area. So compute the offset
2177 to one byte higher than our substitute frame.
2178 Keep the fake frame pointer aligned like a real one. */
2179 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2180 loc = plus_constant (loc, rounded);
2181#endif
2182 map->reg_map[regno] = temp
2183 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2184
2185#ifdef STACK_BOUNDARY
2186 mark_reg_pointer (map->reg_map[regno],
2187 STACK_BOUNDARY / BITS_PER_UNIT);
2188#endif
2189
2190 if (REGNO (temp) < map->const_equiv_map_size)
2191 {
2192 map->const_equiv_map[REGNO (temp)] = loc;
2193 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2194 }
2195
2196 seq = gen_sequence ();
2197 end_sequence ();
2198 emit_insn_after (seq, map->insns_at_start);
2199 return temp;
2200 }
2201 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2202 {
2203 /* Do the same for a block to contain any arguments referenced
2204 in memory. */
2205 rtx loc, seq;
2206 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2207
2208 start_sequence ();
2209 loc = assign_stack_temp (BLKmode, size, 1);
2210 loc = XEXP (loc, 0);
2211 /* When arguments grow downward, the virtual incoming
2212 args pointer points to the top of the argument block,
2213 so the remapped location had better do the same. */
2214#ifdef ARGS_GROW_DOWNWARD
2215 loc = plus_constant (loc, size);
2216#endif
2217 map->reg_map[regno] = temp
2218 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2219
2220#ifdef STACK_BOUNDARY
2221 mark_reg_pointer (map->reg_map[regno],
2222 STACK_BOUNDARY / BITS_PER_UNIT);
2223#endif
2224
2225 if (REGNO (temp) < map->const_equiv_map_size)
2226 {
2227 map->const_equiv_map[REGNO (temp)] = loc;
2228 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2229 }
2230
2231 seq = gen_sequence ();
2232 end_sequence ();
2233 emit_insn_after (seq, map->insns_at_start);
2234 return temp;
2235 }
2236 else if (REG_FUNCTION_VALUE_P (orig))
2237 {
2238 /* This is a reference to the function return value. If
2239 the function doesn't have a return value, error. If the
2240 mode doesn't agree, make a SUBREG. */
2241 if (map->inline_target == 0)
2242 /* Must be unrolling loops or replicating code if we
2243 reach here, so return the register unchanged. */
2244 return orig;
2245 else if (mode != GET_MODE (map->inline_target))
2246 return gen_lowpart (mode, map->inline_target);
2247 else
2248 return map->inline_target;
2249 }
2250 return orig;
2251 }
2252 if (map->reg_map[regno] == NULL)
2253 {
2254 map->reg_map[regno] = gen_reg_rtx (mode);
2255 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2256 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2257 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2258 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
12307ca2
RK
2259
2260 if (map->regno_pointer_flag[regno])
2261 mark_reg_pointer (map->reg_map[regno],
2262 map->regno_pointer_align[regno]);
175160e7
MT
2263 }
2264 return map->reg_map[regno];
2265
2266 case SUBREG:
2267 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2268 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2269 if (GET_CODE (copy) == SUBREG)
2270 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2271 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2272 else if (GET_CODE (copy) == CONCAT)
2273 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2274 else
2275 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2276 SUBREG_WORD (orig));
2277
2278 case USE:
2279 case CLOBBER:
2280 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2281 to (use foo) if the original insn didn't have a subreg.
2282 Removing the subreg distorts the VAX movstrhi pattern
2283 by changing the mode of an operand. */
2284 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2285 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2286 copy = SUBREG_REG (copy);
2287 return gen_rtx (code, VOIDmode, copy);
2288
2289 case CODE_LABEL:
2290 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2291 = LABEL_PRESERVE_P (orig);
2292 return map->label_map[CODE_LABEL_NUMBER (orig)];
2293
2294 case LABEL_REF:
2295 copy = gen_rtx (LABEL_REF, mode,
2296 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2297 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2298 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2299
2300 /* The fact that this label was previously nonlocal does not mean
2301 it still is, so we must check if it is within the range of
2302 this function's labels. */
2303 LABEL_REF_NONLOCAL_P (copy)
2304 = (LABEL_REF_NONLOCAL_P (orig)
2305 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2306 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2307
2308 /* If we have made a nonlocal label local, it means that this
2309 inlined call will be referring to our nonlocal goto handler.
2310 So make sure we create one for this block; we normally would
2311 not since this is not otherwise considered a "call". */
2312 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2313 function_call_count++;
2314
2315 return copy;
2316
2317 case PC:
2318 case CC0:
2319 case CONST_INT:
2320 return orig;
2321
2322 case SYMBOL_REF:
2323 /* Symbols which represent the address of a label stored in the constant
2324 pool must be modified to point to a constant pool entry for the
2325 remapped label. Otherwise, symbols are returned unchanged. */
2326 if (CONSTANT_POOL_ADDRESS_P (orig))
2327 {
2328 rtx constant = get_pool_constant (orig);
2329 if (GET_CODE (constant) == LABEL_REF)
2330 return XEXP (force_const_mem (Pmode,
2331 copy_rtx_and_substitute (constant,
2332 map)),
2333 0);
2334 }
2335
2336 return orig;
2337
2338 case CONST_DOUBLE:
2339 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2340 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2341 duplicate of a CONST_DOUBLE we have already seen. */
2342 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2343 {
2344 REAL_VALUE_TYPE d;
2345
2346 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2347 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2348 }
2349 else
2350 return immed_double_const (CONST_DOUBLE_LOW (orig),
2351 CONST_DOUBLE_HIGH (orig), VOIDmode);
2352
2353 case CONST:
2354 /* Make new constant pool entry for a constant
2355 that was in the pool of the inline function. */
2356 if (RTX_INTEGRATED_P (orig))
2357 {
2358 /* If this was an address of a constant pool entry that itself
2359 had to be placed in the constant pool, it might not be a
2360 valid address. So the recursive call below might turn it
2361 into a register. In that case, it isn't a constant any
2362 more, so return it. This has the potential of changing a
2363 MEM into a REG, but we'll assume that it is safe. */
2364 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2365 if (! CONSTANT_P (temp))
2366 return temp;
2367 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2368 }
2369 break;
2370
2371 case ADDRESS:
2372 /* If from constant pool address, make new constant pool entry and
2373 return its address. */
2374 if (! RTX_INTEGRATED_P (orig))
2375 abort ();
2376
2377 temp = force_const_mem (GET_MODE (orig),
2378 copy_rtx_and_substitute (XEXP (orig, 0), map));
2379
2380#if 0
2381 /* Legitimizing the address here is incorrect.
2382
2383 The only ADDRESS rtx's that can reach here are ones created by
2384 save_constants. Hence the operand of the ADDRESS is always valid
2385 in this position of the instruction, since the original rtx without
2386 the ADDRESS was valid.
2387
2388 The reason we don't legitimize the address here is that on the
2389 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2390 This code forces the operand of the address to a register, which
2391 fails because we can not take the HIGH part of a register.
2392
2393 Also, change_address may create new registers. These registers
2394 will not have valid reg_map entries. This can cause try_constants()
2395 to fail because it assumes that all registers in the rtx have valid
2396 reg_map entries, and it may end up replacing one of these new
2397 registers with junk. */
2398
2399 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2400 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2401#endif
2402
2403 return XEXP (temp, 0);
2404
2405 case ASM_OPERANDS:
2406 /* If a single asm insn contains multiple output operands
2407 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2408 We must make sure that the copied insn continues to share it. */
2409 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2410 {
2411 copy = rtx_alloc (ASM_OPERANDS);
2412 copy->volatil = orig->volatil;
2413 XSTR (copy, 0) = XSTR (orig, 0);
2414 XSTR (copy, 1) = XSTR (orig, 1);
2415 XINT (copy, 2) = XINT (orig, 2);
2416 XVEC (copy, 3) = map->copy_asm_operands_vector;
2417 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2418 XSTR (copy, 5) = XSTR (orig, 5);
2419 XINT (copy, 6) = XINT (orig, 6);
2420 return copy;
2421 }
2422 break;
2423
2424 case CALL:
2425 /* This is given special treatment because the first
2426 operand of a CALL is a (MEM ...) which may get
2427 forced into a register for cse. This is undesirable
2428 if function-address cse isn't wanted or if we won't do cse. */
2429#ifndef NO_FUNCTION_CSE
2430 if (! (optimize && ! flag_no_function_cse))
2431#endif
2432 return gen_rtx (CALL, GET_MODE (orig),
2433 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2434 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2435 copy_rtx_and_substitute (XEXP (orig, 1), map));
2436 break;
2437
2438#if 0
2439 /* Must be ifdefed out for loop unrolling to work. */
2440 case RETURN:
2441 abort ();
2442#endif
2443
2444 case SET:
2445 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2446 Don't alter that.
2447 If the nonlocal goto is into the current function,
2448 this will result in unnecessarily bad code, but should work. */
2449 if (SET_DEST (orig) == virtual_stack_vars_rtx
2450 || SET_DEST (orig) == virtual_incoming_args_rtx)
2451 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2452 copy_rtx_and_substitute (SET_SRC (orig), map));
2453 break;
2454
2455 case MEM:
2456 copy = rtx_alloc (MEM);
2457 PUT_MODE (copy, mode);
2458 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2459 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2460 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2461
2462 /* If doing function inlining, this MEM might not be const in the
2463 function that it is being inlined into, and thus may not be
2464 unchanging after function inlining. Constant pool references are
2465 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2466 for them. */
2467 if (! map->integrating)
2468 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2469
2470 return copy;
2471 }
2472
2473 copy = rtx_alloc (code);
2474 PUT_MODE (copy, mode);
2475 copy->in_struct = orig->in_struct;
2476 copy->volatil = orig->volatil;
2477 copy->unchanging = orig->unchanging;
2478
2479 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2480
2481 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2482 {
2483 switch (*format_ptr++)
2484 {
2485 case '0':
2486 break;
2487
2488 case 'e':
2489 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2490 break;
2491
2492 case 'u':
2493 /* Change any references to old-insns to point to the
2494 corresponding copied insns. */
2495 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2496 break;
2497
2498 case 'E':
2499 XVEC (copy, i) = XVEC (orig, i);
2500 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2501 {
2502 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2503 for (j = 0; j < XVECLEN (copy, i); j++)
2504 XVECEXP (copy, i, j)
2505 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2506 }
2507 break;
2508
2509 case 'w':
2510 XWINT (copy, i) = XWINT (orig, i);
2511 break;
2512
2513 case 'i':
2514 XINT (copy, i) = XINT (orig, i);
2515 break;
2516
2517 case 's':
2518 XSTR (copy, i) = XSTR (orig, i);
2519 break;
2520
2521 default:
2522 abort ();
2523 }
2524 }
2525
2526 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2527 {
2528 map->orig_asm_operands_vector = XVEC (orig, 3);
2529 map->copy_asm_operands_vector = XVEC (copy, 3);
2530 map->copy_asm_constraints_vector = XVEC (copy, 4);
2531 }
2532
2533 return copy;
2534}
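/* Editor's sketch (hypothetical, not GCC's rtl): the default arm of
   copy_rtx_and_substitute is driven by GET_RTX_FORMAT, one character
   per operand.  A reduced model of format-directed copying over a
   tagged node type (assumes at most two 'e' operands per format): */
#if 0
#include <stdlib.h>
#include <string.h>

struct node { const char *format; int ival; const char *sval; struct node *sub[2]; };

static struct node *
copy_by_format (struct node *orig)
{
  struct node *copy = malloc (sizeof *copy);
  const char *fp = orig->format;
  int i;

  memcpy (copy, orig, sizeof *copy);   /* scalars copied wholesale */
  for (i = 0; fp[i]; i++)
    switch (fp[i])
      {
      case 'e':                 /* sub-expression: recurse */
        copy->sub[i] = copy_by_format (orig->sub[i]);
        break;
      case 'i':                 /* plain int: bitwise copy suffices */
      case 's':                 /* shared string: keep the pointer */
      case '0':                 /* slot that needs no copying */
        break;
      default:
        abort ();               /* unknown format letter */
      }
  return copy;
}
#endif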
2535\f
2536/* Substitute known constant values into INSN, if that is valid. */
2537
2538void
2539try_constants (insn, map)
2540 rtx insn;
2541 struct inline_remap *map;
2542{
2543 int i;
2544
2545 map->num_sets = 0;
2546 subst_constants (&PATTERN (insn), insn, map);
2547
2548 /* Apply the changes if they are valid; otherwise discard them. */
2549 apply_change_group ();
2550
2551 /* Show we don't know the value of anything stored or clobbered. */
2552 note_stores (PATTERN (insn), mark_stores);
2553 map->last_pc_value = 0;
2554#ifdef HAVE_cc0
2555 map->last_cc0_value = 0;
2556#endif
2557
2558 /* Set up any constant equivalences made in this insn. */
2559 for (i = 0; i < map->num_sets; i++)
2560 {
2561 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2562 {
2563 int regno = REGNO (map->equiv_sets[i].dest);
2564
2565 if (regno < map->const_equiv_map_size
2566 && (map->const_equiv_map[regno] == 0
2567 /* Following clause is a hack to make case work where GNU C++
2568 reassigns a variable to make cse work right. */
2569 || ! rtx_equal_p (map->const_equiv_map[regno],
2570 map->equiv_sets[i].equiv)))
2571 {
2572 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2573 map->const_age_map[regno] = map->const_age;
2574 }
2575 }
2576 else if (map->equiv_sets[i].dest == pc_rtx)
2577 map->last_pc_value = map->equiv_sets[i].equiv;
2578#ifdef HAVE_cc0
2579 else if (map->equiv_sets[i].dest == cc0_rtx)
2580 map->last_cc0_value = map->equiv_sets[i].equiv;
2581#endif
2582 }
2583}
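/* Editor's sketch (not GCC's implementation): try_constants leans on
   the validate_change/apply_change_group protocol -- queue tentative
   in-place edits, then commit them all or roll them all back.  A toy
   version over int slots: */
#if 0
#define MAX_CHANGES 16

static struct { int *loc; int old; } changes[MAX_CHANGES];
static int pending;

static void
queue_change (int *loc, int new_val)
{
  changes[pending].loc = loc;
  changes[pending].old = *loc;   /* remember how to undo */
  pending++;
  *loc = new_val;                /* applied provisionally */
}

static void
commit_or_cancel (int ok)
{
  if (! ok)
    while (pending > 0)          /* undo in reverse order */
      {
        pending--;
        *changes[pending].loc = changes[pending].old;
      }
  pending = 0;
}
#endif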
2584\f
2585/* Substitute known constants for pseudo regs in the contents of LOC,
2586 which are part of INSN.
2587 If INSN is zero, the substitution should always be done (this is used to
2588 update DECL_RTL).
2589 These changes are taken out by try_constants if the result is not valid.
2590
2591 Note that we are more concerned with determining when the result of a SET
2592 is a constant, for further propagation, than actually inserting constants
2593 into insns; cse will do the latter task better.
2594
2595 This function is also used to adjust address of items previously addressed
2596 via the virtual stack variable or virtual incoming arguments registers. */
2597
2598static void
2599subst_constants (loc, insn, map)
2600 rtx *loc;
2601 rtx insn;
2602 struct inline_remap *map;
2603{
2604 rtx x = *loc;
2605 register int i;
2606 register enum rtx_code code;
2607 register char *format_ptr;
2608 int num_changes = num_validated_changes ();
2609 rtx new = 0;
2610 enum machine_mode op0_mode;
2611
2612 code = GET_CODE (x);
2613
2614 switch (code)
2615 {
2616 case PC:
2617 case CONST_INT:
2618 case CONST_DOUBLE:
2619 case SYMBOL_REF:
2620 case CONST:
2621 case LABEL_REF:
2622 case ADDRESS:
2623 return;
2624
2625#ifdef HAVE_cc0
2626 case CC0:
2627 validate_change (insn, loc, map->last_cc0_value, 1);
2628 return;
2629#endif
2630
2631 case USE:
2632 case CLOBBER:
2633 /* The only thing we can do with a USE or CLOBBER is possibly do
2634 some substitutions in a MEM within it. */
2635 if (GET_CODE (XEXP (x, 0)) == MEM)
2636 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2637 return;
2638
2639 case REG:
2640 /* Substitute for parms and known constants. Don't replace
2641 hard regs used as user variables with constants. */
2642 {
2643 int regno = REGNO (x);
2644
2645 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2646 && regno < map->const_equiv_map_size
2647 && map->const_equiv_map[regno] != 0
2648 && map->const_age_map[regno] >= map->const_age)
2649 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2650 return;
2651 }
2652
2653 case SUBREG:
2654 /* SUBREG applied to something other than a reg
2655 should be treated as ordinary, since that must
2656 be a special hack and we don't know how to treat it specially.
2657 Consider for example mulsidi3 in m68k.md.
2658 Ordinary SUBREG of a REG needs this special treatment. */
2659 if (GET_CODE (SUBREG_REG (x)) == REG)
2660 {
2661 rtx inner = SUBREG_REG (x);
2662 rtx new = 0;
2663
2664 /* We can't call subst_constants on &SUBREG_REG (x) because any
2665 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2666 see what is inside, try to form the new SUBREG and see if that is
2667 valid. We handle two cases: extracting a full word in an
2668 integral mode and extracting the low part. */
2669 subst_constants (&inner, NULL_RTX, map);
2670
2671 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2672 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2673 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2674 new = operand_subword (inner, SUBREG_WORD (x), 0,
2675 GET_MODE (SUBREG_REG (x)));
2676
2677 if (new == 0 && subreg_lowpart_p (x))
2678 new = gen_lowpart_common (GET_MODE (x), inner);
2679
2680 if (new)
2681 validate_change (insn, loc, new, 1);
2682
2683 return;
2684 }
2685 break;
2686
2687 case MEM:
2688 subst_constants (&XEXP (x, 0), insn, map);
2689
2690 /* If a memory address got spoiled, change it back. */
2691 if (insn != 0 && num_validated_changes () != num_changes
2692 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2693 cancel_changes (num_changes);
2694 return;
2695
2696 case SET:
2697 {
2698 /* Substitute constants in our source, and in any arguments to a
2699 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2700 itself. */
2701 rtx *dest_loc = &SET_DEST (x);
2702 rtx dest = *dest_loc;
2703 rtx src, tem;
2704
2705 subst_constants (&SET_SRC (x), insn, map);
2706 src = SET_SRC (x);
2707
2708 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2709 || GET_CODE (*dest_loc) == SUBREG
2710 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2711 {
2712 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2713 {
2714 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2715 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2716 }
2717 dest_loc = &XEXP (*dest_loc, 0);
2718 }
2719
2720 /* Do substitute in the address of a destination in memory. */
2721 if (GET_CODE (*dest_loc) == MEM)
2722 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2723
2724 /* Check for the case of DEST a SUBREG, both it and the underlying
2725 register are less than one word, and the SUBREG has the wider mode.
2726 In that case, we are really setting the underlying register to the
2727 source converted to the mode of DEST. So indicate that. */
2728 if (GET_CODE (dest) == SUBREG
2729 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2730 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2731 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2732 <= GET_MODE_SIZE (GET_MODE (dest)))
2733 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2734 src)))
2735 src = tem, dest = SUBREG_REG (dest);
2736
2737 /* If storing a recognizable value save it for later recording. */
2738 if ((map->num_sets < MAX_RECOG_OPERANDS)
2739 && (CONSTANT_P (src)
2740 || (GET_CODE (src) == REG
2741 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2742 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2743 || (GET_CODE (src) == PLUS
2744 && GET_CODE (XEXP (src, 0)) == REG
2745 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2746 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2747 && CONSTANT_P (XEXP (src, 1)))
2748 || GET_CODE (src) == COMPARE
2749#ifdef HAVE_cc0
2750 || dest == cc0_rtx
2751#endif
2752 || (dest == pc_rtx
2753 && (src == pc_rtx || GET_CODE (src) == RETURN
2754 || GET_CODE (src) == LABEL_REF))))
2755 {
2756 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2757 it will cause us to save the COMPARE with any constants
2758 substituted, which is what we want for later. */
2759 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2760 map->equiv_sets[map->num_sets++].dest = dest;
2761 }
2762
2763 return;
2764 }
2765 }
2766
2767 format_ptr = GET_RTX_FORMAT (code);
2768
2769 /* If the first operand is an expression, save its mode for later. */
2770 if (*format_ptr == 'e')
2771 op0_mode = GET_MODE (XEXP (x, 0));
2772
2773 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2774 {
2775 switch (*format_ptr++)
2776 {
2777 case '0':
2778 break;
2779
2780 case 'e':
2781 if (XEXP (x, i))
2782 subst_constants (&XEXP (x, i), insn, map);
2783 break;
2784
2785 case 'u':
2786 case 'i':
2787 case 's':
2788 case 'w':
2789 break;
2790
2791 case 'E':
2792 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2793 {
2794 int j;
2795 for (j = 0; j < XVECLEN (x, i); j++)
2796 subst_constants (&XVECEXP (x, i, j), insn, map);
2797 }
2798 break;
2799
2800 default:
2801 abort ();
2802 }
2803 }
2804
2805 /* If this is a commutative operation, move a constant to the second
2806 operand unless the second operand is already a CONST_INT. */
2807 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2808 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2809 {
2810 rtx tem = XEXP (x, 0);
2811 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2812 validate_change (insn, &XEXP (x, 1), tem, 1);
2813 }
2814
2815 /* Simplify the expression in case we put in some constants. */
2816 switch (GET_RTX_CLASS (code))
2817 {
2818 case '1':
2819 new = simplify_unary_operation (code, GET_MODE (x),
2820 XEXP (x, 0), op0_mode);
2821 break;
2822
2823 case '<':
2824 {
2825 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2826 if (op_mode == VOIDmode)
2827 op_mode = GET_MODE (XEXP (x, 1));
2828 new = simplify_relational_operation (code, op_mode,
2829 XEXP (x, 0), XEXP (x, 1));
2830#ifdef FLOAT_STORE_FLAG_VALUE
2831 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2832 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2833 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2834 GET_MODE (x)));
2835#endif
2836 break;
2837 }
2838
2839 case '2':
2840 case 'c':
2841 new = simplify_binary_operation (code, GET_MODE (x),
2842 XEXP (x, 0), XEXP (x, 1));
2843 break;
2844
2845 case 'b':
2846 case '3':
2847 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2848 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2849 break;
2850 }
2851
2852 if (new)
2853 validate_change (insn, loc, new, 1);
2854}
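/* Editor's sketch (illustrative only): the tail of subst_constants
   first canonicalizes commutative codes so a constant lands in
   operand 1, then tries to fold.  The same two steps on a toy
   addition node (names hypothetical): */
#if 0
struct binop { int commutative; int op0_is_const, op1_is_const; long v0, v1; };

static int
canonicalize_and_fold_plus (struct binop *x, long *result)
{
  /* Step 1: move a lone constant into operand 1.  */
  if (x->commutative && x->op0_is_const && ! x->op1_is_const)
    {
      long tv = x->v0;
      x->v0 = x->v1, x->v1 = tv;
      x->op0_is_const = 0, x->op1_is_const = 1;
    }

  /* Step 2: fold only when both operands are now known.  */
  if (x->op0_is_const && x->op1_is_const)
    {
      *result = x->v0 + x->v1;
      return 1;
    }
  return 0;
}
#endif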
2855
2856 /* Show that registers modified no longer contain known constants. We are
2857 called from note_stores with parts of the new insn. */
2858
2859void
2860mark_stores (dest, x)
2861 rtx dest;
2862 rtx x;
2863{
2864 int regno = -1;
2865 enum machine_mode mode;
2866
2867 /* DEST is always the innermost thing set, except in the case of
2868 SUBREGs of hard registers. */
2869
2870 if (GET_CODE (dest) == REG)
2871 regno = REGNO (dest), mode = GET_MODE (dest);
2872 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2873 {
2874 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2875 mode = GET_MODE (SUBREG_REG (dest));
2876 }
2877
2878 if (regno >= 0)
2879 {
2880 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2881 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2882 int i;
2883
2884 for (i = regno; i <= last_reg; i++)
2885 if (i < global_const_equiv_map_size)
2886 global_const_equiv_map[i] = 0;
2887 }
2888}
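/* Editor's sketch (hypothetical constants): a store into a multi-word
   hard register must invalidate every register word it covers, which
   is why mark_stores widens REGNO to a span before clearing entries.
   The span computation in isolation: */
#if 0
#define FIRST_PSEUDO 64          /* assumed boundary; machine-dependent */

static void
invalidate_span (void **equiv_map, int map_size, int regno, int nregs)
{
  /* Pseudos occupy one slot; hard registers may occupy several.  */
  int last = (regno >= FIRST_PSEUDO ? regno : regno + nregs - 1);
  int i;

  for (i = regno; i <= last; i++)
    if (i < map_size)
      equiv_map[i] = 0;          /* value no longer known */
}
#endif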
2889\f
2890/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2891 pointed to by PX, they represent constants in the constant pool.
2892 Replace these with a new memory reference obtained from force_const_mem.
2893 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2894 address of a constant pool entry. Replace them with the address of
2895 a new constant pool entry obtained from force_const_mem. */
2896
2897static void
2898restore_constants (px)
2899 rtx *px;
2900{
2901 rtx x = *px;
2902 int i, j;
2903 char *fmt;
2904
2905 if (x == 0)
2906 return;
2907
2908 if (GET_CODE (x) == CONST_DOUBLE)
2909 {
2910 /* We have to make a new CONST_DOUBLE to ensure that we account for
2911 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2912 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2913 {
2914 REAL_VALUE_TYPE d;
2915
2916 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
81fbaa41 2917 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
175160e7
MT
2918 }
2919 else
2920 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2921 VOIDmode);
2922 }
2923
2924 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2925 {
2926 restore_constants (&XEXP (x, 0));
2927 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2928 }
2929 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2930 {
2931 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2932 rtx new = XEXP (SUBREG_REG (x), 0);
2933
2934 restore_constants (&new);
2935 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2936 PUT_MODE (new, GET_MODE (x));
2937 *px = validize_mem (new);
2938 }
2939 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2940 {
2941 restore_constants (&XEXP (x, 0));
2942 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2943 }
2944 else
2945 {
2946 fmt = GET_RTX_FORMAT (GET_CODE (x));
2947 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2948 {
2949 switch (*fmt++)
2950 {
2951 case 'E':
2952 for (j = 0; j < XVECLEN (x, i); j++)
2953 restore_constants (&XVECEXP (x, i, j));
2954 break;
2955
2956 case 'e':
2957 restore_constants (&XEXP (x, i));
2958 break;
2959 }
2960 }
2961 }
2962}
2963\f
2964/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2965 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2966 that it points to the node itself, thus indicating that the node is its
2967 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2968 the given node is NULL, recursively descend the decl/block tree which
2969 it is the root of, and for each other ..._DECL or BLOCK node contained
2970 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2971 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2972 values to point to themselves. */
2973
2974static void
2975set_block_origin_self (stmt)
2976 register tree stmt;
2977{
2978 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2979 {
2980 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2981
2982 {
2983 register tree local_decl;
2984
2985 for (local_decl = BLOCK_VARS (stmt);
2986 local_decl != NULL_TREE;
2987 local_decl = TREE_CHAIN (local_decl))
2988 set_decl_origin_self (local_decl); /* Potential recursion. */
2989 }
2990
2991 {
2992 register tree subblock;
2993
2994 for (subblock = BLOCK_SUBBLOCKS (stmt);
2995 subblock != NULL_TREE;
2996 subblock = BLOCK_CHAIN (subblock))
2997 set_block_origin_self (subblock); /* Recurse. */
2998 }
2999 }
3000}
3001
3002/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3003 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3004 node so that it points to the node itself, thus indicating that the
3005 node represents its own (abstract) origin. Additionally, if the
3006 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3007 the decl/block tree of which the given node is the root, and for
3008 each other ..._DECL or BLOCK node contained therein whose
3009 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3010 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3011 point to themselves. */
3012
3013static void
3014set_decl_origin_self (decl)
3015 register tree decl;
3016{
3017 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3018 {
3019 DECL_ABSTRACT_ORIGIN (decl) = decl;
3020 if (TREE_CODE (decl) == FUNCTION_DECL)
3021 {
3022 register tree arg;
3023
3024 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3025 DECL_ABSTRACT_ORIGIN (arg) = arg;
3026 if (DECL_INITIAL (decl) != NULL_TREE
3027 && DECL_INITIAL (decl) != error_mark_node)
3028 set_block_origin_self (DECL_INITIAL (decl));
3029 }
3030 }
3031}
3032\f
3033/* Given a pointer to some BLOCK node, and a boolean value to set the
3034 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3035 the given block, and for all local decls and all local sub-blocks
3036 (recursively) which are contained therein. */
3037
3038static void
3039set_block_abstract_flags (stmt, setting)
3040 register tree stmt;
3041 register int setting;
3042{
3043 register tree local_decl;
3044 register tree subblock;
3045
3046 BLOCK_ABSTRACT (stmt) = setting;
3047
3048 for (local_decl = BLOCK_VARS (stmt);
3049 local_decl != NULL_TREE;
3050 local_decl = TREE_CHAIN (local_decl))
3051 set_decl_abstract_flags (local_decl, setting);
3052
3053 for (subblock = BLOCK_SUBBLOCKS (stmt);
3054 subblock != NULL_TREE;
3055 subblock = BLOCK_CHAIN (subblock))
3056 set_block_abstract_flags (subblock, setting);
3057}
3058
3059/* Given a pointer to some ..._DECL node, and a boolean value to set the
3060 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3061 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3062 set the abstract flags for all of the parameters, local vars, local
3063 blocks and sub-blocks (recursively) to the same setting. */
3064
3065void
3066set_decl_abstract_flags (decl, setting)
3067 register tree decl;
3068 register int setting;
3069{
3070 DECL_ABSTRACT (decl) = setting;
3071 if (TREE_CODE (decl) == FUNCTION_DECL)
3072 {
3073 register tree arg;
3074
3075 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3076 DECL_ABSTRACT (arg) = setting;
3077 if (DECL_INITIAL (decl) != NULL_TREE
3078 && DECL_INITIAL (decl) != error_mark_node)
3079 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3080 }
3081}
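/* Editor's sketch (not GCC code): set_decl_abstract_flags and
   set_block_abstract_flags are mutually recursive -- a decl that owns
   a body descends into its block, and a block descends into its
   decls.  The shape in miniature: */
#if 0
struct mdecl; struct mblock;
struct mdecl  { int flag; struct mblock *body; struct mdecl *chain; };
struct mblock { int flag; struct mdecl *vars;
                struct mblock *subblocks; struct mblock *chain; };

static void mark_block (struct mblock *, int);

static void
mark_decl (struct mdecl *d, int setting)
{
  d->flag = setting;
  if (d->body)
    mark_block (d->body, setting);   /* descend into the owned block */
}

static void
mark_block (struct mblock *b, int setting)
{
  struct mdecl *d;
  struct mblock *s;

  b->flag = setting;
  for (d = b->vars; d; d = d->chain)
    mark_decl (d, setting);
  for (s = b->subblocks; s; s = s->chain)
    mark_block (s, setting);
}
#endif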
3082\f
3083/* Output the assembly language code for the function FNDECL
3084 from its DECL_SAVED_INSNS. Used for inline functions that are output
3085 at end of compilation instead of where they came in the source. */
3086
3087void
3088output_inline_function (fndecl)
3089 tree fndecl;
3090{
3091 rtx head;
3092 rtx last;
3093 int save_flag_no_inline = flag_no_inline;
3094
3095 if (output_bytecode)
3096 {
3097 warning ("`inline' ignored for bytecode output");
3098 return;
3099 }
3100
3101 /* Things we allocate from here on are part of this function, not
3102 permanent. */
3103 temporary_allocation ();
3104
3105 head = DECL_SAVED_INSNS (fndecl);
3106 current_function_decl = fndecl;
3107
3108 /* This call is only used to initialize global variables. */
3109 init_function_start (fndecl, "lossage", 1);
3110
3111 /* Redo parameter determinations in case the FUNCTION_...
3112 macros took machine-specific actions that need to be redone. */
3113 assign_parms (fndecl, 1);
3114
3115 /* Set stack frame size. */
3116 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3117
3118 /* The first is a bit of a lie (the array may be larger), but doesn't
3119 matter too much and it isn't worth saving the actual bound. */
3120 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3121 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3122 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3123 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3124
3125 stack_slot_list = STACK_SLOT_LIST (head);
3126 forced_labels = FORCED_LABELS (head);
3127
3128 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3129 current_function_calls_alloca = 1;
3130
3131 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3132 current_function_calls_setjmp = 1;
3133
3134 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3135 current_function_calls_longjmp = 1;
3136
3137 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3138 current_function_returns_struct = 1;
3139
3140 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3141 current_function_returns_pcc_struct = 1;
3142
3143 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3144 current_function_needs_context = 1;
3145
3146 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3147 current_function_has_nonlocal_label = 1;
3148
3149 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3150 current_function_returns_pointer = 1;
3151
3152 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3153 current_function_uses_const_pool = 1;
3154
3155 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3156 current_function_uses_pic_offset_table = 1;
3157
3158 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3159 current_function_pops_args = POPS_ARGS (head);
3160
3161 /* This is the only thing that the expand_function_end call that used to be
3162 here actually does, and that call can cause problems. */
3163 immediate_size_expand--;
3164
3165 /* Find last insn and rebuild the constant pool. */
3166 for (last = FIRST_PARM_INSN (head);
3167 NEXT_INSN (last); last = NEXT_INSN (last))
3168 {
3169 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3170 {
3171 restore_constants (&PATTERN (last));
3172 restore_constants (&REG_NOTES (last));
3173 }
3174 }
3175
3176 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3177 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3178
3179 /* We must have already output DWARF debugging information for the
3180 original (abstract) inline function declaration/definition, so
3181 we want to make sure that the debugging information we generate
3182 for this special instance of the inline function refers back to
3183 the information we already generated. To make sure that happens,
3184 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3185 node (and for all of the local ..._DECL nodes which are its children)
3186 so that they all point to themselves. */
3187
3188 set_decl_origin_self (fndecl);
3189
3190 /* We're not deferring this any longer. */
3191 DECL_DEFER_OUTPUT (fndecl) = 0;
3192
3193 /* Integrating function calls isn't safe anymore, so turn on
3194 flag_no_inline. */
3195 flag_no_inline = 1;
3196
3197 /* Compile this function all the way down to assembly code. */
3198 rest_of_compilation (fndecl);
3199
3200 /* Reset flag_no_inline to its original value. */
3201 flag_no_inline = save_flag_no_inline;
3202
3203 current_function_decl = 0;
3204 }
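/* Editor's sketch (illustrative): output_inline_function brackets
   rest_of_compilation with a save/restore of flag_no_inline, the
   classic pattern for temporarily overriding a global.  In general
   form (names hypothetical): */
#if 0
static int flag_no_inline;

static void
with_inlining_disabled (void (*fn) (void *), void *arg)
{
  int saved = flag_no_inline;    /* remember the caller's setting */
  flag_no_inline = 1;            /* re-entering the inliner here is unsafe */
  fn (arg);
  flag_no_inline = saved;        /* restore on the way out */
}
#endif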