/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();

/* Round the given value up to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
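/* For example (editor's illustration), CEIL_ROUND (13, 8) is
   (13 + 7) & ~7 == 16, while a value already meeting the alignment is
   unchanged: CEIL_ROUND (16, 8) == 16.  ALIGN must be a power of two.  */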

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
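/* So, for example, a function of two arguments gets a default budget of
   8 * (8 + 2) = 80 insns under this definition.  */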
\f
static rtx initialize_for_inline PROTO((tree, int, int, int, int));
static void finish_inline PROTO((tree, rtx));
static void adjust_copied_decl_tree PROTO((tree));
static tree copy_decl_list PROTO((tree));
static tree copy_decl_tree PROTO((tree));
static void copy_decl_rtls PROTO((tree));
static void save_constants PROTO((rtx *));
static void note_modified_parmregs PROTO((rtx, rtx));
static rtx copy_for_inline PROTO((rtx));
static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
static void save_constants_in_decl_trees PROTO ((tree));
static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
static void restore_constants PROTO((rtx *));
static void set_block_origin_self PROTO((tree));
static void set_decl_origin_self PROTO((tree));
static void set_block_abstract_flags PROTO((tree, int));

void set_decl_abstract_flags PROTO((tree, int));
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, it is assumed to be a fresh label.
   Essentially, we use this function to perform a lazy initialization
   of label_map, thereby avoiding huge memory explosions when the
   label_map gets very large.  */
rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
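/* Illustrative (hypothetical) use, showing the lazy allocation: the first
   call for a given label number creates the fresh label, and later calls
   return that same rtx:

     rtx lab = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));  */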


/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return BLKmode structures in registers.  */
  if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
        return "function with varying-size parameter cannot be inline";
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
        return "function with transparent union parameter cannot be inline";
    }

  if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          ninsns++;

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels cannot be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return "function with nonlocal goto cannot be inline";

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
         && ! (GET_CODE (insn) == NOTE
               && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
        return "function with complex parameters cannot be inline";
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return "inline functions not supported for this return value type";

  return 0;
}
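
/* A sketch of how a caller might use the message returned above; the real
   caller is rest_of_compilation, which applies further conditions before
   warning:

     char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       warning_with_decl (fndecl, lose);  */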
\f
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
extern int max_parm_reg;
extern rtx *parm_reg_stack_loc;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
\f
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
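
  /* (Editor's note)  When this function is later expanded inline, the
     packed flags word is tested bit by bit; see for instance the
     FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE test
     in expand_inline_function below.  */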

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
         otherwise the copy_rtx call below will not unshare the MEM since
         it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
          && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
        p = XEXP (XEXP (p, 0), 0);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw the address
             of the rtl.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new;
          DECL_RTL (parms) = new;
        }

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
        {
          rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
          rtx pimag = gen_imagpart (GET_MODE (preal), p);

          if (GET_CODE (preal) == REG)
            parmdecl_map[REGNO (preal)] = parms;
          if (GET_CODE (pimag) == REG)
            parmdecl_map[REGNO (pimag)] = parms;
        }

      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     the labels that are forced to exist,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     the original DECL_INITIAL,
     and pointers to the table of pseudo regs, pointer flags, and alignment.  */
175160e7 364
02bea8a8 365 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
175160e7
MT
366 max_parm_reg, max_reg,
367 current_function_args_size,
368 current_function_pops_args,
5b0e2c7d 369 stack_slot_list, forced_labels, function_flags,
175160e7 370 current_function_outgoing_args_size,
12307ca2
RK
371 arg_vector, (rtx) DECL_INITIAL (fndecl),
372 (rtvec) regno_reg_rtx, regno_pointer_flag,
e9a25f70
JL
373 regno_pointer_align,
374 (rtvec) parm_reg_stack_loc);
175160e7
MT
375}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
}

/* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
   they all point to the new (copied) rtxs.  */

static void
adjust_copied_decl_tree (block)
     register tree block;
{
  register tree subblock;
  register rtx original_end;

  original_end = BLOCK_END_NOTE (block);
  if (original_end)
    {
      BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
      NOTE_SOURCE_FILE (original_end) = 0;
    }

  /* Process all subblocks.  */
  for (subblock = BLOCK_SUBBLOCKS (block);
       subblock;
       subblock = TREE_CHAIN (subblock))
    adjust_copied_decl_tree (subblock);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  Our caller (rest_of_compilation) saves the original
   DECL_INITIAL and DECL_ARGUMENTS; here we copy them.  */

/* ??? The nonlocal_label list should be adjusted also.  However, since
   a function that contains a nested function never gets inlined currently,
   the nonlocal_label list will always be empty, so we don't worry about
   it for now.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  char *new, *new1;
  rtx *new_parm_reg_stack_loc;
  rtx *new2;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Also scan all decls, and replace any constant pool references with the
         actual constant.  */
      save_constants_in_decl_trees (DECL_INITIAL (fndecl));

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain of insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
                                     regno_reg_rtx[i], len);

  regno_reg_rtx = reg_map;

  /* Put copies of all the virtual register rtx into the new regno_reg_rtx.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Likewise for parm_reg_stack_loc.  */
  new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  for (i = 0; i < max_parm_reg; i++)
    new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  parm_reg_stack_loc = new_parm_reg_stack_loc;

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero ((char *) insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
        = copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
     contained in it.  */
  new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
  bcopy ((char *) parm_reg_stack_loc, (char *) new2,
         max_parm_reg * sizeof (rtx));
  parm_reg_stack_loc = new2;
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
    if (parm_reg_stack_loc[i])
      parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the original
     subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original rtl
     which we will use for inlining.  The rtl might even contain pseudoregs
     whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
  DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* The fndecl node acts as its own progenitor, so mark it as such.  */
  DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;

  /* Now copy the chain of insns.  Do this in two passes.  The first pass
     copies each insn itself and its body; the second pass copies the
     REG_NOTES.  This is because a REG_NOTE may have a forward pointer to
     another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
            NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          else
            {
              NOTE_SOURCE_FILE (insn) = (char *) copy;
              NOTE_SOURCE_FILE (copy) = 0;
            }
          if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
              || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
            {
              /* We have to forward these both to match the new exception
                 region.  */
              NOTE_BLOCK_NUMBER (copy)
                = CODE_LABEL_NUMBER (label_map[NOTE_BLOCK_NUMBER (copy)]);
            }
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          copy = rtx_alloc (GET_CODE (insn));

          if (GET_CODE (insn) == CALL_INSN)
            CALL_INSN_FUNCTION_USAGE (copy)
              = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));

          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL_RTX;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  adjust_copied_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
        = copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  /* Make new versions of the register tables.  */
  new = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
  new1 = (char *) savealloc (regno_pointer_flag_length);
  bcopy (regno_pointer_align, new1, regno_pointer_flag_length);

  regno_pointer_flag = new;
  regno_pointer_align = new1;

  set_new_first_and_last_insn (first_insn, last_insn);

  if (label_map)
    free (label_map);
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  While copying,
   for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
   set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
   point to the corresponding (abstract) original node.  */

static tree
copy_decl_list (list)
     tree list;
{
  tree head;
  register tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
    DECL_ABSTRACT_ORIGIN (head) = list;
  next = TREE_CHAIN (list);
  while (next)
    {
      register tree copy;

      copy = copy_node (next);
      if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
        DECL_ABSTRACT_ORIGIN (copy) = next;
      TREE_CHAIN (prev) = copy;
      prev = copy;
      next = TREE_CHAIN (next);
    }
  return head;
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_decl_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  /* If the BLOCK being cloned is already marked as having been instantiated
     from something else, then leave that `origin' marking alone.  Otherwise,
     mark the clone as having originated from the BLOCK we are cloning.  */
  if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
    BLOCK_ABSTRACT_ORIGIN (t) = block;
  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
                                max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          if (current_function_uses_const_pool)
            {
              /* Replace any constant pool references with the actual constant.
                 We will put the constant back if we need to write the
                 function out after all.  */
              save_constants (&PATTERN (insn));
              if (REG_NOTES (insn))
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }

  /* Also scan all decls, and replace any constant pool references with the
     actual constant.  */
  save_constants_in_decl_trees (DECL_INITIAL (fndecl));

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
\f
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS and CONST rtx which also gives the constant, its
   mode, the mode of the address, and has RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx_ADDRESS (GET_MODE (x),
                             gen_rtx_CONST (get_pool_mode (x),
                                            get_pool_constant (x)));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
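
/* As an illustration (constant and label names made up): a pool reference
   such as

     (mem:SI (symbol_ref/u:SI ("*.LC0")))

   whose pool entry holds (const_int 42) becomes

     (const:SI (const_int 42))

   with RTX_INTEGRATED_P set, which is how copy_for_inline and
   restore_constants recognize it and rebuild the pool entry later.  */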
\f
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register rtx new;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          new = force_const_mem (GET_MODE (SUBREG_REG (x)),
                                 copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;

    case ADDRESS:
      /* If this ADDRESS is not a saved constant pool reference, it is an
         error.  Otherwise get the constant pool address.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      new = force_const_mem (GET_MODE (XEXP (x, 0)),
                             copy_for_inline (XEXP (XEXP (x, 0), 0)));
      new = XEXP (new, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
        new = convert_memory_address (GET_MODE (x), new);
#endif

      return new;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          x->volatil = orig->volatil;
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address, into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this MEM
         as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                         == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
         Otherwise, use the new label as well.  */
      x = gen_rtx_LABEL_REF (GET_MODE (orig),
                             LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                             : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
         clear its TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif

    default:
      break;
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
         (sizeof (*x) - sizeof (x->fld)
          + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}
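
/* A small example of the remapping done above: a copied JUMP_INSN whose
   pattern contains (label_ref N) ends up referencing the fresh label
   allocated in label_map for code-label N, and insn references in 'u'
   fields (e.g. inside REG_NOTES) are redirected through insn_map.  */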

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;
\f
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
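
/* E.g. (plus:SI (reg:SI virtual-stack-vars) (const_int 8)) satisfies
   FIXED_BASE_PLUS_P, since the virtual registers lie in the range
   FIRST_VIRTUAL_REGISTER .. LAST_VIRTUAL_REGISTER.  */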

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */
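
/* A sketch of the calling convention (the real caller is expand_call in
   calls.c): the distinguished value (rtx) -1 means "fall back to a normal
   call":

     temp = expand_inline_function (fndecl, parms, target, ignore,
                                    type, structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ... emit an ordinary CALL_INSN instead ...  */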

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
  rtx static_chain_value = 0;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parm types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
        return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
          /* If they are block mode, the types should match exactly.
             They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
             which could happen if the parameter has incomplete type.  */
          || (mode == BLKmode
              && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
                  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
        return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
                 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);
          MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
      else if (GET_CODE (loc) != MEM)
        {
          if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
            /* The modes of LOC and ARG can differ if LOC was a variable
               that had its mode promoted via PROMOTED_MODE.  */
            arg_vals[i] = convert_modes (GET_MODE (loc),
                                         TYPE_MODE (TREE_TYPE (arg)),
                                         expand_expr (arg, NULL_RTX, mode,
                                                      EXPAND_SUM),
                                         TREE_UNSIGNED (TREE_TYPE (formal)));
          else
            arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
        }
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.
                 Don't worry about invisible references, since their stack
                 temps will never overlap the target.  */
              || (target != 0
                  && ! invisiref
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))
              /* ??? We must always copy a SUBREG into a REG, because it might
                 get substituted into an address, and not all ports correctly
                 handle SUBREGs in addresses.  */
              || (GET_CODE (arg_vals[i]) == SUBREG)))
        arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
          && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
        mark_reg_pointer (arg_vals[i],
                          (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
                           / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) map->reg_map, max_regno * sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = INSN_UID (header);

  map->integrating = 1;

  /* const_equiv_map maps pseudos in our routine to constants, so it needs to
     be large enough for all our pseudos.  This is the number we are currently
     using plus the number in the called routine, plus 15 for each arg,
     five to compute the virtual frame pointer, and five for the return value.
     This should be enough for most cases.  We do not reference entries
     outside the range of the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  map->const_equiv_map_size
    = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero ((char *) map->const_equiv_map,
         map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero ((char *) map->const_age_map,
         map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
  map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
                            NOTE_LINE_NUMBER (parm_insns));
      if (note)
        RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.
             It will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             now; we will call store_expr later.  */
          ;
        }
      else if (GET_CODE (loc) == REG)
        {
          /* This is the good case where the parameter is in a register.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */

          if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
              || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
                  && ! REG_USERVAR_P (copy))
              || (GET_CODE (copy) == REG
                  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
            {
              temp = copy_to_mode_reg (GET_MODE (loc), copy);
              REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
              if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                  && REGNO (temp) < map->const_equiv_map_size)
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (loc)] = copy;
        }
      else if (GET_CODE (loc) == CONCAT)
        {
          /* This is the good case where the parameter is in a
             pair of separate pseudos.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.

             If LOC is REG_USERVAR_P, the usual case, COPY must also have
             that flag set if it is a register.

             Also, don't allow hard registers here; they might not be valid
             when substituted into insns.  */
          rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
          rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
          rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

          if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
              || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
                  && ! REG_USERVAR_P (copyreal))
              || (GET_CODE (copyreal) == REG
1608 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1609 {
1610 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1611 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
2b145ea8
RK
1612 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1613 && REGNO (temp) < map->const_equiv_map_size)
bc2eeab2
RS
1614 {
1615 map->const_equiv_map[REGNO (temp)] = copyreal;
1616 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1617 }
1618 copyreal = temp;
1619 }
1620 map->reg_map[REGNO (locreal)] = copyreal;
1621
1622 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1623 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1624 && ! REG_USERVAR_P (copyimag))
1625 || (GET_CODE (copyimag) == REG
1626 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1627 {
1628 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1629 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
2b145ea8
RK
1630 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1631 && REGNO (temp) < map->const_equiv_map_size)
bc2eeab2
RS
1632 {
1633 map->const_equiv_map[REGNO (temp)] = copyimag;
1634 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1635 }
1636 copyimag = temp;
1637 }
1638 map->reg_map[REGNO (locimag)] = copyimag;
1639 }
175160e7
MT
1640 else
1641 abort ();
175160e7
MT
1642 }
1643
1644 /* Now do the parameters that will be placed in memory. */
1645
1646 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1647 formal; formal = TREE_CHAIN (formal), i++)
1648 {
175160e7
MT
1649 loc = RTVEC_ELT (arg_vector, i);
1650
1651 if (GET_CODE (loc) == MEM
1652 /* Exclude case handled above. */
1653 && ! (GET_CODE (XEXP (loc, 0)) == REG
1654 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1655 {
cdd6e2db
TW
1656 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1657 DECL_SOURCE_LINE (formal));
1658 if (note)
1659 RTX_INTEGRATED_P (note) = 1;
175160e7
MT
1660
1661 /* Compute the address in the area we reserved and store the
1662 value there. */
1663 temp = copy_rtx_and_substitute (loc, map);
02bea8a8 1664 subst_constants (&temp, NULL_RTX, map);
175160e7
MT
1665 apply_change_group ();
1666 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1667 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1668 store_expr (arg_trees[i], temp, 0);
175160e7
MT
1669 }
1670 }
1671
1672 /* Deal with the places that the function puts its result.
1673 We are driven by what is placed into DECL_RESULT.
1674
1675 Initially, we assume that we don't have any special handling for
1676 REG_FUNCTION_VALUE_P. */
1677
1678 map->inline_target = 0;
1679 loc = DECL_RTL (DECL_RESULT (fndecl));
1680 if (TYPE_MODE (type) == VOIDmode)
1681 /* There is no return value to worry about. */
1682 ;
1683 else if (GET_CODE (loc) == MEM)
1684 {
1685 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1686 abort ();
1687
1688 /* Pass the function the address in which to return a structure value.
1689 Note that a constructor can cause someone to call us with
1690 STRUCTURE_VALUE_ADDR, but the initialization takes place
1691 via the first parameter, rather than the struct return address.
1692
1693 We have two cases: If the address is a simple register indirect,
1694 use the mapping mechanism to point that register to our structure
1695 return address. Otherwise, store the structure return value into
1696 the place that it will be referenced from. */
1697
1698 if (GET_CODE (XEXP (loc, 0)) == REG)
1699 {
f72a8759
RK
1700 temp = force_reg (Pmode,
1701 force_operand (structure_value_addr, NULL_RTX));
175160e7 1702 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
2b145ea8 1703 if ((CONSTANT_P (structure_value_addr)
e9a25f70 1704 || GET_CODE (structure_value_addr) == ADDRESSOF
2b145ea8
RK
1705 || (GET_CODE (structure_value_addr) == PLUS
1706 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1707 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1708 && REGNO (temp) < map->const_equiv_map_size)
175160e7
MT
1709 {
1710 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1711 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1712 }
1713 }
1714 else
1715 {
1716 temp = copy_rtx_and_substitute (loc, map);
02bea8a8 1717 subst_constants (&temp, NULL_RTX, map);
175160e7
MT
1718 apply_change_group ();
1719 emit_move_insn (temp, structure_value_addr);
1720 }
1721 }
1722 else if (ignore)
1723 /* We will ignore the result value, so don't look at its structure.
1724 Note that preparations for an aggregate return value
1725 do need to be made (above) even if it will be ignored. */
1726 ;
1727 else if (GET_CODE (loc) == REG)
1728 {
1729 /* The function returns an object in a register and we use the return
1730 value. Set up our target for remapping. */
1731
1732 /* Machine mode the function was declared to return. */
1733 enum machine_mode departing_mode = TYPE_MODE (type);
1734 /* (Possibly wider) machine mode it actually computes
3ff2293f
BK
1735 (for the sake of callers that fail to declare it right).
1736 We have to use the mode of the result's RTL, rather than
1737 its type, since expand_function_start may have promoted it. */
175160e7 1738 enum machine_mode arriving_mode
3ff2293f 1739 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
175160e7
MT
1740 rtx reg_to_map;
1741
1742 /* Don't use MEMs as direct targets because on some machines
1743 substituting a MEM for a REG makes invalid insns.
1744 Let the combiner substitute the MEM if that is valid. */
1745 if (target == 0 || GET_CODE (target) != REG
1746 || GET_MODE (target) != departing_mode)
1747 target = gen_reg_rtx (departing_mode);
1748
1749 /* If function's value was promoted before return,
1750 avoid machine mode mismatch when we substitute INLINE_TARGET.
1751 But TARGET is what we will return to the caller. */
1752 if (arriving_mode != departing_mode)
2d0bd5fd
RK
1753 {
1754 /* Avoid creating a paradoxical subreg wider than
1755 BITS_PER_WORD, since that is illegal. */
1756 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1757 {
1758 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1759 GET_MODE_BITSIZE (arriving_mode)))
1760 /* Maybe this could be handled by using convert_move ()? */
1761 abort ();
1762 reg_to_map = gen_reg_rtx (arriving_mode);
1763 target = gen_lowpart (departing_mode, reg_to_map);
1764 }
1765 else
38a448ca 1766 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
2d0bd5fd 1767 }
175160e7
MT
1768 else
1769 reg_to_map = target;
1770
1771 /* Usually, the result value is the machine's return register.
1772 Sometimes it may be a pseudo. Handle both cases. */
1773 if (REG_FUNCTION_VALUE_P (loc))
1774 map->inline_target = reg_to_map;
1775 else
1776 map->reg_map[REGNO (loc)] = reg_to_map;
1777 }
64ed0f40
JW
1778 else
1779 abort ();
175160e7 1780
255fe733
JM
1781 /* Make a fresh binding contour that we can easily remove. Do this after
1782 expanding our arguments so cleanups are properly scoped. */
1783 pushlevel (0);
1784 expand_start_bindings (0);
1785
175160e7
MT
1786 /* Make new label equivalences for the labels in the called function. */
1787 for (i = min_labelno; i < max_labelno; i++)
1f3d3a31 1788 map->label_map[i] = NULL_RTX;
175160e7
MT
1789
1790 /* Perform postincrements before actually calling the function. */
1791 emit_queue ();
1792
1793 /* Clean up stack so that variables might have smaller offsets. */
1794 do_pending_stack_adjust ();
1795
1796 /* Save a copy of the location of const_equiv_map for mark_stores, called
1797 via note_stores. */
1798 global_const_equiv_map = map->const_equiv_map;
2b145ea8 1799 global_const_equiv_map_size = map->const_equiv_map_size;
175160e7 1800
136cf361
RK
1801 /* If the called function does an alloca, save and restore the
1802 stack pointer around the call. This saves stack space, but
2132517d
RK
1803 also is required if this inline is being done between two
1804 pushes. */
1805 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1806 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1807
175160e7
MT
1808 /* Now copy the insns one by one. Do this in two passes, first the insns and
1809 then their REG_NOTES, just like save_for_inline. */
1810
1811 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1812
1813 for (insn = insns; insn; insn = NEXT_INSN (insn))
1814 {
c9734bb9 1815 rtx copy, pattern, set;
175160e7
MT
1816
1817 map->orig_asm_operands_vector = 0;
1818
1819 switch (GET_CODE (insn))
1820 {
1821 case INSN:
1822 pattern = PATTERN (insn);
c9734bb9 1823 set = single_set (insn);
175160e7
MT
1824 copy = 0;
1825 if (GET_CODE (pattern) == USE
1826 && GET_CODE (XEXP (pattern, 0)) == REG
1827 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1828 /* The (USE (REG n)) at return from the function should
1829 be ignored since we are changing (REG n) into
1830 inline_target. */
1831 break;
1832
154bba13
TT
1833 /* If the inline fn needs eh context, make sure that
1834 the current fn has one. */
1835 if (GET_CODE (pattern) == USE
1836 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
01eb7f9a 1837 get_eh_context ();
154bba13 1838
175160e7
MT
1839 /* Ignore setting a function value that we don't want to use. */
1840 if (map->inline_target == 0
c9734bb9
RK
1841 && set != 0
1842 && GET_CODE (SET_DEST (set)) == REG
1843 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
5cd76fcd 1844 {
c9734bb9 1845 if (volatile_refs_p (SET_SRC (set)))
5cd76fcd 1846 {
c9734bb9
RK
1847 rtx new_set;
1848
5cd76fcd
RS
1849 /* If we must not delete the source,
1850 load it into a new temporary. */
1851 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
c9734bb9
RK
1852
1853 new_set = single_set (copy);
1854 if (new_set == 0)
1855 abort ();
1856
1857 SET_DEST (new_set)
1858 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
5cd76fcd 1859 }
d8090d46
RK
1860 /* If the source and destination are the same and it
1861 has a note on it, keep the insn. */
1862 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1863 && REG_NOTES (insn) != 0)
1864 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
5cd76fcd
RS
1865 else
1866 break;
1867 }
c9734bb9
RK
1868
1869 /* If this is setting the static chain rtx, omit it. */
1870 else if (static_chain_value != 0
1871 && set != 0
1872 && GET_CODE (SET_DEST (set)) == REG
1873 && rtx_equal_p (SET_DEST (set),
1874 static_chain_incoming_rtx))
1875 break;
1876
a6dd1cb6
RK
1877 /* If this is setting the static chain pseudo, set it from
1878 the value we want to give it instead. */
1879 else if (static_chain_value != 0
c9734bb9
RK
1880 && set != 0
1881 && rtx_equal_p (SET_SRC (set),
a6dd1cb6
RK
1882 static_chain_incoming_rtx))
1883 {
c9734bb9 1884 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
a6dd1cb6 1885
c9734bb9 1886 copy = emit_move_insn (newdest, static_chain_value);
a6dd1cb6
RK
1887 static_chain_value = 0;
1888 }
5cd76fcd
RS
1889 else
1890 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
175160e7
MT
1891 /* REG_NOTES will be copied later. */
1892
1893#ifdef HAVE_cc0
1894 /* If this insn is setting CC0, it may need to look at
1895 the insn that uses CC0 to see what type of insn it is.
1896 In that case, the call to recog via validate_change will
1897 fail. So don't substitute constants here. Instead,
1898 do it when we emit the following insn.
1899
1900 For example, see the pyr.md file. That machine has signed and
1901 unsigned compares. The compare patterns must check the
1902 following branch insn to see what kind of compare to
1903 emit.
1904
1905 If the previous insn set CC0, substitute constants on it as
1906 well. */
1907 if (sets_cc0_p (PATTERN (copy)) != 0)
1908 cc0_insn = copy;
1909 else
1910 {
1911 if (cc0_insn)
1912 try_constants (cc0_insn, map);
1913 cc0_insn = 0;
1914 try_constants (copy, map);
1915 }
1916#else
1917 try_constants (copy, map);
1918#endif
1919 break;
1920
1921 case JUMP_INSN:
299b54ba
RK
1922 if (GET_CODE (PATTERN (insn)) == RETURN
1923 || (GET_CODE (PATTERN (insn)) == PARALLEL
1924 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
175160e7
MT
1925 {
1926 if (local_return_label == 0)
1927 local_return_label = gen_label_rtx ();
1928 pattern = gen_jump (local_return_label);
1929 }
1930 else
1931 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1932
1933 copy = emit_jump_insn (pattern);
1934
1935#ifdef HAVE_cc0
1936 if (cc0_insn)
1937 try_constants (cc0_insn, map);
1938 cc0_insn = 0;
1939#endif
1940 try_constants (copy, map);
1941
1942 /* If this used to be a conditional jump insn whose branch
1943 direction is now known, we must do something special. */
1944 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1945 {
1946#ifdef HAVE_cc0
1947 /* The previous insn set cc0 for us. So delete it. */
1948 delete_insn (PREV_INSN (copy));
1949#endif
1950
1951 /* If this is now a no-op, delete it. */
1952 if (map->last_pc_value == pc_rtx)
1953 {
1954 delete_insn (copy);
1955 copy = 0;
1956 }
1957 else
1958 /* Otherwise, this is an unconditional jump so we must put a
1959 BARRIER after it. We could do some dead code elimination
1960 here, but jump.c will do it just as well. */
1961 emit_barrier ();
1962 }
1963 break;
1964
1965 case CALL_INSN:
1966 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1967 copy = emit_call_insn (pattern);
1968
d7e09326
RK
1969 /* Because the USAGE information potentially contains objects other
1970 than hard registers, we need to copy it. */
db3cf6fb
MS
1971 CALL_INSN_FUNCTION_USAGE (copy)
1972 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
d7e09326 1973
175160e7
MT
1974#ifdef HAVE_cc0
1975 if (cc0_insn)
1976 try_constants (cc0_insn, map);
1977 cc0_insn = 0;
1978#endif
1979 try_constants (copy, map);
1980
1981 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1982 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1983 map->const_equiv_map[i] = 0;
1984 break;
1985
1986 case CODE_LABEL:
1f3d3a31
JL
1987 copy =
1988 emit_label (get_label_from_map(map,
1989 CODE_LABEL_NUMBER (insn)));
bfa30b22 1990 LABEL_NAME (copy) = LABEL_NAME (insn);
175160e7
MT
1991 map->const_age++;
1992 break;
1993
1994 case BARRIER:
1995 copy = emit_barrier ();
1996 break;
1997
1998 case NOTE:
1999 /* It is important to discard function-end and function-beg notes,
2000 so we have only one of each in the current function.
2001 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2002 deleted these in the copy used for continuing compilation,
2003 not the copy used for inlining). */
2004 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2005 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2006 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
6adb4e3a
MS
2007 {
2008 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
2009 if (copy && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2010 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2011 {
1f3d3a31
JL
2012 rtx label =
2013 get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
6adb4e3a
MS
2014
2015 /* We have to forward these both to match the new exception
2016 region. */
2017 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2018 }
2019 }
175160e7
MT
2020 else
2021 copy = 0;
2022 break;
2023
2024 default:
2025 abort ();
2026 break;
2027 }
2028
2029 if (copy)
2030 RTX_INTEGRATED_P (copy) = 1;
2031
2032 map->insn_map[INSN_UID (insn)] = copy;
2033 }
2034
e62d14be
RS
2035 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2036 from parameters can be substituted in. These are the only ones that
2037 are valid across the entire function. */
2038 map->const_age++;
175160e7
MT
2039 for (insn = insns; insn; insn = NEXT_INSN (insn))
2040 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
db25e492
RS
2041 && map->insn_map[INSN_UID (insn)]
2042 && REG_NOTES (insn))
2043 {
2044 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2045 /* We must also do subst_constants, in case one of our parameters
2046 has const type and constant value. */
2047 subst_constants (&tem, NULL_RTX, map);
2048 apply_change_group ();
2049 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2050 }
175160e7
MT
2051
2052 if (local_return_label)
2053 emit_label (local_return_label);
2054
2132517d
RK
2055 /* Restore the stack pointer if we saved it above. */
2056 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2057 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2058
175160e7
MT
2059 /* Make copies of the decls of the symbols in the inline function, so that
2060 the copies of the variables get declared in the current function. Set
2061 up things so that lookup_static_chain knows to interpret registers
2062 in SAVE_EXPRs for TYPE_SIZEs as local. */
2063
2064 inline_function_decl = fndecl;
175160e7 2065 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
8ef63e62 2066 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
175160e7
MT
2067 inline_function_decl = 0;
2068
8ef63e62
RS
2069 /* End the scope containing the copied formal parameter variables
2070 and copied LABEL_DECLs. */
175160e7
MT
2071
2072 expand_end_bindings (getdecls (), 1, 1);
81578142 2073 block = poplevel (1, 1, 0);
637c5064
RS
2074 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2075 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
175160e7 2076 poplevel (0, 0, 0);
9b53bc83
DE
2077
2078 /* Must mark the line number note after inlined functions as a repeat, so
2079 that the test coverage code can avoid counting the call twice. This
2080 just tells the code to ignore the immediately following line note, since
2081 there already exists a copy of this note before the expanded inline call.
2082 This line number note is still needed for debugging though, so we can't
2083 delete it. */
2084 if (flag_test_coverage)
2085 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2086
175160e7
MT
2087 emit_line_note (input_filename, lineno);
2088
2089 if (structure_value_addr)
1b6d951b 2090 {
38a448ca
RH
2091 target = gen_rtx_MEM (TYPE_MODE (type),
2092 memory_address (TYPE_MODE (type),
2093 structure_value_addr));
1b6d951b
RS
2094 MEM_IN_STRUCT_P (target) = 1;
2095 }
3bb1329e
BK
2096
2097 /* Make sure we free the things we explicitly allocated with xmalloc. */
51cbea76
JL
2098 if (real_label_map)
2099 free (real_label_map);
3bb1329e 2100
175160e7
MT
2101 return target;
2102}
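/* The following is a minimal standalone sketch (not part of the
   original file; all names and types below are invented) of the
   const_equiv_map / const_age_map discipline used throughout
   expand_inline_function: each pseudo register number may record a
   constant equivalent together with the "age" at which it was
   recorded, and an entry is trusted only while its age is at least
   the map's current age.  */

struct equiv_map
{
  void **equiv;			/* constant equivalent per regno, or 0 */
  unsigned *age;		/* age stamp per regno */
  int size;			/* number of entries in both arrays */
  unsigned cur_age;		/* bumped to retire older entries */
};

static void
record_equiv (map, regno, value)
     struct equiv_map *map;
     int regno;
     void *value;
{
  if (regno < map->size)
    {
      map->equiv[regno] = value;
      map->age[regno] = map->cur_age;
    }
}

static void *
lookup_equiv (map, regno)
     struct equiv_map *map;
     int regno;
{
  if (regno < map->size
      && map->equiv[regno] != 0
      && map->age[regno] >= map->cur_age)
    return map->equiv[regno];
  return 0;
}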
2103\f
2104/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2105 push all of those decls and give each one the corresponding home. */
2106
2107static void
2108integrate_parm_decls (args, map, arg_vector)
2109 tree args;
2110 struct inline_remap *map;
2111 rtvec arg_vector;
2112{
2113 register tree tail;
2114 register int i;
2115
2116 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2117 {
2118 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2119 TREE_TYPE (tail));
2120 rtx new_decl_rtl
2121 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2122
a76386d8
RK
2123 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2124 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2125 here, but that's going to require some more work. */
2126 /* DECL_INCOMING_RTL (decl) = ?; */
175160e7
MT
2127 /* These args would always appear unused, if not for this. */
2128 TREE_USED (decl) = 1;
2129 /* Prevent warning for shadowing with these. */
c5caa350 2130 DECL_ABSTRACT_ORIGIN (decl) = tail;
175160e7
MT
2131 pushdecl (decl);
2132 /* Fully instantiate the address with the equivalent form so that the
2133 debugging information contains the actual register, instead of the
2134 virtual register. Do this by not passing an insn to
2135 subst_constants. */
02bea8a8 2136 subst_constants (&new_decl_rtl, NULL_RTX, map);
175160e7
MT
2137 apply_change_group ();
2138 DECL_RTL (decl) = new_decl_rtl;
2139 }
2140}
2141
2142/* Given a BLOCK node LET, push decls and levels so as to construct in the
2143 current function a tree of contexts isomorphic to the one that is given.
2144
2145 LEVEL indicates how far down into the BLOCK tree is the node we are
8ef63e62 2146 currently traversing. It is always zero except for recursive calls.
175160e7 2147
858a47b1 2148 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
175160e7 2149 registers used in the DECL_RTL field should be remapped. If it is zero,
8ef63e62 2150 no mapping is necessary. */
175160e7
MT
2151
2152static void
8ef63e62 2153integrate_decl_tree (let, level, map)
175160e7
MT
2154 tree let;
2155 int level;
2156 struct inline_remap *map;
175160e7
MT
2157{
2158 tree t, node;
2159
8ef63e62
RS
2160 if (level > 0)
2161 pushlevel (0);
175160e7
MT
2162
2163 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2164 {
f6bad6ff
JM
2165 tree d;
2166
2167 push_obstacks_nochange ();
2168 saveable_allocation ();
2169 d = copy_node (t);
2170 pop_obstacks ();
2171
8ef63e62 2172 if (DECL_RTL (t) != 0)
175160e7
MT
2173 {
2174 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2175 /* Fully instantiate the address with the equivalent form so that the
2176 debugging information contains the actual register, instead of the
2177 virtual register. Do this by not passing an insn to
2178 subst_constants. */
02bea8a8 2179 subst_constants (&DECL_RTL (d), NULL_RTX, map);
175160e7
MT
2180 apply_change_group ();
2181 }
175160e7
MT
2182 /* These args would always appear unused, if not for this. */
2183 TREE_USED (d) = 1;
bd95070a
JW
2184 /* Prevent warning for shadowing with these. */
2185 DECL_ABSTRACT_ORIGIN (d) = t;
f6bad6ff
JM
2186
2187 if (DECL_LANG_SPECIFIC (d))
2188 copy_lang_decl (d);
2189
bd95070a 2190 pushdecl (d);
175160e7
MT
2191 }
2192
2193 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
8ef63e62 2194 integrate_decl_tree (t, level + 1, map);
175160e7 2195
8ef63e62
RS
2196 if (level > 0)
2197 {
2198 node = poplevel (1, 0, 0);
2199 if (node)
81578142
RS
2200 {
2201 TREE_USED (node) = TREE_USED (let);
2202 BLOCK_ABSTRACT_ORIGIN (node) = let;
2203 }
8ef63e62 2204 }
175160e7 2205}
f6135b20
JW
2206
2207/* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2208 through save_constants. */
2209
2210static void
2211save_constants_in_decl_trees (let)
2212 tree let;
2213{
2214 tree t;
2215
2216 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2217 if (DECL_RTL (t) != 0)
2218 save_constants (&DECL_RTL (t));
2219
2220 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2221 save_constants_in_decl_trees (t);
2222}
175160e7
MT
2223\f
2224/* Create a new copy of an rtx.
2225 Recursively copies the operands of the rtx,
2226 except for those few rtx codes that are sharable.
2227
2228 We always return an rtx that is similar to the incoming rtx, with the
2229 exception of possibly changing a REG to a SUBREG or vice versa. No
2230 rtl is ever emitted.
2231
2232 Handle constants that need to be placed in the constant pool by
2233 calling `force_const_mem'. */
2234
2235rtx
2236copy_rtx_and_substitute (orig, map)
2237 register rtx orig;
2238 struct inline_remap *map;
2239{
2240 register rtx copy, temp;
2241 register int i, j;
2242 register RTX_CODE code;
2243 register enum machine_mode mode;
2244 register char *format_ptr;
2245 int regno;
2246
2247 if (orig == 0)
2248 return 0;
2249
2250 code = GET_CODE (orig);
2251 mode = GET_MODE (orig);
2252
2253 switch (code)
2254 {
2255 case REG:
2256 /* If the stack pointer register shows up, it must be part of
2257 stack-adjustments (*not* because we eliminated the frame pointer!).
2258 Small hard registers are returned as-is. Pseudo-registers
2259 go through their `reg_map'. */
2260 regno = REGNO (orig);
2261 if (regno <= LAST_VIRTUAL_REGISTER)
2262 {
2263 /* Some hard registers are also mapped,
2264 but others are not translated. */
2265 if (map->reg_map[regno] != 0)
2266 return map->reg_map[regno];
2267
2268 /* If this is the virtual frame pointer, make space in current
2269 function's stack frame for the stack frame of the inline function.
2270
2271 Copy the address of this area into a pseudo. Map
2272 virtual_stack_vars_rtx to this pseudo and set up a constant
2273 equivalence for it to be the address. This will substitute the
2274 address into insns where it can be substituted and use the new
2275 pseudo where it can't. */
2276 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2277 {
2278 rtx loc, seq;
2279 int size = DECL_FRAME_SIZE (map->fndecl);
175160e7 2280
3e42d56b
DE
2281#ifdef FRAME_GROWS_DOWNWARD
2282 /* In this case, virtual_stack_vars_rtx points to one byte
2283 higher than the top of the frame area. So make sure we
2284 allocate a big enough chunk to keep the frame pointer
2285 aligned like a real one. */
2286 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2287#endif
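	      /* For instance (illustrative numbers only): with size == 37
	         and BIGGEST_ALIGNMENT / BITS_PER_UNIT == 8,
	         CEIL_ROUND (37, 8) == ((37 + 7) & ~7) == 40.  */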
175160e7
MT
2288 start_sequence ();
2289 loc = assign_stack_temp (BLKmode, size, 1);
2290 loc = XEXP (loc, 0);
2291#ifdef FRAME_GROWS_DOWNWARD
2292 /* In this case, virtual_stack_vars_rtx points to one byte
2293 higher than the top of the frame area. So compute the offset
3e42d56b
DE
2294 to one byte higher than our substitute frame. */
2295 loc = plus_constant (loc, size);
175160e7 2296#endif
59b2d722
RK
2297 map->reg_map[regno] = temp
2298 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2b145ea8 2299
12307ca2
RK
2300#ifdef STACK_BOUNDARY
2301 mark_reg_pointer (map->reg_map[regno],
2302 STACK_BOUNDARY / BITS_PER_UNIT);
2303#endif
2304
2b145ea8
RK
2305 if (REGNO (temp) < map->const_equiv_map_size)
2306 {
2307 map->const_equiv_map[REGNO (temp)] = loc;
2308 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2309 }
175160e7
MT
2310
2311 seq = gen_sequence ();
2312 end_sequence ();
2313 emit_insn_after (seq, map->insns_at_start);
5c23c401 2314 return temp;
175160e7
MT
2315 }
2316 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2317 {
2318 /* Do the same for a block to contain any arguments referenced
0f41302f 2319 in memory. */
175160e7
MT
2320 rtx loc, seq;
2321 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2322
2323 start_sequence ();
2324 loc = assign_stack_temp (BLKmode, size, 1);
2325 loc = XEXP (loc, 0);
931553d8
RS
2326 /* When arguments grow downward, the virtual incoming
2327 args pointer points to the top of the argument block,
0f41302f 2328 so the remapped location better do the same. */
931553d8
RS
2329#ifdef ARGS_GROW_DOWNWARD
2330 loc = plus_constant (loc, size);
2331#endif
59b2d722
RK
2332 map->reg_map[regno] = temp
2333 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2b145ea8 2334
12307ca2
RK
2335#ifdef STACK_BOUNDARY
2336 mark_reg_pointer (map->reg_map[regno],
2337 STACK_BOUNDARY / BITS_PER_UNIT);
2338#endif
2339
2b145ea8
RK
2340 if (REGNO (temp) < map->const_equiv_map_size)
2341 {
2342 map->const_equiv_map[REGNO (temp)] = loc;
2343 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2344 }
175160e7
MT
2345
2346 seq = gen_sequence ();
2347 end_sequence ();
2348 emit_insn_after (seq, map->insns_at_start);
5c23c401 2349 return temp;
175160e7
MT
2350 }
2351 else if (REG_FUNCTION_VALUE_P (orig))
2352 {
2353 /* This is a reference to the function return value. If
2354 the function doesn't have a return value, error. If the
2355 mode doesn't agree, make a SUBREG. */
2356 if (map->inline_target == 0)
2357 /* Must be unrolling loops or replicating code if we
2358 reach here, so return the register unchanged. */
2359 return orig;
2360 else if (mode != GET_MODE (map->inline_target))
293e1467 2361 return gen_lowpart (mode, map->inline_target);
175160e7
MT
2362 else
2363 return map->inline_target;
2364 }
2365 return orig;
2366 }
2367 if (map->reg_map[regno] == NULL)
2368 {
2369 map->reg_map[regno] = gen_reg_rtx (mode);
2370 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2371 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2372 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2373 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
12307ca2
RK
2374
2375 if (map->regno_pointer_flag[regno])
2376 mark_reg_pointer (map->reg_map[regno],
2377 map->regno_pointer_align[regno]);
175160e7
MT
2378 }
2379 return map->reg_map[regno];
2380
2381 case SUBREG:
2382 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2383 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2384 if (GET_CODE (copy) == SUBREG)
38a448ca
RH
2385 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2386 SUBREG_WORD (orig) + SUBREG_WORD (copy));
bc2eeab2 2387 else if (GET_CODE (copy) == CONCAT)
340aa7f6 2388 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
175160e7 2389 else
38a448ca
RH
2390 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2391 SUBREG_WORD (orig));
175160e7 2392
e9a25f70 2393 case ADDRESSOF:
38a448ca
RH
2394 copy = gen_rtx_ADDRESSOF (mode,
2395 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
e9a25f70
JL
2396 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2397 regno = ADDRESSOF_REGNO (orig);
2398 if (map->reg_map[regno])
2399 regno = REGNO (map->reg_map[regno]);
2400 else if (regno > LAST_VIRTUAL_REGISTER)
2401 {
2402 temp = XEXP (orig, 0);
2403 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2404 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2405 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2406 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2407 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2408
2409 if (map->regno_pointer_flag[regno])
2410 mark_reg_pointer (map->reg_map[regno],
2411 map->regno_pointer_align[regno]);
2412 regno = REGNO (map->reg_map[regno]);
2413 }
2414 ADDRESSOF_REGNO (copy) = regno;
2415 return copy;
2416
175160e7
MT
2417 case USE:
2418 case CLOBBER:
2419 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
d632e927
RS
2420 to (use foo) if the original insn didn't have a subreg.
2421 Removing the subreg distorts the VAX movstrhi pattern
2422 by changing the mode of an operand. */
175160e7 2423 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
d632e927 2424 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
175160e7 2425 copy = SUBREG_REG (copy);
38a448ca 2426 return gen_rtx_fmt_e (code, VOIDmode, copy);
175160e7
MT
2427
2428 case CODE_LABEL:
1f3d3a31 2429 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
175160e7 2430 = LABEL_PRESERVE_P (orig);
1f3d3a31 2431 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
175160e7
MT
2432
2433 case LABEL_REF:
38a448ca
RH
2434 copy = gen_rtx_LABEL_REF (mode,
2435 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2436 : get_label_from_map (map,
2437 CODE_LABEL_NUMBER (XEXP (orig, 0))));
175160e7 2438 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
c1ceaaa6
RK
2439
2440 /* The fact that this label was previously nonlocal does not mean
2441 it still is, so we must check if it is within the range of
2442 this function's labels. */
2443 LABEL_REF_NONLOCAL_P (copy)
2444 = (LABEL_REF_NONLOCAL_P (orig)
2445 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2446 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
81d57b8e
RK
2447
2448 /* If we have made a nonlocal label local, it means that this
9faa82d8 2449 inlined call will be referring to our nonlocal goto handler.
81d57b8e
RK
2450 So make sure we create one for this block; we normally would
2451 not since this is not otherwise considered a "call". */
2452 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2453 function_call_count++;
2454
175160e7
MT
2455 return copy;
2456
2457 case PC:
2458 case CC0:
2459 case CONST_INT:
f543676f
JW
2460 return orig;
2461
175160e7 2462 case SYMBOL_REF:
f543676f
JW
2463 /* Symbols which represent the address of a label stored in the constant
2464 pool must be modified to point to a constant pool entry for the
2465 remapped label. Otherwise, symbols are returned unchanged. */
2466 if (CONSTANT_POOL_ADDRESS_P (orig))
2467 {
2468 rtx constant = get_pool_constant (orig);
2469 if (GET_CODE (constant) == LABEL_REF)
15e13f2c 2470 return XEXP (force_const_mem (GET_MODE (orig),
c1ceaaa6
RK
2471 copy_rtx_and_substitute (constant,
2472 map)),
2473 0);
f543676f 2474 }
c1ceaaa6 2475
175160e7
MT
2476 return orig;
2477
2478 case CONST_DOUBLE:
2479 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2480 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2481 duplicate of a CONST_DOUBLE we have already seen. */
2482 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2483 {
2484 REAL_VALUE_TYPE d;
2485
2486 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
81fbaa41 2487 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
175160e7
MT
2488 }
2489 else
2490 return immed_double_const (CONST_DOUBLE_LOW (orig),
2491 CONST_DOUBLE_HIGH (orig), VOIDmode);
2492
2493 case CONST:
2494 /* Make new constant pool entry for a constant
2495 that was in the pool of the inline function. */
2496 if (RTX_INTEGRATED_P (orig))
2497 {
2498 /* If this was an address of a constant pool entry that itself
2499 had to be placed in the constant pool, it might not be a
2500 valid address. So the recursive call below might turn it
2501 into a register. In that case, it isn't a constant any
2502 more, so return it. This has the potential of changing a
2503 MEM into a REG, but we'll assume that it is safe. */
2504 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2505 if (! CONSTANT_P (temp))
2506 return temp;
2507 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2508 }
2509 break;
2510
2511 case ADDRESS:
2512 /* If from constant pool address, make new constant pool entry and
2513 return its address. */
2514 if (! RTX_INTEGRATED_P (orig))
2515 abort ();
2516
15e13f2c
RK
2517 temp
2518 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2519 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2520 map));
175160e7
MT
2521
2522#if 0
2523 /* Legitimizing the address here is incorrect.
2524
2525 The only ADDRESS rtx's that can reach here are ones created by
d7084298 2526 save_constants. Hence the operand of the ADDRESS is always valid
175160e7 2527 in this position of the instruction, since the original rtx without
d7084298 2528 the ADDRESS was valid.
175160e7
MT
2529
2530 The reason we don't legitimize the address here is that on the
2531 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2532 This code forces the operand of the address to a register, which
2533 fails because we can not take the HIGH part of a register.
2534
2535 Also, change_address may create new registers. These registers
2536 will not have valid reg_map entries. This can cause try_constants()
2537 to fail because it assumes that all registers in the rtx have valid
2538 reg_map entries, and it may end up replacing one of these new
0f41302f 2539 registers with junk. */
175160e7
MT
2540
2541 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2542 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2543#endif
2544
15e13f2c
RK
2545 temp = XEXP (temp, 0);
2546
2547#ifdef POINTERS_EXTEND_UNSIGNED
2548 if (GET_MODE (temp) != GET_MODE (orig))
2549 temp = convert_memory_address (GET_MODE (orig), temp);
2550#endif
2551
2552 return temp;
175160e7
MT
2553
2554 case ASM_OPERANDS:
2555 /* If a single asm insn contains multiple output operands
2556 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2557 We must make sure that the copied insn continues to share it. */
2558 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2559 {
2560 copy = rtx_alloc (ASM_OPERANDS);
81d82304 2561 copy->volatil = orig->volatil;
175160e7
MT
2562 XSTR (copy, 0) = XSTR (orig, 0);
2563 XSTR (copy, 1) = XSTR (orig, 1);
2564 XINT (copy, 2) = XINT (orig, 2);
2565 XVEC (copy, 3) = map->copy_asm_operands_vector;
2566 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2567 XSTR (copy, 5) = XSTR (orig, 5);
2568 XINT (copy, 6) = XINT (orig, 6);
2569 return copy;
2570 }
2571 break;
2572
2573 case CALL:
2574 /* This is given special treatment because the first
2575 operand of a CALL is a (MEM ...) which may get
2576 forced into a register for cse. This is undesirable
2577 if function-address cse isn't wanted or if we won't do cse. */
2578#ifndef NO_FUNCTION_CSE
2579 if (! (optimize && ! flag_no_function_cse))
2580#endif
38a448ca
RH
2581 return gen_rtx_CALL (GET_MODE (orig),
2582 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2583 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
175160e7
MT
2584 copy_rtx_and_substitute (XEXP (orig, 1), map));
2585 break;
2586
2587#if 0
2588 /* Must be ifdefed out for loop unrolling to work. */
2589 case RETURN:
2590 abort ();
2591#endif
2592
2593 case SET:
2594 /* If this is setting fp or ap, it means that we have a nonlocal goto.
e9a25f70 2595 Adjust the setting by the offset of the area we made.
175160e7
MT
2596 If the nonlocal goto is into the current function,
2597 this will result in unnecessarily bad code, but should work. */
2598 if (SET_DEST (orig) == virtual_stack_vars_rtx
2599 || SET_DEST (orig) == virtual_incoming_args_rtx)
e9a25f70
JL
2600 {
2601 /* In case a translation hasn't occurred already, make one now. */
2602 rtx junk = copy_rtx_and_substitute (SET_DEST (orig), map);
2603 rtx equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2604 rtx equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2605 HOST_WIDE_INT loc_offset
2606 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2607
38a448ca
RH
2608 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2609 force_operand
2610 (plus_constant
2611 (copy_rtx_and_substitute (SET_SRC (orig), map),
2612 - loc_offset),
2613 NULL_RTX));
e9a25f70 2614 }
175160e7
MT
2615 break;
2616
2617 case MEM:
2618 copy = rtx_alloc (MEM);
2619 PUT_MODE (copy, mode);
2620 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2621 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2622 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
a70f7bb2
JW
2623
2624 /* If doing function inlining, this MEM might not be const in the
2625 function that it is being inlined into, and thus may not be
2626 unchanging after function inlining. Constant pool references are
2627 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2628 for them. */
2629 if (! map->integrating)
2630 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2631
175160e7 2632 return copy;
e9a25f70
JL
2633
2634 default:
2635 break;
175160e7
MT
2636 }
2637
2638 copy = rtx_alloc (code);
2639 PUT_MODE (copy, mode);
2640 copy->in_struct = orig->in_struct;
2641 copy->volatil = orig->volatil;
2642 copy->unchanging = orig->unchanging;
2643
2644 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2645
2646 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2647 {
2648 switch (*format_ptr++)
2649 {
2650 case '0':
e9a25f70 2651 XEXP (copy, i) = XEXP (orig, i);
175160e7
MT
2652 break;
2653
2654 case 'e':
2655 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2656 break;
2657
2658 case 'u':
2659 /* Change any references to old-insns to point to the
2660 corresponding copied insns. */
2661 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2662 break;
2663
2664 case 'E':
2665 XVEC (copy, i) = XVEC (orig, i);
2666 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2667 {
2668 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2669 for (j = 0; j < XVECLEN (copy, i); j++)
2670 XVECEXP (copy, i, j)
2671 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2672 }
2673 break;
2674
02bea8a8
RK
2675 case 'w':
2676 XWINT (copy, i) = XWINT (orig, i);
2677 break;
2678
175160e7
MT
2679 case 'i':
2680 XINT (copy, i) = XINT (orig, i);
2681 break;
2682
2683 case 's':
2684 XSTR (copy, i) = XSTR (orig, i);
2685 break;
2686
2687 default:
2688 abort ();
2689 }
2690 }
2691
2692 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2693 {
2694 map->orig_asm_operands_vector = XVEC (orig, 3);
2695 map->copy_asm_operands_vector = XVEC (copy, 3);
2696 map->copy_asm_constraints_vector = XVEC (copy, 4);
2697 }
2698
2699 return copy;
2700}
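/* A minimal standalone sketch (not part of the original file; the
   types below are invented stand-ins for GCC's rtx) of the
   format-driven walk shared by copy_rtx_and_substitute,
   subst_constants and restore_constants: each expression code has a
   format string in which 'e' marks a sub-expression operand and 'E'
   marks a vector of sub-expressions; other letters are leaf data
   that need no recursion.  */

struct expr;

union operand
{
  struct expr *e;		/* an 'e' operand */
  struct
    {
      struct expr **elts;
      int len;
    } v;			/* an 'E' operand */
  int i;			/* an 'i' operand */
};

struct expr
{
  char *format;			/* one letter per operand */
  union operand *ops;
};

static void
walk (x, visit)
     struct expr *x;
     void (*visit) ();
{
  int i, j;

  if (x == 0)
    return;

  (*visit) (x);
  for (i = 0; x->format[i]; i++)
    switch (x->format[i])
      {
      case 'e':
	walk (x->ops[i].e, visit);
	break;
      case 'E':
	for (j = 0; j < x->ops[i].v.len; j++)
	  walk (x->ops[i].v.elts[j], visit);
	break;
      default:
	break;			/* leaf operands are left alone */
      }
}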
2701\f
2702/* Substitute known constant values into INSN, if that is valid. */
2703
2704void
2705try_constants (insn, map)
2706 rtx insn;
2707 struct inline_remap *map;
2708{
2709 int i;
2710
2711 map->num_sets = 0;
2712 subst_constants (&PATTERN (insn), insn, map);
2713
2714 /* Apply the changes if they are valid; otherwise discard them. */
2715 apply_change_group ();
2716
2717 /* Show we don't know the value of anything stored or clobbered. */
2718 note_stores (PATTERN (insn), mark_stores);
2719 map->last_pc_value = 0;
2720#ifdef HAVE_cc0
2721 map->last_cc0_value = 0;
2722#endif
2723
2724 /* Set up any constant equivalences made in this insn. */
2725 for (i = 0; i < map->num_sets; i++)
2726 {
2727 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2728 {
2729 int regno = REGNO (map->equiv_sets[i].dest);
2730
2b145ea8
RK
2731 if (regno < map->const_equiv_map_size
2732 && (map->const_equiv_map[regno] == 0
2733 /* Following clause is a hack to make case work where GNU C++
2734 reassigns a variable to make cse work right. */
2735 || ! rtx_equal_p (map->const_equiv_map[regno],
2736 map->equiv_sets[i].equiv)))
175160e7
MT
2737 {
2738 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2739 map->const_age_map[regno] = map->const_age;
2740 }
2741 }
2742 else if (map->equiv_sets[i].dest == pc_rtx)
2743 map->last_pc_value = map->equiv_sets[i].equiv;
2744#ifdef HAVE_cc0
2745 else if (map->equiv_sets[i].dest == cc0_rtx)
2746 map->last_cc0_value = map->equiv_sets[i].equiv;
2747#endif
2748 }
2749}
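/* A standalone sketch (invented names, not GCC's actual
   validate_change interface) of the tentative-change protocol that
   try_constants depends on: substitutions are first applied
   tentatively and remembered, then either committed as a group or
   rolled back together if the resulting insn is not valid.  */

#define MAX_CHANGES 32

struct change
{
  int *loc;			/* where the change was made */
  int old;			/* previous contents, for rollback */
};

static struct change pending[MAX_CHANGES];
static int num_pending;

static int
queue_change (loc, new_val)
     int *loc;
     int new_val;
{
  if (num_pending >= MAX_CHANGES)
    return 0;
  pending[num_pending].loc = loc;
  pending[num_pending].old = *loc;
  num_pending++;
  *loc = new_val;		/* applied tentatively */
  return 1;
}

static void
cancel_pending (since)
     int since;
{
  while (num_pending > since)	/* undo, newest first */
    {
      num_pending--;
      *pending[num_pending].loc = pending[num_pending].old;
    }
}

static void
commit_pending ()
{
  num_pending = 0;		/* keep the tentative values */
}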
2750\f
2751/* Substitute known constants for pseudo regs in the contents of LOC,
2752 which are part of INSN.
d45cf215 2753 If INSN is zero, the substitution should always be done (this is used to
175160e7
MT
2754 update DECL_RTL).
2755 These changes are taken out by try_constants if the result is not valid.
2756
2757 Note that we are more concerned with determining when the result of a SET
2758 is a constant, for further propagation, than actually inserting constants
2759 into insns; cse will do the latter task better.
2760
2761 This function is also used to adjust the address of items previously addressed
2762 via the virtual stack variable or virtual incoming arguments registers. */
2763
2764static void
2765subst_constants (loc, insn, map)
2766 rtx *loc;
2767 rtx insn;
2768 struct inline_remap *map;
2769{
2770 rtx x = *loc;
2771 register int i;
2772 register enum rtx_code code;
2773 register char *format_ptr;
2774 int num_changes = num_validated_changes ();
2775 rtx new = 0;
2776 enum machine_mode op0_mode;
2777
2778 code = GET_CODE (x);
2779
2780 switch (code)
2781 {
2782 case PC:
2783 case CONST_INT:
2784 case CONST_DOUBLE:
2785 case SYMBOL_REF:
2786 case CONST:
2787 case LABEL_REF:
2788 case ADDRESS:
2789 return;
2790
2791#ifdef HAVE_cc0
2792 case CC0:
2793 validate_change (insn, loc, map->last_cc0_value, 1);
2794 return;
2795#endif
2796
2797 case USE:
2798 case CLOBBER:
2799 /* The only thing we can do with a USE or CLOBBER is possibly do
2800 some substitutions in a MEM within it. */
2801 if (GET_CODE (XEXP (x, 0)) == MEM)
2802 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2803 return;
2804
2805 case REG:
2806 /* Substitute for parms and known constants. Don't replace
2807 hard regs used as user variables with constants. */
2808 {
2809 int regno = REGNO (x);
c66e0741 2810
175160e7 2811 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
c66e0741 2812 && regno < map->const_equiv_map_size
175160e7
MT
2813 && map->const_equiv_map[regno] != 0
2814 && map->const_age_map[regno] >= map->const_age)
2815 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2816 return;
2817 }
2818
2819 case SUBREG:
637c5064
RS
2820 /* SUBREG applied to something other than a reg
2821 should be treated as ordinary, since that must
2822 be a special hack and we don't know how to treat it specially.
2823 Consider for example mulsidi3 in m68k.md.
2824 Ordinary SUBREG of a REG needs this special treatment. */
2825 if (GET_CODE (SUBREG_REG (x)) == REG)
2826 {
2827 rtx inner = SUBREG_REG (x);
2828 rtx new = 0;
175160e7 2829
637c5064
RS
2830 /* We can't call subst_constants on &SUBREG_REG (x) because any
2831 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2832 see what is inside, try to form the new SUBREG and see if that is
2833 valid. We handle two cases: extracting a full word in an
2834 integral mode and extracting the low part. */
2835 subst_constants (&inner, NULL_RTX, map);
175160e7 2836
637c5064
RS
2837 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2838 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2839 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2840 new = operand_subword (inner, SUBREG_WORD (x), 0,
2841 GET_MODE (SUBREG_REG (x)));
175160e7 2842
26986265 2843 cancel_changes (num_changes);
637c5064
RS
2844 if (new == 0 && subreg_lowpart_p (x))
2845 new = gen_lowpart_common (GET_MODE (x), inner);
175160e7 2846
637c5064
RS
2847 if (new)
2848 validate_change (insn, loc, new, 1);
175160e7 2849
637c5064
RS
2850 return;
2851 }
2852 break;
175160e7
MT
2853
2854 case MEM:
2855 subst_constants (&XEXP (x, 0), insn, map);
2856
2857 /* If a memory address got spoiled, change it back. */
2858 if (insn != 0 && num_validated_changes () != num_changes
2859 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2860 cancel_changes (num_changes);
2861 return;
2862
2863 case SET:
2864 {
2865 /* Substitute constants in our source, and in any arguments to a
2866 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2867 itself. */
2868 rtx *dest_loc = &SET_DEST (x);
2869 rtx dest = *dest_loc;
2870 rtx src, tem;
2871
2872 subst_constants (&SET_SRC (x), insn, map);
2873 src = SET_SRC (x);
2874
2875 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
175160e7
MT
2876 || GET_CODE (*dest_loc) == SUBREG
2877 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2878 {
2879 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2880 {
2881 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2882 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2883 }
2884 dest_loc = &XEXP (*dest_loc, 0);
2885 }
2886
91594e43
RS
2887 /* Do substitute in the address of a destination in memory. */
2888 if (GET_CODE (*dest_loc) == MEM)
2889 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2890
175160e7
MT
2891 /* Check for the case where DEST is a SUBREG, both it and the underlying
2892 register are no wider than one word, and the SUBREG has the wider mode.
2893 In that case, we are really setting the underlying register to the
2894 source converted to the mode of DEST. So indicate that. */
2895 if (GET_CODE (dest) == SUBREG
2896 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2897 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2898 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2899 <= GET_MODE_SIZE (GET_MODE (dest)))
e2eb57b7
RK
2900 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2901 src)))
175160e7
MT
2902 src = tem, dest = SUBREG_REG (dest);
2903
2904 /* If storing a recognizable value, save it for later recording. */
2905 if ((map->num_sets < MAX_RECOG_OPERANDS)
2906 && (CONSTANT_P (src)
c9734bb9 2907 || (GET_CODE (src) == REG
83b93f40
RK
2908 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2909 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
175160e7
MT
2910 || (GET_CODE (src) == PLUS
2911 && GET_CODE (XEXP (src, 0)) == REG
83b93f40
RK
2912 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2913 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
175160e7
MT
2914 && CONSTANT_P (XEXP (src, 1)))
2915 || GET_CODE (src) == COMPARE
2916#ifdef HAVE_cc0
2917 || dest == cc0_rtx
2918#endif
2919 || (dest == pc_rtx
2920 && (src == pc_rtx || GET_CODE (src) == RETURN
2921 || GET_CODE (src) == LABEL_REF))))
2922 {
2923 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2924 it will cause us to save the COMPARE with any constants
2925 substituted, which is what we want for later. */
2926 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2927 map->equiv_sets[map->num_sets++].dest = dest;
2928 }
175160e7 2929 }
e9a25f70
JL
2930 return;
2931
2932 default:
2933 break;
175160e7
MT
2934 }
2935
2936 format_ptr = GET_RTX_FORMAT (code);
2937
2938 /* If the first operand is an expression, save its mode for later. */
2939 if (*format_ptr == 'e')
2940 op0_mode = GET_MODE (XEXP (x, 0));
2941
2942 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2943 {
2944 switch (*format_ptr++)
2945 {
2946 case '0':
2947 break;
2948
2949 case 'e':
2950 if (XEXP (x, i))
2951 subst_constants (&XEXP (x, i), insn, map);
2952 break;
2953
2954 case 'u':
2955 case 'i':
2956 case 's':
02bea8a8 2957 case 'w':
175160e7
MT
2958 break;
2959
2960 case 'E':
2961 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2962 {
2963 int j;
2964 for (j = 0; j < XVECLEN (x, i); j++)
2965 subst_constants (&XVECEXP (x, i, j), insn, map);
2966 }
2967 break;
2968
2969 default:
2970 abort ();
2971 }
2972 }
2973
2974 /* If this is a commutative operation, move a constant to the second
2975 operand unless the second operand is already a CONST_INT. */
2976 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2977 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2978 {
2979 rtx tem = XEXP (x, 0);
2980 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2981 validate_change (insn, &XEXP (x, 1), tem, 1);
2982 }
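  /* E.g. (plus (const_int 4) (reg 65)) becomes
     (plus (reg 65) (const_int 4)), the canonical operand order.  */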
2983
2984 /* Simplify the expression in case we put in some constants. */
2985 switch (GET_RTX_CLASS (code))
2986 {
2987 case '1':
2988 new = simplify_unary_operation (code, GET_MODE (x),
2989 XEXP (x, 0), op0_mode);
2990 break;
2991
2992 case '<':
2993 {
2994 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2995 if (op_mode == VOIDmode)
2996 op_mode = GET_MODE (XEXP (x, 1));
2997 new = simplify_relational_operation (code, op_mode,
2998 XEXP (x, 0), XEXP (x, 1));
b565a316
RK
2999#ifdef FLOAT_STORE_FLAG_VALUE
3000 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3001 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
81fbaa41
RK
3002 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3003 GET_MODE (x)));
b565a316 3004#endif
175160e7
MT
3005 break;
3006 }
3007
3008 case '2':
3009 case 'c':
3010 new = simplify_binary_operation (code, GET_MODE (x),
3011 XEXP (x, 0), XEXP (x, 1));
3012 break;
3013
3014 case 'b':
3015 case '3':
3016 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3017 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3018 break;
3019 }
3020
3021 if (new)
3022 validate_change (insn, loc, new, 1);
3023}
3024
3025/* Show that registers modified no longer contain known constants. We are
3026 called from note_stores with parts of the new insn. */
3027
3028void
3029mark_stores (dest, x)
3030 rtx dest;
3031 rtx x;
3032{
e2eb57b7
RK
3033 int regno = -1;
3034 enum machine_mode mode;
3035
3036 /* DEST is always the innermost thing set, except in the case of
3037 SUBREGs of hard registers. */
175160e7
MT
3038
3039 if (GET_CODE (dest) == REG)
e2eb57b7
RK
3040 regno = REGNO (dest), mode = GET_MODE (dest);
3041 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3042 {
3043 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3044 mode = GET_MODE (SUBREG_REG (dest));
3045 }
3046
3047 if (regno >= 0)
3048 {
3049 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3050 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3051 int i;
3052
e9a25f70
JL
3053 /* Ignore virtual stack var or virtual arg register since those
3054 are handled separately. */
3055 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3056 && regno != VIRTUAL_STACK_VARS_REGNUM)
3057 for (i = regno; i <= last_reg; i++)
3058 if (i < global_const_equiv_map_size)
3059 global_const_equiv_map[i] = 0;
e2eb57b7 3060 }
175160e7
MT
3061}
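/* A minimal standalone sketch (invented names, not part of the
   original file) of the invalidation mark_stores performs: a store
   into a hard register clears the recorded equivalence of every
   word-register it occupies, while a store into a pseudo clears only
   its own entry.  */

static void
invalidate_equivs (equiv, map_size, regno, nregs)
     void **equiv;		/* the global const_equiv data, abstracted */
     int map_size;
     int regno;
     int nregs;			/* 1 for a pseudo; HARD_REGNO_NREGS otherwise */
{
  int i;

  for (i = regno; i < regno + nregs; i++)
    if (i < map_size)
      equiv[i] = 0;
}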
3062\f
3063/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3064 pointed to by PX, they represent constants in the constant pool.
3065 Replace these with a new memory reference obtained from force_const_mem.
3066 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3067 address of a constant pool entry. Replace them with the address of
3068 a new constant pool entry obtained from force_const_mem. */
3069
static void
restore_constants (px)
     rtx *px;
{
  rtx x = *px;
  int i, j;
  char *fmt;

  if (x == 0)
    return;

  if (GET_CODE (x) == CONST_DOUBLE)
    {
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
	}
      else
	*px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				  VOIDmode);
    }

  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
    {
      restore_constants (&XEXP (x, 0));
      *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
    {
      /* This must be (subreg/i:M1 (const/i:M2 ...) 0).  */
      rtx new = XEXP (SUBREG_REG (x), 0);

      restore_constants (&new);
      new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
      PUT_MODE (new, GET_MODE (x));
      *px = validize_mem (new);
    }
  else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
    {
      rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
				       XEXP (XEXP (x, 0), 0)),
		      0);

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (new) != GET_MODE (x))
	new = convert_memory_address (GET_MODE (x), new);
#endif

      *px = new;
    }
  else
    {
      fmt = GET_RTX_FORMAT (GET_CODE (x));
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
	{
	  switch (*fmt++)
	    {
	    case 'E':
	      for (j = 0; j < XVECLEN (x, i); j++)
		restore_constants (&XVECEXP (x, i, j));
	      break;

	    case 'e':
	      restore_constants (&XEXP (x, i));
	      break;
	    }
	}
    }
}
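/* Usage sketch, illustrative only: a caller repairs every saved
   constant-pool reference in an insn stream in place.  INSNS is a
   hypothetical chain head; the accessor calls mirror the loop in
   output_inline_function below.  */
#if 0
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
	restore_constants (&PATTERN (insn));
	restore_constants (&REG_NOTES (insn));
      }
}
#endif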
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

static void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
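/* Usage sketch, illustrative only: a debug-info emitter can bracket the
   output of an abstract inline instance with these calls, marking the
   whole decl/block tree abstract and then restoring it.  FNDECL is a
   hypothetical FUNCTION_DECL.  */
#if 0
  set_decl_abstract_flags (fndecl, 1);
  /* ... emit debugging information for the abstract instance ... */
  set_decl_abstract_flags (fndecl, 0);
#endif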
\f
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head;
  rtx last;

  /* Things we allocate from here on are part of this function, not
     permanent.  */
  temporary_allocation ();

  head = DECL_SAVED_INSNS (fndecl);
  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Redo parameter determinations in case the FUNCTION_...
     macros took machine-specific actions that need to be redone.  */
  assign_parms (fndecl, 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);

  /* The first is a bit of a lie (the array may be larger), but doesn't
     matter too much and it isn't worth saving the actual bound.  */
  reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
  regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
  regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
  regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
  max_parm_reg = MAX_PARMREG (head);
  parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);

  stack_slot_list = STACK_SLOT_LIST (head);
  forced_labels = FORCED_LABELS (head);

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
    current_function_calls_alloca = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
    current_function_calls_setjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
    current_function_calls_longjmp = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
    current_function_returns_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
    current_function_returns_pcc_struct = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
    current_function_needs_context = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
    current_function_has_nonlocal_label = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
    current_function_returns_pointer = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
    current_function_uses_const_pool = 1;

  if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
  current_function_pops_args = POPS_ARGS (head);

  /* This is the only thing the expand_function_end call that used to be
     here actually did, and that call can cause problems.  */
  immediate_size_expand--;

  /* Find last insn and rebuild the constant pool.  */
  for (last = FIRST_PARM_INSN (head);
       NEXT_INSN (last); last = NEXT_INSN (last))
    {
      if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
	{
	  restore_constants (&PATTERN (last));
	  restore_constants (&REG_NOTES (last));
	}
    }

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
  set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));

  /* We must have already output DWARF debugging information for the
     original (abstract) inline function declaration/definition, so
     we want to make sure that the debugging information we generate
     for this special instance of the inline function refers back to
     the information we already generated.  To make sure that happens,
     we simply have to set the DECL_ABSTRACT_ORIGIN for the function
     node (and for all of the local ..._DECL nodes which are its children)
     so that they all point to themselves.  */

  set_decl_origin_self (fndecl);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* We can't inline this anymore.  */
  DECL_INLINE (fndecl) = 0;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;
}
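/* Caller sketch, illustrative only: at the end of compilation a driver
   can emit each inline function that was saved but never written out.
   DEFERRED_INLINES is a hypothetical list of FUNCTION_DECLs.  */
#if 0
{
  tree node;

  for (node = deferred_inlines; node; node = TREE_CHAIN (node))
    if (DECL_SAVED_INSNS (TREE_VALUE (node)))
      output_inline_function (TREE_VALUE (node));
}
#endif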