/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

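/* Added illustration (not in the original source): with a power-of-two
   ALIGN, adding ALIGN-1 and then masking off the low bits rounds up to
   the next multiple, so CEIL_ROUND (13, 8) == 16 while
   CEIL_ROUND (16, 8) stays 16; values are never rounded down.  */
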
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
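
/* Added illustration (not in the original source): for a function with
   two arguments, the -Os threshold above works out to
   1 + (3 * 2) / 2 = 4 insns, while the speed-optimizing threshold is
   8 * (8 + 2) = 80 insns.  */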

/* Decide whether a function with a target specific attribute
   attached can be inlined.  By default we disallow this.  */
#ifndef FUNCTION_ATTRIBUTE_INLINABLE_P
#define FUNCTION_ATTRIBUTE_INLINABLE_P(FNDECL) 0
#endif

static rtvec initialize_for_inline PARAMS ((tree));
static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
					  rtvec));
static tree integrate_decl_tree PARAMS ((tree,
					 struct inline_remap *));
static void subst_constants PARAMS ((rtx *, rtx,
				     struct inline_remap *, int));
static void set_block_origin_self PARAMS ((tree));
static void set_block_abstract_flags PARAMS ((tree, int));
static void process_reg_param PARAMS ((struct inline_remap *, rtx,
				       rtx));
void set_decl_abstract_flags PARAMS ((tree, int));
static rtx expand_inline_function_eh_labelmap PARAMS ((rtx));
static void mark_stores PARAMS ((rtx, rtx, void *));
static void save_parm_insns PARAMS ((rtx, rtx));
static void copy_insn_list PARAMS ((rtx, struct inline_remap *,
				    rtx));
static int compare_blocks PARAMS ((const PTR, const PTR));
static int find_block PARAMS ((const PTR, const PTR));

/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
   This currently affects only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;

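/* Added note (not in the original source): -finline-limit-<n> replaces
   the 10000 default, so e.g. -finline-limit-600 caps explicitly-inline
   functions at roughly 600 insns, plus the per-argument allowance added
   in function_cannot_inline_p below.  */
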
/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable points to the struct function for the function
   currently being inlined.  */
static struct function *inlining = 0;

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}

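#if 0
/* Minimal usage sketch (added for illustration only; MAP is assumed to
   come from an enclosing inline_remap).  The first lookup of an index
   allocates a fresh label; later lookups return the same rtx.  */
rtx lab = get_label_from_map (map, 42);
emit_label (lab);
#endif
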
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     inline_max_insns (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		   ? (inline_max_insns
		      + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		   : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (fndecl))) != VOID_TYPE
      && int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE
	       && TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses of
     its local labels, which may be tucked into global storage, are of course
     not constant across instantiations, which causes unexpected behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return N_("function with complex parameters cannot be inline");
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  /* If the function has a target specific attribute attached to it,
     then we assume that we should not inline it.  This can be overridden
     by the target if it defines FUNCTION_ATTRIBUTE_INLINABLE_P.  */
  if (DECL_MACHINE_ATTRIBUTES (fndecl)
      && ! FUNCTION_ATTRIBUTE_INLINABLE_P (fndecl))
    return N_("function with target specific attribute(s) cannot be inlined");

  return NULL;
}
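
#if 0
/* Added illustration, not original source: a declaration like this one
   is rejected above with "varargs function cannot be inline", because
   the last element of its TYPE_ARG_TYPES is not void_type_node.  */
static inline int sum_ints (int count, ...);
#endif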

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;

/* Subroutine for `save_for_inline'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  memset ((char *) parmdecl_map, 0, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      /* For a parameter, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
	copy_lang_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	TREE_ADDRESSABLE (copy) = 0;
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  DECL_RTL (copy) = NULL_RTX;

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  save_parm_insns (insn, first_nonparm_insn);

  cfun->inl_max_label_num = max_label_num ();
  cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
  cfun->original_arg_vector = argvec;
  cfun->original_decl_initial = DECL_INITIAL (fndecl);
  cfun->no_debugging_symbols = (write_symbols == NO_DEBUG);
  DECL_SAVED_INSNS (fndecl) = cfun;

  /* Clean up.  */
  free (parmdecl_map);
}

/* Scan the chain of insns to see what happens to our PARM_DECLs.  If a
   PARM_DECL is used but never modified, we can substitute its rtl directly
   when expanding inline (and perform constant folding when its incoming
   value is constant).  Otherwise, we have to copy its value into a new
   register and track the new register's life.  */

static void
save_parm_insns (insn, first_nonparm_insn)
     rtx insn;
     rtx first_nonparm_insn;
{
  if (insn == NULL_RTX)
    return;

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (INSN_P (insn))
	{
	  /* Record what interesting things happen to our parameters.  */
	  note_stores (PATTERN (insn), note_modified_parmregs, NULL);

	  /* If this is a CALL_PLACEHOLDER insn then we need to look into the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      int i;

	      for (i = 0; i < 3; i++)
		save_parm_insns (XEXP (PATTERN (insn), i),
				 first_nonparm_insn);
	    }
	}
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)

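/* Added illustration (not in the original source): FIXED_BASE_PLUS_P
   matches rtl of the shape
     (plus (reg VIRTUAL_STACK_VARS_REGNUM) (const_int 8)),
   i.e. a constant offset from one of the virtual frame registers, which
   the const_equiv machinery can treat much like a constant.  */
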
/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}

/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((const tree *) v1);
  tree b2 = *((const tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
	  - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  const union tree_node *b1 = (const union tree_node *) v1;
  tree b2 = *((const tree *) v2);

  return ((const char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}

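/* Added note (not in the original source): compare_blocks orders
   map->block_map by the address of each copy's BLOCK_ABSTRACT_ORIGIN,
   so find_block can bsearch that array using the original BLOCK itself
   as the key.  */
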
/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  /* Pull out the decl for the function definition; fndecl may be a
     local declaration, which would break DECL_ABSTRACT_ORIGIN.  */
  fndecl = inl_f->decl;

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  if (cfun->preferred_stack_boundary < inl_f->preferred_stack_boundary)
    cfun->preferred_stack_boundary = inl_f->preferred_stack_boundary;

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

	  store_expr (arg, stack_slot, 0);
	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal))));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_align = inl_f->emit->regno_pointer_align;
  map->x_regno_reg_rtx = inl_f->emit->x_regno_reg_rtx;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes:  In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_FRAME_SIZE (fndecl) != 0)
	    copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);

	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't have any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases:  If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      /* A virtual register might be invalid in an insn, because
		 it can cause trouble in reload.  Since we don't have access
		 to the expanders at map translation time, make sure we have
		 a proper register now.
		 If a virtual register is actually valid, cse or combine
		 can put it into the mapped insns.  */
	      if (REGNO (temp) >= FIRST_VIRTUAL_REGISTER
		  && REGNO (temp) <= LAST_VIRTUAL_REGISTER)
		temp = copy_to_mode_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  memset ((char *) &map->label_map[min_labelno], 0,
	  (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  */
  copy_insn_list (insns, map, static_chain_value);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (! cfun->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode, since
       the superblocks have not been created yet.  */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_INSN_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      set_mem_attributes (target, type, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}

/* Make copies of each insn in the given list using the mapping
   computed in expand_inline_function.  This function may call itself for
   insns containing sequences.

   Copying is done in two passes, first the insns and then their REG_NOTES.

   If static_chain_value is non-zero, it represents the context-pointer
   register for the function.  */

static void
copy_insn_list (insns, map, static_chain_value)
     rtx insns;
     struct inline_remap *map;
     rtx static_chain_value;
{
  register int i;
  rtx insn;
  rtx temp;
  rtx local_return_label = NULL_RTX;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif

  /* Copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* Similarly if an ignored return value is clobbered.  */
	  else if (map->inline_target == 0
		   && GET_CODE (pattern) == CLOBBER
		   && GET_CODE (XEXP (pattern, 0)) == REG
		   && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    break;

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (any_condjump_p (insn) && onlyjump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put
		   a BARRIER after it.  We could do some dead code elimination
		   here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  /* If this is a CALL_PLACEHOLDER insn then we need to copy the
	     three attached sequences: normal call, sibling call and tail
	     recursion.  */
	  if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      rtx sequence[3];
	      rtx tail_label;

	      for (i = 0; i < 3; i++)
		{
		  rtx seq;

		  sequence[i] = NULL_RTX;
		  seq = XEXP (PATTERN (insn), i);
		  if (seq)
		    {
		      start_sequence ();
		      copy_insn_list (seq, map, static_chain_value);
		      sequence[i] = get_insns ();
		      end_sequence ();
		    }
		}

	      /* Find the new tail recursion label.
		 It will already be substituted into sequence[2].  */
	      tail_label = copy_rtx_and_substitute (XEXP (PATTERN (insn), 3),
						    map, 0);

	      copy = emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode,
							       sequence[0],
							       sequence[1],
							       sequence[2],
							       tail_label));
	      break;
	    }

	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  SIBLING_CALL_P (copy) = SIBLING_CALL_P (insn);
	  CONST_CALL_P (copy) = CONST_CALL_P (insn);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */

	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

    case CODE_LABEL:
      copy = emit_label (get_label_from_map (map,
					     CODE_LABEL_NUMBER (insn)));
      LABEL_NAME (copy) = LABEL_NAME (insn);
      map->const_age++;
      break;

    case BARRIER:
      copy = emit_barrier ();
      break;

    case NOTE:
      /* NOTE_INSN_FUNCTION_END and NOTE_INSN_FUNCTION_BEG are
	 discarded because it is important to have only one of
	 each in the current function.

	 NOTE_INSN_DELETED notes aren't useful.

	 NOTE_INSN_BASIC_BLOCK is discarded because the saved bb
	 pointer (which will soon be dangling) confuses flow's
	 attempts to preserve bb structures during the compilation
	 of a function.  */

      if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
	  && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
	{
	  copy = emit_note (NOTE_SOURCE_FILE (insn),
			    NOTE_LINE_NUMBER (insn));
	  if (copy
	      && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
	    {
	      rtx label
		= get_label_from_map (map, NOTE_EH_HANDLER (copy));

	      /* We have to duplicate the handlers for the original.  */
	      if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		{
		  /* We need to duplicate the handlers for the EH region
		     and we need to indicate where the label map is.  */
		  eif_eh_map = map;
		  duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
					 CODE_LABEL_NUMBER (label),
					 expand_inline_function_eh_labelmap);
		}

	      /* We have to forward these both to match the new exception
		 region.  */
	      NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
	    }
	  else if (copy
		   && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
		       || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
		   && NOTE_BLOCK (insn))
	    {
	      tree *mapped_block_p;

	      mapped_block_p
		= (tree *) bsearch (NOTE_BLOCK (insn),
				    &VARRAY_TREE (map->block_map, 0),
				    map->block_map->elements_used,
				    sizeof (tree),
				    find_block);

	      if (!mapped_block_p)
		abort ();
	      else
		NOTE_BLOCK (copy) = *mapped_block_p;
	    }
	}
      else
	copy = 0;
      break;

    default:
      abort ();
    }

  if (copy)
    RTX_INTEGRATED_P (copy) = 1;

  map->insn_map[INSN_UID (insn)] = copy;
}

  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx next, note = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&note, NULL_RTX, map, 0);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = note;

	/* Finally, delete any REG_LABEL notes from the chain.  */
	for (; note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      remove_note (map->insn_map[INSN_UID (insn)], note);
	  }
      }

  if (local_return_label)
    emit_label (local_return_label);
}
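
/* Expository sketch (added for illustration; not part of the original
   integrate.c): the insn_map used throughout copy_insn_list is simply an
   array indexed by instruction UID, mapping each original insn to its
   copy, so the REG_NOTES pass above and the 'u' case of
   copy_rtx_and_substitute below can translate old-insn references in
   O(1).  The toy type and function here are hypothetical.  */

struct toy_insn { int uid; };

static struct toy_insn *
toy_lookup_copy (struct toy_insn **insn_map, const struct toy_insn *old)
{
  /* Return the copy recorded for OLD, or 0 if no copy was made.  */
  return old ? insn_map[old->uid] : 0;
}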
\f
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
					  current_function_decl);
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something reasonable
	 here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */

      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}
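
/* Expository sketch (illustration only, not GCC code): the loop above
   pairs the I'th PARM_DECL in a TREE_CHAIN'd list with the I'th slot of
   ARG_VECTOR by carrying an index alongside the list walk.  The toy
   node type and visitor below are hypothetical.  */

struct toy_parm { struct toy_parm *chain; };

static void
toy_pair_walk (struct toy_parm *list, void **vec,
	       void (*visit) (struct toy_parm *, void *))
{
  struct toy_parm *p;
  int i;

  /* Walk the chained list and the vector in lockstep.  */
  for (p = list, i = 0; p; p = p->chain, i++)
    visit (p, vec[i]);
}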

/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree new_block;
  tree *next;

  new_block = make_node (BLOCK);
  VARRAY_PUSH_TREE (map->block_map, new_block);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map, 1);

	  /* Fully instantiate the address with the equivalent form so that the
	     debugging information contains the actual register, instead of the
	     virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map, 1);
	  apply_change_group ();
	}

      /* Add this declaration to the list of variables in the new
	 block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
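
/* Expository sketch (illustration only): integrate_decl_tree builds its
   output lists with a tail pointer (`next' above), appending in O(1)
   with no trailing "find the end" walk.  Below is a standalone toy
   version of the idiom, copying a chained list into caller-provided
   storage; all names are hypothetical.  */

struct toy_list { struct toy_list *chain; int val; };

static struct toy_list *
toy_copy_chain (const struct toy_list *src, struct toy_list *pool)
{
  struct toy_list *head = 0, **next = &head;

  for (; src; src = src->chain, pool++)
    {
      pool->val = src->val;
      pool->chain = 0;
      *next = pool;		/* Append at the tail in O(1).  */
      next = &pool->chain;	/* Advance the tail pointer.  */
    }

  return head;
}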
\f
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     register rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER
	  || (map->integrating
	      && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0
	      /* We shouldn't usually have reg_map set for the return
		 register, but it may happen if we have leaf-register
		 remapping and the return register is used in one of
		 the calling sequences of a call_placeholder.  In this
		 case, we'll end up with a reg_map set for this
		 register, but we don't want to use it for registers
		 marked as return values.  */
	      && ! REG_FUNCTION_VALUE_P (orig))
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in the current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_SAVED_INSNS (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
		   || (map->integrating
		       && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
			   == orig)))
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_SAVED_INSNS (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = gen_sequence ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (REG_FUNCTION_VALUE_P (orig))
	    {
	      /* This is a reference to the function return value.  If
		 the function doesn't have a return value, error.  If the
		 mode doesn't agree, and it ain't BLKmode, make a SUBREG.  */
	      if (map->inline_target == 0)
		/* Must be unrolling loops or replicating code if we
		   reach here, so return the register unchanged.  */
		return orig;
	      else if (GET_MODE (map->inline_target) != BLKmode
		       && mode != GET_MODE (map->inline_target))
		return gen_lowpart (mode, map->inline_target);
	      else
		return map->inline_target;
	    }
#if defined (LEAF_REGISTERS) && defined (LEAF_REG_REMAP)
	  /* If leaf_renumber_regs_insn() might remap this register to
	     some other number, make sure we don't share it with the
	     inlined function, otherwise delayed optimization of the
	     inlined function may change it in place, breaking our
	     reference to it.  We may still share it within the
	     function, so create an entry for this register in the
	     reg_map.  */
	  if (map->integrating && regno < FIRST_PSEUDO_REGISTER
	      && LEAF_REGISTERS[regno] && LEAF_REG_REMAP (regno) != regno)
	    {
	      temp = gen_rtx_REG (mode, regno);
	      map->reg_map[regno] = temp;
	      return temp;
	    }
#endif
	  else
	    return orig;

	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
	return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
			       SUBREG_WORD (orig) + SUBREG_WORD (copy));
      else if (GET_CODE (copy) == CONCAT)
	{
	  rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);

	  if (GET_MODE (retval) == GET_MODE (orig))
	    return retval;
	  else
	    return gen_rtx_SUBREG (GET_MODE (orig), retval,
				   (SUBREG_WORD (orig) %
				    (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
				     / (unsigned) UNITS_PER_WORD)));
	}
      else
	return gen_rtx_SUBREG (GET_MODE (orig), copy,
			       SUBREG_WORD (orig));

    case ADDRESSOF:
      copy = gen_rtx_ADDRESSOF (mode,
				copy_rtx_and_substitute (XEXP (orig, 0),
							 map, for_lhs),
				0, ADDRESSOF_DECL (orig));
      regno = ADDRESSOF_REGNO (orig);
      if (map->reg_map[regno])
	regno = REGNO (map->reg_map[regno]);
      else if (regno > LAST_VIRTUAL_REGISTER)
	{
	  temp = XEXP (orig, 0);
	  map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
	  RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	  regno = REGNO (map->reg_map[regno]);
	}
      ADDRESSOF_REGNO (copy) = regno;
      return copy;

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movstrhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) == NOTE_INSN_DELETED_LABEL)
	return map->insn_map[INSN_UID (orig)];
      break;

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      /* If we have made a nonlocal label local, it means that this
	 inlined call will be referring to our nonlocal goto handler.
	 So make sure we create one for this block; we normally would
	 not since this is not otherwise considered a "call".  */
      if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
	function_call_count++;

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = inlining ? inlining : cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
	  if (inlining)
	    {
	      rtx temp = force_const_mem (const_mode,
					  copy_rtx_and_substitute (constant,
								   map, 0));

#if 0
	      /* Legitimizing the address here is incorrect.

		 Since we had a SYMBOL_REF before, we can assume it is valid
		 to have one in this position in the insn.

		 Also, change_address may create new registers.  These
		 registers will not have valid reg_map entries.  This can
		 cause try_constants() to fail because it assumes that all
		 registers in the rtx have valid reg_map entries, and it may
		 end up replacing one of these new registers with junk.  */

	      if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
		temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
#endif

	      temp = XEXP (temp, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (temp) != GET_MODE (orig))
		temp = convert_memory_address (GET_MODE (orig), temp);
#endif
	      return temp;
	    }
	  else if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      else if (SYMBOL_REF_NEED_ADJUST (orig))
	{
	  eif_eh_map = map;
	  return rethrow_symbol_map (orig,
				     expand_inline_function_eh_labelmap);
	}

      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't want
	 to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      /* Make a new constant pool entry for a constant
	 that was in the pool of the inline function.  */
      if (RTX_INTEGRATED_P (orig))
	abort ();
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  copy->volatil = orig->volatil;
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	return
	  gen_rtx_CALL
	    (GET_MODE (orig),
	     gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			  copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						   map, 0)),
	     copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      if (inlining
	  && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
	{
	  enum machine_mode const_mode
	    = get_pool_mode_for_function (inlining, XEXP (orig, 0));
	  rtx constant
	    = get_pool_constant_for_function (inlining, XEXP (orig, 0));

	  constant = copy_rtx_and_substitute (constant, map, 0);

	  /* If this was an address of a constant pool entry that itself
	     had to be placed in the constant pool, it might not be a
	     valid address.  So the recursive call might have turned it
	     into a register.  In that case, it isn't a constant any
	     more, so return it.  This has the potential of changing a
	     MEM into a REG, but we'll assume that it is safe.  */
	  if (! CONSTANT_P (constant))
	    return constant;

	  return validize_mem (force_const_mem (const_mode, constant));
	}

      copy = rtx_alloc (MEM);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  /* Copy this through the wide int field; that's safest.  */
	  X0WINT (copy, i) = X0WINT (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
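
/* Expository sketch (illustration only): the generic tail of
   copy_rtx_and_substitute is driven by GET_RTX_FORMAT, a per-code
   string in which 'e' means "rtx operand", 'E' "vector of rtx",
   'i' "integer", and so on.  The toy two-operand node below shows the
   same format-driven recursion; toy_copy_node and its types are
   hypothetical, and the sketch assumes <stdlib.h> for malloc.  */

struct toy_node
{
  const char *fmt;		/* e.g. "ee", "ei", "ii" */
  union { struct toy_node *e; int i; } op[2];
};

static struct toy_node *
toy_copy_node (const struct toy_node *x)
{
  struct toy_node *c = (struct toy_node *) malloc (sizeof *c);
  int i;

  c->fmt = x->fmt;
  for (i = 0; x->fmt[i]; i++)
    switch (x->fmt[i])
      {
      case 'e':			/* Recursively copy a sub-node.  */
	c->op[i].e = x->op[i].e ? toy_copy_node (x->op[i].e) : 0;
	break;
      case 'i':			/* Plain data is copied directly.  */
	c->op[i].i = x->op[i].i;
	break;
      }
  return c;
}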
\f
/* Substitute known constant values into INSN, if that is valid.  */

void
try_constants (insn, map)
     rtx insn;
     struct inline_remap *map;
{
  int i;

  map->num_sets = 0;

  /* First try just updating addresses, then other things.  This is
     important when we have something like the store of a constant
     into memory and we can update the memory address but the machine
     does not support a constant source.  */
  subst_constants (&PATTERN (insn), insn, map, 1);
  apply_change_group ();
  subst_constants (&PATTERN (insn), insn, map, 0);
  apply_change_group ();

  /* Show we don't know the value of anything stored or clobbered.  */
  note_stores (PATTERN (insn), mark_stores, NULL);
  map->last_pc_value = 0;
#ifdef HAVE_cc0
  map->last_cc0_value = 0;
#endif

  /* Set up any constant equivalences made in this insn.  */
  for (i = 0; i < map->num_sets; i++)
    {
      if (GET_CODE (map->equiv_sets[i].dest) == REG)
	{
	  int regno = REGNO (map->equiv_sets[i].dest);

	  MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
	  if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
	      /* Following clause is a hack to make the case work where GNU C++
		 reassigns a variable to make cse work right.  */
	      || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
						    regno).rtx,
				map->equiv_sets[i].equiv))
	    SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
				  map->equiv_sets[i].equiv, map->const_age);
	}
      else if (map->equiv_sets[i].dest == pc_rtx)
	map->last_pc_value = map->equiv_sets[i].equiv;
#ifdef HAVE_cc0
      else if (map->equiv_sets[i].dest == cc0_rtx)
	map->last_cc0_value = map->equiv_sets[i].equiv;
#endif
    }
}
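
/* Expository sketch (illustration only): the const_equiv varray pairs
   each register with an (rtx, age) record, and bumping map->const_age
   invalidates older equivalences wholesale instead of clearing the
   table entry by entry.  The toy table below is hypothetical.  */

struct toy_equiv { int value; int age; };

static int
toy_equiv_lookup (const struct toy_equiv *tab, int regno,
		  int current_age, int *value_out)
{
  /* An entry is usable only if it was recorded at or after the
     current age; older entries are stale without being erased.  */
  if (tab[regno].age >= current_age)
    {
      *value_out = tab[regno].value;
      return 1;
    }
  return 0;
}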
\f
/* Substitute known constants for pseudo regs in the contents of LOC,
   which are part of INSN.
   If INSN is zero, the substitution should always be done (this is used to
   update DECL_RTL).
   These changes are taken out by try_constants if the result is not valid.

   Note that we are more concerned with determining when the result of a SET
   is a constant, for further propagation, than actually inserting constants
   into insns; cse will do the latter task better.

   This function is also used to adjust the address of items previously
   addressed via the virtual stack variable or virtual incoming arguments
   registers.

   If MEMONLY is nonzero, only make changes inside a MEM.  */

static void
subst_constants (loc, insn, map, memonly)
     rtx *loc;
     rtx insn;
     struct inline_remap *map;
     int memonly;
{
  rtx x = *loc;
  register int i, j;
  register enum rtx_code code;
  register const char *format_ptr;
  int num_changes = num_validated_changes ();
  rtx new = 0;
  enum machine_mode op0_mode = MAX_MACHINE_MODE;

  code = GET_CODE (x);

  switch (code)
    {
    case PC:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CONST:
    case LABEL_REF:
    case ADDRESS:
      return;

#ifdef HAVE_cc0
    case CC0:
      if (! memonly)
	validate_change (insn, loc, map->last_cc0_value, 1);
      return;
#endif

    case USE:
    case CLOBBER:
      /* The only thing we can do with a USE or CLOBBER is possibly do
	 some substitutions in a MEM within it.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
      return;

    case REG:
      /* Substitute for parms and known constants.  Don't replace
	 hard regs used as user variables with constants.  */
      if (! memonly)
	{
	  int regno = REGNO (x);
	  struct const_equiv_data *p;

	  if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
	      && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
	      && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
		  p->rtx != 0)
	      && p->age >= map->const_age)
	    validate_change (insn, loc, p->rtx, 1);
	}
      return;

    case SUBREG:
      /* SUBREG applied to something other than a reg
	 should be treated as ordinary, since that must
	 be a special hack and we don't know how to treat it specially.
	 Consider for example mulsidi3 in m68k.md.
	 Ordinary SUBREG of a REG needs this special treatment.  */
      if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
	{
	  rtx inner = SUBREG_REG (x);
	  rtx new = 0;

	  /* We can't call subst_constants on &SUBREG_REG (x) because any
	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
	     see what is inside, try to form the new SUBREG and see if that is
	     valid.  We handle two cases: extracting a full word in an
	     integral mode and extracting the low part.  */
	  subst_constants (&inner, NULL_RTX, map, 0);

	  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	      && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE (SUBREG_REG (x)) != VOIDmode)
	    new = operand_subword (inner, SUBREG_WORD (x), 0,
				   GET_MODE (SUBREG_REG (x)));

	  cancel_changes (num_changes);
	  if (new == 0 && subreg_lowpart_p (x))
	    new = gen_lowpart_common (GET_MODE (x), inner);

	  if (new)
	    validate_change (insn, loc, new, 1);

	  return;
	}
      break;

    case MEM:
      subst_constants (&XEXP (x, 0), insn, map, 0);

      /* If a memory address got spoiled, change it back.  */
      if (! memonly && insn != 0 && num_validated_changes () != num_changes
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	cancel_changes (num_changes);
      return;

    case SET:
      {
	/* Substitute constants in our source, and in any arguments to a
	   complex (e.g., ZERO_EXTRACT) destination, but not in the
	   destination itself.  */
	rtx *dest_loc = &SET_DEST (x);
	rtx dest = *dest_loc;
	rtx src, tem;

	subst_constants (&SET_SRC (x), insn, map, memonly);
	src = SET_SRC (x);

	while (GET_CODE (*dest_loc) == ZERO_EXTRACT
	       || GET_CODE (*dest_loc) == SUBREG
	       || GET_CODE (*dest_loc) == STRICT_LOW_PART)
	  {
	    if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
	      {
		subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
		subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
	      }
	    dest_loc = &XEXP (*dest_loc, 0);
	  }

	/* Do substitute in the address of a destination in memory.  */
	if (GET_CODE (*dest_loc) == MEM)
	  subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);

	/* Check for the case of DEST a SUBREG, both it and the underlying
	   register are less than one word, and the SUBREG has the wider mode.
	   In that case, we are really setting the underlying register to the
	   source converted to the mode of DEST.  So indicate that.  */
	if (GET_CODE (dest) == SUBREG
	    && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
	    && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
	    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		<= GET_MODE_SIZE (GET_MODE (dest)))
	    && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
					       src)))
	  src = tem, dest = SUBREG_REG (dest);

	/* If storing a recognizable value save it for later recording.  */
	if ((map->num_sets < MAX_RECOG_OPERANDS)
	    && (CONSTANT_P (src)
		|| (GET_CODE (src) == REG
		    && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
		|| (GET_CODE (src) == PLUS
		    && GET_CODE (XEXP (src, 0)) == REG
		    && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
			|| REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
		    && CONSTANT_P (XEXP (src, 1)))
		|| GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
		|| dest == cc0_rtx
#endif
		|| (dest == pc_rtx
		    && (src == pc_rtx || GET_CODE (src) == RETURN
			|| GET_CODE (src) == LABEL_REF))))
	  {
	    /* Normally, this copy won't do anything.  But, if SRC is a COMPARE
	       it will cause us to save the COMPARE with any constants
	       substituted, which is what we want for later.  */
	    map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
	    map->equiv_sets[map->num_sets++].dest = dest;
	  }
      }
      return;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);

  /* If the first operand is an expression, save its mode for later.  */
  if (*format_ptr == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  break;

	case 'e':
	  if (XEXP (x, i))
	    subst_constants (&XEXP (x, i), insn, map, memonly);
	  break;

	case 'u':
	case 'i':
	case 's':
	case 'w':
	case 'n':
	case 't':
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    for (j = 0; j < XVECLEN (x, i); j++)
	      subst_constants (&XVECEXP (x, i, j), insn, map, memonly);

	  break;

	default:
	  abort ();
	}
    }

  /* If this is a commutative operation, move a constant to the second
     operand unless the second operand is already a CONST_INT.  */
  if (! memonly
      && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
      && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
    {
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
    }

  /* Simplify the expression in case we put in some constants.  */
  if (! memonly)
    switch (GET_RTX_CLASS (code))
      {
      case '1':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();
	new = simplify_unary_operation (code, GET_MODE (x),
					XEXP (x, 0), op0_mode);
	break;

      case '<':
	{
	  enum machine_mode op_mode = GET_MODE (XEXP (x, 0));

	  if (op_mode == VOIDmode)
	    op_mode = GET_MODE (XEXP (x, 1));
	  new = simplify_relational_operation (code, op_mode,
					       XEXP (x, 0), XEXP (x, 1));
#ifdef FLOAT_STORE_FLAG_VALUE
	  if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	    {
	      enum machine_mode mode = GET_MODE (x);
	      if (new == const0_rtx)
		new = CONST0_RTX (mode);
	      else
		{
		  REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
		  new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
		}
	    }
#endif
	  break;
	}

      case '2':
      case 'c':
	new = simplify_binary_operation (code, GET_MODE (x),
					 XEXP (x, 0), XEXP (x, 1));
	break;

      case 'b':
      case '3':
	if (op0_mode == MAX_MACHINE_MODE)
	  abort ();

	new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
					  XEXP (x, 0), XEXP (x, 1),
					  XEXP (x, 2));
	break;
      }

  if (new)
    validate_change (insn, loc, new, 1);
}
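
/* Expository sketch (illustration only): subst_constants leans on
   recog.c's change group -- validate_change queues a tentative
   in-place replacement, apply_change_group commits the queue, and
   cancel_changes rolls back to a previously saved depth.  The toy
   undo log below shows the same tentative-apply/rollback shape; it is
   hypothetical and fixed-size, not GCC's implementation.  */

#define TOY_MAX_CHANGES 16
static struct { int *loc; int old; } toy_change_log[TOY_MAX_CHANGES];
static int toy_change_count;

static void
toy_validate_change (int *loc, int new_val)
{
  /* Record the old value, then apply the change tentatively.  */
  toy_change_log[toy_change_count].loc = loc;
  toy_change_log[toy_change_count].old = *loc;
  toy_change_count++;
  *loc = new_val;
}

static void
toy_cancel_changes (int num)
{
  /* Undo every change made since the log was NUM entries deep.  */
  while (toy_change_count > num)
    {
      toy_change_count--;
      *toy_change_log[toy_change_count].loc
	= toy_change_log[toy_change_count].old;
    }
}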

/* Show that the registers modified no longer contain known constants.  We
   are called from note_stores with parts of the new insn.  */

static void
mark_stores (dest, x, data)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  int regno = -1;
  enum machine_mode mode = VOIDmode;

  /* DEST is always the innermost thing set, except in the case of
     SUBREGs of hard registers.  */

  if (GET_CODE (dest) == REG)
    regno = REGNO (dest), mode = GET_MODE (dest);
  else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
    {
      regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
      mode = GET_MODE (SUBREG_REG (dest));
    }

  if (regno >= 0)
    {
      unsigned int uregno = regno;
      unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
			       : uregno + HARD_REGNO_NREGS (uregno, mode) - 1);
      unsigned int i;

      /* Ignore virtual stack var or virtual arg register since those
	 are handled separately.  */
      if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
	  && uregno != VIRTUAL_STACK_VARS_REGNUM)
	for (i = uregno; i <= last_reg; i++)
	  if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
	    VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
    }
}
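
/* Expository sketch (illustration only): a multi-word hard register
   occupies HARD_REGNO_NREGS consecutive register numbers, which is why
   mark_stores clears a whole span.  The toy version below stands in a
   size/word-size division for the target macro; all names are
   hypothetical.  */

static void
toy_invalidate_span (int *equiv, int regno, int size_in_bytes,
		     int word_size)
{
  /* Round the size up to whole words to get the register count.  */
  int nregs = (size_in_bytes + word_size - 1) / word_size;
  int i;

  for (i = regno; i < regno + nregs; i++)
    equiv[i] = 0;
}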
\f
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (stmt)
     register tree stmt;
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	register tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (decl)
     register tree decl;
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  register tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}
\f
/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (stmt, setting)
     register tree stmt;
     register int setting;
{
  register tree local_decl;
  register tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (decl, setting)
     register tree decl;
     register int setting;
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      register tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}
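
/* Expository sketch (illustration only): the two functions above form a
   classic mutually recursive walk -- blocks recurse into their decls
   and sub-blocks, decls recurse back into their initial block.  A
   single-type toy version of the same shape (hypothetical):  */

struct toy_block { struct toy_block *subblocks, *chain; int abstract; };

static void
toy_set_abstract (struct toy_block *b, int setting)
{
  struct toy_block *sub;

  b->abstract = setting;
  for (sub = b->subblocks; sub; sub = sub->chain)
    toy_set_abstract (sub, setting);	/* Recurse into children.  */
}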
\f
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at end of compilation instead of where they came in the source.  */

void
output_inline_function (fndecl)
     tree fndecl;
{
  struct function *old_cfun = cfun;
  enum debug_info_type old_write_symbols = write_symbols;
  struct function *f = DECL_SAVED_INSNS (fndecl);

  cfun = f;
  current_function_decl = fndecl;
  clear_emit_caches ();

  set_new_last_label_num (f->inl_max_label_num);

  /* We're not deferring this any longer.  */
  DECL_DEFER_OUTPUT (fndecl) = 0;

  /* If requested, suppress debugging information.  */
  if (f->no_debugging_symbols)
    write_symbols = NO_DEBUG;

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  /* We can't inline this anymore.  */
  f->inlinable = 0;
  DECL_INLINE (fndecl) = 0;

  cfun = old_cfun;
  current_function_decl = old_cfun ? old_cfun->decl : 0;
  write_symbols = old_write_symbols;
}