/* Procedure integration for GNU CC.
   Copyright (C) 1988, 91, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "loop.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

extern struct obstack *function_maybepermanent_obstack;

/* Round VALUE up to the next multiple of ALIGN, which must be a power
   of two.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

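/* Worked example (illustrative only):
   CEIL_ROUND (13, 8) = (13 + 7) & ~7 = 20 & ~7 = 16.  */
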
/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
/* Inlining small functions might save more space than not inlining at
   all.  Assume 1 instruction for the call and 1.5 insns per argument.  */
#define INTEGRATE_THRESHOLD(DECL) \
  (optimize_size \
   ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
   : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
#endif
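
/* Arithmetic sketch (not used by the compiler): for a DECL with three
   arguments, optimizing for size gives a threshold of
   1 + (3 * 3) / 2 = 5 insns, while the normal limit is
   8 * (8 + 3) = 88 insns.  */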
\f
static rtvec initialize_for_inline	PROTO((tree));
static void note_modified_parmregs	PROTO((rtx, rtx, void *));
static void integrate_parm_decls	PROTO((tree, struct inline_remap *,
					       rtvec));
static tree integrate_decl_tree	PROTO((tree,
				       struct inline_remap *));
static void subst_constants	PROTO((rtx *, rtx,
				       struct inline_remap *, int));
static void set_block_origin_self	PROTO((tree));
static void set_decl_origin_self	PROTO((tree));
static void set_block_abstract_flags	PROTO((tree, int));
static void process_reg_param	PROTO((struct inline_remap *, rtx,
				       rtx));
void set_decl_abstract_flags	PROTO((tree, int));
static rtx expand_inline_function_eh_labelmap	PROTO((rtx));
static void mark_stores	PROTO((rtx, rtx, void *));
static int compare_blocks	PROTO((const PTR, const PTR));
static int find_block	PROTO((const PTR, const PTR));

/* The maximum number of instructions accepted for inlining a
   function.  Increasing values mean more aggressive inlining.
   This currently affects only functions explicitly marked as
   inline (or methods defined within the class definition for C++).
   The default value of 10000 is arbitrary but high to match the
   previously unlimited gcc capabilities.  */

int inline_max_insns = 10000;

/* Used by copy_rtx_and_substitute; this indicates whether the function is
   called for the purpose of inlining or some other purpose (e.g. loop
   unrolling).  This affects how constant pool references are handled.
   This variable contains the struct function for the inlined function.  */
static struct function *inlining = 0;
\f
/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, it is set to a fresh label and
   recorded.  This lazy initialization of label_map avoids huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (map, i)
     struct inline_remap *map;
     int i;
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}
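
/* Usage sketch (hypothetical caller): when copying an insn that refers
   to a label, the reference is remapped with

     rtx new_label = get_label_from_map (map, CODE_LABEL_NUMBER (old_label));

   so label_map need only be zeroed, not fully populated, up front.  */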

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning msgid with a single %s
   for the function's name.  */

const char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));

  /* For functions marked as inline increase the maximum size to
     inline_max_insns (-finline-limit-<n>).  For regular functions
     use the limit given by INTEGRATE_THRESHOLD.  */

  int max_insns = (DECL_INLINE (fndecl))
		  ? (inline_max_insns
		     + 8 * list_length (DECL_ARGUMENTS (fndecl)))
		  : INTEGRATE_THRESHOLD (fndecl);

  register int ninsns = 0;
  register tree parms;
  rtx result;

  /* No inlines with varargs.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || current_function_varargs)
    return N_("varargs function cannot be inline");

  if (current_function_calls_alloca)
    return N_("function using alloca cannot be inline");

  if (current_function_calls_setjmp)
    return N_("function using setjmp cannot be inline");

  if (current_function_contains_functions)
    return N_("function with nested functions cannot be inline");

  if (forced_labels)
    return
      N_("function with label addresses used in initializers cannot inline");

  if (current_function_cannot_inline)
    return current_function_cannot_inline;

  /* If it's not even close, don't even look.  */
  if (get_max_uid () > 3 * max_insns)
    return N_("function too large to be inline");

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
	TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
	return N_("no prototype, and parameter address used; cannot be inline");
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return N_("inline functions not supported for this return value type");

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return N_("function with varying-size return value cannot be inline");

  /* Cannot inline a function with a varying size argument or one that
     receives a transparent union.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
	return N_("function with varying-size parameter cannot be inline");
      else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
	return N_("function with transparent union parameter cannot be inline");
    }

  if (get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
	   insn && ninsns < max_insns;
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  ninsns++;

      if (ninsns >= max_insns)
	return N_("function too large to be inline");
    }

  /* We will not inline a function which uses computed goto.  The addresses
     of its local labels, which may be tucked into global storage, are of
     course not constant across instantiations, which causes unexpected
     behaviour.  */
  if (current_function_has_computed_jump)
    return N_("function with computed jump cannot inline");

  /* We cannot inline a nested function that jumps to a nonlocal label.  */
  if (current_function_has_nonlocal_goto)
    return N_("function with nonlocal goto cannot be inline");

  /* This is a hack, until the inliner is taught about eh regions at
     the start of the function.  */
  for (insn = get_insns ();
       insn
	 && ! (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
       insn = NEXT_INSN (insn))
    {
      if (insn && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	return N_("function with complex parameters cannot be inline");
    }

  /* We can't inline functions that return a PARALLEL rtx.  */
  result = DECL_RTL (DECL_RESULT (fndecl));
  if (result && GET_CODE (result) == PARALLEL)
    return N_("inline functions not supported for this return value type");

  return 0;
}
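
/* Caller sketch (hypothetical, for illustration only): the returned
   msgid is meant to be formatted with the function's name, along the
   lines of

     const char *reason = function_cannot_inline_p (fndecl);
     if (reason)
       warning_with_decl (fndecl, reason);

   where the decl's name is substituted for the %s.  */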
\f
/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
\f
/* Subroutine for `save_for_inline_nocopy'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtvec
initialize_for_inline (fndecl)
     tree fndecl;
{
  int i;
  rtvec arg_vector;
  tree parms;

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      /* If we have (mem (addressof (mem ...))), use the inner MEM since
	 otherwise the copy_rtx call below will not unshare the MEM since
	 it shares ADDRESSOF.  */
      if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
	  && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
	p = XEXP (XEXP (p, 0), 0);

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
	parmdecl_map[REGNO (p)] = parms;
      else if (GET_CODE (p) == CONCAT)
	{
	  rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
	  rtx pimag = gen_imagpart (GET_MODE (preal), p);

	  if (GET_CODE (preal) == REG)
	    parmdecl_map[REGNO (preal)] = parms;
	  if (GET_CODE (pimag) == REG)
	    parmdecl_map[REGNO (pimag)] = parms;
	}

      /* This flag is cleared later
	 if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  return arg_vector;
}

/* Copy NODE (which must be a DECL, but not a PARM_DECL).  The DECL
   originally was in the FROM_FN, but now it will be in the
   TO_FN.  */

tree
copy_decl_for_inlining (decl, from_fn, to_fn)
     tree decl;
     tree from_fn;
     tree to_fn;
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    /* For a parameter, we must make an equivalent VAR_DECL, not a
       new PARM_DECL.  */
    copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
  else
    {
      copy = copy_node (decl);
      if (DECL_LANG_SPECIFIC (copy))
	copy_lang_decl (copy);
    }

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  DECL_RTL (copy) = NULL_RTX;

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}
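
/* Usage sketch (mirroring integrate_parm_decls below): each formal of
   the inlined function is remapped with

     tree d = copy_decl_for_inlining (parm, map->fndecl,
				      current_function_decl);

   which turns the PARM_DECL into a VAR_DECL owned by the function being
   compiled into.  */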

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   We don't have to worry about FNDECL being inline expanded by
   other functions which are written at the end of compilation
   because flag_no_inline is turned on when we begin writing
   functions at the end of compilation.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtvec argvec;
  rtx first_nonparm_insn;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  argvec = initialize_for_inline (fndecl);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */
  in_nonparm_insns = 0;
  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
	in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	/* Record what interesting things happen to our parameters.  */
	note_stores (PATTERN (insn), note_modified_parmregs, NULL);
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  current_function->inl_max_label_num = max_label_num ();
  current_function->inl_last_parm_insn = current_function->x_last_parm_insn;
  current_function->original_arg_vector = argvec;
  current_function->original_decl_initial = DECL_INITIAL (fndecl);
  DECL_SAVED_INSNS (fndecl) = current_function;

  /* Clean up.  */
  free (parmdecl_map);
}
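
/* Lifecycle sketch (summary, not build code): after this runs,
   DECL_SAVED_INSNS (fndecl) carries the saved struct function, and a
   later inline expansion reads it back, as in

     struct function *inl_f = DECL_SAVED_INSNS (fndecl);

   at the top of expand_inline_function.  */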
\f
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x, data)
     rtx reg;
     rtx x ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;
\f
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
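
/* Example (illustrative): FIXED_BASE_PLUS_P matches an address such as
   (plus (reg virtual-stack-vars) (const_int 8)), i.e. a constant offset
   from one of the virtual frame registers.  */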

/* Called to set up a mapping for the case where a parameter is in a
   register.  If it is read-only and our argument is a constant, set up the
   constant equivalence.

   If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
   if it is a register.

   Also, don't allow hard registers here; they might not be valid when
   substituted into insns.  */
static void
process_reg_param (map, loc, copy)
     struct inline_remap *map;
     rtx loc, copy;
{
  if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
      || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
	  && ! REG_USERVAR_P (copy))
      || (GET_CODE (copy) == REG
	  && REGNO (copy) < FIRST_PSEUDO_REGISTER))
    {
      rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
      REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
	SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
      copy = temp;
    }
  map->reg_map[REGNO (loc)] = copy;
}
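
/* Illustration (hypothetical register numbers): if the formal lives in
   pseudo 57 (LOC) and the actual argument is (const_int 42) (COPY), the
   constant is first loaded into a fresh pseudo, say 103; reg_map[57]
   then points at pseudo 103, and the const-equiv data records that
   pseudo 103 equals (const_int 42) so later substitution can fold it.  */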

/* Used by duplicate_eh_handlers to map labels for the exception table.  */
static struct inline_remap *eif_eh_map;

static rtx
expand_inline_function_eh_labelmap (label)
     rtx label;
{
  int index = CODE_LABEL_NUMBER (label);
  return get_label_from_map (eif_eh_map, index);
}

/* Compare two BLOCKs for qsort.  The key we sort on is the
   BLOCK_ABSTRACT_ORIGIN of the blocks.  */

static int
compare_blocks (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = *((tree *) v1);
  tree b2 = *((tree *) v2);

  return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
	  - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}

/* Compare two BLOCKs for bsearch.  The first pointer corresponds to
   an original block; the second to a remapped equivalent.  */

static int
find_block (v1, v2)
     const PTR v1;
     const PTR v2;
{
  tree b1 = (tree) v1;
  tree b2 = *((tree *) v2);

  return ((char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
}
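
/* Sketch of how the two comparators pair up (both calls appear later in
   this file):

     qsort (&VARRAY_TREE (map->block_map, 0),
	    map->block_map->elements_used, sizeof (tree), compare_blocks);
     ...
     tree *p = (tree *) bsearch (original_block,
				 &VARRAY_TREE (map->block_map, 0),
				 map->block_map->elements_used,
				 sizeof (tree), find_block);

   Both order remapped blocks by BLOCK_ABSTRACT_ORIGIN, so looking up an
   original block finds its remapped copy.  */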

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
			structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  struct function *inlining_previous;
  struct function *inl_f = DECL_SAVED_INSNS (fndecl);
  tree formal, actual, block;
  rtx parm_insns = inl_f->emit->x_first_insn;
  rtx insns = (inl_f->inl_last_parm_insn
	       ? NEXT_INSN (inl_f->inl_last_parm_insn)
	       : parm_insns);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = inl_f->emit->x_first_label_num;
  int max_labelno = inl_f->inl_max_label_num;
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx stack_save = 0;
  rtx temp;
  struct inline_remap *map = 0;
#ifdef HAVE_cc0
  rtx cc0_insn = 0;
#endif
  rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
  rtx static_chain_value = 0;
  int inl_max_uid;

  /* The pointer used to track the true location of the memory used
     for MAP->LABEL_MAP.  */
  rtx *real_label_map = 0;

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = inl_f->emit->x_reg_rtx_no + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* Check that the parms' types match and that sufficient arguments were
     passed.  Since the appropriate conversions or default promotions have
     already been applied, the machine modes should match exactly.  */

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
    {
      tree arg;
      enum machine_mode mode;

      if (actual == 0)
	return (rtx) (HOST_WIDE_INT) -1;

      arg = TREE_VALUE (actual);
      mode = TYPE_MODE (DECL_ARG_TYPE (formal));

      if (mode != TYPE_MODE (TREE_TYPE (arg))
	  /* If they are block mode, the types should match exactly.
	     They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
	     which could happen if the parameter has incomplete type.  */
	  || (mode == BLKmode
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
	return (rtx) (HOST_WIDE_INT) -1;
    }

  /* Extra arguments are valid, but will be ignored below, so we must
     evaluate them here for side-effects.  */
  for (; actual; actual = TREE_CHAIN (actual))
    expand_expr (TREE_VALUE (actual), const0_rtx,
		 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
  arg_trees = (tree *) xmalloc (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
	 function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      int invisiref = 0;

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
	 object into a stack slot and save its address.  If this will go
	 into memory, we do nothing now.  Otherwise, we just expand the
	 argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  rtx stack_slot
	    = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
				 int_size_in_bytes (TREE_TYPE (arg)), 1);
	  MEM_SET_IN_STRUCT_P (stack_slot,
			       AGGREGATE_TYPE_P (TREE_TYPE (arg)));

	  store_expr (arg, stack_slot, 0);

	  arg_vals[i] = XEXP (stack_slot, 0);
	  invisiref = 1;
	}
      else if (GET_CODE (loc) != MEM)
	{
	  if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
	    /* The mode of LOC and ARG can differ if LOC was a variable
	       that had its mode promoted via PROMOTED_MODE.  */
	    arg_vals[i] = convert_modes (GET_MODE (loc),
					 TYPE_MODE (TREE_TYPE (arg)),
					 expand_expr (arg, NULL_RTX, mode,
						      EXPAND_SUM),
					 TREE_UNSIGNED (TREE_TYPE (formal)));
	  else
	    arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
	}
      else
	arg_vals[i] = 0;

      if (arg_vals[i] != 0
	  && (! TREE_READONLY (formal)
	      /* If the parameter is not read-only, copy our argument through
		 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
		 TARGET in any way.  In the inline function, they will likely
		 be two different pseudos, and `safe_from_p' will make all
		 sorts of smart assumptions about their not conflicting.
		 But if ARG_VALS[I] overlaps TARGET, these assumptions are
		 wrong, so put ARG_VALS[I] into a fresh register.
		 Don't worry about invisible references, since their stack
		 temps will never overlap the target.  */
	      || (target != 0
		  && ! invisiref
		  && (GET_CODE (arg_vals[i]) == REG
		      || GET_CODE (arg_vals[i]) == SUBREG
		      || GET_CODE (arg_vals[i]) == MEM)
		  && reg_overlap_mentioned_p (arg_vals[i], target))
	      /* ??? We must always copy a SUBREG into a REG, because it might
		 get substituted into an address, and not all ports correctly
		 handle SUBREGs in addresses.  */
	      || (GET_CODE (arg_vals[i]) == SUBREG)))
	arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);

      if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
	  && POINTER_TYPE_P (TREE_TYPE (formal)))
	mark_reg_pointer (arg_vals[i],
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
			   / BITS_PER_UNIT));
    }

  /* Allocate the structures we use to remap things.  */

  map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
  map->fndecl = fndecl;

  VARRAY_TREE_INIT (map->block_map, 10, "block_map");
  map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  */
  real_label_map
    = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
  map->label_map = real_label_map;

  inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
  map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
  map->min_insnno = 0;
  map->max_insnno = inl_max_uid;

  map->integrating = 1;

  /* const_equiv_varray maps pseudos in our routine to constants, so
     it needs to be large enough for all our pseudos.  This is the
     number we are currently using plus the number in the called
     routine, plus 15 for each arg, five to compute the virtual frame
     pointer, and five for the return value.  This should be enough
     for most cases.  We do not reference entries outside the range of
     the map.

     ??? These numbers are quite arbitrary and were obtained by
     experimentation.  At some point, we should try to allocate the
     table after all the parameters are set up so we can more accurately
     estimate the number of pseudos we will need.  */

  VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
			   (max_reg_num ()
			    + (max_regno - FIRST_PSEUDO_REGISTER)
			    + 15 * nargs
			    + 10),
			   "expand_inline_function");
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  If there are no insns yet, add a dummy
     insn that can be used as an insertion point.  */
  map->insns_at_start = get_last_insn ();
  if (map->insns_at_start == 0)
    map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
  map->regno_pointer_align = inl_f->emit->regno_pointer_align;

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
    current_function_outgoing_args_size = inl_f->outgoing_args_size;

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (inl_f->uses_pic_offset_table)
    current_function_uses_pic_offset_table = 1;

  /* If this function needs a context, set it up.  */
  if (inl_f->needs_context)
    static_chain_value = lookup_static_chain (fndecl);

  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    {
      rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
			    NOTE_LINE_NUMBER (parm_insns));
      if (note)
	RTX_INTEGRATED_P (note) = 1;
    }

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  /* Don't try to free temp stack slots here, because we may put one of the
     parameters into a temp stack slot.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
	  && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
	{
	  /* This must be an object passed by invisible reference (it could
	     also be a variable-sized object, but we forbid inlining functions
	     with variable-sized arguments).  COPY is the address of the
	     actual value (this computation will cause it to be copied).  We
	     map that address for the register, noting the actual address as
	     an equivalent in case it can be substituted into the insns.  */

	  if (GET_CODE (copy) != REG)
	    {
	      temp = copy_addr_to_reg (copy);
	      if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
		SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
	      copy = temp;
	    }
	  map->reg_map[REGNO (XEXP (loc, 0))] = copy;
	}
      else if (GET_CODE (loc) == MEM)
	{
	  /* This is the case of a parameter that lives in memory.  It
	     will live in the block we allocate in the called routine's
	     frame that simulates the incoming argument area.  Do nothing
	     with the parameter now; we will call store_expr later.  In
	     this case, however, we must ensure that the virtual stack and
	     incoming arg rtx values are expanded now so that we can be
	     sure we have enough slots in the const equiv map since the
	     store_expr call can easily blow the size estimate.  */
	  if (DECL_FRAME_SIZE (fndecl) != 0)
	    copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);

	  if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
	    copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
	}
      else if (GET_CODE (loc) == REG)
	process_reg_param (map, loc, copy);
      else if (GET_CODE (loc) == CONCAT)
	{
	  rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
	  rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
	  rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);

	  process_reg_param (map, locreal, copyreal);
	  process_reg_param (map, locimag, copyimag);
	}
      else
	abort ();
    }

  /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
     specially.  This function can be called recursively, so we need to
     save the previous value.  */
  inlining_previous = inlining;
  inlining = inl_f;

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
	  /* Exclude case handled above.  */
	  && ! (GET_CODE (XEXP (loc, 0)) == REG
		&& REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
	{
	  rtx note = emit_note (DECL_SOURCE_FILE (formal),
				DECL_SOURCE_LINE (formal));
	  if (note)
	    RTX_INTEGRATED_P (note) = 1;

	  /* Compute the address in the area we reserved and store the
	     value there.  */
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
	    temp = change_address (temp, VOIDmode, XEXP (temp, 0));
	  store_expr (arg_trees[i], temp, 0);
	}
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));

  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
	{
	  temp = copy_rtx_and_substitute (loc, map, 1);
	  subst_constants (&temp, NULL_RTX, map, 1);
	  apply_change_group ();
	  target = temp;
	}
      else
	{
	  if (! structure_value_addr
	      || ! aggregate_value_p (DECL_RESULT (fndecl)))
	    abort ();

	  /* Pass the function the address in which to return a structure
	     value.  Note that a constructor can cause someone to call us
	     with STRUCTURE_VALUE_ADDR, but the initialization takes place
	     via the first parameter, rather than the struct return address.

	     We have two cases: If the address is a simple register
	     indirect, use the mapping mechanism to point that register to
	     our structure return address.  Otherwise, store the structure
	     return value into the place that it will be referenced from.  */

	  if (GET_CODE (XEXP (loc, 0)) == REG)
	    {
	      temp = force_operand (structure_value_addr, NULL_RTX);
	      temp = force_reg (Pmode, temp);
	      map->reg_map[REGNO (XEXP (loc, 0))] = temp;

	      if (CONSTANT_P (structure_value_addr)
		  || GET_CODE (structure_value_addr) == ADDRESSOF
		  || (GET_CODE (structure_value_addr) == PLUS
		      && (XEXP (structure_value_addr, 0)
			  == virtual_stack_vars_rtx)
		      && (GET_CODE (XEXP (structure_value_addr, 1))
			  == CONST_INT)))
		{
		  SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
					CONST_AGE_PARM);
		}
	    }
	  else
	    {
	      temp = copy_rtx_and_substitute (loc, map, 1);
	      subst_constants (&temp, NULL_RTX, map, 0);
	      apply_change_group ();
	      emit_move_insn (temp, structure_value_addr);
	    }
	}
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
	 value.  Set up our target for remapping.  */

      /* Machine mode the function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
	 (for the sake of callers that fail to declare it right).
	 We have to use the mode of the result's RTL, rather than
	 its type, since expand_function_start may have promoted it.  */
      enum machine_mode arriving_mode
	= GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
	 substituting a MEM for a REG makes invalid insns.
	 Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
	  || GET_MODE (target) != departing_mode)
	{
	  /* Don't make BLKmode registers.  If this looks like
	     a BLKmode object being returned in a register, get
	     the mode from that, otherwise abort.  */
	  if (departing_mode == BLKmode)
	    {
	      if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
		{
		  departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
		  arriving_mode = departing_mode;
		}
	      else
		abort ();
	    }

	  target = gen_reg_rtx (departing_mode);
	}

      /* If the function's value was promoted before return,
	 avoid machine mode mismatch when we substitute INLINE_TARGET.
	 But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
	{
	  /* Avoid creating a paradoxical subreg wider than
	     BITS_PER_WORD, since that is illegal.  */
	  if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
	    {
	      if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
					  GET_MODE_BITSIZE (arriving_mode)))
		/* Maybe could be handled by using convert_move () ?  */
		abort ();
	      reg_to_map = gen_reg_rtx (arriving_mode);
	      target = gen_lowpart (departing_mode, reg_to_map);
	    }
	  else
	    reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
	}
      else
	reg_to_map = target;
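
      /* Illustration (hypothetical modes): if the function was declared
	 to return HImode (departing_mode) but expand_function_start
	 promoted the result to SImode (arriving_mode), reg_to_map becomes
	 (subreg:SI (reg:HI target) 0), so the copied body computes in the
	 wide mode while the caller still sees the declared mode.  */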

      /* Usually, the result value is the machine's return register.
	 Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
	map->inline_target = reg_to_map;
      else
	map->reg_map[REGNO (loc)] = reg_to_map;
    }
  else
    abort ();

  /* Initialize label_map.  get_label_from_map will actually make
     the labels.  */
  bzero ((char *) &map->label_map[min_labelno],
	 (max_labelno - min_labelno) * sizeof (rtx));

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */
  inline_function_decl = fndecl;
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  block = integrate_decl_tree (inl_f->original_decl_initial, map);
  BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
  inline_function_decl = 0;

  /* Make a fresh binding contour that we can easily remove.  Do this after
     expanding our arguments so cleanups are properly scoped.  */
  expand_start_bindings_and_block (0, block);

  /* Sort the block-map so that it will be easy to find remapped
     blocks later.  */
  qsort (&VARRAY_TREE (map->block_map, 0),
	 map->block_map->elements_used,
	 sizeof (tree),
	 compare_blocks);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_varray for
     mark_stores, called via note_stores.  */
  global_const_equiv_varray = map->const_equiv_varray;

  /* If the called function does an alloca, save and restore the
     stack pointer around the call.  This saves stack space, but
     also is required if this inline is being done between two
     pushes.  */
  if (inl_f->calls_alloca)
    emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);

  /* Now copy the insns one by one.  Do this in two passes, first the insns
     and then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, set;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
	{
	case INSN:
	  pattern = PATTERN (insn);
	  set = single_set (insn);
	  copy = 0;
	  if (GET_CODE (pattern) == USE
	      && GET_CODE (XEXP (pattern, 0)) == REG
	      && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
	    /* The (USE (REG n)) at return from the function should
	       be ignored since we are changing (REG n) into
	       inline_target.  */
	    break;

	  /* If the inline fn needs eh context, make sure that
	     the current fn has one.  */
	  if (GET_CODE (pattern) == USE
	      && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
	    get_eh_context ();

	  /* Ignore setting a function value that we don't want to use.  */
	  if (map->inline_target == 0
	      && set != 0
	      && GET_CODE (SET_DEST (set)) == REG
	      && REG_FUNCTION_VALUE_P (SET_DEST (set)))
	    {
	      if (volatile_refs_p (SET_SRC (set)))
		{
		  rtx new_set;

		  /* If we must not delete the source,
		     load it into a new temporary.  */
		  copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));

		  new_set = single_set (copy);
		  if (new_set == 0)
		    abort ();

		  SET_DEST (new_set)
		    = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
		}
	      /* If the source and destination are the same and it
		 has a note on it, keep the insn.  */
	      else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
		       && REG_NOTES (insn) != 0)
		copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	      else
		break;
	    }

	  /* If this is setting the static chain rtx, omit it.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && GET_CODE (SET_DEST (set)) == REG
		   && rtx_equal_p (SET_DEST (set),
				   static_chain_incoming_rtx))
	    break;

	  /* If this is setting the static chain pseudo, set it from
	     the value we want to give it instead.  */
	  else if (static_chain_value != 0
		   && set != 0
		   && rtx_equal_p (SET_SRC (set),
				   static_chain_incoming_rtx))
	    {
	      rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);

	      copy = emit_move_insn (newdest, static_chain_value);
	      static_chain_value = 0;
	    }

	  /* If this is setting the virtual stack vars register, this must
	     be the code at the handler for a builtin longjmp.  The value
	     saved in the setjmp buffer will be the address of the frame
	     we've made for this inlined instance within our frame.  But we
	     know the offset of that value so we can use it to reconstruct
	     our virtual stack vars register from that value.  If we are
	     copying it from the stack pointer, leave it unchanged.  */
	  else if (set != 0
		   && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
	    {
	      HOST_WIDE_INT offset;
	      temp = map->reg_map[REGNO (SET_DEST (set))];
	      temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					 REGNO (temp)).rtx;

	      if (rtx_equal_p (temp, virtual_stack_vars_rtx))
		offset = 0;
	      else if (GET_CODE (temp) == PLUS
		       && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
		       && GET_CODE (XEXP (temp, 1)) == CONST_INT)
		offset = INTVAL (XEXP (temp, 1));
	      else
		abort ();

	      if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
		temp = SET_SRC (set);
	      else
		temp = force_operand (plus_constant (SET_SRC (set),
						     - offset),
				      NULL_RTX);

	      copy = emit_move_insn (virtual_stack_vars_rtx, temp);
	    }

	  else
	    copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
	  /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
	  /* If this insn is setting CC0, it may need to look at
	     the insn that uses CC0 to see what type of insn it is.
	     In that case, the call to recog via validate_change will
	     fail.  So don't substitute constants here.  Instead,
	     do it when we emit the following insn.

	     For example, see the pyr.md file.  That machine has signed and
	     unsigned compares.  The compare patterns must check the
	     following branch insn to see what kind of compare to
	     emit.

	     If the previous insn set CC0, substitute constants on it as
	     well.  */
	  if (sets_cc0_p (PATTERN (copy)) != 0)
	    cc0_insn = copy;
	  else
	    {
	      if (cc0_insn)
		try_constants (cc0_insn, map);
	      cc0_insn = 0;
	      try_constants (copy, map);
	    }
#else
	  try_constants (copy, map);
#endif
	  break;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN
	      || (GET_CODE (PATTERN (insn)) == PARALLEL
		  && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	    {
	      if (local_return_label == 0)
		local_return_label = gen_label_rtx ();
	      pattern = gen_jump (local_return_label);
	    }
	  else
	    pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);

	  copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* If this used to be a conditional jump insn whose branch
	     direction is now known, we must do something special.  */
	  if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
	    {
#ifdef HAVE_cc0
	      /* If the previous insn set cc0 for us, delete it.  */
	      if (sets_cc0_p (PREV_INSN (copy)))
		delete_insn (PREV_INSN (copy));
#endif

	      /* If this is now a no-op, delete it.  */
	      if (map->last_pc_value == pc_rtx)
		{
		  delete_insn (copy);
		  copy = 0;
		}
	      else
		/* Otherwise, this is an unconditional jump so we must put
		   a BARRIER after it.  We could do some dead code
		   elimination here, but jump.c will do it just as well.  */
		emit_barrier ();
	    }
	  break;

	case CALL_INSN:
	  pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
	  copy = emit_call_insn (pattern);

	  /* Because the USAGE information potentially contains objects other
	     than hard registers, we need to copy it.  */
	  CALL_INSN_FUNCTION_USAGE (copy)
	    = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
				       map, 0);

#ifdef HAVE_cc0
	  if (cc0_insn)
	    try_constants (cc0_insn, map);
	  cc0_insn = 0;
#endif
	  try_constants (copy, map);

	  /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
	  break;

	case CODE_LABEL:
	  copy = emit_label (get_label_from_map (map,
						 CODE_LABEL_NUMBER (insn)));
	  LABEL_NAME (copy) = LABEL_NAME (insn);
	  map->const_age++;
	  break;

	case BARRIER:
	  copy = emit_barrier ();
	  break;

	case NOTE:
	  /* It is important to discard function-end and function-beg notes,
	     so we have only one of each in the current function.
	     Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
	     deleted these in the copy used for continuing compilation,
	     not the copy used for inlining).  */
	  if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
	    {
	      copy = emit_note (NOTE_SOURCE_FILE (insn),
				NOTE_LINE_NUMBER (insn));
	      if (copy
		  && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
		      || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
		{
		  rtx label
		    = get_label_from_map (map, NOTE_EH_HANDLER (copy));

		  /* We have to duplicate the handlers for the original.  */
		  if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
		    {
		      /* We need to duplicate the handlers for the EH region
			 and we need to indicate where the label map is.  */
		      eif_eh_map = map;
		      duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
					     CODE_LABEL_NUMBER (label),
					     expand_inline_function_eh_labelmap);
		    }

		  /* We have to forward these both to match the new exception
		     region.  */
		  NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
		}
	      else if (copy
		       && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
			   || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
		       && NOTE_BLOCK (insn))
		{
		  tree *mapped_block_p;

		  mapped_block_p
		    = (tree *) bsearch (NOTE_BLOCK (insn),
					&VARRAY_TREE (map->block_map, 0),
					map->block_map->elements_used,
					sizeof (tree),
					find_block);

		  if (!mapped_block_p)
		    abort ();
		  else
		    NOTE_BLOCK (copy) = *mapped_block_p;
		}
	    }
	  else
	    copy = 0;
	  break;

	default:
	  abort ();
	}

      if (copy)
	RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }

  /* Now copy the REG_NOTES.  Increment const_age, so that only constants
     from parameters can be substituted in.  These are the only ones that
     are valid across the entire function.  */
  map->const_age++;
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	&& map->insn_map[INSN_UID (insn)]
	&& REG_NOTES (insn))
      {
	rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);

	/* We must also do subst_constants, in case one of our parameters
	   has const type and constant value.  */
	subst_constants (&tem, NULL_RTX, map, 0);
	apply_change_group ();
	REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
      }

  if (local_return_label)
    emit_label (local_return_label);

  /* Restore the stack pointer if we saved it above.  */
  if (inl_f->calls_alloca)
    emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);

  if (!current_function->x_whole_function_mode_p)
    /* In statement-at-a-time mode, we just tell the front-end to add
       this block to the list of blocks at this binding level.  We
       can't do it the way it's done for function-at-a-time mode because
       the superblocks have not been created yet.  */
    insert_block (block);
  else
    {
      BLOCK_CHAIN (block)
	= BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
      BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
    }

  /* End the scope containing the copied formal parameter variables
     and copied LABEL_DECLs.  We pass NULL_TREE for the variables list
     here so that expand_end_bindings will not check for unused
     variables.  That's already been checked for when the inlined
     function was defined.  */
  expand_end_bindings (NULL_TREE, 1, 1);

  /* Must mark the line number note after inlined functions as a repeat, so
     that the test coverage code can avoid counting the call twice.  This
     just tells the code to ignore the immediately following line note, since
     there already exists a copy of this note before the expanded inline call.
     This line number note is still needed for debugging though, so we can't
     delete it.  */
  if (flag_test_coverage)
    emit_note (0, NOTE_REPEATED_LINE_NUMBER);

  emit_line_note (input_filename, lineno);

  /* If the function returns a BLKmode object in a register, copy it
     out of the temp register into a BLKmode memory object.  */
  if (target
      && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
      && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
    target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));

  if (structure_value_addr)
    {
      target = gen_rtx_MEM (TYPE_MODE (type),
			    memory_address (TYPE_MODE (type),
					    structure_value_addr));
      MEM_SET_IN_STRUCT_P (target, 1);
    }

  /* Make sure we free the things we explicitly allocated with xmalloc.  */
  if (real_label_map)
    free (real_label_map);
  VARRAY_FREE (map->const_equiv_varray);
  free (map->reg_map);
  VARRAY_FREE (map->block_map);
  free (map->insn_map);
  free (map);
  free (arg_vals);
  free (arg_trees);

  inlining = inlining_previous;

  return target;
}
\f
/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
   push all of those decls and give each one the corresponding home.  */

static void
integrate_parm_decls (args, map, arg_vector)
     tree args;
     struct inline_remap *map;
     rtvec arg_vector;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree decl = copy_decl_for_inlining (tail, map->fndecl,
					  current_function_decl);
      rtx new_decl_rtl
	= copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);

      /* We really should be setting DECL_INCOMING_RTL to something
	 reasonable here, but that's going to require some more work.  */
      /* DECL_INCOMING_RTL (decl) = ?; */
      /* Fully instantiate the address with the equivalent form so that the
	 debugging information contains the actual register, instead of the
	 virtual register.  Do this by not passing an insn to
	 subst_constants.  */
      subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
      apply_change_group ();
      DECL_RTL (decl) = new_decl_rtl;
    }
}

/* Given a BLOCK node LET, push decls and levels so as to construct in the
   current function a tree of contexts isomorphic to the one that is given.

   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
   registers used in the DECL_RTL field should be remapped.  If it is zero,
   no mapping is necessary.  */

static tree
integrate_decl_tree (let, map)
     tree let;
     struct inline_remap *map;
{
  tree t;
  tree new_block;
  tree *next;

  new_block = make_node (BLOCK);
  VARRAY_PUSH_TREE (map->block_map, new_block);
  next = &BLOCK_VARS (new_block);

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d;

      push_obstacks_nochange ();
      saveable_allocation ();
      d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
      pop_obstacks ();

      if (DECL_RTL (t) != 0)
	{
	  DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map, 1);

	  /* Fully instantiate the address with the equivalent form so that
	     the debugging information contains the actual register, instead
	     of the virtual register.  Do this by not passing an insn to
	     subst_constants.  */
	  subst_constants (&DECL_RTL (d), NULL_RTX, map, 1);
	  apply_change_group ();
	}

      /* Add this declaration to the list of variables in the new
	 block.  */
      *next = d;
      next = &TREE_CHAIN (d);
    }

  next = &BLOCK_SUBBLOCKS (new_block);
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    {
      *next = integrate_decl_tree (t, map);
      BLOCK_SUPERCONTEXT (*next) = new_block;
      next = &BLOCK_CHAIN (*next);
    }

  TREE_USED (new_block) = TREE_USED (let);
  BLOCK_ABSTRACT_ORIGIN (new_block) = let;

  return new_block;
}
\f
/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.  In that case, we copy RTX_UNCHANGING_P even if
   inlining since we need to be conservative in how it is set for
   such cases.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (orig, map, for_lhs)
     register rtx orig;
     struct inline_remap *map;
     int for_lhs;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

1587 switch (code)
1588 {
1589 case REG:
1590 /* If the stack pointer register shows up, it must be part of
1591 stack-adjustments (*not* because we eliminated the frame pointer!).
1592 Small hard registers are returned as-is. Pseudo-registers
1593 go through their `reg_map'. */
1594 regno = REGNO (orig);
1595 if (regno <= LAST_VIRTUAL_REGISTER
1596 || (map->integrating
1597 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1598 {
1599 /* Some hard registers are also mapped,
1600 but others are not translated. */
1601 if (map->reg_map[regno] != 0)
1602 return map->reg_map[regno];
1603
1604 /* If this is the virtual frame pointer, make space in current
1605 function's stack frame for the stack frame of the inline function.
1606
1607 Copy the address of this area into a pseudo. Map
1608 virtual_stack_vars_rtx to this pseudo and set up a constant
1609 equivalence for it to be the address. This will substitute the
1610 address into insns where it can be substituted and use the new
1611 pseudo where it can't. */
1612 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1613 {
1614 rtx loc, seq;
1615 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1616
1617 #ifdef FRAME_GROWS_DOWNWARD
1618 /* In this case, virtual_stack_vars_rtx points to one byte
1619 higher than the top of the frame area. So make sure we
1620 allocate a big enough chunk to keep the frame pointer
1621 aligned like a real one. */
1622 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1623 #endif
1624 start_sequence ();
1625 loc = assign_stack_temp (BLKmode, size, 1);
1626 loc = XEXP (loc, 0);
1627 #ifdef FRAME_GROWS_DOWNWARD
1628 /* In this case, virtual_stack_vars_rtx points to one byte
1629 higher than the top of the frame area. So compute the offset
1630 to one byte higher than our substitute frame. */
1631 loc = plus_constant (loc, size);
1632 #endif
1633 map->reg_map[regno] = temp
1634 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1635
1636 #ifdef STACK_BOUNDARY
1637 mark_reg_pointer (map->reg_map[regno],
1638 STACK_BOUNDARY / BITS_PER_UNIT);
1639 #endif
1640
1641 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1642
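/* Close the sequence and emit it after `insns_at_start', so the
   substitute frame address is computed once, before any remapped
   insn can refer to it.  */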
1643 seq = gen_sequence ();
1644 end_sequence ();
1645 emit_insn_after (seq, map->insns_at_start);
1646 return temp;
1647 }
1648 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1649 || (map->integrating
1650 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1651 == orig)))
1652 {
1653 /* Do the same for a block to contain any arguments referenced
1654 in memory. */
1655 rtx loc, seq;
1656 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1657
1658 start_sequence ();
1659 loc = assign_stack_temp (BLKmode, size, 1);
1660 loc = XEXP (loc, 0);
1661 /* When arguments grow downward, the virtual incoming
1662 args pointer points to the top of the argument block,
1663 so the remapped location better do the same. */
1664 #ifdef ARGS_GROW_DOWNWARD
1665 loc = plus_constant (loc, size);
1666 #endif
1667 map->reg_map[regno] = temp
1668 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1669
1670 #ifdef STACK_BOUNDARY
1671 mark_reg_pointer (map->reg_map[regno],
1672 STACK_BOUNDARY / BITS_PER_UNIT);
1673 #endif
1674
1675 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1676
1677 seq = gen_sequence ();
1678 end_sequence ();
1679 emit_insn_after (seq, map->insns_at_start);
1680 return temp;
1681 }
1682 else if (REG_FUNCTION_VALUE_P (orig))
1683 {
1684 /* This is a reference to the function return value. If
1685 the function doesn't have a return value, error. If the
1686 mode doesn't agree, and it isn't BLKmode, make a SUBREG. */
1687 if (map->inline_target == 0)
1688 /* Must be unrolling loops or replicating code if we
1689 reach here, so return the register unchanged. */
1690 return orig;
1691 else if (GET_MODE (map->inline_target) != BLKmode
1692 && mode != GET_MODE (map->inline_target))
1693 return gen_lowpart (mode, map->inline_target);
1694 else
1695 return map->inline_target;
1696 }
1697 return orig;
1698 }
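/* ORIG is an ordinary pseudo register.  The first time we see it,
   allocate a fresh pseudo in the current function and copy its
   descriptive bits, so every later reference maps consistently.  */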
1699 if (map->reg_map[regno] == NULL)
1700 {
1701 map->reg_map[regno] = gen_reg_rtx (mode);
1702 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1703 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1704 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1705 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1706
1707 if (map->regno_pointer_flag[regno])
1708 mark_reg_pointer (map->reg_map[regno],
1709 map->regno_pointer_align[regno]);
1710 }
1711 return map->reg_map[regno];
1712
1713 case SUBREG:
1714 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1715 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1716 if (GET_CODE (copy) == SUBREG)
1717 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1718 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1719 else if (GET_CODE (copy) == CONCAT)
1720 {
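/* A CONCAT here is typically a complex-mode pseudo split into its
   real and imaginary parts; pick the part this SUBREG selects.  */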
1721 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
1722
1723 if (GET_MODE (retval) == GET_MODE (orig))
1724 return retval;
1725 else
1726 return gen_rtx_SUBREG (GET_MODE (orig), retval,
1727 (SUBREG_WORD (orig) %
1728 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
1729 / (unsigned) UNITS_PER_WORD)));
1730 }
1731 else
1732 return gen_rtx_SUBREG (GET_MODE (orig), copy,
1733 SUBREG_WORD (orig));
1734
1735 case ADDRESSOF:
1736 copy = gen_rtx_ADDRESSOF (mode,
1737 copy_rtx_and_substitute (XEXP (orig, 0),
1738 map, for_lhs),
1739 0, ADDRESSOF_DECL (orig));
1740 regno = ADDRESSOF_REGNO (orig);
1741 if (map->reg_map[regno])
1742 regno = REGNO (map->reg_map[regno]);
1743 else if (regno > LAST_VIRTUAL_REGISTER)
1744 {
1745 temp = XEXP (orig, 0);
1746 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1747 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1748 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1749 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1750 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1751
1752 if (map->regno_pointer_flag[regno])
1753 mark_reg_pointer (map->reg_map[regno],
1754 map->regno_pointer_align[regno]);
1755 regno = REGNO (map->reg_map[regno]);
1756 }
1757 ADDRESSOF_REGNO (copy) = regno;
1758 return copy;
1759
1760 case USE:
1761 case CLOBBER:
1762 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1763 to (use foo) if the original insn didn't have a subreg.
1764 Removing the subreg distorts the VAX movstrhi pattern
1765 by changing the mode of an operand. */
1766 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1767 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1768 copy = SUBREG_REG (copy);
1769 return gen_rtx_fmt_e (code, VOIDmode, copy);
1770
1771 case CODE_LABEL:
1772 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1773 = LABEL_PRESERVE_P (orig);
1774 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1775
1776 case LABEL_REF:
1777 copy
1778 = gen_rtx_LABEL_REF
1779 (mode,
1780 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1781 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1782
1783 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1784
1785 /* The fact that this label was previously nonlocal does not mean
1786 it still is, so we must check if it is within the range of
1787 this function's labels. */
1788 LABEL_REF_NONLOCAL_P (copy)
1789 = (LABEL_REF_NONLOCAL_P (orig)
1790 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
1791 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
1792
1793 /* If we have made a nonlocal label local, it means that this
1794 inlined call will be referring to our nonlocal goto handler.
1795 So make sure we create one for this block; we normally would
1796 not since this is not otherwise considered a "call". */
1797 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
1798 function_call_count++;
1799
1800 return copy;
1801
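/* These codes are sharable and never modified in place, so the
   original can be returned unchanged.  */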
1802 case PC:
1803 case CC0:
1804 case CONST_INT:
1805 return orig;
1806
1807 case SYMBOL_REF:
1808 /* Symbols which represent the address of a label stored in the constant
1809 pool must be modified to point to a constant pool entry for the
1810 remapped label. Otherwise, symbols are returned unchanged. */
1811 if (CONSTANT_POOL_ADDRESS_P (orig))
1812 {
1813 struct function *f = inlining ? inlining : current_function;
1814 rtx constant = get_pool_constant_for_function (f, orig);
1815 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
1816 if (inlining)
1817 {
1818 rtx temp = force_const_mem (const_mode,
1819 copy_rtx_and_substitute (constant,
1820 map, 0));
1821
1822 #if 0
1823 /* Legitimizing the address here is incorrect.
1824
1825 Since we had a SYMBOL_REF before, we can assume it is valid
1826 to have one in this position in the insn.
1827
1828 Also, change_address may create new registers. These
1829 registers will not have valid reg_map entries. This can
1830 cause try_constants() to fail because it assumes that all
1831 registers in the rtx have valid reg_map entries, and it may
1832 end up replacing one of these new registers with junk. */
1833
1834 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1835 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1836 #endif
1837
1838 temp = XEXP (temp, 0);
1839
1840 #ifdef POINTERS_EXTEND_UNSIGNED
1841 if (GET_MODE (temp) != GET_MODE (orig))
1842 temp = convert_memory_address (GET_MODE (orig), temp);
1843 #endif
1844 return temp;
1845 }
1846 else if (GET_CODE (constant) == LABEL_REF)
1847 return XEXP (force_const_mem
1848 (GET_MODE (orig),
1849 copy_rtx_and_substitute (constant, map, for_lhs)),
1850 0);
1851 }
1852 else
1853 if (SYMBOL_REF_NEED_ADJUST (orig))
1854 {
1855 eif_eh_map = map;
1856 return rethrow_symbol_map (orig,
1857 expand_inline_function_eh_labelmap);
1858 }
1859
1860 return orig;
1861
1862 case CONST_DOUBLE:
1863 /* We have to make a new copy of this CONST_DOUBLE because we don't want
1864 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1865 duplicate of a CONST_DOUBLE we have already seen. */
1866 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1867 {
1868 REAL_VALUE_TYPE d;
1869
1870 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1871 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
1872 }
1873 else
1874 return immed_double_const (CONST_DOUBLE_LOW (orig),
1875 CONST_DOUBLE_HIGH (orig), VOIDmode);
1876
1877 case CONST:
1878 /* Make new constant pool entry for a constant
1879 that was in the pool of the inline function. */
1880 if (RTX_INTEGRATED_P (orig))
1881 abort ();
1882 break;
1883
1884 case ASM_OPERANDS:
1885 /* If a single asm insn contains multiple output operands
1886 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1887 We must make sure that the copied insn continues to share it. */
1888 if (map->orig_asm_operands_vector == XVEC (orig, 3))
1889 {
1890 copy = rtx_alloc (ASM_OPERANDS);
1891 copy->volatil = orig->volatil;
1892 XSTR (copy, 0) = XSTR (orig, 0);
1893 XSTR (copy, 1) = XSTR (orig, 1);
1894 XINT (copy, 2) = XINT (orig, 2);
1895 XVEC (copy, 3) = map->copy_asm_operands_vector;
1896 XVEC (copy, 4) = map->copy_asm_constraints_vector;
1897 XSTR (copy, 5) = XSTR (orig, 5);
1898 XINT (copy, 6) = XINT (orig, 6);
1899 return copy;
1900 }
1901 break;
1902
1903 case CALL:
1904 /* This is given special treatment because the first
1905 operand of a CALL is a (MEM ...) which may get
1906 forced into a register for cse. This is undesirable
1907 if function-address cse isn't wanted or if we won't do cse. */
1908 #ifndef NO_FUNCTION_CSE
1909 if (! (optimize && ! flag_no_function_cse))
1910 #endif
1911 return
1912 gen_rtx_CALL
1913 (GET_MODE (orig),
1914 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
1915 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
1916 map, 0)),
1917 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
1918 break;
1919
1920 #if 0
1921 /* Must be ifdefed out for loop unrolling to work. */
1922 case RETURN:
1923 abort ();
1924 #endif
1925
1926 case SET:
1927 /* If this is setting fp or ap, it means that we have a nonlocal goto.
1928 Adjust the setting by the offset of the area we made.
1929 If the nonlocal goto is into the current function,
1930 this will result in unnecessarily bad code, but should work. */
1931 if (SET_DEST (orig) == virtual_stack_vars_rtx
1932 || SET_DEST (orig) == virtual_incoming_args_rtx)
1933 {
1934 /* In case a translation hasn't occurred already, make one now. */
1935 rtx equiv_reg;
1936 rtx equiv_loc;
1937 HOST_WIDE_INT loc_offset;
1938
1939 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
1940 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
1941 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1942 REGNO (equiv_reg)).rtx;
1943 loc_offset
1944 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
1945
1946 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
1947 force_operand
1948 (plus_constant
1949 (copy_rtx_and_substitute (SET_SRC (orig),
1950 map, 0),
1951 - loc_offset),
1952 NULL_RTX));
1953 }
1954 else
1955 return gen_rtx_SET (VOIDmode,
1956 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
1957 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
1958 break;
1959
1960 case MEM:
1961 if (inlining
1962 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
1963 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
1964 {
1965 enum machine_mode const_mode
1966 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
1967 rtx constant
1968 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
1969
1970 constant = copy_rtx_and_substitute (constant, map, 0);
1971
1972 /* If this was an address of a constant pool entry that itself
1973 had to be placed in the constant pool, it might not be a
1974 valid address. So the recursive call might have turned it
1975 into a register. In that case, it isn't a constant any
1976 more, so return it. This has the potential of changing a
1977 MEM into a REG, but we'll assume that it is safe. */
1978 if (! CONSTANT_P (constant))
1979 return constant;
1980
1981 return validize_mem (force_const_mem (const_mode, constant));
1982 }
1983
1984 copy = rtx_alloc (MEM);
1985 PUT_MODE (copy, mode);
1986 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
1987 MEM_COPY_ATTRIBUTES (copy, orig);
1988 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
1989 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
1990 return copy;
1991
1992 default:
1993 break;
1994 }
1995
1996 copy = rtx_alloc (code);
1997 PUT_MODE (copy, mode);
1998 copy->in_struct = orig->in_struct;
1999 copy->volatil = orig->volatil;
2000 copy->unchanging = orig->unchanging;
2001
2002 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2003
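/* Copy the remaining operands generically, dispatching on the RTL
   format string: 'e' is a subexpression, 'u' a reference to another
   insn, 'E' a vector of expressions, and the rest are scalar fields
   copied verbatim.  */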
2004 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2005 {
2006 switch (*format_ptr++)
2007 {
2008 case '0':
2009 /* Copy this through the wide int field; that's safest. */
2010 X0WINT (copy, i) = X0WINT (orig, i);
2011 break;
2012
2013 case 'e':
2014 XEXP (copy, i)
2015 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2016 break;
2017
2018 case 'u':
2019 /* Change any references to old-insns to point to the
2020 corresponding copied insns. */
2021 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2022 break;
2023
2024 case 'E':
2025 XVEC (copy, i) = XVEC (orig, i);
2026 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2027 {
2028 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2029 for (j = 0; j < XVECLEN (copy, i); j++)
2030 XVECEXP (copy, i, j)
2031 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2032 map, for_lhs);
2033 }
2034 break;
2035
2036 case 'w':
2037 XWINT (copy, i) = XWINT (orig, i);
2038 break;
2039
2040 case 'i':
2041 XINT (copy, i) = XINT (orig, i);
2042 break;
2043
2044 case 's':
2045 XSTR (copy, i) = XSTR (orig, i);
2046 break;
2047
2048 case 't':
2049 XTREE (copy, i) = XTREE (orig, i);
2050 break;
2051
2052 default:
2053 abort ();
2054 }
2055 }
2056
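/* The first ASM_OPERANDS copied establishes the shared operand and
   constraint vectors that copies of its sibling ASM_OPERANDS reuse
   (see the ASM_OPERANDS case above).  */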
2057 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2058 {
2059 map->orig_asm_operands_vector = XVEC (orig, 3);
2060 map->copy_asm_operands_vector = XVEC (copy, 3);
2061 map->copy_asm_constraints_vector = XVEC (copy, 4);
2062 }
2063
2064 return copy;
2065 }
2066 \f
2067 /* Substitute known constant values into INSN, if that is valid. */
2068
2069 void
2070 try_constants (insn, map)
2071 rtx insn;
2072 struct inline_remap *map;
2073 {
2074 int i;
2075
2076 map->num_sets = 0;
2077
2078 /* First try just updating addresses, then other things. This is
2079 important when we have something like the store of a constant
2080 into memory and we can update the memory address but the machine
2081 does not support a constant source. */
2082 subst_constants (&PATTERN (insn), insn, map, 1);
2083 apply_change_group ();
2084 subst_constants (&PATTERN (insn), insn, map, 0);
2085 apply_change_group ();
2086
2087 /* Show we don't know the value of anything stored or clobbered. */
2088 note_stores (PATTERN (insn), mark_stores, NULL);
2089 map->last_pc_value = 0;
2090 #ifdef HAVE_cc0
2091 map->last_cc0_value = 0;
2092 #endif
2093
2094 /* Set up any constant equivalences made in this insn. */
2095 for (i = 0; i < map->num_sets; i++)
2096 {
2097 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2098 {
2099 int regno = REGNO (map->equiv_sets[i].dest);
2100
2101 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2102 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2103 /* The following clause is a hack to make the case work where GNU C++
2104 reassigns a variable to make cse work right. */
2105 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2106 regno).rtx,
2107 map->equiv_sets[i].equiv))
2108 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2109 map->equiv_sets[i].equiv, map->const_age);
2110 }
2111 else if (map->equiv_sets[i].dest == pc_rtx)
2112 map->last_pc_value = map->equiv_sets[i].equiv;
2113 #ifdef HAVE_cc0
2114 else if (map->equiv_sets[i].dest == cc0_rtx)
2115 map->last_cc0_value = map->equiv_sets[i].equiv;
2116 #endif
2117 }
2118 }
2119 \f
2120 /* Substitute known constants for pseudo regs in the contents of LOC,
2121 which are part of INSN.
2122 If INSN is zero, the substitution should always be done (this is used to
2123 update DECL_RTL).
2124 These changes are taken out by try_constants if the result is not valid.
2125
2126 Note that we are more concerned with determining when the result of a SET
2127 is a constant, for further propagation, than actually inserting constants
2128 into insns; cse will do the latter task better.
2129
2130 This function is also used to adjust the addresses of items previously addressed
2131 via the virtual stack variable or virtual incoming arguments registers.
2132
2133 If MEMONLY is nonzero, only make changes inside a MEM. */
2134
2135 static void
2136 subst_constants (loc, insn, map, memonly)
2137 rtx *loc;
2138 rtx insn;
2139 struct inline_remap *map;
2140 int memonly;
2141 {
2142 rtx x = *loc;
2143 register int i, j;
2144 register enum rtx_code code;
2145 register const char *format_ptr;
2146 int num_changes = num_validated_changes ();
2147 rtx new = 0;
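/* MAX_MACHINE_MODE serves as a sentinel meaning the mode of operand 0
   has not been recorded; the unary and ternary simplifications below
   abort if they see it.  */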
2148 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2149
2150 code = GET_CODE (x);
2151
2152 switch (code)
2153 {
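/* Constants, symbolic references and the like need no substitution;
   return them untouched.  */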
2154 case PC:
2155 case CONST_INT:
2156 case CONST_DOUBLE:
2157 case SYMBOL_REF:
2158 case CONST:
2159 case LABEL_REF:
2160 case ADDRESS:
2161 return;
2162
2163 #ifdef HAVE_cc0
2164 case CC0:
2165 if (! memonly)
2166 validate_change (insn, loc, map->last_cc0_value, 1);
2167 return;
2168 #endif
2169
2170 case USE:
2171 case CLOBBER:
2172 /* The only thing we can do with a USE or CLOBBER is possibly do
2173 some substitutions in a MEM within it. */
2174 if (GET_CODE (XEXP (x, 0)) == MEM)
2175 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2176 return;
2177
2178 case REG:
2179 /* Substitute for parms and known constants. Don't replace
2180 hard regs used as user variables with constants. */
2181 if (! memonly)
2182 {
2183 int regno = REGNO (x);
2184 struct const_equiv_data *p;
2185
2186 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2187 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2188 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2189 p->rtx != 0)
2190 && p->age >= map->const_age)
2191 validate_change (insn, loc, p->rtx, 1);
2192 }
2193 return;
2194
2195 case SUBREG:
2196 /* SUBREG applied to something other than a reg
2197 should be treated as ordinary, since that must
2198 be a special hack and we don't know how to treat it specially.
2199 Consider for example mulsidi3 in m68k.md.
2200 Ordinary SUBREG of a REG needs this special treatment. */
2201 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2202 {
2203 rtx inner = SUBREG_REG (x);
2204 rtx new = 0;
2205
2206 /* We can't call subst_constants on &SUBREG_REG (x) because any
2207 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2208 see what is inside, try to form the new SUBREG and see if that is
2209 valid. We handle two cases: extracting a full word in an
2210 integral mode and extracting the low part. */
2211 subst_constants (&inner, NULL_RTX, map, 0);
2212
2213 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2214 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2215 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2216 new = operand_subword (inner, SUBREG_WORD (x), 0,
2217 GET_MODE (SUBREG_REG (x)));
2218
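/* Back out the substitutions tentatively made into INNER above;
   only the replacement of the whole SUBREG, if one was found, is
   validated below.  */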
2219 cancel_changes (num_changes);
2220 if (new == 0 && subreg_lowpart_p (x))
2221 new = gen_lowpart_common (GET_MODE (x), inner);
2222
2223 if (new)
2224 validate_change (insn, loc, new, 1);
2225
2226 return;
2227 }
2228 break;
2229
2230 case MEM:
2231 subst_constants (&XEXP (x, 0), insn, map, 0);
2232
2233 /* If a memory address got spoiled, change it back. */
2234 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2235 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2236 cancel_changes (num_changes);
2237 return;
2238
2239 case SET:
2240 {
2241 /* Substitute constants in our source, and in any arguments to a
2242 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2243 itself. */
2244 rtx *dest_loc = &SET_DEST (x);
2245 rtx dest = *dest_loc;
2246 rtx src, tem;
2247
2248 subst_constants (&SET_SRC (x), insn, map, memonly);
2249 src = SET_SRC (x);
2250
2251 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2252 || GET_CODE (*dest_loc) == SUBREG
2253 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2254 {
2255 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2256 {
2257 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2258 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2259 }
2260 dest_loc = &XEXP (*dest_loc, 0);
2261 }
2262
2263 /* Do substitute in the address of a destination in memory. */
2264 if (GET_CODE (*dest_loc) == MEM)
2265 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2266
2267 /* Check for the case where DEST is a SUBREG, both it and the underlying
2268 register are no larger than one word, and the SUBREG has the wider mode.
2269 In that case, we are really setting the underlying register to the
2270 source converted to the mode of DEST. So indicate that. */
2271 if (GET_CODE (dest) == SUBREG
2272 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2273 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2274 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2275 <= GET_MODE_SIZE (GET_MODE (dest)))
2276 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2277 src)))
2278 src = tem, dest = SUBREG_REG (dest);
2279
2280 /* If storing a recognizable value, save it for later recording. */
2281 if ((map->num_sets < MAX_RECOG_OPERANDS)
2282 && (CONSTANT_P (src)
2283 || (GET_CODE (src) == REG
2284 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2285 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2286 || (GET_CODE (src) == PLUS
2287 && GET_CODE (XEXP (src, 0)) == REG
2288 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2289 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2290 && CONSTANT_P (XEXP (src, 1)))
2291 || GET_CODE (src) == COMPARE
2292 #ifdef HAVE_cc0
2293 || dest == cc0_rtx
2294 #endif
2295 || (dest == pc_rtx
2296 && (src == pc_rtx || GET_CODE (src) == RETURN
2297 || GET_CODE (src) == LABEL_REF))))
2298 {
2299 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2300 it will cause us to save the COMPARE with any constants
2301 substituted, which is what we want for later. */
2302 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2303 map->equiv_sets[map->num_sets++].dest = dest;
2304 }
2305 }
2306 return;
2307
2308 default:
2309 break;
2310 }
2311
2312 format_ptr = GET_RTX_FORMAT (code);
2313
2314 /* If the first operand is an expression, save its mode for later. */
2315 if (*format_ptr == 'e')
2316 op0_mode = GET_MODE (XEXP (x, 0));
2317
2318 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2319 {
2320 switch (*format_ptr++)
2321 {
2322 case '0':
2323 break;
2324
2325 case 'e':
2326 if (XEXP (x, i))
2327 subst_constants (&XEXP (x, i), insn, map, memonly);
2328 break;
2329
2330 case 'u':
2331 case 'i':
2332 case 's':
2333 case 'w':
2334 case 't':
2335 break;
2336
2337 case 'E':
2338 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2339 for (j = 0; j < XVECLEN (x, i); j++)
2340 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2341
2342 break;
2343
2344 default:
2345 abort ();
2346 }
2347 }
2348
2349 /* If this is a commutative operation, move a constant to the second
2350 operand unless the second operand is already a CONST_INT. */
2351 if (! memonly
2352 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2353 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2354 {
2355 rtx tem = XEXP (x, 0);
2356 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2357 validate_change (insn, &XEXP (x, 1), tem, 1);
2358 }
2359
2360 /* Simplify the expression in case we put in some constants. */
2361 if (! memonly)
2362 switch (GET_RTX_CLASS (code))
2363 {
2364 case '1':
2365 if (op0_mode == MAX_MACHINE_MODE)
2366 abort ();
2367 new = simplify_unary_operation (code, GET_MODE (x),
2368 XEXP (x, 0), op0_mode);
2369 break;
2370
2371 case '<':
2372 {
2373 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2374
2375 if (op_mode == VOIDmode)
2376 op_mode = GET_MODE (XEXP (x, 1));
2377 new = simplify_relational_operation (code, op_mode,
2378 XEXP (x, 0), XEXP (x, 1));
2379 #ifdef FLOAT_STORE_FLAG_VALUE
2380 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2381 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2382 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2383 GET_MODE (x)));
2384 #endif
2385 break;
2386 }
2387
2388 case '2':
2389 case 'c':
2390 new = simplify_binary_operation (code, GET_MODE (x),
2391 XEXP (x, 0), XEXP (x, 1));
2392 break;
2393
2394 case 'b':
2395 case '3':
2396 if (op0_mode == MAX_MACHINE_MODE)
2397 abort ();
2398
2399 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2400 XEXP (x, 0), XEXP (x, 1),
2401 XEXP (x, 2));
2402 break;
2403 }
2404
2405 if (new)
2406 validate_change (insn, loc, new, 1);
2407 }
2408
2409 /* Show that the registers modified no longer contain known constants. We are
2410 called from note_stores with parts of the new insn. */
2411
2412 static void
2413 mark_stores (dest, x, data)
2414 rtx dest;
2415 rtx x ATTRIBUTE_UNUSED;
2416 void *data ATTRIBUTE_UNUSED;
2417 {
2418 int regno = -1;
2419 enum machine_mode mode = VOIDmode;
2420
2421 /* DEST is always the innermost thing set, except in the case of
2422 SUBREGs of hard registers. */
2423
2424 if (GET_CODE (dest) == REG)
2425 regno = REGNO (dest), mode = GET_MODE (dest);
2426 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2427 {
2428 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2429 mode = GET_MODE (SUBREG_REG (dest));
2430 }
2431
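/* A store to a hard register in a multi-word mode clobbers
   HARD_REGNO_NREGS consecutive registers, so invalidate each of
   them, not just the first.  */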
2432 if (regno >= 0)
2433 {
2434 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2435 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2436 int i;
2437
2438 /* Ignore virtual stack var or virtual arg register since those
2439 are handled separately. */
2440 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
2441 && regno != VIRTUAL_STACK_VARS_REGNUM)
2442 for (i = regno; i <= last_reg; i++)
2443 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2444 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2445 }
2446 }
2447 \f
2448 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2449 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2450 that it points to the node itself, thus indicating that the node is its
2451 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2452 the given node is NULL, recursively descend the decl/block tree which
2453 it is the root of, and for each other ..._DECL or BLOCK node contained
2454 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2455 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2456 values to point to themselves. */
2457
2458 static void
2459 set_block_origin_self (stmt)
2460 register tree stmt;
2461 {
2462 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2463 {
2464 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2465
2466 {
2467 register tree local_decl;
2468
2469 for (local_decl = BLOCK_VARS (stmt);
2470 local_decl != NULL_TREE;
2471 local_decl = TREE_CHAIN (local_decl))
2472 set_decl_origin_self (local_decl); /* Potential recursion. */
2473 }
2474
2475 {
2476 register tree subblock;
2477
2478 for (subblock = BLOCK_SUBBLOCKS (stmt);
2479 subblock != NULL_TREE;
2480 subblock = BLOCK_CHAIN (subblock))
2481 set_block_origin_self (subblock); /* Recurse. */
2482 }
2483 }
2484 }
2485
2486 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2487 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2488 node so that it points to the node itself, thus indicating that the
2489 node represents its own (abstract) origin. Additionally, if the
2490 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2491 the decl/block tree of which the given node is the root, and for
2492 each other ..._DECL or BLOCK node contained therein whose
2493 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2494 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2495 point to themselves. */
2496
2497 static void
2498 set_decl_origin_self (decl)
2499 register tree decl;
2500 {
2501 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2502 {
2503 DECL_ABSTRACT_ORIGIN (decl) = decl;
2504 if (TREE_CODE (decl) == FUNCTION_DECL)
2505 {
2506 register tree arg;
2507
2508 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2509 DECL_ABSTRACT_ORIGIN (arg) = arg;
2510 if (DECL_INITIAL (decl) != NULL_TREE
2511 && DECL_INITIAL (decl) != error_mark_node)
2512 set_block_origin_self (DECL_INITIAL (decl));
2513 }
2514 }
2515 }
2516 \f
2517 /* Given a pointer to some BLOCK node, and a boolean value to set the
2518 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2519 the given block, and for all local decls and all local sub-blocks
2520 (recursively) which are contained therein. */
2521
2522 static void
2523 set_block_abstract_flags (stmt, setting)
2524 register tree stmt;
2525 register int setting;
2526 {
2527 register tree local_decl;
2528 register tree subblock;
2529
2530 BLOCK_ABSTRACT (stmt) = setting;
2531
2532 for (local_decl = BLOCK_VARS (stmt);
2533 local_decl != NULL_TREE;
2534 local_decl = TREE_CHAIN (local_decl))
2535 set_decl_abstract_flags (local_decl, setting);
2536
2537 for (subblock = BLOCK_SUBBLOCKS (stmt);
2538 subblock != NULL_TREE;
2539 subblock = BLOCK_CHAIN (subblock))
2540 set_block_abstract_flags (subblock, setting);
2541 }
2542
2543 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2544 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2545 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2546 set the abstract flags for all of the parameters, local vars, local
2547 blocks and sub-blocks (recursively) to the same setting. */
2548
2549 void
2550 set_decl_abstract_flags (decl, setting)
2551 register tree decl;
2552 register int setting;
2553 {
2554 DECL_ABSTRACT (decl) = setting;
2555 if (TREE_CODE (decl) == FUNCTION_DECL)
2556 {
2557 register tree arg;
2558
2559 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2560 DECL_ABSTRACT (arg) = setting;
2561 if (DECL_INITIAL (decl) != NULL_TREE
2562 && DECL_INITIAL (decl) != error_mark_node)
2563 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2564 }
2565 }
2566 \f
2567 /* Output the assembly language code for the function FNDECL
2568 from its DECL_SAVED_INSNS. Used for inline functions that are output
2569 at the end of compilation instead of where they appeared in the source. */
2570
2571 void
2572 output_inline_function (fndecl)
2573 tree fndecl;
2574 {
2575 struct function *curf = current_function;
2576 struct function *f = DECL_SAVED_INSNS (fndecl);
2577
2578 current_function = f;
2579 current_function_decl = fndecl;
2580 clear_emit_caches ();
2581
2582 /* Things we allocate from here on are part of this function, not
2583 permanent. */
2584 temporary_allocation ();
2585
2586 set_new_last_label_num (f->inl_max_label_num);
2587
2588 /* We must have already output DWARF debugging information for the
2589 original (abstract) inline function declaration/definition, so
2590 we want to make sure that the debugging information we generate
2591 for this special instance of the inline function refers back to
2592 the information we already generated. To make sure that happens,
2593 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
2594 node (and for all of the local ..._DECL nodes which are its children)
2595 so that they all point to themselves. */
2596
2597 set_decl_origin_self (fndecl);
2598
2599 /* We're not deferring this any longer. */
2600 DECL_DEFER_OUTPUT (fndecl) = 0;
2601
2602 /* We can't inline this anymore. */
2603 f->inlinable = 0;
2604 DECL_INLINE (fndecl) = 0;
2605
2606 /* Compile this function all the way down to assembly code. */
2607 rest_of_compilation (fndecl);
2608
2609 current_function = curf;
2610 current_function_decl = curf ? curf->decl : 0;
2611 }