1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-98, 1999, 2000 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "insn-config.h"
32 #include "insn-flags.h"
33 #include "expr.h"
34 #include "output.h"
35 #include "recog.h"
36 #include "integrate.h"
37 #include "real.h"
38 #include "except.h"
39 #include "function.h"
40 #include "toplev.h"
41 #include "intl.h"
42 #include "loop.h"
43
44 #include "obstack.h"
45 #define obstack_chunk_alloc xmalloc
46 #define obstack_chunk_free free
47
48 extern struct obstack *function_maybepermanent_obstack;
49
50 /* Round VALUE up to the next highest integer that meets the
51 alignment ALIGN. */
52 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
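/* For example, CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16, while an
   already-aligned value is unchanged: CEIL_ROUND (16, 8) == 16. */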
53
54 /* Default max number of insns a function can have and still be inline.
55 This is overridden on RISC machines. */
56 #ifndef INTEGRATE_THRESHOLD
57 /* Inlining small functions might save more space than not inlining at
58 all. Assume 1 instruction for the call and 1.5 insns per argument. */
59 #define INTEGRATE_THRESHOLD(DECL) \
60 (optimize_size \
61 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL))) / 2) \
62 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
63 #endif
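/* For example, a two-argument function is allowed 1 + (3 * 2) / 2 == 4
   insns when optimizing for size, and 8 * (8 + 2) == 80 insns otherwise. */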
64 \f
65 static rtvec initialize_for_inline PARAMS ((tree));
66 static void note_modified_parmregs PARAMS ((rtx, rtx, void *));
67 static void integrate_parm_decls PARAMS ((tree, struct inline_remap *,
68 rtvec));
69 static tree integrate_decl_tree PARAMS ((tree,
70 struct inline_remap *));
71 static void subst_constants PARAMS ((rtx *, rtx,
72 struct inline_remap *, int));
73 static void set_block_origin_self PARAMS ((tree));
74 static void set_decl_origin_self PARAMS ((tree));
75 static void set_block_abstract_flags PARAMS ((tree, int));
76 static void process_reg_param PARAMS ((struct inline_remap *, rtx,
77 rtx));
78 void set_decl_abstract_flags PARAMS ((tree, int));
79 static rtx expand_inline_function_eh_labelmap PARAMS ((rtx));
80 static void mark_stores PARAMS ((rtx, rtx, void *));
81 static int compare_blocks PARAMS ((const PTR, const PTR));
82 static int find_block PARAMS ((const PTR, const PTR));
83
84 /* The maximum number of instructions accepted for inlining a
85 function. Increasing values mean more aggressive inlining.
86 This affects currently only functions explicitly marked as
87 inline (or methods defined within the class definition for C++).
88 The default value of 10000 is arbitrary but high to match the
89 previously unlimited gcc capabilities. */
90
91 int inline_max_insns = 10000;
92
93 /* Used by copy_rtx_and_substitute; this indicates whether the function is
94 called for the purpose of inlining or some other purpose (e.g. loop
95 unrolling). This affects how constant pool references are handled.
96 This variable contains the struct function for the inlined function. */
97 static struct function *inlining = 0;
98 \f
99 /* Returns the Ith entry in the label_map contained in MAP. If the
100 Ith entry has not yet been set, return a fresh label. This function
101 performs a lazy initialization of label_map, thereby avoiding huge memory
102 explosions when the label_map gets very large. */
103
104 rtx
105 get_label_from_map (map, i)
106 struct inline_remap *map;
107 int i;
108 {
109 rtx x = map->label_map[i];
110
111 if (x == NULL_RTX)
112 x = map->label_map[i] = gen_label_rtx ();
113
114 return x;
115 }
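/* A sketch of the lazy behavior (assuming MAP's label_map has room for
   index I): the first get_label_from_map (map, i) call creates and
   records a fresh CODE_LABEL; later calls return that same rtx. */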
116
117 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
118 is safe and reasonable to integrate into other functions.
119 Nonzero means value is a warning msgid with a single %s
120 for the function's name. */
121
122 const char *
123 function_cannot_inline_p (fndecl)
124 register tree fndecl;
125 {
126 register rtx insn;
127 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
128
129 /* For functions marked as inline increase the maximum size to
130 inline_max_insns (-finline-limit-<n>). For regular functions
131 use the limit given by INTEGRATE_THRESHOLD. */
132
133 int max_insns = (DECL_INLINE (fndecl))
134 ? (inline_max_insns
135 + 8 * list_length (DECL_ARGUMENTS (fndecl)))
136 : INTEGRATE_THRESHOLD (fndecl);
137
138 register int ninsns = 0;
139 register tree parms;
140 rtx result;
141
142 /* No inlines with varargs. */
143 if ((last && TREE_VALUE (last) != void_type_node)
144 || current_function_varargs)
145 return N_("varargs function cannot be inline");
146
147 if (current_function_calls_alloca)
148 return N_("function using alloca cannot be inline");
149
150 if (current_function_calls_setjmp)
151 return N_("function using setjmp cannot be inline");
152
153 if (current_function_contains_functions)
154 return N_("function with nested functions cannot be inline");
155
156 if (forced_labels)
157 return
158 N_("function with label addresses used in initializers cannot inline");
159
160 if (current_function_cannot_inline)
161 return current_function_cannot_inline;
162
163 /* If it's not even close, don't even look. */
164 if (get_max_uid () > 3 * max_insns)
165 return N_("function too large to be inline");
166
167 #if 0
168 /* Don't inline functions which do not specify a function prototype and
169 have BLKmode argument or take the address of a parameter. */
170 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
171 {
172 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
173 TREE_ADDRESSABLE (parms) = 1;
174 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
175 return N_("no prototype, and parameter address used; cannot be inline");
176 }
177 #endif
178
179 /* We can't inline functions that return structures
180 the old-fashioned PCC way, copying into a static block. */
181 if (current_function_returns_pcc_struct)
182 return N_("inline functions not supported for this return value type");
183
184 /* We can't inline functions that return structures of varying size. */
185 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
186 return N_("function with varying-size return value cannot be inline");
187
188 /* Cannot inline a function with a varying size argument or one that
189 receives a transparent union. */
190 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
191 {
192 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
193 return N_("function with varying-size parameter cannot be inline");
194 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
195 return N_("function with transparent union parameter cannot be inline");
196 }
197
198 if (get_max_uid () > max_insns)
199 {
200 for (ninsns = 0, insn = get_first_nonparm_insn ();
201 insn && ninsns < max_insns;
202 insn = NEXT_INSN (insn))
203 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
204 ninsns++;
205
206 if (ninsns >= max_insns)
207 return N_("function too large to be inline");
208 }
209
210 /* We will not inline a function which uses computed goto. The addresses of
211 its local labels, which may be tucked into global storage, are of course
212 not constant across instantiations, which causes unexpected behaviour. */
213 if (current_function_has_computed_jump)
214 return N_("function with computed jump cannot be inline");
215
216 /* We cannot inline a nested function that jumps to a nonlocal label. */
217 if (current_function_has_nonlocal_goto)
218 return N_("function with nonlocal goto cannot be inline");
219
220 /* This is a hack, until the inliner is taught about eh regions at
221 the start of the function. */
222 for (insn = get_insns ();
223 insn
224 && ! (GET_CODE (insn) == NOTE
225 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
226 insn = NEXT_INSN (insn))
227 {
228 if (insn && GET_CODE (insn) == NOTE
229 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
230 return N_("function with complex parameters cannot be inline");
231 }
232
233 /* We can't inline functions that return a PARALLEL rtx. */
234 result = DECL_RTL (DECL_RESULT (fndecl));
235 if (result && GET_CODE (result) == PARALLEL)
236 return N_("inline functions not supported for this return value type");
237
238 return 0;
239 }
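/* A sketch of how a caller might act on the result; an illustration,
   not the actual call site:

     const char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       {
         DECL_INLINE (fndecl) = 0;
         warning_with_decl (fndecl, lose);
       }
*/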
240 \f
241 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
242 Zero for a reg that isn't a parm's home.
243 Only reg numbers less than max_parm_reg are mapped here. */
244 static tree *parmdecl_map;
245
246 /* In save_for_inline, nonzero if past the parm-initialization insns. */
247 static int in_nonparm_insns;
248 \f
249 /* Subroutine for `save_for_inline_nocopy'. Performs initialization
250 needed to save FNDECL's insns and info for future inline expansion. */
251
252 static rtvec
253 initialize_for_inline (fndecl)
254 tree fndecl;
255 {
256 int i;
257 rtvec arg_vector;
258 tree parms;
259
260 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
261 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
262 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
263
264 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
265 parms;
266 parms = TREE_CHAIN (parms), i++)
267 {
268 rtx p = DECL_RTL (parms);
269
270 /* If we have (mem (addressof (mem ...))), use the inner MEM since
271 otherwise the copy_rtx call below will not unshare the MEM since
272 it shares ADDRESSOF. */
273 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
274 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
275 p = XEXP (XEXP (p, 0), 0);
276
277 RTVEC_ELT (arg_vector, i) = p;
278
279 if (GET_CODE (p) == REG)
280 parmdecl_map[REGNO (p)] = parms;
281 else if (GET_CODE (p) == CONCAT)
282 {
283 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
284 rtx pimag = gen_imagpart (GET_MODE (preal), p);
285
286 if (GET_CODE (preal) == REG)
287 parmdecl_map[REGNO (preal)] = parms;
288 if (GET_CODE (pimag) == REG)
289 parmdecl_map[REGNO (pimag)] = parms;
290 }
291
292 /* This flag is cleared later
293 if the function ever modifies the value of the parm. */
294 TREE_READONLY (parms) = 1;
295 }
296
297 return arg_vector;
298 }
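/* For instance, for a hypothetical f (int a, double b), the returned
   arg_vector parallels DECL_ARGUMENTS: element 0 is A's DECL_RTL and
   element 1 is B's, while parmdecl_map lets note_modified_parmregs
   recover the PARM_DECL living in a given pseudo register. */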
299
300 /* Copy NODE (which must be a DECL, but not a PARM_DECL). The DECL
301 originally was in the FROM_FN, but now it will be in the
302 TO_FN. */
303
304 tree
305 copy_decl_for_inlining (decl, from_fn, to_fn)
306 tree decl;
307 tree from_fn;
308 tree to_fn;
309 {
310 tree copy;
311
312 /* Copy the declaration. */
313 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
314 /* For a parameter, we must make an equivalent VAR_DECL, not a
315 new PARM_DECL. */
316 copy = build_decl (VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
317 else
318 {
319 copy = copy_node (decl);
320 if (DECL_LANG_SPECIFIC (copy))
321 copy_lang_decl (copy);
322
323 /* TREE_ADDRESSABLE isn't used to indicate that a label's
324 address has been taken; it's for internal bookkeeping in
325 expand_goto_internal. */
326 if (TREE_CODE (copy) == LABEL_DECL)
327 TREE_ADDRESSABLE (copy) = 0;
328 }
329
330 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
331 declaration inspired this copy. */
332 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
333
334 /* The new variable/label has no RTL, yet. */
335 DECL_RTL (copy) = NULL_RTX;
336
337 /* These args would always appear unused, if not for this. */
338 TREE_USED (copy) = 1;
339
340 /* Set the context for the new declaration. */
341 if (!DECL_CONTEXT (decl))
342 /* Globals stay global. */
343 ;
344 else if (DECL_CONTEXT (decl) != from_fn)
345 /* Things that weren't in the scope of the function we're inlining
346 from aren't in the scope we're inlining into, either. */
347 ;
348 else if (TREE_STATIC (decl))
349 /* Function-scoped static variables should stay in the original
350 function. */
351 ;
352 else
353 /* Ordinary automatic local variables are now in the scope of the
354 new function. */
355 DECL_CONTEXT (copy) = to_fn;
356
357 return copy;
358 }
359
360 /* Make the insns and PARM_DECLs of the current function permanent
361 and record other information in DECL_SAVED_INSNS to allow inlining
362 of this function in subsequent calls.
363
364 This routine need not copy any insns because we are not going
365 to immediately compile the insns in the insn chain. There
366 are two cases when we would compile the insns for FNDECL:
367 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
368 be output at the end of other compilation, because somebody took
369 its address. In the first case, the insns of FNDECL are copied
370 as it is expanded inline, so FNDECL's saved insns are not
371 modified. In the second case, FNDECL is used for the last time,
372 so modifying the rtl is not a problem.
373
374 We don't have to worry about FNDECL being inline expanded by
375 other functions which are written at the end of compilation
376 because flag_no_inline is turned on when we begin writing
377 functions at the end of compilation. */
378
379 void
380 save_for_inline_nocopy (fndecl)
381 tree fndecl;
382 {
383 rtx insn;
384 rtvec argvec;
385 rtx first_nonparm_insn;
386
387 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
388 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
389 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
390 for the parms, prior to elimination of virtual registers.
391 These values are needed for substituting parms properly. */
392
393 parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
394
395 /* Make and emit a return-label if we have not already done so. */
396
397 if (return_label == 0)
398 {
399 return_label = gen_label_rtx ();
400 emit_label (return_label);
401 }
402
403 argvec = initialize_for_inline (fndecl);
404
405 /* If there are insns that copy parms from the stack into pseudo registers,
406 those insns are not copied. `expand_inline_function' must
407 emit the correct code to handle such things. */
408
409 insn = get_insns ();
410 if (GET_CODE (insn) != NOTE)
411 abort ();
412
413 /* Get the insn which signals the end of parameter setup code. */
414 first_nonparm_insn = get_first_nonparm_insn ();
415
416 /* Now just scan the chain of insns to see what happens to our
417 PARM_DECLs. If a PARM_DECL is used but never modified, we
418 can substitute its rtl directly when expanding inline (and
419 perform constant folding when its incoming value is constant).
420 Otherwise, we have to copy its value into a new register and track
421 the new register's life. */
422 in_nonparm_insns = 0;
423 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
424 {
425 if (insn == first_nonparm_insn)
426 in_nonparm_insns = 1;
427
428 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
429 /* Record what interesting things happen to our parameters. */
430 note_stores (PATTERN (insn), note_modified_parmregs, NULL);
431 }
432
433 /* We have now allocated all that needs to be allocated permanently
434 on the rtx obstack. Set our high-water mark, so that we
435 can free the rest of this when the time comes. */
436
437 preserve_data ();
438
439 cfun->inl_max_label_num = max_label_num ();
440 cfun->inl_last_parm_insn = cfun->x_last_parm_insn;
441 cfun->original_arg_vector = argvec;
442 cfun->original_decl_initial = DECL_INITIAL (fndecl);
443 DECL_SAVED_INSNS (fndecl) = cfun;
444
445 /* Clean up. */
446 free (parmdecl_map);
447 }
448 \f
449 /* Note whether a parameter is modified or not. */
450
451 static void
452 note_modified_parmregs (reg, x, data)
453 rtx reg;
454 rtx x ATTRIBUTE_UNUSED;
455 void *data ATTRIBUTE_UNUSED;
456 {
457 if (GET_CODE (reg) == REG && in_nonparm_insns
458 && REGNO (reg) < max_parm_reg
459 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
460 && parmdecl_map[REGNO (reg)] != 0)
461 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
462 }
463
464 /* Unfortunately, we need a global copy of const_equiv map for communication
465 with a function called from note_stores. Be *very* careful that this
466 is used properly in the presence of recursion. */
467
468 varray_type global_const_equiv_varray;
469 \f
470 #define FIXED_BASE_PLUS_P(X) \
471 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
472 && GET_CODE (XEXP (X, 0)) == REG \
473 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
474 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
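/* For example, FIXED_BASE_PLUS_P matches (plus (reg virtual-stack-vars)
   (const_int 8)), but not a sum based on an ordinary pseudo or hard
   register, since only the virtual registers have fixed meanings here. */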
475
476 /* Called to set up a mapping for the case where a parameter is in a
477 register. If it is read-only and our argument is a constant, set up the
478 constant equivalence.
479
480 If LOC is REG_USERVAR_P, the usual case, COPY must also have that flag set
481 if it is a register.
482
483 Also, don't allow hard registers here; they might not be valid when
484 substituted into insns. */
485 static void
486 process_reg_param (map, loc, copy)
487 struct inline_remap *map;
488 rtx loc, copy;
489 {
490 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
491 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
492 && ! REG_USERVAR_P (copy))
493 || (GET_CODE (copy) == REG
494 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
495 {
496 rtx temp = copy_to_mode_reg (GET_MODE (loc), copy);
497 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
498 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
499 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
500 copy = temp;
501 }
502 map->reg_map[REGNO (loc)] = copy;
503 }
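/* For example, if LOC is a user-variable pseudo and COPY is a hard
   register or a SUBREG, the code above first moves COPY into a fresh
   pseudo, so reg_map never points at something unsafe to substitute. */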
504
505 /* Used by duplicate_eh_handlers to map labels for the exception table. */
506 static struct inline_remap *eif_eh_map;
507
508 static rtx
509 expand_inline_function_eh_labelmap (label)
510 rtx label;
511 {
512 int index = CODE_LABEL_NUMBER (label);
513 return get_label_from_map (eif_eh_map, index);
514 }
515
516 /* Compare two BLOCKs for qsort. The key we sort on is the
517 BLOCK_ABSTRACT_ORIGIN of the blocks. */
518
519 static int
520 compare_blocks (v1, v2)
521 const PTR v1;
522 const PTR v2;
523 {
524 tree b1 = *((tree *) v1);
525 tree b2 = *((tree *) v2);
526
527 return ((char *) BLOCK_ABSTRACT_ORIGIN (b1)
528 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
529 }
530
531 /* Compare two BLOCKs for bsearch. The first pointer corresponds to
532 an original block; the second to a remapped equivalent. */
533
534 static int
535 find_block (v1, v2)
536 const PTR v1;
537 const PTR v2;
538 {
539 tree b1 = (tree) v1;
540 tree b2 = *((tree *) v2);
541
542 return ((char *) b1 - (char *) BLOCK_ABSTRACT_ORIGIN (b2));
543 }
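/* The two comparators agree on ordering: a block_map sorted with
   compare_blocks can later be probed with bsearch and find_block,
   using an original (abstract origin) block as the search key. */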
544
545 /* Integrate the procedure defined by FNDECL. Note that this function
546 may wind up calling itself. Since the static variables are not
547 reentrant, we do not assign them until after the possibility
548 of recursion is eliminated.
549
550 If IGNORE is nonzero, do not produce a value.
551 Otherwise store the value in TARGET if it is nonzero and that is convenient.
552
553 Value is:
554 (rtx)-1 if we could not substitute the function
555 0 if we substituted it and it does not produce a value
556 else an rtx for where the value is stored. */
557
558 rtx
559 expand_inline_function (fndecl, parms, target, ignore, type,
560 structure_value_addr)
561 tree fndecl, parms;
562 rtx target;
563 int ignore;
564 tree type;
565 rtx structure_value_addr;
566 {
567 struct function *inlining_previous;
568 struct function *inl_f = DECL_SAVED_INSNS (fndecl);
569 tree formal, actual, block;
570 rtx parm_insns = inl_f->emit->x_first_insn;
571 rtx insns = (inl_f->inl_last_parm_insn
572 ? NEXT_INSN (inl_f->inl_last_parm_insn)
573 : parm_insns);
574 tree *arg_trees;
575 rtx *arg_vals;
576 rtx insn;
577 int max_regno;
578 register int i;
579 int min_labelno = inl_f->emit->x_first_label_num;
580 int max_labelno = inl_f->inl_max_label_num;
581 int nargs;
582 rtx local_return_label = 0;
583 rtx loc;
584 rtx stack_save = 0;
585 rtx temp;
586 struct inline_remap *map = 0;
587 #ifdef HAVE_cc0
588 rtx cc0_insn = 0;
589 #endif
590 rtvec arg_vector = (rtvec) inl_f->original_arg_vector;
591 rtx static_chain_value = 0;
592 int inl_max_uid;
593
594 /* The pointer used to track the true location of the memory used
595 for MAP->LABEL_MAP. */
596 rtx *real_label_map = 0;
597
598 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
599 max_regno = inl_f->emit->x_reg_rtx_no + 3;
600 if (max_regno < FIRST_PSEUDO_REGISTER)
601 abort ();
602
603 nargs = list_length (DECL_ARGUMENTS (fndecl));
604
605 /* Check that the parms' types match and that sufficient arguments were
606 passed. Since the appropriate conversions or default promotions have
607 already been applied, the machine modes should match exactly. */
608
609 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
610 formal;
611 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
612 {
613 tree arg;
614 enum machine_mode mode;
615
616 if (actual == 0)
617 return (rtx) (HOST_WIDE_INT) -1;
618
619 arg = TREE_VALUE (actual);
620 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
621
622 if (mode != TYPE_MODE (TREE_TYPE (arg))
623 /* If they are block mode, the types should match exactly.
624 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
625 which could happen if the parameter has incomplete type. */
626 || (mode == BLKmode
627 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
628 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
629 return (rtx) (HOST_WIDE_INT) -1;
630 }
631
632 /* Extra arguments are valid, but will be ignored below, so we must
633 evaluate them here for side-effects. */
634 for (; actual; actual = TREE_CHAIN (actual))
635 expand_expr (TREE_VALUE (actual), const0_rtx,
636 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
637
638 /* Expand the function arguments. Do this first so that any
639 new registers get created before we allocate the maps. */
640
641 arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
642 arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
643
644 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
645 formal;
646 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
647 {
648 /* Actual parameter, converted to the type of the argument within the
649 function. */
650 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
651 /* Mode of the variable used within the function. */
652 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
653 int invisiref = 0;
654
655 arg_trees[i] = arg;
656 loc = RTVEC_ELT (arg_vector, i);
657
658 /* If this is an object passed by invisible reference, we copy the
659 object into a stack slot and save its address. If this will go
660 into memory, we do nothing now. Otherwise, we just expand the
661 argument. */
662 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
663 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
664 {
665 rtx stack_slot
666 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
667 int_size_in_bytes (TREE_TYPE (arg)), 1);
668 MEM_SET_IN_STRUCT_P (stack_slot,
669 AGGREGATE_TYPE_P (TREE_TYPE (arg)));
670
671 store_expr (arg, stack_slot, 0);
672
673 arg_vals[i] = XEXP (stack_slot, 0);
674 invisiref = 1;
675 }
676 else if (GET_CODE (loc) != MEM)
677 {
678 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
679 /* The mode of LOC and ARG can differ if LOC was a variable
680 that had its mode promoted via PROMOTE_MODE. */
681 arg_vals[i] = convert_modes (GET_MODE (loc),
682 TYPE_MODE (TREE_TYPE (arg)),
683 expand_expr (arg, NULL_RTX, mode,
684 EXPAND_SUM),
685 TREE_UNSIGNED (TREE_TYPE (formal)));
686 else
687 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
688 }
689 else
690 arg_vals[i] = 0;
691
692 if (arg_vals[i] != 0
693 && (! TREE_READONLY (formal)
694 /* If the parameter is not read-only, copy our argument through
695 a register. Also, we cannot use ARG_VALS[I] if it overlaps
696 TARGET in any way. In the inline function, they will likely
697 be two different pseudos, and `safe_from_p' will make all
698 sorts of smart assumptions about their not conflicting.
699 But if ARG_VALS[I] overlaps TARGET, these assumptions are
700 wrong, so put ARG_VALS[I] into a fresh register.
701 Don't worry about invisible references, since their stack
702 temps will never overlap the target. */
703 || (target != 0
704 && ! invisiref
705 && (GET_CODE (arg_vals[i]) == REG
706 || GET_CODE (arg_vals[i]) == SUBREG
707 || GET_CODE (arg_vals[i]) == MEM)
708 && reg_overlap_mentioned_p (arg_vals[i], target))
709 /* ??? We must always copy a SUBREG into a REG, because it might
710 get substituted into an address, and not all ports correctly
711 handle SUBREGs in addresses. */
712 || (GET_CODE (arg_vals[i]) == SUBREG)))
713 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
714
715 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
716 && POINTER_TYPE_P (TREE_TYPE (formal)))
717 mark_reg_pointer (arg_vals[i],
718 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
719 / BITS_PER_UNIT));
720 }
721
722 /* Allocate the structures we use to remap things. */
723
724 map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
725 map->fndecl = fndecl;
726
727 VARRAY_TREE_INIT (map->block_map, 10, "block_map");
728 map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
729
730 /* We used to use alloca here, but the size of what it would try to
731 allocate would occasionally cause it to exceed the stack limit and
732 cause unpredictable core dumps. */
733 real_label_map
734 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
735 map->label_map = real_label_map;
736
737 inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
738 map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
739 map->min_insnno = 0;
740 map->max_insnno = inl_max_uid;
741
742 map->integrating = 1;
743
744 /* const_equiv_varray maps pseudos in our routine to constants, so
745 it needs to be large enough for all our pseudos. This is the
746 number we are currently using plus the number in the called
747 routine, plus 15 for each arg, five to compute the virtual frame
748 pointer, and five for the return value. This should be enough
749 for most cases. We do not reference entries outside the range of
750 the map.
751
752 ??? These numbers are quite arbitrary and were obtained by
753 experimentation. At some point, we should try to allocate the
754 table after all the parameters are set up so we can more accurately
755 estimate the number of pseudos we will need. */
756
757 VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray,
758 (max_reg_num ()
759 + (max_regno - FIRST_PSEUDO_REGISTER)
760 + 15 * nargs
761 + 10),
762 "expand_inline_function");
763 map->const_age = 0;
764
765 /* Record the current insn in case we have to set up pointers to frame
766 and argument memory blocks. If there are no insns yet, add a dummy
767 insn that can be used as an insertion point. */
768 map->insns_at_start = get_last_insn ();
769 if (map->insns_at_start == 0)
770 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
771
772 map->regno_pointer_flag = inl_f->emit->regno_pointer_flag;
773 map->regno_pointer_align = inl_f->emit->regno_pointer_align;
774
775 /* Update the outgoing argument size to allow for those in the inlined
776 function. */
777 if (inl_f->outgoing_args_size > current_function_outgoing_args_size)
778 current_function_outgoing_args_size = inl_f->outgoing_args_size;
779
780 /* If the inline function needs to make PIC references, that means
781 that this function's PIC offset table must be used. */
782 if (inl_f->uses_pic_offset_table)
783 current_function_uses_pic_offset_table = 1;
784
785 /* If this function needs a context, set it up. */
786 if (inl_f->needs_context)
787 static_chain_value = lookup_static_chain (fndecl);
788
789 if (GET_CODE (parm_insns) == NOTE
790 && NOTE_LINE_NUMBER (parm_insns) > 0)
791 {
792 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
793 NOTE_LINE_NUMBER (parm_insns));
794 if (note)
795 RTX_INTEGRATED_P (note) = 1;
796 }
797
798 /* Process each argument. For each, set up things so that the function's
799 reference to the argument will refer to the argument being passed.
800 We only replace REG with REG here. Any simplifications are done
801 via const_equiv_map.
802
803 We make two passes: In the first, we deal with parameters that will
804 be placed into registers, since we need to ensure that the allocated
805 register number fits in const_equiv_map. Then we store all non-register
806 parameters into their memory location. */
807
808 /* Don't try to free temp stack slots here, because we may put one of the
809 parameters into a temp stack slot. */
810
811 for (i = 0; i < nargs; i++)
812 {
813 rtx copy = arg_vals[i];
814
815 loc = RTVEC_ELT (arg_vector, i);
816
817 /* There are four cases, each handled separately. */
818 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
819 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
820 {
821 /* This must be an object passed by invisible reference (it could
822 also be a variable-sized object, but we forbid inlining functions
823 with variable-sized arguments). COPY is the address of the
824 actual value (this computation will cause it to be copied). We
825 map that address for the register, noting the actual address as
826 an equivalent in case it can be substituted into the insns. */
827
828 if (GET_CODE (copy) != REG)
829 {
830 temp = copy_addr_to_reg (copy);
831 if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
832 SET_CONST_EQUIV_DATA (map, temp, copy, CONST_AGE_PARM);
833 copy = temp;
834 }
835 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
836 }
837 else if (GET_CODE (loc) == MEM)
838 {
839 /* This is the case of a parameter that lives in memory. It
840 will live in the block we allocate in the called routine's
841 frame that simulates the incoming argument area. Do nothing
842 with the parameter now; we will call store_expr later. In
843 this case, however, we must ensure that the virtual stack and
844 incoming arg rtx values are expanded now so that we can be
845 sure we have enough slots in the const equiv map since the
846 store_expr call can easily blow the size estimate. */
847 if (DECL_FRAME_SIZE (fndecl) != 0)
848 copy_rtx_and_substitute (virtual_stack_vars_rtx, map, 0);
849
850 if (DECL_SAVED_INSNS (fndecl)->args_size != 0)
851 copy_rtx_and_substitute (virtual_incoming_args_rtx, map, 0);
852 }
853 else if (GET_CODE (loc) == REG)
854 process_reg_param (map, loc, copy);
855 else if (GET_CODE (loc) == CONCAT)
856 {
857 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
858 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
859 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
860 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
861
862 process_reg_param (map, locreal, copyreal);
863 process_reg_param (map, locimag, copyimag);
864 }
865 else
866 abort ();
867 }
868
869 /* Tell copy_rtx_and_substitute to handle constant pool SYMBOL_REFs
870 specially. This function can be called recursively, so we need to
871 save the previous value. */
872 inlining_previous = inlining;
873 inlining = inl_f;
874
875 /* Now do the parameters that will be placed in memory. */
876
877 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
878 formal; formal = TREE_CHAIN (formal), i++)
879 {
880 loc = RTVEC_ELT (arg_vector, i);
881
882 if (GET_CODE (loc) == MEM
883 /* Exclude case handled above. */
884 && ! (GET_CODE (XEXP (loc, 0)) == REG
885 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
886 {
887 rtx note = emit_note (DECL_SOURCE_FILE (formal),
888 DECL_SOURCE_LINE (formal));
889 if (note)
890 RTX_INTEGRATED_P (note) = 1;
891
892 /* Compute the address in the area we reserved and store the
893 value there. */
894 temp = copy_rtx_and_substitute (loc, map, 1);
895 subst_constants (&temp, NULL_RTX, map, 1);
896 apply_change_group ();
897 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
898 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
899 store_expr (arg_trees[i], temp, 0);
900 }
901 }
902
903 /* Deal with the places that the function puts its result.
904 We are driven by what is placed into DECL_RESULT.
905
906 Initially, we assume that we don't have any special handling for
907 REG_FUNCTION_VALUE_P. */
908
909 map->inline_target = 0;
910 loc = DECL_RTL (DECL_RESULT (fndecl));
911
912 if (TYPE_MODE (type) == VOIDmode)
913 /* There is no return value to worry about. */
914 ;
915 else if (GET_CODE (loc) == MEM)
916 {
917 if (GET_CODE (XEXP (loc, 0)) == ADDRESSOF)
918 {
919 temp = copy_rtx_and_substitute (loc, map, 1);
920 subst_constants (&temp, NULL_RTX, map, 1);
921 apply_change_group ();
922 target = temp;
923 }
924 else
925 {
926 if (! structure_value_addr
927 || ! aggregate_value_p (DECL_RESULT (fndecl)))
928 abort ();
929
930 /* Pass the function the address in which to return a structure
931 value. Note that a constructor can cause someone to call us
932 with STRUCTURE_VALUE_ADDR, but the initialization takes place
933 via the first parameter, rather than the struct return address.
934
935 We have two cases: If the address is a simple register
936 indirect, use the mapping mechanism to point that register to
937 our structure return address. Otherwise, store the structure
938 return value into the place that it will be referenced from. */
939
940 if (GET_CODE (XEXP (loc, 0)) == REG)
941 {
942 temp = force_operand (structure_value_addr, NULL_RTX);
943 temp = force_reg (Pmode, temp);
944 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
945
946 if (CONSTANT_P (structure_value_addr)
947 || GET_CODE (structure_value_addr) == ADDRESSOF
948 || (GET_CODE (structure_value_addr) == PLUS
949 && (XEXP (structure_value_addr, 0)
950 == virtual_stack_vars_rtx)
951 && (GET_CODE (XEXP (structure_value_addr, 1))
952 == CONST_INT)))
953 {
954 SET_CONST_EQUIV_DATA (map, temp, structure_value_addr,
955 CONST_AGE_PARM);
956 }
957 }
958 else
959 {
960 temp = copy_rtx_and_substitute (loc, map, 1);
961 subst_constants (&temp, NULL_RTX, map, 0);
962 apply_change_group ();
963 emit_move_insn (temp, structure_value_addr);
964 }
965 }
966 }
967 else if (ignore)
968 /* We will ignore the result value, so don't look at its structure.
969 Note that preparations for an aggregate return value
970 do need to be made (above) even if it will be ignored. */
971 ;
972 else if (GET_CODE (loc) == REG)
973 {
974 /* The function returns an object in a register and we use the return
975 value. Set up our target for remapping. */
976
977 /* Machine mode the function was declared to return. */
978 enum machine_mode departing_mode = TYPE_MODE (type);
979 /* (Possibly wider) machine mode it actually computes
980 (for the sake of callers that fail to declare it right).
981 We have to use the mode of the result's RTL, rather than
982 its type, since expand_function_start may have promoted it. */
983 enum machine_mode arriving_mode
984 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
985 rtx reg_to_map;
986
987 /* Don't use MEMs as direct targets because on some machines
988 substituting a MEM for a REG makes invalid insns.
989 Let the combiner substitute the MEM if that is valid. */
990 if (target == 0 || GET_CODE (target) != REG
991 || GET_MODE (target) != departing_mode)
992 {
993 /* Don't make BLKmode registers. If this looks like
994 a BLKmode object being returned in a register, get
995 the mode from that, otherwise abort. */
996 if (departing_mode == BLKmode)
997 {
998 if (REG == GET_CODE (DECL_RTL (DECL_RESULT (fndecl))))
999 {
1000 departing_mode = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1001 arriving_mode = departing_mode;
1002 }
1003 else
1004 abort ();
1005 }
1006
1007 target = gen_reg_rtx (departing_mode);
1008 }
1009
1010 /* If function's value was promoted before return,
1011 avoid machine mode mismatch when we substitute INLINE_TARGET.
1012 But TARGET is what we will return to the caller. */
1013 if (arriving_mode != departing_mode)
1014 {
1015 /* Avoid creating a paradoxical subreg wider than
1016 BITS_PER_WORD, since that is illegal. */
1017 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1018 {
1019 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1020 GET_MODE_BITSIZE (arriving_mode)))
1021 /* Maybe this could be handled by using convert_move ()? */
1022 abort ();
1023 reg_to_map = gen_reg_rtx (arriving_mode);
1024 target = gen_lowpart (departing_mode, reg_to_map);
1025 }
1026 else
1027 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1028 }
1029 else
1030 reg_to_map = target;
1031
1032 /* Usually, the result value is the machine's return register.
1033 Sometimes it may be a pseudo. Handle both cases. */
1034 if (REG_FUNCTION_VALUE_P (loc))
1035 map->inline_target = reg_to_map;
1036 else
1037 map->reg_map[REGNO (loc)] = reg_to_map;
1038 }
1039 else
1040 abort ();
1041
1042 /* Initialize label_map. get_label_from_map will actually make
1043 the labels. */
1044 bzero ((char *) &map->label_map [min_labelno],
1045 (max_labelno - min_labelno) * sizeof (rtx));
1046
1047 /* Make copies of the decls of the symbols in the inline function, so that
1048 the copies of the variables get declared in the current function. Set
1049 up things so that lookup_static_chain knows to interpret registers
1050 in SAVE_EXPRs for TYPE_SIZEs as local. */
1051 inline_function_decl = fndecl;
1052 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1053 block = integrate_decl_tree (inl_f->original_decl_initial, map);
1054 BLOCK_ABSTRACT_ORIGIN (block) = DECL_ORIGIN (fndecl);
1055 inline_function_decl = 0;
1056
1057 /* Make a fresh binding contour that we can easily remove. Do this after
1058 expanding our arguments so cleanups are properly scoped. */
1059 expand_start_bindings_and_block (0, block);
1060
1061 /* Sort the block-map so that it will be easy to find remapped
1062 blocks later. */
1063 qsort (&VARRAY_TREE (map->block_map, 0),
1064 map->block_map->elements_used,
1065 sizeof (tree),
1066 compare_blocks);
1067
1068 /* Perform postincrements before actually calling the function. */
1069 emit_queue ();
1070
1071 /* Clean up stack so that variables might have smaller offsets. */
1072 do_pending_stack_adjust ();
1073
1074 /* Save a copy of the location of const_equiv_varray for
1075 mark_stores, called via note_stores. */
1076 global_const_equiv_varray = map->const_equiv_varray;
1077
1078 /* If the called function does an alloca, save and restore the
1079 stack pointer around the call. This saves stack space, but
1080 also is required if this inline is being done between two
1081 pushes. */
1082 if (inl_f->calls_alloca)
1083 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1084
1085 /* Now copy the insns one by one. Do this in two passes, first the insns and
1086 then their REG_NOTES, just like save_for_inline. */
1087
1088 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1089
1090 for (insn = insns; insn; insn = NEXT_INSN (insn))
1091 {
1092 rtx copy, pattern, set;
1093
1094 map->orig_asm_operands_vector = 0;
1095
1096 switch (GET_CODE (insn))
1097 {
1098 case INSN:
1099 pattern = PATTERN (insn);
1100 set = single_set (insn);
1101 copy = 0;
1102 if (GET_CODE (pattern) == USE
1103 && GET_CODE (XEXP (pattern, 0)) == REG
1104 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1105 /* The (USE (REG n)) at return from the function should
1106 be ignored since we are changing (REG n) into
1107 inline_target. */
1108 break;
1109
1110 /* If the inline fn needs eh context, make sure that
1111 the current fn has one. */
1112 if (GET_CODE (pattern) == USE
1113 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1114 get_eh_context ();
1115
1116 /* Ignore setting a function value that we don't want to use. */
1117 if (map->inline_target == 0
1118 && set != 0
1119 && GET_CODE (SET_DEST (set)) == REG
1120 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1121 {
1122 if (volatile_refs_p (SET_SRC (set)))
1123 {
1124 rtx new_set;
1125
1126 /* If we must not delete the source,
1127 load it into a new temporary. */
1128 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1129
1130 new_set = single_set (copy);
1131 if (new_set == 0)
1132 abort ();
1133
1134 SET_DEST (new_set)
1135 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1136 }
1137 /* If the source and destination are the same and it
1138 has a note on it, keep the insn. */
1139 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1140 && REG_NOTES (insn) != 0)
1141 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1142 else
1143 break;
1144 }
1145
1146 /* If this is setting the static chain rtx, omit it. */
1147 else if (static_chain_value != 0
1148 && set != 0
1149 && GET_CODE (SET_DEST (set)) == REG
1150 && rtx_equal_p (SET_DEST (set),
1151 static_chain_incoming_rtx))
1152 break;
1153
1154 /* If this is setting the static chain pseudo, set it from
1155 the value we want to give it instead. */
1156 else if (static_chain_value != 0
1157 && set != 0
1158 && rtx_equal_p (SET_SRC (set),
1159 static_chain_incoming_rtx))
1160 {
1161 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map, 1);
1162
1163 copy = emit_move_insn (newdest, static_chain_value);
1164 static_chain_value = 0;
1165 }
1166
1167 /* If this is setting the virtual stack vars register, this must
1168 be the code at the handler for a builtin longjmp. The value
1169 saved in the setjmp buffer will be the address of the frame
1170 we've made for this inlined instance within our frame. But we
1171 know the offset of that value so we can use it to reconstruct
1172 our virtual stack vars register from that value. If we are
1173 copying it from the stack pointer, leave it unchanged. */
1174 else if (set != 0
1175 && rtx_equal_p (SET_DEST (set), virtual_stack_vars_rtx))
1176 {
1177 HOST_WIDE_INT offset;
1178 temp = map->reg_map[REGNO (SET_DEST (set))];
1179 temp = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1180 REGNO (temp)).rtx;
1181
1182 if (rtx_equal_p (temp, virtual_stack_vars_rtx))
1183 offset = 0;
1184 else if (GET_CODE (temp) == PLUS
1185 && rtx_equal_p (XEXP (temp, 0), virtual_stack_vars_rtx)
1186 && GET_CODE (XEXP (temp, 1)) == CONST_INT)
1187 offset = INTVAL (XEXP (temp, 1));
1188 else
1189 abort ();
1190
1191 if (rtx_equal_p (SET_SRC (set), stack_pointer_rtx))
1192 temp = SET_SRC (set);
1193 else
1194 temp = force_operand (plus_constant (SET_SRC (set),
1195 - offset),
1196 NULL_RTX);
1197
1198 copy = emit_move_insn (virtual_stack_vars_rtx, temp);
1199 }
1200
1201 else
1202 copy = emit_insn (copy_rtx_and_substitute (pattern, map, 0));
1203 /* REG_NOTES will be copied later. */
1204
1205 #ifdef HAVE_cc0
1206 /* If this insn is setting CC0, it may need to look at
1207 the insn that uses CC0 to see what type of insn it is.
1208 In that case, the call to recog via validate_change will
1209 fail. So don't substitute constants here. Instead,
1210 do it when we emit the following insn.
1211
1212 For example, see the pyr.md file. That machine has signed and
1213 unsigned compares. The compare patterns must check the
1214 following branch insn to see what kind of compare to
1215 emit.
1216
1217 If the previous insn set CC0, substitute constants on it as
1218 well. */
1219 if (sets_cc0_p (PATTERN (copy)) != 0)
1220 cc0_insn = copy;
1221 else
1222 {
1223 if (cc0_insn)
1224 try_constants (cc0_insn, map);
1225 cc0_insn = 0;
1226 try_constants (copy, map);
1227 }
1228 #else
1229 try_constants (copy, map);
1230 #endif
1231 break;
1232
1233 case JUMP_INSN:
1234 if (GET_CODE (PATTERN (insn)) == RETURN
1235 || (GET_CODE (PATTERN (insn)) == PARALLEL
1236 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1237 {
1238 if (local_return_label == 0)
1239 local_return_label = gen_label_rtx ();
1240 pattern = gen_jump (local_return_label);
1241 }
1242 else
1243 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1244
1245 copy = emit_jump_insn (pattern);
1246
1247 #ifdef HAVE_cc0
1248 if (cc0_insn)
1249 try_constants (cc0_insn, map);
1250 cc0_insn = 0;
1251 #endif
1252 try_constants (copy, map);
1253
1254 /* If this used to be a conditional jump insn whose branch
1255 direction is now known, we must do something special. */
1256 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1257 {
1258 #ifdef HAVE_cc0
1259 /* If the previous insn set cc0 for us, delete it. */
1260 if (sets_cc0_p (PREV_INSN (copy)))
1261 delete_insn (PREV_INSN (copy));
1262 #endif
1263
1264 /* If this is now a no-op, delete it. */
1265 if (map->last_pc_value == pc_rtx)
1266 {
1267 delete_insn (copy);
1268 copy = 0;
1269 }
1270 else
1271 /* Otherwise, this is an unconditional jump so we must put a
1272 BARRIER after it. We could do some dead code elimination
1273 here, but jump.c will do it just as well. */
1274 emit_barrier ();
1275 }
1276 break;
1277
1278 case CALL_INSN:
1279 pattern = copy_rtx_and_substitute (PATTERN (insn), map, 0);
1280 copy = emit_call_insn (pattern);
1281
1282 /* Because the USAGE information potentially contains objects other
1283 than hard registers, we need to copy it. */
1284 CALL_INSN_FUNCTION_USAGE (copy)
1285 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn),
1286 map, 0);
1287
1288 #ifdef HAVE_cc0
1289 if (cc0_insn)
1290 try_constants (cc0_insn, map);
1291 cc0_insn = 0;
1292 #endif
1293 try_constants (copy, map);
1294
1295 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1296 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1297 VARRAY_CONST_EQUIV (map->const_equiv_varray, i).rtx = 0;
1298 break;
1299
1300 case CODE_LABEL:
1301 copy = emit_label (get_label_from_map (map,
1302 CODE_LABEL_NUMBER (insn)));
1303 LABEL_NAME (copy) = LABEL_NAME (insn);
1304 map->const_age++;
1305 break;
1306
1307 case BARRIER:
1308 copy = emit_barrier ();
1309 break;
1310
1311 case NOTE:
1312 /* It is important to discard function-end and function-beg notes,
1313 so we have only one of each in the current function.
1314 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1315 deleted these in the copy used for continuing compilation,
1316 not the copy used for inlining). */
1317 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1318 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1319 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1320 {
1321 copy = emit_note (NOTE_SOURCE_FILE (insn),
1322 NOTE_LINE_NUMBER (insn));
1323 if (copy
1324 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
1325 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
1326 {
1327 rtx label
1328 = get_label_from_map (map, NOTE_EH_HANDLER (copy));
1329
1330 /* We have to duplicate the handlers for the original. */
1331 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
1332 {
1333 /* We need to duplicate the handlers for the EH region
1334 and we need to indicate where the label map is. */
1335 eif_eh_map = map;
1336 duplicate_eh_handlers (NOTE_EH_HANDLER (copy),
1337 CODE_LABEL_NUMBER (label),
1338 expand_inline_function_eh_labelmap);
1339 }
1340
1341 /* We have to forward these both to match the new exception
1342 region. */
1343 NOTE_EH_HANDLER (copy) = CODE_LABEL_NUMBER (label);
1344 }
1345 else if (copy
1346 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_BEG
1347 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_BLOCK_END)
1348 && NOTE_BLOCK (insn))
1349 {
1350 tree *mapped_block_p;
1351
1352 mapped_block_p
1353 = (tree *) bsearch (NOTE_BLOCK (insn),
1354 &VARRAY_TREE (map->block_map, 0),
1355 map->block_map->elements_used,
1356 sizeof (tree),
1357 find_block);
1358
1359 if (!mapped_block_p)
1360 abort ();
1361 else
1362 NOTE_BLOCK (copy) = *mapped_block_p;
1363 }
1364 }
1365 else
1366 copy = 0;
1367 break;
1368
1369 default:
1370 abort ();
1371 }
1372
1373 if (copy)
1374 RTX_INTEGRATED_P (copy) = 1;
1375
1376 map->insn_map[INSN_UID (insn)] = copy;
1377 }
1378
1379 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1380 from parameters can be substituted in. These are the only ones that
1381 are valid across the entire function. */
1382 map->const_age++;
1383 for (insn = insns; insn; insn = NEXT_INSN (insn))
1384 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1385 && map->insn_map[INSN_UID (insn)]
1386 && REG_NOTES (insn))
1387 {
1388 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map, 0);
1389
1390 /* We must also do subst_constants, in case one of our parameters
1391 has const type and constant value. */
1392 subst_constants (&tem, NULL_RTX, map, 0);
1393 apply_change_group ();
1394 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1395 }
1396
1397 if (local_return_label)
1398 emit_label (local_return_label);
1399
1400 /* Restore the stack pointer if we saved it above. */
1401 if (inl_f->calls_alloca)
1402 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1403
1404 if (! cfun->x_whole_function_mode_p)
1405 /* In statement-at-a-time mode, we just tell the front-end to add
1406 this block to the list of blocks at this binding level. We
1407 can't do it the way it's done for function-at-a-time mode since
1408 the superblocks have not been created yet. */
1409 insert_block (block);
1410 else
1411 {
1412 BLOCK_CHAIN (block)
1413 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
1414 BLOCK_CHAIN (DECL_INITIAL (current_function_decl)) = block;
1415 }
1416
1417 /* End the scope containing the copied formal parameter variables
1418 and copied LABEL_DECLs. We pass NULL_TREE for the variables list
1419 here so that expand_end_bindings will not check for unused
1420 variables. That's already been checked for when the inlined
1421 function was defined. */
1422 expand_end_bindings (NULL_TREE, 1, 1);
1423
1424 /* Must mark the line number note after inlined functions as a repeat, so
1425 that the test coverage code can avoid counting the call twice. This
1426 just tells the code to ignore the immediately following line note, since
1427 there already exists a copy of this note before the expanded inline call.
1428 This line number note is still needed for debugging though, so we can't
1429 delete it. */
1430 if (flag_test_coverage)
1431 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
1432
1433 emit_line_note (input_filename, lineno);
1434
1435 /* If the function returns a BLKmode object in a register, copy it
1436 out of the temp register into a BLKmode memory object. */
1437 if (target
1438 && TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
1439 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
1440 target = copy_blkmode_from_reg (0, target, TREE_TYPE (TREE_TYPE (fndecl)));
1441
1442 if (structure_value_addr)
1443 {
1444 target = gen_rtx_MEM (TYPE_MODE (type),
1445 memory_address (TYPE_MODE (type),
1446 structure_value_addr));
1447 MEM_SET_IN_STRUCT_P (target, 1);
1448 }
1449
1450 /* Make sure we free the things we explicitly allocated with xmalloc. */
1451 if (real_label_map)
1452 free (real_label_map);
1453 VARRAY_FREE (map->const_equiv_varray);
1454 free (map->reg_map);
1455 VARRAY_FREE (map->block_map);
1456 free (map->insn_map);
1457 free (map);
1458 free (arg_vals);
1459 free (arg_trees);
1460
1461 inlining = inlining_previous;
1462
1463 return target;
1464 }
1465 \f
1466 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1467 push all of those decls and give each one the corresponding home. */
1468
1469 static void
1470 integrate_parm_decls (args, map, arg_vector)
1471 tree args;
1472 struct inline_remap *map;
1473 rtvec arg_vector;
1474 {
1475 register tree tail;
1476 register int i;
1477
1478 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1479 {
1480 tree decl = copy_decl_for_inlining (tail, map->fndecl,
1481 current_function_decl);
1482 rtx new_decl_rtl
1483 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map, 1);
1484
1485 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1486 here, but that's going to require some more work. */
1487 /* DECL_INCOMING_RTL (decl) = ?; */
1488 /* Fully instantiate the address with the equivalent form so that the
1489 debugging information contains the actual register, instead of the
1490 virtual register. Do this by not passing an insn to
1491 subst_constants. */
1492 subst_constants (&new_decl_rtl, NULL_RTX, map, 1);
1493 apply_change_group ();
1494 DECL_RTL (decl) = new_decl_rtl;
1495 }
1496 }
1497
1498 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1499 current function a tree of contexts isomorphic to the one that is given.
1500
1501 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1502 registers used in the DECL_RTL field should be remapped. If it is zero,
1503 no mapping is necessary. */
1504
1505 static tree
1506 integrate_decl_tree (let, map)
1507 tree let;
1508 struct inline_remap *map;
1509 {
1510 tree t;
1511 tree new_block;
1512 tree *next;
1513
1514 new_block = make_node (BLOCK);
1515 VARRAY_PUSH_TREE (map->block_map, new_block);
1516 next = &BLOCK_VARS (new_block);
1517
1518 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1519 {
1520 tree d;
1521
1522 push_obstacks_nochange ();
1523 saveable_allocation ();
1524 d = copy_decl_for_inlining (t, map->fndecl, current_function_decl);
1525 pop_obstacks ();
1526
1527 if (DECL_RTL (t) != 0)
1528 {
1529 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map, 1);
1530
1531 /* Fully instantiate the address with the equivalent form so that the
1532 debugging information contains the actual register, instead of the
1533 virtual register. Do this by not passing an insn to
1534 subst_constants. */
1535 subst_constants (&DECL_RTL (d), NULL_RTX, map, 1);
1536 apply_change_group ();
1537 }
1538
1539 /* Add this declaration to the list of variables in the new
1540 block. */
1541 *next = d;
1542 next = &TREE_CHAIN (d);
1543 }
1544
1545 next = &BLOCK_SUBBLOCKS (new_block);
1546 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1547 {
1548 *next = integrate_decl_tree (t, map);
1549 BLOCK_SUPERCONTEXT (*next) = new_block;
1550 next = &BLOCK_CHAIN (*next);
1551 }
1552
1553 TREE_USED (new_block) = TREE_USED (let);
1554 BLOCK_ABSTRACT_ORIGIN (new_block) = let;
1555
1556 return new_block;
1557 }
1558 \f
1559 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
1560 except for those few rtx codes that are sharable.
1561
1562 We always return an rtx that is similar to that incoming rtx, with the
1563 exception of possibly changing a REG to a SUBREG or vice versa. No
1564 rtl is ever emitted.
1565
1566 If FOR_LHS is nonzero, it means we are processing something that will
1567 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
1568 inlining since we need to be conservative in how it is set for
1569 such cases.
1570
1571 Handle constants that need to be placed in the constant pool by
1572 calling `force_const_mem'. */
1573
1574 rtx
1575 copy_rtx_and_substitute (orig, map, for_lhs)
1576 register rtx orig;
1577 struct inline_remap *map;
1578 int for_lhs;
1579 {
1580 register rtx copy, temp;
1581 register int i, j;
1582 register RTX_CODE code;
1583 register enum machine_mode mode;
1584 register const char *format_ptr;
1585 int regno;
1586
1587 if (orig == 0)
1588 return 0;
1589
1590 code = GET_CODE (orig);
1591 mode = GET_MODE (orig);
1592
1593 switch (code)
1594 {
1595 case REG:
1596 /* If the stack pointer register shows up, it must be part of
1597 stack-adjustments (*not* because we eliminated the frame pointer!).
1598 Small hard registers are returned as-is. Pseudo-registers
1599 go through their `reg_map'. */
1600 regno = REGNO (orig);
1601 if (regno <= LAST_VIRTUAL_REGISTER
1602 || (map->integrating
1603 && DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer == orig))
1604 {
1605 /* Some hard registers are also mapped,
1606 but others are not translated. */
1607 if (map->reg_map[regno] != 0)
1608 return map->reg_map[regno];
1609
1610 /* If this is the virtual frame pointer, make space in current
1611 function's stack frame for the stack frame of the inline function.
1612
1613 Copy the address of this area into a pseudo. Map
1614 virtual_stack_vars_rtx to this pseudo and set up a constant
1615 equivalence for it to be the address. This will substitute the
1616 address into insns where it can be substituted and use the new
1617 pseudo where it can't. */
1618 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1619 {
1620 rtx loc, seq;
1621 int size = get_func_frame_size (DECL_SAVED_INSNS (map->fndecl));
1622
1623 #ifdef FRAME_GROWS_DOWNWARD
1624 /* In this case, virtual_stack_vars_rtx points to one byte
1625 higher than the top of the frame area. So make sure we
1626 allocate a big enough chunk to keep the frame pointer
1627 aligned like a real one. */
1628 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1629 #endif
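/* A worked instance of the rounding above: with BITS_PER_UNIT == 8 and
   BIGGEST_ALIGNMENT == 64, a 37-byte frame grows to
   CEIL_ROUND (37, 8) == ((37 + 7) & ~7) == 40 bytes, preserving the
   alignment a real frame pointer would have.  */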
1630 start_sequence ();
1631 loc = assign_stack_temp (BLKmode, size, 1);
1632 loc = XEXP (loc, 0);
1633 #ifdef FRAME_GROWS_DOWNWARD
1634 /* In this case, virtual_stack_vars_rtx points to one byte
1635 higher than the top of the frame area. So compute the offset
1636 to one byte higher than our substitute frame. */
1637 loc = plus_constant (loc, size);
1638 #endif
1639 map->reg_map[regno] = temp
1640 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1641
1642 #ifdef STACK_BOUNDARY
1643 mark_reg_pointer (map->reg_map[regno],
1644 STACK_BOUNDARY / BITS_PER_UNIT);
1645 #endif
1646
1647 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1648
1649 seq = gen_sequence ();
1650 end_sequence ();
1651 emit_insn_after (seq, map->insns_at_start);
1652 return temp;
1653 }
1654 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM
1655 || (map->integrating
1656 && (DECL_SAVED_INSNS (map->fndecl)->internal_arg_pointer
1657 == orig)))
1658 {
1659 /* Do the same for a block to contain any arguments referenced
1660 in memory. */
1661 rtx loc, seq;
1662 int size = DECL_SAVED_INSNS (map->fndecl)->args_size;
1663
1664 start_sequence ();
1665 loc = assign_stack_temp (BLKmode, size, 1);
1666 loc = XEXP (loc, 0);
1667 /* When arguments grow downward, the virtual incoming
1668 args pointer points to the top of the argument block,
1669 so the remapped location had better do the same. */
1670 #ifdef ARGS_GROW_DOWNWARD
1671 loc = plus_constant (loc, size);
1672 #endif
1673 map->reg_map[regno] = temp
1674 = force_reg (Pmode, force_operand (loc, NULL_RTX));
1675
1676 #ifdef STACK_BOUNDARY
1677 mark_reg_pointer (map->reg_map[regno],
1678 STACK_BOUNDARY / BITS_PER_UNIT);
1679 #endif
1680
1681 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
1682
1683 seq = gen_sequence ();
1684 end_sequence ();
1685 emit_insn_after (seq, map->insns_at_start);
1686 return temp;
1687 }
1688 else if (REG_FUNCTION_VALUE_P (orig))
1689 {
1690 /* This is a reference to the function return value. If
1691 the function doesn't have a return value, error. If the
1692 mode doesn't agree and it isn't BLKmode, make a SUBREG. */
1693 if (map->inline_target == 0)
1694 /* Must be unrolling loops or replicating code if we
1695 reach here, so return the register unchanged. */
1696 return orig;
1697 else if (GET_MODE (map->inline_target) != BLKmode
1698 && mode != GET_MODE (map->inline_target))
1699 return gen_lowpart (mode, map->inline_target);
1700 else
1701 return map->inline_target;
1702 }
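/* Illustration (register number invented): if the inline target is
   (reg:SI 80) but ORIG is referenced in HImode, gen_lowpart above
   typically produces something like (subreg:HI (reg:SI 80) 0), a
   correctly-moded view of the return register.  */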
1703 return orig;
1704 }
1705 if (map->reg_map[regno] == NULL)
1706 {
1707 map->reg_map[regno] = gen_reg_rtx (mode);
1708 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1709 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1710 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1711 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1712
1713 if (map->regno_pointer_flag[regno])
1714 mark_reg_pointer (map->reg_map[regno],
1715 map->regno_pointer_align[regno]);
1716 }
1717 return map->reg_map[regno];
1718
1719 case SUBREG:
1720 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
1721 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1722 if (GET_CODE (copy) == SUBREG)
1723 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
1724 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1725 else if (GET_CODE (copy) == CONCAT)
1726 {
1727 rtx retval = subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1);
1728
1729 if (GET_MODE (retval) == GET_MODE (orig))
1730 return retval;
1731 else
1732 return gen_rtx_SUBREG (GET_MODE (orig), retval,
1733 (SUBREG_WORD (orig) %
1734 (GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (orig)))
1735 / (unsigned) UNITS_PER_WORD)));
1736 }
1737 else
1738 return gen_rtx_SUBREG (GET_MODE (orig), copy,
1739 SUBREG_WORD (orig));
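/* Example of the nested-SUBREG collapsing above (register numbers
   invented): if ORIG is (subreg:QI (reg:SI 42) 0) and reg 42 was
   remapped to (subreg:SI (reg:DI 99) 1), the word numbers are summed
   and the result is the single (subreg:QI (reg:DI 99) 1).  */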
1740
1741 case ADDRESSOF:
1742 copy = gen_rtx_ADDRESSOF (mode,
1743 copy_rtx_and_substitute (XEXP (orig, 0),
1744 map, for_lhs),
1745 0, ADDRESSOF_DECL (orig));
1746 regno = ADDRESSOF_REGNO (orig);
1747 if (map->reg_map[regno])
1748 regno = REGNO (map->reg_map[regno]);
1749 else if (regno > LAST_VIRTUAL_REGISTER)
1750 {
1751 temp = XEXP (orig, 0);
1752 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
1753 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
1754 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
1755 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
1756 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1757
1758 if (map->regno_pointer_flag[regno])
1759 mark_reg_pointer (map->reg_map[regno],
1760 map->regno_pointer_align[regno]);
1761 regno = REGNO (map->reg_map[regno]);
1762 }
1763 ADDRESSOF_REGNO (copy) = regno;
1764 return copy;
1765
1766 case USE:
1767 case CLOBBER:
1768 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1769 to (use foo) if the original insn didn't have a subreg.
1770 Removing the subreg distorts the VAX movstrhi pattern
1771 by changing the mode of an operand. */
1772 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
1773 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
1774 copy = SUBREG_REG (copy);
1775 return gen_rtx_fmt_e (code, VOIDmode, copy);
1776
1777 case CODE_LABEL:
1778 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
1779 = LABEL_PRESERVE_P (orig);
1780 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
1781
1782 case LABEL_REF:
1783 copy
1784 = gen_rtx_LABEL_REF
1785 (mode,
1786 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1787 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
1788
1789 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1790
1791 /* The fact that this label was previously nonlocal does not mean
1792 it still is, so we must check if it is within the range of
1793 this function's labels. */
1794 LABEL_REF_NONLOCAL_P (copy)
1795 = (LABEL_REF_NONLOCAL_P (orig)
1796 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
1797 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
1798
1799 /* If we have made a nonlocal label local, it means that this
1800 inlined call will be referring to our nonlocal goto handler.
1801 So make sure we create one for this block; we normally would
1802 not since this is not otherwise considered a "call". */
1803 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
1804 function_call_count++;
1805
1806 return copy;
1807
1808 case PC:
1809 case CC0:
1810 case CONST_INT:
1811 return orig;
1812
1813 case SYMBOL_REF:
1814 /* Symbols which represent the address of a label stored in the constant
1815 pool must be modified to point to a constant pool entry for the
1816 remapped label. Otherwise, symbols are returned unchanged. */
1817 if (CONSTANT_POOL_ADDRESS_P (orig))
1818 {
1819 struct function *f = inlining ? inlining : cfun;
1820 rtx constant = get_pool_constant_for_function (f, orig);
1821 enum machine_mode const_mode = get_pool_mode_for_function (f, orig);
1822 if (inlining)
1823 {
1824 rtx temp = force_const_mem (const_mode,
1825 copy_rtx_and_substitute (constant,
1826 map, 0));
1827
1828 #if 0
1829 /* Legitimizing the address here is incorrect.
1830
1831 Since we had a SYMBOL_REF before, we can assume it is valid
1832 to have one in this position in the insn.
1833
1834 Also, change_address may create new registers. These
1835 registers will not have valid reg_map entries. This can
1836 cause try_constants() to fail because it assumes that all
1837 registers in the rtx have valid reg_map entries, and it may
1838 end up replacing one of these new registers with junk. */
1839
1840 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1841 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1842 #endif
1843
1844 temp = XEXP (temp, 0);
1845
1846 #ifdef POINTERS_EXTEND_UNSIGNED
1847 if (GET_MODE (temp) != GET_MODE (orig))
1848 temp = convert_memory_address (GET_MODE (orig), temp);
1849 #endif
1850 return temp;
1851 }
1852 else if (GET_CODE (constant) == LABEL_REF)
1853 return XEXP (force_const_mem
1854 (GET_MODE (orig),
1855 copy_rtx_and_substitute (constant, map, for_lhs)),
1856 0);
1857 }
1858 else
1859 if (SYMBOL_REF_NEED_ADJUST (orig))
1860 {
1861 eif_eh_map = map;
1862 return rethrow_symbol_map (orig,
1863 expand_inline_function_eh_labelmap);
1864 }
1865
1866 return orig;
1867
1868 case CONST_DOUBLE:
1869 /* We have to make a new copy of this CONST_DOUBLE because we don't want
1870 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1871 duplicate of a CONST_DOUBLE we have already seen. */
1872 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1873 {
1874 REAL_VALUE_TYPE d;
1875
1876 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1877 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
1878 }
1879 else
1880 return immed_double_const (CONST_DOUBLE_LOW (orig),
1881 CONST_DOUBLE_HIGH (orig), VOIDmode);
1882
1883 case CONST:
1884 /* Make new constant pool entry for a constant
1885 that was in the pool of the inline function. */
1886 if (RTX_INTEGRATED_P (orig))
1887 abort ();
1888 break;
1889
1890 case ASM_OPERANDS:
1891 /* If a single asm insn contains multiple output operands
1892 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1893 We must make sure that the copied insn continues to share it. */
1894 if (map->orig_asm_operands_vector == XVEC (orig, 3))
1895 {
1896 copy = rtx_alloc (ASM_OPERANDS);
1897 copy->volatil = orig->volatil;
1898 XSTR (copy, 0) = XSTR (orig, 0);
1899 XSTR (copy, 1) = XSTR (orig, 1);
1900 XINT (copy, 2) = XINT (orig, 2);
1901 XVEC (copy, 3) = map->copy_asm_operands_vector;
1902 XVEC (copy, 4) = map->copy_asm_constraints_vector;
1903 XSTR (copy, 5) = XSTR (orig, 5);
1904 XINT (copy, 6) = XINT (orig, 6);
1905 return copy;
1906 }
1907 break;
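/* For instance, a hypothetical two-output statement such as

	asm ("xyzzy %0,%1" : "=r" (a), "=r" (b));

   expands to two SETs whose ASM_OPERANDS share one operand vector in
   operand 3; the check above makes both copies share a single copied
   vector rather than duplicating it per output.  */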
1908
1909 case CALL:
1910 /* This is given special treatment because the first
1911 operand of a CALL is a (MEM ...) which may get
1912 forced into a register for cse. This is undesirable
1913 if function-address cse isn't wanted or if we won't do cse. */
1914 #ifndef NO_FUNCTION_CSE
1915 if (! (optimize && ! flag_no_function_cse))
1916 #endif
1917 return
1918 gen_rtx_CALL
1919 (GET_MODE (orig),
1920 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
1921 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
1922 map, 0)),
1923 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
1924 break;
1925
1926 #if 0
1927 /* Must be ifdefed out for loop unrolling to work. */
1928 case RETURN:
1929 abort ();
1930 #endif
1931
1932 case SET:
1933 /* If this is setting fp or ap, it means that we have a nonlocal goto.
1934 Adjust the setting by the offset of the area we made.
1935 If the nonlocal goto is into the current function,
1936 this will result in unnecessarily bad code, but should work. */
1937 if (SET_DEST (orig) == virtual_stack_vars_rtx
1938 || SET_DEST (orig) == virtual_incoming_args_rtx)
1939 {
1940 /* In case a translation hasn't occurred already, make one now. */
1941 rtx equiv_reg;
1942 rtx equiv_loc;
1943 HOST_WIDE_INT loc_offset;
1944
1945 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
1946 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
1947 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
1948 REGNO (equiv_reg)).rtx;
1949 loc_offset
1950 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
1951
1952 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
1953 force_operand
1954 (plus_constant
1955 (copy_rtx_and_substitute (SET_SRC (orig),
1956 map, 0),
1957 - loc_offset),
1958 NULL_RTX));
1959 }
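/* Concretely (with invented numbers): if the remapped register's
   constant equivalence is (plus (reg 117) (const_int 16)), then
   LOC_OFFSET is 16 and the copied source is offset by -16, so the
   nonlocal goto restores the address of the substitute area.  */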
1960 else
1961 return gen_rtx_SET (VOIDmode,
1962 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
1963 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
1964 break;
1965
1966 case MEM:
1967 if (inlining
1968 && GET_CODE (XEXP (orig, 0)) == SYMBOL_REF
1969 && CONSTANT_POOL_ADDRESS_P (XEXP (orig, 0)))
1970 {
1971 enum machine_mode const_mode
1972 = get_pool_mode_for_function (inlining, XEXP (orig, 0));
1973 rtx constant
1974 = get_pool_constant_for_function (inlining, XEXP (orig, 0));
1975
1976 constant = copy_rtx_and_substitute (constant, map, 0);
1977
1978 /* If this was an address of a constant pool entry that itself
1979 had to be placed in the constant pool, it might not be a
1980 valid address. So the recursive call might have turned it
1981 into a register. In that case, it isn't a constant any
1982 more, so return it. This has the potential of changing a
1983 MEM into a REG, but we'll assume that it is safe. */
1984 if (! CONSTANT_P (constant))
1985 return constant;
1986
1987 return validize_mem (force_const_mem (const_mode, constant));
1988 }
1989
1990 copy = rtx_alloc (MEM);
1991 PUT_MODE (copy, mode);
1992 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map, 0);
1993 MEM_COPY_ATTRIBUTES (copy, orig);
1994 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
1995 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
1996 return copy;
1997
1998 default:
1999 break;
2000 }
2001
2002 copy = rtx_alloc (code);
2003 PUT_MODE (copy, mode);
2004 copy->in_struct = orig->in_struct;
2005 copy->volatil = orig->volatil;
2006 copy->unchanging = orig->unchanging;
2007
2008 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2009
2010 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2011 {
2012 switch (*format_ptr++)
2013 {
2014 case '0':
2015 /* Copy this through the wide int field; that's safest. */
2016 X0WINT (copy, i) = X0WINT (orig, i);
2017 break;
2018
2019 case 'e':
2020 XEXP (copy, i)
2021 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
2022 break;
2023
2024 case 'u':
2025 /* Change any references to old-insns to point to the
2026 corresponding copied insns. */
2027 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2028 break;
2029
2030 case 'E':
2031 XVEC (copy, i) = XVEC (orig, i);
2032 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2033 {
2034 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2035 for (j = 0; j < XVECLEN (copy, i); j++)
2036 XVECEXP (copy, i, j)
2037 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
2038 map, for_lhs);
2039 }
2040 break;
2041
2042 case 'w':
2043 XWINT (copy, i) = XWINT (orig, i);
2044 break;
2045
2046 case 'i':
2047 XINT (copy, i) = XINT (orig, i);
2048 break;
2049
2050 case 's':
2051 XSTR (copy, i) = XSTR (orig, i);
2052 break;
2053
2054 case 't':
2055 XTREE (copy, i) = XTREE (orig, i);
2056 break;
2057
2058 default:
2059 abort ();
2060 }
2061 }
2062
2063 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2064 {
2065 map->orig_asm_operands_vector = XVEC (orig, 3);
2066 map->copy_asm_operands_vector = XVEC (copy, 3);
2067 map->copy_asm_constraints_vector = XVEC (copy, 4);
2068 }
2069
2070 return copy;
2071 }
2072 \f
2073 /* Substitute known constant values into INSN, if that is valid. */
2074
2075 void
2076 try_constants (insn, map)
2077 rtx insn;
2078 struct inline_remap *map;
2079 {
2080 int i;
2081
2082 map->num_sets = 0;
2083
2084 /* First try just updating addresses, then other things. This is
2085 important when we have something like the store of a constant
2086 into memory and we can update the memory address but the machine
2087 does not support a constant source. */
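/* For example (register numbers invented), given
   (set (mem (reg 60)) (reg 61)) where reg 60 is equivalent to a stack
   address and reg 61 to (const_int 5), the address-only pass can
   rewrite the MEM even on a machine whose store insn rejects a
   constant source; the second pass's substitutions, if the result
   fails to match, are backed out by apply_change_group while the
   address change survives.  */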
2088 subst_constants (&PATTERN (insn), insn, map, 1);
2089 apply_change_group ();
2090 subst_constants (&PATTERN (insn), insn, map, 0);
2091 apply_change_group ();
2092
2093 /* Show we don't know the value of anything stored or clobbered. */
2094 note_stores (PATTERN (insn), mark_stores, NULL);
2095 map->last_pc_value = 0;
2096 #ifdef HAVE_cc0
2097 map->last_cc0_value = 0;
2098 #endif
2099
2100 /* Set up any constant equivalences made in this insn. */
2101 for (i = 0; i < map->num_sets; i++)
2102 {
2103 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2104 {
2105 int regno = REGNO (map->equiv_sets[i].dest);
2106
2107 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
2108 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
2109 /* The following clause is a hack to make the case work where GNU C++
2110 reassigns a variable to make cse work right. */
2111 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
2112 regno).rtx,
2113 map->equiv_sets[i].equiv))
2114 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
2115 map->equiv_sets[i].equiv, map->const_age);
2116 }
2117 else if (map->equiv_sets[i].dest == pc_rtx)
2118 map->last_pc_value = map->equiv_sets[i].equiv;
2119 #ifdef HAVE_cc0
2120 else if (map->equiv_sets[i].dest == cc0_rtx)
2121 map->last_cc0_value = map->equiv_sets[i].equiv;
2122 #endif
2123 }
2124 }
2125 \f
2126 /* Substitute known constants for pseudo regs in the contents of LOC,
2127 which are part of INSN.
2128 If INSN is zero, the substitution should always be done (this is used to
2129 update DECL_RTL).
2130 These changes are taken out by try_constants if the result is not valid.
2131
2132 Note that we are more concerned with determining when the result of a SET
2133 is a constant, for further propagation, than actually inserting constants
2134 into insns; cse will do the latter task better.
2135
2136 This function is also used to adjust the address of items previously addressed
2137 via the virtual stack variable or virtual incoming arguments registers.
2138
2139 If MEMONLY is nonzero, only make changes inside a MEM. */
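/* For instance, integrate_decl_tree above passes NULL_RTX as the insn:
   subst_constants (&DECL_RTL (d), NULL_RTX, map, 1), so the DECL_RTL
   update is applied unconditionally rather than validated against an
   insn pattern.  */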
2140
2141 static void
2142 subst_constants (loc, insn, map, memonly)
2143 rtx *loc;
2144 rtx insn;
2145 struct inline_remap *map;
2146 int memonly;
2147 {
2148 rtx x = *loc;
2149 register int i, j;
2150 register enum rtx_code code;
2151 register const char *format_ptr;
2152 int num_changes = num_validated_changes ();
2153 rtx new = 0;
2154 enum machine_mode op0_mode = MAX_MACHINE_MODE;
2155
2156 code = GET_CODE (x);
2157
2158 switch (code)
2159 {
2160 case PC:
2161 case CONST_INT:
2162 case CONST_DOUBLE:
2163 case SYMBOL_REF:
2164 case CONST:
2165 case LABEL_REF:
2166 case ADDRESS:
2167 return;
2168
2169 #ifdef HAVE_cc0
2170 case CC0:
2171 if (! memonly)
2172 validate_change (insn, loc, map->last_cc0_value, 1);
2173 return;
2174 #endif
2175
2176 case USE:
2177 case CLOBBER:
2178 /* The only thing we can do with a USE or CLOBBER is possibly do
2179 some substitutions in a MEM within it. */
2180 if (GET_CODE (XEXP (x, 0)) == MEM)
2181 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
2182 return;
2183
2184 case REG:
2185 /* Substitute for parms and known constants. Don't replace
2186 hard regs used as user variables with constants. */
2187 if (! memonly)
2188 {
2189 int regno = REGNO (x);
2190 struct const_equiv_data *p;
2191
2192 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2193 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
2194 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
2195 p->rtx != 0)
2196 && p->age >= map->const_age)
2197 validate_change (insn, loc, p->rtx, 1);
2198 }
2199 return;
2200
2201 case SUBREG:
2202 /* SUBREG applied to something other than a reg
2203 should be treated as ordinary, since that must
2204 be a special hack and we don't know how to treat it specially.
2205 Consider for example mulsidi3 in m68k.md.
2206 Ordinary SUBREG of a REG needs this special treatment. */
2207 if (! memonly && GET_CODE (SUBREG_REG (x)) == REG)
2208 {
2209 rtx inner = SUBREG_REG (x);
2210 rtx new = 0;
2211
2212 /* We can't call subst_constants on &SUBREG_REG (x) because any
2213 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2214 see what is inside, try to form the new SUBREG and see if that is
2215 valid. We handle two cases: extracting a full word in an
2216 integral mode and extracting the low part. */
2217 subst_constants (&inner, NULL_RTX, map, 0);
2218
2219 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2220 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2221 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2222 new = operand_subword (inner, SUBREG_WORD (x), 0,
2223 GET_MODE (SUBREG_REG (x)));
2224
2225 cancel_changes (num_changes);
2226 if (new == 0 && subreg_lowpart_p (x))
2227 new = gen_lowpart_common (GET_MODE (x), inner);
2228
2229 if (new)
2230 validate_change (insn, loc, new, 1);
2231
2232 return;
2233 }
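/* Small worked case (numbers invented): for (subreg:QI (reg:SI 70) 0)
   where reg 70 is known to equal (const_int 300), the lowpart path
   yields gen_lowpart_common (QImode, (const_int 300)) == (const_int 44),
   since 300 is 0x12c and the low byte is 0x2c.  */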
2234 break;
2235
2236 case MEM:
2237 subst_constants (&XEXP (x, 0), insn, map, 0);
2238
2239 /* If a memory address got spoiled, change it back. */
2240 if (! memonly && insn != 0 && num_validated_changes () != num_changes
2241 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2242 cancel_changes (num_changes);
2243 return;
2244
2245 case SET:
2246 {
2247 /* Substitute constants in our source, and in any arguments to a
2248 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2249 itself. */
2250 rtx *dest_loc = &SET_DEST (x);
2251 rtx dest = *dest_loc;
2252 rtx src, tem;
2253
2254 subst_constants (&SET_SRC (x), insn, map, memonly);
2255 src = SET_SRC (x);
2256
2257 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2258 || GET_CODE (*dest_loc) == SUBREG
2259 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2260 {
2261 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2262 {
2263 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
2264 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
2265 }
2266 dest_loc = &XEXP (*dest_loc, 0);
2267 }
2268
2269 /* Do substitute in the address of a destination in memory. */
2270 if (GET_CODE (*dest_loc) == MEM)
2271 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
2272
2273 /* Check for the case where DEST is a SUBREG, both it and the underlying
2274 register are no larger than one word, and the SUBREG is at least as
2275 wide as the underlying register. In that case, we are really setting
2276 the underlying register to the source converted to the mode of DEST, so indicate that. */
2277 if (GET_CODE (dest) == SUBREG
2278 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2279 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2280 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2281 <= GET_MODE_SIZE (GET_MODE (dest)))
2282 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2283 src)))
2284 src = tem, dest = SUBREG_REG (dest);
2285
2286 /* If storing a recognizable value, save it for later recording. */
2287 if ((map->num_sets < MAX_RECOG_OPERANDS)
2288 && (CONSTANT_P (src)
2289 || (GET_CODE (src) == REG
2290 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2291 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2292 || (GET_CODE (src) == PLUS
2293 && GET_CODE (XEXP (src, 0)) == REG
2294 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2295 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2296 && CONSTANT_P (XEXP (src, 1)))
2297 || GET_CODE (src) == COMPARE
2298 #ifdef HAVE_cc0
2299 || dest == cc0_rtx
2300 #endif
2301 || (dest == pc_rtx
2302 && (src == pc_rtx || GET_CODE (src) == RETURN
2303 || GET_CODE (src) == LABEL_REF))))
2304 {
2305 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2306 it will cause us to save the COMPARE with any constants
2307 substituted, which is what we want for later. */
2308 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2309 map->equiv_sets[map->num_sets++].dest = dest;
2310 }
2311 }
2312 return;
2313
2314 default:
2315 break;
2316 }
2317
2318 format_ptr = GET_RTX_FORMAT (code);
2319
2320 /* If the first operand is an expression, save its mode for later. */
2321 if (*format_ptr == 'e')
2322 op0_mode = GET_MODE (XEXP (x, 0));
2323
2324 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2325 {
2326 switch (*format_ptr++)
2327 {
2328 case '0':
2329 break;
2330
2331 case 'e':
2332 if (XEXP (x, i))
2333 subst_constants (&XEXP (x, i), insn, map, memonly);
2334 break;
2335
2336 case 'u':
2337 case 'i':
2338 case 's':
2339 case 'w':
2340 case 't':
2341 break;
2342
2343 case 'E':
2344 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2345 for (j = 0; j < XVECLEN (x, i); j++)
2346 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
2347
2348 break;
2349
2350 default:
2351 abort ();
2352 }
2353 }
2354
2355 /* If this is a commutative operation, move a constant to the second
2356 operand unless the second operand is already a CONST_INT. */
2357 if (! memonly
2358 && (GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2359 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2360 {
2361 rtx tem = XEXP (x, 0);
2362 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2363 validate_change (insn, &XEXP (x, 1), tem, 1);
2364 }
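/* E.g., a substitution that produced (plus (const_int 4) (reg 70)) is
   rewritten here as (plus (reg 70) (const_int 4)), the canonical
   operand order that recog and cse expect.  */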
2365
2366 /* Simplify the expression in case we put in some constants. */
2367 if (! memonly)
2368 switch (GET_RTX_CLASS (code))
2369 {
2370 case '1':
2371 if (op0_mode == MAX_MACHINE_MODE)
2372 abort ();
2373 new = simplify_unary_operation (code, GET_MODE (x),
2374 XEXP (x, 0), op0_mode);
2375 break;
2376
2377 case '<':
2378 {
2379 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2380
2381 if (op_mode == VOIDmode)
2382 op_mode = GET_MODE (XEXP (x, 1));
2383 new = simplify_relational_operation (code, op_mode,
2384 XEXP (x, 0), XEXP (x, 1));
2385 #ifdef FLOAT_STORE_FLAG_VALUE
2386 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2387 {
2388 enum machine_mode mode = GET_MODE (x);
2389 if (new == const0_rtx)
2390 new = CONST0_RTX (mode);
2391 else
2392 {
2393 REAL_VALUE_TYPE val = FLOAT_STORE_FLAG_VALUE (mode);
2394 new = CONST_DOUBLE_FROM_REAL_VALUE (val, mode);
2395 }
2396 }
2397 #endif
2398 break;
2399 }
2400
2401 case '2':
2402 case 'c':
2403 new = simplify_binary_operation (code, GET_MODE (x),
2404 XEXP (x, 0), XEXP (x, 1));
2405 break;
2406
2407 case 'b':
2408 case '3':
2409 if (op0_mode == MAX_MACHINE_MODE)
2410 abort ();
2411
2412 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2413 XEXP (x, 0), XEXP (x, 1),
2414 XEXP (x, 2));
2415 break;
2416 }
2417
2418 if (new)
2419 validate_change (insn, loc, new, 1);
2420 }
2421
2422 /* Show that the registers modified no longer contain known constants. We
2423 are called from note_stores with parts of the new insn. */
2424
2425 static void
2426 mark_stores (dest, x, data)
2427 rtx dest;
2428 rtx x ATTRIBUTE_UNUSED;
2429 void *data ATTRIBUTE_UNUSED;
2430 {
2431 int regno = -1;
2432 enum machine_mode mode = VOIDmode;
2433
2434 /* DEST is always the innermost thing set, except in the case of
2435 SUBREGs of hard registers. */
2436
2437 if (GET_CODE (dest) == REG)
2438 regno = REGNO (dest), mode = GET_MODE (dest);
2439 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2440 {
2441 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2442 mode = GET_MODE (SUBREG_REG (dest));
2443 }
2444
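/* For example (hard register numbers invented): a DImode store into a
   SUBREG of hard reg 4 on a 32-bit target makes HARD_REGNO_NREGS
   yield 2, so LAST_REG below covers regs 4 and 5 and both
   equivalences are invalidated.  */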
2445 if (regno >= 0)
2446 {
2447 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2448 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2449 int i;
2450
2451 /* Ignore virtual stack var or virtual arg register since those
2452 are handled separately. */
2453 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
2454 && regno != VIRTUAL_STACK_VARS_REGNUM)
2455 for (i = regno; i <= last_reg; i++)
2456 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
2457 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
2458 }
2459 }
2460 \f
2461 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2462 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2463 that it points to the node itself, thus indicating that the node is its
2464 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2465 the given node is NULL, recursively descend the decl/block tree which
2466 it is the root of, and for each other ..._DECL or BLOCK node contained
2467 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2468 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2469 values to point to themselves. */
2470
2471 static void
2472 set_block_origin_self (stmt)
2473 register tree stmt;
2474 {
2475 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2476 {
2477 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2478
2479 {
2480 register tree local_decl;
2481
2482 for (local_decl = BLOCK_VARS (stmt);
2483 local_decl != NULL_TREE;
2484 local_decl = TREE_CHAIN (local_decl))
2485 set_decl_origin_self (local_decl); /* Potential recursion. */
2486 }
2487
2488 {
2489 register tree subblock;
2490
2491 for (subblock = BLOCK_SUBBLOCKS (stmt);
2492 subblock != NULL_TREE;
2493 subblock = BLOCK_CHAIN (subblock))
2494 set_block_origin_self (subblock); /* Recurse. */
2495 }
2496 }
2497 }
2498
2499 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2500 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2501 node so that it points to the node itself, thus indicating that the
2502 node represents its own (abstract) origin. Additionally, if the
2503 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2504 the decl/block tree of which the given node is the root, and for
2505 each other ..._DECL or BLOCK node contained therein whose
2506 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2507 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2508 point to themselves. */
2509
2510 static void
2511 set_decl_origin_self (decl)
2512 register tree decl;
2513 {
2514 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2515 {
2516 DECL_ABSTRACT_ORIGIN (decl) = decl;
2517 if (TREE_CODE (decl) == FUNCTION_DECL)
2518 {
2519 register tree arg;
2520
2521 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2522 DECL_ABSTRACT_ORIGIN (arg) = arg;
2523 if (DECL_INITIAL (decl) != NULL_TREE
2524 && DECL_INITIAL (decl) != error_mark_node)
2525 set_block_origin_self (DECL_INITIAL (decl));
2526 }
2527 }
2528 }
2529 \f
2530 /* Given a pointer to some BLOCK node, and a boolean value to set the
2531 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2532 the given block, and for all local decls and all local sub-blocks
2533 (recursively) which are contained therein. */
2534
2535 static void
2536 set_block_abstract_flags (stmt, setting)
2537 register tree stmt;
2538 register int setting;
2539 {
2540 register tree local_decl;
2541 register tree subblock;
2542
2543 BLOCK_ABSTRACT (stmt) = setting;
2544
2545 for (local_decl = BLOCK_VARS (stmt);
2546 local_decl != NULL_TREE;
2547 local_decl = TREE_CHAIN (local_decl))
2548 set_decl_abstract_flags (local_decl, setting);
2549
2550 for (subblock = BLOCK_SUBBLOCKS (stmt);
2551 subblock != NULL_TREE;
2552 subblock = BLOCK_CHAIN (subblock))
2553 set_block_abstract_flags (subblock, setting);
2554 }
2555
2556 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2557 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2558 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2559 set the abstract flags for all of the parameters, local vars, local
2560 blocks and sub-blocks (recursively) to the same setting. */
2561
2562 void
2563 set_decl_abstract_flags (decl, setting)
2564 register tree decl;
2565 register int setting;
2566 {
2567 DECL_ABSTRACT (decl) = setting;
2568 if (TREE_CODE (decl) == FUNCTION_DECL)
2569 {
2570 register tree arg;
2571
2572 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2573 DECL_ABSTRACT (arg) = setting;
2574 if (DECL_INITIAL (decl) != NULL_TREE
2575 && DECL_INITIAL (decl) != error_mark_node)
2576 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2577 }
2578 }
2579 \f
2580 /* Output the assembly language code for the function FNDECL
2581 from its DECL_SAVED_INSNS. Used for inline functions that are output
2582 at end of compilation instead of where they came in the source. */
2583
2584 void
2585 output_inline_function (fndecl)
2586 tree fndecl;
2587 {
2588 struct function *old_cfun = cfun;
2589 struct function *f = DECL_SAVED_INSNS (fndecl);
2590
2591 cfun = f;
2592 current_function_decl = fndecl;
2593 clear_emit_caches ();
2594
2595 /* Things we allocate from here on are part of this function, not
2596 permanent. */
2597 temporary_allocation ();
2598
2599 set_new_last_label_num (f->inl_max_label_num);
2600
2601 /* We must have already output DWARF debugging information for the
2602 original (abstract) inline function declaration/definition, so
2603 we want to make sure that the debugging information we generate
2604 for this special instance of the inline function refers back to
2605 the information we already generated. To make sure that happens,
2606 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
2607 node (and for all of the local ..._DECL nodes which are its children)
2608 so that they all point to themselves. */
2609
2610 set_decl_origin_self (fndecl);
2611
2612 /* We're not deferring this any longer. */
2613 DECL_DEFER_OUTPUT (fndecl) = 0;
2614
2615 /* We can't inline this anymore. */
2616 f->inlinable = 0;
2617 DECL_INLINE (fndecl) = 0;
2618
2619 /* Compile this function all the way down to assembly code. */
2620 rest_of_compilation (fndecl);
2621
2622 cfun = old_cfun;
2623 current_function_decl = old_cfun ? old_cfun->decl : 0;
2624 }