1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93, 94, 95, 1996 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include <stdio.h>
24
25 #include "config.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "insn-flags.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "integrate.h"
35 #include "real.h"
36 #include "function.h"
37 #include "bytecode.h"
38
39 #include "obstack.h"
40 #define obstack_chunk_alloc xmalloc
41 #define obstack_chunk_free free
42
43 extern struct obstack *function_maybepermanent_obstack;
44
45 extern tree pushdecl ();
46 extern tree poplevel ();
47
48 /* Round VALUE up to the next highest multiple of ALIGN,
49    which must be a power of two. */
50 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
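/* Illustrative examples (assuming ALIGN is a power of two):
   CEIL_ROUND (13, 8) == 16 and CEIL_ROUND (16, 8) == 16. */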
51
52 /* Default max number of insns a function can have and still be inline.
53 This is overridden on RISC machines. */
54 #ifndef INTEGRATE_THRESHOLD
55 #define INTEGRATE_THRESHOLD(DECL) \
56 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
57 #endif
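/* As an illustration, under the default definition a function with
   two arguments may have up to 8 * (8 + 2) = 80 insns and still be
   considered for inlining. */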
58 \f
59 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
60 static void finish_inline PROTO((tree, rtx));
61 static void adjust_copied_decl_tree PROTO((tree));
62 static tree copy_decl_list PROTO((tree));
63 static tree copy_decl_tree PROTO((tree));
64 static void copy_decl_rtls PROTO((tree));
65 static void save_constants PROTO((rtx *));
66 static void note_modified_parmregs PROTO((rtx, rtx));
67 static rtx copy_for_inline PROTO((rtx));
68 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
69 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
70 static void save_constants_in_decl_trees PROTO ((tree));
71 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
72 static void restore_constants PROTO((rtx *));
73 static void set_block_origin_self PROTO((tree));
74 static void set_decl_origin_self PROTO((tree));
75 static void set_block_abstract_flags PROTO((tree, int));
76
77 void set_decl_abstract_flags PROTO((tree, int));
78 \f
79 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
80 is safe and reasonable to integrate into other functions.
81 Nonzero means value is a warning message with a single %s
82 for the function's name. */
83
84 char *
85 function_cannot_inline_p (fndecl)
86 register tree fndecl;
87 {
88 register rtx insn;
89 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
90 int max_insns = INTEGRATE_THRESHOLD (fndecl);
91 register int ninsns = 0;
92 register tree parms;
93
94 /* No inlines with varargs. `grokdeclarator' gives a warning
95 message about that if `inline' is specified. This code
96 is put in to catch the volunteers. */
97 if ((last && TREE_VALUE (last) != void_type_node)
98 || current_function_varargs)
99 return "varargs function cannot be inline";
100
101 if (current_function_calls_alloca)
102 return "function using alloca cannot be inline";
103
104 if (current_function_contains_functions)
105 return "function with nested functions cannot be inline";
106
107 /* If it's not even close, don't even look. */
108 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
109 return "function too large to be inline";
110
111 #if 0
112 /* Don't inline functions which do not specify a function prototype and
113 have BLKmode argument or take the address of a parameter. */
114 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
115 {
116 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
117 TREE_ADDRESSABLE (parms) = 1;
118 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
119 return "no prototype, and parameter address used; cannot be inline";
120 }
121 #endif
122
123 /* We can't inline functions that return structures
124 the old-fashioned PCC way, copying into a static block. */
125 if (current_function_returns_pcc_struct)
126 return "inline functions not supported for this return value type";
127
128 /* We can't inline functions that return BLKmode structures in registers. */
129 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
130 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
131 return "inline functions not supported for this return value type";
132
133 /* We can't inline functions that return structures of varying size. */
134 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
135 return "function with varying-size return value cannot be inline";
136
137 /* Cannot inline a function with a varying size argument or one that
138 receives a transparent union. */
139 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
140 {
141 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
142 return "function with varying-size parameter cannot be inline";
143 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
144 return "function with transparent union parameter cannot be inline";
145 }
146
147 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
148 {
149 for (ninsns = 0, insn = get_first_nonparm_insn ();
150 insn && ninsns < max_insns;
151 insn = NEXT_INSN (insn))
152 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
153 ninsns++;
154
155 if (ninsns >= max_insns)
156 return "function too large to be inline";
157 }
158
159 /* We cannot inline this function if forced_labels is non-zero. This
160 implies that a label in this function was used as an initializer.
161 Because labels cannot be duplicated, all labels in the function
162 will be renamed when it is inlined. However, there is no way to find
163 and fix all variables initialized with addresses of labels in this
164 function, hence inlining is impossible. */
165
166 if (forced_labels)
167 return "function with label addresses used in initializers cannot be inline";
168
169 /* We cannot inline a nested function that jumps to a nonlocal label. */
170 if (current_function_has_nonlocal_goto)
171 return "function with nonlocal goto cannot be inline";
172
173 return 0;
174 }
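/* A sketch of the expected use (illustrative only; the actual caller
   is rest_of_compilation in toplev.c):

     char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       warning_with_decl (fndecl, lose);  */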
175 \f
176 /* Variables used within save_for_inline. */
177
178 /* Mapping from old pseudo-register to new pseudo-registers.
179 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
180 It is allocated in `save_for_inline' and `expand_inline_function',
181 and deallocated on exit from each of those routines. */
182 static rtx *reg_map;
183
184 /* Mapping from old code-labels to new code-labels.
185 The first element of this map is label_map[min_labelno].
186 It is allocated in `save_for_inline' and `expand_inline_function',
187 and deallocated on exit from each of those routines. */
188 static rtx *label_map;
189
190 /* Mapping from old insn uid's to copied insns.
191 It is allocated in `save_for_inline' and `expand_inline_function',
192 and deallocated on exit from each of those routines. */
193 static rtx *insn_map;
194
195 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
196 Zero for a reg that isn't a parm's home.
197 Only reg numbers less than max_parm_reg are mapped here. */
198 static tree *parmdecl_map;
199
200 /* Keep track of first pseudo-register beyond those that are parms. */
201 static int max_parm_reg;
202
203 /* When an insn is being copied by copy_for_inline,
204 this is nonzero if we have copied an ASM_OPERANDS.
205 In that case, it is the original input-operand vector. */
206 static rtvec orig_asm_operands_vector;
207
208 /* When an insn is being copied by copy_for_inline,
209 this is nonzero if we have copied an ASM_OPERANDS.
210 In that case, it is the copied input-operand vector. */
211 static rtvec copy_asm_operands_vector;
212
213 /* Likewise, this is the copied constraints vector. */
214 static rtvec copy_asm_constraints_vector;
215
216 /* In save_for_inline, nonzero if past the parm-initialization insns. */
217 static int in_nonparm_insns;
218 \f
219 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
220 needed to save FNDECL's insns and info for future inline expansion. */
221
222 static rtx
223 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
224 tree fndecl;
225 int min_labelno;
226 int max_labelno;
227 int max_reg;
228 int copy;
229 {
230 int function_flags, i;
231 rtvec arg_vector;
232 tree parms;
233
234 /* Compute the values of any flags we must restore when inlining this. */
235
236 function_flags
237 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
238 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
239 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
240 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
241 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
242 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
243 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
244 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
245 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
246 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
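/* Note: each FUNCTION_FLAGS_* value is a distinct bit and each
   current_function_* value above is 0 or 1, so this sum is
   equivalent to OR-ing together the flags that apply. */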
247
248 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
249 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
250 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
251
252 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
253 parms;
254 parms = TREE_CHAIN (parms), i++)
255 {
256 rtx p = DECL_RTL (parms);
257
258 if (GET_CODE (p) == MEM && copy)
259 {
260 /* Copy the rtl so that modifications of the addresses
261 later in compilation won't affect this arg_vector.
262 Virtual register instantiation can screw up the address
263 of the rtl. */
264 rtx new = copy_rtx (p);
265
266 /* Don't leave the old copy anywhere in this decl. */
267 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
268 || (GET_CODE (DECL_RTL (parms)) == MEM
269 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
270 && (XEXP (DECL_RTL (parms), 0)
271 == XEXP (DECL_INCOMING_RTL (parms), 0))))
272 DECL_INCOMING_RTL (parms) = new;
273 DECL_RTL (parms) = new;
274 }
275
276 RTVEC_ELT (arg_vector, i) = p;
277
278 if (GET_CODE (p) == REG)
279 parmdecl_map[REGNO (p)] = parms;
280 else if (GET_CODE (p) == CONCAT)
281 {
282 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
283 rtx pimag = gen_imagpart (GET_MODE (preal), p);
284
285 if (GET_CODE (preal) == REG)
286 parmdecl_map[REGNO (preal)] = parms;
287 if (GET_CODE (pimag) == REG)
288 parmdecl_map[REGNO (pimag)] = parms;
289 }
290
291 /* This flag is cleared later
292 if the function ever modifies the value of the parm. */
293 TREE_READONLY (parms) = 1;
294 }
295
296 /* Assume we start out in the insns that set up the parameters. */
297 in_nonparm_insns = 0;
298
299 /* The list of DECL_SAVED_INSNS starts off with a header which
300 contains the following information:
301
302 the first insn of the function (not including the insns that copy
303 parameters into registers).
304 the first parameter insn of the function,
305 the first label used by that function,
306 the last label used by that function,
307 the highest register number used for parameters,
308 the total number of registers used,
309 the size of the incoming stack area for parameters,
310 the number of bytes popped on return,
311 the stack slot list,
312 some flags that are used to restore compiler globals,
313 the value of current_function_outgoing_args_size,
314 the original argument vector,
315 the original DECL_INITIAL,
316 and pointers to the table of pseudo regs, pointer flags, and alignment. */
317
318 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
319 max_parm_reg, max_reg,
320 current_function_args_size,
321 current_function_pops_args,
322 stack_slot_list, forced_labels, function_flags,
323 current_function_outgoing_args_size,
324 arg_vector, (rtx) DECL_INITIAL (fndecl),
325 (rtvec) regno_reg_rtx, regno_pointer_flag,
326 regno_pointer_align);
327 }
328
329 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
330 things that must be done to make FNDECL expandable as an inline function.
331 HEAD contains the chain of insns to which FNDECL will expand. */
332
333 static void
334 finish_inline (fndecl, head)
335 tree fndecl;
336 rtx head;
337 {
338 NEXT_INSN (head) = get_first_nonparm_insn ();
339 FIRST_PARM_INSN (head) = get_insns ();
340 DECL_SAVED_INSNS (fndecl) = head;
341 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
342 }
343
344 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
345 they all point to the new (copied) rtxs. */
346
347 static void
348 adjust_copied_decl_tree (block)
349 register tree block;
350 {
351 register tree subblock;
352 register rtx original_end;
353
354 original_end = BLOCK_END_NOTE (block);
355 if (original_end)
356 {
357 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
358 NOTE_SOURCE_FILE (original_end) = 0;
359 }
360
361 /* Process all subblocks. */
362 for (subblock = BLOCK_SUBBLOCKS (block);
363 subblock;
364 subblock = TREE_CHAIN (subblock))
365 adjust_copied_decl_tree (subblock);
366 }
367
368 /* Make the insns and PARM_DECLs of the current function permanent
369 and record other information in DECL_SAVED_INSNS to allow inlining
370 of this function in subsequent calls.
371
372 This function is called when we are going to immediately compile
373 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
374 modified by the compilation process, so we copy all of them to
375 new storage and consider the new insns to be the insn chain to be
376 compiled. Our caller (rest_of_compilation) saves the original
377 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
378
379 /* ??? The nonlocal_label list should be adjusted also. However, since
380 a function that contains a nested function never gets inlined currently,
381 the nonlocal_label list will always be empty, so we don't worry about
382 it for now. */
383
384 void
385 save_for_inline_copying (fndecl)
386 tree fndecl;
387 {
388 rtx first_insn, last_insn, insn;
389 rtx head, copy;
390 int max_labelno, min_labelno, i, len;
391 int max_reg;
392 int max_uid;
393 rtx first_nonparm_insn;
394 char *new, *new1;
395
396 /* Make and emit a return-label if we have not already done so.
397 Do this before recording the bounds on label numbers. */
398
399 if (return_label == 0)
400 {
401 return_label = gen_label_rtx ();
402 emit_label (return_label);
403 }
404
405 /* Get some bounds on the labels and registers used. */
406
407 max_labelno = max_label_num ();
408 min_labelno = get_first_label_num ();
409 max_reg = max_reg_num ();
410
411 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
412 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
413 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
414 for the parms, prior to elimination of virtual registers.
415 These values are needed for substituting parms properly. */
416
417 max_parm_reg = max_parm_reg_num ();
418 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
419
420 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
421
422 if (current_function_uses_const_pool)
423 {
424 /* Replace any constant pool references with the actual constant. We
425 will put the constants back in the copy made below. */
426 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
427 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
428 {
429 save_constants (&PATTERN (insn));
430 if (REG_NOTES (insn))
431 save_constants (&REG_NOTES (insn));
432 }
433
434 /* Also scan all decls, and replace any constant pool references with the
435 actual constant. */
436 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
437
438 /* Clear out the constant pool so that we can recreate it with the
439 copied constants below. */
440 init_const_rtx_hash_table ();
441 clear_const_double_mem ();
442 }
443
444 max_uid = INSN_UID (head);
445
446 /* We have now allocated all that needs to be allocated permanently
447 on the rtx obstack. Set our high-water mark, so that we
448 can free the rest of this when the time comes. */
449
450 preserve_data ();
451
452 /* Copy the chain of insns of this function.
453 Install the copied chain as the insns of this function,
454 for continued compilation;
455 the original chain is recorded as the DECL_SAVED_INSNS
456 for inlining future calls. */
457
458 /* If there are insns that copy parms from the stack into pseudo registers,
459 those insns are not copied. `expand_inline_function' must
460 emit the correct code to handle such things. */
461
462 insn = get_insns ();
463 if (GET_CODE (insn) != NOTE)
464 abort ();
465 first_insn = rtx_alloc (NOTE);
466 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
467 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
468 INSN_UID (first_insn) = INSN_UID (insn);
469 PREV_INSN (first_insn) = NULL;
470 NEXT_INSN (first_insn) = NULL;
471 last_insn = first_insn;
472
473 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
474 Make these new rtx's now, and install them in regno_reg_rtx, so they
475 will be the official pseudo-reg rtx's for the rest of compilation. */
476
477 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
478
479 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
480 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
481 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
482 regno_reg_rtx[i], len);
483
484 regno_reg_rtx = reg_map;
485
486 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
487 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
488 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
489 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
490 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
491
492 /* Likewise each label rtx must have a unique rtx as its copy. */
493
494 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
495 label_map -= min_labelno;
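/* Biasing the pointer by min_labelno lets label_map be indexed
   directly by label number, e.g. label_map[CODE_LABEL_NUMBER (insn)]
   below. */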
496
497 for (i = min_labelno; i < max_labelno; i++)
498 label_map[i] = gen_label_rtx ();
499
500 /* Record the mapping of old insns to copied insns. */
501
502 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
503 bzero ((char *) insn_map, max_uid * sizeof (rtx));
504
505 /* Get the insn which signals the end of parameter setup code. */
506 first_nonparm_insn = get_first_nonparm_insn ();
507
508 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
509 (the former occurs when a variable has its address taken)
510 since these may be shared and can be changed by virtual
511 register instantiation. DECL_RTL values for our arguments
512 have already been copied by initialize_for_inline. */
513 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
514 if (GET_CODE (regno_reg_rtx[i]) == MEM)
515 XEXP (regno_reg_rtx[i], 0)
516 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
517
518 /* Copy the tree of subblocks of the function, and the decls in them.
519 We will use the copy for compiling this function, then restore the original
520 subblocks and decls for use when inlining this function.
521
522 Several parts of the compiler modify BLOCK trees. In particular,
523 instantiate_virtual_regs will instantiate any virtual regs
524 mentioned in the DECL_RTLs of the decls, and loop
525 unrolling will replicate any BLOCK trees inside an unrolled loop.
526
527 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
528 which we will use for inlining. The rtl might even contain pseudoregs
529 whose space has been freed. */
530
531 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
532 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
533
534 /* Now copy each DECL_RTL which is a MEM,
535 so it is safe to modify their addresses. */
536 copy_decl_rtls (DECL_INITIAL (fndecl));
537
538 /* The fndecl node acts as its own progenitor, so mark it as such. */
539 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
540
541 /* Now copy the chain of insns. Do this in two passes. The first time,
542 copy the insn itself and its body; the second time, copy the REG_NOTES.
543 This is because a REG_NOTE may have a forward pointer to another insn. */
544
545 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
546 {
547 orig_asm_operands_vector = 0;
548
549 if (insn == first_nonparm_insn)
550 in_nonparm_insns = 1;
551
552 switch (GET_CODE (insn))
553 {
554 case NOTE:
555 /* No need to keep these. */
556 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
557 continue;
558
559 copy = rtx_alloc (NOTE);
560 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
561 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
562 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
563 else
564 {
565 NOTE_SOURCE_FILE (insn) = (char *) copy;
566 NOTE_SOURCE_FILE (copy) = 0;
567 }
568 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
569 break;
570
571 case INSN:
572 case JUMP_INSN:
573 case CALL_INSN:
574 copy = rtx_alloc (GET_CODE (insn));
575
576 if (GET_CODE (insn) == CALL_INSN)
577 CALL_INSN_FUNCTION_USAGE (copy) =
578 copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
579
580 PATTERN (copy) = copy_for_inline (PATTERN (insn));
581 INSN_CODE (copy) = -1;
582 LOG_LINKS (copy) = NULL_RTX;
583 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
584 break;
585
586 case CODE_LABEL:
587 copy = label_map[CODE_LABEL_NUMBER (insn)];
588 LABEL_NAME (copy) = LABEL_NAME (insn);
589 break;
590
591 case BARRIER:
592 copy = rtx_alloc (BARRIER);
593 break;
594
595 default:
596 abort ();
597 }
598 INSN_UID (copy) = INSN_UID (insn);
599 insn_map[INSN_UID (insn)] = copy;
600 NEXT_INSN (last_insn) = copy;
601 PREV_INSN (copy) = last_insn;
602 last_insn = copy;
603 }
604
605 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
606
607 /* Now copy the REG_NOTES. */
608 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
609 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
610 && insn_map[INSN_UID(insn)])
611 REG_NOTES (insn_map[INSN_UID (insn)])
612 = copy_for_inline (REG_NOTES (insn));
613
614 NEXT_INSN (last_insn) = NULL;
615
616 finish_inline (fndecl, head);
617
618 /* Make new versions of the register tables. */
619 new = (char *) savealloc (regno_pointer_flag_length);
620 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
621 new1 = (char *) savealloc (regno_pointer_flag_length);
622 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
623
624 regno_pointer_flag = new;
625 regno_pointer_align = new1;
626
627 set_new_first_and_last_insn (first_insn, last_insn);
628 }
629
630 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
631 For example, this can copy a list made of TREE_LIST nodes. While copying,
632 for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
633 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
634 point to the corresponding (abstract) original node. */
635
636 static tree
637 copy_decl_list (list)
638 tree list;
639 {
640 tree head;
641 register tree prev, next;
642
643 if (list == 0)
644 return 0;
645
646 head = prev = copy_node (list);
647 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
648 DECL_ABSTRACT_ORIGIN (head) = list;
649 next = TREE_CHAIN (list);
650 while (next)
651 {
652 register tree copy;
653
654 copy = copy_node (next);
655 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
656 DECL_ABSTRACT_ORIGIN (copy) = next;
657 TREE_CHAIN (prev) = copy;
658 prev = copy;
659 next = TREE_CHAIN (next);
660 }
661 return head;
662 }
663
664 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
665
666 static tree
667 copy_decl_tree (block)
668 tree block;
669 {
670 tree t, vars, subblocks;
671
672 vars = copy_decl_list (BLOCK_VARS (block));
673 subblocks = 0;
674
675 /* Process all subblocks. */
676 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
677 {
678 tree copy = copy_decl_tree (t);
679 TREE_CHAIN (copy) = subblocks;
680 subblocks = copy;
681 }
682
683 t = copy_node (block);
684 BLOCK_VARS (t) = vars;
685 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
686 /* If the BLOCK being cloned is already marked as having been instantiated
687 from something else, then leave that `origin' marking alone. Otherwise,
688 mark the clone as having originated from the BLOCK we are cloning. */
689 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
690 BLOCK_ABSTRACT_ORIGIN (t) = block;
691 return t;
692 }
693
694 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
695
696 static void
697 copy_decl_rtls (block)
698 tree block;
699 {
700 tree t;
701
702 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
703 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
704 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
705
706 /* Process all subblocks. */
707 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
708 copy_decl_rtls (t);
709 }
710
711 /* Make the insns and PARM_DECLs of the current function permanent
712 and record other information in DECL_SAVED_INSNS to allow inlining
713 of this function in subsequent calls.
714
715 This routine need not copy any insns because we are not going
716 to immediately compile the insns in the insn chain. There
717 are two cases when we would compile the insns for FNDECL:
718 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
719 be output at the end of other compilation, because somebody took
720 its address. In the first case, the insns of FNDECL are copied
721 as it is expanded inline, so FNDECL's saved insns are not
722 modified. In the second case, FNDECL is used for the last time,
723 so modifying the rtl is not a problem.
724
725 We don't have to worry about FNDECL being inline expanded by
726 other functions which are written at the end of compilation
727 because flag_no_inline is turned on when we begin writing
728 functions at the end of compilation. */
729
730 void
731 save_for_inline_nocopy (fndecl)
732 tree fndecl;
733 {
734 rtx insn;
735 rtx head;
736 rtx first_nonparm_insn;
737
738 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
739 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
740 Also set up ARG_VECTOR, which holds the unmodified DECL_RTL values
741 for the parms, prior to elimination of virtual registers.
742 These values are needed for substituting parms properly. */
743
744 max_parm_reg = max_parm_reg_num ();
745 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
746
747 /* Make and emit a return-label if we have not already done so. */
748
749 if (return_label == 0)
750 {
751 return_label = gen_label_rtx ();
752 emit_label (return_label);
753 }
754
755 head = initialize_for_inline (fndecl, get_first_label_num (),
756 max_label_num (), max_reg_num (), 0);
757
758 /* If there are insns that copy parms from the stack into pseudo registers,
759 those insns are not copied. `expand_inline_function' must
760 emit the correct code to handle such things. */
761
762 insn = get_insns ();
763 if (GET_CODE (insn) != NOTE)
764 abort ();
765
766 /* Get the insn which signals the end of parameter setup code. */
767 first_nonparm_insn = get_first_nonparm_insn ();
768
769 /* Now just scan the chain of insns to see what happens to our
770 PARM_DECLs. If a PARM_DECL is used but never modified, we
771 can substitute its rtl directly when expanding inline (and
772 perform constant folding when its incoming value is constant).
773 Otherwise, we have to copy its value into a new register and track
774 the new register's life. */
775
776 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
777 {
778 if (insn == first_nonparm_insn)
779 in_nonparm_insns = 1;
780
781 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
782 {
783 if (current_function_uses_const_pool)
784 {
785 /* Replace any constant pool references with the actual constant.
786 We will put the constant back if we need to write the
787 function out after all. */
788 save_constants (&PATTERN (insn));
789 if (REG_NOTES (insn))
790 save_constants (&REG_NOTES (insn));
791 }
792
793 /* Record what interesting things happen to our parameters. */
794 note_stores (PATTERN (insn), note_modified_parmregs);
795 }
796 }
797
798 /* Also scan all decls, and replace any constant pool references with the
799 actual constant. */
800 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
801
802 /* We have now allocated all that needs to be allocated permanently
803 on the rtx obstack. Set our high-water mark, so that we
804 can free the rest of this when the time comes. */
805
806 preserve_data ();
807
808 finish_inline (fndecl, head);
809 }
810 \f
811 /* Given PX, a pointer into an insn, search for references to the constant
812 pool. Replace each with a CONST that has the mode of the original
813 constant, contains the constant, and has RTX_INTEGRATED_P set.
814 Similarly, constant pool addresses not enclosed in a MEM are replaced
815 with an ADDRESS rtx which also gives the constant, mode, and has
816 RTX_INTEGRATED_P set. */
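/* An illustrative transformation: a pool reference such as
   (mem:SI (symbol_ref ...)) becomes (const:SI <pool constant>),
   and a bare pool SYMBOL_REF becomes (address:SI <pool constant>),
   each with RTX_INTEGRATED_P set; if the MEM's mode differs from
   the constant's mode, the CONST is wrapped in a SUBREG. */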
817
818 static void
819 save_constants (px)
820 rtx *px;
821 {
822 rtx x;
823 int i, j;
824
825 again:
826 x = *px;
827
828 /* If this is a CONST_DOUBLE, don't try to fix things up in
829 CONST_DOUBLE_MEM, because this is an infinite recursion. */
830 if (GET_CODE (x) == CONST_DOUBLE)
831 return;
832 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
833 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
834 {
835 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
836 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
837 RTX_INTEGRATED_P (new) = 1;
838
839 /* If the MEM was in a different mode than the constant (perhaps we
840 were only looking at the low-order part), surround it with a
841 SUBREG so we can save both modes. */
842
843 if (GET_MODE (x) != const_mode)
844 {
845 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
846 RTX_INTEGRATED_P (new) = 1;
847 }
848
849 *px = new;
850 save_constants (&XEXP (*px, 0));
851 }
852 else if (GET_CODE (x) == SYMBOL_REF
853 && CONSTANT_POOL_ADDRESS_P (x))
854 {
855 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
856 save_constants (&XEXP (*px, 0));
857 RTX_INTEGRATED_P (*px) = 1;
858 }
859
860 else
861 {
862 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
863 int len = GET_RTX_LENGTH (GET_CODE (x));
864
865 for (i = len-1; i >= 0; i--)
866 {
867 switch (fmt[i])
868 {
869 case 'E':
870 for (j = 0; j < XVECLEN (x, i); j++)
871 save_constants (&XVECEXP (x, i, j));
872 break;
873
874 case 'e':
875 if (XEXP (x, i) == 0)
876 continue;
877 if (i == 0)
878 {
879 /* Hack tail-recursion here. */
880 px = &XEXP (x, 0);
881 goto again;
882 }
883 save_constants (&XEXP (x, i));
884 break;
885 }
886 }
887 }
888 }
889 \f
890 /* Note whether a parameter is modified or not. */
891
892 static void
893 note_modified_parmregs (reg, x)
894 rtx reg;
895 rtx x;
896 {
897 if (GET_CODE (reg) == REG && in_nonparm_insns
898 && REGNO (reg) < max_parm_reg
899 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
900 && parmdecl_map[REGNO (reg)] != 0)
901 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
902 }
903
904 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
905 according to `reg_map' and `label_map'. The original rtl insns
906 will be saved for inlining; this is used to make a copy
907 which is used to finish compiling the inline function itself.
908
909 If we find a "saved" constant pool entry, one which was replaced with
910 the value of the constant, convert it back to a constant pool entry.
911 Since the pool wasn't touched, this should simply restore the old
912 address.
913
914 All other kinds of rtx are copied except those that can never be
915 changed during compilation. */
916
917 static rtx
918 copy_for_inline (orig)
919 rtx orig;
920 {
921 register rtx x = orig;
922 register int i;
923 register enum rtx_code code;
924 register char *format_ptr;
925
926 if (x == 0)
927 return x;
928
929 code = GET_CODE (x);
930
931 /* These types may be freely shared. */
932
933 switch (code)
934 {
935 case QUEUED:
936 case CONST_INT:
937 case SYMBOL_REF:
938 case PC:
939 case CC0:
940 return x;
941
942 case CONST_DOUBLE:
943 /* We have to make a new CONST_DOUBLE to ensure that we account for
944 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
945 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
946 {
947 REAL_VALUE_TYPE d;
948
949 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
950 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
951 }
952 else
953 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
954 VOIDmode);
955
956 case CONST:
957 /* Get constant pool entry for constant in the pool. */
958 if (RTX_INTEGRATED_P (x))
959 return validize_mem (force_const_mem (GET_MODE (x),
960 copy_for_inline (XEXP (x, 0))));
961 break;
962
963 case SUBREG:
964 /* Get constant pool entry, but access in different mode. */
965 if (RTX_INTEGRATED_P (x))
966 {
967 rtx new
968 = force_const_mem (GET_MODE (SUBREG_REG (x)),
969 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
970
971 PUT_MODE (new, GET_MODE (x));
972 return validize_mem (new);
973 }
974 break;
975
976 case ADDRESS:
977 /* If this is not a special constant-pool ADDRESS, it is an error.
978 Otherwise, return the constant pool address. */
979 if (! RTX_INTEGRATED_P (x))
980 abort ();
981
982 return XEXP (force_const_mem (GET_MODE (x),
983 copy_for_inline (XEXP (x, 0))), 0);
984
985 case ASM_OPERANDS:
986 /* If a single asm insn contains multiple output operands
987 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
988 We must make sure that the copied insn continues to share it. */
989 if (orig_asm_operands_vector == XVEC (orig, 3))
990 {
991 x = rtx_alloc (ASM_OPERANDS);
992 x->volatil = orig->volatil;
993 XSTR (x, 0) = XSTR (orig, 0);
994 XSTR (x, 1) = XSTR (orig, 1);
995 XINT (x, 2) = XINT (orig, 2);
996 XVEC (x, 3) = copy_asm_operands_vector;
997 XVEC (x, 4) = copy_asm_constraints_vector;
998 XSTR (x, 5) = XSTR (orig, 5);
999 XINT (x, 6) = XINT (orig, 6);
1000 return x;
1001 }
1002 break;
1003
1004 case MEM:
1005 /* A MEM is usually allowed to be shared if its address is constant
1006 or is a constant plus one of the special registers.
1007
1008 We do not allow sharing of addresses that are either a special
1009 register or the sum of a constant and a special register because
1010 it is possible for unshare_all_rtl to copy the address into memory
1011 that won't be saved. Although the MEM can safely be shared, and
1012 won't be copied there, the address itself cannot be shared, and may
1013 need to be copied.
1014
1015 There are also two exceptions with constants: The first is if the
1016 constant is a LABEL_REF or the sum of the LABEL_REF
1017 and an integer. This case can happen if we have an inline
1018 function that supplies a constant operand to the call of another
1019 inline function that uses it in a switch statement. In this case,
1020 we will be replacing the LABEL_REF, so we have to replace this MEM
1021 as well.
1022
1023 The second case is if we have a (const (plus (address ..) ...)).
1024 In that case we need to put back the address of the constant pool
1025 entry. */
1026
1027 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1028 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1029 && ! (GET_CODE (XEXP (x, 0)) == CONST
1030 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1031 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1032 == LABEL_REF)
1033 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1034 == ADDRESS)))))
1035 return x;
1036 break;
1037
1038 case LABEL_REF:
1039 /* If this is a non-local label, just make a new LABEL_REF.
1040 Otherwise, use the new label as well. */
1041 x = gen_rtx (LABEL_REF, GET_MODE (orig),
1042 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1043 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1044 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1045 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1046 return x;
1047
1048 case REG:
1049 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1050 return reg_map [REGNO (x)];
1051 else
1052 return x;
1053
1054 case SET:
1055 /* If a parm that gets modified lives in a pseudo-reg,
1056 clear its TREE_READONLY to prevent certain optimizations. */
1057 {
1058 rtx dest = SET_DEST (x);
1059
1060 while (GET_CODE (dest) == STRICT_LOW_PART
1061 || GET_CODE (dest) == ZERO_EXTRACT
1062 || GET_CODE (dest) == SUBREG)
1063 dest = XEXP (dest, 0);
1064
1065 if (GET_CODE (dest) == REG
1066 && REGNO (dest) < max_parm_reg
1067 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1068 && parmdecl_map[REGNO (dest)] != 0
1069 /* The insn to load an arg pseudo from a stack slot
1070 does not count as modifying it. */
1071 && in_nonparm_insns)
1072 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1073 }
1074 break;
1075
1076 #if 0 /* This is a good idea, but here is the wrong place for it. */
1077 /* Arrange that CONST_INTs always appear as the second operand
1078 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1079 always appear as the first. */
1080 case PLUS:
1081 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1082 || (XEXP (x, 1) == frame_pointer_rtx
1083 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1084 && XEXP (x, 1) == arg_pointer_rtx)))
1085 {
1086 rtx t = XEXP (x, 0);
1087 XEXP (x, 0) = XEXP (x, 1);
1088 XEXP (x, 1) = t;
1089 }
1090 break;
1091 #endif
1092 }
1093
1094 /* Replace this rtx with a copy of itself. */
1095
1096 x = rtx_alloc (code);
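/* The copy size below covers the fixed rtx header plus one `fld'
   slot per operand of this rtx code. */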
1097 bcopy ((char *) orig, (char *) x,
1098 (sizeof (*x) - sizeof (x->fld)
1099 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1100
1101 /* Now scan the subexpressions recursively.
1102 We can store any replaced subexpressions directly into X
1103 since we know X is not shared! Any vectors in X
1104 must be copied if X was copied. */
1105
1106 format_ptr = GET_RTX_FORMAT (code);
1107
1108 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1109 {
1110 switch (*format_ptr++)
1111 {
1112 case 'e':
1113 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1114 break;
1115
1116 case 'u':
1117 /* Change any references to old-insns to point to the
1118 corresponding copied insns. */
1119 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1120 break;
1121
1122 case 'E':
1123 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1124 {
1125 register int j;
1126
1127 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1128 for (j = 0; j < XVECLEN (x, i); j++)
1129 XVECEXP (x, i, j)
1130 = copy_for_inline (XVECEXP (x, i, j));
1131 }
1132 break;
1133 }
1134 }
1135
1136 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1137 {
1138 orig_asm_operands_vector = XVEC (orig, 3);
1139 copy_asm_operands_vector = XVEC (x, 3);
1140 copy_asm_constraints_vector = XVEC (x, 4);
1141 }
1142
1143 return x;
1144 }
1145
1146 /* Unfortunately, we need a global copy of the const_equiv map for communication
1147 with a function called from note_stores. Be *very* careful that this
1148 is used properly in the presence of recursion. */
1149
1150 rtx *global_const_equiv_map;
1151 int global_const_equiv_map_size;
1152 \f
1153 #define FIXED_BASE_PLUS_P(X) \
1154 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1155 && GET_CODE (XEXP (X, 0)) == REG \
1156 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1157 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
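/* E.g. this matches (plus (reg <virtual-stack-vars>) (const_int 4)):
   a constant offset from one of the virtual frame registers. */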
1158
1159 /* Integrate the procedure defined by FNDECL. Note that this function
1160 may wind up calling itself. Since the static variables are not
1161 reentrant, we do not assign them until after the possibility
1162 of recursion is eliminated.
1163
1164 If IGNORE is nonzero, do not produce a value.
1165 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1166
1167 Value is:
1168 (rtx)-1 if we could not substitute the function
1169 0 if we substituted it and it does not produce a value
1170 else an rtx for where the value is stored. */
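/* Illustrative caller-side check (the actual caller is expand_call
   in calls.c):

     temp = expand_inline_function (fndecl, parms, target, ignore,
                                    type, structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ... expansion failed; emit a normal call instead ...  */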
1171
1172 rtx
1173 expand_inline_function (fndecl, parms, target, ignore, type,
1174 structure_value_addr)
1175 tree fndecl, parms;
1176 rtx target;
1177 int ignore;
1178 tree type;
1179 rtx structure_value_addr;
1180 {
1181 tree formal, actual, block;
1182 rtx header = DECL_SAVED_INSNS (fndecl);
1183 rtx insns = FIRST_FUNCTION_INSN (header);
1184 rtx parm_insns = FIRST_PARM_INSN (header);
1185 tree *arg_trees;
1186 rtx *arg_vals;
1187 rtx insn;
1188 int max_regno;
1189 register int i;
1190 int min_labelno = FIRST_LABELNO (header);
1191 int max_labelno = LAST_LABELNO (header);
1192 int nargs;
1193 rtx local_return_label = 0;
1194 rtx loc;
1195 rtx stack_save = 0;
1196 rtx temp;
1197 struct inline_remap *map;
1198 rtx cc0_insn = 0;
1199 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1200 rtx static_chain_value = 0;
1201
1202 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1203 max_regno = MAX_REGNUM (header) + 3;
1204 if (max_regno < FIRST_PSEUDO_REGISTER)
1205 abort ();
1206
1207 nargs = list_length (DECL_ARGUMENTS (fndecl));
1208
1209 /* Check that the parms' types match and that sufficient arguments were
1210 passed. Since the appropriate conversions or default promotions have
1211 already been applied, the machine modes should match exactly. */
1212
1213 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1214 formal;
1215 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1216 {
1217 tree arg;
1218 enum machine_mode mode;
1219
1220 if (actual == 0)
1221 return (rtx) (HOST_WIDE_INT) -1;
1222
1223 arg = TREE_VALUE (actual);
1224 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1225
1226 if (mode != TYPE_MODE (TREE_TYPE (arg))
1227 /* If they are block mode, the types should match exactly.
1228 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1229 which could happen if the parameter has incomplete type. */
1230 || (mode == BLKmode
1231 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1232 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1233 return (rtx) (HOST_WIDE_INT) -1;
1234 }
1235
1236 /* Extra arguments are valid, but will be ignored below, so we must
1237 evaluate them here for side-effects. */
1238 for (; actual; actual = TREE_CHAIN (actual))
1239 expand_expr (TREE_VALUE (actual), const0_rtx,
1240 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1241
1242 /* Make a binding contour to keep inline cleanups called at
1243 outer function-scope level from looking like they are shadowing
1244 parameter declarations. */
1245 pushlevel (0);
1246
1247 /* Make a fresh binding contour that we can easily remove. */
1248 pushlevel (0);
1249 expand_start_bindings (0);
1250
1251 /* Expand the function arguments. Do this first so that any
1252 new registers get created before we allocate the maps. */
1253
1254 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1255 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1256
1257 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1258 formal;
1259 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1260 {
1261 /* Actual parameter, converted to the type of the argument within the
1262 function. */
1263 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1264 /* Mode of the variable used within the function. */
1265 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1266 int invisiref = 0;
1267
1268 arg_trees[i] = arg;
1269 loc = RTVEC_ELT (arg_vector, i);
1270
1271 /* If this is an object passed by invisible reference, we copy the
1272 object into a stack slot and save its address. If this will go
1273 into memory, we do nothing now. Otherwise, we just expand the
1274 argument. */
1275 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1276 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1277 {
1278 rtx stack_slot
1279 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1280 int_size_in_bytes (TREE_TYPE (arg)), 1);
1281 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1282
1283 store_expr (arg, stack_slot, 0);
1284
1285 arg_vals[i] = XEXP (stack_slot, 0);
1286 invisiref = 1;
1287 }
1288 else if (GET_CODE (loc) != MEM)
1289 {
1290 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1291 /* The mode of LOC and ARG can differ if LOC was a variable
1292 that had its mode promoted via PROMOTED_MODE. */
1293 arg_vals[i] = convert_modes (GET_MODE (loc),
1294 TYPE_MODE (TREE_TYPE (arg)),
1295 expand_expr (arg, NULL_RTX, mode,
1296 EXPAND_SUM),
1297 TREE_UNSIGNED (TREE_TYPE (formal)));
1298 else
1299 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1300 }
1301 else
1302 arg_vals[i] = 0;
1303
1304 if (arg_vals[i] != 0
1305 && (! TREE_READONLY (formal)
1306 /* If the parameter is not read-only, copy our argument through
1307 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1308 TARGET in any way. In the inline function, they will likely
1309 be two different pseudos, and `safe_from_p' will make all
1310 sorts of smart assumptions about their not conflicting.
1311 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1312 wrong, so put ARG_VALS[I] into a fresh register.
1313 Don't worry about invisible references, since their stack
1314 temps will never overlap the target. */
1315 || (target != 0
1316 && ! invisiref
1317 && (GET_CODE (arg_vals[i]) == REG
1318 || GET_CODE (arg_vals[i]) == SUBREG
1319 || GET_CODE (arg_vals[i]) == MEM)
1320 && reg_overlap_mentioned_p (arg_vals[i], target))
1321 /* ??? We must always copy a SUBREG into a REG, because it might
1322 get substituted into an address, and not all ports correctly
1323 handle SUBREGs in addresses. */
1324 || (GET_CODE (arg_vals[i]) == SUBREG)))
1325 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1326
1327 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1328 && TREE_CODE (TREE_TYPE (formal)) == POINTER_TYPE)
1329 mark_reg_pointer (arg_vals[i],
1330 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1331 / BITS_PER_UNIT));
1332 }
1333
1334 /* Allocate the structures we use to remap things. */
1335
1336 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1337 map->fndecl = fndecl;
1338
1339 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1340 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1341
1342 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1343 map->label_map -= min_labelno;
1344
1345 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1346 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1347 map->min_insnno = 0;
1348 map->max_insnno = INSN_UID (header);
1349
1350 map->integrating = 1;
1351
1352 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1353 be large enough for all our pseudos. This is the number we are currently
1354 using plus the number in the called routine, plus 15 for each arg,
1355 five to compute the virtual frame pointer, and five for the return value.
1356 This should be enough for most cases. We do not reference entries
1357 outside the range of the map.
1358
1359 ??? These numbers are quite arbitrary and were obtained by
1360 experimentation. At some point, we should try to allocate the
1361 table after all the parameters are set up so we can more accurately
1362 estimate the number of pseudos we will need. */
1363
1364 map->const_equiv_map_size
1365 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1366
1367 map->const_equiv_map
1368 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1369 bzero ((char *) map->const_equiv_map,
1370 map->const_equiv_map_size * sizeof (rtx));
1371
1372 map->const_age_map
1373 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1374 bzero ((char *) map->const_age_map,
1375 map->const_equiv_map_size * sizeof (unsigned));
1376 map->const_age = 0;
1377
1378 /* Record the current insn in case we have to set up pointers to frame
1379 and argument memory blocks. */
1380 map->insns_at_start = get_last_insn ();
1381
1382 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1383 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1384
1385 /* Update the outgoing argument size to allow for those in the inlined
1386 function. */
1387 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1388 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1389
1390 /* If the inline function needs to make PIC references, that means
1391 that this function's PIC offset table must be used. */
1392 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1393 current_function_uses_pic_offset_table = 1;
1394
1395 /* If this function needs a context, set it up. */
1396 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1397 static_chain_value = lookup_static_chain (fndecl);
1398
1399 if (GET_CODE (parm_insns) == NOTE
1400 && NOTE_LINE_NUMBER (parm_insns) > 0)
1401 {
1402 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1403 NOTE_LINE_NUMBER (parm_insns));
1404 if (note)
1405 RTX_INTEGRATED_P (note) = 1;
1406 }
1407
1408 /* Process each argument. For each, set up things so that the function's
1409 reference to the argument will refer to the argument being passed.
1410 We only replace REG with REG here. Any simplifications are done
1411 via const_equiv_map.
1412
1413 We make two passes: In the first, we deal with parameters that will
1414 be placed into registers, since we need to ensure that the allocated
1415 register number fits in const_equiv_map. Then we store all non-register
1416 parameters into their memory location. */
1417
1418 /* Don't try to free temp stack slots here, because we may put one of the
1419 parameters into a temp stack slot. */
1420
1421 for (i = 0; i < nargs; i++)
1422 {
1423 rtx copy = arg_vals[i];
1424
1425 loc = RTVEC_ELT (arg_vector, i);
1426
1427 /* There are four cases, each handled separately. */
1428 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1429 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1430 {
1431 /* This must be an object passed by invisible reference (it could
1432 also be a variable-sized object, but we forbid inlining functions
1433 with variable-sized arguments). COPY is the address of the
1434 actual value (this computation will cause it to be copied). We
1435 map that address for the register, noting the actual address as
1436 an equivalent in case it can be substituted into the insns. */
1437
1438 if (GET_CODE (copy) != REG)
1439 {
1440 temp = copy_addr_to_reg (copy);
1441 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1442 && REGNO (temp) < map->const_equiv_map_size)
1443 {
1444 map->const_equiv_map[REGNO (temp)] = copy;
1445 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1446 }
1447 copy = temp;
1448 }
1449 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1450 }
1451 else if (GET_CODE (loc) == MEM)
1452 {
1453 /* This is the case of a parameter that lives in memory.
1454 It will live in the block we allocate in the called routine's
1455 frame that simulates the incoming argument area. Do nothing
1456 now; we will call store_expr later. */
1457 ;
1458 }
1459 else if (GET_CODE (loc) == REG)
1460 {
1461 /* This is the good case where the parameter is in a register.
1462 If it is read-only and our argument is a constant, set up the
1463 constant equivalence.
1464
1465 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1466 that flag set if it is a register.
1467
1468 Also, don't allow hard registers here; they might not be valid
1469 when substituted into insns. */
1470
1471 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1472 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1473 && ! REG_USERVAR_P (copy))
1474 || (GET_CODE (copy) == REG
1475 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1476 {
1477 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1478 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1479 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1480 && REGNO (temp) < map->const_equiv_map_size)
1481 {
1482 map->const_equiv_map[REGNO (temp)] = copy;
1483 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1484 }
1485 copy = temp;
1486 }
1487 map->reg_map[REGNO (loc)] = copy;
1488 }
1489 else if (GET_CODE (loc) == CONCAT)
1490 {
1491 /* This is the good case where the parameter is in a
1492 pair of separate pseudos.
1493 If it is read-only and our argument is a constant, set up the
1494 constant equivalence.
1495
1496 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1497 that flag set if it is a register.
1498
1499 Also, don't allow hard registers here; they might not be valid
1500 when substituted into insns. */
1501 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1502 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1503 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1504 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1505
1506 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1507 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1508 && ! REG_USERVAR_P (copyreal))
1509 || (GET_CODE (copyreal) == REG
1510 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1511 {
1512 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1513 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1514 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1515 && REGNO (temp) < map->const_equiv_map_size)
1516 {
1517 map->const_equiv_map[REGNO (temp)] = copyreal;
1518 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1519 }
1520 copyreal = temp;
1521 }
1522 map->reg_map[REGNO (locreal)] = copyreal;
1523
1524 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1525 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1526 && ! REG_USERVAR_P (copyimag))
1527 || (GET_CODE (copyimag) == REG
1528 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1529 {
1530 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1531 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1532 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1533 && REGNO (temp) < map->const_equiv_map_size)
1534 {
1535 map->const_equiv_map[REGNO (temp)] = copyimag;
1536 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1537 }
1538 copyimag = temp;
1539 }
1540 map->reg_map[REGNO (locimag)] = copyimag;
1541 }
1542 else
1543 abort ();
1544 }
1545
1546 /* Now do the parameters that will be placed in memory. */
1547
1548 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1549 formal; formal = TREE_CHAIN (formal), i++)
1550 {
1551 loc = RTVEC_ELT (arg_vector, i);
1552
1553 if (GET_CODE (loc) == MEM
1554 /* Exclude case handled above. */
1555 && ! (GET_CODE (XEXP (loc, 0)) == REG
1556 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1557 {
1558 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1559 DECL_SOURCE_LINE (formal));
1560 if (note)
1561 RTX_INTEGRATED_P (note) = 1;
1562
1563 /* Compute the address in the area we reserved and store the
1564 value there. */
1565 temp = copy_rtx_and_substitute (loc, map);
1566 subst_constants (&temp, NULL_RTX, map);
1567 apply_change_group ();
1568 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1569 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1570 store_expr (arg_trees[i], temp, 0);
1571 }
1572 }
1573
1574 /* Deal with the places that the function puts its result.
1575 We are driven by what is placed into DECL_RESULT.
1576
1577 Initially, we assume that we don't need any special handling for
1578 REG_FUNCTION_VALUE_P. */
1579
1580 map->inline_target = 0;
1581 loc = DECL_RTL (DECL_RESULT (fndecl));
1582 if (TYPE_MODE (type) == VOIDmode)
1583 /* There is no return value to worry about. */
1584 ;
1585 else if (GET_CODE (loc) == MEM)
1586 {
1587 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1588 abort ();
1589
1590 /* Pass the function the address in which to return a structure value.
1591 Note that a constructor can cause someone to call us with
1592 STRUCTURE_VALUE_ADDR, but the initialization takes place
1593 via the first parameter, rather than the struct return address.
1594
1595 We have two cases: If the address is a simple register indirect,
1596 use the mapping mechanism to point that register to our structure
1597 return address. Otherwise, store the structure return value into
1598 the place that it will be referenced from. */
1599
1600 if (GET_CODE (XEXP (loc, 0)) == REG)
1601 {
1602 temp = force_reg (Pmode,
1603 force_operand (structure_value_addr, NULL_RTX));
1604 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1605 if ((CONSTANT_P (structure_value_addr)
1606 || (GET_CODE (structure_value_addr) == PLUS
1607 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1608 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1609 && REGNO (temp) < map->const_equiv_map_size)
1610 {
1611 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1612 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1613 }
1614 }
1615 else
1616 {
1617 temp = copy_rtx_and_substitute (loc, map);
1618 subst_constants (&temp, NULL_RTX, map);
1619 apply_change_group ();
1620 emit_move_insn (temp, structure_value_addr);
1621 }
1622 }
1623 else if (ignore)
1624 /* We will ignore the result value, so don't look at its structure.
1625 Note that preparations for an aggregate return value
1626 do need to be made (above) even if it will be ignored. */
1627 ;
1628 else if (GET_CODE (loc) == REG)
1629 {
1630 /* The function returns an object in a register and we use the return
1631 value. Set up our target for remapping. */
1632
1633 /* Machine mode function was declared to return. */
1634 enum machine_mode departing_mode = TYPE_MODE (type);
1635 /* (Possibly wider) machine mode it actually computes
1636 (for the sake of callers that fail to declare it right). */
1637 enum machine_mode arriving_mode
1638 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1639 rtx reg_to_map;
1640
1641 /* Don't use MEMs as direct targets because on some machines
1642 substituting a MEM for a REG makes invalid insns.
1643 Let the combiner substitute the MEM if that is valid. */
1644 if (target == 0 || GET_CODE (target) != REG
1645 || GET_MODE (target) != departing_mode)
1646 target = gen_reg_rtx (departing_mode);
1647
1648 /* If function's value was promoted before return,
1649 avoid machine mode mismatch when we substitute INLINE_TARGET.
1650 But TARGET is what we will return to the caller. */
1651 if (arriving_mode != departing_mode)
1652 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1653 else
1654 reg_to_map = target;
1655
1656 /* Usually, the result value is the machine's return register.
1657 Sometimes it may be a pseudo. Handle both cases. */
1658 if (REG_FUNCTION_VALUE_P (loc))
1659 map->inline_target = reg_to_map;
1660 else
1661 map->reg_map[REGNO (loc)] = reg_to_map;
1662 }
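  /* Illustrative example (hypothetical modes): if the callee is declared
     to return `short' (DEPARTING_MODE == HImode) but actually computes
     its value in SImode because of return-value promotion, TARGET is an
     HImode pseudo and REG_TO_MAP is (subreg:SI (reg:HI NNN) 0), so
     substitutions inside the copied body see the mode they expect while
     the caller still receives an HImode value.  */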
1663
1664 /* Make new label equivalences for the labels in the called function. */
1665 for (i = min_labelno; i < max_labelno; i++)
1666 map->label_map[i] = gen_label_rtx ();
1667
1668 /* Perform postincrements before actually calling the function. */
1669 emit_queue ();
1670
1671 /* Clean up stack so that variables might have smaller offsets. */
1672 do_pending_stack_adjust ();
1673
1674 /* Save a copy of the location of const_equiv_map for mark_stores, called
1675 via note_stores. */
1676 global_const_equiv_map = map->const_equiv_map;
1677 global_const_equiv_map_size = map->const_equiv_map_size;
1678
1679 /* If the called function does an alloca, save and restore the
1680 stack pointer around the call. This saves stack space, but
1681 also is required if this inline is being done between two
1682 pushes. */
1683 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1684 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1685
1686 /* Now copy the insns one by one. Do this in two passes, first the insns and
1687 then their REG_NOTES, just like save_for_inline. */
1688
1689 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1690
1691 for (insn = insns; insn; insn = NEXT_INSN (insn))
1692 {
1693 rtx copy, pattern, set;
1694
1695 map->orig_asm_operands_vector = 0;
1696
1697 switch (GET_CODE (insn))
1698 {
1699 case INSN:
1700 pattern = PATTERN (insn);
1701 set = single_set (insn);
1702 copy = 0;
1703 if (GET_CODE (pattern) == USE
1704 && GET_CODE (XEXP (pattern, 0)) == REG
1705 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1706 /* The (USE (REG n)) at return from the function should
1707 be ignored since we are changing (REG n) into
1708 inline_target. */
1709 break;
1710
1711 /* Ignore setting a function value that we don't want to use. */
1712 if (map->inline_target == 0
1713 && set != 0
1714 && GET_CODE (SET_DEST (set)) == REG
1715 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1716 {
1717 if (volatile_refs_p (SET_SRC (set)))
1718 {
1719 rtx new_set;
1720
1721 /* If we must not delete the source,
1722 load it into a new temporary. */
1723 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1724
1725 new_set = single_set (copy);
1726 if (new_set == 0)
1727 abort ();
1728
1729 SET_DEST (new_set)
1730 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1731 }
1732 /* If the source and destination are the same and it
1733 has a note on it, keep the insn. */
1734 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1735 && REG_NOTES (insn) != 0)
1736 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1737 else
1738 break;
1739 }
1740
1741 /* If this is setting the static chain rtx, omit it. */
1742 else if (static_chain_value != 0
1743 && set != 0
1744 && GET_CODE (SET_DEST (set)) == REG
1745 && rtx_equal_p (SET_DEST (set),
1746 static_chain_incoming_rtx))
1747 break;
1748
1749 /* If this is setting the static chain pseudo, set it from
1750 the value we want to give it instead. */
1751 else if (static_chain_value != 0
1752 && set != 0
1753 && rtx_equal_p (SET_SRC (set),
1754 static_chain_incoming_rtx))
1755 {
1756 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1757
1758 copy = emit_move_insn (newdest, static_chain_value);
1759 static_chain_value = 0;
1760 }
1761 else
1762 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1763 /* REG_NOTES will be copied later. */
1764
1765 #ifdef HAVE_cc0
1766 /* If this insn is setting CC0, it may need to look at
1767 the insn that uses CC0 to see what type of insn it is.
1768 In that case, the call to recog via validate_change will
1769 fail. So don't substitute constants here. Instead,
1770 do it when we emit the following insn.
1771
1772 For example, see the pyr.md file. That machine has signed and
1773 unsigned compares. The compare patterns must check the
 1774 	     following branch insn to see what kind of compare to
1775 emit.
1776
1777 If the previous insn set CC0, substitute constants on it as
1778 well. */
1779 if (sets_cc0_p (PATTERN (copy)) != 0)
1780 cc0_insn = copy;
1781 else
1782 {
1783 if (cc0_insn)
1784 try_constants (cc0_insn, map);
1785 cc0_insn = 0;
1786 try_constants (copy, map);
1787 }
1788 #else
1789 try_constants (copy, map);
1790 #endif
1791 break;
1792
1793 case JUMP_INSN:
1794 if (GET_CODE (PATTERN (insn)) == RETURN)
1795 {
1796 if (local_return_label == 0)
1797 local_return_label = gen_label_rtx ();
1798 pattern = gen_jump (local_return_label);
1799 }
1800 else
1801 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1802
1803 copy = emit_jump_insn (pattern);
1804
1805 #ifdef HAVE_cc0
1806 if (cc0_insn)
1807 try_constants (cc0_insn, map);
1808 cc0_insn = 0;
1809 #endif
1810 try_constants (copy, map);
1811
 1812 	  /* If this used to be a conditional jump insn whose branch
 1813 	     direction is now known, we must do something special.  */
1814 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1815 {
1816 #ifdef HAVE_cc0
1817 /* The previous insn set cc0 for us. So delete it. */
1818 delete_insn (PREV_INSN (copy));
1819 #endif
1820
1821 /* If this is now a no-op, delete it. */
1822 if (map->last_pc_value == pc_rtx)
1823 {
1824 delete_insn (copy);
1825 copy = 0;
1826 }
1827 else
 1828 		/* Otherwise, this is an unconditional jump, so we must put a
1829 BARRIER after it. We could do some dead code elimination
1830 here, but jump.c will do it just as well. */
1831 emit_barrier ();
1832 }
1833 break;
1834
1835 case CALL_INSN:
1836 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1837 copy = emit_call_insn (pattern);
1838
1839 /* Because the USAGE information potentially contains objects other
1840 than hard registers, we need to copy it. */
1841 CALL_INSN_FUNCTION_USAGE (copy) =
1842 copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1843
1844 #ifdef HAVE_cc0
1845 if (cc0_insn)
1846 try_constants (cc0_insn, map);
1847 cc0_insn = 0;
1848 #endif
1849 try_constants (copy, map);
1850
1851 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1852 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1853 map->const_equiv_map[i] = 0;
1854 break;
1855
1856 case CODE_LABEL:
1857 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1858 LABEL_NAME (copy) = LABEL_NAME (insn);
1859 map->const_age++;
1860 break;
1861
1862 case BARRIER:
1863 copy = emit_barrier ();
1864 break;
1865
1866 case NOTE:
1867 /* It is important to discard function-end and function-beg notes,
1868 so we have only one of each in the current function.
1869 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1870 deleted these in the copy used for continuing compilation,
1871 not the copy used for inlining). */
1872 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1873 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1874 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1875 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1876 else
1877 copy = 0;
1878 break;
1879
1880 default:
1881 abort ();
1882 break;
1883 }
1884
1885 if (copy)
1886 RTX_INTEGRATED_P (copy) = 1;
1887
1888 map->insn_map[INSN_UID (insn)] = copy;
1889 }
1890
1891 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1892 from parameters can be substituted in. These are the only ones that
1893 are valid across the entire function. */
1894 map->const_age++;
1895 for (insn = insns; insn; insn = NEXT_INSN (insn))
1896 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1897 && map->insn_map[INSN_UID (insn)]
1898 && REG_NOTES (insn))
1899 {
1900 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1901 /* We must also do subst_constants, in case one of our parameters
1902 has const type and constant value. */
1903 subst_constants (&tem, NULL_RTX, map);
1904 apply_change_group ();
1905 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1906 }
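  /* Illustrative sketch (not from the original source): a note such as
     (expr_list:REG_EQUAL (plus:SI (reg 60) (const_int 4)) ...) is copied
     with reg 60 remapped; since const_age was bumped above, only the
     CONST_AGE_PARM equivalences (the incoming parameters) remain
     eligible for substitution here.  */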
1907
1908 if (local_return_label)
1909 emit_label (local_return_label);
1910
1911 /* Restore the stack pointer if we saved it above. */
1912 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1913 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1914
1915 /* Make copies of the decls of the symbols in the inline function, so that
1916 the copies of the variables get declared in the current function. Set
 1917      up things so that lookup_static_chain knows to interpret registers
1918 in SAVE_EXPRs for TYPE_SIZEs as local. */
1919
1920 inline_function_decl = fndecl;
1921 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1922 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1923 inline_function_decl = 0;
1924
1925 /* End the scope containing the copied formal parameter variables
1926 and copied LABEL_DECLs. */
1927
1928 expand_end_bindings (getdecls (), 1, 1);
1929 block = poplevel (1, 1, 0);
1930 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1931 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1932 poplevel (0, 0, 0);
1933 emit_line_note (input_filename, lineno);
1934
1935 if (structure_value_addr)
1936 {
1937 target = gen_rtx (MEM, TYPE_MODE (type),
1938 memory_address (TYPE_MODE (type), structure_value_addr));
1939 MEM_IN_STRUCT_P (target) = 1;
1940 }
1941 return target;
1942 }
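/* Illustrative summary (not from the original source): for a callee like

       inline int sq (int x) { return x * x; }

   the expansion above allocates a fresh pseudo for X, maps the callee's
   pseudos and labels into the current function, copies the body insns
   with constants folded where possible, and returns TARGET (or a MEM at
   STRUCTURE_VALUE_ADDR for an aggregate return) holding the result.  */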
1943 \f
1944 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1945 push all of those decls and give each one the corresponding home. */
1946
1947 static void
1948 integrate_parm_decls (args, map, arg_vector)
1949 tree args;
1950 struct inline_remap *map;
1951 rtvec arg_vector;
1952 {
1953 register tree tail;
1954 register int i;
1955
1956 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1957 {
1958 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1959 TREE_TYPE (tail));
1960 rtx new_decl_rtl
1961 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1962
1963 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
1964 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1965 here, but that's going to require some more work. */
1966 /* DECL_INCOMING_RTL (decl) = ?; */
1967 /* These args would always appear unused, if not for this. */
1968 TREE_USED (decl) = 1;
1969 /* Prevent warning for shadowing with these. */
1970 DECL_ABSTRACT_ORIGIN (decl) = tail;
1971 pushdecl (decl);
1972 /* Fully instantiate the address with the equivalent form so that the
1973 debugging information contains the actual register, instead of the
1974 virtual register. Do this by not passing an insn to
1975 subst_constants. */
1976 subst_constants (&new_decl_rtl, NULL_RTX, map);
1977 apply_change_group ();
1978 DECL_RTL (decl) = new_decl_rtl;
1979 }
1980 }
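/* Illustrative sketch (hypothetical names): for a callee parameter
   `int n' whose home was (reg:SI 61), the loop above builds a VAR_DECL
   also named `n' in the caller, points its DECL_ABSTRACT_ORIGIN back at
   the original PARM_DECL, and gives it the remapped rtl, so debuggers
   can still find the inlined parameter.  */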
1981
1982 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1983 current function a tree of contexts isomorphic to the one that is given.
1984
1985 LEVEL indicates how far down into the BLOCK tree is the node we are
1986 currently traversing. It is always zero except for recursive calls.
1987
1988 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1989 registers used in the DECL_RTL field should be remapped. If it is zero,
1990 no mapping is necessary. */
1991
1992 static void
1993 integrate_decl_tree (let, level, map)
1994 tree let;
1995 int level;
1996 struct inline_remap *map;
1997 {
1998 tree t, node;
1999
2000 if (level > 0)
2001 pushlevel (0);
2002
2003 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2004 {
2005 tree d;
2006
2007 push_obstacks_nochange ();
2008 saveable_allocation ();
2009 d = copy_node (t);
2010 pop_obstacks ();
2011
2012 if (DECL_RTL (t) != 0)
2013 {
2014 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2015 /* Fully instantiate the address with the equivalent form so that the
2016 debugging information contains the actual register, instead of the
2017 virtual register. Do this by not passing an insn to
2018 subst_constants. */
2019 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2020 apply_change_group ();
2021 }
2022 /* These args would always appear unused, if not for this. */
2023 TREE_USED (d) = 1;
2024 /* Prevent warning for shadowing with these. */
2025 DECL_ABSTRACT_ORIGIN (d) = t;
2026
2027 if (DECL_LANG_SPECIFIC (d))
2028 copy_lang_decl (d);
2029
2030 pushdecl (d);
2031 }
2032
2033 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2034 integrate_decl_tree (t, level + 1, map);
2035
2036 if (level > 0)
2037 {
2038 node = poplevel (1, 0, 0);
2039 if (node)
2040 {
2041 TREE_USED (node) = TREE_USED (let);
2042 BLOCK_ABSTRACT_ORIGIN (node) = let;
2043 }
2044 }
2045 }
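/* Illustrative sketch (not from the original source): a callee body

       { int i; { int j; ... } }

   yields an isomorphic BLOCK tree in the caller, with copies of `i' and
   `j' pushed at the corresponding levels and each copy's
   DECL_ABSTRACT_ORIGIN pointing back at the original decl.  */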
2046
2047 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2048 through save_constants. */
2049
2050 static void
2051 save_constants_in_decl_trees (let)
2052 tree let;
2053 {
2054 tree t;
2055
2056 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2057 if (DECL_RTL (t) != 0)
2058 save_constants (&DECL_RTL (t));
2059
2060 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2061 save_constants_in_decl_trees (t);
2062 }
2063 \f
2064 /* Create a new copy of an rtx.
2065 Recursively copies the operands of the rtx,
2066 except for those few rtx codes that are sharable.
2067
 2068    We always return an rtx that is similar to the incoming rtx, with the
2069 exception of possibly changing a REG to a SUBREG or vice versa. No
2070 rtl is ever emitted.
2071
2072 Handle constants that need to be placed in the constant pool by
2073 calling `force_const_mem'. */
2074
2075 rtx
2076 copy_rtx_and_substitute (orig, map)
2077 register rtx orig;
2078 struct inline_remap *map;
2079 {
2080 register rtx copy, temp;
2081 register int i, j;
2082 register RTX_CODE code;
2083 register enum machine_mode mode;
2084 register char *format_ptr;
2085 int regno;
2086
2087 if (orig == 0)
2088 return 0;
2089
2090 code = GET_CODE (orig);
2091 mode = GET_MODE (orig);
2092
2093 switch (code)
2094 {
2095 case REG:
2096 /* If the stack pointer register shows up, it must be part of
2097 stack-adjustments (*not* because we eliminated the frame pointer!).
2098 Small hard registers are returned as-is. Pseudo-registers
2099 go through their `reg_map'. */
2100 regno = REGNO (orig);
2101 if (regno <= LAST_VIRTUAL_REGISTER)
2102 {
2103 /* Some hard registers are also mapped,
2104 but others are not translated. */
2105 if (map->reg_map[regno] != 0)
2106 return map->reg_map[regno];
2107
2108 /* If this is the virtual frame pointer, make space in current
2109 function's stack frame for the stack frame of the inline function.
2110
2111 Copy the address of this area into a pseudo. Map
2112 virtual_stack_vars_rtx to this pseudo and set up a constant
2113 equivalence for it to be the address. This will substitute the
2114 address into insns where it can be substituted and use the new
2115 pseudo where it can't. */
2116 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2117 {
2118 rtx loc, seq;
2119 int size = DECL_FRAME_SIZE (map->fndecl);
2120 int rounded;
2121
2122 start_sequence ();
2123 loc = assign_stack_temp (BLKmode, size, 1);
2124 loc = XEXP (loc, 0);
2125 #ifdef FRAME_GROWS_DOWNWARD
2126 /* In this case, virtual_stack_vars_rtx points to one byte
2127 higher than the top of the frame area. So compute the offset
2128 to one byte higher than our substitute frame.
2129 Keep the fake frame pointer aligned like a real one. */
2130 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2131 loc = plus_constant (loc, rounded);
2132 #endif
2133 map->reg_map[regno] = temp
2134 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2135
2136 #ifdef STACK_BOUNDARY
2137 mark_reg_pointer (map->reg_map[regno],
2138 STACK_BOUNDARY / BITS_PER_UNIT);
2139 #endif
2140
2141 if (REGNO (temp) < map->const_equiv_map_size)
2142 {
2143 map->const_equiv_map[REGNO (temp)] = loc;
2144 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2145 }
2146
2147 seq = gen_sequence ();
2148 end_sequence ();
2149 emit_insn_after (seq, map->insns_at_start);
2150 return temp;
2151 }
2152 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2153 {
2154 /* Do the same for a block to contain any arguments referenced
2155 in memory. */
2156 rtx loc, seq;
2157 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2158
2159 start_sequence ();
2160 loc = assign_stack_temp (BLKmode, size, 1);
2161 loc = XEXP (loc, 0);
2162 /* When arguments grow downward, the virtual incoming
2163 args pointer points to the top of the argument block,
2164 so the remapped location better do the same. */
2165 #ifdef ARGS_GROW_DOWNWARD
2166 loc = plus_constant (loc, size);
2167 #endif
2168 map->reg_map[regno] = temp
2169 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2170
2171 #ifdef STACK_BOUNDARY
2172 mark_reg_pointer (map->reg_map[regno],
2173 STACK_BOUNDARY / BITS_PER_UNIT);
2174 #endif
2175
2176 if (REGNO (temp) < map->const_equiv_map_size)
2177 {
2178 map->const_equiv_map[REGNO (temp)] = loc;
2179 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2180 }
2181
2182 seq = gen_sequence ();
2183 end_sequence ();
2184 emit_insn_after (seq, map->insns_at_start);
2185 return temp;
2186 }
2187 else if (REG_FUNCTION_VALUE_P (orig))
2188 {
2189 /* This is a reference to the function return value. If
2190 the function doesn't have a return value, error. If the
2191 mode doesn't agree, make a SUBREG. */
2192 if (map->inline_target == 0)
2193 /* Must be unrolling loops or replicating code if we
2194 reach here, so return the register unchanged. */
2195 return orig;
2196 else if (mode != GET_MODE (map->inline_target))
2197 return gen_lowpart (mode, map->inline_target);
2198 else
2199 return map->inline_target;
2200 }
2201 return orig;
2202 }
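      /* Illustrative sketch of the virtual-register handling above
	 (hypothetical register numbers): a callee frame reference such as

	     (plus:SI (reg virtual-stack-vars) (const_int -4))

	 comes back with the virtual register replaced by the pseudo
	 created above (say reg 105), whose const_equiv entry lets
	 subst_constants fold the sum back into a caller-frame address
	 wherever that makes a valid operand.  */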
2203 if (map->reg_map[regno] == NULL)
2204 {
2205 map->reg_map[regno] = gen_reg_rtx (mode);
2206 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2207 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2208 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2209 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2210
2211 if (map->regno_pointer_flag[regno])
2212 mark_reg_pointer (map->reg_map[regno],
2213 map->regno_pointer_align[regno]);
2214 }
2215 return map->reg_map[regno];
2216
2217 case SUBREG:
2218 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2219 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2220 if (GET_CODE (copy) == SUBREG)
2221 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2222 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2223 else if (GET_CODE (copy) == CONCAT)
2224 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2225 else
2226 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2227 SUBREG_WORD (orig));
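      /* Illustrative example of the nested-SUBREG case (hypothetical
	 registers, 32-bit words): if ORIG is (subreg:SI (reg:DI 70) 1)
	 and reg 70 was remapped to (subreg:DI (reg:TI 90) 1), the word
	 numbers add, giving (subreg:SI (reg:TI 90) 2) rather than a
	 nested SUBREG.  */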
2228
2229 case USE:
2230 case CLOBBER:
2231 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2232 to (use foo) if the original insn didn't have a subreg.
2233 Removing the subreg distorts the VAX movstrhi pattern
2234 by changing the mode of an operand. */
2235 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2236 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2237 copy = SUBREG_REG (copy);
2238 return gen_rtx (code, VOIDmode, copy);
2239
2240 case CODE_LABEL:
2241 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2242 = LABEL_PRESERVE_P (orig);
2243 return map->label_map[CODE_LABEL_NUMBER (orig)];
2244
2245 case LABEL_REF:
2246 copy = gen_rtx (LABEL_REF, mode,
2247 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2248 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2249 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2250
2251 /* The fact that this label was previously nonlocal does not mean
2252 it still is, so we must check if it is within the range of
2253 this function's labels. */
2254 LABEL_REF_NONLOCAL_P (copy)
2255 = (LABEL_REF_NONLOCAL_P (orig)
2256 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2257 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2258
2259 /* If we have made a nonlocal label local, it means that this
2260 inlined call will be referring to our nonlocal goto handler.
2261 So make sure we create one for this block; we normally would
2262 not since this is not otherwise considered a "call". */
2263 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2264 function_call_count++;
2265
2266 return copy;
2267
2268 case PC:
2269 case CC0:
2270 case CONST_INT:
2271 return orig;
2272
2273 case SYMBOL_REF:
2274 /* Symbols which represent the address of a label stored in the constant
2275 pool must be modified to point to a constant pool entry for the
2276 remapped label. Otherwise, symbols are returned unchanged. */
2277 if (CONSTANT_POOL_ADDRESS_P (orig))
2278 {
2279 rtx constant = get_pool_constant (orig);
2280 if (GET_CODE (constant) == LABEL_REF)
2281 return XEXP (force_const_mem (Pmode,
2282 copy_rtx_and_substitute (constant,
2283 map)),
2284 0);
2285 }
2286
2287 return orig;
2288
2289 case CONST_DOUBLE:
 2290       /* We have to make a new copy of this CONST_DOUBLE because we don't want
2291 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2292 duplicate of a CONST_DOUBLE we have already seen. */
2293 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2294 {
2295 REAL_VALUE_TYPE d;
2296
2297 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2298 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2299 }
2300 else
2301 return immed_double_const (CONST_DOUBLE_LOW (orig),
2302 CONST_DOUBLE_HIGH (orig), VOIDmode);
2303
2304 case CONST:
2305 /* Make new constant pool entry for a constant
2306 that was in the pool of the inline function. */
2307 if (RTX_INTEGRATED_P (orig))
2308 {
2309 /* If this was an address of a constant pool entry that itself
2310 had to be placed in the constant pool, it might not be a
2311 valid address. So the recursive call below might turn it
2312 into a register. In that case, it isn't a constant any
2313 more, so return it. This has the potential of changing a
 2314 	     MEM into a REG, but we'll assume that it is safe.  */
2315 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2316 if (! CONSTANT_P (temp))
2317 return temp;
2318 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2319 }
2320 break;
2321
2322 case ADDRESS:
2323 /* If from constant pool address, make new constant pool entry and
2324 return its address. */
2325 if (! RTX_INTEGRATED_P (orig))
2326 abort ();
2327
2328 temp = force_const_mem (GET_MODE (orig),
2329 copy_rtx_and_substitute (XEXP (orig, 0), map));
2330
2331 #if 0
2332 /* Legitimizing the address here is incorrect.
2333
2334 The only ADDRESS rtx's that can reach here are ones created by
2335 save_constants. Hence the operand of the ADDRESS is always valid
2336 in this position of the instruction, since the original rtx without
2337 the ADDRESS was valid.
2338
2339 The reason we don't legitimize the address here is that on the
2340 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2341 This code forces the operand of the address to a register, which
2342 fails because we can not take the HIGH part of a register.
2343
2344 Also, change_address may create new registers. These registers
2345 will not have valid reg_map entries. This can cause try_constants()
 2346        to fail because it assumes that all registers in the rtx have valid
2347 reg_map entries, and it may end up replacing one of these new
2348 registers with junk. */
2349
2350 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2351 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2352 #endif
2353
2354 return XEXP (temp, 0);
2355
2356 case ASM_OPERANDS:
2357 /* If a single asm insn contains multiple output operands
2358 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2359 We must make sure that the copied insn continues to share it. */
2360 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2361 {
2362 copy = rtx_alloc (ASM_OPERANDS);
2363 copy->volatil = orig->volatil;
2364 XSTR (copy, 0) = XSTR (orig, 0);
2365 XSTR (copy, 1) = XSTR (orig, 1);
2366 XINT (copy, 2) = XINT (orig, 2);
2367 XVEC (copy, 3) = map->copy_asm_operands_vector;
2368 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2369 XSTR (copy, 5) = XSTR (orig, 5);
2370 XINT (copy, 6) = XINT (orig, 6);
2371 return copy;
2372 }
2373 break;
2374
2375 case CALL:
2376 /* This is given special treatment because the first
2377 operand of a CALL is a (MEM ...) which may get
2378 forced into a register for cse. This is undesirable
2379 if function-address cse isn't wanted or if we won't do cse. */
2380 #ifndef NO_FUNCTION_CSE
2381 if (! (optimize && ! flag_no_function_cse))
2382 #endif
2383 return gen_rtx (CALL, GET_MODE (orig),
2384 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2385 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2386 copy_rtx_and_substitute (XEXP (orig, 1), map));
2387 break;
2388
2389 #if 0
2390 /* Must be ifdefed out for loop unrolling to work. */
2391 case RETURN:
2392 abort ();
2393 #endif
2394
2395 case SET:
2396 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2397 Don't alter that.
2398 If the nonlocal goto is into the current function,
2399 this will result in unnecessarily bad code, but should work. */
2400 if (SET_DEST (orig) == virtual_stack_vars_rtx
2401 || SET_DEST (orig) == virtual_incoming_args_rtx)
2402 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2403 copy_rtx_and_substitute (SET_SRC (orig), map));
2404 break;
2405
2406 case MEM:
2407 copy = rtx_alloc (MEM);
2408 PUT_MODE (copy, mode);
2409 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2410 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2411 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2412
2413 /* If doing function inlining, this MEM might not be const in the
2414 function that it is being inlined into, and thus may not be
2415 unchanging after function inlining. Constant pool references are
2416 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2417 for them. */
2418 if (! map->integrating)
2419 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2420
2421 return copy;
2422 }
2423
2424 copy = rtx_alloc (code);
2425 PUT_MODE (copy, mode);
2426 copy->in_struct = orig->in_struct;
2427 copy->volatil = orig->volatil;
2428 copy->unchanging = orig->unchanging;
2429
2430 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2431
2432 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2433 {
2434 switch (*format_ptr++)
2435 {
2436 case '0':
2437 break;
2438
2439 case 'e':
2440 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2441 break;
2442
2443 case 'u':
2444 /* Change any references to old-insns to point to the
2445 corresponding copied insns. */
2446 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2447 break;
2448
2449 case 'E':
2450 XVEC (copy, i) = XVEC (orig, i);
2451 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2452 {
2453 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2454 for (j = 0; j < XVECLEN (copy, i); j++)
2455 XVECEXP (copy, i, j)
2456 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2457 }
2458 break;
2459
2460 case 'w':
2461 XWINT (copy, i) = XWINT (orig, i);
2462 break;
2463
2464 case 'i':
2465 XINT (copy, i) = XINT (orig, i);
2466 break;
2467
2468 case 's':
2469 XSTR (copy, i) = XSTR (orig, i);
2470 break;
2471
2472 default:
2473 abort ();
2474 }
2475 }
2476
2477 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2478 {
2479 map->orig_asm_operands_vector = XVEC (orig, 3);
2480 map->copy_asm_operands_vector = XVEC (copy, 3);
2481 map->copy_asm_constraints_vector = XVEC (copy, 4);
2482 }
2483
2484 return copy;
2485 }
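/* Usage sketch (mirrors the call sites in expand_inline_function above;
   not additional code):

       copy = emit_insn (copy_rtx_and_substitute (PATTERN (insn), map));
       try_constants (copy, map);

   copies one insn's pattern into the current function and then folds in
   any constant equivalences recorded in MAP.  */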
2486 \f
2487 /* Substitute known constant values into INSN, if that is valid. */
2488
2489 void
2490 try_constants (insn, map)
2491 rtx insn;
2492 struct inline_remap *map;
2493 {
2494 int i;
2495
2496 map->num_sets = 0;
2497 subst_constants (&PATTERN (insn), insn, map);
2498
2499 /* Apply the changes if they are valid; otherwise discard them. */
2500 apply_change_group ();
2501
2502 /* Show we don't know the value of anything stored or clobbered. */
2503 note_stores (PATTERN (insn), mark_stores);
2504 map->last_pc_value = 0;
2505 #ifdef HAVE_cc0
2506 map->last_cc0_value = 0;
2507 #endif
2508
2509 /* Set up any constant equivalences made in this insn. */
2510 for (i = 0; i < map->num_sets; i++)
2511 {
2512 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2513 {
2514 int regno = REGNO (map->equiv_sets[i].dest);
2515
2516 if (regno < map->const_equiv_map_size
2517 && (map->const_equiv_map[regno] == 0
 2518 		/* The following clause is a hack to handle the case where GNU C++
 2519 		   reassigns a variable to make cse work right.  */
2520 || ! rtx_equal_p (map->const_equiv_map[regno],
2521 map->equiv_sets[i].equiv)))
2522 {
2523 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2524 map->const_age_map[regno] = map->const_age;
2525 }
2526 }
2527 else if (map->equiv_sets[i].dest == pc_rtx)
2528 map->last_pc_value = map->equiv_sets[i].equiv;
2529 #ifdef HAVE_cc0
2530 else if (map->equiv_sets[i].dest == cc0_rtx)
2531 map->last_cc0_value = map->equiv_sets[i].equiv;
2532 #endif
2533 }
2534 }
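/* Illustrative sketch (hypothetical register numbers): after copying
   (set (reg 65) (const_int 10)), the loop above records reg 65 as
   equivalent to (const_int 10) in const_equiv_map, so a later insn
   using reg 65 can have the constant substituted, subject to the
   const_age check in subst_constants.  */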
2535 \f
2536 /* Substitute known constants for pseudo regs in the contents of LOC,
2537 which are part of INSN.
2538 If INSN is zero, the substitution should always be done (this is used to
2539 update DECL_RTL).
2540 These changes are taken out by try_constants if the result is not valid.
2541
2542 Note that we are more concerned with determining when the result of a SET
2543 is a constant, for further propagation, than actually inserting constants
2544 into insns; cse will do the latter task better.
2545
 2546    This function is also used to adjust the addresses of items previously addressed
2547 via the virtual stack variable or virtual incoming arguments registers. */
2548
2549 static void
2550 subst_constants (loc, insn, map)
2551 rtx *loc;
2552 rtx insn;
2553 struct inline_remap *map;
2554 {
2555 rtx x = *loc;
2556 register int i;
2557 register enum rtx_code code;
2558 register char *format_ptr;
2559 int num_changes = num_validated_changes ();
2560 rtx new = 0;
2561 enum machine_mode op0_mode;
2562
2563 code = GET_CODE (x);
2564
2565 switch (code)
2566 {
2567 case PC:
2568 case CONST_INT:
2569 case CONST_DOUBLE:
2570 case SYMBOL_REF:
2571 case CONST:
2572 case LABEL_REF:
2573 case ADDRESS:
2574 return;
2575
2576 #ifdef HAVE_cc0
2577 case CC0:
2578 validate_change (insn, loc, map->last_cc0_value, 1);
2579 return;
2580 #endif
2581
2582 case USE:
2583 case CLOBBER:
2584 /* The only thing we can do with a USE or CLOBBER is possibly do
2585 some substitutions in a MEM within it. */
2586 if (GET_CODE (XEXP (x, 0)) == MEM)
2587 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2588 return;
2589
2590 case REG:
2591 /* Substitute for parms and known constants. Don't replace
2592 hard regs used as user variables with constants. */
2593 {
2594 int regno = REGNO (x);
2595
2596 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2597 && regno < map->const_equiv_map_size
2598 && map->const_equiv_map[regno] != 0
2599 && map->const_age_map[regno] >= map->const_age)
2600 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2601 return;
2602 }
2603
2604 case SUBREG:
2605 /* SUBREG applied to something other than a reg
2606 should be treated as ordinary, since that must
2607 be a special hack and we don't know how to treat it specially.
2608 Consider for example mulsidi3 in m68k.md.
2609 Ordinary SUBREG of a REG needs this special treatment. */
2610 if (GET_CODE (SUBREG_REG (x)) == REG)
2611 {
2612 rtx inner = SUBREG_REG (x);
2613 rtx new = 0;
2614
2615 /* We can't call subst_constants on &SUBREG_REG (x) because any
 2616 	     constant or SUBREG wouldn't be valid inside our SUBREG.  Instead,
2617 see what is inside, try to form the new SUBREG and see if that is
2618 valid. We handle two cases: extracting a full word in an
2619 integral mode and extracting the low part. */
2620 subst_constants (&inner, NULL_RTX, map);
2621
2622 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2623 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2624 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2625 new = operand_subword (inner, SUBREG_WORD (x), 0,
2626 GET_MODE (SUBREG_REG (x)));
2627
2628 if (new == 0 && subreg_lowpart_p (x))
2629 new = gen_lowpart_common (GET_MODE (x), inner);
2630
2631 if (new)
2632 validate_change (insn, loc, new, 1);
2633
2634 return;
2635 }
2636 break;
2637
2638 case MEM:
2639 subst_constants (&XEXP (x, 0), insn, map);
2640
2641 /* If a memory address got spoiled, change it back. */
2642 if (insn != 0 && num_validated_changes () != num_changes
2643 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2644 cancel_changes (num_changes);
2645 return;
2646
2647 case SET:
2648 {
2649 /* Substitute constants in our source, and in any arguments to a
 2650 	 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2651 itself. */
2652 rtx *dest_loc = &SET_DEST (x);
2653 rtx dest = *dest_loc;
2654 rtx src, tem;
2655
2656 subst_constants (&SET_SRC (x), insn, map);
2657 src = SET_SRC (x);
2658
2659 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2660 || GET_CODE (*dest_loc) == SUBREG
2661 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2662 {
2663 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2664 {
2665 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2666 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2667 }
2668 dest_loc = &XEXP (*dest_loc, 0);
2669 }
2670
2671 /* Do substitute in the address of a destination in memory. */
2672 if (GET_CODE (*dest_loc) == MEM)
2673 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2674
 2675 	/* Check for the case where DEST is a SUBREG, both it and the underlying
 2676 	   register are no larger than one word, and the SUBREG mode is at least
 2677 	   as wide.  In that case, we are really setting the underlying register to the
2678 source converted to the mode of DEST. So indicate that. */
2679 if (GET_CODE (dest) == SUBREG
2680 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2681 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2682 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2683 <= GET_MODE_SIZE (GET_MODE (dest)))
2684 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2685 src)))
2686 src = tem, dest = SUBREG_REG (dest);
2687
2688 /* If storing a recognizable value save it for later recording. */
2689 if ((map->num_sets < MAX_RECOG_OPERANDS)
2690 && (CONSTANT_P (src)
2691 || (GET_CODE (src) == REG
2692 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2693 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2694 || (GET_CODE (src) == PLUS
2695 && GET_CODE (XEXP (src, 0)) == REG
2696 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2697 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2698 && CONSTANT_P (XEXP (src, 1)))
2699 || GET_CODE (src) == COMPARE
2700 #ifdef HAVE_cc0
2701 || dest == cc0_rtx
2702 #endif
2703 || (dest == pc_rtx
2704 && (src == pc_rtx || GET_CODE (src) == RETURN
2705 || GET_CODE (src) == LABEL_REF))))
2706 {
2707 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2708 it will cause us to save the COMPARE with any constants
2709 substituted, which is what we want for later. */
2710 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2711 map->equiv_sets[map->num_sets++].dest = dest;
2712 }
2713
2714 return;
2715 }
2716 }
2717
2718 format_ptr = GET_RTX_FORMAT (code);
2719
2720 /* If the first operand is an expression, save its mode for later. */
2721 if (*format_ptr == 'e')
2722 op0_mode = GET_MODE (XEXP (x, 0));
2723
2724 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2725 {
2726 switch (*format_ptr++)
2727 {
2728 case '0':
2729 break;
2730
2731 case 'e':
2732 if (XEXP (x, i))
2733 subst_constants (&XEXP (x, i), insn, map);
2734 break;
2735
2736 case 'u':
2737 case 'i':
2738 case 's':
2739 case 'w':
2740 break;
2741
2742 case 'E':
2743 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2744 {
2745 int j;
2746 for (j = 0; j < XVECLEN (x, i); j++)
2747 subst_constants (&XVECEXP (x, i, j), insn, map);
2748 }
2749 break;
2750
2751 default:
2752 abort ();
2753 }
2754 }
2755
2756 /* If this is a commutative operation, move a constant to the second
2757 operand unless the second operand is already a CONST_INT. */
2758 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2759 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2760 {
2761 rtx tem = XEXP (x, 0);
2762 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2763 validate_change (insn, &XEXP (x, 1), tem, 1);
2764 }
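  /* Illustrative example: if substitution produced
     (plus:SI (const_int 4) (reg:SI 61)), the swap above canonicalizes
     it to (plus:SI (reg:SI 61) (const_int 4)) before the simplification
     below tries to fold the whole expression.  */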
2765
2766 /* Simplify the expression in case we put in some constants. */
2767 switch (GET_RTX_CLASS (code))
2768 {
2769 case '1':
2770 new = simplify_unary_operation (code, GET_MODE (x),
2771 XEXP (x, 0), op0_mode);
2772 break;
2773
2774 case '<':
2775 {
2776 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2777 if (op_mode == VOIDmode)
2778 op_mode = GET_MODE (XEXP (x, 1));
2779 new = simplify_relational_operation (code, op_mode,
2780 XEXP (x, 0), XEXP (x, 1));
2781 #ifdef FLOAT_STORE_FLAG_VALUE
2782 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2783 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2784 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2785 GET_MODE (x)));
2786 #endif
2787 break;
2788 }
2789
2790 case '2':
2791 case 'c':
2792 new = simplify_binary_operation (code, GET_MODE (x),
2793 XEXP (x, 0), XEXP (x, 1));
2794 break;
2795
2796 case 'b':
2797 case '3':
2798 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2799 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2800 break;
2801 }
2802
2803 if (new)
2804 validate_change (insn, loc, new, 1);
2805 }
2806
 2807 /* Show that the registers modified no longer contain known constants.  We are
2808 called from note_stores with parts of the new insn. */
2809
2810 void
2811 mark_stores (dest, x)
2812 rtx dest;
2813 rtx x;
2814 {
2815 int regno = -1;
2816 enum machine_mode mode;
2817
2818 /* DEST is always the innermost thing set, except in the case of
2819 SUBREGs of hard registers. */
2820
2821 if (GET_CODE (dest) == REG)
2822 regno = REGNO (dest), mode = GET_MODE (dest);
2823 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2824 {
2825 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2826 mode = GET_MODE (SUBREG_REG (dest));
2827 }
2828
2829 if (regno >= 0)
2830 {
2831 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2832 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2833 int i;
2834
2835 for (i = regno; i <= last_reg; i++)
2836 if (i < global_const_equiv_map_size)
2837 global_const_equiv_map[i] = 0;
2838 }
2839 }
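/* Illustrative example (hypothetical target): a store to hard register
   0 in DImode on a 32-bit machine, where HARD_REGNO_NREGS (0, DImode)
   is 2, invalidates the const_equiv entries for hard registers 0 and 1;
   a store to a pseudo invalidates only its own entry.  */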
2840 \f
2841 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2842 pointed to by PX, they represent constants in the constant pool.
2843 Replace these with a new memory reference obtained from force_const_mem.
2844 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2845 address of a constant pool entry. Replace them with the address of
2846 a new constant pool entry obtained from force_const_mem. */
2847
2848 static void
2849 restore_constants (px)
2850 rtx *px;
2851 {
2852 rtx x = *px;
2853 int i, j;
2854 char *fmt;
2855
2856 if (x == 0)
2857 return;
2858
2859 if (GET_CODE (x) == CONST_DOUBLE)
2860 {
2861 /* We have to make a new CONST_DOUBLE to ensure that we account for
2862 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2863 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2864 {
2865 REAL_VALUE_TYPE d;
2866
2867 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2868 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
2869 }
2870 else
2871 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2872 VOIDmode);
2873 }
2874
2875 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2876 {
2877 restore_constants (&XEXP (x, 0));
2878 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2879 }
2880 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2881 {
2882 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2883 rtx new = XEXP (SUBREG_REG (x), 0);
2884
2885 restore_constants (&new);
2886 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2887 PUT_MODE (new, GET_MODE (x));
2888 *px = validize_mem (new);
2889 }
2890 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2891 {
2892 restore_constants (&XEXP (x, 0));
2893 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2894 }
2895 else
2896 {
2897 fmt = GET_RTX_FORMAT (GET_CODE (x));
2898 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2899 {
2900 switch (*fmt++)
2901 {
2902 case 'E':
2903 for (j = 0; j < XVECLEN (x, i); j++)
2904 restore_constants (&XVECEXP (x, i, j));
2905 break;
2906
2907 case 'e':
2908 restore_constants (&XEXP (x, i));
2909 break;
2910 }
2911 }
2912 }
2913 }
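/* Illustrative sketch (not from the original source): a saved reference
   like (const ...) with RTX_INTEGRATED_P set is turned back into a
   (mem ...) for a fresh constant pool entry via force_const_mem, and an
   integrated (address ...) becomes the address of such an entry.  */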
2914 \f
2915 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2916 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2917 that it points to the node itself, thus indicating that the node is its
2918 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2919 the given node is NULL, recursively descend the decl/block tree which
2920 it is the root of, and for each other ..._DECL or BLOCK node contained
2921 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2922 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2923 values to point to themselves. */
2924
2925 static void
2926 set_block_origin_self (stmt)
2927 register tree stmt;
2928 {
2929 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2930 {
2931 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2932
2933 {
2934 register tree local_decl;
2935
2936 for (local_decl = BLOCK_VARS (stmt);
2937 local_decl != NULL_TREE;
2938 local_decl = TREE_CHAIN (local_decl))
2939 set_decl_origin_self (local_decl); /* Potential recursion. */
2940 }
2941
2942 {
2943 register tree subblock;
2944
2945 for (subblock = BLOCK_SUBBLOCKS (stmt);
2946 subblock != NULL_TREE;
2947 subblock = BLOCK_CHAIN (subblock))
2948 set_block_origin_self (subblock); /* Recurse. */
2949 }
2950 }
2951 }
2952
2953 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2954 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
 2955    node so that it points to the node itself, thus indicating that the
2956 node represents its own (abstract) origin. Additionally, if the
2957 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
 2958    the decl/block tree of which the given node is the root, and for
2959 each other ..._DECL or BLOCK node contained therein whose
2960 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2961 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2962 point to themselves. */
2963
2964 static void
2965 set_decl_origin_self (decl)
2966 register tree decl;
2967 {
2968 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2969 {
2970 DECL_ABSTRACT_ORIGIN (decl) = decl;
2971 if (TREE_CODE (decl) == FUNCTION_DECL)
2972 {
2973 register tree arg;
2974
2975 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2976 DECL_ABSTRACT_ORIGIN (arg) = arg;
2977 if (DECL_INITIAL (decl) != NULL_TREE
2978 && DECL_INITIAL (decl) != error_mark_node)
2979 set_block_origin_self (DECL_INITIAL (decl));
2980 }
2981 }
2982 }
2983 \f
2984 /* Given a pointer to some BLOCK node, and a boolean value to set the
2985 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2986 the given block, and for all local decls and all local sub-blocks
2987 (recursively) which are contained therein. */
2988
2989 static void
2990 set_block_abstract_flags (stmt, setting)
2991 register tree stmt;
2992 register int setting;
2993 {
2994 register tree local_decl;
2995 register tree subblock;
2996
2997 BLOCK_ABSTRACT (stmt) = setting;
2998
2999 for (local_decl = BLOCK_VARS (stmt);
3000 local_decl != NULL_TREE;
3001 local_decl = TREE_CHAIN (local_decl))
3002 set_decl_abstract_flags (local_decl, setting);
3003
3004 for (subblock = BLOCK_SUBBLOCKS (stmt);
3005 subblock != NULL_TREE;
3006 subblock = BLOCK_CHAIN (subblock))
3007 set_block_abstract_flags (subblock, setting);
3008 }
3009
3010 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3011 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3012 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3013 set the abstract flags for all of the parameters, local vars, local
3014 blocks and sub-blocks (recursively) to the same setting. */
3015
3016 void
3017 set_decl_abstract_flags (decl, setting)
3018 register tree decl;
3019 register int setting;
3020 {
3021 DECL_ABSTRACT (decl) = setting;
3022 if (TREE_CODE (decl) == FUNCTION_DECL)
3023 {
3024 register tree arg;
3025
3026 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3027 DECL_ABSTRACT (arg) = setting;
3028 if (DECL_INITIAL (decl) != NULL_TREE
3029 && DECL_INITIAL (decl) != error_mark_node)
3030 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3031 }
3032 }
3033 \f
3034 /* Output the assembly language code for the function FNDECL
3035 from its DECL_SAVED_INSNS. Used for inline functions that are output
3036 at end of compilation instead of where they came in the source. */
3037
3038 void
3039 output_inline_function (fndecl)
3040 tree fndecl;
3041 {
3042 rtx head;
3043 rtx last;
3044 int save_flag_no_inline = flag_no_inline;
3045
3046 if (output_bytecode)
3047 {
3048 warning ("`inline' ignored for bytecode output");
3049 return;
3050 }
3051
3052 /* Things we allocate from here on are part of this function, not
3053 permanent. */
3054 temporary_allocation ();
3055
3056 head = DECL_SAVED_INSNS (fndecl);
3057 current_function_decl = fndecl;
3058
3059 /* This call is only used to initialize global variables. */
3060 init_function_start (fndecl, "lossage", 1);
3061
3062 /* Redo parameter determinations in case the FUNCTION_...
3063 macros took machine-specific actions that need to be redone. */
3064 assign_parms (fndecl, 1);
3065
3066 /* Set stack frame size. */
3067 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3068
 3069   /* The first is a bit of a lie (the array may be larger), but it doesn't
 3070      matter too much and it isn't worth saving the actual bound.  */
3071 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3072 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3073 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3074 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3075
3076 stack_slot_list = STACK_SLOT_LIST (head);
3077 forced_labels = FORCED_LABELS (head);
3078
3079 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3080 current_function_calls_alloca = 1;
3081
3082 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3083 current_function_calls_setjmp = 1;
3084
3085 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3086 current_function_calls_longjmp = 1;
3087
3088 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3089 current_function_returns_struct = 1;
3090
3091 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3092 current_function_returns_pcc_struct = 1;
3093
3094 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3095 current_function_needs_context = 1;
3096
3097 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3098 current_function_has_nonlocal_label = 1;
3099
3100 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3101 current_function_returns_pointer = 1;
3102
3103 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3104 current_function_uses_const_pool = 1;
3105
3106 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3107 current_function_uses_pic_offset_table = 1;
3108
3109 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3110 current_function_pops_args = POPS_ARGS (head);
3111
 3112   /* This is the only thing that the expand_function_end call that used to
 3113      be here actually did, and that call can cause problems.  */
3114 immediate_size_expand--;
3115
3116 /* Find last insn and rebuild the constant pool. */
3117 for (last = FIRST_PARM_INSN (head);
3118 NEXT_INSN (last); last = NEXT_INSN (last))
3119 {
3120 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3121 {
3122 restore_constants (&PATTERN (last));
3123 restore_constants (&REG_NOTES (last));
3124 }
3125 }
3126
3127 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3128 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3129
3130 /* We must have already output DWARF debugging information for the
3131 original (abstract) inline function declaration/definition, so
3132 we want to make sure that the debugging information we generate
3133 for this special instance of the inline function refers back to
3134 the information we already generated. To make sure that happens,
3135 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3136 node (and for all of the local ..._DECL nodes which are its children)
3137 so that they all point to themselves. */
3138
3139 set_decl_origin_self (fndecl);
3140
3141 /* We're not deferring this any longer. */
3142 DECL_DEFER_OUTPUT (fndecl) = 0;
3143
3144 /* Integrating function calls isn't safe anymore, so turn on
3145 flag_no_inline. */
3146 flag_no_inline = 1;
3147
3148 /* Compile this function all the way down to assembly code. */
3149 rest_of_compilation (fndecl);
3150
3151 /* Reset flag_no_inline to its original value. */
3152 flag_no_inline = save_flag_no_inline;
3153
3154 current_function_decl = 0;
3155 }