1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include <stdio.h>
24
25 #include "config.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "insn-config.h"
30 #include "insn-flags.h"
31 #include "expr.h"
32 #include "output.h"
33 #include "integrate.h"
34 #include "real.h"
35 #include "function.h"
36 #include "bytecode.h"
37
38 #include "obstack.h"
39 #define obstack_chunk_alloc xmalloc
40 #define obstack_chunk_free free
41
42 extern struct obstack *function_maybepermanent_obstack;
43
44 extern tree pushdecl ();
45 extern tree poplevel ();
46
47 /* Round VALUE up to the next highest multiple of ALIGN,
48 which must be a power of 2. */
49 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
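/* A worked example (plain arithmetic): CEIL_ROUND (13, 8)
   == (13 + 8 - 1) & ~(8 - 1) == 20 & ~7 == 16,
   the next multiple of 8 at or above 13. */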
50
51 /* Default max number of insns a function can have and still be inline.
52 This is overridden on RISC machines. */
53 #ifndef INTEGRATE_THRESHOLD
54 #define INTEGRATE_THRESHOLD(DECL) \
55 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
56 #endif
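/* Illustration: under the default definition above, a function with two
   parameters may contain up to 8 * (8 + 2) == 80 insns and still be
   considered for inlining (list_length counts the PARM_DECLs). */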
57 \f
58 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
59 static void finish_inline PROTO((tree, rtx));
60 static void adjust_copied_decl_tree PROTO((tree));
61 static tree copy_decl_list PROTO((tree));
62 static tree copy_decl_tree PROTO((tree));
63 static void copy_decl_rtls PROTO((tree));
64 static void save_constants PROTO((rtx *));
65 static void note_modified_parmregs PROTO((rtx, rtx));
66 static rtx copy_for_inline PROTO((rtx));
67 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
68 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
69 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
70 static void restore_constants PROTO((rtx *));
71 static void set_block_origin_self PROTO((tree));
72 static void set_decl_origin_self PROTO((tree));
73 static void set_block_abstract_flags PROTO((tree, int));
74
75 void set_decl_abstract_flags PROTO((tree, int));
76 \f
77 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
78 is safe and reasonable to integrate into other functions.
79 Nonzero means value is a warning message with a single %s
80 for the function's name. */
81
82 char *
83 function_cannot_inline_p (fndecl)
84 register tree fndecl;
85 {
86 register rtx insn;
87 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
88 int max_insns = INTEGRATE_THRESHOLD (fndecl);
89 register int ninsns = 0;
90 register tree parms;
91
92 /* No inlines with varargs. `grokdeclarator' gives a warning
93 message about that if `inline' is specified. This code
94 is put in to catch the volunteers. */
95 if ((last && TREE_VALUE (last) != void_type_node)
96 || current_function_varargs)
97 return "varargs function cannot be inline";
98
99 if (current_function_calls_alloca)
100 return "function using alloca cannot be inline";
101
102 if (current_function_contains_functions)
103 return "function with nested functions cannot be inline";
104
105 /* If it's not even close, don't even look. */
106 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
107 return "function too large to be inline";
108
109 #if 0
110 /* Large stacks are OK now that inlined functions can share them. */
111 /* Don't inline functions with large stack usage,
112 since they can make other recursive functions burn up stack. */
113 if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
114 return "function stack frame for inlining";
115 #endif
116
117 #if 0
118 /* Don't inline functions which do not specify a function prototype and
119 have BLKmode argument or take the address of a parameter. */
120 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
121 {
122 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
123 TREE_ADDRESSABLE (parms) = 1;
124 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
125 return "no prototype, and parameter address used; cannot be inline";
126 }
127 #endif
128
129 /* We can't inline functions that return structures
130 the old-fashioned PCC way, copying into a static block. */
131 if (current_function_returns_pcc_struct)
132 return "inline functions not supported for this return value type";
133
134 /* We can't inline functions that return BLKmode structures in registers. */
135 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
136 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
137 return "inline functions not supported for this return value type";
138
139 /* We can't inline functions that return structures of varying size. */
140 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
141 return "function with varying-size return value cannot be inline";
142
143 /* Cannot inline a function with a varying size argument or one that
144 receives a transparent union. */
145 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
146 {
147 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
148 return "function with varying-size parameter cannot be inline";
149 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
150 return "function with transparent union parameter cannot be inline";
151 }
152
153 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
154 {
155 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
156 insn = NEXT_INSN (insn))
157 {
158 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
159 ninsns++;
160 }
161
162 if (ninsns >= max_insns)
163 return "function too large to be inline";
164 }
165
166 /* We cannot inline this function if forced_labels is non-zero. This
167 implies that a label in this function was used as an initializer.
168 Because labels cannot be duplicated, all labels in the function
169 will be renamed when it is inlined. However, there is no way to find
170 and fix all variables initialized with addresses of labels in this
171 function, hence inlining is impossible. */
172
173 if (forced_labels)
174 return "function with label addresses used in initializers cannot be inline";
175
176 /* We cannot inline a nested function that jumps to a nonlocal label. */
177 if (current_function_has_nonlocal_goto)
178 return "function with nonlocal goto cannot be inline";
179
180 return 0;
181 }
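/* The caller (rest_of_compilation) applies the contract above roughly as
   follows (a hedged sketch, not the exact code):

     char *lose = function_cannot_inline_p (fndecl);
     if (lose)
       warning_with_decl (fndecl, lose);

   after which inline expansion is abandoned for FNDECL. */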
182 \f
183 /* Variables used within save_for_inline. */
184
185 /* Mapping from old pseudo-register to new pseudo-registers.
186 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
187 It is allocated in `save_for_inline' and `expand_inline_function',
188 and deallocated on exit from each of those routines. */
189 static rtx *reg_map;
190
191 /* Mapping from old code-labels to new code-labels.
192 The first element of this map is label_map[min_labelno].
193 It is allocated in `save_for_inline' and `expand_inline_function',
194 and deallocated on exit from each of those routines. */
195 static rtx *label_map;
196
197 /* Mapping from old insn uid's to copied insns.
198 It is allocated in `save_for_inline' and `expand_inline_function',
199 and deallocated on exit from each of those routines. */
200 static rtx *insn_map;
201
202 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
203 Zero for a reg that isn't a parm's home.
204 Only reg numbers less than max_parm_reg are mapped here. */
205 static tree *parmdecl_map;
206
207 /* Keep track of first pseudo-register beyond those that are parms. */
208 static int max_parm_reg;
209
210 /* When an insn is being copied by copy_for_inline,
211 this is nonzero if we have copied an ASM_OPERANDS.
212 In that case, it is the original input-operand vector. */
213 static rtvec orig_asm_operands_vector;
214
215 /* When an insn is being copied by copy_for_inline,
216 this is nonzero if we have copied an ASM_OPERANDS.
217 In that case, it is the copied input-operand vector. */
218 static rtvec copy_asm_operands_vector;
219
220 /* Likewise, this is the copied constraints vector. */
221 static rtvec copy_asm_constraints_vector;
222
223 /* In save_for_inline, nonzero if past the parm-initialization insns. */
224 static int in_nonparm_insns;
225 \f
226 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
227 needed to save FNDECL's insns and info for future inline expansion. */
228
229 static rtx
230 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
231 tree fndecl;
232 int min_labelno;
233 int max_labelno;
234 int max_reg;
235 int copy;
236 {
237 int function_flags, i;
238 rtvec arg_vector;
239 tree parms;
240
241 /* Compute the values of any flags we must restore when inlining this. */
242
243 function_flags
244 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
245 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
246 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
247 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
248 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
249 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
250 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
251 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
252 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
253 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
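/* Each FUNCTION_FLAGS_* constant is a distinct bit and each
   current_function_* value above is 0 or 1, so the sum is in effect a
   bitwise OR; e.g. a function calling both alloca and setjmp yields
   FUNCTION_FLAGS_CALLS_ALLOCA | FUNCTION_FLAGS_CALLS_SETJMP. */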
254
255 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
256 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
257 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
258
259 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
260 parms;
261 parms = TREE_CHAIN (parms), i++)
262 {
263 rtx p = DECL_RTL (parms);
264
265 if (GET_CODE (p) == MEM && copy)
266 {
267 /* Copy the rtl so that modifications of the addresses
268 later in compilation won't affect this arg_vector.
269 Virtual register instantiation can screw the address
270 of the rtl. */
271 rtx new = copy_rtx (p);
272
273 /* Don't leave the old copy anywhere in this decl. */
274 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
275 || (GET_CODE (DECL_RTL (parms)) == MEM
276 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
277 && (XEXP (DECL_RTL (parms), 0)
278 == XEXP (DECL_INCOMING_RTL (parms), 0))))
279 DECL_INCOMING_RTL (parms) = new;
280 DECL_RTL (parms) = new;
281 }
282
283 RTVEC_ELT (arg_vector, i) = p;
284
285 if (GET_CODE (p) == REG)
286 parmdecl_map[REGNO (p)] = parms;
287 else if (GET_CODE (p) == CONCAT)
288 {
289 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
290 rtx pimag = gen_imagpart (GET_MODE (preal), p);
291
292 if (GET_CODE (preal) == REG)
293 parmdecl_map[REGNO (preal)] = parms;
294 if (GET_CODE (pimag) == REG)
295 parmdecl_map[REGNO (pimag)] = parms;
296 }
297
298 /* This flag is cleared later
299 if the function ever modifies the value of the parm. */
300 TREE_READONLY (parms) = 1;
301 }
302
303 /* Assume we start out in the insns that set up the parameters. */
304 in_nonparm_insns = 0;
305
306 /* The list of DECL_SAVED_INSNS starts off with a header which
307 contains the following information:
308
309 the first insn of the function (not including the insns that copy
310 parameters into registers),
311 the first parameter insn of the function,
312 the first label used by that function,
313 the last label used by that function,
314 the highest register number used for parameters,
315 the total number of registers used,
316 the size of the incoming stack area for parameters,
317 the number of bytes popped on return,
318 the stack slot list,
319 some flags that are used to restore compiler globals,
320 the value of current_function_outgoing_args_size,
321 the original argument vector,
322 and the original DECL_INITIAL. */
323
324 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
325 max_parm_reg, max_reg,
326 current_function_args_size,
327 current_function_pops_args,
328 stack_slot_list, forced_labels, function_flags,
329 current_function_outgoing_args_size,
330 arg_vector, (rtx) DECL_INITIAL (fndecl));
331 }
332
333 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
334 things that must be done to make FNDECL expandable as an inline function.
335 HEAD contains the chain of insns to which FNDECL will expand. */
336
337 static void
338 finish_inline (fndecl, head)
339 tree fndecl;
340 rtx head;
341 {
342 NEXT_INSN (head) = get_first_nonparm_insn ();
343 FIRST_PARM_INSN (head) = get_insns ();
344 DECL_SAVED_INSNS (fndecl) = head;
345 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
346 }
347
348 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
349 they all point to the new (copied) rtxs. */
350
351 static void
352 adjust_copied_decl_tree (block)
353 register tree block;
354 {
355 register tree subblock;
356 register rtx original_end;
357
358 original_end = BLOCK_END_NOTE (block);
359 if (original_end)
360 {
361 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
362 NOTE_SOURCE_FILE (original_end) = 0;
363 }
364
365 /* Process all subblocks. */
366 for (subblock = BLOCK_SUBBLOCKS (block);
367 subblock;
368 subblock = TREE_CHAIN (subblock))
369 adjust_copied_decl_tree (subblock);
370 }
371
372 /* Make the insns and PARM_DECLs of the current function permanent
373 and record other information in DECL_SAVED_INSNS to allow inlining
374 of this function in subsequent calls.
375
376 This function is called when we are going to immediately compile
377 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
378 modified by the compilation process, so we copy all of them to
379 new storage and consider the new insns to be the insn chain to be
380 compiled. Our caller (rest_of_compilation) saves the original
381 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
382
383 /* ??? The nonlocal_label list should be adjusted also. However, since
384 a function that contains a nested function never gets inlined currently,
385 the nonlocal_label list will always be empty, so we don't worry about
386 it for now. */
387
388 void
389 save_for_inline_copying (fndecl)
390 tree fndecl;
391 {
392 rtx first_insn, last_insn, insn;
393 rtx head, copy;
394 int max_labelno, min_labelno, i, len;
395 int max_reg;
396 int max_uid;
397 rtx first_nonparm_insn;
398
399 /* Make and emit a return-label if we have not already done so.
400 Do this before recording the bounds on label numbers. */
401
402 if (return_label == 0)
403 {
404 return_label = gen_label_rtx ();
405 emit_label (return_label);
406 }
407
408 /* Get some bounds on the labels and registers used. */
409
410 max_labelno = max_label_num ();
411 min_labelno = get_first_label_num ();
412 max_reg = max_reg_num ();
413
414 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
415 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
416 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
417 for the parms, prior to elimination of virtual registers.
418 These values are needed for substituting parms properly. */
419
420 max_parm_reg = max_parm_reg_num ();
421 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
422
423 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
424
425 if (current_function_uses_const_pool)
426 {
427 /* Replace any constant pool references with the actual constant. We
428 will put the constants back in the copy made below. */
429 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
430 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
431 {
432 save_constants (&PATTERN (insn));
433 if (REG_NOTES (insn))
434 save_constants (&REG_NOTES (insn));
435 }
436
437 /* Clear out the constant pool so that we can recreate it with the
438 copied constants below. */
439 init_const_rtx_hash_table ();
440 clear_const_double_mem ();
441 }
442
443 max_uid = INSN_UID (head);
444
445 /* We have now allocated all that needs to be allocated permanently
446 on the rtx obstack. Set our high-water mark, so that we
447 can free the rest of this when the time comes. */
448
449 preserve_data ();
450
451 /* Copy the chain insns of this function.
452 Install the copied chain as the insns of this function,
453 for continued compilation;
454 the original chain is recorded as the DECL_SAVED_INSNS
455 for inlining future calls. */
456
457 /* If there are insns that copy parms from the stack into pseudo registers,
458 those insns are not copied. `expand_inline_function' must
459 emit the correct code to handle such things. */
460
461 insn = get_insns ();
462 if (GET_CODE (insn) != NOTE)
463 abort ();
464 first_insn = rtx_alloc (NOTE);
465 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
466 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
467 INSN_UID (first_insn) = INSN_UID (insn);
468 PREV_INSN (first_insn) = NULL;
469 NEXT_INSN (first_insn) = NULL;
470 last_insn = first_insn;
471
472 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
473 Make these new rtx's now, and install them in regno_reg_rtx, so they
474 will be the official pseudo-reg rtx's for the rest of compilation. */
475
476 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
477
478 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
479 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
480 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
481 regno_reg_rtx[i], len);
482
483 bcopy ((char *) (reg_map + LAST_VIRTUAL_REGISTER + 1),
484 (char *) (regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1),
485 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
486
487 /* Likewise each label rtx must have a unique rtx as its copy. */
488
489 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
490 label_map -= min_labelno;
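/* Biasing the pointer lets label numbers index the map directly: if
   min_labelno were 5, label_map[5] would be element 0 of the block just
   allocated (illustrative value). */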
491
492 for (i = min_labelno; i < max_labelno; i++)
493 label_map[i] = gen_label_rtx ();
494
495 /* Record the mapping of old insns to copied insns. */
496
497 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
498 bzero ((char *) insn_map, max_uid * sizeof (rtx));
499
500 /* Get the insn which signals the end of parameter setup code. */
501 first_nonparm_insn = get_first_nonparm_insn ();
502
503 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
504 (the former occurs when a variable has its address taken)
505 since these may be shared and can be changed by virtual
506 register instantiation. DECL_RTL values for our arguments
507 have already been copied by initialize_for_inline. */
508 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
509 if (GET_CODE (regno_reg_rtx[i]) == MEM)
510 XEXP (regno_reg_rtx[i], 0)
511 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
512
513 /* Copy the tree of subblocks of the function, and the decls in them.
514 We will use the copy for compiling this function, then restore the original
515 subblocks and decls for use when inlining this function.
516
517 Several parts of the compiler modify BLOCK trees. In particular,
518 instantiate_virtual_regs will instantiate any virtual regs
519 mentioned in the DECL_RTLs of the decls, and loop
520 unrolling will replicate any BLOCK trees inside an unrolled loop.
521
522 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
523 which we will use for inlining. The rtl might even contain pseudoregs
524 whose space has been freed. */
525
526 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
527 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
528
529 /* Now copy each DECL_RTL which is a MEM,
530 so it is safe to modify their addresses. */
531 copy_decl_rtls (DECL_INITIAL (fndecl));
532
533 /* The fndecl node acts as its own progenitor, so mark it as such. */
534 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
535
536 /* Now copy the chain of insns. Do this in two passes: the first copies
537 each insn itself and its body; the second copies the REG_NOTES. This is
538 because a REG_NOTE may have a forward pointer to another insn. */
539
540 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
541 {
542 orig_asm_operands_vector = 0;
543
544 if (insn == first_nonparm_insn)
545 in_nonparm_insns = 1;
546
547 switch (GET_CODE (insn))
548 {
549 case NOTE:
550 /* No need to keep these. */
551 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
552 continue;
553
554 copy = rtx_alloc (NOTE);
555 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
556 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
557 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
558 else
559 {
560 NOTE_SOURCE_FILE (insn) = (char *) copy;
561 NOTE_SOURCE_FILE (copy) = 0;
562 }
563 break;
564
565 case INSN:
566 case JUMP_INSN:
567 case CALL_INSN:
568 copy = rtx_alloc (GET_CODE (insn));
569
570 if (GET_CODE (insn) == CALL_INSN)
571 CALL_INSN_FUNCTION_USAGE (copy) =
572 copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
573
574 PATTERN (copy) = copy_for_inline (PATTERN (insn));
575 INSN_CODE (copy) = -1;
576 LOG_LINKS (copy) = NULL_RTX;
577 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
578 break;
579
580 case CODE_LABEL:
581 copy = label_map[CODE_LABEL_NUMBER (insn)];
582 LABEL_NAME (copy) = LABEL_NAME (insn);
583 break;
584
585 case BARRIER:
586 copy = rtx_alloc (BARRIER);
587 break;
588
589 default:
590 abort ();
591 }
592 INSN_UID (copy) = INSN_UID (insn);
593 insn_map[INSN_UID (insn)] = copy;
594 NEXT_INSN (last_insn) = copy;
595 PREV_INSN (copy) = last_insn;
596 last_insn = copy;
597 }
598
599 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
600
601 /* Now copy the REG_NOTES. */
602 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
603 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
604 && insn_map[INSN_UID(insn)])
605 REG_NOTES (insn_map[INSN_UID (insn)])
606 = copy_for_inline (REG_NOTES (insn));
607
608 NEXT_INSN (last_insn) = NULL;
609
610 finish_inline (fndecl, head);
611
612 set_new_first_and_last_insn (first_insn, last_insn);
613 }
614
615 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
616 For example, this can copy a list made of TREE_LIST nodes. While copying,
617 for each node copied which doesn't already have its DECL_ABSTRACT_ORIGIN
618 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
619 point to the corresponding (abstract) original node. */
620
621 static tree
622 copy_decl_list (list)
623 tree list;
624 {
625 tree head;
626 register tree prev, next;
627
628 if (list == 0)
629 return 0;
630
631 head = prev = copy_node (list);
632 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
633 DECL_ABSTRACT_ORIGIN (head) = list;
634 next = TREE_CHAIN (list);
635 while (next)
636 {
637 register tree copy;
638
639 copy = copy_node (next);
640 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
641 DECL_ABSTRACT_ORIGIN (copy) = next;
642 TREE_CHAIN (prev) = copy;
643 prev = copy;
644 next = TREE_CHAIN (next);
645 }
646 return head;
647 }
648
649 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
650
651 static tree
652 copy_decl_tree (block)
653 tree block;
654 {
655 tree t, vars, subblocks;
656
657 vars = copy_decl_list (BLOCK_VARS (block));
658 subblocks = 0;
659
660 /* Process all subblocks. */
661 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
662 {
663 tree copy = copy_decl_tree (t);
664 TREE_CHAIN (copy) = subblocks;
665 subblocks = copy;
666 }
667
668 t = copy_node (block);
669 BLOCK_VARS (t) = vars;
670 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
671 /* If the BLOCK being cloned is already marked as having been instantiated
672 from something else, then leave that `origin' marking alone. Otherwise,
673 mark the clone as having originated from the BLOCK we are cloning. */
674 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
675 BLOCK_ABSTRACT_ORIGIN (t) = block;
676 return t;
677 }
678
679 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
680
681 static void
682 copy_decl_rtls (block)
683 tree block;
684 {
685 tree t;
686
687 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
688 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
689 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
690
691 /* Process all subblocks. */
692 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
693 copy_decl_rtls (t);
694 }
695
696 /* Make the insns and PARM_DECLs of the current function permanent
697 and record other information in DECL_SAVED_INSNS to allow inlining
698 of this function in subsequent calls.
699
700 This routine need not copy any insns because we are not going
701 to immediately compile the insns in the insn chain. There
702 are two cases when we would compile the insns for FNDECL:
703 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
704 be output at the end of other compilation, because somebody took
705 its address. In the first case, the insns of FNDECL are copied
706 as it is expanded inline, so FNDECL's saved insns are not
707 modified. In the second case, FNDECL is used for the last time,
708 so modifying the rtl is not a problem.
709
710 ??? Actually, we do not verify that FNDECL is not inline expanded
711 by other functions which must also be written down at the end
712 of compilation. We could set flag_no_inline to nonzero when
713 the time comes to write down such functions. */
714
715 void
716 save_for_inline_nocopy (fndecl)
717 tree fndecl;
718 {
719 rtx insn;
720 rtx head;
721 rtx first_nonparm_insn;
722
723 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
724 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
725 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
726 for the parms, prior to elimination of virtual registers.
727 These values are needed for substituting parms properly. */
728
729 max_parm_reg = max_parm_reg_num ();
730 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
731
732 /* Make and emit a return-label if we have not already done so. */
733
734 if (return_label == 0)
735 {
736 return_label = gen_label_rtx ();
737 emit_label (return_label);
738 }
739
740 head = initialize_for_inline (fndecl, get_first_label_num (),
741 max_label_num (), max_reg_num (), 0);
742
743 /* If there are insns that copy parms from the stack into pseudo registers,
744 those insns are not copied. `expand_inline_function' must
745 emit the correct code to handle such things. */
746
747 insn = get_insns ();
748 if (GET_CODE (insn) != NOTE)
749 abort ();
750
751 /* Get the insn which signals the end of parameter setup code. */
752 first_nonparm_insn = get_first_nonparm_insn ();
753
754 /* Now just scan the chain of insns to see what happens to our
755 PARM_DECLs. If a PARM_DECL is used but never modified, we
756 can substitute its rtl directly when expanding inline (and
757 perform constant folding when its incoming value is constant).
758 Otherwise, we have to copy its value into a new register and track
759 the new register's life. */
760
761 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
762 {
763 if (insn == first_nonparm_insn)
764 in_nonparm_insns = 1;
765
766 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
767 {
768 if (current_function_uses_const_pool)
769 {
770 /* Replace any constant pool references with the actual constant.
771 We will put the constant back if we need to write the
772 function out after all. */
773 save_constants (&PATTERN (insn));
774 if (REG_NOTES (insn))
775 save_constants (&REG_NOTES (insn));
776 }
777
778 /* Record what interesting things happen to our parameters. */
779 note_stores (PATTERN (insn), note_modified_parmregs);
780 }
781 }
782
783 /* We have now allocated all that needs to be allocated permanently
784 on the rtx obstack. Set our high-water mark, so that we
785 can free the rest of this when the time comes. */
786
787 preserve_data ();
788
789 finish_inline (fndecl, head);
790 }
791 \f
792 /* Given PX, a pointer into an insn, search for references to the constant
793 pool. Replace each with a CONST that has the mode of the original
794 constant, contains the constant, and has RTX_INTEGRATED_P set.
795 Similarly, constant pool addresses not enclosed in a MEM are replaced
796 with an ADDRESS rtx which also gives the constant, mode, and has
797 RTX_INTEGRATED_P set. */
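/* Sketch of the transformation (operand details are illustrative):

     (mem:SI (symbol_ref/u ("*.LC0")))       a reference into the pool
       becomes (const:SI <pool constant>)    with RTX_INTEGRATED_P set;
     (symbol_ref/u ("*.LC0"))                a bare pool address
       becomes (address:SI <pool constant>)  with RTX_INTEGRATED_P set. */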
798
799 static void
800 save_constants (px)
801 rtx *px;
802 {
803 rtx x;
804 int i, j;
805
806 again:
807 x = *px;
808
809 /* If this is a CONST_DOUBLE, don't try to fix things up in
810 CONST_DOUBLE_MEM, because that would cause infinite recursion. */
811 if (GET_CODE (x) == CONST_DOUBLE)
812 return;
813 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
814 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
815 {
816 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
817 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
818 RTX_INTEGRATED_P (new) = 1;
819
820 /* If the MEM was in a different mode than the constant (perhaps we
821 were only looking at the low-order part), surround it with a
822 SUBREG so we can save both modes. */
823
824 if (GET_MODE (x) != const_mode)
825 {
826 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
827 RTX_INTEGRATED_P (new) = 1;
828 }
829
830 *px = new;
831 save_constants (&XEXP (*px, 0));
832 }
833 else if (GET_CODE (x) == SYMBOL_REF
834 && CONSTANT_POOL_ADDRESS_P (x))
835 {
836 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
837 save_constants (&XEXP (*px, 0));
838 RTX_INTEGRATED_P (*px) = 1;
839 }
840
841 else
842 {
843 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
844 int len = GET_RTX_LENGTH (GET_CODE (x));
845
846 for (i = len-1; i >= 0; i--)
847 {
848 switch (fmt[i])
849 {
850 case 'E':
851 for (j = 0; j < XVECLEN (x, i); j++)
852 save_constants (&XVECEXP (x, i, j));
853 break;
854
855 case 'e':
856 if (XEXP (x, i) == 0)
857 continue;
858 if (i == 0)
859 {
860 /* Hack tail-recursion here. */
861 px = &XEXP (x, 0);
862 goto again;
863 }
864 save_constants (&XEXP (x, i));
865 break;
866 }
867 }
868 }
869 }
870 \f
871 /* Note whether a parameter is modified or not. */
872
873 static void
874 note_modified_parmregs (reg, x)
875 rtx reg;
876 rtx x;
877 {
878 if (GET_CODE (reg) == REG && in_nonparm_insns
879 && REGNO (reg) < max_parm_reg
880 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
881 && parmdecl_map[REGNO (reg)] != 0)
882 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
883 }
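/* note_stores invokes the function above once for each destination
   stored in a pattern; e.g. for (set (reg 70) ...) where pseudo 70 is a
   parm's home, that parm's TREE_READONLY is cleared (regno illustrative). */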
884
885 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
886 according to `reg_map' and `label_map'. The original rtl insns
887 will be saved for inlining; this is used to make a copy
888 which is used to finish compiling the inline function itself.
889
890 If we find a "saved" constant pool entry, one which was replaced with
891 the value of the constant, convert it back to a constant pool entry.
892 Since the pool wasn't touched, this should simply restore the old
893 address.
894
895 All other kinds of rtx are copied except those that can never be
896 changed during compilation. */
897
898 static rtx
899 copy_for_inline (orig)
900 rtx orig;
901 {
902 register rtx x = orig;
903 register int i;
904 register enum rtx_code code;
905 register char *format_ptr;
906
907 if (x == 0)
908 return x;
909
910 code = GET_CODE (x);
911
912 /* These types may be freely shared. */
913
914 switch (code)
915 {
916 case QUEUED:
917 case CONST_INT:
918 case SYMBOL_REF:
919 case PC:
920 case CC0:
921 return x;
922
923 case CONST_DOUBLE:
924 /* We have to make a new CONST_DOUBLE to ensure that we account for
925 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
926 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
927 {
928 REAL_VALUE_TYPE d;
929
930 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
931 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
932 }
933 else
934 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
935 VOIDmode);
936
937 case CONST:
938 /* Get constant pool entry for constant in the pool. */
939 if (RTX_INTEGRATED_P (x))
940 return validize_mem (force_const_mem (GET_MODE (x),
941 copy_for_inline (XEXP (x, 0))));
942 break;
943
944 case SUBREG:
945 /* Get constant pool entry, but access in different mode. */
946 if (RTX_INTEGRATED_P (x))
947 {
948 rtx new
949 = force_const_mem (GET_MODE (SUBREG_REG (x)),
950 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
951
952 PUT_MODE (new, GET_MODE (x));
953 return validize_mem (new);
954 }
955 break;
956
957 case ADDRESS:
958 /* If this ADDRESS is not marked as being for the constant pool, it is
959 an error. Else get the constant pool address. */
960 if (! RTX_INTEGRATED_P (x))
961 abort ();
962
963 return XEXP (force_const_mem (GET_MODE (x),
964 copy_for_inline (XEXP (x, 0))), 0);
965
966 case ASM_OPERANDS:
967 /* If a single asm insn contains multiple output operands
968 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
969 We must make sure that the copied insn continues to share it. */
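/* E.g. "asm ("..." : "=r" (a), "=r" (b))" commonly expands to a PARALLEL
   of two SETs whose ASM_OPERANDS share one input-operand vector (a
   sketch of the usual expansion, not a guaranteed form). */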
970 if (orig_asm_operands_vector == XVEC (orig, 3))
971 {
972 x = rtx_alloc (ASM_OPERANDS);
973 x->volatil = orig->volatil;
974 XSTR (x, 0) = XSTR (orig, 0);
975 XSTR (x, 1) = XSTR (orig, 1);
976 XINT (x, 2) = XINT (orig, 2);
977 XVEC (x, 3) = copy_asm_operands_vector;
978 XVEC (x, 4) = copy_asm_constraints_vector;
979 XSTR (x, 5) = XSTR (orig, 5);
980 XINT (x, 6) = XINT (orig, 6);
981 return x;
982 }
983 break;
984
985 case MEM:
986 /* A MEM is usually allowed to be shared if its address is constant
987 or is a constant plus one of the special registers.
988
989 We do not allow sharing of addresses that are either a special
990 register or the sum of a constant and a special register because
991 it is possible for unshare_all_rtl to copy the address, into memory
992 that won't be saved. Although the MEM can safely be shared, and
993 won't be copied there, the address itself cannot be shared, and may
994 need to be copied.
995
996 There are also two exceptions with constants: The first is if the
997 constant is a LABEL_REF or the sum of the LABEL_REF
998 and an integer. This case can happen if we have an inline
999 function that supplies a constant operand to the call of another
1000 inline function that uses it in a switch statement. In this case,
1001 we will be replacing the LABEL_REF, so we have to replace this MEM
1002 as well.
1003
1004 The second case is if we have a (const (plus (address ..) ...)).
1005 In that case we need to put back the address of the constant pool
1006 entry. */
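/* Illustrative forms: (mem:SI (symbol_ref ("x"))) may be returned
   unchanged, while (mem:SI (label_ref L)) or
   (mem:SI (const (plus (address ...) (const_int 4)))) must fall through
   and be copied. */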
1007
1008 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1009 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1010 && ! (GET_CODE (XEXP (x, 0)) == CONST
1011 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1012 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1013 == LABEL_REF)
1014 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1015 == ADDRESS)))))
1016 return x;
1017 break;
1018
1019 case LABEL_REF:
1020 /* If this is a non-local label, just make a new LABEL_REF.
1021 Otherwise, use the new label as well. */
1022 x = gen_rtx (LABEL_REF, GET_MODE (orig),
1023 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1024 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1025 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1026 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1027 return x;
1028
1029 case REG:
1030 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1031 return reg_map [REGNO (x)];
1032 else
1033 return x;
1034
1035 case SET:
1036 /* If a parm that gets modified lives in a pseudo-reg,
1037 clear its TREE_READONLY to prevent certain optimizations. */
1038 {
1039 rtx dest = SET_DEST (x);
1040
1041 while (GET_CODE (dest) == STRICT_LOW_PART
1042 || GET_CODE (dest) == ZERO_EXTRACT
1043 || GET_CODE (dest) == SUBREG)
1044 dest = XEXP (dest, 0);
1045
1046 if (GET_CODE (dest) == REG
1047 && REGNO (dest) < max_parm_reg
1048 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1049 && parmdecl_map[REGNO (dest)] != 0
1050 /* The insn to load an arg pseudo from a stack slot
1051 does not count as modifying it. */
1052 && in_nonparm_insns)
1053 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1054 }
1055 break;
1056
1057 #if 0 /* This is a good idea, but here is the wrong place for it. */
1058 /* Arrange that CONST_INTs always appear as the second operand
1059 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1060 always appear as the first. */
1061 case PLUS:
1062 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1063 || (XEXP (x, 1) == frame_pointer_rtx
1064 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1065 && XEXP (x, 1) == arg_pointer_rtx)))
1066 {
1067 rtx t = XEXP (x, 0);
1068 XEXP (x, 0) = XEXP (x, 1);
1069 XEXP (x, 1) = t;
1070 }
1071 break;
1072 #endif
1073 }
1074
1075 /* Replace this rtx with a copy of itself. */
1076
1077 x = rtx_alloc (code);
1078 bcopy ((char *) orig, (char *) x,
1079 (sizeof (*x) - sizeof (x->fld)
1080 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1081
1082 /* Now scan the subexpressions recursively.
1083 We can store any replaced subexpressions directly into X
1084 since we know X is not shared! Any vectors in X
1085 must be copied if X was copied. */
1086
1087 format_ptr = GET_RTX_FORMAT (code);
1088
1089 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1090 {
1091 switch (*format_ptr++)
1092 {
1093 case 'e':
1094 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1095 break;
1096
1097 case 'u':
1098 /* Change any references to old-insns to point to the
1099 corresponding copied insns. */
1100 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1101 break;
1102
1103 case 'E':
1104 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1105 {
1106 register int j;
1107
1108 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1109 for (j = 0; j < XVECLEN (x, i); j++)
1110 XVECEXP (x, i, j)
1111 = copy_for_inline (XVECEXP (x, i, j));
1112 }
1113 break;
1114 }
1115 }
1116
1117 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1118 {
1119 orig_asm_operands_vector = XVEC (orig, 3);
1120 copy_asm_operands_vector = XVEC (x, 3);
1121 copy_asm_constraints_vector = XVEC (x, 4);
1122 }
1123
1124 return x;
1125 }
1126
1127 /* Unfortunately, we need a global copy of the const_equiv map for communication
1128 with a function called from note_stores. Be *very* careful that this
1129 is used properly in the presence of recursion. */
1130
1131 rtx *global_const_equiv_map;
1132 int global_const_equiv_map_size;
1133 \f
1134 #define FIXED_BASE_PLUS_P(X) \
1135 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1136 && GET_CODE (XEXP (X, 0)) == REG \
1137 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1138 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
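/* E.g. FIXED_BASE_PLUS_P matches (plus (reg VIRTUAL_STACK_VARS_REGNUM)
   (const_int 8)), a fixed offset from a virtual base register, but not
   a sum involving an ordinary pseudo (operands illustrative). */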
1139
1140 /* Integrate the procedure defined by FNDECL. Note that this function
1141 may wind up calling itself. Since the static variables are not
1142 reentrant, we do not assign them until after the possibility
1143 of recursion is eliminated.
1144
1145 If IGNORE is nonzero, do not produce a value.
1146 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1147
1148 Value is:
1149 (rtx)-1 if we could not substitute the function
1150 0 if we substituted it and it does not produce a value
1151 else an rtx for where the value is stored. */
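/* A caller therefore checks the result roughly like this (a hedged
   sketch, not the exact code in calls.c):

     temp = expand_inline_function (fndecl, parms, target, ignore,
                                    type, structure_value_addr);
     if (temp == (rtx) (HOST_WIDE_INT) -1)
       ... emit an ordinary call instead ...  */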
1152
1153 rtx
1154 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1155 tree fndecl, parms;
1156 rtx target;
1157 int ignore;
1158 tree type;
1159 rtx structure_value_addr;
1160 {
1161 tree formal, actual, block;
1162 rtx header = DECL_SAVED_INSNS (fndecl);
1163 rtx insns = FIRST_FUNCTION_INSN (header);
1164 rtx parm_insns = FIRST_PARM_INSN (header);
1165 tree *arg_trees;
1166 rtx *arg_vals;
1167 rtx insn;
1168 int max_regno;
1169 register int i;
1170 int min_labelno = FIRST_LABELNO (header);
1171 int max_labelno = LAST_LABELNO (header);
1172 int nargs;
1173 rtx local_return_label = 0;
1174 rtx loc;
1175 rtx stack_save = 0;
1176 rtx temp;
1177 struct inline_remap *map;
1178 rtx cc0_insn = 0;
1179 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1180 rtx static_chain_value = 0;
1181
1182 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1183 max_regno = MAX_REGNUM (header) + 3;
1184 if (max_regno < FIRST_PSEUDO_REGISTER)
1185 abort ();
1186
1187 nargs = list_length (DECL_ARGUMENTS (fndecl));
1188
1189 /* Check that the parm types match and that sufficient arguments were
1190 passed. Since the appropriate conversions or default promotions have
1191 already been applied, the machine modes should match exactly. */
1192
1193 for (formal = DECL_ARGUMENTS (fndecl),
1194 actual = parms;
1195 formal;
1196 formal = TREE_CHAIN (formal),
1197 actual = TREE_CHAIN (actual))
1198 {
1199 tree arg;
1200 enum machine_mode mode;
1201
1202 if (actual == 0)
1203 return (rtx) (HOST_WIDE_INT) -1;
1204
1205 arg = TREE_VALUE (actual);
1206 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1207
1208 if (mode != TYPE_MODE (TREE_TYPE (arg))
1209 /* If they are block mode, the types should match exactly.
1210 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1211 which could happen if the parameter has incomplete type. */
1212 || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
1213 return (rtx) (HOST_WIDE_INT) -1;
1214 }
1215
1216 /* Extra arguments are valid, but will be ignored below, so we must
1217 evaluate them here for side-effects. */
1218 for (; actual; actual = TREE_CHAIN (actual))
1219 expand_expr (TREE_VALUE (actual), const0_rtx,
1220 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1221
1222 /* Make a binding contour to keep inline cleanups called at
1223 outer function-scope level from looking like they are shadowing
1224 parameter declarations. */
1225 pushlevel (0);
1226
1227 /* Make a fresh binding contour that we can easily remove. */
1228 pushlevel (0);
1229 expand_start_bindings (0);
1230
1231 /* Expand the function arguments. Do this first so that any
1232 new registers get created before we allocate the maps. */
1233
1234 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1235 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1236
1237 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1238 formal;
1239 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1240 {
1241 /* Actual parameter, converted to the type of the argument within the
1242 function. */
1243 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1244 /* Mode of the variable used within the function. */
1245 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1246 int invisiref = 0;
1247
1248 arg_trees[i] = arg;
1249 loc = RTVEC_ELT (arg_vector, i);
1250
1251 /* If this is an object passed by invisible reference, we copy the
1252 object into a stack slot and save its address. If this will go
1253 into memory, we do nothing now. Otherwise, we just expand the
1254 argument. */
1255 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1256 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1257 {
1258 rtx stack_slot
1259 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1260 int_size_in_bytes (TREE_TYPE (arg)), 1);
1261 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1262
1263 store_expr (arg, stack_slot, 0);
1264
1265 arg_vals[i] = XEXP (stack_slot, 0);
1266 invisiref = 1;
1267 }
1268 else if (GET_CODE (loc) != MEM)
1269 {
1270 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1271 /* The modes of LOC and ARG can differ if LOC was a variable
1272 that had its mode promoted via PROMOTED_MODE. */
1273 arg_vals[i] = convert_modes (GET_MODE (loc),
1274 TYPE_MODE (TREE_TYPE (arg)),
1275 expand_expr (arg, NULL_RTX, mode,
1276 EXPAND_SUM),
1277 TREE_UNSIGNED (TREE_TYPE (formal)));
1278 else
1279 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1280 }
1281 else
1282 arg_vals[i] = 0;
1283
1284 if (arg_vals[i] != 0
1285 && (! TREE_READONLY (formal)
1286 /* If the parameter is not read-only, copy our argument through
1287 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1288 TARGET in any way. In the inline function, they will likely
1289 be two different pseudos, and `safe_from_p' will make all
1290 sorts of smart assumptions about their not conflicting.
1291 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1292 wrong, so put ARG_VALS[I] into a fresh register.
1293 Don't worry about invisible references, since their stack
1294 temps will never overlap the target. */
1295 || (target != 0
1296 && ! invisiref
1297 && (GET_CODE (arg_vals[i]) == REG
1298 || GET_CODE (arg_vals[i]) == SUBREG
1299 || GET_CODE (arg_vals[i]) == MEM)
1300 && reg_overlap_mentioned_p (arg_vals[i], target))
1301 /* ??? We must always copy a SUBREG into a REG, because it might
1302 get substituted into an address, and not all ports correctly
1303 handle SUBREGs in addresses. */
1304 || (GET_CODE (arg_vals[i]) == SUBREG)))
1305 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1306 }
1307
1308 /* Allocate the structures we use to remap things. */
1309
1310 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1311 map->fndecl = fndecl;
1312
1313 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1314 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1315
1316 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1317 map->label_map -= min_labelno;
1318
1319 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1320 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1321 map->min_insnno = 0;
1322 map->max_insnno = INSN_UID (header);
1323
1324 map->integrating = 1;
1325
1326 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1327 be large enough for all our pseudos. This is the number we are currently
1328 using plus the number in the called routine, plus 15 for each arg,
1329 five to compute the virtual frame pointer, and five for the return value.
1330 This should be enough for most cases. We do not reference entries
1331 outside the range of the map.
1332
1333 ??? These numbers are quite arbitrary and were obtained by
1334 experimentation. At some point, we should try to allocate the
1335 table after all the parameters are set up so we can more accurately
1336 estimate the number of pseudos we will need. */
1337
1338 map->const_equiv_map_size
1339 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1340
1341 map->const_equiv_map
1342 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1343 bzero ((char *) map->const_equiv_map,
1344 map->const_equiv_map_size * sizeof (rtx));
1345
1346 map->const_age_map
1347 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1348 bzero ((char *) map->const_age_map,
1349 map->const_equiv_map_size * sizeof (unsigned));
1350 map->const_age = 0;
1351
1352 /* Record the current insn in case we have to set up pointers to frame
1353 and argument memory blocks. */
1354 map->insns_at_start = get_last_insn ();
1355
1356 /* Update the outgoing argument size to allow for those in the inlined
1357 function. */
1358 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1359 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1360
1361 /* If the inline function needs to make PIC references, that means
1362 that this function's PIC offset table must be used. */
1363 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1364 current_function_uses_pic_offset_table = 1;
1365
1366 /* If this function needs a context, set it up. */
1367 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1368 static_chain_value = lookup_static_chain (fndecl);
1369
1370 if (GET_CODE (parm_insns) == NOTE
1371 && NOTE_LINE_NUMBER (parm_insns) > 0)
1372 {
1373 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1374 NOTE_LINE_NUMBER (parm_insns));
1375 if (note)
1376 RTX_INTEGRATED_P (note) = 1;
1377 }
1378
1379 /* Process each argument. For each, set up things so that the function's
1380 reference to the argument will refer to the argument being passed.
1381 We only replace REG with REG here. Any simplifications are done
1382 via const_equiv_map.
1383
1384 We make two passes: In the first, we deal with parameters that will
1385 be placed into registers, since we need to ensure that the allocated
1386 register number fits in const_equiv_map. Then we store all non-register
1387 parameters into their memory location. */
1388
1389 /* Don't try to free temp stack slots here, because we may put one of the
1390 parameters into a temp stack slot. */
1391
1392 for (i = 0; i < nargs; i++)
1393 {
1394 rtx copy = arg_vals[i];
1395
1396 loc = RTVEC_ELT (arg_vector, i);
1397
1398 /* There are three cases, each handled separately. */
1399 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1400 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1401 {
1402 /* This must be an object passed by invisible reference (it could
1403 also be a variable-sized object, but we forbid inlining functions
1404 with variable-sized arguments). COPY is the address of the
1405 actual value (this computation will cause it to be copied). We
1406 map that address for the register, noting the actual address as
1407 an equivalent in case it can be substituted into the insns. */
1408
1409 if (GET_CODE (copy) != REG)
1410 {
1411 temp = copy_addr_to_reg (copy);
1412 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1413 && REGNO (temp) < map->const_equiv_map_size)
1414 {
1415 map->const_equiv_map[REGNO (temp)] = copy;
1416 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1417 }
1418 copy = temp;
1419 }
1420 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1421 }
1422 else if (GET_CODE (loc) == MEM)
1423 {
1424 /* This is the case of a parameter that lives in memory.
1425 It will live in the block we allocate in the called routine's
1426 frame that simulates the incoming argument area. Do nothing
1427 now; we will call store_expr later. */
1428 ;
1429 }
1430 else if (GET_CODE (loc) == REG)
1431 {
1432 /* This is the good case where the parameter is in a register.
1433 If it is read-only and our argument is a constant, set up the
1434 constant equivalence.
1435
1436 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1437 that flag set if it is a register.
1438
1439 Also, don't allow hard registers here; they might not be valid
1440 when substituted into insns. */
1441
1442 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1443 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1444 && ! REG_USERVAR_P (copy))
1445 || (GET_CODE (copy) == REG
1446 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1447 {
1448 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1449 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1450 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1451 && REGNO (temp) < map->const_equiv_map_size)
1452 {
1453 map->const_equiv_map[REGNO (temp)] = copy;
1454 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1455 }
1456 copy = temp;
1457 }
1458 map->reg_map[REGNO (loc)] = copy;
1459 }
1460 else if (GET_CODE (loc) == CONCAT)
1461 {
1462 /* This is the good case where the parameter is in a
1463 pair of separate pseudos.
1464 If it is read-only and our argument is a constant, set up the
1465 constant equivalence.
1466
1467 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1468 that flag set if it is a register.
1469
1470 Also, don't allow hard registers here; they might not be valid
1471 when substituted into insns. */
1472 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1473 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1474 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1475 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1476
1477 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1478 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1479 && ! REG_USERVAR_P (copyreal))
1480 || (GET_CODE (copyreal) == REG
1481 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1482 {
1483 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1484 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1485 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1486 && REGNO (temp) < map->const_equiv_map_size)
1487 {
1488 map->const_equiv_map[REGNO (temp)] = copyreal;
1489 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1490 }
1491 copyreal = temp;
1492 }
1493 map->reg_map[REGNO (locreal)] = copyreal;
1494
1495 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1496 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1497 && ! REG_USERVAR_P (copyimag))
1498 || (GET_CODE (copyimag) == REG
1499 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1500 {
1501 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1502 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1503 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1504 && REGNO (temp) < map->const_equiv_map_size)
1505 {
1506 map->const_equiv_map[REGNO (temp)] = copyimag;
1507 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1508 }
1509 copyimag = temp;
1510 }
1511 map->reg_map[REGNO (locimag)] = copyimag;
1512 }
1513 else
1514 abort ();
1515 }
1516
1517 /* Now do the parameters that will be placed in memory. */
1518
1519 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1520 formal; formal = TREE_CHAIN (formal), i++)
1521 {
1522 loc = RTVEC_ELT (arg_vector, i);
1523
1524 if (GET_CODE (loc) == MEM
1525 /* Exclude case handled above. */
1526 && ! (GET_CODE (XEXP (loc, 0)) == REG
1527 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1528 {
1529 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1530 DECL_SOURCE_LINE (formal));
1531 if (note)
1532 RTX_INTEGRATED_P (note) = 1;
1533
1534 /* Compute the address in the area we reserved and store the
1535 value there. */
1536 temp = copy_rtx_and_substitute (loc, map);
1537 subst_constants (&temp, NULL_RTX, map);
1538 apply_change_group ();
1539 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1540 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1541 store_expr (arg_trees[i], temp, 0);
1542 }
1543 }
1544
1545 /* Deal with the places that the function puts its result.
1546 We are driven by what is placed into DECL_RESULT.
1547
1548 Initially, we assume that we don't need any special handling for
1549 REG_FUNCTION_VALUE_P. */
1550
1551 map->inline_target = 0;
1552 loc = DECL_RTL (DECL_RESULT (fndecl));
1553 if (TYPE_MODE (type) == VOIDmode)
1554 /* There is no return value to worry about. */
1555 ;
1556 else if (GET_CODE (loc) == MEM)
1557 {
1558 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1559 abort ();
1560
1561 /* Pass the function the address in which to return a structure value.
1562 Note that a constructor can cause someone to call us with
1563 STRUCTURE_VALUE_ADDR, but the initialization takes place
1564 via the first parameter, rather than the struct return address.
1565
1566 We have two cases: If the address is a simple register indirect,
1567 use the mapping mechanism to point that register to our structure
1568 return address. Otherwise, store the structure return value into
1569 the place that it will be referenced from. */
1570
1571 if (GET_CODE (XEXP (loc, 0)) == REG)
1572 {
1573 temp = force_reg (Pmode, structure_value_addr);
1574 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1575 if ((CONSTANT_P (structure_value_addr)
1576 || (GET_CODE (structure_value_addr) == PLUS
1577 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1578 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1579 && REGNO (temp) < map->const_equiv_map_size)
1580 {
1581 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1582 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1583 }
1584 }
1585 else
1586 {
1587 temp = copy_rtx_and_substitute (loc, map);
1588 subst_constants (&temp, NULL_RTX, map);
1589 apply_change_group ();
1590 emit_move_insn (temp, structure_value_addr);
1591 }
1592 }
1593 else if (ignore)
1594 /* We will ignore the result value, so don't look at its structure.
1595 Note that preparations for an aggregate return value
1596 do need to be made (above) even if it will be ignored. */
1597 ;
1598 else if (GET_CODE (loc) == REG)
1599 {
1600 /* The function returns an object in a register and we use the return
1601 value. Set up our target for remapping. */
1602
1603 /* Machine mode function was declared to return. */
1604 enum machine_mode departing_mode = TYPE_MODE (type);
1605 /* (Possibly wider) machine mode it actually computes
1606 (for the sake of callers that fail to declare it right). */
1607 enum machine_mode arriving_mode
1608 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1609 rtx reg_to_map;
1610
1611 /* Don't use MEMs as direct targets because on some machines
1612 substituting a MEM for a REG makes invalid insns.
1613 Let the combiner substitute the MEM if that is valid. */
1614 if (target == 0 || GET_CODE (target) != REG
1615 || GET_MODE (target) != departing_mode)
1616 target = gen_reg_rtx (departing_mode);
1617
1618 /* If function's value was promoted before return,
1619 avoid machine mode mismatch when we substitute INLINE_TARGET.
1620 But TARGET is what we will return to the caller. */
1621 if (arriving_mode != departing_mode)
1622 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1623 else
1624 reg_to_map = target;
1625
1626 /* Usually, the result value is the machine's return register.
1627 Sometimes it may be a pseudo. Handle both cases. */
1628 if (REG_FUNCTION_VALUE_P (loc))
1629 map->inline_target = reg_to_map;
1630 else
1631 map->reg_map[REGNO (loc)] = reg_to_map;
1632 }
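      /* A hedged example of the promotion handling above (register number
         invented): if the function was declared to return `short' but the
         target promotes return values to full words, DEPARTING_MODE is
         HImode while ARRIVING_MODE is SImode, so INLINE_TARGET becomes
         (subreg:SI (reg:HI 70) 0) while TARGET, (reg:HI 70), is what the
         caller actually uses.  */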
1633
1634 /* Make new label equivalences for the labels in the called function. */
1635 for (i = min_labelno; i < max_labelno; i++)
1636 map->label_map[i] = gen_label_rtx ();
1637
1638 /* Perform postincrements before actually calling the function. */
1639 emit_queue ();
1640
1641 /* Clean up stack so that variables might have smaller offsets. */
1642 do_pending_stack_adjust ();
1643
1644 /* Save a copy of the location of const_equiv_map for mark_stores, called
1645 via note_stores. */
1646 global_const_equiv_map = map->const_equiv_map;
1647 global_const_equiv_map_size = map->const_equiv_map_size;
1648
1649 /* If the called function does an alloca, save and restore the
1650 stack pointer around the call. This saves stack space, but
1651 also is required if this inline is being done between two
1652 pushes. */
1653 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1654 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1655
1656 /* Now copy the insns one by one. Do this in two passes, first the insns and
1657 then their REG_NOTES, just like save_for_inline. */
1658
1659 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1660
1661 for (insn = insns; insn; insn = NEXT_INSN (insn))
1662 {
1663 rtx copy, pattern, set;
1664
1665 map->orig_asm_operands_vector = 0;
1666
1667 switch (GET_CODE (insn))
1668 {
1669 case INSN:
1670 pattern = PATTERN (insn);
1671 set = single_set (insn);
1672 copy = 0;
1673 if (GET_CODE (pattern) == USE
1674 && GET_CODE (XEXP (pattern, 0)) == REG
1675 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1676 /* The (USE (REG n)) at return from the function should
1677 be ignored since we are changing (REG n) into
1678 inline_target. */
1679 break;
1680
1681 /* Ignore setting a function value that we don't want to use. */
1682 if (map->inline_target == 0
1683 && set != 0
1684 && GET_CODE (SET_DEST (set)) == REG
1685 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1686 {
1687 if (volatile_refs_p (SET_SRC (set)))
1688 {
1689 rtx new_set;
1690
1691 /* If we must not delete the source,
1692 load it into a new temporary. */
1693 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1694
1695 new_set = single_set (copy);
1696 if (new_set == 0)
1697 abort ();
1698
1699 SET_DEST (new_set)
1700 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1701 }
1702 else
1703 break;
1704 }
1705
1706 /* If this is setting the static chain rtx, omit it. */
1707 else if (static_chain_value != 0
1708 && set != 0
1709 && GET_CODE (SET_DEST (set)) == REG
1710 && rtx_equal_p (SET_DEST (set),
1711 static_chain_incoming_rtx))
1712 break;
1713
1714 /* If this is setting the static chain pseudo, set it from
1715 the value we want to give it instead. */
1716 else if (static_chain_value != 0
1717 && set != 0
1718 && rtx_equal_p (SET_SRC (set),
1719 static_chain_incoming_rtx))
1720 {
1721 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1722
1723 copy = emit_move_insn (newdest, static_chain_value);
1724 static_chain_value = 0;
1725 }
1726 else
1727 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1728 /* REG_NOTES will be copied later. */
1729
1730 #ifdef HAVE_cc0
1731 /* If this insn is setting CC0, it may need to look at
1732 the insn that uses CC0 to see what type of insn it is.
1733 In that case, the call to recog via validate_change will
1734 fail. So don't substitute constants here. Instead,
1735 do it when we emit the following insn.
1736
1737 For example, see the pyr.md file. That machine has signed and
1738 unsigned compares. The compare patterns must check the
1739 following branch insn to see what kind of compare to
1740 emit.
1741
1742 If the previous insn set CC0, substitute constants on it as
1743 well. */
1744 if (sets_cc0_p (PATTERN (copy)) != 0)
1745 cc0_insn = copy;
1746 else
1747 {
1748 if (cc0_insn)
1749 try_constants (cc0_insn, map);
1750 cc0_insn = 0;
1751 try_constants (copy, map);
1752 }
1753 #else
1754 try_constants (copy, map);
1755 #endif
1756 break;
1757
1758 case JUMP_INSN:
1759 if (GET_CODE (PATTERN (insn)) == RETURN)
1760 {
1761 if (local_return_label == 0)
1762 local_return_label = gen_label_rtx ();
1763 pattern = gen_jump (local_return_label);
1764 }
1765 else
1766 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1767
1768 copy = emit_jump_insn (pattern);
1769
1770 #ifdef HAVE_cc0
1771 if (cc0_insn)
1772 try_constants (cc0_insn, map);
1773 cc0_insn = 0;
1774 #endif
1775 try_constants (copy, map);
1776
1777 /* If this used to be a conditional jump insn whose branch
1778 direction is now known, we must do something special. */
1779 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1780 {
1781 #ifdef HAVE_cc0
1782 /* The previous insn set cc0 for us. So delete it. */
1783 delete_insn (PREV_INSN (copy));
1784 #endif
1785
1786 /* If this is now a no-op, delete it. */
1787 if (map->last_pc_value == pc_rtx)
1788 {
1789 delete_insn (copy);
1790 copy = 0;
1791 }
1792 else
1793 /* Otherwise, this is an unconditional jump, so we must put a
1794 BARRIER after it. We could do some dead code elimination
1795 here, but jump.c will do it just as well. */
1796 emit_barrier ();
1797 }
1798 break;
1799
1800 case CALL_INSN:
1801 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1802 copy = emit_call_insn (pattern);
1803
1804 /* Because the USAGE information potentially contains objects other
1805 than hard registers, we need to copy it. */
1806 CALL_INSN_FUNCTION_USAGE (copy) =
1807 copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1808
1809 #ifdef HAVE_cc0
1810 if (cc0_insn)
1811 try_constants (cc0_insn, map);
1812 cc0_insn = 0;
1813 #endif
1814 try_constants (copy, map);
1815
1816 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1817 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1818 map->const_equiv_map[i] = 0;
1819 break;
1820
1821 case CODE_LABEL:
1822 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1823 LABEL_NAME (copy) = LABEL_NAME (insn);
1824 map->const_age++;
1825 break;
1826
1827 case BARRIER:
1828 copy = emit_barrier ();
1829 break;
1830
1831 case NOTE:
1832 /* It is important to discard function-end and function-beg notes,
1833 so we have only one of each in the current function.
1834 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1835 deleted these in the copy used for continuing compilation,
1836 not the copy used for inlining). */
1837 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1838 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1839 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1840 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1841 else
1842 copy = 0;
1843 break;
1844
1845 default:
1846 abort ();
1847 break;
1848 }
1849
1850 if (copy)
1851 RTX_INTEGRATED_P (copy) = 1;
1852
1853 map->insn_map[INSN_UID (insn)] = copy;
1854 }
1855
1856 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1857 from parameters can be substituted in. These are the only ones that
1858 are valid across the entire function. */
1859 map->const_age++;
1860 for (insn = insns; insn; insn = NEXT_INSN (insn))
1861 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1862 && map->insn_map[INSN_UID (insn)]
1863 && REG_NOTES (insn))
1864 {
1865 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1866 /* We must also do subst_constants, in case one of our parameters
1867 has const type and constant value. */
1868 subst_constants (&tem, NULL_RTX, map);
1869 apply_change_group ();
1870 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1871 }
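   /* An illustration of why the notes need a second pass (the note kind
      is only an example): a REG_LIBCALL note on the first insn of a
      libcall sequence points at the *last* insn of the sequence.  By the
      time notes are copied here, map->insn_map has an entry for every
      copied insn, so such forward references (the `u' format case in
      copy_rtx_and_substitute) resolve to the proper copies.  */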
1872
1873 if (local_return_label)
1874 emit_label (local_return_label);
1875
1876 /* Restore the stack pointer if we saved it above. */
1877 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1878 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1879
1880 /* Make copies of the decls of the symbols in the inline function, so that
1881 the copies of the variables get declared in the current function. Set
1882 up things so that lookup_static_chain knows to interpret registers
1883 in SAVE_EXPRs for TYPE_SIZEs as local. */
1884
1885 inline_function_decl = fndecl;
1886 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1887 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1888 inline_function_decl = 0;
1889
1890 /* End the scope containing the copied formal parameter variables
1891 and copied LABEL_DECLs. */
1892
1893 expand_end_bindings (getdecls (), 1, 1);
1894 block = poplevel (1, 1, 0);
1895 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1896 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1897 poplevel (0, 0, 0);
1898 emit_line_note (input_filename, lineno);
1899
1900 if (structure_value_addr)
1901 {
1902 target = gen_rtx (MEM, TYPE_MODE (type),
1903 memory_address (TYPE_MODE (type), structure_value_addr));
1904 MEM_IN_STRUCT_P (target) = 1;
1905 }
1906 return target;
1907 }
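/* A hedged sketch of how a caller such as expand_call (in calls.c) might
   drive the function above; the local names and the failure convention
   shown are illustrative rather than copied from calls.c.  */
#if 0
  temp = expand_inline_function (fndecl, actparms, target, ignore,
				 TREE_TYPE (exp), structure_value_addr);
  if (temp == (rtx) (HOST_WIDE_INT) -1)
    /* Inlining was not possible; emit a real CALL_INSN instead.  */
    ;
  else
    return temp;
#endif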
1908 \f
1909 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1910 push all of those decls and give each one the corresponding home. */
1911
1912 static void
1913 integrate_parm_decls (args, map, arg_vector)
1914 tree args;
1915 struct inline_remap *map;
1916 rtvec arg_vector;
1917 {
1918 register tree tail;
1919 register int i;
1920
1921 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1922 {
1923 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1924 TREE_TYPE (tail));
1925 rtx new_decl_rtl
1926 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1927
1928 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
1929 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1930 here, but that's going to require some more work. */
1931 /* DECL_INCOMING_RTL (decl) = ?; */
1932 /* These args would always appear unused, if not for this. */
1933 TREE_USED (decl) = 1;
1934 /* Prevent warning for shadowing with these. */
1935 DECL_ABSTRACT_ORIGIN (decl) = tail;
1936 pushdecl (decl);
1937 /* Fully instantiate the address with the equivalent form so that the
1938 debugging information contains the actual register, instead of the
1939 virtual register. Do this by not passing an insn to
1940 subst_constants. */
1941 subst_constants (&new_decl_rtl, NULL_RTX, map);
1942 apply_change_group ();
1943 DECL_RTL (decl) = new_decl_rtl;
1944 }
1945 }
1946
1947 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1948 current function a tree of contexts isomorphic to the one that is given.
1949
1950 LEVEL indicates how far down into the BLOCK tree is the node we are
1951 currently traversing. It is always zero except for recursive calls.
1952
1953 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1954 registers used in the DECL_RTL field should be remapped. If it is zero,
1955 no mapping is necessary. */
1956
1957 static void
1958 integrate_decl_tree (let, level, map)
1959 tree let;
1960 int level;
1961 struct inline_remap *map;
1962 {
1963 tree t, node;
1964
1965 if (level > 0)
1966 pushlevel (0);
1967
1968 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1969 {
1970 tree d;
1971
1972 push_obstacks_nochange ();
1973 saveable_allocation ();
1974 d = copy_node (t);
1975 pop_obstacks ();
1976
1977 if (DECL_RTL (t) != 0)
1978 {
1979 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1980 /* Fully instantiate the address with the equivalent form so that the
1981 debugging information contains the actual register, instead of the
1982 virtual register. Do this by not passing an insn to
1983 subst_constants. */
1984 subst_constants (&DECL_RTL (d), NULL_RTX, map);
1985 apply_change_group ();
1986 }
1987 /* These args would always appear unused, if not for this. */
1988 TREE_USED (d) = 1;
1989 /* Prevent warning for shadowing with these. */
1990 DECL_ABSTRACT_ORIGIN (d) = t;
1991
1992 if (DECL_LANG_SPECIFIC (d))
1993 copy_lang_decl (d);
1994
1995 pushdecl (d);
1996 }
1997
1998 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1999 integrate_decl_tree (t, level + 1, map);
2000
2001 if (level > 0)
2002 {
2003 node = poplevel (1, 0, 0);
2004 if (node)
2005 {
2006 TREE_USED (node) = TREE_USED (let);
2007 BLOCK_ABSTRACT_ORIGIN (node) = let;
2008 }
2009 }
2010 }
2011 \f
2012 /* Create a new copy of an rtx.
2013 Recursively copies the operands of the rtx,
2014 except for those few rtx codes that are sharable.
2015
2016 We always return an rtx that is similar to the incoming rtx, with the
2017 exception of possibly changing a REG to a SUBREG or vice versa. No
2018 rtl is ever emitted.
2019
2020 Handle constants that need to be placed in the constant pool by
2021 calling `force_const_mem'. */
2022
2023 rtx
2024 copy_rtx_and_substitute (orig, map)
2025 register rtx orig;
2026 struct inline_remap *map;
2027 {
2028 register rtx copy, temp;
2029 register int i, j;
2030 register RTX_CODE code;
2031 register enum machine_mode mode;
2032 register char *format_ptr;
2033 int regno;
2034
2035 if (orig == 0)
2036 return 0;
2037
2038 code = GET_CODE (orig);
2039 mode = GET_MODE (orig);
2040
2041 switch (code)
2042 {
2043 case REG:
2044 /* If the stack pointer register shows up, it must be part of
2045 stack-adjustments (*not* because we eliminated the frame pointer!).
2046 Small hard registers are returned as-is. Pseudo-registers
2047 go through their `reg_map'. */
2048 regno = REGNO (orig);
2049 if (regno <= LAST_VIRTUAL_REGISTER)
2050 {
2051 /* Some hard registers are also mapped,
2052 but others are not translated. */
2053 if (map->reg_map[regno] != 0)
2054 return map->reg_map[regno];
2055
2056 /* If this is the virtual frame pointer, make space in current
2057 function's stack frame for the stack frame of the inline function.
2058
2059 Copy the address of this area into a pseudo. Map
2060 virtual_stack_vars_rtx to this pseudo and set up a constant
2061 equivalence for it to be the address. This will substitute the
2062 address into insns where it can be substituted and use the new
2063 pseudo where it can't. */
2064 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2065 {
2066 rtx loc, seq;
2067 int size = DECL_FRAME_SIZE (map->fndecl);
2068 int rounded;
2069
2070 start_sequence ();
2071 loc = assign_stack_temp (BLKmode, size, 1);
2072 loc = XEXP (loc, 0);
2073 #ifdef FRAME_GROWS_DOWNWARD
2074 /* In this case, virtual_stack_vars_rtx points to one byte
2075 higher than the top of the frame area. So compute the offset
2076 to one byte higher than our substitute frame.
2077 Keep the fake frame pointer aligned like a real one. */
2078 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2079 loc = plus_constant (loc, rounded);
2080 #endif
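	      /* Worked example (numbers invented): with size == 13 and
		 BIGGEST_ALIGNMENT / BITS_PER_UNIT == 8,
		 CEIL_ROUND (13, 8) == ((13 + 7) & ~7) == 16, keeping the
		 fake frame pointer aligned to an 8-byte boundary.  */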
2081 map->reg_map[regno] = temp
2082 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2083
2084 if (REGNO (temp) < map->const_equiv_map_size)
2085 {
2086 map->const_equiv_map[REGNO (temp)] = loc;
2087 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2088 }
2089
2090 seq = gen_sequence ();
2091 end_sequence ();
2092 emit_insn_after (seq, map->insns_at_start);
2093 return temp;
2094 }
2095 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2096 {
2097 /* Do the same for a block to contain any arguments referenced
2098 in memory. */
2099 rtx loc, seq;
2100 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2101
2102 start_sequence ();
2103 loc = assign_stack_temp (BLKmode, size, 1);
2104 loc = XEXP (loc, 0);
2105 /* When arguments grow downward, the virtual incoming
2106 args pointer points to the top of the argument block,
2107 so the remapped location better do the same. */
2108 #ifdef ARGS_GROW_DOWNWARD
2109 loc = plus_constant (loc, size);
2110 #endif
2111 map->reg_map[regno] = temp
2112 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2113
2114 if (REGNO (temp) < map->const_equiv_map_size)
2115 {
2116 map->const_equiv_map[REGNO (temp)] = loc;
2117 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2118 }
2119
2120 seq = gen_sequence ();
2121 end_sequence ();
2122 emit_insn_after (seq, map->insns_at_start);
2123 return temp;
2124 }
2125 else if (REG_FUNCTION_VALUE_P (orig))
2126 {
2127 /* This is a reference to the function return value. If
2128 the function doesn't have a return value, error. If the
2129 mode doesn't agree, make a SUBREG. */
2130 if (map->inline_target == 0)
2131 /* Must be unrolling loops or replicating code if we
2132 reach here, so return the register unchanged. */
2133 return orig;
2134 else if (mode != GET_MODE (map->inline_target))
2135 return gen_lowpart (mode, map->inline_target);
2136 else
2137 return map->inline_target;
2138 }
2139 return orig;
2140 }
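	  /* Illustrative effect of the virtual-register cases above
	     (register number invented, Pmode assumed to be SImode): an
	     address in the inline body such as
	     (plus:SI (reg:SI VIRTUAL_STACK_VARS_REGNUM) (const_int -4))
	     is remapped to (plus:SI (reg:SI 75) (const_int -4)), and the
	     constant equivalence recorded for reg 75 lets subst_constants
	     later fold the sum into a direct address in the caller's
	     frame.  */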
2141 if (map->reg_map[regno] == NULL)
2142 {
2143 map->reg_map[regno] = gen_reg_rtx (mode);
2144 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2145 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2146 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2147 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2148 }
2149 return map->reg_map[regno];
2150
2151 case SUBREG:
2152 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2153 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2154 if (GET_CODE (copy) == SUBREG)
2155 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2156 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2157 else if (GET_CODE (copy) == CONCAT)
2158 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2159 else
2160 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2161 SUBREG_WORD (orig));
2162
2163 case USE:
2164 case CLOBBER:
2165 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2166 to (use foo) if the original insn didn't have a subreg.
2167 Removing the subreg distorts the VAX movstrhi pattern
2168 by changing the mode of an operand. */
2169 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2170 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2171 copy = SUBREG_REG (copy);
2172 return gen_rtx (code, VOIDmode, copy);
2173
2174 case CODE_LABEL:
2175 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2176 = LABEL_PRESERVE_P (orig);
2177 return map->label_map[CODE_LABEL_NUMBER (orig)];
2178
2179 case LABEL_REF:
2180 copy = gen_rtx (LABEL_REF, mode,
2181 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2182 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2183 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2184
2185 /* The fact that this label was previously nonlocal does not mean
2186 it still is, so we must check if it is within the range of
2187 this function's labels. */
2188 LABEL_REF_NONLOCAL_P (copy)
2189 = (LABEL_REF_NONLOCAL_P (orig)
2190 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2191 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2192
2193 /* If we have made a nonlocal label local, it means that this
2194 inlined call will be referring to our nonlocal goto handler.
2195 So make sure we create one for this block; we normally would
2196 not since this is not otherwise considered a "call". */
2197 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2198 function_call_count++;
2199
2200 return copy;
2201
2202 case PC:
2203 case CC0:
2204 case CONST_INT:
2205 return orig;
2206
2207 case SYMBOL_REF:
2208 /* Symbols which represent the address of a label stored in the constant
2209 pool must be modified to point to a constant pool entry for the
2210 remapped label. Otherwise, symbols are returned unchanged. */
2211 if (CONSTANT_POOL_ADDRESS_P (orig))
2212 {
2213 rtx constant = get_pool_constant (orig);
2214 if (GET_CODE (constant) == LABEL_REF)
2215 return XEXP (force_const_mem (Pmode,
2216 copy_rtx_and_substitute (constant,
2217 map)),
2218 0);
2219 }
2220
2221 return orig;
2222
2223 case CONST_DOUBLE:
2224 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2225 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2226 duplicate of a CONST_DOUBLE we have already seen. */
2227 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2228 {
2229 REAL_VALUE_TYPE d;
2230
2231 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2232 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2233 }
2234 else
2235 return immed_double_const (CONST_DOUBLE_LOW (orig),
2236 CONST_DOUBLE_HIGH (orig), VOIDmode);
2237
2238 case CONST:
2239 /* Make new constant pool entry for a constant
2240 that was in the pool of the inline function. */
2241 if (RTX_INTEGRATED_P (orig))
2242 {
2243 /* If this was an address of a constant pool entry that itself
2244 had to be placed in the constant pool, it might not be a
2245 valid address. So the recursive call below might turn it
2246 into a register. In that case, it isn't a constant any
2247 more, so return it. This has the potential of changing a
2248 MEM into a REG, but we'll assume that it is safe. */
2249 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2250 if (! CONSTANT_P (temp))
2251 return temp;
2252 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2253 }
2254 break;
2255
2256 case ADDRESS:
2257 /* If from constant pool address, make new constant pool entry and
2258 return its address. */
2259 if (! RTX_INTEGRATED_P (orig))
2260 abort ();
2261
2262 temp = force_const_mem (GET_MODE (orig),
2263 copy_rtx_and_substitute (XEXP (orig, 0), map));
2264
2265 #if 0
2266 /* Legitimizing the address here is incorrect.
2267
2268 The only ADDRESS rtx's that can reach here are ones created by
2269 save_constants. Hence the operand of the ADDRESS is always valid
2270 in this position of the instruction, since the original rtx without
2271 the ADDRESS was valid.
2272
2273 The reason we don't legitimize the address here is that on the
2274 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2275 This code forces the operand of the address to a register, which
2276 fails because we can not take the HIGH part of a register.
2277
2278 Also, change_address may create new registers. These registers
2279 will not have valid reg_map entries. This can cause try_constants()
2280 to fail because it assumes that all registers in the rtx have valid
2281 reg_map entries, and it may end up replacing one of these new
2282 registers with junk. */
2283
2284 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2285 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2286 #endif
2287
2288 return XEXP (temp, 0);
2289
2290 case ASM_OPERANDS:
2291 /* If a single asm insn contains multiple output operands
2292 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2293 We must make sure that the copied insn continues to share it. */
2294 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2295 {
2296 copy = rtx_alloc (ASM_OPERANDS);
2297 copy->volatil = orig->volatil;
2298 XSTR (copy, 0) = XSTR (orig, 0);
2299 XSTR (copy, 1) = XSTR (orig, 1);
2300 XINT (copy, 2) = XINT (orig, 2);
2301 XVEC (copy, 3) = map->copy_asm_operands_vector;
2302 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2303 XSTR (copy, 5) = XSTR (orig, 5);
2304 XINT (copy, 6) = XINT (orig, 6);
2305 return copy;
2306 }
2307 break;
2308
2309 case CALL:
2310 /* This is given special treatment because the first
2311 operand of a CALL is a (MEM ...) which may get
2312 forced into a register for cse. This is undesirable
2313 if function-address cse isn't wanted or if we won't do cse. */
2314 #ifndef NO_FUNCTION_CSE
2315 if (! (optimize && ! flag_no_function_cse))
2316 #endif
2317 return gen_rtx (CALL, GET_MODE (orig),
2318 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2319 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2320 copy_rtx_and_substitute (XEXP (orig, 1), map));
2321 break;
2322
2323 #if 0
2324 /* Must be ifdefed out for loop unrolling to work. */
2325 case RETURN:
2326 abort ();
2327 #endif
2328
2329 case SET:
2330 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2331 Don't alter that.
2332 If the nonlocal goto is into the current function,
2333 this will result in unnecessarily bad code, but should work. */
2334 if (SET_DEST (orig) == virtual_stack_vars_rtx
2335 || SET_DEST (orig) == virtual_incoming_args_rtx)
2336 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2337 copy_rtx_and_substitute (SET_SRC (orig), map));
2338 break;
2339
2340 case MEM:
2341 copy = rtx_alloc (MEM);
2342 PUT_MODE (copy, mode);
2343 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2344 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2345 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2346
2347 /* If doing function inlining, this MEM might not be const in the
2348 function that it is being inlined into, and thus may not be
2349 unchanging after function inlining. Constant pool references are
2350 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2351 for them. */
2352 if (! map->integrating)
2353 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2354
2355 return copy;
2356 }
2357
2358 copy = rtx_alloc (code);
2359 PUT_MODE (copy, mode);
2360 copy->in_struct = orig->in_struct;
2361 copy->volatil = orig->volatil;
2362 copy->unchanging = orig->unchanging;
2363
2364 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2365
2366 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2367 {
2368 switch (*format_ptr++)
2369 {
2370 case '0':
2371 break;
2372
2373 case 'e':
2374 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2375 break;
2376
2377 case 'u':
2378 /* Change any references to old-insns to point to the
2379 corresponding copied insns. */
2380 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2381 break;
2382
2383 case 'E':
2384 XVEC (copy, i) = XVEC (orig, i);
2385 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2386 {
2387 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2388 for (j = 0; j < XVECLEN (copy, i); j++)
2389 XVECEXP (copy, i, j)
2390 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2391 }
2392 break;
2393
2394 case 'w':
2395 XWINT (copy, i) = XWINT (orig, i);
2396 break;
2397
2398 case 'i':
2399 XINT (copy, i) = XINT (orig, i);
2400 break;
2401
2402 case 's':
2403 XSTR (copy, i) = XSTR (orig, i);
2404 break;
2405
2406 default:
2407 abort ();
2408 }
2409 }
2410
2411 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2412 {
2413 map->orig_asm_operands_vector = XVEC (orig, 3);
2414 map->copy_asm_operands_vector = XVEC (copy, 3);
2415 map->copy_asm_constraints_vector = XVEC (copy, 4);
2416 }
2417
2418 return copy;
2419 }
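/* A minimal usage sketch, mirroring the insn-copying loop earlier in this
   file (`insn' and `map' as in that loop):  */
#if 0
  copy = emit_insn (copy_rtx_and_substitute (PATTERN (insn), map));
  try_constants (copy, map);	/* Validate or discard the substitutions.  */
#endif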
2420 \f
2421 /* Substitute known constant values into INSN, if that is valid. */
2422
2423 void
2424 try_constants (insn, map)
2425 rtx insn;
2426 struct inline_remap *map;
2427 {
2428 int i;
2429
2430 map->num_sets = 0;
2431 subst_constants (&PATTERN (insn), insn, map);
2432
2433 /* Apply the changes if they are valid; otherwise discard them. */
2434 apply_change_group ();
2435
2436 /* Show we don't know the value of anything stored or clobbered. */
2437 note_stores (PATTERN (insn), mark_stores);
2438 map->last_pc_value = 0;
2439 #ifdef HAVE_cc0
2440 map->last_cc0_value = 0;
2441 #endif
2442
2443 /* Set up any constant equivalences made in this insn. */
2444 for (i = 0; i < map->num_sets; i++)
2445 {
2446 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2447 {
2448 int regno = REGNO (map->equiv_sets[i].dest);
2449
2450 if (regno < map->const_equiv_map_size
2451 && (map->const_equiv_map[regno] == 0
2452 /* The following clause is a hack to make the case work where GNU C++
2453 reassigns a variable to make cse work right. */
2454 || ! rtx_equal_p (map->const_equiv_map[regno],
2455 map->equiv_sets[i].equiv)))
2456 {
2457 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2458 map->const_age_map[regno] = map->const_age;
2459 }
2460 }
2461 else if (map->equiv_sets[i].dest == pc_rtx)
2462 map->last_pc_value = map->equiv_sets[i].equiv;
2463 #ifdef HAVE_cc0
2464 else if (map->equiv_sets[i].dest == cc0_rtx)
2465 map->last_cc0_value = map->equiv_sets[i].equiv;
2466 #endif
2467 }
2468 }
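/* Example of the recording above (register number invented): after a
   copied insn (set (reg:SI 80) (const_int 10)) is processed,
   map->const_equiv_map[80] holds (const_int 10) at age map->const_age,
   so later insns may substitute 10 for reg 80 until mark_stores sees
   reg 80 stored again or const_age is incremented past the recorded
   age.  */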
2469 \f
2470 /* Substitute known constants for pseudo regs in the contents of LOC,
2471 which are part of INSN.
2472 If INSN is zero, the substitution should always be done (this is used to
2473 update DECL_RTL).
2474 These changes are taken out by try_constants if the result is not valid.
2475
2476 Note that we are more concerned with determining when the result of a SET
2477 is a constant, for further propagation, than actually inserting constants
2478 into insns; cse will do the latter task better.
2479
2480 This function is also used to adjust the addresses of items previously addressed
2481 via the virtual stack variable or virtual incoming arguments registers. */
2482
2483 static void
2484 subst_constants (loc, insn, map)
2485 rtx *loc;
2486 rtx insn;
2487 struct inline_remap *map;
2488 {
2489 rtx x = *loc;
2490 register int i;
2491 register enum rtx_code code;
2492 register char *format_ptr;
2493 int num_changes = num_validated_changes ();
2494 rtx new = 0;
2495 enum machine_mode op0_mode;
2496
2497 code = GET_CODE (x);
2498
2499 switch (code)
2500 {
2501 case PC:
2502 case CONST_INT:
2503 case CONST_DOUBLE:
2504 case SYMBOL_REF:
2505 case CONST:
2506 case LABEL_REF:
2507 case ADDRESS:
2508 return;
2509
2510 #ifdef HAVE_cc0
2511 case CC0:
2512 validate_change (insn, loc, map->last_cc0_value, 1);
2513 return;
2514 #endif
2515
2516 case USE:
2517 case CLOBBER:
2518 /* The only thing we can do with a USE or CLOBBER is possibly do
2519 some substitutions in a MEM within it. */
2520 if (GET_CODE (XEXP (x, 0)) == MEM)
2521 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2522 return;
2523
2524 case REG:
2525 /* Substitute for parms and known constants. Don't replace
2526 hard regs used as user variables with constants. */
2527 {
2528 int regno = REGNO (x);
2529
2530 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2531 && regno < map->const_equiv_map_size
2532 && map->const_equiv_map[regno] != 0
2533 && map->const_age_map[regno] >= map->const_age)
2534 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2535 return;
2536 }
2537
2538 case SUBREG:
2539 /* SUBREG applied to something other than a reg
2540 should be treated as ordinary, since that must
2541 be a special hack and we don't know how to treat it specially.
2542 Consider for example mulsidi3 in m68k.md.
2543 Ordinary SUBREG of a REG needs this special treatment. */
2544 if (GET_CODE (SUBREG_REG (x)) == REG)
2545 {
2546 rtx inner = SUBREG_REG (x);
2547 rtx new = 0;
2548
2549 /* We can't call subst_constants on &SUBREG_REG (x) because any
2550 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2551 see what is inside, try to form the new SUBREG and see if that is
2552 valid. We handle two cases: extracting a full word in an
2553 integral mode and extracting the low part. */
2554 subst_constants (&inner, NULL_RTX, map);
2555
2556 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2557 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2558 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2559 new = operand_subword (inner, SUBREG_WORD (x), 0,
2560 GET_MODE (SUBREG_REG (x)));
2561
2562 if (new == 0 && subreg_lowpart_p (x))
2563 new = gen_lowpart_common (GET_MODE (x), inner);
2564
2565 if (new)
2566 validate_change (insn, loc, new, 1);
2567
2568 return;
2569 }
2570 break;
2571
2572 case MEM:
2573 subst_constants (&XEXP (x, 0), insn, map);
2574
2575 /* If a memory address got spoiled, change it back. */
2576 if (insn != 0 && num_validated_changes () != num_changes
2577 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2578 cancel_changes (num_changes);
2579 return;
2580
2581 case SET:
2582 {
2583 /* Substitute constants in our source, and in any arguments to a
2584 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2585 itself. */
2586 rtx *dest_loc = &SET_DEST (x);
2587 rtx dest = *dest_loc;
2588 rtx src, tem;
2589
2590 subst_constants (&SET_SRC (x), insn, map);
2591 src = SET_SRC (x);
2592
2593 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2594 /* By convention, we always use ZERO_EXTRACT in the dest. */
2595 /* || GET_CODE (*dest_loc) == SIGN_EXTRACT */
2596 || GET_CODE (*dest_loc) == SUBREG
2597 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2598 {
2599 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2600 {
2601 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2602 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2603 }
2604 dest_loc = &XEXP (*dest_loc, 0);
2605 }
2606
2607 /* Do substitute in the address of a destination in memory. */
2608 if (GET_CODE (*dest_loc) == MEM)
2609 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2610
2611 /* Check for the case where DEST is a SUBREG, both it and the underlying
2612 register are no wider than one word, and the SUBREG has the wider mode.
2613 In that case, we are really setting the underlying register to the
2614 source converted to the mode of DEST. So indicate that. */
2615 if (GET_CODE (dest) == SUBREG
2616 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2617 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2618 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2619 <= GET_MODE_SIZE (GET_MODE (dest)))
2620 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2621 src)))
2622 src = tem, dest = SUBREG_REG (dest);
2623
2624 /* If storing a recognizable value, save it for later recording. */
2625 if ((map->num_sets < MAX_RECOG_OPERANDS)
2626 && (CONSTANT_P (src)
2627 || (GET_CODE (src) == REG
2628 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2629 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2630 || (GET_CODE (src) == PLUS
2631 && GET_CODE (XEXP (src, 0)) == REG
2632 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2633 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2634 && CONSTANT_P (XEXP (src, 1)))
2635 || GET_CODE (src) == COMPARE
2636 #ifdef HAVE_cc0
2637 || dest == cc0_rtx
2638 #endif
2639 || (dest == pc_rtx
2640 && (src == pc_rtx || GET_CODE (src) == RETURN
2641 || GET_CODE (src) == LABEL_REF))))
2642 {
2643 /* Normally, this copy won't do anything. But if SRC is a COMPARE,
2644 it will cause us to save the COMPARE with any constants
2645 substituted, which is what we want for later. */
2646 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2647 map->equiv_sets[map->num_sets++].dest = dest;
2648 }
2649
2650 return;
2651 }
2652 }
2653
2654 format_ptr = GET_RTX_FORMAT (code);
2655
2656 /* If the first operand is an expression, save its mode for later. */
2657 if (*format_ptr == 'e')
2658 op0_mode = GET_MODE (XEXP (x, 0));
2659
2660 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2661 {
2662 switch (*format_ptr++)
2663 {
2664 case '0':
2665 break;
2666
2667 case 'e':
2668 if (XEXP (x, i))
2669 subst_constants (&XEXP (x, i), insn, map);
2670 break;
2671
2672 case 'u':
2673 case 'i':
2674 case 's':
2675 case 'w':
2676 break;
2677
2678 case 'E':
2679 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2680 {
2681 int j;
2682 for (j = 0; j < XVECLEN (x, i); j++)
2683 subst_constants (&XVECEXP (x, i, j), insn, map);
2684 }
2685 break;
2686
2687 default:
2688 abort ();
2689 }
2690 }
2691
2692 /* If this is a commutative operation, move a constant to the second
2693 operand unless the second operand is already a CONST_INT. */
2694 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2695 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2696 {
2697 rtx tem = XEXP (x, 0);
2698 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2699 validate_change (insn, &XEXP (x, 1), tem, 1);
2700 }
2701
2702 /* Simplify the expression in case we put in some constants. */
2703 switch (GET_RTX_CLASS (code))
2704 {
2705 case '1':
2706 new = simplify_unary_operation (code, GET_MODE (x),
2707 XEXP (x, 0), op0_mode);
2708 break;
2709
2710 case '<':
2711 {
2712 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2713 if (op_mode == VOIDmode)
2714 op_mode = GET_MODE (XEXP (x, 1));
2715 new = simplify_relational_operation (code, op_mode,
2716 XEXP (x, 0), XEXP (x, 1));
2717 #ifdef FLOAT_STORE_FLAG_VALUE
2718 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2719 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2720 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2721 GET_MODE (x)));
2722 #endif
2723 break;
2724 }
2725
2726 case '2':
2727 case 'c':
2728 new = simplify_binary_operation (code, GET_MODE (x),
2729 XEXP (x, 0), XEXP (x, 1));
2730 break;
2731
2732 case 'b':
2733 case '3':
2734 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2735 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2736 break;
2737 }
2738
2739 if (new)
2740 validate_change (insn, loc, new, 1);
2741 }
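/* Worked example (register numbers invented): if regs 65 and 66 have
   recorded constant equivalents 3 and 4, substituting into
   (plus:SI (reg:SI 65) (reg:SI 66)) gives
   (plus:SI (const_int 3) (const_int 4)), which
   simplify_binary_operation folds to (const_int 7) before
   validate_change queues the replacement.  */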
2742
2743 /* Show that the registers modified no longer contain known constants. We are
2744 called from note_stores with parts of the new insn. */
2745
2746 void
2747 mark_stores (dest, x)
2748 rtx dest;
2749 rtx x;
2750 {
2751 int regno = -1;
2752 enum machine_mode mode;
2753
2754 /* DEST is always the innermost thing set, except in the case of
2755 SUBREGs of hard registers. */
2756
2757 if (GET_CODE (dest) == REG)
2758 regno = REGNO (dest), mode = GET_MODE (dest);
2759 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2760 {
2761 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2762 mode = GET_MODE (SUBREG_REG (dest));
2763 }
2764
2765 if (regno >= 0)
2766 {
2767 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2768 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2769 int i;
2770
2771 for (i = regno; i <= last_reg; i++)
2772 if (i < global_const_equiv_map_size)
2773 global_const_equiv_map[i] = 0;
2774 }
2775 }
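/* Example (target-dependent values assumed): on a machine where
   HARD_REGNO_NREGS (3, DImode) is 2, a store to (reg:DI 3) clears
   global_const_equiv_map[3] and global_const_equiv_map[4], since the
   DImode value occupies both hard registers.  */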
2776 \f
2777 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2778 pointed to by PX, they represent constants in the constant pool.
2779 Replace these with a new memory reference obtained from force_const_mem.
2780 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2781 address of a constant pool entry. Replace them with the address of
2782 a new constant pool entry obtained from force_const_mem. */
2783
2784 static void
2785 restore_constants (px)
2786 rtx *px;
2787 {
2788 rtx x = *px;
2789 int i, j;
2790 char *fmt;
2791
2792 if (x == 0)
2793 return;
2794
2795 if (GET_CODE (x) == CONST_DOUBLE)
2796 {
2797 /* We have to make a new CONST_DOUBLE to ensure that we account for
2798 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2799 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2800 {
2801 REAL_VALUE_TYPE d;
2802
2803 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2804 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
2805 }
2806 else
2807 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2808 VOIDmode);
2809 }
2810
2811 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2812 {
2813 restore_constants (&XEXP (x, 0));
2814 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2815 }
2816 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2817 {
2818 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2819 rtx new = XEXP (SUBREG_REG (x), 0);
2820
2821 restore_constants (&new);
2822 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2823 PUT_MODE (new, GET_MODE (x));
2824 *px = validize_mem (new);
2825 }
2826 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2827 {
2828 restore_constants (&XEXP (x, 0));
2829 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2830 }
2831 else
2832 {
2833 fmt = GET_RTX_FORMAT (GET_CODE (x));
2834 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2835 {
2836 switch (*fmt++)
2837 {
2838 case 'E':
2839 for (j = 0; j < XVECLEN (x, i); j++)
2840 restore_constants (&XVECEXP (x, i, j));
2841 break;
2842
2843 case 'e':
2844 restore_constants (&XEXP (x, i));
2845 break;
2846 }
2847 }
2848 }
2849 }
2850 \f
2851 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2852 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2853 that it points to the node itself, thus indicating that the node is its
2854 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2855 the given node is NULL, recursively descend the decl/block tree which
2856 it is the root of, and for each other ..._DECL or BLOCK node contained
2857 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2858 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2859 values to point to themselves. */
2860
2861 static void
2862 set_block_origin_self (stmt)
2863 register tree stmt;
2864 {
2865 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2866 {
2867 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2868
2869 {
2870 register tree local_decl;
2871
2872 for (local_decl = BLOCK_VARS (stmt);
2873 local_decl != NULL_TREE;
2874 local_decl = TREE_CHAIN (local_decl))
2875 set_decl_origin_self (local_decl); /* Potential recursion. */
2876 }
2877
2878 {
2879 register tree subblock;
2880
2881 for (subblock = BLOCK_SUBBLOCKS (stmt);
2882 subblock != NULL_TREE;
2883 subblock = BLOCK_CHAIN (subblock))
2884 set_block_origin_self (subblock); /* Recurse. */
2885 }
2886 }
2887 }
2888
2889 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2890 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2891 node so that it points to the node itself, thus indicating that the
2892 node represents its own (abstract) origin. Additionally, if the
2893 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2894 the decl/block tree of which the given node is the root, and for
2895 each other ..._DECL or BLOCK node contained therein whose
2896 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2897 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2898 point to themselves. */
2899
2900 static void
2901 set_decl_origin_self (decl)
2902 register tree decl;
2903 {
2904 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2905 {
2906 DECL_ABSTRACT_ORIGIN (decl) = decl;
2907 if (TREE_CODE (decl) == FUNCTION_DECL)
2908 {
2909 register tree arg;
2910
2911 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2912 DECL_ABSTRACT_ORIGIN (arg) = arg;
2913 if (DECL_INITIAL (decl) != NULL_TREE)
2914 set_block_origin_self (DECL_INITIAL (decl));
2915 }
2916 }
2917 }
2918 \f
2919 /* Given a pointer to some BLOCK node, and a boolean value to set the
2920 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2921 the given block, and for all local decls and all local sub-blocks
2922 (recursively) which are contained therein. */
2923
2924 static void
2925 set_block_abstract_flags (stmt, setting)
2926 register tree stmt;
2927 register int setting;
2928 {
2929 BLOCK_ABSTRACT (stmt) = setting;
2930
2931 {
2932 register tree local_decl;
2933
2934 for (local_decl = BLOCK_VARS (stmt);
2935 local_decl != NULL_TREE;
2936 local_decl = TREE_CHAIN (local_decl))
2937 set_decl_abstract_flags (local_decl, setting);
2938 }
2939
2940 {
2941 register tree subblock;
2942
2943 for (subblock = BLOCK_SUBBLOCKS (stmt);
2944 subblock != NULL_TREE;
2945 subblock = BLOCK_CHAIN (subblock))
2946 set_block_abstract_flags (subblock, setting);
2947 }
2948 }
2949
2950 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2951 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2952 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2953 set the abstract flags for all of the parameters, local vars, local
2954 blocks and sub-blocks (recursively) to the same setting. */
2955
2956 void
2957 set_decl_abstract_flags (decl, setting)
2958 register tree decl;
2959 register int setting;
2960 {
2961 DECL_ABSTRACT (decl) = setting;
2962 if (TREE_CODE (decl) == FUNCTION_DECL)
2963 {
2964 register tree arg;
2965
2966 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2967 DECL_ABSTRACT (arg) = setting;
2968 if (DECL_INITIAL (decl) != NULL_TREE)
2969 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2970 }
2971 }
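/* A hedged sketch of the intended use; output_abstract_instance is a
   hypothetical emitter standing in for a debug-info back end, not a
   real GCC function.  */
#if 0
  set_decl_abstract_flags (fndecl, 1);
  output_abstract_instance (fndecl);	/* hypothetical */
  set_decl_abstract_flags (fndecl, 0);
#endif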
2972 \f
2973 /* Output the assembly language code for the function FNDECL
2974 from its DECL_SAVED_INSNS. Used for inline functions that are output
2975 at end of compilation instead of where they came in the source. */
2976
2977 void
2978 output_inline_function (fndecl)
2979 tree fndecl;
2980 {
2981 rtx head;
2982 rtx last;
2983
2984 if (output_bytecode)
2985 {
2986 warning ("`inline' ignored for bytecode output");
2987 return;
2988 }
2989
2990 head = DECL_SAVED_INSNS (fndecl);
2991 current_function_decl = fndecl;
2992
2993 /* This call is only used to initialize global variables. */
2994 init_function_start (fndecl, "lossage", 1);
2995
2996 /* Redo parameter determinations in case the FUNCTION_...
2997 macros took machine-specific actions that need to be redone. */
2998 assign_parms (fndecl, 1);
2999
3000 /* Set stack frame size. */
3001 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3002
3003 restore_reg_data (FIRST_PARM_INSN (head));
3004
3005 stack_slot_list = STACK_SLOT_LIST (head);
3006 forced_labels = FORCED_LABELS (head);
3007
3008 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3009 current_function_calls_alloca = 1;
3010
3011 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3012 current_function_calls_setjmp = 1;
3013
3014 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3015 current_function_calls_longjmp = 1;
3016
3017 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3018 current_function_returns_struct = 1;
3019
3020 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3021 current_function_returns_pcc_struct = 1;
3022
3023 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3024 current_function_needs_context = 1;
3025
3026 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3027 current_function_has_nonlocal_label = 1;
3028
3029 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3030 current_function_returns_pointer = 1;
3031
3032 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3033 current_function_uses_const_pool = 1;
3034
3035 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3036 current_function_uses_pic_offset_table = 1;
3037
3038 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3039 current_function_pops_args = POPS_ARGS (head);
3040
3041 /* This is the only thing that the expand_function_end call which used
3042 to be here actually did, and that call can cause problems. */
3043 immediate_size_expand--;
3044
3045 /* Find last insn and rebuild the constant pool. */
3046 for (last = FIRST_PARM_INSN (head);
3047 NEXT_INSN (last); last = NEXT_INSN (last))
3048 {
3049 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3050 {
3051 restore_constants (&PATTERN (last));
3052 restore_constants (&REG_NOTES (last));
3053 }
3054 }
3055
3056 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3057 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3058
3059 /* We must have already output DWARF debugging information for the
3060 original (abstract) inline function declaration/definition, so
3061 we want to make sure that the debugging information we generate
3062 for this special instance of the inline function refers back to
3063 the information we already generated. To make sure that happens,
3064 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3065 node (and for all of the local ..._DECL nodes which are its children)
3066 so that they all point to themselves. */
3067
3068 set_decl_origin_self (fndecl);
3069
3070 /* We're not deferring this any longer. */
3071 DECL_DEFER_OUTPUT (fndecl) = 0;
3072
3073 /* Compile this function all the way down to assembly code. */
3074 rest_of_compilation (fndecl);
3075
3076 current_function_decl = 0;
3077 }