/* Procedure integration for GNU CC.
   Copyright (C) 1988, 1991 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include <stdio.h>

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "expr.h"
#include "output.h"
#include "integrate.h"
#include "real.h"
#include "function.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
extern int xmalloc ();
extern void free ();

extern struct obstack *function_maybepermanent_obstack;

extern tree pushdecl ();
extern tree poplevel ();

/* Round VALUE up to the next highest integer that is a multiple of
   ALIGN (ALIGN must be a power of 2).  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
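/* A worked example (illustration, not in the original): with ALIGN == 8,
   CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16, while CEIL_ROUND (16, 8)
   == 16, so an already-aligned value is left alone.  */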

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
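/* For instance (illustrative numbers, not from the original): a function
   taking two arguments gets a default budget of 8 * (8 + 2) = 80 insns,
   so the allowance grows with the number of parameters.  */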
\f
/* Save any constant pool constants in an insn.  */
static void save_constants ();

/* Note when parameter registers are the destination of a SET.  */
static void note_modified_parmregs ();

/* Copy an rtx for save_for_inline_copying.  */
static rtx copy_for_inline ();

/* Make copies of MEMs in DECL_RTLs.  */
static void copy_decl_rtls ();

static tree copy_decl_tree ();

/* Return the constant equivalent of a given rtx, or 0 if none.  */
static rtx const_equiv ();

static void integrate_parm_decls ();
static void integrate_decl_tree ();

static void subst_constants ();
static rtx fold_out_const_cc0 ();
\f
/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if ((last && TREE_VALUE (last) != void_type_node)
      || (DECL_ARGUMENTS (fndecl) && DECL_NAME (DECL_ARGUMENTS (fndecl))
          && ! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))),
                       "__builtin_va_alist")))
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  if (current_function_contains_functions)
    return "function with nested functions cannot be inline";

  /* This restriction may be eliminated sometime soon.  But for now, don't
     worry about remapping the static chain.  */
  if (current_function_needs_context)
    return "nested function cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!TREE_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

#if 0
  /* Large stacks are OK now that inlined functions can share them.  */
  /* Don't inline functions with large stack usage,
     since they can make other recursive functions burn up stack.  */
  if (!TREE_INLINE (fndecl) && get_frame_size () > 100)
    return "function stack frame for inlining";
#endif

#if 0
  /* Don't inline functions which do not specify a function prototype and
     have BLKmode argument or take the address of a parameter.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        TREE_ADDRESSABLE (parms) = 1;
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
    }
#endif

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
  if (current_function_returns_pcc_struct)
    return "inline functions not supported for this return value type";

  /* We can't inline functions that return structures of varying size.  */
  if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
    return "function with varying-size return value cannot be inline";

  /* Cannot inline a function with a varying size argument.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
      return "function with varying-size parameter cannot be inline";

  if (!TREE_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn ();
           insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        {
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            ninsns++;
        }

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  /* We cannot inline this function if forced_labels is non-zero.  This
     implies that a label in this function was used as an initializer.
     Because labels can not be duplicated, all labels in the function
     will be renamed when it is inlined.  However, there is no way to find
     and fix all variables initialized with addresses of labels in this
     function, hence inlining is impossible.  */

  if (forced_labels)
    return "function with label addresses used in initializers cannot inline";

  return 0;
}
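
/* A hypothetical use of the result (illustration only; the real caller
   lives elsewhere in the compiler):

     char *msg = function_cannot_inline_p (fndecl);
     if (msg != 0)
       warning_with_decl (fndecl, msg);
     else
       save_for_inline_copying (fndecl);

   On failure the message explains why, and the function is then simply
   compiled normally; warning_with_decl stands in for whatever diagnostic
   routine the caller actually uses.  */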
\f
/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-register to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_for_inline,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* In save_for_inline, nonzero if past the parm-initialization insns.  */
static int in_nonparm_insns;
\f
/* Subroutine for `save_for_inline{copying,nocopy}'.  Performs initialization
   needed to save FNDECL's insns and info for future inline expansion.  */

static rtx
initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
     tree fndecl;
     int min_labelno;
     int max_labelno;
     int max_reg;
     int copy;
{
  int function_flags, i;
  rtvec arg_vector;
  tree parms;

  /* Compute the values of any flags we must restore when inlining this.  */

  function_flags
    = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
       + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
       + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
       + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
       + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
       + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
       + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
       + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
       + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
       + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
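
  /* An explanatory aside (not in the original): each current_function_*
     variable above is either 0 or 1, and each FUNCTION_FLAGS_* constant
     is a distinct bit, so the sum is equivalent to OR-ing together the
     bits for the flags that are set.  expand_inline_function later tests
     these bits with `&' to restore the corresponding globals in the
     function doing the inlining.  */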

  /* Clear out PARMDECL_MAP.  It was allocated in the caller's frame.  */
  bzero (parmdecl_map, max_parm_reg * sizeof (tree));
  arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));

  for (parms = DECL_ARGUMENTS (fndecl), i = 0;
       parms;
       parms = TREE_CHAIN (parms), i++)
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == MEM && copy)
        {
          /* Copy the rtl so that modifications of the addresses
             later in compilation won't affect this arg_vector.
             Virtual register instantiation can screw the address
             of the rtl.  */
          rtx new = copy_rtx (p);

          /* Don't leave the old copy anywhere in this decl.  */
          if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
              || (GET_CODE (DECL_RTL (parms)) == MEM
                  && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
                  && (XEXP (DECL_RTL (parms), 0)
                      == XEXP (DECL_INCOMING_RTL (parms), 0))))
            DECL_INCOMING_RTL (parms) = new;
          DECL_RTL (parms) = new;
        }

      RTVEC_ELT (arg_vector, i) = p;

      if (GET_CODE (p) == REG)
        parmdecl_map[REGNO (p)] = parms;
      /* This flag is cleared later
         if the function ever modifies the value of the parm.  */
      TREE_READONLY (parms) = 1;
    }

  /* Assume we start out in the insns that set up the parameters.  */
  in_nonparm_insns = 0;

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers),
     the first parameter insn of the function,
     the first label used by that function,
     the last label used by that function,
     the highest register number used for parameters,
     the total number of registers used,
     the size of the incoming stack area for parameters,
     the number of bytes popped on return,
     the stack slot list,
     some flags that are used to restore compiler globals,
     the value of current_function_outgoing_args_size,
     the original argument vector,
     and the original DECL_INITIAL.  */

  return gen_inline_header_rtx (NULL, NULL, min_labelno, max_labelno,
                                max_parm_reg, max_reg,
                                current_function_args_size,
                                current_function_pops_args,
                                stack_slot_list, function_flags,
                                current_function_outgoing_args_size,
                                arg_vector, (rtx) DECL_INITIAL (fndecl));
}

/* Subroutine for `save_for_inline{copying,nocopy}'.  Finishes up the
   things that must be done to make FNDECL expandable as an inline function.
   HEAD contains the chain of insns to which FNDECL will expand.  */

static void
finish_inline (fndecl, head)
     tree fndecl;
     rtx head;
{
  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
  TREE_INLINE (fndecl) = 1;
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This function is called when we are going to immediately compile
   the insns for FNDECL.  The insns in maybepermanent_obstack cannot be
   modified by the compilation process, so we copy all of them to
   new storage and consider the new insns to be the insn chain to be
   compiled.  */

void
save_for_inline_copying (fndecl)
     tree fndecl;
{
  rtx first_insn, last_insn, insn;
  rtx head, copy;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;

  /* Make and emit a return-label if we have not already done so.
     Do this before recording the bounds on label numbers.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);

  if (current_function_uses_const_pool)
    {
      /* Replace any constant pool references with the actual constant.  We
         will put the constants back in the copy made below.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
          {
            save_constants (&PATTERN (insn));
            if (REG_NOTES (insn))
              save_constants (&REG_NOTES (insn));
          }

      /* Clear out the constant pool so that we can recreate it with the
         copied constants below.  */
      init_const_rtx_hash_table ();
      clear_const_double_mem ();
    }

  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain of insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (function_maybepermanent_obstack,
                                     regno_reg_rtx[i], len);

  bcopy (reg_map + LAST_VIRTUAL_REGISTER + 1,
         regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1,
         (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero (insn_map, max_uid * sizeof (rtx));

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
     (the former occurs when a variable has its address taken)
     since these may be shared and can be changed by virtual
     register instantiation.  DECL_RTL values for our arguments
     have already been copied by initialize_for_inline.  */
  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
    if (GET_CODE (regno_reg_rtx[i]) == MEM)
      XEXP (regno_reg_rtx[i], 0)
        = copy_for_inline (XEXP (regno_reg_rtx[i], 0));

  /* Copy the tree of subblocks of the function, and the decls in them.
     We will use the copy for compiling this function, then restore the
     original subblocks and decls for use when inlining this function.

     Several parts of the compiler modify BLOCK trees.  In particular,
     instantiate_virtual_regs will instantiate any virtual regs
     mentioned in the DECL_RTLs of the decls, and loop
     unrolling will replicate any BLOCK trees inside an unrolled loop.

     The modified subblocks or DECL_RTLs would be incorrect for the original
     rtl which we will use for inlining.  The rtl might even contain
     pseudoregs whose space has been freed.  */

  DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));

  /* Now copy each DECL_RTL which is a MEM,
     so it is safe to modify their addresses.  */
  copy_decl_rtls (DECL_INITIAL (fndecl));

  /* Now copy the chain of insns.  Do this in two passes.  The first pass
     copies each insn itself and its body; the second pass copies the
     REG_NOTES.  This is because a REG_NOTE may have a forward pointer to
     another insn.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;

      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          break;

        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          copy = rtx_alloc (GET_CODE (insn));
          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          LABEL_NAME (copy) = LABEL_NAME (insn);
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  /* Now copy the REG_NOTES.  */
  for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && insn_map[INSN_UID (insn)])
      REG_NOTES (insn_map[INSN_UID (insn)])
        = copy_for_inline (REG_NOTES (insn));

  NEXT_INSN (last_insn) = NULL;

  finish_inline (fndecl, head);

  set_new_first_and_last_insn (first_insn, last_insn);
}

/* Make a copy of the entire tree of blocks BLOCK, and return it.  */

static tree
copy_decl_tree (block)
     tree block;
{
  tree t, vars, subblocks;

  vars = copy_list (BLOCK_VARS (block));
  subblocks = 0;

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree copy = copy_decl_tree (t);
      TREE_CHAIN (copy) = subblocks;
      subblocks = copy;
    }

  t = copy_node (block);
  BLOCK_VARS (t) = vars;
  BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
  return t;
}

/* Copy DECL_RTLs in all decls in the given BLOCK node.  */

static void
copy_decl_rtls (block)
     tree block;
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
      DECL_RTL (t) = copy_for_inline (DECL_RTL (t));

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    copy_decl_rtls (t);
}

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.

   This routine need not copy any insns because we are not going
   to immediately compile the insns in the insn chain.  There
   are two cases when we would compile the insns for FNDECL:
   (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
   be output at the end of other compilation, because somebody took
   its address.  In the first case, the insns of FNDECL are copied
   as it is expanded inline, so FNDECL's saved insns are not
   modified.  In the second case, FNDECL is used for the last time,
   so modifying the rtl is not a problem.

   ??? Actually, we do not verify that FNDECL is not inline expanded
   by other functions which must also be written down at the end
   of compilation.  We could set flag_no_inline to nonzero when
   the time comes to write down such functions.  */

void
save_for_inline_nocopy (fndecl)
     tree fndecl;
{
  rtx insn;
  rtx head, copy;
  tree parms;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;
  rtx first_nonparm_insn;
  int function_flags;

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
     Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
     for the parms, prior to elimination of virtual registers.
     These values are needed for substituting parms properly.  */

  max_parm_reg = max_parm_reg_num ();
  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  head = initialize_for_inline (fndecl, get_first_label_num (),
                                max_label_num (), max_reg_num (), 0);

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();

  /* Get the insn which signals the end of parameter setup code.  */
  first_nonparm_insn = get_first_nonparm_insn ();

  /* Now just scan the chain of insns to see what happens to our
     PARM_DECLs.  If a PARM_DECL is used but never modified, we
     can substitute its rtl directly when expanding inline (and
     perform constant folding when its incoming value is constant).
     Otherwise, we have to copy its value into a new register and track
     the new register's life.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      if (insn == first_nonparm_insn)
        in_nonparm_insns = 1;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          if (current_function_uses_const_pool)
            {
              /* Replace any constant pool references with the actual constant.
                 We will put the constant back if we need to write the
                 function out after all.  */
              save_constants (&PATTERN (insn));
              if (REG_NOTES (insn))
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}
\f
/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, mode, and has
   RTX_INTEGRATED_P set.  */
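
/* For instance (an illustrative sketch, not from the original source):
   a pool reference such as

     (mem:SI (symbol_ref:SI ("*.LC0")))

   whose SYMBOL_REF satisfies CONSTANT_POOL_ADDRESS_P would be replaced
   by a (const:SI ...) holding the pool constant itself, and a bare pool
   SYMBOL_REF would become an (address:SI ...); both carry
   RTX_INTEGRATED_P so that copy_for_inline can recognize them and
   rebuild the pool entries later.  */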

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */

      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }

  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}
\f
/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return immed_real_const_1 (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
                                   VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          rtx new
            = force_const_mem (GET_MODE (SUBREG_REG (x)),
                               copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;

    case ADDRESS:
      /* If this is not a saved constant pool address, something is
         wrong.  Otherwise, return the constant pool address.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      return XEXP (force_const_mem (GET_MODE (x),
                                    copy_for_inline (XEXP (x, 0))), 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address, into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this MEM
         as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                         == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;

    case LABEL_REF:
      {
        /* Must point to the new insn.  */
        return gen_rtx (LABEL_REF, GET_MODE (orig),
                        label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      }

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg,
         clear its TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
                   + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
\f
#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER)
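
/* Illustrative note (not in the original): FIXED_BASE_PLUS_P matches an
   address such as

     (plus (reg:SI virtual-stack-vars) (const_int 8))

   i.e. a constant offset from one of the virtual frame registers.  Such
   an address stays fixed for the life of the function, so it is safe to
   record as a constant equivalent in const_equiv_map.  */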

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

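/* A hypothetical call site (illustration only; the real caller is the
   expander for CALL_EXPRs):

     temp = expand_inline_function (fndecl, actparms, target, ignore,
                                    TREE_TYPE (exp), structure_value_addr);
     if (temp == (rtx)-1)
       ... fall back to emitting an ordinary call ...
     else
       return temp;

   The names actparms and exp are placeholders for whatever the caller
   has in hand.  */
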
rtx
expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  tree *arg_trees;
  rtx *arg_vals;
  rtx insn;
  int max_regno;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx local_return_label = 0;
  rtx loc;
  rtx temp;
  struct inline_remap *map;
  rtx cc0_insn = 0;
  rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);

  /* Allow for equivalences of the pseudos we make for virtual fp and ap.  */
  max_regno = MAX_REGNUM (header) + 3;
  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* We expect PARMS to have the right length; don't crash if not.  */
  if (list_length (parms) != nargs)
    return (rtx)-1;
  /* Also check that the parms' types match.  Since the appropriate
     conversions or default promotions have already been applied,
     the machine modes should match exactly.  */
  for (formal = DECL_ARGUMENTS (fndecl),
       actual = parms;
       formal;
       formal = TREE_CHAIN (formal),
       actual = TREE_CHAIN (actual))
    {
      tree arg = TREE_VALUE (actual);
      enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
      if (mode != TYPE_MODE (TREE_TYPE (arg)))
        return (rtx)-1;
      /* If they are block mode, the types should match exactly.
         They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
         which could happen if the parameter has incomplete type.  */
      if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
        return (rtx)-1;
    }

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) > 0)
    emit_note (NOTE_SOURCE_FILE (parm_insns), NOTE_LINE_NUMBER (parm_insns));

  /* Expand the function arguments.  Do this first so that any
     new registers get created before we allocate the maps.  */

  arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
  arg_trees = (tree *) alloca (nargs * sizeof (tree));

  for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
       formal;
       formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
    {
      /* Actual parameter, converted to the type of the argument within the
         function.  */
      tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
      /* Mode of the variable used within the function.  */
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
      /* Where parameter is located in the function.  */
      rtx copy;

      emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));

      arg_trees[i] = arg;
      loc = RTVEC_ELT (arg_vector, i);

      /* If this is an object passed by invisible reference, we copy the
         object into a stack slot and save its address.  If this will go
         into memory, we do nothing now.  Otherwise, we just expand the
         argument.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
          rtx stack_slot
            = assign_stack_temp (mode, int_size_in_bytes (TREE_TYPE (arg)), 1);

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
        }
      else if (GET_CODE (loc) != MEM)
        arg_vals[i] = expand_expr (arg, 0, mode, EXPAND_SUM);
      else
        arg_vals[i] = 0;

      if (arg_vals[i] != 0
          && (! TREE_READONLY (formal)
              /* If the parameter is not read-only, copy our argument through
                 a register.  Also, we cannot use ARG_VALS[I] if it overlaps
                 TARGET in any way.  In the inline function, they will likely
                 be two different pseudos, and `safe_from_p' will make all
                 sorts of smart assumptions about their not conflicting.
                 But if ARG_VALS[I] overlaps TARGET, these assumptions are
                 wrong, so put ARG_VALS[I] into a fresh register.  */
              || (target != 0
                  && (GET_CODE (arg_vals[i]) == REG
                      || GET_CODE (arg_vals[i]) == SUBREG
                      || GET_CODE (arg_vals[i]) == MEM)
                  && reg_overlap_mentioned_p (arg_vals[i], target))))
        arg_vals[i] = copy_to_mode_reg (mode, arg_vals[i]);
    }
1181 /* Allocate the structures we use to remap things. */
1182
1183 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1184 map->fndecl = fndecl;
1185
1186 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1187 bzero (map->reg_map, max_regno * sizeof (rtx));
1188
1189 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1190 map->label_map -= min_labelno;
1191
1192 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1193 bzero (map->insn_map, INSN_UID (header) * sizeof (rtx));
1194 map->min_insnno = 0;
1195 map->max_insnno = INSN_UID (header);
1196
1197 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1198 be large enough for all our pseudos. This is the number we are currently
c66e0741
RK
1199 using plus the number in the called routine, plus 15 for each arg,
1200 five to compute the virtual frame pointer, and five for the return value.
1201 This should be enough for most cases. We do not reference entries
1202 outside the range of the map.
1203
1204 ??? These numbers are quite arbitrary and were obtained by
1205 experimentation. At some point, we should try to allocate the
1206 table after all the parameters are set up so we an more accurately
1207 estimate the number of pseudos we will need. */
1208
1209 map->const_equiv_map_size
1210 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
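
  /* A worked example of the sizing (illustrative numbers only): if the
     inlining function currently uses 200 pseudos, the callee contributes
     another 50 beyond FIRST_PSEUDO_REGISTER, and there are two arguments,
     the table gets 200 + 50 + 30 + 10 = 290 slots; the final 10 covers
     the frame-pointer computation and the return value mentioned in the
     comment above.  */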

  map->const_equiv_map
    = (rtx *) alloca (map->const_equiv_map_size * sizeof (rtx));
  bzero (map->const_equiv_map, map->const_equiv_map_size * sizeof (rtx));

  map->const_age_map
    = (unsigned *) alloca (map->const_equiv_map_size * sizeof (unsigned));
  bzero (map->const_age_map, map->const_equiv_map_size * sizeof (unsigned));
  map->const_age = 0;

  /* Record the current insn in case we have to set up pointers to frame
     and argument memory blocks.  */
  map->insns_at_start = get_last_insn ();

  /* Update the outgoing argument size to allow for those in the inlined
     function.  */
  if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
    current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);

  /* If the inline function needs to make PIC references, that means
     that this function's PIC offset table must be used.  */
  if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
    current_function_uses_pic_offset_table = 1;

  /* Process each argument.  For each, set up things so that the function's
     reference to the argument will refer to the argument being passed.
     We only replace REG with REG here.  Any simplifications are done
     via const_equiv_map.

     We make two passes: In the first, we deal with parameters that will
     be placed into registers, since we need to ensure that the allocated
     register number fits in const_equiv_map.  Then we store all non-register
     parameters into their memory location.  */

  for (i = 0; i < nargs; i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      /* There are three cases, each handled separately.  */
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          /* This must be an object passed by invisible reference (it could
             also be a variable-sized object, but we forbid inlining functions
             with variable-sized arguments).  COPY is the address of the
             actual value (this computation will cause it to be copied).  We
             map that address for the register, noting the actual address as
             an equivalent in case it can be substituted into the insns.  */

          if (GET_CODE (copy) != REG)
            {
              temp = copy_addr_to_reg (copy);
              if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (XEXP (loc, 0))] = copy;
        }
      else if (GET_CODE (loc) == MEM)
        {
          /* This is the case of a parameter that lives in memory.
             It will live in the block we allocate in the called routine's
             frame that simulates the incoming argument area.  Do nothing
             now; we will call store_expr later.  */
          ;
        }
      else if (GET_CODE (loc) == REG)
        {
          /* This is the good case where the parameter is in a register.
             If it is read-only and our argument is a constant, set up the
             constant equivalence.  */
          if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
            {
              temp = copy_to_mode_reg (GET_MODE (loc), copy);
              if (CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
                {
                  map->const_equiv_map[REGNO (temp)] = copy;
                  map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
                }
              copy = temp;
            }
          map->reg_map[REGNO (loc)] = copy;
        }
      else
        abort ();

      /* Free any temporaries we made setting up this parameter.  */
      free_temp_slots ();
    }

  /* Now do the parameters that will be placed in memory.  */

  for (formal = DECL_ARGUMENTS (fndecl), i = 0;
       formal; formal = TREE_CHAIN (formal), i++)
    {
      rtx copy = arg_vals[i];

      loc = RTVEC_ELT (arg_vector, i);

      if (GET_CODE (loc) == MEM
          /* Exclude case handled above.  */
          && ! (GET_CODE (XEXP (loc, 0)) == REG
                && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
        {
          emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));

          /* Compute the address in the area we reserved and store the
             value there.  */
          temp = copy_rtx_and_substitute (loc, map);
          subst_constants (&temp, 0, map);
          apply_change_group ();
          if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
            temp = change_address (temp, VOIDmode, XEXP (temp, 0));
          store_expr (arg_trees[i], temp, 0);

          /* Free any temporaries we made setting up this parameter.  */
          free_temp_slots ();
        }
    }

  /* Deal with the places that the function puts its result.
     We are driven by what is placed into DECL_RESULT.

     Initially, we assume that we don't need any special handling for
     REG_FUNCTION_RETURN_VALUE_P.  */

  map->inline_target = 0;
  loc = DECL_RTL (DECL_RESULT (fndecl));
  if (TYPE_MODE (type) == VOIDmode)
    /* There is no return value to worry about.  */
    ;
  else if (GET_CODE (loc) == MEM)
    {
      if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
        abort ();

      /* Pass the function the address in which to return a structure value.
         Note that a constructor can cause someone to call us with
         STRUCTURE_VALUE_ADDR, but the initialization takes place
         via the first parameter, rather than the struct return address.

         We have two cases: If the address is a simple register indirect,
         use the mapping mechanism to point that register to our structure
         return address.  Otherwise, store the structure return value into
         the place that it will be referenced from.  */

      if (GET_CODE (XEXP (loc, 0)) == REG)
        {
          temp = force_reg (Pmode, structure_value_addr);
          map->reg_map[REGNO (XEXP (loc, 0))] = temp;
          if (CONSTANT_P (structure_value_addr)
              || (GET_CODE (structure_value_addr) == PLUS
                  && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
                  && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
            {
              map->const_equiv_map[REGNO (temp)] = structure_value_addr;
              map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
            }
        }
      else
        {
          temp = copy_rtx_and_substitute (loc, map);
          subst_constants (&temp, 0, map);
          apply_change_group ();
          emit_move_insn (temp, structure_value_addr);
        }
    }
  else if (ignore)
    /* We will ignore the result value, so don't look at its structure.
       Note that preparations for an aggregate return value
       do need to be made (above) even if it will be ignored.  */
    ;
  else if (GET_CODE (loc) == REG)
    {
      /* The function returns an object in a register and we use the return
         value.  Set up our target for remapping.  */

      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).  */
      enum machine_mode arriving_mode
        = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
      rtx reg_to_map;

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target == 0 || GET_CODE (target) != REG
          || GET_MODE (target) != departing_mode)
        target = gen_reg_rtx (departing_mode);

      /* If function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
      else
        reg_to_map = target;

      /* Usually, the result value is the machine's return register.
         Sometimes it may be a pseudo.  Handle both cases.  */
      if (REG_FUNCTION_VALUE_P (loc))
        map->inline_target = reg_to_map;
      else
        map->reg_map[REGNO (loc)] = reg_to_map;
    }

  /* Make new label equivalences for the labels in the called function.  */
  for (i = min_labelno; i < max_labelno; i++)
    map->label_map[i] = gen_label_rtx ();

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Save a copy of the location of const_equiv_map for mark_stores, called
     via note_stores.  */
  global_const_equiv_map = map->const_equiv_map;

  /* Now copy the insns one by one.  Do this in two passes, first the insns and
     then their REG_NOTES, just like save_for_inline.  */

  /* This loop is very similar to the loop in copy_loop_body in unroll.c.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern;

      map->orig_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            /* The (USE (REG n)) at return from the function should
               be ignored since we are changing (REG n) into
               inline_target.  */
            break;

          /* Ignore setting a function value that we don't want to use.  */
          if (map->inline_target == 0
              && GET_CODE (pattern) == SET
              && GET_CODE (SET_DEST (pattern)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
            break;

          copy = emit_insn (copy_rtx_and_substitute (pattern, map));
          /* REG_NOTES will be copied later.  */

#ifdef HAVE_cc0
          /* If this insn is setting CC0, it may need to look at
             the insn that uses CC0 to see what type of insn it is.
             In that case, the call to recog via validate_change will
             fail.  So don't substitute constants here.  Instead,
             do it when we emit the following insn.

             For example, see the pyr.md file.  That machine has signed and
             unsigned compares.  The compare patterns must check the
             following branch insn to see what kind of compare to emit.

             If the previous insn set CC0, substitute constants on it as
             well.  */
          if (sets_cc0_p (PATTERN (copy)) != 0)
            cc0_insn = copy;
          else
            {
              if (cc0_insn)
                try_constants (cc0_insn, map);
              cc0_insn = 0;
              try_constants (copy, map);
            }
#else
          try_constants (copy, map);
#endif
          break;

        case JUMP_INSN:
          if (GET_CODE (PATTERN (insn)) == RETURN)
            {
              if (local_return_label == 0)
                local_return_label = gen_label_rtx ();
              pattern = gen_jump (local_return_label);
            }
          else
            pattern = copy_rtx_and_substitute (PATTERN (insn), map);

          copy = emit_jump_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* If this used to be a conditional jump insn but whose branch
             direction is now known, we must do something special.  */
          if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
            {
#ifdef HAVE_cc0
              /* The previous insn set cc0 for us.  So delete it.  */
              delete_insn (PREV_INSN (copy));
#endif

              /* If this is now a no-op, delete it.  */
              if (map->last_pc_value == pc_rtx)
                {
                  delete_insn (copy);
                  copy = 0;
                }
              else
                /* Otherwise, this is an unconditional jump so we must put
                   a BARRIER after it.  We could do some dead code elimination
                   here, but jump.c will do it just as well.  */
                emit_barrier ();
            }
          break;

        case CALL_INSN:
          pattern = copy_rtx_and_substitute (PATTERN (insn), map);
          copy = emit_call_insn (pattern);

#ifdef HAVE_cc0
          if (cc0_insn)
            try_constants (cc0_insn, map);
          cc0_insn = 0;
#endif
          try_constants (copy, map);

          /* Be lazy and assume CALL_INSNs clobber all hard registers.  */
          for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
            map->const_equiv_map[i] = 0;
          break;

        case CODE_LABEL:
          copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
          LABEL_NAME (copy) = LABEL_NAME (insn);
          map->const_age++;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;

        case NOTE:
          /* It is important to discard function-end and function-beg notes,
             so we have only one of each in the current function.
             Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
             deleted these in the copy used for continuing compilation,
             not the copy used for inlining).  */
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
            copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
          else
            copy = 0;
          break;

        default:
          abort ();
          break;
        }

      if (copy)
        RTX_INTEGRATED_P (copy) = 1;

      map->insn_map[INSN_UID (insn)] = copy;
    }

  /* Now copy the REG_NOTES.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
        && map->insn_map[INSN_UID (insn)])
      REG_NOTES (map->insn_map[INSN_UID (insn)])
        = copy_rtx_and_substitute (REG_NOTES (insn), map);

  if (local_return_label)
    emit_label (local_return_label);

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  Set
     up things so that lookup_static_chain knows to interpret registers
     in SAVE_EXPRs for TYPE_SIZEs as local.  */

  inline_function_decl = fndecl;
  integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map, 0);
  integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
  inline_function_decl = 0;

  /* End the scope containing the copied formal parameter variables.  */

  expand_end_bindings (getdecls (), 1, 1);
  poplevel (1, 1, 0);
  poplevel (0, 0, 0);
  emit_line_note (input_filename, lineno);

  if (structure_value_addr)
    return gen_rtx (MEM, TYPE_MODE (type),
                    memory_address (TYPE_MODE (type), structure_value_addr));
  return target;
}
1625\f
1626/* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1627 push all of those decls and give each one the corresponding home. */
1628
1629static void
1630integrate_parm_decls (args, map, arg_vector)
1631 tree args;
1632 struct inline_remap *map;
1633 rtvec arg_vector;
1634{
1635 register tree tail;
1636 register int i;
1637
1638 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1639 {
1640 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1641 TREE_TYPE (tail));
1642 rtx new_decl_rtl
1643 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1644
1645 /* These args would always appear unused, if not for this. */
1646 TREE_USED (decl) = 1;
1647 /* Prevent warning for shadowing with these. */
1648 DECL_FROM_INLINE (decl) = 1;
1649 pushdecl (decl);
1650 /* Fully instantiate the address with the equivalent form so that the
1651 debugging information contains the actual register, instead of the
1652 virtual register. Do this by not passing an insn to
1653 subst_constants. */
1654 subst_constants (&new_decl_rtl, 0, map);
1655 apply_change_group ();
1656 DECL_RTL (decl) = new_decl_rtl;
1657 }
1658}
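
/* Illustrative sketch (the function and register numbers are hypothetical):
   for `inline int sq (int n)', the loop above rebuilds `n' as a
   caller-local VAR_DECL whose DECL_RTL is the remapped home from
   ARG_VECTOR; e.g. (reg:SI 61) in the saved inline body becomes
   (reg:SI 75) in the current function, so debugging output names a
   register that really exists in the caller.  */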
1659
1660/* Given a BLOCK node LET, push decls and levels so as to construct in the
1661 current function a tree of contexts isomorphic to the one that is given.
1662
1663 LEVEL indicates how far down into the BLOCK tree is the node we are
1664 currently traversing. It is always zero for the initial call.
1665
 1666   MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1667 registers used in the DECL_RTL field should be remapped. If it is zero,
1668 no mapping is necessary.
1669
1670 FUNCTIONBODY indicates whether the top level block tree corresponds to
1671 a function body. This is identical in meaning to the functionbody
1672 argument of poplevel. */
1673
1674static void
1675integrate_decl_tree (let, level, map, functionbody)
1676 tree let;
1677 int level;
1678 struct inline_remap *map;
1679 int functionbody;
1680{
1681 tree t, node;
1682
1683 pushlevel (0);
1684
1685 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1686 {
1687 tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
1688 DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
1689 DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
1690 if (! functionbody && DECL_RTL (t) != 0)
1691 {
1692 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1693 /* Fully instantiate the address with the equivalent form so that the
1694 debugging information contains the actual register, instead of the
1695 virtual register. Do this by not passing an insn to
1696 subst_constants. */
1697 subst_constants (&DECL_RTL (d), 0, map);
1698 apply_change_group ();
1699 }
1700 else if (DECL_RTL (t))
1701 DECL_RTL (d) = copy_rtx (DECL_RTL (t));
1702 TREE_EXTERNAL (d) = TREE_EXTERNAL (t);
1703 TREE_STATIC (d) = TREE_STATIC (t);
1704 TREE_PUBLIC (d) = TREE_PUBLIC (t);
1705 TREE_CONSTANT (d) = TREE_CONSTANT (t);
1706 TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
1707 TREE_READONLY (d) = TREE_READONLY (t);
1708 TREE_SIDE_EFFECTS (d) = TREE_SIDE_EFFECTS (t);
1709 /* These args would always appear unused, if not for this. */
1710 TREE_USED (d) = 1;
1711 /* Prevent warning for shadowing with these. */
1712 DECL_FROM_INLINE (d) = 1;
1713 pushdecl (d);
1714 }
1715
1716 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1717 integrate_decl_tree (t, level + 1, map, functionbody);
1718
1719 node = poplevel (level > 0, 0, level == 0 && functionbody);
1720 if (node)
1721 TREE_USED (node) = TREE_USED (let);
1722}
1723\f
1724/* Create a new copy of an rtx.
1725 Recursively copies the operands of the rtx,
1726 except for those few rtx codes that are sharable.
1727
1728 We always return an rtx that is similar to that incoming rtx, with the
1729 exception of possibly changing a REG to a SUBREG or vice versa. No
1730 rtl is ever emitted.
1731
1732 Handle constants that need to be placed in the constant pool by
1733 calling `force_const_mem'. */
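
/* A worked example (register numbers are hypothetical): copying
   (set (reg:SI 60) (plus:SI (reg:SI 61) (const_int 4)))
   when MAP->reg_map[61] is already (reg:SI 80) and pseudo 60 is not yet
   mapped yields
   (set (reg:SI 81) (plus:SI (reg:SI 80) (const_int 4))),
   where (reg:SI 81) is a fresh pseudo recorded in MAP->reg_map[60].  */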
1734
1735rtx
1736copy_rtx_and_substitute (orig, map)
1737 register rtx orig;
1738 struct inline_remap *map;
1739{
1740 register rtx copy, temp;
1741 register int i, j;
1742 register RTX_CODE code;
1743 register enum machine_mode mode;
1744 register char *format_ptr;
1745 int regno;
1746
1747 if (orig == 0)
1748 return 0;
1749
1750 code = GET_CODE (orig);
1751 mode = GET_MODE (orig);
1752
1753 switch (code)
1754 {
1755 case REG:
1756 /* If the stack pointer register shows up, it must be part of
1757 stack-adjustments (*not* because we eliminated the frame pointer!).
1758 Small hard registers are returned as-is. Pseudo-registers
1759 go through their `reg_map'. */
1760 regno = REGNO (orig);
1761 if (regno <= LAST_VIRTUAL_REGISTER)
1762 {
1763 /* Some hard registers are also mapped,
1764 but others are not translated. */
1765 if (map->reg_map[regno] != 0)
1766 return map->reg_map[regno];
1767
1768 /* If this is the virtual frame pointer, make space in current
1769 function's stack frame for the stack frame of the inline function.
1770
1771 Copy the address of this area into a pseudo. Map
1772 virtual_stack_vars_rtx to this pseudo and set up a constant
1773 equivalence for it to be the address. This will substitute the
1774 address into insns where it can be substituted and use the new
1775 pseudo where it can't. */
1776 if (regno == VIRTUAL_STACK_VARS_REGNUM)
1777 {
1778 rtx loc, seq;
1779 int size = DECL_FRAME_SIZE (map->fndecl);
1780 int rounded;
1781
1782 start_sequence ();
1783 loc = assign_stack_temp (BLKmode, size, 1);
1784 loc = XEXP (loc, 0);
1785#ifdef FRAME_GROWS_DOWNWARD
1786 /* In this case, virtual_stack_vars_rtx points to one byte
1787 higher than the top of the frame area. So compute the offset
1788 to one byte higher than our substitute frame.
1789 Keep the fake frame pointer aligned like a real one. */
1790 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1791 loc = plus_constant (loc, rounded);
1792#endif
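	      /* Worked example (illustrative numbers): under
		 FRAME_GROWS_DOWNWARD, with size == 20 and
		 BIGGEST_ALIGNMENT == 64 bits,
		 CEIL_ROUND (20, 8) == (20 + 7) & ~7 == 24, so LOC points
		 one byte past a 24-byte substitute frame aligned like a
		 real frame pointer.  */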
1793 map->reg_map[regno] = temp = force_operand (loc, 0);
1794 map->const_equiv_map[REGNO (temp)] = loc;
1795 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1796
1797 seq = gen_sequence ();
1798 end_sequence ();
1799 emit_insn_after (seq, map->insns_at_start);
5c23c401 1800 return temp;
1801 }
1802 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
1803 {
1804 /* Do the same for a block to contain any arguments referenced
1805 in memory. */
1806 rtx loc, seq;
1807 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
1808
1809 start_sequence ();
1810 loc = assign_stack_temp (BLKmode, size, 1);
1811 loc = XEXP (loc, 0);
1812 map->reg_map[regno] = temp = force_operand (loc, 0);
1813 map->const_equiv_map[REGNO (temp)] = loc;
1814 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1815
1816 seq = gen_sequence ();
1817 end_sequence ();
1818 emit_insn_after (seq, map->insns_at_start);
5c23c401 1819 return temp;
1820 }
1821 else if (REG_FUNCTION_VALUE_P (orig))
1822 {
1823 /* This is a reference to the function return value. If
1824 the function doesn't have a return value, error. If the
1825 mode doesn't agree, make a SUBREG. */
1826 if (map->inline_target == 0)
1827 /* Must be unrolling loops or replicating code if we
1828 reach here, so return the register unchanged. */
1829 return orig;
1830 else if (mode != GET_MODE (map->inline_target))
1831 return gen_rtx (SUBREG, mode, map->inline_target, 0);
1832 else
1833 return map->inline_target;
1834 }
1835 return orig;
1836 }
1837 if (map->reg_map[regno] == NULL)
1838 {
1839 map->reg_map[regno] = gen_reg_rtx (mode);
1840 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
1841 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
1842 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
1843 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
1844 }
1845 return map->reg_map[regno];
1846
1847 case SUBREG:
1848 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
1849 /* SUBREG is ordinary, but don't make nested SUBREGs. */
1850 if (GET_CODE (copy) == SUBREG)
1851 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
1852 SUBREG_WORD (orig) + SUBREG_WORD (copy));
1853 else
1854 return gen_rtx (SUBREG, GET_MODE (orig), copy,
1855 SUBREG_WORD (orig));
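
	  /* E.g. (hypothetical pseudos): if ORIG is
	     (subreg:SI (reg:DI 60) 1) and reg 60 is mapped to
	     (subreg:DI (reg:TI 70) 1), the result is
	     (subreg:SI (reg:TI 70) 2); the word numbers add instead of
	     one SUBREG nesting inside another.  */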
1856
1857 case USE:
1858 case CLOBBER:
1859 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
1860 to (use foo). */
1861 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
1862 if (GET_CODE (copy) == SUBREG)
1863 copy = SUBREG_REG (copy);
1864 return gen_rtx (code, VOIDmode, copy);
1865
1866 case CODE_LABEL:
1867 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
1868 = LABEL_PRESERVE_P (orig);
1869 return map->label_map[CODE_LABEL_NUMBER (orig)];
1870
1871 case LABEL_REF:
1872 copy = rtx_alloc (LABEL_REF);
1873 PUT_MODE (copy, mode);
1874 XEXP (copy, 0) = map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
1875 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1876 return copy;
1877
1878 case PC:
1879 case CC0:
1880 case CONST_INT:
1881 return orig;
1882
175160e7 1883 case SYMBOL_REF:
1884 /* Symbols which represent the address of a label stored in the constant
1885 pool must be modified to point to a constant pool entry for the
1886 remapped label. Otherwise, symbols are returned unchanged. */
1887 if (CONSTANT_POOL_ADDRESS_P (orig))
1888 {
1889 rtx constant = get_pool_constant (orig);
1890 if (GET_CODE (constant) == LABEL_REF)
1891 {
1892 copy = rtx_alloc (LABEL_REF);
1893 PUT_MODE (copy, mode);
1894 XEXP (copy, 0)
1895 = map->label_map[CODE_LABEL_NUMBER (XEXP (constant, 0))];
1896 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
1897 copy = force_const_mem (Pmode, copy);
1898 return XEXP (copy, 0);
1899 }
1900 }
1901 return orig;
1902
1903 case CONST_DOUBLE:
 1904      /* We have to make a new copy of this CONST_DOUBLE because we don't want
1905 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
1906 duplicate of a CONST_DOUBLE we have already seen. */
1907 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
1908 {
1909 REAL_VALUE_TYPE d;
1910
1911 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
1912 return immed_real_const_1 (d, GET_MODE (orig));
1913 }
1914 else
1915 return immed_double_const (CONST_DOUBLE_LOW (orig),
1916 CONST_DOUBLE_HIGH (orig), VOIDmode);
1917
1918 case CONST:
1919 /* Make new constant pool entry for a constant
1920 that was in the pool of the inline function. */
1921 if (RTX_INTEGRATED_P (orig))
1922 {
1923 /* If this was an address of a constant pool entry that itself
1924 had to be placed in the constant pool, it might not be a
1925 valid address. So the recursive call below might turn it
1926 into a register. In that case, it isn't a constant any
1927 more, so return it. This has the potential of changing a
 1928	     MEM into a REG, but we'll assume that it is safe. */
1929 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
1930 if (! CONSTANT_P (temp))
1931 return temp;
1932 return validize_mem (force_const_mem (GET_MODE (orig), temp));
1933 }
1934 break;
1935
1936 case ADDRESS:
1937 /* If from constant pool address, make new constant pool entry and
1938 return its address. */
1939 if (! RTX_INTEGRATED_P (orig))
1940 abort ();
1941
1942 temp = force_const_mem (GET_MODE (orig),
1943 copy_rtx_and_substitute (XEXP (orig, 0), map));
1944
1945#if 0
1946 /* Legitimizing the address here is incorrect.
1947
1948 The only ADDRESS rtx's that can reach here are ones created by
1949 save_constants. Hence the operand of the ADDRESS is always legal
1950 in this position of the instruction, since the original rtx without
1951 the ADDRESS was legal.
1952
1953 The reason we don't legitimize the address here is that on the
1954 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
1955 This code forces the operand of the address to a register, which
1956 fails because we can not take the HIGH part of a register.
1957
1958 Also, change_address may create new registers. These registers
1959 will not have valid reg_map entries. This can cause try_constants()
 1960	 to fail because it assumes that all registers in the rtx have valid
1961 reg_map entries, and it may end up replacing one of these new
1962 registers with junk. */
1963
1964 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1965 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
1966#endif
1967
1968 return XEXP (temp, 0);
1969
1970 case ASM_OPERANDS:
1971 /* If a single asm insn contains multiple output operands
1972 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1973 We must make sure that the copied insn continues to share it. */
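      /* E.g. (sketch): an asm with two outputs expands to a PARALLEL of
	 two SETs whose ASM_OPERANDS share one operand vector.  The first
	 ASM_OPERANDS we copy caches its new vectors in MAP (see the end
	 of this function); this test then reuses them for the second.  */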
1974 if (map->orig_asm_operands_vector == XVEC (orig, 3))
1975 {
1976 copy = rtx_alloc (ASM_OPERANDS);
1977 XSTR (copy, 0) = XSTR (orig, 0);
1978 XSTR (copy, 1) = XSTR (orig, 1);
1979 XINT (copy, 2) = XINT (orig, 2);
1980 XVEC (copy, 3) = map->copy_asm_operands_vector;
1981 XVEC (copy, 4) = map->copy_asm_constraints_vector;
1982 XSTR (copy, 5) = XSTR (orig, 5);
1983 XINT (copy, 6) = XINT (orig, 6);
1984 return copy;
1985 }
1986 break;
1987
1988 case CALL:
1989 /* This is given special treatment because the first
1990 operand of a CALL is a (MEM ...) which may get
1991 forced into a register for cse. This is undesirable
1992 if function-address cse isn't wanted or if we won't do cse. */
1993#ifndef NO_FUNCTION_CSE
1994 if (! (optimize && ! flag_no_function_cse))
1995#endif
1996 return gen_rtx (CALL, GET_MODE (orig),
1997 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
1998 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
1999 copy_rtx_and_substitute (XEXP (orig, 1), map));
2000 break;
2001
2002#if 0
2003 /* Must be ifdefed out for loop unrolling to work. */
2004 case RETURN:
2005 abort ();
2006#endif
2007
2008 case SET:
2009 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2010 Don't alter that.
2011 If the nonlocal goto is into the current function,
2012 this will result in unnecessarily bad code, but should work. */
2013 if (SET_DEST (orig) == virtual_stack_vars_rtx
2014 || SET_DEST (orig) == virtual_incoming_args_rtx)
2015 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2016 copy_rtx_and_substitute (SET_SRC (orig), map));
2017 break;
2018
2019 case MEM:
2020 copy = rtx_alloc (MEM);
2021 PUT_MODE (copy, mode);
2022 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2023 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2024 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2025 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2026 return copy;
2027 }
2028
2029 copy = rtx_alloc (code);
2030 PUT_MODE (copy, mode);
2031 copy->in_struct = orig->in_struct;
2032 copy->volatil = orig->volatil;
2033 copy->unchanging = orig->unchanging;
2034
2035 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2036
2037 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2038 {
2039 switch (*format_ptr++)
2040 {
2041 case '0':
2042 break;
2043
2044 case 'e':
2045 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2046 break;
2047
2048 case 'u':
2049 /* Change any references to old-insns to point to the
2050 corresponding copied insns. */
2051 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2052 break;
2053
2054 case 'E':
2055 XVEC (copy, i) = XVEC (orig, i);
2056 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2057 {
2058 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2059 for (j = 0; j < XVECLEN (copy, i); j++)
2060 XVECEXP (copy, i, j)
2061 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2062 }
2063 break;
2064
2065 case 'i':
2066 XINT (copy, i) = XINT (orig, i);
2067 break;
2068
2069 case 's':
2070 XSTR (copy, i) = XSTR (orig, i);
2071 break;
2072
2073 default:
2074 abort ();
2075 }
2076 }
2077
2078 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2079 {
2080 map->orig_asm_operands_vector = XVEC (orig, 3);
2081 map->copy_asm_operands_vector = XVEC (copy, 3);
2082 map->copy_asm_constraints_vector = XVEC (copy, 4);
2083 }
2084
2085 return copy;
2086}
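
/* Typical use, as in the insn-copying loop earlier in this file (a
   sketch only; MAP setup and the surrounding loop are elided).  */
#if 0
      pattern = copy_rtx_and_substitute (PATTERN (insn), map);
      copy = emit_insn (pattern);
      /* Fold in any constant equivalences recorded in MAP.  */
      try_constants (copy, map);
      map->insn_map[INSN_UID (insn)] = copy;
#endif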
2087\f
2088/* Substitute known constant values into INSN, if that is valid. */
2089
2090void
2091try_constants (insn, map)
2092 rtx insn;
2093 struct inline_remap *map;
2094{
2095 int i;
2096
2097 map->num_sets = 0;
2098 subst_constants (&PATTERN (insn), insn, map);
2099
2100 /* Apply the changes if they are valid; otherwise discard them. */
2101 apply_change_group ();
2102
2103 /* Show we don't know the value of anything stored or clobbered. */
2104 note_stores (PATTERN (insn), mark_stores);
2105 map->last_pc_value = 0;
2106#ifdef HAVE_cc0
2107 map->last_cc0_value = 0;
2108#endif
2109
2110 /* Set up any constant equivalences made in this insn. */
2111 for (i = 0; i < map->num_sets; i++)
2112 {
2113 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2114 {
2115 int regno = REGNO (map->equiv_sets[i].dest);
2116
2117 if (map->const_equiv_map[regno] == 0
 2118	      /* The following clause is a hack to make a case work where GNU C++
2119 reassigns a variable to make cse work right. */
2120 || ! rtx_equal_p (map->const_equiv_map[regno],
2121 map->equiv_sets[i].equiv))
2122 {
2123 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2124 map->const_age_map[regno] = map->const_age;
2125 }
2126 }
2127 else if (map->equiv_sets[i].dest == pc_rtx)
2128 map->last_pc_value = map->equiv_sets[i].equiv;
2129#ifdef HAVE_cc0
2130 else if (map->equiv_sets[i].dest == cc0_rtx)
2131 map->last_cc0_value = map->equiv_sets[i].equiv;
2132#endif
2133 }
2134}
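
/* Example (sketch): when the copy of a conditional jump is processed here
   and the tested pseudo has a known constant equivalent, subst_constants
   folds the condition; the SET of pc then records pc_rtx (or a LABEL_REF)
   in map->last_pc_value, which the jump-copying code in this file uses to
   delete the no-op jump or emit a barrier after the unconditional one.  */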
2135\f
2136/* Substitute known constants for pseudo regs in the contents of LOC,
2137 which are part of INSN.
d45cf215 2138 If INSN is zero, the substitution should always be done (this is used to
2139 update DECL_RTL).
2140 These changes are taken out by try_constants if the result is not valid.
2141
2142 Note that we are more concerned with determining when the result of a SET
2143 is a constant, for further propagation, than actually inserting constants
2144 into insns; cse will do the latter task better.
2145
2146 This function is also used to adjust address of items previously addressed
2147 via the virtual stack variable or virtual incoming arguments registers. */
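
/* For instance (register numbers hypothetical): if
   MAP->const_equiv_map[76] is (const_int 4) and its age is current,
   (plus:SI (reg:SI 76) (reg:SI 77)) is rewritten as
   (plus:SI (reg:SI 77) (const_int 4)); the substitution below plus the
   commutative canonicalization near the end of this function.  */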
2148
2149static void
2150subst_constants (loc, insn, map)
2151 rtx *loc;
2152 rtx insn;
2153 struct inline_remap *map;
2154{
2155 rtx x = *loc;
2156 register int i;
2157 register enum rtx_code code;
2158 register char *format_ptr;
2159 int num_changes = num_validated_changes ();
2160 rtx new = 0;
2161 enum machine_mode op0_mode;
2162
2163 code = GET_CODE (x);
2164
2165 switch (code)
2166 {
2167 case PC:
2168 case CONST_INT:
2169 case CONST_DOUBLE:
2170 case SYMBOL_REF:
2171 case CONST:
2172 case LABEL_REF:
2173 case ADDRESS:
2174 return;
2175
2176#ifdef HAVE_cc0
2177 case CC0:
2178 validate_change (insn, loc, map->last_cc0_value, 1);
2179 return;
2180#endif
2181
2182 case USE:
2183 case CLOBBER:
2184 /* The only thing we can do with a USE or CLOBBER is possibly do
2185 some substitutions in a MEM within it. */
2186 if (GET_CODE (XEXP (x, 0)) == MEM)
2187 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2188 return;
2189
2190 case REG:
2191 /* Substitute for parms and known constants. Don't replace
2192 hard regs used as user variables with constants. */
2193 {
2194 int regno = REGNO (x);
c66e0741 2195
175160e7 2196 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
c66e0741 2197 && regno < map->const_equiv_map_size
2198 && map->const_equiv_map[regno] != 0
2199 && map->const_age_map[regno] >= map->const_age)
2200 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2201 return;
2202 }
2203
2204 case SUBREG:
 2205    /* SUBREG is ordinary, but don't make nested SUBREGs; also try to simplify
 2206       constants. */
2207 {
2208 rtx inner = SUBREG_REG (x);
2209 rtx new = 0;
2210
2211 /* We can't call subst_constants on &SUBREG_REG (x) because any
 2212	   constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2213 see what is inside, try to form the new SUBREG and see if that is
2214 valid. We handle two cases: extracting a full word in an
2215 integral mode and extracting the low part. */
2216 subst_constants (&inner, 0, map);
2217
2218 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2219 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2220 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2221 new = operand_subword (inner, SUBREG_WORD (x), 0,
2222 GET_MODE (SUBREG_REG (x)));
2223
2224 if (new == 0 && subreg_lowpart_p (x))
2225 new = gen_lowpart_common (GET_MODE (x), inner);
2226
2227 if (new)
2228 validate_change (insn, loc, new, 1);
2229
2230 return;
2231 }
2232
2233 case MEM:
2234 subst_constants (&XEXP (x, 0), insn, map);
2235
2236 /* If a memory address got spoiled, change it back. */
2237 if (insn != 0 && num_validated_changes () != num_changes
2238 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2239 cancel_changes (num_changes);
2240 return;
2241
2242 case SET:
2243 {
2244 /* Substitute constants in our source, and in any arguments to a
 2245	 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2246 itself. */
2247 rtx *dest_loc = &SET_DEST (x);
2248 rtx dest = *dest_loc;
2249 rtx src, tem;
2250
2251 subst_constants (&SET_SRC (x), insn, map);
2252 src = SET_SRC (x);
2253
2254 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2255 || GET_CODE (*dest_loc) == SIGN_EXTRACT
2256 || GET_CODE (*dest_loc) == SUBREG
2257 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2258 {
2259 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2260 {
2261 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2262 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2263 }
2264 dest_loc = &XEXP (*dest_loc, 0);
2265 }
2266
 2267	/* Check for the case where DEST is a SUBREG, both it and the underlying
 2268	   register are less than one word, and the SUBREG has the wider mode.
 2269	   In that case, we are really setting the underlying register to the
2270 source converted to the mode of DEST. So indicate that. */
2271 if (GET_CODE (dest) == SUBREG
2272 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2273 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2274 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2275 <= GET_MODE_SIZE (GET_MODE (dest)))
2276 && (tem = gen_lowpart_if_possible (GET_MODE (dest), src)))
2277 src = tem, dest = SUBREG_REG (dest);
2278
2279 /* If storing a recognizable value save it for later recording. */
2280 if ((map->num_sets < MAX_RECOG_OPERANDS)
2281 && (CONSTANT_P (src)
2282 || (GET_CODE (src) == PLUS
2283 && GET_CODE (XEXP (src, 0)) == REG
2284 && REGNO (XEXP (src, 0)) >= FIRST_VIRTUAL_REGISTER
2285 && REGNO (XEXP (src, 0)) <= LAST_VIRTUAL_REGISTER
2286 && CONSTANT_P (XEXP (src, 1)))
2287 || GET_CODE (src) == COMPARE
2288#ifdef HAVE_cc0
2289 || dest == cc0_rtx
2290#endif
2291 || (dest == pc_rtx
2292 && (src == pc_rtx || GET_CODE (src) == RETURN
2293 || GET_CODE (src) == LABEL_REF))))
2294 {
2295 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2296 it will cause us to save the COMPARE with any constants
2297 substituted, which is what we want for later. */
2298 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2299 map->equiv_sets[map->num_sets++].dest = dest;
2300 }
2301
2302 return;
2303 }
2304 }
2305
2306 format_ptr = GET_RTX_FORMAT (code);
2307
2308 /* If the first operand is an expression, save its mode for later. */
2309 if (*format_ptr == 'e')
2310 op0_mode = GET_MODE (XEXP (x, 0));
2311
2312 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2313 {
2314 switch (*format_ptr++)
2315 {
2316 case '0':
2317 break;
2318
2319 case 'e':
2320 if (XEXP (x, i))
2321 subst_constants (&XEXP (x, i), insn, map);
2322 break;
2323
2324 case 'u':
2325 case 'i':
2326 case 's':
2327 break;
2328
2329 case 'E':
2330 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2331 {
2332 int j;
2333 for (j = 0; j < XVECLEN (x, i); j++)
2334 subst_constants (&XVECEXP (x, i, j), insn, map);
2335 }
2336 break;
2337
2338 default:
2339 abort ();
2340 }
2341 }
2342
2343 /* If this is a commutative operation, move a constant to the second
2344 operand unless the second operand is already a CONST_INT. */
2345 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2346 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2347 {
2348 rtx tem = XEXP (x, 0);
2349 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2350 validate_change (insn, &XEXP (x, 1), tem, 1);
2351 }
2352
2353 /* Simplify the expression in case we put in some constants. */
2354 switch (GET_RTX_CLASS (code))
2355 {
2356 case '1':
2357 new = simplify_unary_operation (code, GET_MODE (x),
2358 XEXP (x, 0), op0_mode);
2359 break;
2360
2361 case '<':
2362 {
2363 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2364 if (op_mode == VOIDmode)
2365 op_mode = GET_MODE (XEXP (x, 1));
2366 new = simplify_relational_operation (code, op_mode,
2367 XEXP (x, 0), XEXP (x, 1));
2368 break;
2369 }
2370
2371 case '2':
2372 case 'c':
2373 new = simplify_binary_operation (code, GET_MODE (x),
2374 XEXP (x, 0), XEXP (x, 1));
2375 break;
2376
2377 case 'b':
2378 case '3':
2379 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2380 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2381 break;
2382 }
2383
2384 if (new)
2385 validate_change (insn, loc, new, 1);
2386}
2387
 2388/* Show that the registers modified no longer contain known constants. We are
2389 called from note_stores with parts of the new insn. */
2390
2391void
2392mark_stores (dest, x)
2393 rtx dest;
2394 rtx x;
2395{
2396 if (GET_CODE (dest) == SUBREG)
2397 dest = SUBREG_REG (dest);
2398
2399 if (GET_CODE (dest) == REG)
2400 global_const_equiv_map[REGNO (dest)] = 0;
2401}
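
/* E.g. (sketch): for (set (subreg:SI (reg:DI 80) 1) ...) we look through
   the SUBREG and forget any equivalence recorded for pseudo 80, since
   part of its value has changed.  */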
2402\f
2403/* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2404 pointed to by PX, they represent constants in the constant pool.
2405 Replace these with a new memory reference obtained from force_const_mem.
2406 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2407 address of a constant pool entry. Replace them with the address of
2408 a new constant pool entry obtained from force_const_mem. */
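
/* E.g. (sketch): a CONST marked by save_constants becomes a MEM
   addressing a fresh entry in this function's constant pool, and a
   marked ADDRESS becomes just that new entry's address, so the emitted
   body no longer refers to the original function's pool.  */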
2409
2410static void
2411restore_constants (px)
2412 rtx *px;
2413{
2414 rtx x = *px;
2415 int i, j;
2416 char *fmt;
2417
2418 if (x == 0)
2419 return;
2420
2421 if (GET_CODE (x) == CONST_DOUBLE)
2422 {
2423 /* We have to make a new CONST_DOUBLE to ensure that we account for
2424 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2425 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2426 {
2427 REAL_VALUE_TYPE d;
2428
2429 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2430 *px = immed_real_const_1 (d, GET_MODE (x));
2431 }
2432 else
2433 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2434 VOIDmode);
2435 }
2436
2437 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2438 {
2439 restore_constants (&XEXP (x, 0));
2440 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2441 }
2442 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2443 {
2444 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2445 rtx new = XEXP (SUBREG_REG (x), 0);
2446
2447 restore_constants (&new);
2448 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2449 PUT_MODE (new, GET_MODE (x));
2450 *px = validize_mem (new);
2451 }
2452 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2453 {
2454 restore_constants (&XEXP (x, 0));
2455 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
2456 }
2457 else
2458 {
2459 fmt = GET_RTX_FORMAT (GET_CODE (x));
2460 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2461 {
2462 switch (*fmt++)
2463 {
2464 case 'E':
2465 for (j = 0; j < XVECLEN (x, i); j++)
2466 restore_constants (&XVECEXP (x, i, j));
2467 break;
2468
2469 case 'e':
2470 restore_constants (&XEXP (x, i));
2471 break;
2472 }
2473 }
2474 }
2475}
2476\f
2477/* Output the assembly language code for the function FNDECL
2478 from its DECL_SAVED_INSNS. Used for inline functions that are output
2479 at end of compilation instead of where they came in the source. */
2480
2481void
2482output_inline_function (fndecl)
2483 tree fndecl;
2484{
2485 rtx head = DECL_SAVED_INSNS (fndecl);
2486 rtx last;
2487
2488 temporary_allocation ();
2489
2490 current_function_decl = fndecl;
2491
2492 /* This call is only used to initialize global variables. */
2493 init_function_start (fndecl, "lossage", 1);
2494
2495 /* Redo parameter determinations in case the FUNCTION_...
2496 macros took machine-specific actions that need to be redone. */
2497 assign_parms (fndecl, 1);
2498
2499 /* Set stack frame size. */
2500 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
2501
2502 restore_reg_data (FIRST_PARM_INSN (head));
2503
2504 stack_slot_list = STACK_SLOT_LIST (head);
2505
2506 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
2507 current_function_calls_alloca = 1;
2508
2509 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
2510 current_function_calls_setjmp = 1;
2511
2512 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
2513 current_function_calls_longjmp = 1;
2514
2515 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
2516 current_function_returns_struct = 1;
2517
2518 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
2519 current_function_returns_pcc_struct = 1;
2520
2521 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
2522 current_function_needs_context = 1;
2523
2524 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
2525 current_function_has_nonlocal_label = 1;
2526
2527 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
2528 current_function_returns_pointer = 1;
2529
2530 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
2531 current_function_uses_const_pool = 1;
2532
2533 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
2534 current_function_uses_pic_offset_table = 1;
2535
2536 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
2537 current_function_pops_args = POPS_ARGS (head);
2538
2539 /* There is no need to output a return label again. */
2540 return_label = 0;
2541
2542 expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));
2543
2544 /* Find last insn and rebuild the constant pool. */
2545 for (last = FIRST_PARM_INSN (head);
2546 NEXT_INSN (last); last = NEXT_INSN (last))
2547 {
2548 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
2549 {
2550 restore_constants (&PATTERN (last));
2551 restore_constants (&REG_NOTES (last));
2552 }
2553 }
2554
2555 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
2556 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
2557
2558 /* Compile this function all the way down to assembly code. */
2559 rest_of_compilation (fndecl);
2560
2561 current_function_decl = 0;
2562
2563 permanent_allocation ();
2564}