1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 91, 93-97, 1998 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "flags.h"
30 #include "insn-config.h"
31 #include "insn-flags.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "recog.h"
35 #include "integrate.h"
36 #include "real.h"
37 #include "except.h"
38 #include "function.h"
39 #include "toplev.h"
40
41 #include "obstack.h"
42 #define obstack_chunk_alloc xmalloc
43 #define obstack_chunk_free free
44
45 extern struct obstack *function_maybepermanent_obstack;
46
47 /* Round VALUE up to the next highest integer that meets the
48 alignment ALIGN, which must be a power of 2. */
49 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
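/* Illustrative example (not part of the compiler): with ALIGN == 8,
   CEIL_ROUND (13, 8) evaluates to (13 + 7) & ~7 == 16. The bit trick
   is only valid when ALIGN is a power of two. */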
50
51 /* Default max number of insns a function can have and still be inline.
52 This is overridden on RISC machines. */
53 #ifndef INTEGRATE_THRESHOLD
54 /* Inlining small functions might save more space than not inlining at
55 all. Assume 1 instruction for the call and 1.5 insns per argument. */
56 #define INTEGRATE_THRESHOLD(DECL) \
57 (optimize_size \
58 ? (1 + (3 * list_length (DECL_ARGUMENTS (DECL)) / 2)) \
59 : (8 * (8 + list_length (DECL_ARGUMENTS (DECL)))))
60 #endif
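/* Worked example (illustrative): a two-argument function gets a budget
   of 8 * (8 + 2) == 80 insns by default, but only 1 + (3 * 2) / 2 == 4
   insns when optimizing for size. */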
61 \f
62 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
63 static void finish_inline PROTO((tree, rtx));
64 static void adjust_copied_decl_tree PROTO((tree));
65 static tree copy_decl_list PROTO((tree));
66 static tree copy_decl_tree PROTO((tree));
67 static void copy_decl_rtls PROTO((tree));
68 static void save_constants PROTO((rtx *));
69 static void note_modified_parmregs PROTO((rtx, rtx));
70 static rtx copy_for_inline PROTO((rtx));
71 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
72 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
73 static void save_constants_in_decl_trees PROTO ((tree));
74 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
75 static void restore_constants PROTO((rtx *));
76 static void set_block_origin_self PROTO((tree));
77 static void set_decl_origin_self PROTO((tree));
78 static void set_block_abstract_flags PROTO((tree, int));
79
80 void set_decl_abstract_flags PROTO((tree, int));
81 static tree copy_and_set_decl_abstract_origin PROTO((tree));
82 \f
83 /* Returns the Ith entry in the label_map contained in MAP. If the
84 Ith entry has not yet been set, return a fresh label. This function
85 performs a lazy initialization of label_map, thereby avoiding huge memory
86 explosions when the label_map gets very large. */
87
88 rtx
89 get_label_from_map (map, i)
90 struct inline_remap *map;
91 int i;
92 {
93 rtx x = map->label_map[i];
94
95 if (x == NULL_RTX)
96 {
97 push_obstacks_nochange ();
98 end_temporary_allocation ();
99 x = map->label_map[i] = gen_label_rtx();
100 pop_obstacks ();
101 }
102
103 return x;
104 }
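/* A minimal sketch of a typical use, remapping a label reference while
   copying insns; the surrounding code (MAP and ORIG) is assumed here,
   not shown:

   rtx label = get_label_from_map (map,
                                   CODE_LABEL_NUMBER (XEXP (orig, 0)));

   Only labels that are actually referenced ever get allocated. */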
105
106 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
107 is safe and reasonable to integrate into other functions.
108 Nonzero means the value is a warning message with a single %s
109 for the function's name. */
110
111 char *
112 function_cannot_inline_p (fndecl)
113 register tree fndecl;
114 {
115 register rtx insn;
116 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
117 int max_insns = INTEGRATE_THRESHOLD (fndecl);
118 register int ninsns = 0;
119 register tree parms;
120 rtx result;
121
122 /* No inlines with varargs. */
123 if ((last && TREE_VALUE (last) != void_type_node)
124 || current_function_varargs)
125 return "varargs function cannot be inline";
126
127 if (current_function_calls_alloca)
128 return "function using alloca cannot be inline";
129
130 if (current_function_contains_functions)
131 return "function with nested functions cannot be inline";
132
133 if (current_function_cannot_inline)
134 return current_function_cannot_inline;
135
136 /* If it's not even close, don't even look. */
137 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
138 return "function too large to be inline";
139
140 #if 0
141 /* Don't inline functions which do not specify a function prototype and
142 have BLKmode argument or take the address of a parameter. */
143 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
144 {
145 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
146 TREE_ADDRESSABLE (parms) = 1;
147 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
148 return "no prototype, and parameter address used; cannot be inline";
149 }
150 #endif
151
152 /* We can't inline functions that return structures
153 the old-fashioned PCC way, copying into a static block. */
154 if (current_function_returns_pcc_struct)
155 return "inline functions not supported for this return value type";
156
157 /* We can't inline functions that return BLKmode structures in registers. */
158 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
159 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
160 return "inline functions not supported for this return value type";
161
162 /* We can't inline functions that return structures of varying size. */
163 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
164 return "function with varying-size return value cannot be inline";
165
166 /* Cannot inline a function with a varying size argument or one that
167 receives a transparent union. */
168 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
169 {
170 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
171 return "function with varying-size parameter cannot be inline";
172 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
173 return "function with transparent union parameter cannot be inline";
174 }
175
176 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
177 {
178 for (ninsns = 0, insn = get_first_nonparm_insn ();
179 insn && ninsns < max_insns;
180 insn = NEXT_INSN (insn))
181 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
182 ninsns++;
183
184 if (ninsns >= max_insns)
185 return "function too large to be inline";
186 }
187
188 /* We cannot inline this function if forced_labels is non-zero. This
189 implies that a label in this function was used as an initializer.
190 Because labels cannot be duplicated, all labels in the function
191 will be renamed when it is inlined. However, there is no way to find
192 and fix all variables initialized with addresses of labels in this
193 function, hence inlining is impossible. */
194
195 if (forced_labels)
196 return "function with label addresses used in initializers cannot inline";
197
198 /* We cannot inline a nested function that jumps to a nonlocal label. */
199 if (current_function_has_nonlocal_goto)
200 return "function with nonlocal goto cannot be inline";
201
202 /* This is a hack, until the inliner is taught about eh regions at
203 the start of the function. */
204 for (insn = get_insns ();
205 insn
206 && ! (GET_CODE (insn) == NOTE
207 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG);
208 insn = NEXT_INSN (insn))
209 {
210 if (insn && GET_CODE (insn) == NOTE
211 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
212 return "function with complex parameters cannot be inline";
213 }
214
215 /* We can't inline functions that return a PARALLEL rtx. */
216 result = DECL_RTL (DECL_RESULT (fndecl));
217 if (result && GET_CODE (result) == PARALLEL)
218 return "inline functions not supported for this return value type";
219
220 return 0;
221 }
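/* A sketch of the caller's side; rest_of_compilation in toplev.c does
   roughly this (details may differ), with warning_with_decl filling
   the %s in the message with the function's name:

   char *lose = function_cannot_inline_p (decl);
   if (lose != 0 && warn_inline && DECL_INLINE (decl))
     warning_with_decl (decl, lose);
   */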
222 \f
223 /* Variables used within save_for_inline. */
224
225 /* Mapping from old pseudo-register to new pseudo-registers.
226 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
227 It is allocated in `save_for_inline' and `expand_inline_function',
228 and deallocated on exit from each of those routines. */
229 static rtx *reg_map;
230
231 /* Mapping from old code-labels to new code-labels.
232 The first element of this map is label_map[min_labelno].
233 It is allocated in `save_for_inline' and `expand_inline_function',
234 and deallocated on exit from each of those routines. */
235 static rtx *label_map;
236
237 /* Mapping from old insn uid's to copied insns.
238 It is allocated in `save_for_inline' and `expand_inline_function',
239 and deallocated on exit from each of those routines. */
240 static rtx *insn_map;
241
242 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
243 Zero for a reg that isn't a parm's home.
244 Only reg numbers less than max_parm_reg are mapped here. */
245 static tree *parmdecl_map;
246
247 /* Keep track of first pseudo-register beyond those that are parms. */
248 extern int max_parm_reg;
249 extern rtx *parm_reg_stack_loc;
250
251 /* When an insn is being copied by copy_for_inline,
252 this is nonzero if we have copied an ASM_OPERANDS.
253 In that case, it is the original input-operand vector. */
254 static rtvec orig_asm_operands_vector;
255
256 /* When an insn is being copied by copy_for_inline,
257 this is nonzero if we have copied an ASM_OPERANDS.
258 In that case, it is the copied input-operand vector. */
259 static rtvec copy_asm_operands_vector;
260
261 /* Likewise, this is the copied constraints vector. */
262 static rtvec copy_asm_constraints_vector;
263
264 /* In save_for_inline, nonzero if past the parm-initialization insns. */
265 static int in_nonparm_insns;
266 \f
267 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
268 needed to save FNDECL's insns and info for future inline expansion. */
269
270 static rtx
271 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
272 tree fndecl;
273 int min_labelno;
274 int max_labelno;
275 int max_reg;
276 int copy;
277 {
278 int function_flags, i;
279 rtvec arg_vector;
280 tree parms;
281
282 /* Compute the values of any flags we must restore when inlining this. */
283
284 function_flags
285 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
286 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
287 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
288 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
289 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
290 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
291 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
292 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
293 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
294 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
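/* Each current_function_* value above is either 0 or 1, so the
   multiplications simply select bit masks. Consumers test the bits
   again; expand_inline_function below does exactly this, e.g.

   if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
     current_function_uses_pic_offset_table = 1;
   */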
295
296 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
297 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
298 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
299
300 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
301 parms;
302 parms = TREE_CHAIN (parms), i++)
303 {
304 rtx p = DECL_RTL (parms);
305 int copied_incoming = 0;
306
307 /* If we have (mem (addressof (mem ...))), use the inner MEM, since
308 otherwise the copy_rtx call below will not unshare the MEM, because
309 it shares ADDRESSOF. */
310 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
311 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
312 p = XEXP (XEXP (p, 0), 0);
313
314 if (GET_CODE (p) == MEM && copy)
315 {
316 /* Copy the rtl so that modifications of the addresses
317 later in compilation won't affect this arg_vector.
318 Virtual register instantiation can screw the address
319 of the rtl. */
320 rtx new = copy_rtx (p);
321
322 /* Don't leave the old copy anywhere in this decl. */
323 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
324 || (GET_CODE (DECL_RTL (parms)) == MEM
325 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
326 && (XEXP (DECL_RTL (parms), 0)
327 == XEXP (DECL_INCOMING_RTL (parms), 0))))
328 DECL_INCOMING_RTL (parms) = new, copied_incoming = 1;
329
330 DECL_RTL (parms) = new;
331 }
332
333 RTVEC_ELT (arg_vector, i) = p;
334
335 if (GET_CODE (p) == REG)
336 parmdecl_map[REGNO (p)] = parms;
337 else if (GET_CODE (p) == CONCAT)
338 {
339 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
340 rtx pimag = gen_imagpart (GET_MODE (preal), p);
341
342 if (GET_CODE (preal) == REG)
343 parmdecl_map[REGNO (preal)] = parms;
344 if (GET_CODE (pimag) == REG)
345 parmdecl_map[REGNO (pimag)] = parms;
346 }
347
348 /* This flag is cleared later
349 if the function ever modifies the value of the parm. */
350 TREE_READONLY (parms) = 1;
351
352 /* Copy DECL_INCOMING_RTL if not done already. This can
353 happen if DECL_RTL is a reg. */
354 if (copy && ! copied_incoming)
355 {
356 p = DECL_INCOMING_RTL (parms);
357
358 /* If we have (mem (addressof (mem ...))), use the inner MEM, since
359 otherwise the copy_rtx call below will not unshare the MEM, because
360 it shares ADDRESSOF. */
361 if (GET_CODE (p) == MEM && GET_CODE (XEXP (p, 0)) == ADDRESSOF
362 && GET_CODE (XEXP (XEXP (p, 0), 0)) == MEM)
363 p = XEXP (XEXP (p, 0), 0);
364
365 if (GET_CODE (p) == MEM)
366 DECL_INCOMING_RTL (parms) = copy_rtx (p);
367 }
368 }
369
370 /* Assume we start out in the insns that set up the parameters. */
371 in_nonparm_insns = 0;
372
373 /* The list of DECL_SAVED_INSNS starts off with a header which
374 contains the following information:
375
376 the first insn of the function (not including the insns that copy
377 parameters into registers).
378 the first parameter insn of the function,
379 the first label used by that function,
380 the last label used by that function,
381 the highest register number used for parameters,
382 the total number of registers used,
383 the size of the incoming stack area for parameters,
384 the number of bytes popped on return,
385 the stack slot list,
386 the labels that are forced to exist,
387 some flags that are used to restore compiler globals,
388 the value of current_function_outgoing_args_size,
389 the original argument vector,
390 the original DECL_INITIAL,
391 and pointers to the table of pseudo regs, pointer flags, and alignment. */
392
393 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
394 max_parm_reg, max_reg,
395 current_function_args_size,
396 current_function_pops_args,
397 stack_slot_list, forced_labels, function_flags,
398 current_function_outgoing_args_size,
399 arg_vector, (rtx) DECL_INITIAL (fndecl),
400 (rtvec) regno_reg_rtx, regno_pointer_flag,
401 regno_pointer_align,
402 (rtvec) parm_reg_stack_loc);
403 }
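/* The header built above is decoded with the accessor macros from
   integrate.h; expand_inline_function below reads it back with, for
   example, FIRST_PARM_INSN (header), FIRST_LABELNO (header),
   MAX_REGNUM (header), FUNCTION_FLAGS (header) and
   ORIGINAL_ARG_VECTOR (header). */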
404
405 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
406 things that must be done to make FNDECL expandable as an inline function.
407 HEAD contains the chain of insns to which FNDECL will expand. */
408
409 static void
410 finish_inline (fndecl, head)
411 tree fndecl;
412 rtx head;
413 {
414 FIRST_FUNCTION_INSN (head) = get_first_nonparm_insn ();
415 FIRST_PARM_INSN (head) = get_insns ();
416 DECL_SAVED_INSNS (fndecl) = head;
417 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
418 }
419
420 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
421 they all point to the new (copied) rtxs. */
422
423 static void
424 adjust_copied_decl_tree (block)
425 register tree block;
426 {
427 register tree subblock;
428 register rtx original_end;
429
430 original_end = BLOCK_END_NOTE (block);
431 if (original_end)
432 {
433 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
434 NOTE_SOURCE_FILE (original_end) = 0;
435 }
436
437 /* Process all subblocks. */
438 for (subblock = BLOCK_SUBBLOCKS (block);
439 subblock;
440 subblock = TREE_CHAIN (subblock))
441 adjust_copied_decl_tree (subblock);
442 }
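/* The handshake above is with the NOTE_INSN_BLOCK_END case in
   save_for_inline_copying below: when that code copies a block-end
   note, it stashes a pointer to the copy in the original note's
   NOTE_SOURCE_FILE field. Here we follow that pointer so each
   BLOCK_END_NOTE refers to the copied note, and then clear the
   borrowed field. */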
443
444 /* Make the insns and PARM_DECLs of the current function permanent
445 and record other information in DECL_SAVED_INSNS to allow inlining
446 of this function in subsequent calls.
447
448 This function is called when we are going to immediately compile
449 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
450 modified by the compilation process, so we copy all of them to
451 new storage and consider the new insns to be the insn chain to be
452 compiled. Our caller (rest_of_compilation) saves the original
453 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
454
455 /* ??? The nonlocal_label list should be adjusted also. However, since
456 a function that contains a nested function never gets inlined currently,
457 the nonlocal_label list will always be empty, so we don't worry about
458 it for now. */
459
460 void
461 save_for_inline_copying (fndecl)
462 tree fndecl;
463 {
464 rtx first_insn, last_insn, insn;
465 rtx head, copy;
466 int max_labelno, min_labelno, i, len;
467 int max_reg;
468 int max_uid;
469 rtx first_nonparm_insn;
470 char *new, *new1;
471 rtx *new_parm_reg_stack_loc;
472 rtx *new2;
473
474 /* Make and emit a return-label if we have not already done so.
475 Do this before recording the bounds on label numbers. */
476
477 if (return_label == 0)
478 {
479 return_label = gen_label_rtx ();
480 emit_label (return_label);
481 }
482
483 /* Get some bounds on the labels and registers used. */
484
485 max_labelno = max_label_num ();
486 min_labelno = get_first_label_num ();
487 max_reg = max_reg_num ();
488
489 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
490 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
491 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
492 for the parms, prior to elimination of virtual registers.
493 These values are needed for substituting parms properly. */
494
495 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
496
497 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
498
499 if (current_function_uses_const_pool)
500 {
501 /* Replace any constant pool references with the actual constant. We
502 will put the constants back in the copy made below. */
503 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
504 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
505 {
506 save_constants (&PATTERN (insn));
507 if (REG_NOTES (insn))
508 save_constants (&REG_NOTES (insn));
509 }
510
511 /* Also scan all decls, and replace any constant pool references with the
512 actual constant. */
513 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
514
515 /* Clear out the constant pool so that we can recreate it with the
516 copied constants below. */
517 init_const_rtx_hash_table ();
518 clear_const_double_mem ();
519 }
520
521 max_uid = INSN_UID (head);
522
523 /* We have now allocated all that needs to be allocated permanently
524 on the rtx obstack. Set our high-water mark, so that we
525 can free the rest of this when the time comes. */
526
527 preserve_data ();
528
529 /* Copy the chain insns of this function.
530 Install the copied chain as the insns of this function,
531 for continued compilation;
532 the original chain is recorded as the DECL_SAVED_INSNS
533 for inlining future calls. */
534
535 /* If there are insns that copy parms from the stack into pseudo registers,
536 those insns are not copied. `expand_inline_function' must
537 emit the correct code to handle such things. */
538
539 insn = get_insns ();
540 if (GET_CODE (insn) != NOTE)
541 abort ();
542 first_insn = rtx_alloc (NOTE);
543 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
544 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
545 INSN_UID (first_insn) = INSN_UID (insn);
546 PREV_INSN (first_insn) = NULL;
547 NEXT_INSN (first_insn) = NULL;
548 last_insn = first_insn;
549
550 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
551 Make these new rtx's now, and install them in regno_reg_rtx, so they
552 will be the official pseudo-reg rtx's for the rest of compilation. */
553
554 reg_map = (rtx *) savealloc (regno_pointer_flag_length * sizeof (rtx));
555
556 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
557 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
558 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
559 regno_reg_rtx[i], len);
560
561 regno_reg_rtx = reg_map;
562
563 /* Put copies of all the virtual register rtx into the new regno_reg_rtx. */
564 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
565 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
566 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
567 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
568
569 /* Likewise each label rtx must have a unique rtx as its copy. */
570
571 /* We used to use alloca here, but the size of what it would try to
572 allocate would occasionally cause it to exceed the stack limit and
573 cause unpredictable core dumps. Some examples were > 2Mb in size. */
574 label_map = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
575
576 for (i = min_labelno; i < max_labelno; i++)
577 label_map[i] = gen_label_rtx ();
578
579 /* Likewise for parm_reg_stack_loc. */
580 new_parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
581 for (i = 0; i < max_parm_reg; i++)
582 new_parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
583
584 parm_reg_stack_loc = new_parm_reg_stack_loc;
585
586 /* Record the mapping of old insns to copied insns. */
587
588 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
589 bzero ((char *) insn_map, max_uid * sizeof (rtx));
590
591 /* Get the insn which signals the end of parameter setup code. */
592 first_nonparm_insn = get_first_nonparm_insn ();
593
594 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
595 (the former occurs when a variable has its address taken)
596 since these may be shared and can be changed by virtual
597 register instantiation. DECL_RTL values for our arguments
598 have already been copied by initialize_for_inline. */
599 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
600 if (GET_CODE (regno_reg_rtx[i]) == MEM)
601 XEXP (regno_reg_rtx[i], 0)
602 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
603
604 /* Copy the parm_reg_stack_loc array, and substitute for all of the rtx
605 contained in it. */
606 new2 = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
607 bcopy ((char *) parm_reg_stack_loc, (char *) new2,
608 max_parm_reg * sizeof (rtx));
609 parm_reg_stack_loc = new2;
610 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; ++i)
611 if (parm_reg_stack_loc[i])
612 parm_reg_stack_loc[i] = copy_for_inline (parm_reg_stack_loc[i]);
613
614 /* Copy the tree of subblocks of the function, and the decls in them.
615 We will use the copy for compiling this function, then restore the original
616 subblocks and decls for use when inlining this function.
617
618 Several parts of the compiler modify BLOCK trees. In particular,
619 instantiate_virtual_regs will instantiate any virtual regs
620 mentioned in the DECL_RTLs of the decls, and loop
621 unrolling will replicate any BLOCK trees inside an unrolled loop.
622
623 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
624 which we will use for inlining. The rtl might even contain pseudoregs
625 whose space has been freed. */
626
627 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
628 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
629
630 /* Now copy each DECL_RTL which is a MEM,
631 so it is safe to modify their addresses. */
632 copy_decl_rtls (DECL_INITIAL (fndecl));
633
634 /* The fndecl node acts as its own progenitor, so mark it as such. */
635 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
636
637 /* Now copy the chain of insns. Do this twice: the first time, copy the
638 insn itself and its body; the second time, copy the REG_NOTES. This is
639 because a REG_NOTE may have a forward pointer to another insn. */
640
641 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
642 {
643 orig_asm_operands_vector = 0;
644
645 if (insn == first_nonparm_insn)
646 in_nonparm_insns = 1;
647
648 switch (GET_CODE (insn))
649 {
650 case NOTE:
651 /* No need to keep these. */
652 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
653 continue;
654
655 copy = rtx_alloc (NOTE);
656 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
657 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
658 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
659 else
660 {
661 NOTE_SOURCE_FILE (insn) = (char *) copy;
662 NOTE_SOURCE_FILE (copy) = 0;
663 }
664 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
665 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END)
666 {
667 int new_region = CODE_LABEL_NUMBER
668 (label_map[NOTE_BLOCK_NUMBER (copy)]);
669
670 /* We have to duplicate the handlers for the original. */
671 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
672 {
673 handler_info *ptr, *temp;
674 int nr;
675 nr = new_eh_region_entry (new_region);
676 ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
677 for ( ; ptr; ptr = ptr->next)
678 {
679 temp = get_new_handler (
680 label_map[CODE_LABEL_NUMBER (ptr->handler_label)],
681 ptr->type_info);
682 add_new_handler (nr, temp);
683 }
684 }
685
686 /* We have to renumber both the region-begin and region-end notes
687 to match the new exception region. */
688 NOTE_BLOCK_NUMBER (copy) = new_region;
689
690 }
691 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
692 break;
693
694 case INSN:
695 case JUMP_INSN:
696 case CALL_INSN:
697 copy = rtx_alloc (GET_CODE (insn));
698
699 if (GET_CODE (insn) == CALL_INSN)
700 CALL_INSN_FUNCTION_USAGE (copy)
701 = copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
702
703 PATTERN (copy) = copy_for_inline (PATTERN (insn));
704 INSN_CODE (copy) = -1;
705 LOG_LINKS (copy) = NULL_RTX;
706 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
707 break;
708
709 case CODE_LABEL:
710 copy = label_map[CODE_LABEL_NUMBER (insn)];
711 LABEL_NAME (copy) = LABEL_NAME (insn);
712 break;
713
714 case BARRIER:
715 copy = rtx_alloc (BARRIER);
716 break;
717
718 default:
719 abort ();
720 }
721 INSN_UID (copy) = INSN_UID (insn);
722 insn_map[INSN_UID (insn)] = copy;
723 NEXT_INSN (last_insn) = copy;
724 PREV_INSN (copy) = last_insn;
725 last_insn = copy;
726 }
727
728 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
729
730 /* Now copy the REG_NOTES. */
731 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
732 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
733 && insn_map[INSN_UID(insn)])
734 REG_NOTES (insn_map[INSN_UID (insn)])
735 = copy_for_inline (REG_NOTES (insn));
736
737 NEXT_INSN (last_insn) = NULL;
738
739 finish_inline (fndecl, head);
740
741 /* Make new versions of the register tables. */
742 new = (char *) savealloc (regno_pointer_flag_length);
743 bcopy (regno_pointer_flag, new, regno_pointer_flag_length);
744 new1 = (char *) savealloc (regno_pointer_flag_length);
745 bcopy (regno_pointer_align, new1, regno_pointer_flag_length);
746
747 regno_pointer_flag = new;
748 regno_pointer_align = new1;
749
750 set_new_first_and_last_insn (first_insn, last_insn);
751
752 if (label_map)
753 free (label_map);
754 }
755
756 /* Copy NODE (as with copy_node). NODE must be a DECL. Set the
757 DECL_ABSTRACT_ORIGIN for the new node accordingly. */
758
759 static tree
760 copy_and_set_decl_abstract_origin (node)
761 tree node;
762 {
763 tree copy = copy_node (node);
764 if (DECL_ABSTRACT_ORIGIN (copy) != NULL_TREE)
765 /* That means that NODE already had a DECL_ABSTRACT_ORIGIN. (This
766 situation occurs if we inline a function which itself made
767 calls to inline functions.) Since DECL_ABSTRACT_ORIGIN is the
768 most distant ancestor, we don't have to do anything here. */
769 ;
770 else
771 /* The most distant ancestor must be NODE. */
772 DECL_ABSTRACT_ORIGIN (copy) = node;
773
774 return copy;
775 }
776
777 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
778 For example, this can copy a list made of TREE_LIST nodes. While copying,
779 set DECL_ABSTRACT_ORIGIN appropriately. */
780
781 static tree
782 copy_decl_list (list)
783 tree list;
784 {
785 tree head;
786 register tree prev, next;
787
788 if (list == 0)
789 return 0;
790
791 head = prev = copy_and_set_decl_abstract_origin (list);
792 next = TREE_CHAIN (list);
793 while (next)
794 {
795 register tree copy;
796
797 copy = copy_and_set_decl_abstract_origin (next);
798 TREE_CHAIN (prev) = copy;
799 prev = copy;
800 next = TREE_CHAIN (next);
801 }
802 return head;
803 }
804
805 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
806
807 static tree
808 copy_decl_tree (block)
809 tree block;
810 {
811 tree t, vars, subblocks;
812
813 vars = copy_decl_list (BLOCK_VARS (block));
814 subblocks = 0;
815
816 /* Process all subblocks. */
817 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
818 {
819 tree copy = copy_decl_tree (t);
820 TREE_CHAIN (copy) = subblocks;
821 subblocks = copy;
822 }
823
824 t = copy_node (block);
825 BLOCK_VARS (t) = vars;
826 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
827 /* If the BLOCK being cloned is already marked as having been instantiated
828 from something else, then leave that `origin' marking alone. Otherwise,
829 mark the clone as having originated from the BLOCK we are cloning. */
830 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
831 BLOCK_ABSTRACT_ORIGIN (t) = block;
832 return t;
833 }
834
835 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
836
837 static void
838 copy_decl_rtls (block)
839 tree block;
840 {
841 tree t;
842
843 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
844 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
845 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
846
847 /* Process all subblocks. */
848 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
849 copy_decl_rtls (t);
850 }
851
852 /* Make the insns and PARM_DECLs of the current function permanent
853 and record other information in DECL_SAVED_INSNS to allow inlining
854 of this function in subsequent calls.
855
856 This routine need not copy any insns because we are not going
857 to immediately compile the insns in the insn chain. There
858 are two cases when we would compile the insns for FNDECL:
859 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
860 be output at the end of other compilation, because somebody took
861 its address. In the first case, the insns of FNDECL are copied
862 as it is expanded inline, so FNDECL's saved insns are not
863 modified. In the second case, FNDECL is used for the last time,
864 so modifying the rtl is not a problem.
865
866 We don't have to worry about FNDECL being inline expanded by
867 other functions which are written at the end of compilation
868 because flag_no_inline is turned on when we begin writing
869 functions at the end of compilation. */
870
871 void
872 save_for_inline_nocopy (fndecl)
873 tree fndecl;
874 {
875 rtx insn;
876 rtx head;
877 rtx first_nonparm_insn;
878
879 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
880 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
881 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
882 for the parms, prior to elimination of virtual registers.
883 These values are needed for substituting parms properly. */
884
885 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
886
887 /* Make and emit a return-label if we have not already done so. */
888
889 if (return_label == 0)
890 {
891 return_label = gen_label_rtx ();
892 emit_label (return_label);
893 }
894
895 head = initialize_for_inline (fndecl, get_first_label_num (),
896 max_label_num (), max_reg_num (), 0);
897
898 /* If there are insns that copy parms from the stack into pseudo registers,
899 those insns are not copied. `expand_inline_function' must
900 emit the correct code to handle such things. */
901
902 insn = get_insns ();
903 if (GET_CODE (insn) != NOTE)
904 abort ();
905
906 /* Get the insn which signals the end of parameter setup code. */
907 first_nonparm_insn = get_first_nonparm_insn ();
908
909 /* Now just scan the chain of insns to see what happens to our
910 PARM_DECLs. If a PARM_DECL is used but never modified, we
911 can substitute its rtl directly when expanding inline (and
912 perform constant folding when its incoming value is constant).
913 Otherwise, we have to copy its value into a new register and track
914 the new register's life. */
915
916 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
917 {
918 if (insn == first_nonparm_insn)
919 in_nonparm_insns = 1;
920
921 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
922 {
923 if (current_function_uses_const_pool)
924 {
925 /* Replace any constant pool references with the actual constant.
926 We will put the constant back if we need to write the
927 function out after all. */
928 save_constants (&PATTERN (insn));
929 if (REG_NOTES (insn))
930 save_constants (&REG_NOTES (insn));
931 }
932
933 /* Record what interesting things happen to our parameters. */
934 note_stores (PATTERN (insn), note_modified_parmregs);
935 }
936 }
937
938 /* Also scan all decls, and replace any constant pool references with the
939 actual constant. */
940 save_constants_in_decl_trees (DECL_INITIAL (fndecl));
941
942 /* We have now allocated all that needs to be allocated permanently
943 on the rtx obstack. Set our high-water mark, so that we
944 can free the rest of this when the time comes. */
945
946 preserve_data ();
947
948 finish_inline (fndecl, head);
949 }
950 \f
951 /* Given PX, a pointer into an insn, search for references to the constant
952 pool. Replace each with a CONST that has the mode of the original
953 constant, contains the constant, and has RTX_INTEGRATED_P set.
954 Similarly, constant pool addresses not enclosed in a MEM are replaced
955 with an ADDRESS and CONST rtx which also gives the constant, its
956 mode, the mode of the address, and has RTX_INTEGRATED_P set. */
957
958 static void
959 save_constants (px)
960 rtx *px;
961 {
962 rtx x;
963 int i, j;
964
965 again:
966 x = *px;
967
968 /* If this is a CONST_DOUBLE, don't try to fix things up in
969 CONST_DOUBLE_MEM, because this is an infinite recursion. */
970 if (GET_CODE (x) == CONST_DOUBLE)
971 return;
972 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
973 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
974 {
975 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
976 rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
977 RTX_INTEGRATED_P (new) = 1;
978
979 /* If the MEM was in a different mode than the constant (perhaps we
980 were only looking at the low-order part), surround it with a
981 SUBREG so we can save both modes. */
982
983 if (GET_MODE (x) != const_mode)
984 {
985 new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
986 RTX_INTEGRATED_P (new) = 1;
987 }
988
989 *px = new;
990 save_constants (&XEXP (*px, 0));
991 }
992 else if (GET_CODE (x) == SYMBOL_REF
993 && CONSTANT_POOL_ADDRESS_P (x))
994 {
995 *px = gen_rtx_ADDRESS (GET_MODE (x),
996 gen_rtx_CONST (get_pool_mode (x),
997 get_pool_constant (x)));
998 save_constants (&XEXP (*px, 0));
999 RTX_INTEGRATED_P (*px) = 1;
1000 }
1001
1002 else
1003 {
1004 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
1005 int len = GET_RTX_LENGTH (GET_CODE (x));
1006
1007 for (i = len-1; i >= 0; i--)
1008 {
1009 switch (fmt[i])
1010 {
1011 case 'E':
1012 for (j = 0; j < XVECLEN (x, i); j++)
1013 save_constants (&XVECEXP (x, i, j));
1014 break;
1015
1016 case 'e':
1017 if (XEXP (x, i) == 0)
1018 continue;
1019 if (i == 0)
1020 {
1021 /* Hack tail-recursion here. */
1022 px = &XEXP (x, 0);
1023 goto again;
1024 }
1025 save_constants (&XEXP (x, i));
1026 break;
1027 }
1028 }
1029 }
1030 }
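/* For illustration, a pool reference such as

   (mem:SI (symbol_ref:SI ("*.LC0")))

   whose SYMBOL_REF satisfies CONSTANT_POOL_ADDRESS_P becomes

   (const:SI <the pool constant>)

   with RTX_INTEGRATED_P set; copy_for_inline and restore_constants
   recognize that flag and rebuild a valid pool reference with
   force_const_mem. */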
1031 \f
1032 /* Note whether a parameter is modified or not. */
1033
1034 static void
1035 note_modified_parmregs (reg, x)
1036 rtx reg;
1037 rtx x ATTRIBUTE_UNUSED;
1038 {
1039 if (GET_CODE (reg) == REG && in_nonparm_insns
1040 && REGNO (reg) < max_parm_reg
1041 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1042 && parmdecl_map[REGNO (reg)] != 0)
1043 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
1044 }
1045
1046 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
1047 according to `reg_map' and `label_map'. The original rtl insns
1048 will be saved for inlining; this is used to make a copy
1049 which is used to finish compiling the inline function itself.
1050
1051 If we find a "saved" constant pool entry, one which was replaced with
1052 the value of the constant, convert it back to a constant pool entry.
1053 Since the pool wasn't touched, this should simply restore the old
1054 address.
1055
1056 All other kinds of rtx are copied except those that can never be
1057 changed during compilation. */
1058
1059 static rtx
1060 copy_for_inline (orig)
1061 rtx orig;
1062 {
1063 register rtx x = orig;
1064 register rtx new;
1065 register int i;
1066 register enum rtx_code code;
1067 register char *format_ptr;
1068
1069 if (x == 0)
1070 return x;
1071
1072 code = GET_CODE (x);
1073
1074 /* These types may be freely shared. */
1075
1076 switch (code)
1077 {
1078 case QUEUED:
1079 case CONST_INT:
1080 case SYMBOL_REF:
1081 case PC:
1082 case CC0:
1083 return x;
1084
1085 case CONST_DOUBLE:
1086 /* We have to make a new CONST_DOUBLE to ensure that we account for
1087 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
1088 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1089 {
1090 REAL_VALUE_TYPE d;
1091
1092 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1093 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
1094 }
1095 else
1096 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
1097 VOIDmode);
1098
1099 case CONST:
1100 /* Get constant pool entry for constant in the pool. */
1101 if (RTX_INTEGRATED_P (x))
1102 return validize_mem (force_const_mem (GET_MODE (x),
1103 copy_for_inline (XEXP (x, 0))));
1104 break;
1105
1106 case SUBREG:
1107 /* Get constant pool entry, but access in different mode. */
1108 if (RTX_INTEGRATED_P (x))
1109 {
1110 new = force_const_mem (GET_MODE (SUBREG_REG (x)),
1111 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
1112
1113 PUT_MODE (new, GET_MODE (x));
1114 return validize_mem (new);
1115 }
1116 break;
1117
1118 case ADDRESS:
1119 /* If this rtx is not marked as being for the constant pool, it
1120 is an error. Otherwise get the constant pool address. */
1121 if (! RTX_INTEGRATED_P (x))
1122 abort ();
1123
1124 new = force_const_mem (GET_MODE (XEXP (x, 0)),
1125 copy_for_inline (XEXP (XEXP (x, 0), 0)));
1126 new = XEXP (new, 0);
1127
1128 #ifdef POINTERS_EXTEND_UNSIGNED
1129 if (GET_MODE (new) != GET_MODE (x))
1130 new = convert_memory_address (GET_MODE (x), new);
1131 #endif
1132
1133 return new;
1134
1135 case ASM_OPERANDS:
1136 /* If a single asm insn contains multiple output operands
1137 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
1138 We must make sure that the copied insn continues to share it. */
1139 if (orig_asm_operands_vector == XVEC (orig, 3))
1140 {
1141 x = rtx_alloc (ASM_OPERANDS);
1142 x->volatil = orig->volatil;
1143 XSTR (x, 0) = XSTR (orig, 0);
1144 XSTR (x, 1) = XSTR (orig, 1);
1145 XINT (x, 2) = XINT (orig, 2);
1146 XVEC (x, 3) = copy_asm_operands_vector;
1147 XVEC (x, 4) = copy_asm_constraints_vector;
1148 XSTR (x, 5) = XSTR (orig, 5);
1149 XINT (x, 6) = XINT (orig, 6);
1150 return x;
1151 }
1152 break;
1153
1154 case MEM:
1155 /* A MEM is usually allowed to be shared if its address is constant
1156 or is a constant plus one of the special registers.
1157
1158 We do not allow sharing of addresses that are either a special
1159 register or the sum of a constant and a special register because
1160 it is possible for unshare_all_rtl to copy the address, into memory
1161 that won't be saved. Although the MEM can safely be shared, and
1162 won't be copied there, the address itself cannot be shared, and may
1163 need to be copied.
1164
1165 There are also two exceptions with constants: The first is if the
1166 constant is a LABEL_REF or the sum of the LABEL_REF
1167 and an integer. This case can happen if we have an inline
1168 function that supplies a constant operand to the call of another
1169 inline function that uses it in a switch statement. In this case,
1170 we will be replacing the LABEL_REF, so we have to replace this MEM
1171 as well.
1172
1173 The second case is if we have a (const (plus (address ..) ...)).
1174 In that case we need to put back the address of the constant pool
1175 entry. */
1176
1177 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1178 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1179 && ! (GET_CODE (XEXP (x, 0)) == CONST
1180 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1181 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1182 == LABEL_REF)
1183 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1184 == ADDRESS)))))
1185 return x;
1186 break;
1187
1188 case LABEL_REF:
1189 /* If this is a non-local label, just make a new LABEL_REF.
1190 Otherwise, use the new label as well. */
1191 x = gen_rtx_LABEL_REF (GET_MODE (orig),
1192 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1193 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1194 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1195 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1196 return x;
1197
1198 case REG:
1199 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1200 return reg_map [REGNO (x)];
1201 else
1202 return x;
1203
1204 case SET:
1205 /* If a parm that gets modified lives in a pseudo-reg,
1206 clear its TREE_READONLY to prevent certain optimizations. */
1207 {
1208 rtx dest = SET_DEST (x);
1209
1210 while (GET_CODE (dest) == STRICT_LOW_PART
1211 || GET_CODE (dest) == ZERO_EXTRACT
1212 || GET_CODE (dest) == SUBREG)
1213 dest = XEXP (dest, 0);
1214
1215 if (GET_CODE (dest) == REG
1216 && REGNO (dest) < max_parm_reg
1217 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1218 && parmdecl_map[REGNO (dest)] != 0
1219 /* The insn to load an arg pseudo from a stack slot
1220 does not count as modifying it. */
1221 && in_nonparm_insns)
1222 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1223 }
1224 break;
1225
1226 #if 0 /* This is a good idea, but here is the wrong place for it. */
1227 /* Arrange that CONST_INTs always appear as the second operand
1228 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1229 always appear as the first. */
1230 case PLUS:
1231 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1232 || (XEXP (x, 1) == frame_pointer_rtx
1233 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1234 && XEXP (x, 1) == arg_pointer_rtx)))
1235 {
1236 rtx t = XEXP (x, 0);
1237 XEXP (x, 0) = XEXP (x, 1);
1238 XEXP (x, 1) = t;
1239 }
1240 break;
1241 #endif
1242 default:
1243 break;
1244 }
1245
1246 /* Replace this rtx with a copy of itself. */
1247
1248 x = rtx_alloc (code);
1249 bcopy ((char *) orig, (char *) x,
1250 (sizeof (*x) - sizeof (x->fld)
1251 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1252
1253 /* Now scan the subexpressions recursively.
1254 We can store any replaced subexpressions directly into X
1255 since we know X is not shared! Any vectors in X
1256 must be copied if X was copied. */
1257
1258 format_ptr = GET_RTX_FORMAT (code);
1259
1260 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1261 {
1262 switch (*format_ptr++)
1263 {
1264 case 'e':
1265 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1266 break;
1267
1268 case 'u':
1269 /* Change any references to old-insns to point to the
1270 corresponding copied insns. */
1271 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1272 break;
1273
1274 case 'E':
1275 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1276 {
1277 register int j;
1278
1279 XVEC (x, i) = gen_rtvec_vv (XVECLEN (x, i), XVEC (x, i)->elem);
1280 for (j = 0; j < XVECLEN (x, i); j++)
1281 XVECEXP (x, i, j)
1282 = copy_for_inline (XVECEXP (x, i, j));
1283 }
1284 break;
1285 }
1286 }
1287
1288 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1289 {
1290 orig_asm_operands_vector = XVEC (orig, 3);
1291 copy_asm_operands_vector = XVEC (x, 3);
1292 copy_asm_constraints_vector = XVEC (x, 4);
1293 }
1294
1295 return x;
1296 }
1297
1298 /* Unfortunately, we need a global copy of const_equiv map for communication
1299 with a function called from note_stores. Be *very* careful that this
1300 is used properly in the presence of recursion. */
1301
1302 rtx *global_const_equiv_map;
1303 int global_const_equiv_map_size;
1304 \f
1305 #define FIXED_BASE_PLUS_P(X) \
1306 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1307 && GET_CODE (XEXP (X, 0)) == REG \
1308 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1309 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
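/* E.g. (illustrative) FIXED_BASE_PLUS_P matches

   (plus:SI (reg:SI virtual-stack-vars) (const_int 8))

   but not a sum whose base is an ordinary pseudo, since only the
   virtual registers are known-fixed bases here. */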
1310
1311 /* Integrate the procedure defined by FNDECL. Note that this function
1312 may wind up calling itself. Since the static variables are not
1313 reentrant, we do not assign them until after the possibility
1314 of recursion is eliminated.
1315
1316 If IGNORE is nonzero, do not produce a value.
1317 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1318
1319 Value is:
1320 (rtx)-1 if we could not substitute the function
1321 0 if we substituted it and it does not produce a value
1322 else an rtx for where the value is stored. */
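/* A sketch of the caller's side; expand_call in calls.c does roughly
   this (details may differ):

   temp = expand_inline_function (fndecl, actparms, target,
                                  ignore, type, structure_value_addr);
   if (temp != (rtx) (HOST_WIDE_INT) -1)
     return temp;

   and on failure it falls through to emit an ordinary CALL_INSN. */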
1323
1324 rtx
1325 expand_inline_function (fndecl, parms, target, ignore, type,
1326 structure_value_addr)
1327 tree fndecl, parms;
1328 rtx target;
1329 int ignore;
1330 tree type;
1331 rtx structure_value_addr;
1332 {
1333 tree formal, actual, block;
1334 rtx header = DECL_SAVED_INSNS (fndecl);
1335 rtx insns = FIRST_FUNCTION_INSN (header);
1336 rtx parm_insns = FIRST_PARM_INSN (header);
1337 tree *arg_trees;
1338 rtx *arg_vals;
1339 rtx insn;
1340 int max_regno;
1341 register int i;
1342 int min_labelno = FIRST_LABELNO (header);
1343 int max_labelno = LAST_LABELNO (header);
1344 int nargs;
1345 rtx local_return_label = 0;
1346 rtx loc;
1347 rtx stack_save = 0;
1348 rtx temp;
1349 struct inline_remap *map;
1350 #ifdef HAVE_cc0
1351 rtx cc0_insn = 0;
1352 #endif
1353 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1354 rtx static_chain_value = 0;
1355
1356 /* The pointer used to track the true location of the memory used
1357 for MAP->LABEL_MAP. */
1358 rtx *real_label_map = 0;
1359
1360 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1361 max_regno = MAX_REGNUM (header) + 3;
1362 if (max_regno < FIRST_PSEUDO_REGISTER)
1363 abort ();
1364
1365 nargs = list_length (DECL_ARGUMENTS (fndecl));
1366
1367 /* Check that the parms' types match and that sufficient arguments were
1368 passed. Since the appropriate conversions or default promotions have
1369 already been applied, the machine modes should match exactly. */
1370
1371 for (formal = DECL_ARGUMENTS (fndecl), actual = parms;
1372 formal;
1373 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual))
1374 {
1375 tree arg;
1376 enum machine_mode mode;
1377
1378 if (actual == 0)
1379 return (rtx) (HOST_WIDE_INT) -1;
1380
1381 arg = TREE_VALUE (actual);
1382 mode = TYPE_MODE (DECL_ARG_TYPE (formal));
1383
1384 if (mode != TYPE_MODE (TREE_TYPE (arg))
1385 /* If they are block mode, the types should match exactly.
1386 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1387 which could happen if the parameter has incomplete type. */
1388 || (mode == BLKmode
1389 && (TYPE_MAIN_VARIANT (TREE_TYPE (arg))
1390 != TYPE_MAIN_VARIANT (TREE_TYPE (formal)))))
1391 return (rtx) (HOST_WIDE_INT) -1;
1392 }
1393
1394 /* Extra arguments are valid, but will be ignored below, so we must
1395 evaluate them here for side-effects. */
1396 for (; actual; actual = TREE_CHAIN (actual))
1397 expand_expr (TREE_VALUE (actual), const0_rtx,
1398 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1399
1400 /* Make a binding contour to keep inline cleanups called at
1401 outer function-scope level from looking like they are shadowing
1402 parameter declarations. */
1403 pushlevel (0);
1404
1405 /* Expand the function arguments. Do this first so that any
1406 new registers get created before we allocate the maps. */
1407
1408 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1409 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1410
1411 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1412 formal;
1413 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1414 {
1415 /* Actual parameter, converted to the type of the argument within the
1416 function. */
1417 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1418 /* Mode of the variable used within the function. */
1419 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1420 int invisiref = 0;
1421
1422 arg_trees[i] = arg;
1423 loc = RTVEC_ELT (arg_vector, i);
1424
1425 /* If this is an object passed by invisible reference, we copy the
1426 object into a stack slot and save its address. If this will go
1427 into memory, we do nothing now. Otherwise, we just expand the
1428 argument. */
1429 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1430 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1431 {
1432 rtx stack_slot
1433 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1434 int_size_in_bytes (TREE_TYPE (arg)), 1);
1435 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1436
1437 store_expr (arg, stack_slot, 0);
1438
1439 arg_vals[i] = XEXP (stack_slot, 0);
1440 invisiref = 1;
1441 }
1442 else if (GET_CODE (loc) != MEM)
1443 {
1444 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1445 /* The mode of LOC and ARG can differ if LOC was a variable
1446 that had its mode promoted via PROMOTED_MODE. */
1447 arg_vals[i] = convert_modes (GET_MODE (loc),
1448 TYPE_MODE (TREE_TYPE (arg)),
1449 expand_expr (arg, NULL_RTX, mode,
1450 EXPAND_SUM),
1451 TREE_UNSIGNED (TREE_TYPE (formal)));
1452 else
1453 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1454 }
1455 else
1456 arg_vals[i] = 0;
1457
1458 if (arg_vals[i] != 0
1459 && (! TREE_READONLY (formal)
1460 /* If the parameter is not read-only, copy our argument through
1461 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1462 TARGET in any way. In the inline function, they will likely
1463 be two different pseudos, and `safe_from_p' will make all
1464 sorts of smart assumptions about their not conflicting.
1465 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1466 wrong, so put ARG_VALS[I] into a fresh register.
1467 Don't worry about invisible references, since their stack
1468 temps will never overlap the target. */
1469 || (target != 0
1470 && ! invisiref
1471 && (GET_CODE (arg_vals[i]) == REG
1472 || GET_CODE (arg_vals[i]) == SUBREG
1473 || GET_CODE (arg_vals[i]) == MEM)
1474 && reg_overlap_mentioned_p (arg_vals[i], target))
1475 /* ??? We must always copy a SUBREG into a REG, because it might
1476 get substituted into an address, and not all ports correctly
1477 handle SUBREGs in addresses. */
1478 || (GET_CODE (arg_vals[i]) == SUBREG)))
1479 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1480
1481 if (arg_vals[i] != 0 && GET_CODE (arg_vals[i]) == REG
1482 && POINTER_TYPE_P (TREE_TYPE (formal)))
1483 mark_reg_pointer (arg_vals[i],
1484 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (formal)))
1485 / BITS_PER_UNIT));
1486 }
1487
1488 /* Allocate the structures we use to remap things. */
1489
1490 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1491 map->fndecl = fndecl;
1492
1493 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1494 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1495
1496 /* We used to use alloca here, but the size of what it would try to
1497 allocate would occasionally cause it to exceed the stack limit and
1498 cause unpredictable core dumps. */
1499 real_label_map
1500 = (rtx *) xmalloc ((max_labelno) * sizeof (rtx));
1501 map->label_map = real_label_map;
1502
1503 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1504 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1505 map->min_insnno = 0;
1506 map->max_insnno = INSN_UID (header);
1507
1508 map->integrating = 1;
1509
1510 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1511 be large enough for all our pseudos. This is the number we are currently
1512 using plus the number in the called routine, plus 15 for each arg,
1513 five to compute the virtual frame pointer, and five for the return value.
1514 This should be enough for most cases. We do not reference entries
1515 outside the range of the map.
1516
1517 ??? These numbers are quite arbitrary and were obtained by
1518 experimentation. At some point, we should try to allocate the
1519 table after all the parameters are set up so we can more accurately
1520 estimate the number of pseudos we will need. */
1521
1522 map->const_equiv_map_size
1523 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1524
1525 map->const_equiv_map
1526 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1527 bzero ((char *) map->const_equiv_map,
1528 map->const_equiv_map_size * sizeof (rtx));
1529
1530 map->const_age_map
1531 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1532 bzero ((char *) map->const_age_map,
1533 map->const_equiv_map_size * sizeof (unsigned));
1534 map->const_age = 0;
1535
1536 /* Record the current insn in case we have to set up pointers to frame
1537 and argument memory blocks. If there are no insns yet, add a dummy
1538 insn that can be used as an insertion point. */
1539 map->insns_at_start = get_last_insn ();
1540 if (map->insns_at_start == 0)
1541 map->insns_at_start = emit_note (NULL_PTR, NOTE_INSN_DELETED);
1542
1543 map->regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (header);
1544 map->regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (header);
1545
1546 /* Update the outgoing argument size to allow for those in the inlined
1547 function. */
1548 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1549 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1550
1551 /* If the inline function needs to make PIC references, that means
1552 that this function's PIC offset table must be used. */
1553 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1554 current_function_uses_pic_offset_table = 1;
1555
1556 /* If this function needs a context, set it up. */
1557 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1558 static_chain_value = lookup_static_chain (fndecl);
1559
1560 if (GET_CODE (parm_insns) == NOTE
1561 && NOTE_LINE_NUMBER (parm_insns) > 0)
1562 {
1563 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1564 NOTE_LINE_NUMBER (parm_insns));
1565 if (note)
1566 RTX_INTEGRATED_P (note) = 1;
1567 }
1568
1569 /* Process each argument. For each, set up things so that the function's
1570 reference to the argument will refer to the argument being passed.
1571 We only replace REG with REG here. Any simplifications are done
1572 via const_equiv_map.
1573
1574 We make two passes: In the first, we deal with parameters that will
1575 be placed into registers, since we need to ensure that the allocated
1576 register number fits in const_equiv_map. Then we store all non-register
1577 parameters into their memory location. */
1578
1579 /* Don't try to free temp stack slots here, because we may put one of the
1580 parameters into a temp stack slot. */
1581
1582 for (i = 0; i < nargs; i++)
1583 {
1584 rtx copy = arg_vals[i];
1585
1586 loc = RTVEC_ELT (arg_vector, i);
1587
1588 /* There are three cases, each handled separately. */
1589 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1590 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1591 {
1592 /* This must be an object passed by invisible reference (it could
1593 also be a variable-sized object, but we forbid inlining functions
1594 with variable-sized arguments). COPY is the address of the
1595 actual value (this computation will cause it to be copied). We
1596 map that address for the register, noting the actual address as
1597 an equivalent in case it can be substituted into the insns. */
1598
1599 if (GET_CODE (copy) != REG)
1600 {
1601 temp = copy_addr_to_reg (copy);
1602 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1603 && REGNO (temp) < map->const_equiv_map_size)
1604 {
1605 map->const_equiv_map[REGNO (temp)] = copy;
1606 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1607 }
1608 copy = temp;
1609 }
1610 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1611 }
1612 else if (GET_CODE (loc) == MEM)
1613 {
1614 /* This is the case of a parameter that lives in memory.
1615 It will live in the block we allocate in the called routine's
1616 frame that simulates the incoming argument area. Do nothing
1617 now; we will call store_expr later. */
1618 ;
1619 }
1620 else if (GET_CODE (loc) == REG)
1621 {
1622 /* This is the good case where the parameter is in a register.
1623 If it is read-only and our argument is a constant, set up the
1624 constant equivalence.
1625
1626 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1627 that flag set if it is a register.
1628
1629 Also, don't allow hard registers here; they might not be valid
1630 when substituted into insns. */
1631
1632 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1633 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1634 && ! REG_USERVAR_P (copy))
1635 || (GET_CODE (copy) == REG
1636 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1637 {
1638 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1639 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1640 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1641 && REGNO (temp) < map->const_equiv_map_size)
1642 {
1643 map->const_equiv_map[REGNO (temp)] = copy;
1644 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1645 }
1646 copy = temp;
1647 }
1648 map->reg_map[REGNO (loc)] = copy;
1649 }
1650 else if (GET_CODE (loc) == CONCAT)
1651 {
1652 /* This is the good case where the parameter is in a
1653 pair of separate pseudos.
1654 If it is read-only and our argument is a constant, set up the
1655 constant equivalence.
1656
1657 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1658 that flag set if it is a register.
1659
1660 Also, don't allow hard registers here; they might not be valid
1661 when substituted into insns. */
1662 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1663 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1664 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1665 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
1666
1667 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1668 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1669 && ! REG_USERVAR_P (copyreal))
1670 || (GET_CODE (copyreal) == REG
1671 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1672 {
1673 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1674 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1675 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1676 && REGNO (temp) < map->const_equiv_map_size)
1677 {
1678 map->const_equiv_map[REGNO (temp)] = copyreal;
1679 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1680 }
1681 copyreal = temp;
1682 }
1683 map->reg_map[REGNO (locreal)] = copyreal;
1684
1685 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1686 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1687 && ! REG_USERVAR_P (copyimag))
1688 || (GET_CODE (copyimag) == REG
1689 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1690 {
1691 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1692 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1693 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1694 && REGNO (temp) < map->const_equiv_map_size)
1695 {
1696 map->const_equiv_map[REGNO (temp)] = copyimag;
1697 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1698 }
1699 copyimag = temp;
1700 }
1701 map->reg_map[REGNO (locimag)] = copyimag;
1702 }
1703 else
1704 abort ();
1705 }
1706
1707 /* Now do the parameters that will be placed in memory. */
1708
1709 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1710 formal; formal = TREE_CHAIN (formal), i++)
1711 {
1712 loc = RTVEC_ELT (arg_vector, i);
1713
1714 if (GET_CODE (loc) == MEM
1715 /* Exclude case handled above. */
1716 && ! (GET_CODE (XEXP (loc, 0)) == REG
1717 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1718 {
1719 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1720 DECL_SOURCE_LINE (formal));
1721 if (note)
1722 RTX_INTEGRATED_P (note) = 1;
1723
1724 /* Compute the address in the area we reserved and store the
1725 value there. */
1726 temp = copy_rtx_and_substitute (loc, map);
1727 subst_constants (&temp, NULL_RTX, map);
1728 apply_change_group ();
1729 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1730 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1731 store_expr (arg_trees[i], temp, 0);
1732 }
1733 }
1734
1735 /* Deal with the places that the function puts its result.
1736 We are driven by what is placed into DECL_RESULT.
1737
1738 Initially, we assume that we don't need any special handling for
1739 REG_FUNCTION_VALUE_P. */
1740
1741 map->inline_target = 0;
1742 loc = DECL_RTL (DECL_RESULT (fndecl));
1743 if (TYPE_MODE (type) == VOIDmode)
1744 /* There is no return value to worry about. */
1745 ;
1746 else if (GET_CODE (loc) == MEM)
1747 {
1748 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1749 abort ();
1750
1751 /* Pass the function the address in which to return a structure value.
1752 Note that a constructor can cause someone to call us with
1753 STRUCTURE_VALUE_ADDR, but the initialization takes place
1754 via the first parameter, rather than the struct return address.
1755
1756 We have two cases: If the address is a simple register indirect,
1757 use the mapping mechanism to point that register to our structure
1758 return address. Otherwise, store the structure return value into
1759 the place that it will be referenced from. */
1760
1761 if (GET_CODE (XEXP (loc, 0)) == REG)
1762 {
1763 temp = force_reg (Pmode,
1764 force_operand (structure_value_addr, NULL_RTX));
1765 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1766 if ((CONSTANT_P (structure_value_addr)
1767 || GET_CODE (structure_value_addr) == ADDRESSOF
1768 || (GET_CODE (structure_value_addr) == PLUS
1769 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1770 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1771 && REGNO (temp) < map->const_equiv_map_size)
1772 {
1773 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1774 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1775 }
1776 }
1777 else
1778 {
1779 temp = copy_rtx_and_substitute (loc, map);
1780 subst_constants (&temp, NULL_RTX, map);
1781 apply_change_group ();
1782 emit_move_insn (temp, structure_value_addr);
1783 }
1784 }
1785 else if (ignore)
1786 /* We will ignore the result value, so don't look at its structure.
1787 Note that preparations for an aggregate return value
1788 do need to be made (above) even if it will be ignored. */
1789 ;
1790 else if (GET_CODE (loc) == REG)
1791 {
1792 /* The function returns an object in a register and we use the return
1793 value. Set up our target for remapping. */
1794
1795 /* Machine mode the function was declared to return. */
1796 enum machine_mode departing_mode = TYPE_MODE (type);
1797 /* (Possibly wider) machine mode it actually computes
1798 (for the sake of callers that fail to declare it right).
1799 We have to use the mode of the result's RTL, rather than
1800 its type, since expand_function_start may have promoted it. */
1801 enum machine_mode arriving_mode
1802 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
1803 rtx reg_to_map;
1804
1805 /* Don't use MEMs as direct targets because on some machines
1806 substituting a MEM for a REG makes invalid insns.
1807 Let the combiner substitute the MEM if that is valid. */
1808 if (target == 0 || GET_CODE (target) != REG
1809 || GET_MODE (target) != departing_mode)
1810 target = gen_reg_rtx (departing_mode);
1811
1812 /* If function's value was promoted before return,
1813 avoid machine mode mismatch when we substitute INLINE_TARGET.
1814 But TARGET is what we will return to the caller. */
1815 if (arriving_mode != departing_mode)
1816 {
1817 /* Avoid creating a paradoxical subreg wider than
1818 BITS_PER_WORD, since that is illegal. */
1819 if (GET_MODE_BITSIZE (arriving_mode) > BITS_PER_WORD)
1820 {
1821 if (!TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (departing_mode),
1822 GET_MODE_BITSIZE (arriving_mode)))
1823 /* Maybe this could be handled by using convert_move ()? */
1824 abort ();
1825 reg_to_map = gen_reg_rtx (arriving_mode);
1826 target = gen_lowpart (departing_mode, reg_to_map);
1827 }
1828 else
1829 reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
1830 }
1831 else
1832 reg_to_map = target;
1833
1834 /* Usually, the result value is the machine's return register.
1835 Sometimes it may be a pseudo. Handle both cases. */
1836 if (REG_FUNCTION_VALUE_P (loc))
1837 map->inline_target = reg_to_map;
1838 else
1839 map->reg_map[REGNO (loc)] = reg_to_map;
1840 }
1841 else
1842 abort ();
1843
1844 /* Make a fresh binding contour that we can easily remove. Do this after
1845 expanding our arguments so cleanups are properly scoped. */
1846 pushlevel (0);
1847 expand_start_bindings (0);
1848
1849 /* Initialize label_map. get_label_from_map will actually make
1850 the labels. */
1851 bzero ((char *) &map->label_map [min_labelno],
1852 (max_labelno - min_labelno) * sizeof (rtx));
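/* Only the bookkeeping is cleared here; the labels themselves are
   created lazily, on first reference, so labels of the inlined function
   that are never referenced cost nothing.  Roughly (a sketch; the real
   lookup also accounts for the min_labelno offset):  */
#if 0
if (map->label_map[labelno] == 0)
  map->label_map[labelno] = gen_label_rtx ();	/* first reference */
#endif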
1853
1854 /* Perform postincrements before actually calling the function. */
1855 emit_queue ();
1856
1857 /* Clean up stack so that variables might have smaller offsets. */
1858 do_pending_stack_adjust ();
1859
1860 /* Save a copy of the location of const_equiv_map for mark_stores, called
1861 via note_stores. */
1862 global_const_equiv_map = map->const_equiv_map;
1863 global_const_equiv_map_size = map->const_equiv_map_size;
1864
1865 /* If the called function does an alloca, save and restore the
1866 stack pointer around the call. This saves stack space, but
1867 also is required if this inline is being done between two
1868 pushes. */
1869 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1870 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX);
1871
1872 /* Now copy the insns one by one. Do this in two passes, first the insns and
1873 then their REG_NOTES, just like save_for_inline. */
1874
1875 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1876
1877 for (insn = insns; insn; insn = NEXT_INSN (insn))
1878 {
1879 rtx copy, pattern, set;
1880
1881 map->orig_asm_operands_vector = 0;
1882
1883 switch (GET_CODE (insn))
1884 {
1885 case INSN:
1886 pattern = PATTERN (insn);
1887 set = single_set (insn);
1888 copy = 0;
1889 if (GET_CODE (pattern) == USE
1890 && GET_CODE (XEXP (pattern, 0)) == REG
1891 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1892 /* The (USE (REG n)) at return from the function should
1893 be ignored since we are changing (REG n) into
1894 inline_target. */
1895 break;
1896
1897 /* If the inline fn needs eh context, make sure that
1898 the current fn has one. */
1899 if (GET_CODE (pattern) == USE
1900 && find_reg_note (insn, REG_EH_CONTEXT, 0) != 0)
1901 get_eh_context ();
1902
1903 /* Ignore setting a function value that we don't want to use. */
1904 if (map->inline_target == 0
1905 && set != 0
1906 && GET_CODE (SET_DEST (set)) == REG
1907 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1908 {
1909 if (volatile_refs_p (SET_SRC (set)))
1910 {
1911 rtx new_set;
1912
1913 /* If we must not delete the source,
1914 load it into a new temporary. */
1915 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1916
1917 new_set = single_set (copy);
1918 if (new_set == 0)
1919 abort ();
1920
1921 SET_DEST (new_set)
1922 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1923 }
1924 /* If the source and destination are the same and it
1925 has a note on it, keep the insn. */
1926 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1927 && REG_NOTES (insn) != 0)
1928 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1929 else
1930 break;
1931 }
1932
1933 /* If this is setting the static chain rtx, omit it. */
1934 else if (static_chain_value != 0
1935 && set != 0
1936 && GET_CODE (SET_DEST (set)) == REG
1937 && rtx_equal_p (SET_DEST (set),
1938 static_chain_incoming_rtx))
1939 break;
1940
1941 /* If this is setting the static chain pseudo, set it from
1942 the value we want to give it instead. */
1943 else if (static_chain_value != 0
1944 && set != 0
1945 && rtx_equal_p (SET_SRC (set),
1946 static_chain_incoming_rtx))
1947 {
1948 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1949
1950 copy = emit_move_insn (newdest, static_chain_value);
1951 static_chain_value = 0;
1952 }
1953 else
1954 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1955 /* REG_NOTES will be copied later. */
1956
1957 #ifdef HAVE_cc0
1958 /* If this insn is setting CC0, it may need to look at
1959 the insn that uses CC0 to see what type of insn it is.
1960 In that case, the call to recog via validate_change will
1961 fail. So don't substitute constants here. Instead,
1962 do it when we emit the following insn.
1963
1964 For example, see the pyr.md file. That machine has signed and
1965 unsigned compares. The compare patterns must check the
1966 following branch insn to see what kind of compare to
1967 emit.
1968
1969 If the previous insn set CC0, substitute constants on it as
1970 well. */
1971 if (sets_cc0_p (PATTERN (copy)) != 0)
1972 cc0_insn = copy;
1973 else
1974 {
1975 if (cc0_insn)
1976 try_constants (cc0_insn, map);
1977 cc0_insn = 0;
1978 try_constants (copy, map);
1979 }
1980 #else
1981 try_constants (copy, map);
1982 #endif
1983 break;
1984
1985 case JUMP_INSN:
1986 if (GET_CODE (PATTERN (insn)) == RETURN
1987 || (GET_CODE (PATTERN (insn)) == PARALLEL
1988 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
1989 {
1990 if (local_return_label == 0)
1991 local_return_label = gen_label_rtx ();
1992 pattern = gen_jump (local_return_label);
1993 }
1994 else
1995 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1996
1997 copy = emit_jump_insn (pattern);
1998
1999 #ifdef HAVE_cc0
2000 if (cc0_insn)
2001 try_constants (cc0_insn, map);
2002 cc0_insn = 0;
2003 #endif
2004 try_constants (copy, map);
2005
2006 /* If this used to be a conditional jump insn whose branch
2007 direction is now known, we must do something special. */
2008 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
2009 {
2010 #ifdef HAVE_cc0
2011 /* The previous insn set cc0 for us. So delete it. */
2012 delete_insn (PREV_INSN (copy));
2013 #endif
2014
2015 /* If this is now a no-op, delete it. */
2016 if (map->last_pc_value == pc_rtx)
2017 {
2018 delete_insn (copy);
2019 copy = 0;
2020 }
2021 else
2022 /* Otherwise, this is an unconditional jump so we must put a
2023 BARRIER after it. We could do some dead code elimination
2024 here, but jump.c will do it just as well. */
2025 emit_barrier ();
2026 }
2027 break;
2028
2029 case CALL_INSN:
2030 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
2031 copy = emit_call_insn (pattern);
2032
2033 /* Because the USAGE information potentially contains objects other
2034 than hard registers, we need to copy it. */
2035 CALL_INSN_FUNCTION_USAGE (copy)
2036 = copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
2037
2038 #ifdef HAVE_cc0
2039 if (cc0_insn)
2040 try_constants (cc0_insn, map);
2041 cc0_insn = 0;
2042 #endif
2043 try_constants (copy, map);
2044
2045 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
2046 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2047 map->const_equiv_map[i] = 0;
2048 break;
2049
2050 case CODE_LABEL:
2051 copy = emit_label (get_label_from_map (map,
2052 CODE_LABEL_NUMBER (insn)));
2053 LABEL_NAME (copy) = LABEL_NAME (insn);
2054 map->const_age++;
2055 break;
2056
2057 case BARRIER:
2058 copy = emit_barrier ();
2059 break;
2060
2061 case NOTE:
2062 /* It is important to discard function-end and function-beg notes,
2063 so we have only one of each in the current function.
2064 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
2065 deleted these in the copy used for continuing compilation,
2066 not the copy used for inlining). */
2067 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
2068 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
2069 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
2070 {
2071 copy = emit_note (NOTE_SOURCE_FILE (insn),
2072 NOTE_LINE_NUMBER (insn));
2073 if (copy
2074 && (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG
2075 || NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_END))
2076 {
2077 rtx label
2078 = get_label_from_map (map, NOTE_BLOCK_NUMBER (copy));
2079
2080 /* We have to duplicate the handlers for the original region. */
2081 if (NOTE_LINE_NUMBER (copy) == NOTE_INSN_EH_REGION_BEG)
2082 {
2083 handler_info *ptr, *temp;
2084 int nr;
2085 nr = new_eh_region_entry (CODE_LABEL_NUMBER (label));
2086 ptr = get_first_handler (NOTE_BLOCK_NUMBER (copy));
2087 for ( ; ptr; ptr = ptr->next)
2088 {
2089 temp = get_new_handler (get_label_from_map (map,
2090 CODE_LABEL_NUMBER (ptr->handler_label)),
2091 ptr->type_info);
2092 add_new_handler (nr, temp);
2093 }
2094 }
2095
2096 /* We have to forward both of these to match the new exception
2097 region. */
2098 NOTE_BLOCK_NUMBER (copy) = CODE_LABEL_NUMBER (label);
2099 }
2100 }
2101 else
2102 copy = 0;
2103 break;
2104
2105 default:
2106 abort ();
2107 break;
2108 }
2109
2110 if (copy)
2111 RTX_INTEGRATED_P (copy) = 1;
2112
2113 map->insn_map[INSN_UID (insn)] = copy;
2114 }
2115
2116 /* Now copy the REG_NOTES. Increment const_age, so that only constants
2117 from parameters can be substituted in. These are the only ones that
2118 are valid across the entire function. */
2119 map->const_age++;
2120 for (insn = insns; insn; insn = NEXT_INSN (insn))
2121 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
2122 && map->insn_map[INSN_UID (insn)]
2123 && REG_NOTES (insn))
2124 {
2125 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
2126 /* We must also do subst_constants, in case one of our parameters
2127 has a const type and a constant value. */
2128 subst_constants (&tem, NULL_RTX, map);
2129 apply_change_group ();
2130 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
2131 }
2132
2133 if (local_return_label)
2134 emit_label (local_return_label);
2135
2136 /* Restore the stack pointer if we saved it above. */
2137 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
2138 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
2139
2140 /* Make copies of the decls of the symbols in the inline function, so that
2141 the copies of the variables get declared in the current function. Set
2142 up things so that lookup_static_chain knows to interpret registers
2143 in SAVE_EXPRs for TYPE_SIZEs as local. */
2144
2145 inline_function_decl = fndecl;
2146 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
2147 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
2148 inline_function_decl = 0;
2149
2150 /* End the scope containing the copied formal parameter variables
2151 and copied LABEL_DECLs. */
2152
2153 expand_end_bindings (getdecls (), 1, 1);
2154 block = poplevel (1, 1, 0);
2155 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
2156 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
2157 poplevel (0, 0, 0);
2158
2159 /* Must mark the line number note after inlined functions as a repeat, so
2160 that the test coverage code can avoid counting the call twice. This
2161 just tells the code to ignore the immediately following line note, since
2162 there already exists a copy of this note before the expanded inline call.
2163 This line number note is still needed for debugging though, so we can't
2164 delete it. */
2165 if (flag_test_coverage)
2166 emit_note (0, NOTE_REPEATED_LINE_NUMBER);
2167
2168 emit_line_note (input_filename, lineno);
2169
2170 if (structure_value_addr)
2171 {
2172 target = gen_rtx_MEM (TYPE_MODE (type),
2173 memory_address (TYPE_MODE (type),
2174 structure_value_addr));
2175 MEM_IN_STRUCT_P (target) = 1;
2176 }
2177
2178 /* Make sure we free the things we explicitly allocated with xmalloc. */
2179 if (real_label_map)
2180 free (real_label_map);
2181
2182 return target;
2183 }
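/* The insn-copying loop above runs in two passes on purpose: every copy
   is recorded in map->insn_map before any REG_NOTES are copied, because
   a note may refer to an insn that appears later in the stream.  The
   shape of that scheme, reduced to its essentials (copy_one_insn and
   remap_notes are hypothetical helpers):  */
#if 0
static void
copy_insns_two_pass (insns, map)
     rtx insns;
     struct inline_remap *map;
{
  rtx insn;

  /* Pass 1: copy each insn and remember the mapping.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    map->insn_map[INSN_UID (insn)] = copy_one_insn (insn, map);

  /* Pass 2: every insn a note could refer to now has a copy, so the
     cross-references can be rewritten safely.  */
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (REG_NOTES (insn) && map->insn_map[INSN_UID (insn)])
      REG_NOTES (map->insn_map[INSN_UID (insn)])
	= remap_notes (REG_NOTES (insn), map);
}
#endif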
2184 \f
2185 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
2186 push all of those decls and give each one the corresponding home. */
2187
2188 static void
2189 integrate_parm_decls (args, map, arg_vector)
2190 tree args;
2191 struct inline_remap *map;
2192 rtvec arg_vector;
2193 {
2194 register tree tail;
2195 register int i;
2196
2197 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
2198 {
2199 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
2200 TREE_TYPE (tail));
2201 rtx new_decl_rtl
2202 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
2203
2204 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
2205 /* We really should be setting DECL_INCOMING_RTL to something reasonable
2206 here, but that's going to require some more work. */
2207 /* DECL_INCOMING_RTL (decl) = ?; */
2208 /* These args would always appear unused, if not for this. */
2209 TREE_USED (decl) = 1;
2210 /* Prevent warning for shadowing with these. */
2211 DECL_ABSTRACT_ORIGIN (decl) = DECL_ORIGIN (tail);
2212 pushdecl (decl);
2213 /* Fully instantiate the address with the equivalent form so that the
2214 debugging information contains the actual register, instead of the
2215 virtual register. Do this by not passing an insn to
2216 subst_constants. */
2217 subst_constants (&new_decl_rtl, NULL_RTX, map);
2218 apply_change_group ();
2219 DECL_RTL (decl) = new_decl_rtl;
2220 }
2221 }
2222
2223 /* Given a BLOCK node LET, push decls and levels so as to construct in the
2224 current function a tree of contexts isomorphic to the one that is given.
2225
2226 LEVEL indicates how far down into the BLOCK tree is the node we are
2227 currently traversing. It is always zero except for recursive calls.
2228
2229 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
2230 registers used in the DECL_RTL field should be remapped. If it is zero,
2231 no mapping is necessary. */
2232
2233 static void
2234 integrate_decl_tree (let, level, map)
2235 tree let;
2236 int level;
2237 struct inline_remap *map;
2238 {
2239 tree t, node;
2240
2241 if (level > 0)
2242 pushlevel (0);
2243
2244 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2245 {
2246 tree d;
2247
2248 push_obstacks_nochange ();
2249 saveable_allocation ();
2250 d = copy_and_set_decl_abstract_origin (t);
2251 pop_obstacks ();
2252
2253 if (DECL_RTL (t) != 0)
2254 {
2255 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
2256 /* Fully instantiate the address with the equivalent form so that the
2257 debugging information contains the actual register, instead of the
2258 virtual register. Do this by not passing an insn to
2259 subst_constants. */
2260 subst_constants (&DECL_RTL (d), NULL_RTX, map);
2261 apply_change_group ();
2262 }
2263 /* These args would always appear unused, if not for this. */
2264 TREE_USED (d) = 1;
2265
2266 if (DECL_LANG_SPECIFIC (d))
2267 copy_lang_decl (d);
2268
2269 pushdecl (d);
2270 }
2271
2272 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2273 integrate_decl_tree (t, level + 1, map);
2274
2275 if (level > 0)
2276 {
2277 node = poplevel (1, 0, 0);
2278 if (node)
2279 {
2280 TREE_USED (node) = TREE_USED (let);
2281 BLOCK_ABSTRACT_ORIGIN (node) = let;
2282 }
2283 }
2284 }
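/* The recursion skeleton of integrate_decl_tree, with the decl copying
   elided: one binding level is pushed per BLOCK (except the outermost),
   visited depth-first, and popped into a fresh BLOCK whose abstract
   origin points back at the original.  A sketch only (mirror_block_tree
   is hypothetical):  */
#if 0
static void
mirror_block_tree (let, level)
     tree let;
     int level;
{
  tree sub;

  if (level > 0)
    pushlevel (0);		/* open a matching scope */

  /* ... copy BLOCK_VARS (let) into the current scope here ... */

  for (sub = BLOCK_SUBBLOCKS (let); sub; sub = TREE_CHAIN (sub))
    mirror_block_tree (sub, level + 1);

  if (level > 0)
    {
      tree node = poplevel (1, 0, 0);
      if (node)
	BLOCK_ABSTRACT_ORIGIN (node) = let;	/* remember the original */
    }
}
#endif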
2285
2286 /* Given a BLOCK node LET, search for all DECL_RTL fields, and pass them
2287 through save_constants. */
2288
2289 static void
2290 save_constants_in_decl_trees (let)
2291 tree let;
2292 {
2293 tree t;
2294
2295 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2296 if (DECL_RTL (t) != 0)
2297 save_constants (&DECL_RTL (t));
2298
2299 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2300 save_constants_in_decl_trees (t);
2301 }
2302 \f
2303 /* Create a new copy of an rtx.
2304 Recursively copies the operands of the rtx,
2305 except for those few rtx codes that are sharable.
2306
2307 We always return an rtx that is similar to that incoming rtx, with the
2308 exception of possibly changing a REG to a SUBREG or vice versa. No
2309 rtl is ever emitted.
2310
2311 Handle constants that need to be placed in the constant pool by
2312 calling `force_const_mem'. */
2313
2314 rtx
2315 copy_rtx_and_substitute (orig, map)
2316 register rtx orig;
2317 struct inline_remap *map;
2318 {
2319 register rtx copy, temp;
2320 register int i, j;
2321 register RTX_CODE code;
2322 register enum machine_mode mode;
2323 register char *format_ptr;
2324 int regno;
2325
2326 if (orig == 0)
2327 return 0;
2328
2329 code = GET_CODE (orig);
2330 mode = GET_MODE (orig);
2331
2332 switch (code)
2333 {
2334 case REG:
2335 /* If the stack pointer register shows up, it must be part of
2336 stack-adjustments (*not* because we eliminated the frame pointer!).
2337 Small hard registers are returned as-is. Pseudo-registers
2338 go through their `reg_map'. */
2339 regno = REGNO (orig);
2340 if (regno <= LAST_VIRTUAL_REGISTER)
2341 {
2342 /* Some hard registers are also mapped,
2343 but others are not translated. */
2344 if (map->reg_map[regno] != 0)
2345 return map->reg_map[regno];
2346
2347 /* If this is the virtual frame pointer, make space in current
2348 function's stack frame for the stack frame of the inline function.
2349
2350 Copy the address of this area into a pseudo. Map
2351 virtual_stack_vars_rtx to this pseudo and set up a constant
2352 equivalence for it to be the address. This will substitute the
2353 address into insns where it can be substituted and use the new
2354 pseudo where it can't. */
2355 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2356 {
2357 rtx loc, seq;
2358 int size = DECL_FRAME_SIZE (map->fndecl);
2359
2360 #ifdef FRAME_GROWS_DOWNWARD
2361 /* In this case, virtual_stack_vars_rtx points to one byte
2362 higher than the top of the frame area. So make sure we
2363 allocate a big enough chunk to keep the frame pointer
2364 aligned like a real one. */
2365 size = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2366 #endif
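/* Worked example of the rounding above, assuming
   BIGGEST_ALIGNMENT / BITS_PER_UNIT is 8: a 13-byte frame is padded to
   16 bytes, and an already-aligned 16-byte frame is left alone.  */
#if 0
assert (CEIL_ROUND (13, 8) == 16);
assert (CEIL_ROUND (16, 8) == 16);
#endif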
2367 start_sequence ();
2368 loc = assign_stack_temp (BLKmode, size, 1);
2369 loc = XEXP (loc, 0);
2370 #ifdef FRAME_GROWS_DOWNWARD
2371 /* In this case, virtual_stack_vars_rtx points to one byte
2372 higher than the top of the frame area. So compute the offset
2373 to one byte higher than our substitute frame. */
2374 loc = plus_constant (loc, size);
2375 #endif
2376 map->reg_map[regno] = temp
2377 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2378
2379 #ifdef STACK_BOUNDARY
2380 mark_reg_pointer (map->reg_map[regno],
2381 STACK_BOUNDARY / BITS_PER_UNIT);
2382 #endif
2383
2384 if (REGNO (temp) < map->const_equiv_map_size)
2385 {
2386 map->const_equiv_map[REGNO (temp)] = loc;
2387 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2388 }
2389
2390 seq = gen_sequence ();
2391 end_sequence ();
2392 emit_insn_after (seq, map->insns_at_start);
2393 return temp;
2394 }
2395 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2396 {
2397 /* Do the same for a block to contain any arguments referenced
2398 in memory. */
2399 rtx loc, seq;
2400 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2401
2402 start_sequence ();
2403 loc = assign_stack_temp (BLKmode, size, 1);
2404 loc = XEXP (loc, 0);
2405 /* When arguments grow downward, the virtual incoming
2406 args pointer points to the top of the argument block,
2407 so the remapped location better do the same. */
2408 #ifdef ARGS_GROW_DOWNWARD
2409 loc = plus_constant (loc, size);
2410 #endif
2411 map->reg_map[regno] = temp
2412 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2413
2414 #ifdef STACK_BOUNDARY
2415 mark_reg_pointer (map->reg_map[regno],
2416 STACK_BOUNDARY / BITS_PER_UNIT);
2417 #endif
2418
2419 if (REGNO (temp) < map->const_equiv_map_size)
2420 {
2421 map->const_equiv_map[REGNO (temp)] = loc;
2422 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2423 }
2424
2425 seq = gen_sequence ();
2426 end_sequence ();
2427 emit_insn_after (seq, map->insns_at_start);
2428 return temp;
2429 }
2430 else if (REG_FUNCTION_VALUE_P (orig))
2431 {
2432 /* This is a reference to the function return value. If
2433 the function doesn't have a return value, error. If the
2434 mode doesn't agree, make a SUBREG. */
2435 if (map->inline_target == 0)
2436 /* Must be unrolling loops or replicating code if we
2437 reach here, so return the register unchanged. */
2438 return orig;
2439 else if (mode != GET_MODE (map->inline_target))
2440 return gen_lowpart (mode, map->inline_target);
2441 else
2442 return map->inline_target;
2443 }
2444 return orig;
2445 }
2446 if (map->reg_map[regno] == NULL)
2447 {
2448 map->reg_map[regno] = gen_reg_rtx (mode);
2449 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2450 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2451 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2452 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2453
2454 if (map->regno_pointer_flag[regno])
2455 mark_reg_pointer (map->reg_map[regno],
2456 map->regno_pointer_align[regno]);
2457 }
2458 return map->reg_map[regno];
2459
2460 case SUBREG:
2461 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2462 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2463 if (GET_CODE (copy) == SUBREG)
2464 return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
2465 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2466 else if (GET_CODE (copy) == CONCAT)
2467 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2468 else
2469 return gen_rtx_SUBREG (GET_MODE (orig), copy,
2470 SUBREG_WORD (orig));
2471
2472 case ADDRESSOF:
2473 copy = gen_rtx_ADDRESSOF (mode,
2474 copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
2475 SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
2476 regno = ADDRESSOF_REGNO (orig);
2477 if (map->reg_map[regno])
2478 regno = REGNO (map->reg_map[regno]);
2479 else if (regno > LAST_VIRTUAL_REGISTER)
2480 {
2481 temp = XEXP (orig, 0);
2482 map->reg_map[regno] = gen_reg_rtx (GET_MODE (temp));
2483 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (temp);
2484 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (temp);
2485 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (temp);
2486 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2487
2488 if (map->regno_pointer_flag[regno])
2489 mark_reg_pointer (map->reg_map[regno],
2490 map->regno_pointer_align[regno]);
2491 regno = REGNO (map->reg_map[regno]);
2492 }
2493 ADDRESSOF_REGNO (copy) = regno;
2494 return copy;
2495
2496 case USE:
2497 case CLOBBER:
2498 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2499 to (use foo) if the original insn didn't have a subreg.
2500 Removing the subreg distorts the VAX movstrhi pattern
2501 by changing the mode of an operand. */
2502 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2503 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2504 copy = SUBREG_REG (copy);
2505 return gen_rtx_fmt_e (code, VOIDmode, copy);
2506
2507 case CODE_LABEL:
2508 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
2509 = LABEL_PRESERVE_P (orig);
2510 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
2511
2512 case LABEL_REF:
2513 copy = gen_rtx_LABEL_REF (mode,
2514 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2515 : get_label_from_map (map,
2516 CODE_LABEL_NUMBER (XEXP (orig, 0))));
2517 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2518
2519 /* The fact that this label was previously nonlocal does not mean
2520 it still is, so we must check if it is within the range of
2521 this function's labels. */
2522 LABEL_REF_NONLOCAL_P (copy)
2523 = (LABEL_REF_NONLOCAL_P (orig)
2524 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2525 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2526
2527 /* If we have made a nonlocal label local, it means that this
2528 inlined call will be referring to our nonlocal goto handler.
2529 So make sure we create one for this block; we normally would
2530 not since this is not otherwise considered a "call". */
2531 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2532 function_call_count++;
2533
2534 return copy;
2535
2536 case PC:
2537 case CC0:
2538 case CONST_INT:
2539 return orig;
2540
2541 case SYMBOL_REF:
2542 /* Symbols which represent the address of a label stored in the constant
2543 pool must be modified to point to a constant pool entry for the
2544 remapped label. Otherwise, symbols are returned unchanged. */
2545 if (CONSTANT_POOL_ADDRESS_P (orig))
2546 {
2547 rtx constant = get_pool_constant (orig);
2548 if (GET_CODE (constant) == LABEL_REF)
2549 return XEXP (force_const_mem (GET_MODE (orig),
2550 copy_rtx_and_substitute (constant,
2551 map)),
2552 0);
2553 }
2554
2555 return orig;
2556
2557 case CONST_DOUBLE:
2558 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2559 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2560 duplicate of a CONST_DOUBLE we have already seen. */
2561 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2562 {
2563 REAL_VALUE_TYPE d;
2564
2565 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2566 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2567 }
2568 else
2569 return immed_double_const (CONST_DOUBLE_LOW (orig),
2570 CONST_DOUBLE_HIGH (orig), VOIDmode);
2571
2572 case CONST:
2573 /* Make new constant pool entry for a constant
2574 that was in the pool of the inline function. */
2575 if (RTX_INTEGRATED_P (orig))
2576 {
2577 /* If this was an address of a constant pool entry that itself
2578 had to be placed in the constant pool, it might not be a
2579 valid address. So the recursive call below might turn it
2580 into a register. In that case, it isn't a constant any
2581 more, so return it. This has the potential of changing a
2582 MEM into a REG, but we'll assume that it is safe. */
2583 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2584 if (! CONSTANT_P (temp))
2585 return temp;
2586 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2587 }
2588 break;
2589
2590 case ADDRESS:
2591 /* If from constant pool address, make new constant pool entry and
2592 return its address. */
2593 if (! RTX_INTEGRATED_P (orig))
2594 abort ();
2595
2596 temp
2597 = force_const_mem (GET_MODE (XEXP (orig, 0)),
2598 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
2599 map));
2600
2601 #if 0
2602 /* Legitimizing the address here is incorrect.
2603
2604 The only ADDRESS rtx's that can reach here are ones created by
2605 save_constants. Hence the operand of the ADDRESS is always valid
2606 in this position of the instruction, since the original rtx without
2607 the ADDRESS was valid.
2608
2609 The reason we don't legitimize the address here is that on the
2610 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2611 This code forces the operand of the address to a register, which
2612 fails because we cannot take the HIGH part of a register.
2613
2614 Also, change_address may create new registers. These registers
2615 will not have valid reg_map entries. This can cause try_constants()
2616 to fail because it assumes that all registers in the rtx have valid
2617 reg_map entries, and it may end up replacing one of these new
2618 registers with junk. */
2619
2620 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2621 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2622 #endif
2623
2624 temp = XEXP (temp, 0);
2625
2626 #ifdef POINTERS_EXTEND_UNSIGNED
2627 if (GET_MODE (temp) != GET_MODE (orig))
2628 temp = convert_memory_address (GET_MODE (orig), temp);
2629 #endif
2630
2631 return temp;
2632
2633 case ASM_OPERANDS:
2634 /* If a single asm insn contains multiple output operands
2635 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2636 We must make sure that the copied insn continues to share it. */
2637 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2638 {
2639 copy = rtx_alloc (ASM_OPERANDS);
2640 copy->volatil = orig->volatil;
2641 XSTR (copy, 0) = XSTR (orig, 0);
2642 XSTR (copy, 1) = XSTR (orig, 1);
2643 XINT (copy, 2) = XINT (orig, 2);
2644 XVEC (copy, 3) = map->copy_asm_operands_vector;
2645 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2646 XSTR (copy, 5) = XSTR (orig, 5);
2647 XINT (copy, 6) = XINT (orig, 6);
2648 return copy;
2649 }
2650 break;
2651
2652 case CALL:
2653 /* This is given special treatment because the first
2654 operand of a CALL is a (MEM ...) which may get
2655 forced into a register for cse. This is undesirable
2656 if function-address cse isn't wanted or if we won't do cse. */
2657 #ifndef NO_FUNCTION_CSE
2658 if (! (optimize && ! flag_no_function_cse))
2659 #endif
2660 return gen_rtx_CALL (GET_MODE (orig),
2661 gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
2662 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2663 copy_rtx_and_substitute (XEXP (orig, 1), map));
2664 break;
2665
2666 #if 0
2667 /* Must be ifdefed out for loop unrolling to work. */
2668 case RETURN:
2669 abort ();
2670 #endif
2671
2672 case SET:
2673 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2674 Adjust the setting by the offset of the area we made.
2675 If the nonlocal goto is into the current function,
2676 this will result in unnecessarily bad code, but should work. */
2677 if (SET_DEST (orig) == virtual_stack_vars_rtx
2678 || SET_DEST (orig) == virtual_incoming_args_rtx)
2679 {
2680 /* In case a translation hasn't occurred already, make one now. */
2681 rtx equiv_reg;
2682 rtx equiv_loc;
2683 HOST_WIDE_INT loc_offset;
2684
2685 copy_rtx_and_substitute (SET_DEST (orig), map);
2686 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
2687 equiv_loc = map->const_equiv_map[REGNO (equiv_reg)];
2688 loc_offset
2689 = GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
2690 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
2691 force_operand
2692 (plus_constant
2693 (copy_rtx_and_substitute (SET_SRC (orig), map),
2694 - loc_offset),
2695 NULL_RTX));
2696 }
2697 break;
2698
2699 case MEM:
2700 copy = rtx_alloc (MEM);
2701 PUT_MODE (copy, mode);
2702 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2703 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2704 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2705 MEM_ALIAS_SET (copy) = MEM_ALIAS_SET (orig);
2706
2707 /* If doing function inlining, this MEM might not be const in the
2708 function that it is being inlined into, and thus may not be
2709 unchanging after function inlining. Constant pool references are
2710 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
2711 for them. */
2712 if (! map->integrating)
2713 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
2714
2715 return copy;
2716
2717 default:
2718 break;
2719 }
2720
2721 copy = rtx_alloc (code);
2722 PUT_MODE (copy, mode);
2723 copy->in_struct = orig->in_struct;
2724 copy->volatil = orig->volatil;
2725 copy->unchanging = orig->unchanging;
2726
2727 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2728
2729 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2730 {
2731 switch (*format_ptr++)
2732 {
2733 case '0':
2734 XEXP (copy, i) = XEXP (orig, i);
2735 break;
2736
2737 case 'e':
2738 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2739 break;
2740
2741 case 'u':
2742 /* Change any references to old-insns to point to the
2743 corresponding copied insns. */
2744 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2745 break;
2746
2747 case 'E':
2748 XVEC (copy, i) = XVEC (orig, i);
2749 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2750 {
2751 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2752 for (j = 0; j < XVECLEN (copy, i); j++)
2753 XVECEXP (copy, i, j)
2754 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2755 }
2756 break;
2757
2758 case 'w':
2759 XWINT (copy, i) = XWINT (orig, i);
2760 break;
2761
2762 case 'i':
2763 XINT (copy, i) = XINT (orig, i);
2764 break;
2765
2766 case 's':
2767 XSTR (copy, i) = XSTR (orig, i);
2768 break;
2769
2770 default:
2771 abort ();
2772 }
2773 }
2774
2775 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2776 {
2777 map->orig_asm_operands_vector = XVEC (orig, 3);
2778 map->copy_asm_operands_vector = XVEC (copy, 3);
2779 map->copy_asm_constraints_vector = XVEC (copy, 4);
2780 }
2781
2782 return copy;
2783 }
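/* The fallback path at the end of copy_rtx_and_substitute needs no
   per-code knowledge: GET_RTX_FORMAT describes each operand with one
   character and a single loop dispatches on it.  The same technique,
   reduced to a plain deep copy with no remapping (deep_copy_rtx is a
   hypothetical name):  */
#if 0
static rtx
deep_copy_rtx (orig)
     register rtx orig;
{
  register rtx copy = rtx_alloc (GET_CODE (orig));
  register char *fmt = GET_RTX_FORMAT (GET_CODE (orig));
  register int i;

  PUT_MODE (copy, GET_MODE (orig));
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (orig)); i++)
    switch (fmt[i])
      {
      case 'e':			/* sub-expression: recurse */
	XEXP (copy, i) = deep_copy_rtx (XEXP (orig, i));
	break;
      case 'i':			/* plain int: copy verbatim */
	XINT (copy, i) = XINT (orig, i);
	break;
      case 'w':			/* wide int: copy verbatim */
	XWINT (copy, i) = XWINT (orig, i);
	break;
      case 's':			/* string: share the pointer */
	XSTR (copy, i) = XSTR (orig, i);
	break;
      default:			/* '0', 'u', 'E': see the real loop */
	XEXP (copy, i) = XEXP (orig, i);
	break;
      }
  return copy;
}
#endif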
2784 \f
2785 /* Substitute known constant values into INSN, if that is valid. */
2786
2787 void
2788 try_constants (insn, map)
2789 rtx insn;
2790 struct inline_remap *map;
2791 {
2792 int i;
2793
2794 map->num_sets = 0;
2795 subst_constants (&PATTERN (insn), insn, map);
2796
2797 /* Apply the changes if they are valid; otherwise discard them. */
2798 apply_change_group ();
2799
2800 /* Show we don't know the value of anything stored or clobbered. */
2801 note_stores (PATTERN (insn), mark_stores);
2802 map->last_pc_value = 0;
2803 #ifdef HAVE_cc0
2804 map->last_cc0_value = 0;
2805 #endif
2806
2807 /* Set up any constant equivalences made in this insn. */
2808 for (i = 0; i < map->num_sets; i++)
2809 {
2810 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2811 {
2812 int regno = REGNO (map->equiv_sets[i].dest);
2813
2814 if (regno < map->const_equiv_map_size
2815 && (map->const_equiv_map[regno] == 0
2816 /* The following clause is a hack to make the case work where GNU C++
2817 reassigns a variable to make cse work right. */
2818 || ! rtx_equal_p (map->const_equiv_map[regno],
2819 map->equiv_sets[i].equiv)))
2820 {
2821 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2822 map->const_age_map[regno] = map->const_age;
2823 }
2824 }
2825 else if (map->equiv_sets[i].dest == pc_rtx)
2826 map->last_pc_value = map->equiv_sets[i].equiv;
2827 #ifdef HAVE_cc0
2828 else if (map->equiv_sets[i].dest == cc0_rtx)
2829 map->last_cc0_value = map->equiv_sets[i].equiv;
2830 #endif
2831 }
2832 }
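/* try_constants leans on the change-group protocol from recog.c: each
   substitution is queued by passing 1 for the in_group argument of
   validate_change, and apply_change_group either commits the whole
   group (if the rewritten insn is still recognizable) or backs every
   queued change out.  In miniature (loc and new_rtx are hypothetical):  */
#if 0
validate_change (insn, loc, new_rtx, 1);	/* queue, don't commit */
if (apply_change_group ())
  ;	/* all queued changes kept */
else
  ;	/* all queued changes undone; insn left exactly as it was */
#endif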
2833 \f
2834 /* Substitute known constants for pseudo regs in the contents of LOC,
2835 which are part of INSN.
2836 If INSN is zero, the substitution should always be done (this is used to
2837 update DECL_RTL).
2838 These changes are taken out by try_constants if the result is not valid.
2839
2840 Note that we are more concerned with determining when the result of a SET
2841 is a constant, for further propagation, than actually inserting constants
2842 into insns; cse will do the latter task better.
2843
2844 This function is also used to adjust the address of items previously addressed
2845 via the virtual stack variable or virtual incoming arguments registers. */
2846
2847 static void
2848 subst_constants (loc, insn, map)
2849 rtx *loc;
2850 rtx insn;
2851 struct inline_remap *map;
2852 {
2853 rtx x = *loc;
2854 register int i;
2855 register enum rtx_code code;
2856 register char *format_ptr;
2857 int num_changes = num_validated_changes ();
2858 rtx new = 0;
2859 enum machine_mode op0_mode;
2860
2861 code = GET_CODE (x);
2862
2863 switch (code)
2864 {
2865 case PC:
2866 case CONST_INT:
2867 case CONST_DOUBLE:
2868 case SYMBOL_REF:
2869 case CONST:
2870 case LABEL_REF:
2871 case ADDRESS:
2872 return;
2873
2874 #ifdef HAVE_cc0
2875 case CC0:
2876 validate_change (insn, loc, map->last_cc0_value, 1);
2877 return;
2878 #endif
2879
2880 case USE:
2881 case CLOBBER:
2882 /* The only thing we can do with a USE or CLOBBER is possibly do
2883 some substitutions in a MEM within it. */
2884 if (GET_CODE (XEXP (x, 0)) == MEM)
2885 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2886 return;
2887
2888 case REG:
2889 /* Substitute for parms and known constants. Don't replace
2890 hard regs used as user variables with constants. */
2891 {
2892 int regno = REGNO (x);
2893
2894 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2895 && regno < map->const_equiv_map_size
2896 && map->const_equiv_map[regno] != 0
2897 && map->const_age_map[regno] >= map->const_age)
2898 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2899 return;
2900 }
2901
2902 case SUBREG:
2903 /* SUBREG applied to something other than a reg
2904 should be treated as ordinary, since that must
2905 be a special hack and we don't know how to treat it specially.
2906 Consider for example mulsidi3 in m68k.md.
2907 Ordinary SUBREG of a REG needs this special treatment. */
2908 if (GET_CODE (SUBREG_REG (x)) == REG)
2909 {
2910 rtx inner = SUBREG_REG (x);
2911 rtx new = 0;
2912
2913 /* We can't call subst_constants on &SUBREG_REG (x) because any
2914 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2915 see what is inside, try to form the new SUBREG and see if that is
2916 valid. We handle two cases: extracting a full word in an
2917 integral mode and extracting the low part. */
2918 subst_constants (&inner, NULL_RTX, map);
2919
2920 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2921 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2922 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2923 new = operand_subword (inner, SUBREG_WORD (x), 0,
2924 GET_MODE (SUBREG_REG (x)));
2925
2926 cancel_changes (num_changes);
2927 if (new == 0 && subreg_lowpart_p (x))
2928 new = gen_lowpart_common (GET_MODE (x), inner);
2929
2930 if (new)
2931 validate_change (insn, loc, new, 1);
2932
2933 return;
2934 }
2935 break;
2936
2937 case MEM:
2938 subst_constants (&XEXP (x, 0), insn, map);
2939
2940 /* If a memory address got spoiled, change it back. */
2941 if (insn != 0 && num_validated_changes () != num_changes
2942 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2943 cancel_changes (num_changes);
2944 return;
2945
2946 case SET:
2947 {
2948 /* Substitute constants in our source, and in any arguments to a
2949 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
2950 itself. */
2951 rtx *dest_loc = &SET_DEST (x);
2952 rtx dest = *dest_loc;
2953 rtx src, tem;
2954
2955 subst_constants (&SET_SRC (x), insn, map);
2956 src = SET_SRC (x);
2957
2958 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2959 || GET_CODE (*dest_loc) == SUBREG
2960 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2961 {
2962 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2963 {
2964 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2965 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2966 }
2967 dest_loc = &XEXP (*dest_loc, 0);
2968 }
2969
2970 /* Do substitute in the address of a destination in memory. */
2971 if (GET_CODE (*dest_loc) == MEM)
2972 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2973
2974 /* Check for the case where DEST is a SUBREG, both it and the underlying
2975 register are at most one word, and the SUBREG has the wider mode.
2976 In that case, we are really setting the underlying register to the
2977 source converted to the mode of DEST. So indicate that. */
2978 if (GET_CODE (dest) == SUBREG
2979 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2980 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2981 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2982 <= GET_MODE_SIZE (GET_MODE (dest)))
2983 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2984 src)))
2985 src = tem, dest = SUBREG_REG (dest);
2986
2987 /* If storing a recognizable value save it for later recording. */
2988 if ((map->num_sets < MAX_RECOG_OPERANDS)
2989 && (CONSTANT_P (src)
2990 || (GET_CODE (src) == REG
2991 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2992 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2993 || (GET_CODE (src) == PLUS
2994 && GET_CODE (XEXP (src, 0)) == REG
2995 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2996 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2997 && CONSTANT_P (XEXP (src, 1)))
2998 || GET_CODE (src) == COMPARE
2999 #ifdef HAVE_cc0
3000 || dest == cc0_rtx
3001 #endif
3002 || (dest == pc_rtx
3003 && (src == pc_rtx || GET_CODE (src) == RETURN
3004 || GET_CODE (src) == LABEL_REF))))
3005 {
3006 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
3007 it will cause us to save the COMPARE with any constants
3008 substituted, which is what we want for later. */
3009 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
3010 map->equiv_sets[map->num_sets++].dest = dest;
3011 }
3012 }
3013 return;
3014
3015 default:
3016 break;
3017 }
3018
3019 format_ptr = GET_RTX_FORMAT (code);
3020
3021 /* If the first operand is an expression, save its mode for later. */
3022 if (*format_ptr == 'e')
3023 op0_mode = GET_MODE (XEXP (x, 0));
3024
3025 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3026 {
3027 switch (*format_ptr++)
3028 {
3029 case '0':
3030 break;
3031
3032 case 'e':
3033 if (XEXP (x, i))
3034 subst_constants (&XEXP (x, i), insn, map);
3035 break;
3036
3037 case 'u':
3038 case 'i':
3039 case 's':
3040 case 'w':
3041 break;
3042
3043 case 'E':
3044 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
3045 {
3046 int j;
3047 for (j = 0; j < XVECLEN (x, i); j++)
3048 subst_constants (&XVECEXP (x, i, j), insn, map);
3049 }
3050 break;
3051
3052 default:
3053 abort ();
3054 }
3055 }
3056
3057 /* If this is a commutative operation, move a constant to the second
3058 operand unless the second operand is already a CONST_INT. */
3059 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
3060 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3061 {
3062 rtx tem = XEXP (x, 0);
3063 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3064 validate_change (insn, &XEXP (x, 1), tem, 1);
3065 }
3066
3067 /* Simplify the expression in case we put in some constants. */
3068 switch (GET_RTX_CLASS (code))
3069 {
3070 case '1':
3071 new = simplify_unary_operation (code, GET_MODE (x),
3072 XEXP (x, 0), op0_mode);
3073 break;
3074
3075 case '<':
3076 {
3077 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
3078 if (op_mode == VOIDmode)
3079 op_mode = GET_MODE (XEXP (x, 1));
3080 new = simplify_relational_operation (code, op_mode,
3081 XEXP (x, 0), XEXP (x, 1));
3082 #ifdef FLOAT_STORE_FLAG_VALUE
3083 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3084 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3085 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
3086 GET_MODE (x)));
3087 #endif
3088 break;
3089 }
3090
3091 case '2':
3092 case 'c':
3093 new = simplify_binary_operation (code, GET_MODE (x),
3094 XEXP (x, 0), XEXP (x, 1));
3095 break;
3096
3097 case 'b':
3098 case '3':
3099 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
3100 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
3101 break;
3102 }
3103
3104 if (new)
3105 validate_change (insn, loc, new, 1);
3106 }
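/* Once substitution has made every operand of a binary code constant,
   the simplify_* calls above fold the expression on the spot, before
   the insn is ever re-recognized.  Folding in isolation (SImode chosen
   arbitrarily for the sketch):  */
#if 0
rtx sum = simplify_binary_operation (PLUS, SImode,
				     GEN_INT (4), GEN_INT (5));
/* sum is (const_int 9), and validate_change queues it for LOC.  */
#endif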
3107
3108 /* Show that registers modified no longer contain known constants. We are
3109 called from note_stores with parts of the new insn. */
3110
3111 void
3112 mark_stores (dest, x)
3113 rtx dest;
3114 rtx x ATTRIBUTE_UNUSED;
3115 {
3116 int regno = -1;
3117 enum machine_mode mode;
3118
3119 /* DEST is always the innermost thing set, except in the case of
3120 SUBREGs of hard registers. */
3121
3122 if (GET_CODE (dest) == REG)
3123 regno = REGNO (dest), mode = GET_MODE (dest);
3124 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
3125 {
3126 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
3127 mode = GET_MODE (SUBREG_REG (dest));
3128 }
3129
3130 if (regno >= 0)
3131 {
3132 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
3133 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
3134 int i;
3135
3136 /* Ignore virtual stack var or virtual arg register since those
3137 are handled separately. */
3138 if (regno != VIRTUAL_INCOMING_ARGS_REGNUM
3139 && regno != VIRTUAL_STACK_VARS_REGNUM)
3140 for (i = regno; i <= last_reg; i++)
3141 if (i < global_const_equiv_map_size)
3142 global_const_equiv_map[i] = 0;
3143 }
3144 }
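/* mark_stores is only reached through note_stores, which invokes it
   once per SET or CLOBBER in a pattern; the global_const_equiv_map
   pointers must be set up first, as is done above before the insns are
   copied.  Typical call shape:  */
#if 0
global_const_equiv_map = map->const_equiv_map;
global_const_equiv_map_size = map->const_equiv_map_size;
note_stores (PATTERN (insn), mark_stores);
#endif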
3145 \f
3146 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
3147 pointed to by PX, they represent constants in the constant pool.
3148 Replace these with a new memory reference obtained from force_const_mem.
3149 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
3150 address of a constant pool entry. Replace them with the address of
3151 a new constant pool entry obtained from force_const_mem. */
3152
3153 static void
3154 restore_constants (px)
3155 rtx *px;
3156 {
3157 rtx x = *px;
3158 int i, j;
3159 char *fmt;
3160
3161 if (x == 0)
3162 return;
3163
3164 if (GET_CODE (x) == CONST_DOUBLE)
3165 {
3166 /* We have to make a new CONST_DOUBLE to ensure that we account for
3167 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
3168 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3169 {
3170 REAL_VALUE_TYPE d;
3171
3172 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
3173 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
3174 }
3175 else
3176 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
3177 VOIDmode);
3178 }
3179
3180 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
3181 {
3182 restore_constants (&XEXP (x, 0));
3183 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
3184 }
3185 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
3186 {
3187 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
3188 rtx new = XEXP (SUBREG_REG (x), 0);
3189
3190 restore_constants (&new);
3191 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
3192 PUT_MODE (new, GET_MODE (x));
3193 *px = validize_mem (new);
3194 }
3195 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
3196 {
3197 rtx new = XEXP (force_const_mem (GET_MODE (XEXP (x, 0)),
3198 XEXP (XEXP (x, 0), 0)),
3199 0);
3200
3201 #ifdef POINTERS_EXTEND_UNSIGNED
3202 if (GET_MODE (new) != GET_MODE (x))
3203 new = convert_memory_address (GET_MODE (x), new);
3204 #endif
3205
3206 *px = new;
3207 }
3208 else
3209 {
3210 fmt = GET_RTX_FORMAT (GET_CODE (x));
3211 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
3212 {
3213 switch (*fmt++)
3214 {
3215 case 'E':
3216 for (j = 0; j < XVECLEN (x, i); j++)
3217 restore_constants (&XVECEXP (x, i, j));
3218 break;
3219
3220 case 'e':
3221 restore_constants (&XEXP (x, i));
3222 break;
3223 }
3224 }
3225 }
3226 }
3227 \f
3228 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
3229 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
3230 that it points to the node itself, thus indicating that the node is its
3231 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
3232 the given node is NULL, recursively descend the decl/block tree of
3233 which it is the root, and for each other ..._DECL or BLOCK node contained
3234 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
3235 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
3236 values to point to themselves. */
3237
3238 static void
3239 set_block_origin_self (stmt)
3240 register tree stmt;
3241 {
3242 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
3243 {
3244 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
3245
3246 {
3247 register tree local_decl;
3248
3249 for (local_decl = BLOCK_VARS (stmt);
3250 local_decl != NULL_TREE;
3251 local_decl = TREE_CHAIN (local_decl))
3252 set_decl_origin_self (local_decl); /* Potential recursion. */
3253 }
3254
3255 {
3256 register tree subblock;
3257
3258 for (subblock = BLOCK_SUBBLOCKS (stmt);
3259 subblock != NULL_TREE;
3260 subblock = BLOCK_CHAIN (subblock))
3261 set_block_origin_self (subblock); /* Recurse. */
3262 }
3263 }
3264 }
3265
3266 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
3267 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
3268 node so that it points to the node itself, thus indicating that the
3269 node represents its own (abstract) origin. Additionally, if the
3270 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
3271 the decl/block tree of which the given node is the root, and for
3272 each other ..._DECL or BLOCK node contained therein whose
3273 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
3274 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
3275 point to themselves. */
3276
3277 static void
3278 set_decl_origin_self (decl)
3279 register tree decl;
3280 {
3281 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
3282 {
3283 DECL_ABSTRACT_ORIGIN (decl) = decl;
3284 if (TREE_CODE (decl) == FUNCTION_DECL)
3285 {
3286 register tree arg;
3287
3288 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3289 DECL_ABSTRACT_ORIGIN (arg) = arg;
3290 if (DECL_INITIAL (decl) != NULL_TREE
3291 && DECL_INITIAL (decl) != error_mark_node)
3292 set_block_origin_self (DECL_INITIAL (decl));
3293 }
3294 }
3295 }
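/* set_block_origin_self and set_decl_origin_self recurse into one
   another: a BLOCK visits its local decls, and a FUNCTION_DECL visits
   its arguments and its body BLOCK, so a single call at the root stamps
   every node beneath it as its own abstract origin.  Call shape (fndecl
   stands for any function decl):  */
#if 0
set_decl_origin_self (fndecl);	/* marks fndecl, its arguments, and
				   every BLOCK and decl beneath it */
#endif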
3296 \f
3297 /* Given a pointer to some BLOCK node, and a boolean value to set the
3298 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
3299 the given block, and for all local decls and all local sub-blocks
3300 (recursively) which are contained therein. */
3301
3302 static void
3303 set_block_abstract_flags (stmt, setting)
3304 register tree stmt;
3305 register int setting;
3306 {
3307 register tree local_decl;
3308 register tree subblock;
3309
3310 BLOCK_ABSTRACT (stmt) = setting;
3311
3312 for (local_decl = BLOCK_VARS (stmt);
3313 local_decl != NULL_TREE;
3314 local_decl = TREE_CHAIN (local_decl))
3315 set_decl_abstract_flags (local_decl, setting);
3316
3317 for (subblock = BLOCK_SUBBLOCKS (stmt);
3318 subblock != NULL_TREE;
3319 subblock = BLOCK_CHAIN (subblock))
3320 set_block_abstract_flags (subblock, setting);
3321 }
3322
3323 /* Given a pointer to some ..._DECL node, and a boolean value to set the
3324 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
3325 given decl, and (in the case where the decl is a FUNCTION_DECL) also
3326 set the abstract flags for all of the parameters, local vars, local
3327 blocks and sub-blocks (recursively) to the same setting. */
3328
3329 void
3330 set_decl_abstract_flags (decl, setting)
3331 register tree decl;
3332 register int setting;
3333 {
3334 DECL_ABSTRACT (decl) = setting;
3335 if (TREE_CODE (decl) == FUNCTION_DECL)
3336 {
3337 register tree arg;
3338
3339 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3340 DECL_ABSTRACT (arg) = setting;
3341 if (DECL_INITIAL (decl) != NULL_TREE
3342 && DECL_INITIAL (decl) != error_mark_node)
3343 set_block_abstract_flags (DECL_INITIAL (decl), setting);
3344 }
3345 }
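
/* A minimal usage sketch, not part of this file: a debug-info writer
might mark a function abstract around emitting its abstract-instance
description, then restore the flags.  emit_abstract_description is a
hypothetical helper, named here only for illustration; the block is
kept under #if 0 so that it is not compiled.  */
#if 0
static void
example_emit_abstract (fndecl)
tree fndecl;
{
set_decl_abstract_flags (fndecl, 1); /* Mark FNDECL and its children. */
emit_abstract_description (fndecl); /* Hypothetical debug output. */
set_decl_abstract_flags (fndecl, 0); /* Clear the flags again. */
}
#endif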
3346 \f
3347 /* Output the assembly language code for the function FNDECL
3348 from its DECL_SAVED_INSNS. Used for inline functions that are output
3349 at the end of compilation instead of where they appeared in the source. */
3350
3351 void
3352 output_inline_function (fndecl)
3353 tree fndecl;
3354 {
3355 rtx head;
3356 rtx last;
3357
3358 /* Things we allocate from here on are part of this function, not
3359 permanent. */
3360 temporary_allocation ();
3361
3362 head = DECL_SAVED_INSNS (fndecl);
3363 current_function_decl = fndecl;
3364
3365 /* This call is only used to initialize global variables. */
3366 init_function_start (fndecl, "lossage", 1);
3367
3368 /* Redo parameter determinations in case the FUNCTION_...
3369 macros took machine-specific actions that need to be redone. */
3370 assign_parms (fndecl, 1);
3371
3372 /* Set stack frame size. */
3373 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3374
3375 /* The first value is a bit of a lie (the array may be larger), but
3376 that doesn't matter much, and it isn't worth saving the actual bound. */
3377 reg_rtx_no = regno_pointer_flag_length = MAX_REGNUM (head);
3378 regno_reg_rtx = (rtx *) INLINE_REGNO_REG_RTX (head);
3379 regno_pointer_flag = INLINE_REGNO_POINTER_FLAG (head);
3380 regno_pointer_align = INLINE_REGNO_POINTER_ALIGN (head);
3381 max_parm_reg = MAX_PARMREG (head);
3382 parm_reg_stack_loc = (rtx *) PARMREG_STACK_LOC (head);
3383
3384 stack_slot_list = STACK_SLOT_LIST (head);
3385 forced_labels = FORCED_LABELS (head);
3386
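/* FUNCTION_FLAGS is a bitmask that was packed together when this
function's insns were saved for inlining; unpack each bit back into
its corresponding current_function_... variable.  */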
3387 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3388 current_function_calls_alloca = 1;
3389
3390 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3391 current_function_calls_setjmp = 1;
3392
3393 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3394 current_function_calls_longjmp = 1;
3395
3396 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3397 current_function_returns_struct = 1;
3398
3399 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3400 current_function_returns_pcc_struct = 1;
3401
3402 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3403 current_function_needs_context = 1;
3404
3405 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3406 current_function_has_nonlocal_label = 1;
3407
3408 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3409 current_function_returns_pointer = 1;
3410
3411 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3412 current_function_uses_const_pool = 1;
3413
3414 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3415 current_function_uses_pic_offset_table = 1;
3416
3417 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3418 current_function_pops_args = POPS_ARGS (head);
3419
3420 /* This is the only thing that the expand_function_end call which used
3421 to be here actually did, and that call can cause problems. */
3422 immediate_size_expand--;
3423
3424 /* Find last insn and rebuild the constant pool. */
3425 for (last = FIRST_PARM_INSN (head);
3426 NEXT_INSN (last); last = NEXT_INSN (last))
3427 {
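/* Class 'i' covers the real insns (INSN, JUMP_INSN and CALL_INSN),
whose patterns and REG_NOTES may refer to saved constants.  */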
3428 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3429 {
3430 restore_constants (&PATTERN (last));
3431 restore_constants (&REG_NOTES (last));
3432 }
3433 }
3434
3435 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3436 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3437
3438 /* We must have already output DWARF debugging information for the
3439 original (abstract) inline function declaration/definition, so
3440 we want to make sure that the debugging information we generate
3441 for this special instance of the inline function refers back to
3442 the information we already generated. To make sure that happens,
3443 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3444 node (and for all of the local ..._DECL nodes which are its children)
3445 so that they all point to themselves. */
3446
3447 set_decl_origin_self (fndecl);
3448
3449 /* We're not deferring this any longer. */
3450 DECL_DEFER_OUTPUT (fndecl) = 0;
3451
3452 /* We can't inline this anymore. */
3453 DECL_INLINE (fndecl) = 0;
3454
3455 /* Compile this function all the way down to assembly code. */
3456 rest_of_compilation (fndecl);
3457
3458 current_function_decl = 0;
3459 }